Example #1
0
def prune(candidates, src, dst, ui):
    def linkfilter(src, dst, st):
        try:
            ts = os.stat(dst)
        except OSError:
            # Destination doesn't have this file?
            return False
        if util.samefile(src, dst):
            return False
        if not util.samedevice(src, dst):
            # No point in continuing
            raise error.Abort(
                _('source and destination are on different devices'))
        if st.st_size != ts.st_size:
            return False
        return st

    targets = []
    total = len(candidates)
    pos = 0
    for fn, st in candidates:
        pos += 1
        srcpath = os.path.join(src, fn)
        tgt = os.path.join(dst, fn)
        ts = linkfilter(srcpath, tgt, st)
        if not ts:
            ui.debug('not linkable: %s\n' % fn)
            continue
        targets.append((fn, ts.st_size))
        ui.progress(_('pruning'), pos, fn, _('files'), total)

    ui.progress(_('pruning'), None)
    ui.status(_('pruned down to %d probably relinkable files\n') % len(targets))
    return targets
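A hedged driver sketch for context: prune() expects candidates as (relative path, stat result) pairs, such as those produced by the collect() walker shown in Example #4. The repository and ui names below are illustrative, not part of the extension itself.

# Illustrative only; 'collect' is the walker from Example #4, and srcrepo,
# dstrepo and ui are assumed to be supplied by the caller.
candidates = collect(srcrepo, ui)
targets = prune(candidates, srcrepo.store.path, dstrepo.store.path, ui)
for fn, size in targets:
    ui.note('relink candidate: %s (%d bytes)\n' % (fn, size))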
Example #2
0
    def notify(self, ids, committer):
        '''tell bugzilla to send mail.'''

        self.ui.status(_('telling bugzilla to send mail:\n'))
        (user, userid) = self.get_bugzilla_user(committer)
        for id in ids:
            self.ui.status(_('  bug %s\n') % id)
            cmdfmt = self.ui.config('bugzilla', 'notify', self.default_notify)
            bzdir = self.ui.config('bugzilla', 'bzdir', '/var/www/html/bugzilla')
            try:
                # Backwards-compatible with old notify string, which
                # took one string. This will throw with a new format
                # string.
                cmd = cmdfmt % id
            except TypeError:
                cmd = cmdfmt % {'bzdir': bzdir, 'id': id, 'user': user}
            self.ui.note(_('running notify command %s\n') % cmd)
            fp = util.popen('(%s) 2>&1' % cmd)
            out = fp.read()
            ret = fp.close()
            if ret:
                self.ui.warn(out)
                raise util.Abort(_('bugzilla notify command %s') %
                                 util.explain_exit(ret)[0])
        self.ui.status(_('done\n'))
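The try/except above accepts two bugzilla.notify formats. A hedged hgrc illustration (the script path and address are invented):

[bugzilla]
# old style: a single positional %s receives the bug id
notify = cd /var/www/html/bugzilla && ./processmail %s nobody@example.com
# new style (shown commented out): named substitutions for %(bzdir)s,
# %(id)s and %(user)s
# notify = cd %(bzdir)s && ./processmail %(id)s %(user)s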
Example #3
0
    def from_ui(cls, ui):
        import hgsshsign.keys as keys
        from hgsshsign.keymanifest import KeyManifest

        try:
            public_key = absolute_path(ui.config("sshsign", "public_key"))

        except TypeError:
            raise util.Abort(
                _("You must define sshsign.public_key in your hgrc")), \
                None, sys.exc_info()[2]

        public_key = keys.PublicKey.from_file(public_key)

        manifest_file = ui.config("sshsign", "manifest_file")
        if manifest_file:
            manifest = KeyManifest.from_file(absolute_path(manifest_file))
        else:
            manifest = None
            ui.write(_("No key manifest set. You will not be able to verify"
                        " signatures.\n"))

        private_key = ui.config("sshsign", "private_key", None)
        agent_socket = os.environ.get(SSHAgent.AGENT_SOCK_NAME)
        if private_key:
            private_key = keys.PrivateKey.from_file(absolute_path(private_key))
        elif agent_socket:
            private_key = SSHAgent(agent_socket, key=public_key.blob)
        else:
            raise util.Abort(_("No private key set and no agent running."))

        return cls(public_key, manifest, private_key)
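A hedged hgrc sketch of the [sshsign] settings from_ui() reads; the paths are invented, and only public_key is strictly required (the other two have fallbacks, as the code above shows):

[sshsign]
public_key = ~/.ssh/id_rsa.pub
# optional: without a manifest, signatures cannot be verified
manifest_file = ~/.hg-key-manifest
# optional: falls back to a running ssh-agent when unset
private_key = ~/.ssh/id_rsa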
Example #4
0
def collect(src, ui):
    seplen = len(os.path.sep)
    candidates = []
    live = len(src['tip'].manifest())
    # Your average repository has some files which were deleted before
    # the tip revision. We account for that by assuming that there are
    # 3 tracked files for every 2 live files as of the tip version of
    # the repository.
    #
    # mozilla-central as of 2010-06-10 had a ratio of just over 7:5.
    total = live * 3 // 2
    src = src.store.path
    pos = 0
    ui.status(_("tip has %d files, estimated total number of files: %d\n")
              % (live, total))
    for dirpath, dirnames, filenames in os.walk(src):
        dirnames.sort()
        relpath = dirpath[len(src) + seplen:]
        for filename in sorted(filenames):
            if filename[-2:] not in ('.d', '.i'):
                continue
            st = os.stat(os.path.join(dirpath, filename))
            if not stat.S_ISREG(st.st_mode):
                continue
            pos += 1
            candidates.append((os.path.join(relpath, filename), st))
            ui.progress(_('collecting'), pos, filename, _('files'), total)

    ui.progress(_('collecting'), None)
    ui.status(_('collected %d candidate storage files\n') % len(candidates))
    return candidates
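A worked instance of the estimate described in the comment above (numbers are illustrative):

# a tip manifest with 100000 live files
live = 100000
total = live * 3 // 2   # -> 150000 expected .i/.d storage files for progress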
Example #5
0
    def recover(self, repo):
        '''commit working directory using journal metadata'''
        node, user, date, message, parents = self.readlog()
        merge = len(parents) == 2

        if not user or not date or not message or not parents[0]:
            raise util.Abort(_('transplant log file is corrupt'))

        extra = {'transplant_source': node}
        wlock = repo.wlock()
        try:
            p1, p2 = repo.dirstate.parents()
            if p1 != parents[0]:
                raise util.Abort(
                    _('working dir not at transplant parent %s') %
                    revlog.hex(parents[0]))
            if merge:
                repo.dirstate.setparents(p1, parents[1])
            n = repo.commit(None, message, user, date, extra=extra)
            if not n:
                raise util.Abort(_('commit failed'))
            if not merge:
                self.transplants.set(n, node)
            self.unlog()

            return n, node
        finally:
            del wlock
Example #6
0
def readlegacytoken(repo, url):
    """Transitional support for old phabricator tokens.

    Remove before the 4.7 release.
    """
    groups = {}
    for key, val in repo.ui.configitems(b'phabricator.auth'):
        if b'.' not in key:
            repo.ui.warn(_(b"ignoring invalid [phabricator.auth] key '%s'\n")
                         % key)
            continue
        group, setting = key.rsplit(b'.', 1)
        groups.setdefault(group, {})[setting] = val

    token = None
    for group, auth in groups.iteritems():
        if url != auth.get(b'url'):
            continue
        token = auth.get(b'token')
        if token:
            break

    global printed_token_warning

    if token and not printed_token_warning:
        printed_token_warning = True
        repo.ui.warn(_(b'phabricator.auth.token is deprecated - please '
                       b'migrate to auth.phabtoken.\n'))
    return token
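A hedged sketch of the legacy configuration this transitional code parses; the group name, URL and token value are invented:

[phabricator.auth]
example.url = https://phab.example.com/api/
example.token = cli-xxxxxxxxxxxxxxxxxxxxxxxxxxxx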
Example #7
0
def readurltoken(repo):
    """return conduit url, token and make sure they exist

    Currently read from [auth] config section. In the future, it might
    make sense to read from .arcconfig and .arcrc as well.
    """
    url = repo.ui.config(b'phabricator', b'url')
    if not url:
        raise error.Abort(_(b'config %s.%s is required')
                          % (b'phabricator', b'url'))

    res = httpconnectionmod.readauthforuri(repo.ui, url, util.url(url).user)
    token = None

    if res:
        group, auth = res

        repo.ui.debug(b"using auth.%s.* for authentication\n" % group)

        token = auth.get(b'phabtoken')

    if not token:
        token = readlegacytoken(repo, url)
        if not token:
            raise error.Abort(_(b'Can\'t find conduit token associated to %s')
                              % (url,))

    return url, token
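A hedged sketch of the preferred configuration readurltoken() looks for, using the standard [auth] section (hostname and token are invented):

[phabricator]
url = https://phab.example.com/
[auth]
phabricator.prefix = phab.example.com
phabricator.phabtoken = cli-xxxxxxxxxxxxxxxxxxxxxxxxxxxx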
Example #8
0
def fill_values(values, ui, api_server, finalize=False):
    cache = bzauth.load_configuration(ui, api_server, BINARY_CACHE_FILENAME)

    if 'PRODUCT' in values:
        values['PRODUCT'], values['COMPONENT'] = choose_prodcomponent(
            ui, cache, values['PRODUCT'], values['COMPONENT'],
            finalize=finalize)

    if 'PRODVERSION' in values:
        if (values['PRODVERSION'] == '<default>'
                and values['PRODUCT'] not in [None, '<choose-from-menu>']):
            values['PRODVERSION'] = get_default_version(
                ui, api_server, values['PRODUCT'])
            ui.write("Using default version '%s' of product %s\n" %
                     (values['PRODVERSION'].encode('utf-8'),
                      values['PRODUCT'].encode('utf-8')))

    # 'finalize' means we need the final values. (finalize will be set to false
    # for prepopulating fields that will be displayed in a form)
    if not finalize:
        return values

    if 'BUGTITLE' in values:
        if values['BUGTITLE'] in [None, '<required>']:
            values['BUGTITLE'] = ui.prompt(_("Bug title:"), default='')

    if 'BUGCOMMENT0' in values:
        if values['BUGCOMMENT0'] in [None, '<required>']:
            values['BUGCOMMENT0'] = ui.prompt(_("Bug description:"), default='')

    if 'ATTACHMENT_DESCRIPTION' in values:
        if values['ATTACHMENT_DESCRIPTION'] in [None, '<required>']:
            values['ATTACHMENT_DESCRIPTION'] = ui.prompt(
                _("Patch description:"),
                default=values['ATTACHMENT_FILENAME'])

    return values
Example #9
0
def getkeys(ui, repo, mygpg, sigdata, context):
    """get the keys who signed a data"""
    fn, ln = context
    node, version, sig = sigdata
    prefix = "%s:%d" % (fn, ln)
    node = hgnode.bin(node)

    data = node2txt(repo, node, version)
    sig = binascii.a2b_base64(sig)
    err, keys = mygpg.verify(data, sig)
    if err:
        ui.warn("%s:%d %s\n" % (fn, ln, err))
        return None

    validkeys = []
    # warn for expired key and/or sigs
    for key in keys:
        if key[0] == "BADSIG":
            ui.write(_('%s Bad signature from "%s"\n') % (prefix, key[2]))
            continue
        if key[0] == "EXPSIG":
            ui.write(_("%s Note: Signature has expired" ' (signed by: "%s")\n') % (prefix, key[2]))
        elif key[0] == "EXPKEYSIG":
            ui.write(_("%s Note: This key has expired" ' (signed by: "%s")\n') % (prefix, key[2]))
        validkeys.append((key[1], key[2], key[3]))
    return validkeys
Example #10
0
    def __init__(self, ui, repo, dest, opts):
        """Read options and load settings from hgrc"""
        self.ui = ui
        self.repo = repo
        self.opts = opts
        self.uploaded = None
        self.selected = None

        self.chmod_file = self.opts.get('file') or self.ui.config('ftp', 'chmod_file')
        self.chmod_dir = self.opts.get('dir') or self.ui.config('ftp', 'chmod_dir')

        self.url = self.ui.config('paths', dest) or dest or self.ui.config('paths', 'ftp')
        if not self.url:
            raise util.Abort(_('no ftp destination given'))
        self.url = _url(self.url)

        if self.url.scheme != 'ftp':
            raise util.Abort(_('Only "ftp" scheme is supported'))
        if not self.url.host:
            raise util.Abort(_('No host given'))

        self.useGlobal = self.opts.get('global') or self.ui.configbool('ftp', 'global_tags')

        self.tagname = self.opts.get('tag')
        if not self.tagname:
            prefix = self.ui.config('ftp', 'prefix_tags') or _('uploaded@')
            self.tagname = prefix + self.url.host
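A hedged hgrc sketch of the settings this constructor consults; the destination URL and permission modes are invented:

[paths]
ftp = ftp://user@ftp.example.com/htdocs
[ftp]
chmod_file = 644
chmod_dir = 755
global_tags = False
prefix_tags = uploaded@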
Example #11
0
def catfile(ui, repo, type=None, r=None, **opts):
    """cat a specific revision"""
    # in stdin mode, every line except the commit is prefixed with two
    # spaces.  This way our caller can find the commit without magic
    # strings
    #
    prefix = ""
    if opts['stdin']:
        try:
            (type, r) = raw_input().split(' ')
            prefix = "    "
        except EOFError:
            return

    else:
        if not type or not r:
            ui.warn(_("cat-file: type or revision not supplied\n"))
            commands.help_(ui, 'cat-file')

    while r:
        if type != "commit":
            ui.warn(_("aborting hg cat-file only understands commits\n"))
            return 1
        n = repo.lookup(r)
        catcommit(ui, repo, n, prefix)
        if opts['stdin']:
            try:
                (type, r) = raw_input().split(' ')
            except EOFError:
                break
        else:
            break
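In --stdin mode each input line is expected to be '<type> <rev>' separated by a single space; for example (made-up revisions):

commit 1a2b3c4d5e6f
commit 0123456789ab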
Example #12
0
    def get(self, files):
        '''Get the specified largefiles from the store and write to local
        files under repo.root.  files is a list of (filename, hash)
        tuples.  Return (success, missing), lists of files successfully
        downloaded and those not found in the store.  success is a list
        of (filename, hash) tuples; missing is a list of filenames that
        we could not get.  (The detailed error message will already have
        been presented to the user, so missing is just supplied as a
        summary.)'''
        success = []
        missing = []
        ui = self.ui

        at = 0
        available = self.exists(set(hash for (_filename, hash) in files))
        for filename, hash in files:
            ui.progress(
                _('getting largefiles'), at, unit='lfile', total=len(files))
            at += 1
            ui.note(_('getting %s:%s\n') % (filename, hash))

            if not available.get(hash):
                ui.warn(
                    _('%s: largefile %s not available from %s\n') %
                    (filename, hash, util.hidepassword(self.url)))
                missing.append(filename)
                continue

            if self._gethash(filename, hash):
                success.append((filename, hash))
            else:
                missing.append(filename)

        ui.progress(_('getting largefiles'), None)
        return (success, missing)
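A hedged usage sketch of the contract described in the docstring; 'store' stands for any concrete store subclass, and the filenames and hashes are invented:

wanted = [('big.bin', '0123456789abcdef0123456789abcdef01234567'),
          ('huge.iso', '76543210fedcba9876543210fedcba9876543210')]
success, missing = store.get(wanted)
for filename, hash in success:
    store.ui.note('fetched %s (%s)\n' % (filename, hash))
for filename in missing:
    store.ui.warn('could not fetch %s\n' % filename)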
Example #13
0
    def run(self):
        if not self.repo.local():
            raise util.Abort(_('Repository "%s" is not local') % self.repo.root)

        self._check_changed()
        upload, remove = self._get_files()

        if self.opts.get('show'):
            self.ui.write(_('Upload files:\n'))
            for f in upload:
                self.ui.write('\t%s\n' % f)
            self.ui.write(_('\nDelete files:\n'))
            for f in remove:
                self.ui.write('\t%s\n' % f)

        self.ui.write(_('Upload files: %s, delete files: %s\n') %
                      (len(upload), len(remove)))

        if self.opts.get('upload'):
            if upload or remove:
                self._ftp(upload, remove)

            if not self.opts.get('only'):
                commands.tag(self.ui, self.repo, self.tagname,
                        local=not self.useGlobal,
                        rev=str(self.selected),
                        force=True)

                self.ui.write(_('Added tag %s for changeset %d:%s\n') %
                        (self.tagname, self.selected, self.selected))
Example #14
0
def _checkhook(ui, repo, node, headsonly):
    # Get revisions to check and touched files at the same time
    files = set()
    revs = set()
    for rev in xrange(repo[node].rev(), len(repo)):
        revs.add(rev)
        if headsonly:
            ctx = repo[rev]
            files.update(ctx.files())
            for pctx in ctx.parents():
                revs.discard(pctx.rev())
    failed = []
    for rev in revs:
        ctx = repo[rev]
        eol = parseeol(ui, repo, [ctx.node()])
        if eol:
            failed.extend(eol.checkrev(repo, ctx, files))

    if failed:
        eols = {'to-lf': 'CRLF', 'to-crlf': 'LF'}
        msgs = []
        for node, target, f in failed:
            msgs.append(_("  %s in %s should not have %s line endings") %
                        (f, node, eols[target]))
        raise util.Abort(_("end-of-line check failed:\n") + "\n".join(msgs))
Example #15
0
    def verify(self, revs, contents=False):
        '''Verify the existence (and, optionally, contents) of every big
        file revision referenced by every changeset in revs.
        Return 0 if all is well, non-zero on any errors.'''
        failed = False

        self.ui.status(
            _('searching %d changesets for largefiles\n') % len(revs))
        verified = set()  # set of (filename, filenode) tuples

        for rev in revs:
            cctx = self.repo[rev]
            cset = "%d:%s" % (cctx.rev(), node.short(cctx.node()))

            for standin in cctx:
                if self._verifyfile(cctx, cset, contents, standin, verified):
                    failed = True

        numrevs = len(verified)
        numlfiles = len(set([fname for (fname, fnode) in verified]))
        if contents:
            self.ui.status(
                _('verified contents of %d revisions of %d largefiles\n') %
                (numrevs, numlfiles))
        else:
            self.ui.status(
                _('verified existence of %d revisions of %d largefiles\n') %
                (numrevs, numlfiles))
        return int(failed)
Example #16
0
    def applyone(self, repo, node, cl, patchfile, merge=False, log=False,
                 filter=None):
        '''apply the patch in patchfile to the repository as a transplant'''
        (manifest, user, (time, timezone), files, message) = cl[:5]
        date = "%d %d" % (time, timezone)
        extra = {'transplant_source': node}
        if filter:
            (user, date, message) = self.filter(filter, node, cl, patchfile)

        if log:
            # we don't translate messages inserted into commits
            message += '\n(transplanted from %s)' % revlog.hex(node)

        self.ui.status(_('applying %s\n') % short(node))
        self.ui.note('%s %s\n%s\n' % (user, date, message))

        if not patchfile and not merge:
            raise util.Abort(_('can only omit patchfile if merging'))
        if patchfile:
            try:
                files = set()
                patch.patch(self.ui, repo, patchfile, files=files, eolmode=None)
                files = list(files)
            except Exception, inst:
                seriespath = os.path.join(self.path, 'series')
                if os.path.exists(seriespath):
                    os.unlink(seriespath)
                p1 = repo.dirstate.p1()
                p2 = node
                self.log(user, date, message, p1, p2, merge=merge)
                self.ui.write(str(inst) + '\n')
                raise TransplantError(_('fix up the merge and run '
                                        'hg transplant --continue'))
Example #17
0
def extsetup(ui):
    commands.globalopts.append(
        ('', 'color', 'auto',
         # i18n: 'always', 'auto', and 'never' are keywords and should
         # not be translated
         _("when to colorize (boolean, always, auto, or never)"),
         _('TYPE')))
Example #18
0
    def _parse(self):
        if self.changeset is not None:
            return
        self.changeset = {}

        maxrev = 0
        if self.revs:
            if len(self.revs) > 1:
                raise util.Abort(_('cvs source does not support specifying '
                                   'multiple revs'))
            # TODO: handle tags
            try:
                # patchset number?
                maxrev = int(self.revs[0])
            except ValueError:
                raise util.Abort(_('revision %s is not a patchset number')
                                 % self.revs[0])

        d = os.getcwd()
        try:
            os.chdir(self.path)
            id = None

            cache = 'update'
            if not self.ui.configbool('convert', 'cvsps.cache', True):
                cache = None
            db = cvsps.createlog(self.ui, cache=cache)
            db = cvsps.createchangeset(self.ui, db,
                fuzz=int(self.ui.config('convert', 'cvsps.fuzz', 60)),
                mergeto=self.ui.config('convert', 'cvsps.mergeto', None),
                mergefrom=self.ui.config('convert', 'cvsps.mergefrom', None))

            for cs in db:
                if maxrev and cs.id > maxrev:
                    break
                id = str(cs.id)
                cs.author = self.recode(cs.author)
                self.lastbranch[cs.branch] = id
                cs.comment = self.recode(cs.comment)
                if self.ui.configbool('convert', 'localtimezone'):
                    cs.date = makedatetimestamp(cs.date[0])
                date = util.datestr(cs.date, '%Y-%m-%d %H:%M:%S %1%2')
                self.tags.update(dict.fromkeys(cs.tags, id))

                files = {}
                for f in cs.entries:
                    files[f.file] = "%s%s" % ('.'.join([str(x)
                                                        for x in f.revision]),
                                              ['', '(DEAD)'][f.dead])

                # add current commit to set
                c = commit(author=cs.author, date=date,
                           parents=[str(p.id) for p in cs.parents],
                           desc=cs.comment, branch=cs.branch or '')
                self.changeset[id] = c
                self.files[id] = files

            self.heads = self.lastbranch.values()
        finally:
            os.chdir(d)
Example #19
0
    def filter(self, filter, node, changelog, patchfile):
        '''arbitrarily rewrite changeset before applying it'''

        self.ui.status(_('filtering %s\n') % patchfile)
        user, date, msg = (changelog[1], changelog[2], changelog[4])
        fd, headerfile = tempfile.mkstemp(prefix='hg-transplant-')
        fp = os.fdopen(fd, 'w')
        fp.write("# HG changeset patch\n")
        fp.write("# User %s\n" % user)
        fp.write("# Date %d %d\n" % date)
        fp.write(msg + '\n')
        fp.close()

        try:
            util.system('%s %s %s' % (filter, util.shellquote(headerfile),
                                      util.shellquote(patchfile)),
                        environ={'HGUSER': changelog[1],
                                 'HGREVISION': revlog.hex(node),
                                 },
                        onerr=util.Abort, errprefix=_('filter failed'),
                        out=self.ui.fout)
            user, date, msg = self.parselog(file(headerfile))[1:4]
        finally:
            os.unlink(headerfile)

        return (user, date, msg)
Example #20
0
def hook(ui, repo, hooktype, node=None, source=None, **kwargs):
    '''send email notifications to interested subscribers.

    if used as changegroup hook, send one email for all changesets in
    changegroup. else send one email per changeset.'''
    n = notifier(ui, repo, hooktype)
    if not n.subs:
        ui.debug(_('notify: no subscribers to repo %s\n') % n.root)
        return
    if n.skipsource(source):
        ui.debug(_('notify: changes have source "%s" - skipping\n') %
                 source)
        return
    node = bin(node)
    ui.pushbuffer()
    if hooktype == 'changegroup':
        start = repo.changelog.rev(node)
        end = repo.changelog.count()
        count = end - start
        for rev in xrange(start, end):
            n.node(repo.changelog.node(rev))
        n.diff(node, repo.changelog.tip())
    else:
        count = 1
        n.node(node)
        n.diff(node, node)
    data = ui.popbuffer()
    n.send(node, count, data)
Example #21
0
    def __init__(self, rid):
        self.bug = None
        self.user = None

        if not rid:
            return

        # Assume digits are Bugzilla bugs.
        if rid.isdigit():
            rid = 'bz://%s' % rid

        if rid and not rid.startswith('bz://'):
            raise util.Abort(_('review identifier must begin with bz://'))

        full = rid
        paths = rid[5:].split('/')
        if not paths[0]:
            raise util.Abort(_('review identifier must not be bz://'))

        bug = paths[0]
        if not bug.isdigit():
            raise util.Abort(_('first path component of review identifier must be a bug number'))
        self.bug = int(bug)

        if len(paths) > 1:
            self.user = paths[1]

        if len(paths) > 2:
            raise util.Abort(_('unrecognized review id: %s') % rid)
Example #22
0
        def readsparseconfig(self, raw):
            """Takes a string sparse config and returns the includes,
            excludes, and profiles it specified.
            """
            includes = set()
            excludes = set()
            current = includes
            profiles = []
            for line in raw.split('\n'):
                line = line.strip()
                if not line or line.startswith('#'):
                    # empty or comment line, skip
                    continue
                elif line.startswith('%include '):
                    line = line[9:].strip()
                    if line:
                        profiles.append(line)
                elif line == '[include]':
                    if current != includes:
                        raise error.Abort(_('.hg/sparse cannot have includes '
                                            'after excludes'))
                    continue
                elif line == '[exclude]':
                    current = excludes
                elif line:
                    if line.strip().startswith('/'):
                        self.ui.warn(_('warning: sparse profile cannot use'
                                       ' paths starting with /, ignoring %s\n')
                                     % line)
                        continue
                    current.add(line)

            return includes, excludes, profiles
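A hedged example of the text format this parser accepts; the profile and path names are invented (note that paths must not start with '/', and includes must come before excludes):

# comment lines and blank lines are ignored
%include profiles/tools.sparse
[include]
tools/
docs/README
[exclude]
tools/tests/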
Example #23
0
def toposort_postorderreverse(ui, rl):
    # reverse-postorder of the reverse directed graph

    children = {}
    roots = set()
    ui.status(_('reading revs\n'))
    try:
        for rev in rl:
            ui.progress(_('reading'), rev, total=len(rl))
            (p1, p2) = rl.parentrevs(rev)
            if p1 == p2 == node.nullrev:
                roots.add(rev)
            children[rev] = []
            if p1 != node.nullrev:
                children[p1].append(rev)
            if p2 != node.nullrev:
                children[p2].append(rev)
    finally:
        ui.progress(_('reading'), None)

    roots = list(roots)
    roots.sort()

    ui.status(_('sorting revs\n'))
    result = postorder(roots, children)
    result.reverse()
    return result
Example #24
0
def _pullreviews(repo):
    reviews = repo.reviews
    if not reviews.remoteurl:
        raise util.Abort(_("We don't know of any review servers. Try " "creating a review first."))

    reviewdata = _pullreviewidentifiers(repo, sorted(reviews.identifiers))
    repo.ui.write(_("updated %d reviews\n") % len(reviewdata))
Example #25
0
def toposort_reversepostorder(ui, rl):
    # postorder of the reverse directed graph

    # map rev to list of parent revs (p2 first)
    parents = {}
    heads = set()
    ui.status(_('reading revs\n'))
    try:
        for rev in rl:
            ui.progress(_('reading'), rev, total=len(rl))
            (p1, p2) = rl.parentrevs(rev)
            if p1 == p2 == node.nullrev:
                parents[rev] = ()       # root node
            elif p1 == p2 or p2 == node.nullrev:
                parents[rev] = (p1,)    # normal node
            else:
                parents[rev] = (p2, p1) # merge node
            heads.add(rev)
            for p in parents[rev]:
                heads.discard(p)
    finally:
        ui.progress(_('reading'), None)

    heads = list(heads)
    heads.sort(reverse=True)

    ui.status(_('sorting revs\n'))
    return postorder(heads, parents)
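Both sorts above delegate to a postorder() helper that is not shown here. A hedged sketch of what such a helper could look like, as an iterative depth-first walk (an illustration, not the extension's actual implementation):

def postorder(start, edges):
    # Emit a revision only after every revision reachable from it through
    # 'edges' (children or parents, depending on the caller) has been emitted.
    result = []
    visit = list(start)
    finished = set()
    while visit:
        cur = visit[-1]
        for nxt in edges[cur]:
            if nxt not in finished:
                visit.append(nxt)
                break
        else:
            visit.pop()
            if cur not in finished:
                finished.add(cur)
                result.append(cur)
    return result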
Example #26
0
def uploadlfiles(ui, rsrc, rdst, files):
    '''upload largefiles to the central store'''

    if not files:
        return

    store = basestore._openstore(rsrc, rdst, put=True)

    at = 0
    ui.debug("sending statlfile command for %d largefiles\n" % len(files))
    retval = store.exists(files)
    files = filter(lambda h: not retval[h], files)
    ui.debug("%d largefiles need to be uploaded\n" % len(files))

    for hash in files:
        ui.progress(_('uploading largefiles'), at, unit='largefile',
                    total=len(files))
        source = lfutil.findfile(rsrc, hash)
        if not source:
            raise util.Abort(_('largefile %s missing from store'
                               ' (needs to be uploaded)') % hash)
        # XXX check for errors here
        store.put(source, hash)
        at += 1
    ui.progress(_('uploading largefiles'), None)
Example #27
0
def lfpull(ui, repo, source="default", **opts):
    """pull largefiles for the specified revisions from the specified source

    Pull largefiles that are referenced from local changesets but missing
    locally, pulling from a remote repository to the local cache.

    If SOURCE is omitted, the 'default' path will be used.
    See :hg:`help urls` for more information.

    .. container:: verbose

      Some examples:

      - pull largefiles for all branch heads::

          hg lfpull -r "head() and not closed()"

      - pull largefiles on the default branch::

          hg lfpull -r "branch(default)"
    """
    repo.lfpullsource = source

    revs = opts.get('rev', [])
    if not revs:
        raise util.Abort(_('no revisions specified'))
    revs = scmutil.revrange(repo, revs)

    numcached = 0
    for rev in revs:
        ui.note(_('pulling largefiles for revision %s\n') % rev)
        (cached, missing) = cachelfiles(ui, repo, rev)
        numcached += len(cached)
    ui.status(_("%d largefiles cached\n") % numcached)
Example #28
0
    def _verifyfiles(self, contents, filestocheck):
        failed = False
        expectedhashes = [expectedhash
                          for cset, filename, expectedhash in filestocheck]
        localhashes = self._hashesavailablelocally(expectedhashes)
        stats = self._stat([expectedhash for expectedhash in expectedhashes
                            if expectedhash not in localhashes])

        for cset, filename, expectedhash in filestocheck:
            if expectedhash in localhashes:
                filetocheck = (cset, filename, expectedhash)
                verifyresult = self._lstore._verifyfiles(contents,
                                                         [filetocheck])
                if verifyresult:
                    failed = True
            else:
                stat = stats[expectedhash]
                if stat:
                    if stat == 1:
                        self.ui.warn(
                            _('changeset %s: %s: contents differ\n')
                            % (cset, filename))
                        failed = True
                    elif stat == 2:
                        self.ui.warn(
                            _('changeset %s: %s missing\n')
                            % (cset, filename))
                        failed = True
                    else:
                        raise RuntimeError('verify failed: unexpected response '
                                           'from statlfile (%r)' % stat)
        return failed
Example #29
0
def abort(repo, originalwd, target, state):
    'Restore the repository to its original state'
    dstates = [s for s in state.values() if s > nullrev]
    immutable = [d for d in dstates if not repo[d].mutable()]
    cleanup = True
    if immutable:
        repo.ui.warn(_("warning: can't clean up immutable changesets %s\n")
                     % ', '.join(str(repo[r]) for r in immutable),
                     hint=_('see hg help phases for details'))
        cleanup = False

    descendants = set()
    if dstates:
        descendants = set(repo.changelog.descendants(dstates))
    if descendants - set(dstates):
        repo.ui.warn(_("warning: new changesets detected on target branch, "
                       "can't strip\n"))
        cleanup = False

    if cleanup:
        # Update away from the rebase if necessary
        if inrebase(repo, originalwd, state):
            merge.update(repo, repo[originalwd].rev(), False, True, False)

        # Strip from the first rebased revision
        rebased = filter(lambda x: x > -1 and x != target, state.values())
        if rebased:
            strippoints = [c.node()  for c in repo.set('roots(%ld)', rebased)]
            # no backup of rebased cset versions needed
            repair.strip(repo.ui, repo, strippoints)

    clearstatus(repo)
    repo.ui.warn(_('rebase aborted\n'))
    return 0
Example #30
0
    def __init__(self, ui, path, revs=None):
        super(gnuarch_source, self).__init__(ui, path, revs=revs)

        if not os.path.exists(os.path.join(path, '{arch}')):
            raise common.NoRepo(_("%s does not look like a GNU Arch repository")
                                % path)

        # Could use checktool, but we want to check for baz or tla.
        self.execmd = None
        if util.findexe('baz'):
            self.execmd = 'baz'
        elif util.findexe('tla'):
            self.execmd = 'tla'
        else:
            raise error.Abort(_('cannot find a GNU Arch tool'))

        common.commandline.__init__(self, ui, self.execmd)

        self.path = os.path.realpath(path)
        self.tmppath = None

        self.treeversion = None
        self.lastrev = None
        self.changes = {}
        self.parents = {}
        self.tags = {}
        self.catlogparser = email.Parser.Parser()
        self.encoding = encoding.encoding
        self.archives = []
Example #31
0
def dorecord(ui, repo, commitfunc, *pats, **opts):
    try:
        if not ui.interactive():
            raise util.Abort(
                _('running non-interactively, use commit instead'))
    except TypeError:  # backwards compatibility with hg 1.1
        if not ui.interactive:
            raise util.Abort(
                _('running non-interactively, use commit instead'))

    def recordfunc(ui, repo, message, match, opts):
        """This is generic record driver.

        Its job is to interactively filter local changes, and accordingly
        prepare working dir into a state, where the job can be delegated to
        non-interactive commit command such as 'commit' or 'qrefresh'.

        After the actual job is done by non-interactive command, working dir
        state is restored to original.

        In the end we'll record interesting changes, and everything else will be
        left in place, so the user can continue his work.
        """

        merge = len(repo[None].parents()) > 1
        if merge:
            raise util.Abort(
                _('cannot partially commit a merge '
                  '(use hg commit instead)'))

        changes = repo.status(match=match)[:3]
        diffopts = mdiff.diffopts(git=True, nodates=True)
        chunks = patch.diff(repo, changes=changes, opts=diffopts)
        fp = cStringIO.StringIO()
        fp.write(''.join(chunks))
        fp.seek(0)

        # 1. filter the patch, so we have the subset of it we intend to apply
        chunks = crpatch.filterpatch(opts, crpatch.parsepatch(changes, fp),
                                     chunk_selector.chunkselector, ui)
        del fp

        contenders = set()
        for h in chunks:
            try:
                contenders.update(set(h.files()))
            except AttributeError:
                pass

        changed = changes[0] + changes[1] + changes[2]
        newfiles = [f for f in changed if f in contenders]

        if not newfiles:
            ui.status(_('no changes to record\n'))
            return 0

        modified = set(changes[0])

        # 2. backup changed files, so we can restore them in the end
        backups = {}
        backupdir = repo.join('record-backups')
        try:
            os.mkdir(backupdir)
        except OSError, err:
            if err.errno != errno.EEXIST:
                raise
        try:
            # backup continues
            for f in newfiles:
                if f not in modified:
                    continue
                fd, tmpname = tempfile.mkstemp(prefix=f.replace('/', '_') +
                                               '.',
                                               dir=backupdir)
                os.close(fd)
                ui.debug('backup %r as %r\n' % (f, tmpname))
                util.copyfile(repo.wjoin(f), tmpname)
                backups[f] = tmpname

            fp = cStringIO.StringIO()
            for c in chunks:
                if c.filename() in backups:
                    c.write(fp)
            dopatch = fp.tell()
            fp.seek(0)

            # 2.5 optionally review / modify patch in text editor
            if opts['crecord_reviewpatch']:
                patchtext = fp.read()
                reviewedpatch = ui.edit(patchtext, "")
                fp.truncate(0)
                fp.write(reviewedpatch)
                fp.seek(0)

            # 3a. apply filtered patch to clean repo  (clean)
            if backups:
                hg.revert(repo,
                          repo.dirstate.parents()[0],
                          lambda key: key in backups)

            # 3b. (apply)
            if dopatch:
                try:
                    ui.debug('applying patch\n')
                    ui.debug(fp.getvalue())
                    if hasattr(patch, 'workingbackend'):  # detect 1.9
                        patch.internalpatch(ui,
                                            repo,
                                            fp,
                                            strip=1,
                                            eolmode=None)
                    else:
                        pfiles = {}
                        try:
                            patch.internalpatch(ui, repo, fp, 1, eolmode=None)
                        except (TypeError, AttributeError):  # pre 17cea10c343e
                            try:
                                patch.internalpatch(ui,
                                                    repo,
                                                    fp,
                                                    1,
                                                    repo.root,
                                                    eolmode=None)
                            except (TypeError,
                                    AttributeError):  # pre 00a881581400
                                try:
                                    patch.internalpatch(fp,
                                                        ui,
                                                        1,
                                                        repo.root,
                                                        files=pfiles,
                                                        eolmode=None)
                                except TypeError:  # backwards compatible with hg 1.1
                                    patch.internalpatch(fp,
                                                        ui,
                                                        1,
                                                        repo.root,
                                                        files=pfiles)
                        try:
                            cmdutil.updatedir(ui, repo, pfiles)
                        except AttributeError:
                            try:
                                patch.updatedir(ui, repo, pfiles)
                            except AttributeError:
                                # from 00a881581400 onwards
                                pass
                except patch.PatchError, err:
                    s = str(err)
                    if s:
                        raise util.Abort(s)
                    else:
                        raise util.Abort(_('patch failed to apply'))
            del fp

            # 4. We prepared the working directory according to the filtered
            #    patch. Now is the time to delegate the job to commit/qrefresh
            #    or the like!

            # it is important to first chdir to repo root -- we'll call a
            # highlevel command with list of pathnames relative to repo root
            cwd = os.getcwd()
            os.chdir(repo.root)
            try:
                commitfunc(ui, repo, *newfiles, **opts)
            finally:
                os.chdir(cwd)

            return 0
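The commitfunc delegated to in step 4 only needs to accept the (ui, repo, *pats, **opts) signature used by ordinary commands. A hedged sketch of a compatible wrapper (illustrative only; the name is invented):

def record_commitfunc(ui, repo, *pats, **opts):
    # Delegate to the stock commit command once the working dir holds
    # exactly the filtered changes prepared by recordfunc().
    from mercurial import commands
    return commands.commit(ui, repo, *pats, **opts)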
Example #32
0
def demo(ui, repo, *args, **opts):
    """print [keywordmaps] configuration and an expansion example

    Show current, custom, or default keyword template maps and their
    expansions.

    Extend the current configuration by specifying maps as arguments
    and using -f/--rcfile to source an external hgrc file.

    Use -d/--default to disable current configuration.

    See :hg:`help templates` for information on templates and filters.
    """

    def demoitems(section, items):
        ui.write(b'[%s]\n' % section)
        for k, v in sorted(items):
            if isinstance(v, bool):
                v = stringutil.pprint(v)
            ui.write(b'%s = %s\n' % (k, v))

    fn = b'demo.txt'
    tmpdir = pycompat.mkdtemp(b'', b'kwdemo.')
    ui.note(_(b'creating temporary repository at %s\n') % tmpdir)
    if repo is None:
        baseui = ui
    else:
        baseui = repo.baseui
    repo = localrepo.instance(baseui, tmpdir, create=True)
    ui.setconfig(b'keyword', fn, b'', b'keyword')
    svn = ui.configbool(b'keywordset', b'svn')
    # explicitly set keywordset for demo output
    ui.setconfig(b'keywordset', b'svn', svn, b'keyword')

    uikwmaps = ui.configitems(b'keywordmaps')
    if args or opts.get('rcfile'):
        ui.status(_(b'\n\tconfiguration using custom keyword template maps\n'))
        if uikwmaps:
            ui.status(_(b'\textending current template maps\n'))
        if opts.get('default') or not uikwmaps:
            if svn:
                ui.status(_(b'\toverriding default svn keywordset\n'))
            else:
                ui.status(_(b'\toverriding default cvs keywordset\n'))
        if opts.get('rcfile'):
            ui.readconfig(opts.get('rcfile'))
        if args:
            # simulate hgrc parsing
            rcmaps = b'[keywordmaps]\n%s\n' % b'\n'.join(args)
            repo.vfs.write(b'hgrc', rcmaps)
            ui.readconfig(repo.vfs.join(b'hgrc'))
        kwmaps = dict(ui.configitems(b'keywordmaps'))
    elif opts.get('default'):
        if svn:
            ui.status(_(b'\n\tconfiguration using default svn keywordset\n'))
        else:
            ui.status(_(b'\n\tconfiguration using default cvs keywordset\n'))
        kwmaps = _defaultkwmaps(ui)
        if uikwmaps:
            ui.status(_(b'\tdisabling current template maps\n'))
            for k, v in pycompat.iteritems(kwmaps):
                ui.setconfig(b'keywordmaps', k, v, b'keyword')
    else:
        ui.status(_(b'\n\tconfiguration using current keyword template maps\n'))
        if uikwmaps:
            kwmaps = dict(uikwmaps)
        else:
            kwmaps = _defaultkwmaps(ui)

    uisetup(ui)
    reposetup(ui, repo)
    ui.writenoi18n(b'[extensions]\nkeyword =\n')
    demoitems(b'keyword', ui.configitems(b'keyword'))
    demoitems(b'keywordset', ui.configitems(b'keywordset'))
    demoitems(b'keywordmaps', pycompat.iteritems(kwmaps))
    keywords = b'$' + b'$\n$'.join(sorted(kwmaps.keys())) + b'$\n'
    repo.wvfs.write(fn, keywords)
    repo[None].add([fn])
    ui.note(_(b'\nkeywords written to %s:\n') % fn)
    ui.note(keywords)
    with repo.wlock():
        repo.dirstate.setbranch(b'demobranch')
    for name, cmd in ui.configitems(b'hooks'):
        if name.split(b'.', 1)[0].find(b'commit') > -1:
            repo.ui.setconfig(b'hooks', name, b'', b'keyword')
    msg = _(b'hg keyword configuration and expansion example')
    ui.note((b"hg ci -m '%s'\n" % msg))
    repo.commit(text=msg)
    ui.status(_(b'\n\tkeywords expanded\n'))
    ui.write(repo.wread(fn))
    repo.wvfs.rmtree(repo.root)
Example #33
0
    '''Selects files and passes them to kwtemplater.overwrite.'''
    wctx = repo[None]
    if len(wctx.parents()) > 1:
        raise error.Abort(_(b'outstanding uncommitted merge'))
    kwt = getattr(repo, '_keywordkwt', None)
    with repo.wlock():
        status = _status(ui, repo, wctx, kwt, *pats, **opts)
        if status.modified or status.added or status.removed or status.deleted:
            raise error.Abort(_(b'outstanding uncommitted changes'))
        kwt.overwrite(wctx, status.clean, True, expand)


@command(
    b'kwdemo',
    [
        (b'd', b'default', None, _(b'show default keyword template maps')),
        (b'f', b'rcfile', b'', _(b'read maps from rcfile'), _(b'FILE')),
    ],
    _(b'hg kwdemo [-d] [-f RCFILE] [TEMPLATEMAP]...'),
    optionalrepo=True,
)
def demo(ui, repo, *args, **opts):
    """print [keywordmaps] configuration and an expansion example

    Show current, custom, or default keyword template maps and their
    expansions.

    Extend the current configuration by specifying maps as arguments
    and using -f/--rcfile to source an external hgrc file.

    Use -d/--default to disable current configuration.
Example #34
0
def diffsummary(statgen):
    '''Return a short summary of the diff.'''

    stats, maxname, maxtotal, addtotal, removetotal, binary = statgen.next()
    return _(' %d files changed, %d insertions(+), %d deletions(-)\n') % (
        len(stats), addtotal, removetotal)
Example #35
0
def showstack(ui, repo, displayer):
    """current line of work"""
    wdirctx = repo['.']
    if wdirctx.rev() == nullrev:
        raise error.Abort(
            _('stack view only available when there is a '
              'working directory'))

    if wdirctx.phase() == phases.public:
        ui.write(
            _('(empty stack; working directory parent is a published '
              'changeset)\n'))
        return

    # TODO extract "find stack" into a function to facilitate
    # customization and reuse.

    baserev = destutil.stackbase(ui, repo)
    basectx = None

    if baserev is None:
        baserev = wdirctx.rev()
        stackrevs = {wdirctx.rev()}
    else:
        stackrevs = set(repo.revs('%d::.', baserev))

    ctx = repo[baserev]
    if ctx.p1().rev() != nullrev:
        basectx = ctx.p1()

    # And relevant descendants.
    branchpointattip = False
    cl = repo.changelog

    for rev in cl.descendants([wdirctx.rev()]):
        ctx = repo[rev]

        # Will only happen if . is public.
        if ctx.phase() == phases.public:
            break

        stackrevs.add(ctx.rev())

        # ctx.children() within a function iterating on descendants
        # potentially has severe performance concerns because revlog.children()
        # iterates over all revisions after ctx's node. However, the number of
        # draft changesets should be a reasonably small number. So even if
        # this is quadratic, the perf impact should be minimal.
        if len(ctx.children()) > 1:
            branchpointattip = True
            break

    stackrevs = sorted(stackrevs, reverse=True)

    # Find likely target heads for the current stack. These are likely
    # merge or rebase targets.
    if basectx:
        # TODO make this customizable?
        newheads = set(
            repo.revs('heads(%d::) - %ld - not public()', basectx.rev(),
                      stackrevs))
    else:
        newheads = set()

    allrevs = set(stackrevs) | newheads | {baserev}
    nodelen = longestshortest(repo, allrevs)

    try:
        cmdutil.findcmd('rebase', commands.table)
        haverebase = True
    except (error.AmbiguousCommand, error.UnknownCommand):
        haverebase = False

    # TODO use templating.
    # TODO consider using graphmod. But it may not be necessary given
    # our simplicity and the customizations required.
    # TODO use proper graph symbols from graphmod

    tres = formatter.templateresources(ui, repo)
    shortesttmpl = formatter.maketemplater(ui,
                                           '{shortest(node, %d)}' % nodelen,
                                           resources=tres)

    def shortest(ctx):
        return shortesttmpl.renderdefault({'ctx': ctx, 'node': ctx.hex()})

    # We write out new heads to aid in DAG awareness and to help with decision
    # making on how the stack should be reconciled with commits made since the
    # branch point.
    if newheads:
        # Calculate distance from base so we can render the count and so we can
        # sort display order by commit distance.
        revdistance = {}
        for head in newheads:
            # There is some redundancy in DAG traversal here and therefore
            # room to optimize.
            ancestors = cl.ancestors([head], stoprev=basectx.rev())
            revdistance[head] = len(list(ancestors))

        sourcectx = repo[stackrevs[-1]]

        sortedheads = sorted(newheads,
                             key=lambda x: revdistance[x],
                             reverse=True)

        for i, rev in enumerate(sortedheads):
            ctx = repo[rev]

            if i:
                ui.write(': ')
            else:
                ui.write('  ')

            ui.write(('o  '))
            displayer.show(ctx, nodelen=nodelen)
            displayer.flush(ctx)
            ui.write('\n')

            if i:
                ui.write(':/')
            else:
                ui.write(' /')

            ui.write('    (')
            ui.write(_('%d commits ahead') % revdistance[rev],
                     label='stack.commitdistance')

            if haverebase:
                # TODO may be able to omit --source in some scenarios
                ui.write('; ')
                ui.write(('hg rebase --source %s --dest %s' %
                          (shortest(sourcectx), shortest(ctx))),
                         label='stack.rebasehint')

            ui.write(')\n')

        ui.write(':\n:    ')
        ui.write(_('(stack head)\n'), label='stack.label')

    if branchpointattip:
        ui.write(' \\ /  ')
        ui.write(_('(multiple children)\n'), label='stack.label')
        ui.write('  |\n')

    for rev in stackrevs:
        ctx = repo[rev]
        symbol = '@' if rev == wdirctx.rev() else 'o'

        if newheads:
            ui.write(': ')
        else:
            ui.write('  ')

        ui.write(symbol, '  ')
        displayer.show(ctx, nodelen=nodelen)
        displayer.flush(ctx)
        ui.write('\n')

    # TODO display histedit hint?

    if basectx:
        # Vertically and horizontally separate stack base from parent
        # to reinforce stack boundary.
        if newheads:
            ui.write(':/   ')
        else:
            ui.write(' /   ')

        ui.write(_('(stack base)'), '\n', label='stack.label')
        ui.write(('o  '))

        displayer.show(basectx, nodelen=nodelen)
        displayer.flush(basectx)
        ui.write('\n')
Example #36
0
def show(ui, repo, view=None, template=None):
    """show various repository information

    A requested view of repository data is displayed.

    If no view is requested, the list of available views is shown and the
    command aborts.

    .. note::

       There are no backwards compatibility guarantees for the output of this
       command. Output may change in any future Mercurial release.

       Consumers wanting stable command output should specify a template via
       ``-T/--template``.

    List of available views:
    """
    if ui.plain() and not template:
        hint = _('invoke with -T/--template to control output format')
        raise error.Abort(_('must specify a template in plain mode'),
                          hint=hint)

    views = showview._table

    if not view:
        ui.pager('show')
        # TODO consider using formatter here so available views can be
        # rendered to custom format.
        ui.write(_('available views:\n'))
        ui.write('\n')

        for name, func in sorted(views.items()):
            ui.write(('%s\n') % pycompat.sysbytes(func.__doc__))

        ui.write('\n')
        raise error.Abort(_('no view requested'),
                          hint=_('use "hg show VIEW" to choose a view'))

    # TODO use same logic as dispatch to perform prefix matching.
    if view not in views:
        raise error.Abort(_('unknown view: %s') % view,
                          hint=_('run "hg show" to see available views'))

    template = template or 'show'

    fn = views[view]
    ui.pager('show')

    if fn._fmtopic:
        fmtopic = 'show%s' % fn._fmtopic
        with ui.formatter(fmtopic, {'template': template}) as fm:
            return fn(ui, repo, fm)
    elif fn._csettopic:
        ref = 'show%s' % fn._csettopic
        spec = formatter.lookuptemplate(ui, ref, template)
        displayer = logcmdutil.changesettemplater(ui,
                                                  repo,
                                                  spec,
                                                  buffered=True)
        return fn(ui, repo, displayer)
    else:
        return fn(ui, repo)
Example #37
0
        func._fmtopic = fmtopic
        func._csettopic = csettopic


showview = showcmdfunc()


@command(
    'show',
    [
        # TODO: Switch this template flag to use cmdutil.formatteropts if
        # 'hg show' becomes stable before --template/-T is stable. For now,
        # we are putting it here without the '(EXPERIMENTAL)' flag because it
        # is an important part of the 'hg show' user experience and the entire
        # 'hg show' experience is experimental.
        ('T', 'template', '', ('display with template'), _('TEMPLATE')),
    ],
    _('VIEW'),
    helpcategory=command.CATEGORY_CHANGE_NAVIGATION)
def show(ui, repo, view=None, template=None):
    """show various repository information

    A requested view of repository data is displayed.

    If no view is requested, the list of available views is shown and the
    command aborts.

    .. note::

       There are no backwards compatibility guarantees for the output of this
       command. Output may change in any future Mercurial release.
Example #38
0
        for copy_fn, working_fn, mtime in fns_and_mtime:
            if os.lstat(copy_fn).st_mtime != mtime:
                ui.debug('file changed while diffing. '
                         'Overwriting: %s (src: %s)\n' % (working_fn, copy_fn))
                util.copyfile(copy_fn, working_fn)

        return 1
    finally:
        ui.note(_('cleaning up temp directory\n'))
        shutil.rmtree(tmproot)


@command(
    'extdiff',
    [
        ('p', 'program', '', _('comparison program to run'), _('CMD')),
        ('o', 'option', [], _('pass option to comparison program'), _('OPT')),
        ('r', 'rev', [], _('revision'), _('REV')),
        ('c', 'change', '', _('change made by revision'), _('REV')),
    ] + commands.walkopts,
    _('hg extdiff [OPT]... [FILE]...'),
    inferrepo=True)
def extdiff(ui, repo, *pats, **opts):
    '''use external program to diff repository (or selected files)

    Show differences between revisions for the specified files, using
    an external program. The default program used is diff, with
    default options "-Npru".

    To select a different program, use the -p/--program option. The
    program will be passed the names of two directories to compare. To
Example #39
0
    Ideally we'd patch ``ui.expandpath()``. However, it isn't easy to tell
    from that API whether we should be giving out HTTP or SSH URLs.
    This was proposed and rejected as a core feature to Mercurial.
    http://www.selenic.com/pipermail/mercurial-devel/2014-September/062052.html
    """
    if isfirefoxrepo(repo):
        tree, uri = resolve_trees_to_uris([dest], write_access=True)[0]
        if uri:
            dest = uri

    return orig(ui, repo, dest=dest, **opts)


@command('fxheads', [
    ('T', 'template', shorttemplate, _('display with template'),
     _('TEMPLATE')),
], _('show Firefox tree heads'))
def fxheads(ui, repo, **opts):
    """Show last known head commits for pulled Firefox trees.

    The displayed list may be out of date. Pull before running to ensure
    data is current.
    """
    if not isfirefoxrepo(repo):
        raise util.Abort(_('fxheads is only available on Firefox repos'))

    displayer = cmdutil.show_changeset(ui, repo, opts)
    seen = set()
    for tag, node, tree, uri in get_firefoxtrees(repo):
        if node in seen:
Example #40
0
def dodiff(ui, repo, diffcmd, diffopts, pats, opts):
    '''Do the actual diff:

    - copy to a temp structure if diffing 2 internal revisions
    - copy to a temp structure if diffing working revision with
      another one and more than 1 file is changed
    - just invoke the diff for a single file in the working dir
    '''

    revs = opts.get('rev')
    change = opts.get('change')
    args = ' '.join(map(util.shellquote, diffopts))
    do3way = '$parent2' in args

    if revs and change:
        msg = _('cannot specify --rev and --change at the same time')
        raise util.Abort(msg)
    elif change:
        node2 = scmutil.revsingle(repo, change, None).node()
        node1a, node1b = repo.changelog.parents(node2)
    else:
        node1a, node2 = scmutil.revpair(repo, revs)
        if not revs:
            node1b = repo.dirstate.p2()
        else:
            node1b = nullid

    # Disable 3-way merge if there is only one parent
    if do3way:
        if node1b == nullid:
            do3way = False

    matcher = scmutil.match(repo[node2], pats, opts)
    mod_a, add_a, rem_a = map(set, repo.status(node1a, node2, matcher)[:3])
    if do3way:
        mod_b, add_b, rem_b = map(set, repo.status(node1b, node2, matcher)[:3])
    else:
        mod_b, add_b, rem_b = set(), set(), set()
    modadd = mod_a | add_a | mod_b | add_b
    common = modadd | rem_a | rem_b
    if not common:
        return 0

    tmproot = tempfile.mkdtemp(prefix='extdiff.')
    try:
        # Always make a copy of node1a (and node1b, if applicable)
        dir1a_files = mod_a | rem_a | ((mod_b | add_b) - add_a)
        dir1a = snapshot(ui, repo, dir1a_files, node1a, tmproot)[0]
        rev1a = '@%d' % repo[node1a].rev()
        if do3way:
            dir1b_files = mod_b | rem_b | ((mod_a | add_a) - add_b)
            dir1b = snapshot(ui, repo, dir1b_files, node1b, tmproot)[0]
            rev1b = '@%d' % repo[node1b].rev()
        else:
            dir1b = None
            rev1b = ''

        fns_and_mtime = []

        # If node2 is not the wc or there is >1 change, copy it
        dir2root = ''
        rev2 = ''
        if node2:
            dir2 = snapshot(ui, repo, modadd, node2, tmproot)[0]
            rev2 = '@%d' % repo[node2].rev()
        elif len(common) > 1:
            # we only actually need to get the files to copy back to
            # the working dir in this case (because the other cases
            # are: diffing 2 revisions or single file -- in which case
            # the file is already directly passed to the diff tool).
            dir2, fns_and_mtime = snapshot(ui, repo, modadd, None, tmproot)
        else:
            # This lets the diff tool open the changed file directly
            dir2 = ''
            dir2root = repo.root

        label1a = rev1a
        label1b = rev1b
        label2 = rev2

        # If only one change, diff the files instead of the directories
        # Handle bogus modifies correctly by checking if the files exist
        if len(common) == 1:
            common_file = util.localpath(common.pop())
            dir1a = os.path.join(tmproot, dir1a, common_file)
            label1a = common_file + rev1a
            if not os.path.isfile(dir1a):
                dir1a = os.devnull
            if do3way:
                dir1b = os.path.join(tmproot, dir1b, common_file)
                label1b = common_file + rev1b
                if not os.path.isfile(dir1b):
                    dir1b = os.devnull
            dir2 = os.path.join(dir2root, dir2, common_file)
            label2 = common_file + rev2

        # Function to quote file/dir names in the argument string.
        # When not operating in 3-way mode, an empty string is
        # returned for parent2
        replace = {
            'parent': dir1a,
            'parent1': dir1a,
            'parent2': dir1b,
            'plabel1': label1a,
            'plabel2': label1b,
            'clabel': label2,
            'child': dir2,
            'root': repo.root
        }

        def quote(match):
            key = match.group()[1:]
            if not do3way and key == 'parent2':
                return ''
            return util.shellquote(replace[key])

        # Match parent2 first, so 'parent1?' will match both parent1 and parent
        regex = r'\$(parent2|parent1?|child|plabel1|plabel2|clabel|root)'
        if not do3way and not re.search(regex, args):
            args += ' $parent1 $child'
        args = re.sub(regex, quote, args)
        cmdline = util.shellquote(diffcmd) + ' ' + args

        ui.debug('running %r in %s\n' % (cmdline, tmproot))
        util.system(cmdline, cwd=tmproot, out=ui.fout)

        for copy_fn, working_fn, mtime in fns_and_mtime:
            if os.lstat(copy_fn).st_mtime != mtime:
                ui.debug('file changed while diffing. '
                         'Overwriting: %s (src: %s)\n' % (working_fn, copy_fn))
                util.copyfile(copy_fn, working_fn)

        return 1
    finally:
        ui.note(_('cleaning up temp directory\n'))
        shutil.rmtree(tmproot)
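
# Illustrative, standalone sketch (not extension code) of the $parent/$child
# placeholder expansion performed in dodiff above; the paths and labels are
# invented and the quoting is simplified relative to util.shellquote.
import re

replace = {
    'parent': '/tmp/extdiff.X/repo.a1b2c3', 'parent1': '/tmp/extdiff.X/repo.a1b2c3',
    'parent2': '', 'plabel1': 'foo.c@42', 'plabel2': '',
    'clabel': 'foo.c', 'child': '/home/user/repo/foo.c', 'root': '/home/user/repo',
}
do3way = False

def quote(match):
    key = match.group()[1:]
    if not do3way and key == 'parent2':
        return ''
    return "'%s'" % replace[key]   # stand-in for util.shellquote

regex = r'\$(parent2|parent1?|child|plabel1|plabel2|clabel|root)'
args = '-L $plabel1 -L $clabel $parent $child'
print(re.sub(regex, quote, args))
# -L 'foo.c@42' -L 'foo.c' '/tmp/extdiff.X/repo.a1b2c3' '/home/user/repo/foo.c'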
Example #41
0
    def _connect(self):
        root = self.cvsroot
        conntype = None
        user, host = None, None
        cmd = ['cvs', 'server']

        self.ui.status(_("connecting to %s\n") % root)

        if root.startswith(":pserver:"):
            root = root[9:]
            m = re.match(r'(?:(.*?)(?::(.*?))?@)?([^:\/]*)(?::(\d*))?(.*)',
                         root)
            if m:
                conntype = "pserver"
                user, passw, serv, port, root = m.groups()
                if not user:
                    user = "anonymous"
                if not port:
                    port = 2401
                else:
                    port = int(port)
                format0 = ":pserver:%s@%s:%s" % (user, serv, root)
                format1 = ":pserver:%s@%s:%d%s" % (user, serv, port, root)

                if not passw:
                    passw = "A"
                    cvspass = os.path.expanduser("~/.cvspass")
                    try:
                        pf = open(cvspass)
                        for line in pf.read().splitlines():
                            part1, part2 = line.split(' ', 1)
                            # /1 :pserver:user@example.com:2401/cvsroot/foo
                            # Ah<Z
                            if part1 == '/1':
                                part1, part2 = part2.split(' ', 1)
                                format = format1
                            # :pserver:user@example.com:/cvsroot/foo Ah<Z
                            else:
                                format = format0
                            if part1 == format:
                                passw = part2
                                break
                        pf.close()
                    except IOError as inst:
                        if inst.errno != errno.ENOENT:
                            if not getattr(inst, 'filename', None):
                                inst.filename = cvspass
                            raise

                sck = socket.socket()
                sck.connect((serv, port))
                sck.send("\n".join(["BEGIN AUTH REQUEST", root, user, passw,
                                    "END AUTH REQUEST", ""]))
                if sck.recv(128) != "I LOVE YOU\n":
                    raise error.Abort(_("CVS pserver authentication failed"))

                self.writep = self.readp = sck.makefile('r+')

        if not conntype and root.startswith(":local:"):
            conntype = "local"
            root = root[7:]

        if not conntype:
            # :ext:user@host/home/user/path/to/cvsroot
            if root.startswith(":ext:"):
                root = root[5:]
            m = re.match(r'(?:([^@:/]+)@)?([^:/]+):?(.*)', root)
            # Do not take a Windows path "c:\foo\bar" for a connection string
            if os.path.isdir(root) or not m:
                conntype = "local"
            else:
                conntype = "rsh"
                user, host, root = m.group(1), m.group(2), m.group(3)

        if conntype != "pserver":
            if conntype == "rsh":
                rsh = encoding.environ.get("CVS_RSH") or "ssh"
                if user:
                    cmd = [rsh, '-l', user, host] + cmd
                else:
                    cmd = [rsh, host] + cmd

            # popen2 does not support argument lists under Windows
            cmd = [util.shellquote(arg) for arg in cmd]
            cmd = util.quotecommand(' '.join(cmd))
            self.writep, self.readp = util.popen2(cmd)

        self.realroot = root

        self.writep.write("Root %s\n" % root)
        self.writep.write("Valid-responses ok error Valid-requests Mode"
                          " M Mbinary E Checked-in Created Updated"
                          " Merged Removed\n")
        self.writep.write("valid-requests\n")
        self.writep.flush()
        r = self.readp.readline()
        if not r.startswith("Valid-requests"):
            raise error.Abort(_('unexpected response from CVS server '
                               '(expected "Valid-requests", but got %r)')
                             % r)
        if "UseUnchanged" in r:
            self.writep.write("UseUnchanged\n")
            self.writep.flush()
            r = self.readp.readline()
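
# Standalone illustration (not converter code) of what the :pserver: regex
# above captures for a typical CVSROOT; the server name is invented.
import re

root = ':pserver:anonymous@cvs.example.org:2401/cvsroot/proj'[len(':pserver:'):]
m = re.match(r'(?:(.*?)(?::(.*?))?@)?([^:\/]*)(?::(\d*))?(.*)', root)
user, passw, serv, port, path = m.groups()
# user='anonymous', passw=None, serv='cvs.example.org', port='2401',
# path='/cvsroot/proj'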
Example #42
0
def wrappedpullobsolete(orig, pullop):
    res = orig(pullop)

    repo = pullop.repo
    remote = pullop.remote

    if not isfirefoxrepo(repo):
        return res

    if remote.capable('firefoxtrees'):
        bmstore = bookmarks.bmstore(repo)
        # remote.local() returns a localrepository or None. If local,
        # just pass it into the wire protocol command/function to simulate
        # the remote command call.
        if remote.local():
            lines = firefoxtrees(remote.local(), None).splitlines()
        else:
            lines = remote._call('firefoxtrees').splitlines()
        oldtags = {}
        for tag, node, tree, uri in get_firefoxtrees(repo):
            oldtags[tag] = node
        newtags = {}
        for line in lines:
            tag, node = line.split()
            newtags[tag] = node

            node = bin(node)

            # A local bookmark of the incoming tag name is already set.
            # Wipe it out - the server takes precedence.
            if tag in bmstore:
                oldtags[tag] = bmstore[tag]
                repo.ui.status(
                    '(removing bookmark on %s matching firefoxtree %s)\n' %
                    (short(bmstore[tag]), tag))
                del bmstore[tag]
                bmstore.recordchange(pullop.trmanager.transaction())

                if bmstore.active == tag:
                    repo.ui.status('(deactivating bookmark %s)\n' % tag)
                    bookmarks.deactivate(repo)

            if oldtags.get(tag, None) == node:
                continue

            repo.firefoxtrees[tag] = node

            between = None
            if tag in oldtags:
                between = len(repo.revs('%n::%n', oldtags[tag], node)) - 1

                if not between:
                    continue

            msg = _('updated firefox tree tag %s') % tag
            if between:
                msg += _(' (+%d commits)') % between
            msg += '\n'
            repo.ui.status(msg)

        writefirefoxtrees(repo)

    tree = resolve_uri_to_tree(remote.url())
    if tree:
        tree = tree.encode('utf-8')
        updateremoterefs(repo, remote, tree)

    return res
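
# Illustrative only: the 'firefoxtrees' wire response consumed above is a plain
# "tag node" line format. The hashes below are fabricated; binascii.unhexlify
# stands in for mercurial.node.bin.
from binascii import unhexlify

lines = ['central 0123456789abcdef0123456789abcdef01234567',
         'inbound 89abcdef0123456789abcdef0123456789abcdef']
newtags = {}
for line in lines:
    tag, node = line.split()
    newtags[tag] = node
    binnode = unhexlify(node)   # 20-byte binary node, as stored per tree tag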
Example #43
0
    def censorrevision(self, tr, censornode, tombstone=b''):
        tombstone = storageutil.packmeta({b'censored': tombstone}, b'')

        # This restriction is cargo culted from revlogs and makes no sense for
        # SQLite, since columns can be resized at will.
        if len(tombstone) > len(self.revision(censornode, raw=True)):
            raise error.Abort(
                _('censor tombstone must be no longer than '
                  'censored data'))

        # We need to replace the censored revision's data with the tombstone.
        # But replacing that data will have implications for delta chains that
        # reference it.
        #
        # While "better," more complex strategies are possible, we do something
        # simple: we find delta chain children of the censored revision and we
        # replace those incremental deltas with fulltexts of their corresponding
        # revision. Then we delete the now-unreferenced delta and original
        # revision and insert a replacement.

        # Find the delta to be censored.
        censoreddeltaid = self._db.execute(
            r'SELECT deltaid FROM fileindex WHERE id=?',
            (self._revisions[censornode].rid, )).fetchone()[0]

        # Find all its delta chain children.
        # TODO once we support storing deltas for !files, we'll need to look
        # for those delta chains too.
        rows = list(
            self._db.execute(
                r'SELECT id, pathid, node FROM fileindex '
                r'WHERE deltabaseid=? OR deltaid=?',
                (censoreddeltaid, censoreddeltaid)))

        for row in rows:
            rid, pathid, node = row

            fulltext = resolvedeltachain(self._db,
                                         pathid,
                                         node, {}, {-1: None},
                                         zstddctx=self._dctx)

            deltahash = hashlib.sha1(fulltext).digest()

            if self._compengine == 'zstd':
                deltablob = self._cctx.compress(fulltext)
                compression = COMPRESSION_ZSTD
            elif self._compengine == 'zlib':
                deltablob = zlib.compress(fulltext)
                compression = COMPRESSION_ZLIB
            elif self._compengine == 'none':
                deltablob = fulltext
                compression = COMPRESSION_NONE
            else:
                raise error.ProgrammingError(
                    'unhandled compression engine: %s' % self._compengine)

            if len(deltablob) >= len(fulltext):
                deltablob = fulltext
                compression = COMPRESSION_NONE

            deltaid = insertdelta(self._db, compression, deltahash, deltablob)

            self._db.execute(
                r'UPDATE fileindex SET deltaid=?, deltabaseid=NULL '
                r'WHERE id=?', (deltaid, rid))

        # Now create the tombstone delta and replace the delta on the censored
        # node.
        deltahash = hashlib.sha1(tombstone).digest()
        tombstonedeltaid = insertdelta(self._db, COMPRESSION_NONE, deltahash,
                                       tombstone)

        flags = self._revisions[censornode].flags
        flags |= FLAG_CENSORED

        self._db.execute(
            r'UPDATE fileindex SET flags=?, deltaid=?, deltabaseid=NULL '
            r'WHERE pathid=? AND node=?',
            (flags, tombstonedeltaid, self._pathid, censornode))

        self._db.execute(r'DELETE FROM delta WHERE id=?', (censoreddeltaid, ))

        self._refreshindex()
        self._revisioncache.clear()
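
# Minimal sketch (not the store's API) of the "compress, but keep the raw
# fulltext when compression does not help" choice used when re-encoding the
# delta chain children above. The constant values are placeholders.
import zlib

COMPRESSION_NONE, COMPRESSION_ZLIB = 0, 1

def encodedelta(fulltext):
    blob, kind = zlib.compress(fulltext), COMPRESSION_ZLIB
    if len(blob) >= len(fulltext):
        # An incompressible fulltext is stored raw rather than inflated.
        blob, kind = fulltext, COMPRESSION_NONE
    return kind, blob

print(encodedelta(b'x' * 1000)[0])      # 1: zlib is smaller
print(encodedelta(b'\x00\x01\x02')[0])  # 0: too small to benefit, stored raw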
Example #44
0
    def getchanges(self, rev, full):
        if full:
            raise error.Abort(_("convert from cvs does not support --full"))
        self._parse()
        return sorted(self.files[rev].items()), {}, set()
Example #45
0
@command('gclear')
def gclear(ui, repo):
    '''clear out the Git cached data

    Strips all Git-related metadata from the repo, including the mapping
    between Git and Mercurial changesets. This is an irreversible
    destructive operation that may prevent further interaction with
    other clones.
    '''
    repo.ui.status(_("clearing out the git cache data\n"))
    repo.githandler.clear()


@command('gverify',
         [(b'r', b'rev', b'', _(b'revision to verify'), _(b'REV'))],
         _(b'[-r REV]'))
def gverify(ui, repo, **opts):
    '''verify that a Mercurial rev matches the corresponding Git rev

    Given a Mercurial revision that has a corresponding Git revision in the
    map, this attempts to answer whether that revision has the same contents as
    the corresponding Git revision.

    '''
    ctx = scmutil.revsingle(repo, opts.get('rev'), '.')
    return verify.verify(ui, repo, ctx)


@command('git-cleanup')
def git_cleanup(ui, repo):
Example #46
0
    wrapui(ui)


def reposetup(ui, repo):
    # During 'hg pull' a httppeer repo is created to represent the remote repo.
    # It doesn't have a .hg directory to put a blackbox in, so we don't do
    # the blackbox setup for it.
    if not repo.local():
        return

    if util.safehasattr(ui, 'setrepo'):
        ui.setrepo(repo)


@command('^blackbox', [
    ('l', 'limit', 10, _('the number of events to show')),
], _('hg blackbox [OPTION]...'))
def blackbox(ui, repo, *revs, **opts):
    '''view the recent repository events
    '''

    if not os.path.exists(repo.join('blackbox.log')):
        return

    limit = opts.get('limit')
    blackbox = repo.opener('blackbox.log', 'r')
    lines = blackbox.read().split('\n')

    count = 0
    output = []
    for line in reversed(lines):
Example #47
0
    flag = ' '.join(opts.get('flag'))
    if flag:
        flag = ' ' + flag

    subj = desc[0].strip().rstrip('. ')
    if not numbered:
        subj = '[PATCH%s] %s' % (flag, opts.get('subject') or subj)
    else:
        tlen = len(str(total))
        subj = '[PATCH %0*d of %d%s] %s' % (tlen, idx, total, flag, subj)
    msg['Subject'] = mail.headencode(ui, subj, _charsets, opts.get('test'))
    msg['X-Mercurial-Node'] = node
    return msg, subj, ds
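
# Illustrative aside (not patchbomb code): the '%0*d' formatting above pads the
# patch index to the width of the series total. The values here are invented.
total, idx, flag, subj = 12, 3, '', 'fix the frobnicator'
tlen = len(str(total))
print('[PATCH %0*d of %d%s] %s' % (tlen, idx, total, flag, subj))
# [PATCH 03 of 12] fix the frobnicator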

emailopts = [
    ('', 'body', None, _('send patches as inline message text (default)')),
    ('a', 'attach', None, _('send patches as attachments')),
    ('i', 'inline', None, _('send patches as inline attachments')),
    ('', 'bcc', [], _('email addresses of blind carbon copy recipients')),
    ('c', 'cc', [], _('email addresses of copy recipients')),
    ('', 'confirm', None, _('ask for confirmation before sending')),
    ('d', 'diffstat', None, _('add diffstat output to messages')),
    ('', 'date', '', _('use the given date as the sending date')),
    ('', 'desc', '', _('use the given file as the series description')),
    ('f', 'from', '', _('email address of sender')),
    ('n', 'test', None, _('print messages that would be sent')),
    ('m', 'mbox', '', _('write messages to mbox file instead of sending them')),
    ('', 'reply-to', [], _('email addresses replies should be sent to')),
    ('s', 'subject', '', _('subject of first message (intro or single patch)')),
    ('', 'in-reply-to', '', _('message identifier to reply to')),
    ('', 'flag', [], _('flags to add in subject prefixes')),
Example #48
0
        return

    for key, (b, e) in _terminfo_params.items():
        if not b:
            continue
        if not curses.tigetstr(e):
            # Most terminals don't support dim, invis, etc, so don't be
            # noisy and use ui.debug().
            ui.debug("no terminfo entry for %s\n" % e)
            del _terminfo_params[key]
    if not curses.tigetstr('setaf') or not curses.tigetstr('setab'):
        # Only warn about missing terminfo entries if we explicitly asked for
        # terminfo mode.
        if mode == "terminfo":
            ui.warn(
                _("no terminfo entry for setab/setaf: reverting to "
                  "ECMA-48 color\n"))
        _terminfo_params = {}


def _modesetup(ui, opts):
    global _terminfo_params

    coloropt = opts['color']
    auto = coloropt == 'auto'
    always = not auto and util.parsebool(coloropt)
    if not always and not auto:
        return None

    formatted = always or (os.environ.get('TERM') != 'dumb' and ui.formatted())

    mode = ui.config('color', 'mode', 'auto')
Example #49
0
    def apply(self, repo, source, revmap, merges, opts={}):
        '''apply the revisions in revmap one by one in revision order'''
        revs = sorted(revmap)
        p1, p2 = repo.dirstate.parents()
        pulls = []
        diffopts = patch.diffopts(self.ui, opts)
        diffopts.git = True

        lock = wlock = tr = None
        try:
            wlock = repo.wlock()
            lock = repo.lock()
            tr = repo.transaction('transplant')
            for rev in revs:
                node = revmap[rev]
                revstr = '%s:%s' % (rev, short(node))

                if self.applied(repo, node, p1):
                    self.ui.warn(
                        _('skipping already applied revision %s\n') % revstr)
                    continue

                parents = source.changelog.parents(node)
                if not (opts.get('filter') or opts.get('log')):
                    # If the changeset parent is the same as the
                    # wdir's parent, just pull it.
                    if parents[0] == p1:
                        pulls.append(node)
                        p1 = node
                        continue
                    if pulls:
                        if source != repo:
                            repo.pull(source.peer(), heads=pulls)
                        merge.update(repo, pulls[-1], False, False, None)
                        p1, p2 = repo.dirstate.parents()
                        pulls = []

                domerge = False
                if node in merges:
                    # pulling all the merge revs at once would mean we
                    # couldn't transplant after the latest even if
                    # transplants before them fail.
                    domerge = True
                    if not hasnode(repo, node):
                        repo.pull(source, heads=[node])

                skipmerge = False
                if parents[1] != revlog.nullid:
                    if not opts.get('parent'):
                        self.ui.note(
                            _('skipping merge changeset %s:%s\n') %
                            (rev, short(node)))
                        skipmerge = True
                    else:
                        parent = source.lookup(opts['parent'])
                        if parent not in parents:
                            raise util.Abort(
                                _('%s is not a parent of %s') %
                                (short(parent), short(node)))
                else:
                    parent = parents[0]

                if skipmerge:
                    patchfile = None
                else:
                    fd, patchfile = tempfile.mkstemp(prefix='hg-transplant-')
                    fp = os.fdopen(fd, 'w')
                    gen = patch.diff(source, parent, node, opts=diffopts)
                    for chunk in gen:
                        fp.write(chunk)
                    fp.close()

                del revmap[rev]
                if patchfile or domerge:
                    try:
                        try:
                            n = self.applyone(repo,
                                              node,
                                              source.changelog.read(node),
                                              patchfile,
                                              merge=domerge,
                                              log=opts.get('log'),
                                              filter=opts.get('filter'))
                        except TransplantError:
                            # Do not rollback, it is up to the user to
                            # fix the merge or cancel everything
                            tr.close()
                            raise
                        if n and domerge:
                            self.ui.status(
                                _('%s merged at %s\n') % (revstr, short(n)))
                        elif n:
                            self.ui.status(
                                _('%s transplanted to %s\n') %
                                (short(node), short(n)))
                    finally:
                        if patchfile:
                            os.unlink(patchfile)
            tr.close()
            if pulls:
                repo.pull(source.peer(), heads=pulls)
                merge.update(repo, pulls[-1], False, False, None)
        finally:
            self.saveseries(revmap, merges)
            self.transplants.write()
            if tr:
                tr.release()
            lock.release()
            wlock.release()
Example #50
0
def patchbomb(ui, repo, *revs, **opts):
    '''send changesets by email

    By default, diffs are sent in the format generated by
    :hg:`export`, one per message. The series starts with a "[PATCH 0
    of N]" introduction, which describes the series as a whole.

    Each patch email has a Subject line of "[PATCH M of N] ...", using
    the first line of the changeset description as the subject text.
    The message contains two or three parts. First, the changeset
    description.

    With the -d/--diffstat option, if the diffstat program is
    installed, the result of running diffstat on the patch is inserted.

    Finally, the patch itself, as generated by :hg:`export`.

    With the -d/--diffstat or -c/--confirm options, you will be presented
    with a final summary of all messages and asked for confirmation before
    the messages are sent.

    By default the patch is included as text in the email body for
    easy reviewing. Using the -a/--attach option will instead create
    an attachment for the patch. With -i/--inline an inline attachment
    will be created. You can include a patch both as text in the email
    body and as a regular or an inline attachment by combining the
    -a/--attach or -i/--inline with the --body option.

    With -o/--outgoing, emails will be generated for patches not found
    in the destination repository (or only those which are ancestors
    of the specified revisions if any are provided)

    With -b/--bundle, changesets are selected as for --outgoing, but a
    single email containing a binary Mercurial bundle as an attachment
    will be sent.

    With -m/--mbox, instead of previewing each patchbomb message in a
    pager or sending the messages directly, it will create a UNIX
    mailbox file with the patch emails. This mailbox file can be
    previewed with any mail user agent which supports UNIX mbox
    files.

    With -n/--test, all steps will run, but mail will not be sent.
    You will be prompted for an email recipient address, a subject and
    an introductory message describing the patches of your patchbomb.
    Then when all is done, patchbomb messages are displayed. If the
    PAGER environment variable is set, your pager will be fired up once
    for each patchbomb message, so you can verify everything is alright.

    In case email sending fails, you will find a backup of your series
    introductory message in ``.hg/last-email.txt``.

    Examples::

      hg email -r 3000          # send patch 3000 only
      hg email -r 3000 -r 3001  # send patches 3000 and 3001
      hg email -r 3000:3005     # send patches 3000 through 3005
      hg email 3000             # send patch 3000 (deprecated)

      hg email -o               # send all patches not in default
      hg email -o DEST          # send all patches not in DEST
      hg email -o -r 3000       # send all ancestors of 3000 not in default
      hg email -o -r 3000 DEST  # send all ancestors of 3000 not in DEST

      hg email -b               # send bundle of all patches not in default
      hg email -b DEST          # send bundle of all patches not in DEST
      hg email -b -r 3000       # bundle of all ancestors of 3000 not in default
      hg email -b -r 3000 DEST  # bundle of all ancestors of 3000 not in DEST

      hg email -o -m mbox &&    # generate an mbox file...
        mutt -R -f mbox         # ... and view it with mutt
      hg email -o -m mbox &&    # generate an mbox file ...
        formail -s sendmail \\   # ... and use formail to send from the mbox
          -bm -t < mbox         # ... using sendmail

    Before using this command, you will need to enable email in your
    hgrc. See the [email] section in hgrc(5) for details.
    '''

    _charsets = mail._charsets(ui)

    bundle = opts.get('bundle')
    date = opts.get('date')
    mbox = opts.get('mbox')
    outgoing = opts.get('outgoing')
    rev = opts.get('rev')
    # internal option used by pbranches
    patches = opts.get('patches')

    def getoutgoing(dest, revs):
        '''Return the revisions present locally but not in dest'''
        dest = ui.expandpath(dest or 'default-push', dest or 'default')
        dest, branches = hg.parseurl(dest)
        revs, checkout = hg.addbranchrevs(repo, repo, branches, revs)
        other = hg.peer(repo, opts, dest)
        ui.status(_('comparing with %s\n') % util.hidepassword(dest))
        common, _anyinc, _heads = discovery.findcommonincoming(repo, other)
        nodes = revs and map(repo.lookup, revs) or revs
        o = repo.changelog.findmissing(common, heads=nodes)
        if not o:
            ui.status(_("no changes found\n"))
            return []
        return [str(repo.changelog.rev(r)) for r in o]

    def getpatches(revs):
        for r in scmutil.revrange(repo, revs):
            output = cStringIO.StringIO()
            cmdutil.export(repo, [r], fp=output,
                         opts=patch.diffopts(ui, opts))
            yield output.getvalue().split('\n')

    def getbundle(dest):
        tmpdir = tempfile.mkdtemp(prefix='hg-email-bundle-')
        tmpfn = os.path.join(tmpdir, 'bundle')
        try:
            commands.bundle(ui, repo, tmpfn, dest, **opts)
            fp = open(tmpfn, 'rb')
            data = fp.read()
            fp.close()
            return data
        finally:
            try:
                os.unlink(tmpfn)
            except OSError:
                pass
            os.rmdir(tmpdir)

    if not (opts.get('test') or mbox):
        # really sending
        mail.validateconfig(ui)

    if not (revs or rev or outgoing or bundle or patches):
        raise util.Abort(_('specify at least one changeset with -r or -o'))

    if outgoing and bundle:
        raise util.Abort(_("--outgoing mode always on with --bundle;"
                           " do not re-specify --outgoing"))

    if outgoing or bundle:
        if len(revs) > 1:
            raise util.Abort(_("too many destinations"))
        dest = revs and revs[0] or None
        revs = []

    if rev:
        if revs:
            raise util.Abort(_('use only one form to specify the revision'))
        revs = rev

    if outgoing:
        revs = getoutgoing(dest, rev)
    if bundle:
        opts['revs'] = revs

    # start
    if date:
        start_time = util.parsedate(date)
    else:
        start_time = util.makedate()

    def genmsgid(id):
        return '<%s.%s@%s>' % (id[:20], int(start_time[0]), socket.getfqdn())

    def getdescription(body, sender):
        if opts.get('desc'):
            body = open(opts.get('desc')).read()
        else:
            ui.write(_('\nWrite the introductory message for the '
                       'patch series.\n\n'))
            body = ui.edit(body, sender)
            # Save series description in case sendmail fails
            msgfile = repo.opener('last-email.txt', 'wb')
            msgfile.write(body)
            msgfile.close()
        return body

    def getpatchmsgs(patches, patchnames=None):
        msgs = []

        ui.write(_('This patch series consists of %d patches.\n\n')
                 % len(patches))

        # build the intro message, or skip it if the user declines
        if introwanted(opts, len(patches)):
            msg = makeintro(patches)
            if msg:
                msgs.append(msg)

        # are we going to send more than one message?
        numbered = len(msgs) + len(patches) > 1

        # now generate the actual patch messages
        name = None
        for i, p in enumerate(patches):
            if patchnames:
                name = patchnames[i]
            msg = makepatch(ui, repo, p, opts, _charsets, i + 1,
                            len(patches), numbered, name)
            msgs.append(msg)

        return msgs

    def makeintro(patches):
        tlen = len(str(len(patches)))

        flag = opts.get('flag') or ''
        if flag:
            flag = ' ' + ' '.join(flag)
        prefix = '[PATCH %0*d of %d%s]' % (tlen, 0, len(patches), flag)

        subj = (opts.get('subject') or
                prompt(ui, '(optional) Subject: ', rest=prefix, default=''))
        if not subj:
            return None         # skip intro if the user doesn't bother

        subj = prefix + ' ' + subj

        body = ''
        if opts.get('diffstat'):
            # generate a cumulative diffstat of the whole patch series
            diffstat = patch.diffstat(sum(patches, []))
            body = '\n' + diffstat
        else:
            diffstat = None

        body = getdescription(body, sender)
        msg = mail.mimeencode(ui, body, _charsets, opts.get('test'))
        msg['Subject'] = mail.headencode(ui, subj, _charsets,
                                         opts.get('test'))
        return (msg, subj, diffstat)

    def getbundlemsgs(bundle):
        subj = (opts.get('subject')
                or prompt(ui, 'Subject:', 'A bundle for your repository'))

        body = getdescription('', sender)
        msg = email.MIMEMultipart.MIMEMultipart()
        if body:
            msg.attach(mail.mimeencode(ui, body, _charsets, opts.get('test')))
        datapart = email.MIMEBase.MIMEBase('application', 'x-mercurial-bundle')
        datapart.set_payload(bundle)
        bundlename = '%s.hg' % opts.get('bundlename', 'bundle')
        datapart.add_header('Content-Disposition', 'attachment',
                            filename=bundlename)
        email.Encoders.encode_base64(datapart)
        msg.attach(datapart)
        msg['Subject'] = mail.headencode(ui, subj, _charsets, opts.get('test'))
        return [(msg, subj, None)]

    sender = (opts.get('from') or ui.config('email', 'from') or
              ui.config('patchbomb', 'from') or
              prompt(ui, 'From', ui.username()))

    if patches:
        msgs = getpatchmsgs(patches, opts.get('patchnames'))
    elif bundle:
        msgs = getbundlemsgs(getbundle(dest))
    else:
        msgs = getpatchmsgs(list(getpatches(revs)))

    showaddrs = []

    def getaddrs(header, ask=False, default=None):
        configkey = header.lower()
        opt = header.replace('-', '_').lower()
        addrs = opts.get(opt)
        if addrs:
            showaddrs.append('%s: %s' % (header, ', '.join(addrs)))
            return mail.addrlistencode(ui, addrs, _charsets, opts.get('test'))

        # not on the command line: fallback to config and then maybe ask
        addr = (ui.config('email', configkey) or
                ui.config('patchbomb', configkey) or
                '')
        if not addr and ask:
            addr = prompt(ui, header, default=default)
        if addr:
            showaddrs.append('%s: %s' % (header, addr))
            return mail.addrlistencode(ui, [addr], _charsets, opts.get('test'))
        else:
            return default

    to = getaddrs('To', ask=True)
    if not to:
        # we can get here in non-interactive mode
        raise util.Abort(_('no recipient addresses provided'))
    cc = getaddrs('Cc', ask=True, default='') or []
    bcc = getaddrs('Bcc') or []
    replyto = getaddrs('Reply-To')

    if opts.get('diffstat') or opts.get('confirm'):
        ui.write(_('\nFinal summary:\n\n'))
        ui.write('From: %s\n' % sender)
        for addr in showaddrs:
            ui.write('%s\n' % addr)
        for m, subj, ds in msgs:
            ui.write('Subject: %s\n' % subj)
            if ds:
                ui.write(ds)
        ui.write('\n')
        if ui.promptchoice(_('are you sure you want to send (yn)?'),
                           (_('&Yes'), _('&No'))):
            raise util.Abort(_('patchbomb canceled'))

    ui.write('\n')

    parent = opts.get('in_reply_to') or None
    # angle brackets may be omitted, they're not semantically part of the msg-id
    if parent is not None:
        if not parent.startswith('<'):
            parent = '<' + parent
        if not parent.endswith('>'):
            parent += '>'

    first = True

    sender_addr = email.Utils.parseaddr(sender)[1]
    sender = mail.addressencode(ui, sender, _charsets, opts.get('test'))
    sendmail = None
    for i, (m, subj, ds) in enumerate(msgs):
        try:
            m['Message-Id'] = genmsgid(m['X-Mercurial-Node'])
        except TypeError:
            m['Message-Id'] = genmsgid('patchbomb')
        if parent:
            m['In-Reply-To'] = parent
            m['References'] = parent
        if first:
            parent = m['Message-Id']
            first = False

        m['User-Agent'] = 'Mercurial-patchbomb/%s' % util.version()
        m['Date'] = email.Utils.formatdate(start_time[0], localtime=True)

        start_time = (start_time[0] + 1, start_time[1])
        m['From'] = sender
        m['To'] = ', '.join(to)
        if cc:
            m['Cc']  = ', '.join(cc)
        if bcc:
            m['Bcc'] = ', '.join(bcc)
        if replyto:
            m['Reply-To'] = ', '.join(replyto)
        if opts.get('test'):
            ui.status(_('Displaying '), subj, ' ...\n')
            ui.flush()
            if 'PAGER' in os.environ and not ui.plain():
                fp = util.popen(os.environ['PAGER'], 'w')
            else:
                fp = ui
            generator = email.Generator.Generator(fp, mangle_from_=False)
            try:
                generator.flatten(m, 0)
                fp.write('\n')
            except IOError as inst:
                if inst.errno != errno.EPIPE:
                    raise
            if fp is not ui:
                fp.close()
        else:
            if not sendmail:
                sendmail = mail.connect(ui, mbox=mbox)
            ui.status(_('Sending '), subj, ' ...\n')
            ui.progress(_('sending'), i, item=subj, total=len(msgs))
            if not mbox:
                # Exim does not remove the Bcc field
                del m['Bcc']
            fp = cStringIO.StringIO()
            generator = email.Generator.Generator(fp, mangle_from_=False)
            generator.flatten(m, 0)
            sendmail(sender_addr, to + bcc + cc, fp.getvalue())
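
# Standalone sketch (stdlib only) of the threading applied in the send loop
# above: the first message's Message-Id becomes the In-Reply-To/References
# parent of every later message in the series. The ids are invented.
import email.message

msgs = [email.message.Message() for _ in range(3)]
parent = None       # or a message id passed via --in-reply-to
first = True
for i, m in enumerate(msgs):
    m['Message-Id'] = '<patch-%d@example.invalid>' % i
    if parent:
        m['In-Reply-To'] = parent
        m['References'] = parent
    if first:
        parent = m['Message-Id']
        first = False
# msgs[1]['In-Reply-To'] == msgs[2]['In-Reply-To'] == '<patch-0@example.invalid>'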
Example #51
0
    def __init__(self, ui, repotype, path, revs=None):
        super(convert_git, self).__init__(ui, repotype, path, revs=revs)
        common.commandline.__init__(self, ui, 'git')

        # Pass an absolute path to git to prevent it from ever being
        # interpreted as a URL
        path = os.path.abspath(path)

        if os.path.isdir(path + "/.git"):
            path += "/.git"
        if not os.path.exists(path + "/objects"):
            raise common.NoRepo(
                _("%s does not look like a Git repository") % path)

        # The default value (50) is based on the default for 'git diff'.
        similarity = ui.configint('convert', 'git.similarity')
        if similarity < 0 or similarity > 100:
            raise error.Abort(_('similarity must be between 0 and 100'))
        if similarity > 0:
            self.simopt = ['-C%d%%' % similarity]
            findcopiesharder = ui.configbool('convert', 'git.findcopiesharder')
            if findcopiesharder:
                self.simopt.append('--find-copies-harder')

            renamelimit = ui.configint('convert', 'git.renamelimit')
            self.simopt.append('-l%d' % renamelimit)
        else:
            self.simopt = []

        common.checktool('git', 'git')

        self.path = path
        self.submodules = []

        self.catfilepipe = self.gitpipe('cat-file', '--batch')

        self.copyextrakeys = self.ui.configlist('convert', 'git.extrakeys')
        banned = set(self.copyextrakeys) & bannedextrakeys
        if banned:
            raise error.Abort(
                _('copying of extra key is forbidden: %s') %
                _(', ').join(sorted(banned)))

        committeractions = self.ui.configlist('convert',
                                              'git.committeractions')

        messagedifferent = None
        messagealways = None
        for a in committeractions:
            if a.startswith(('messagedifferent', 'messagealways')):
                k = a
                v = None
                if '=' in a:
                    k, v = a.split('=', 1)

                if k == 'messagedifferent':
                    messagedifferent = v or 'committer:'
                elif k == 'messagealways':
                    messagealways = v or 'committer:'

        if messagedifferent and messagealways:
            raise error.Abort(
                _('committeractions cannot define both '
                  'messagedifferent and messagealways'))

        dropcommitter = 'dropcommitter' in committeractions
        replaceauthor = 'replaceauthor' in committeractions

        if dropcommitter and replaceauthor:
            raise error.Abort(
                _('committeractions cannot define both '
                  'dropcommitter and replaceauthor'))

        if dropcommitter and messagealways:
            raise error.Abort(
                _('committeractions cannot define both '
                  'dropcommitter and messagealways'))

        if not messagedifferent and not messagealways:
            messagedifferent = 'committer:'

        self.committeractions = {
            'dropcommitter': dropcommitter,
            'replaceauthor': replaceauthor,
            'messagedifferent': messagedifferent,
            'messagealways': messagealways,
        }
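
# Quick illustration (not converter code) of how convert.git.committeractions
# entries are interpreted by the loop above; the sample inputs are invented.
def parseactions(committeractions):
    messagedifferent = messagealways = None
    for a in committeractions:
        if a.startswith(('messagedifferent', 'messagealways')):
            k, v = a, None
            if '=' in a:
                k, v = a.split('=', 1)
            if k == 'messagedifferent':
                messagedifferent = v or 'committer:'
            elif k == 'messagealways':
                messagealways = v or 'committer:'
    return messagedifferent, messagealways

print(parseactions(['messagedifferent']))             # ('committer:', None)
print(parseactions(['messagealways=Committed-by:']))  # (None, 'Committed-by:')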
Example #52
0
def transplant(ui, repo, *revs, **opts):
    '''transplant changesets from another branch

    Selected changesets will be applied on top of the current working
    directory with the log of the original changeset. The changesets
    are copied and will thus appear twice in the history with different
    identities.

    Consider using the graft command if everything is inside the same
    repository - it will use merges and will usually give a better result.
    Use the rebase extension if the changesets are unpublished and you want
    to move them instead of copying them.

    If --log is specified, log messages will have a comment appended
    of the form::

      (transplanted from CHANGESETHASH)

    You can rewrite the changelog message with the --filter option.
    Its argument will be invoked with the current changelog message as
    $1 and the patch as $2.

    --source/-s specifies another repository to use for selecting changesets,
    just as if it temporarily had been pulled.
    If --branch/-b is specified, these revisions will be used as
    heads when deciding which changesets to transplant, just as if only
    these revisions had been pulled.
    If --all/-a is specified, all the revisions up to the heads specified
    with --branch will be transplanted.

    Example:

    - transplant all changes up to REV on top of your current revision::

        hg transplant --branch REV --all

    You can optionally mark selected transplanted changesets as merge
    changesets. You will not be prompted to transplant any ancestors
    of a merged transplant, and you can merge descendants of them
    normally instead of transplanting them.

    Merge changesets may be transplanted directly by specifying the
    proper parent changeset by calling :hg:`transplant --parent`.

    If no merges or revisions are provided, :hg:`transplant` will
    start an interactive changeset browser.

    If a changeset application fails, you can fix the merge by hand
    and then resume where you left off by calling :hg:`transplant
    --continue/-c`.
    '''
    def incwalk(repo, csets, match=util.always):
        for node in csets:
            if match(node):
                yield node

    def transplantwalk(repo, dest, heads, match=util.always):
        '''Yield all nodes that are ancestors of a head but not ancestors
        of dest.
        If no heads are specified, the heads of repo will be used.'''
        if not heads:
            heads = repo.heads()
        ancestors = []
        for head in heads:
            ancestors.append(repo.changelog.ancestor(dest, head))
        for node in repo.changelog.nodesbetween(ancestors, heads)[0]:
            if match(node):
                yield node

    def checkopts(opts, revs):
        if opts.get('continue'):
            if opts.get('branch') or opts.get('all') or opts.get('merge'):
                raise util.Abort(
                    _('--continue is incompatible with '
                      '--branch, --all and --merge'))
            return
        if not (opts.get('source') or revs or opts.get('merge')
                or opts.get('branch')):
            raise util.Abort(
                _('no source URL, branch revision or revision '
                  'list provided'))
        if opts.get('all'):
            if not opts.get('branch'):
                raise util.Abort(_('--all requires a branch revision'))
            if revs:
                raise util.Abort(
                    _('--all is incompatible with a '
                      'revision list'))

    checkopts(opts, revs)

    if not opts.get('log'):
        opts['log'] = ui.config('transplant', 'log')
    if not opts.get('filter'):
        opts['filter'] = ui.config('transplant', 'filter')

    tp = transplanter(ui, repo)
    if opts.get('edit'):
        tp.editor = cmdutil.commitforceeditor

    p1, p2 = repo.dirstate.parents()
    if len(repo) > 0 and p1 == revlog.nullid:
        raise util.Abort(_('no revision checked out'))
    if not opts.get('continue'):
        if p2 != revlog.nullid:
            raise util.Abort(_('outstanding uncommitted merges'))
        m, a, r, d = repo.status()[:4]
        if m or a or r or d:
            raise util.Abort(_('outstanding local changes'))

    sourcerepo = opts.get('source')
    if sourcerepo:
        peer = hg.peer(repo, opts, ui.expandpath(sourcerepo))
        heads = map(peer.lookup, opts.get('branch', ()))
        source, csets, cleanupfn = bundlerepo.getremotechanges(ui,
                                                               repo,
                                                               peer,
                                                               onlyheads=heads,
                                                               force=True)
    else:
        source = repo
        heads = map(source.lookup, opts.get('branch', ()))
        cleanupfn = None

    try:
        if opts.get('continue'):
            tp.resume(repo, source, opts)
            return

        tf = tp.transplantfilter(repo, source, p1)
        if opts.get('prune'):
            prune = set(
                source.lookup(r)
                for r in scmutil.revrange(source, opts.get('prune')))
            matchfn = lambda x: tf(x) and x not in prune
        else:
            matchfn = tf
        merges = map(source.lookup, opts.get('merge', ()))
        revmap = {}
        if revs:
            for r in scmutil.revrange(source, revs):
                revmap[int(r)] = source.lookup(r)
        elif opts.get('all') or not merges:
            if source != repo:
                alltransplants = incwalk(source, csets, match=matchfn)
            else:
                alltransplants = transplantwalk(source,
                                                p1,
                                                heads,
                                                match=matchfn)
            if opts.get('all'):
                revs = alltransplants
            else:
                revs, newmerges = browserevs(ui, source, alltransplants, opts)
                merges.extend(newmerges)
            for r in revs:
                revmap[source.changelog.rev(r)] = r
        for r in merges:
            revmap[source.changelog.rev(r)] = r

        tp.apply(repo, source, revmap, merges, opts)
    finally:
        if cleanupfn:
            cleanupfn()
Example #53
0
    def getchanges(self, version, full):
        if full:
            raise error.Abort(_("convert from git does not support --full"))
        self.modecache = {}
        cmd = ['diff-tree', '-z', '--root', '-m', '-r'
               ] + self.simopt + [version]
        output, status = self.gitrun(*cmd)
        if status:
            raise error.Abort(_('cannot read changes in %s') % version)
        changes = []
        copies = {}
        seen = set()
        entry = None
        subexists = [False]
        subdeleted = [False]
        difftree = output.split('\x00')
        lcount = len(difftree)
        i = 0

        skipsubmodules = self.ui.configbool('convert', 'git.skipsubmodules')

        def add(entry, f, isdest):
            seen.add(f)
            h = entry[3]
            p = (entry[1] == "100755")
            s = (entry[1] == "120000")
            renamesource = (not isdest and entry[4][0] == 'R')

            if f == '.gitmodules':
                if skipsubmodules:
                    return

                subexists[0] = True
                if entry[4] == 'D' or renamesource:
                    subdeleted[0] = True
                    changes.append(('.hgsub', nodemod.nullhex))
                else:
                    changes.append(('.hgsub', ''))
            elif entry[1] == '160000' or entry[0] == ':160000':
                if not skipsubmodules:
                    subexists[0] = True
            else:
                if renamesource:
                    h = nodemod.nullhex
                self.modecache[(f, h)] = (p and "x") or (s and "l") or ""
                changes.append((f, h))

        while i < lcount:
            l = difftree[i]
            i += 1
            if not entry:
                if not l.startswith(':'):
                    continue
                entry = tuple(pycompat.bytestr(p) for p in l.split())
                continue
            f = l
            if entry[4][0] == 'C':
                copysrc = f
                copydest = difftree[i]
                i += 1
                f = copydest
                copies[copydest] = copysrc
            if f not in seen:
                add(entry, f, False)
            # A file can be copied multiple times, or modified and copied
            # simultaneously. So f can be repeated even if fdest isn't.
            if entry[4][0] == 'R':
                # rename: next line is the destination
                fdest = difftree[i]
                i += 1
                if fdest not in seen:
                    add(entry, fdest, True)
                    # .gitmodules isn't imported at all, so it being copied to
                    # and fro doesn't really make sense
                    if f != '.gitmodules' and fdest != '.gitmodules':
                        copies[fdest] = f
            entry = None

        if subexists[0]:
            if subdeleted[0]:
                changes.append(('.hgsubstate', nodemod.nullhex))
            else:
                self.retrievegitmodules(version)
                changes.append(('.hgsubstate', ''))
        return (changes, copies, set())
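
# Standalone sketch of the NUL-separated 'git diff-tree -z -r' stream walked by
# getchanges above; the hashes are shortened and invented.
sample = (':100644 100644 aaaa bbbb M\x00src/main.c\x00'
          ':100644 100644 aaaa aaaa R100\x00old.c\x00new.c\x00')
fields = sample.split('\x00')
# [':... M', 'src/main.c', ':... R100', 'old.c', 'new.c', '']
entry = tuple(fields[0].split())
# (':100644', '100644', 'aaaa', 'bbbb', 'M'); entry[4][0] is the change type,
# and a rename/copy entry is followed by two paths (source, then destination).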
Example #54
0
            files = None
        if merge:
            p1, p2 = repo.dirstate.parents()
            repo.setparents(p1, node)
            m = match.always(repo.root, '')
        else:
            m = match.exact(repo.root, '', files)

        n = repo.commit(message,
                        user,
                        date,
                        extra=extra,
                        match=m,
                        editor=self.editor)
        if not n:
            self.ui.warn(_('skipping emptied changeset %s\n') % short(node))
            return None
        if not merge:
            self.transplants.set(n, node)

        return n

    def resume(self, repo, source, opts):
        '''recover last transaction and apply remaining changesets'''
        if os.path.exists(os.path.join(self.path, 'journal')):
            n, node = self.recover(repo, source, opts)
            self.ui.status(
                _('%s transplanted as %s\n') % (short(node), short(n)))
        seriespath = os.path.join(self.path, 'series')
        if not os.path.exists(seriespath):
            self.transplants.write()
Example #55
0
    def sendrpc(self, msg):
        srv = xmlrpclib.Server(self.ciaurl)
        res = srv.hub.deliver(msg)
        if res is not True and res != 'queued.':
            raise util.Abort(
                _('%s returned an error: %s') % (self.ciaurl, res))
Example #56
0
    def numcommits(self):
        output, ret = self.gitrunlines('rev-list', '--all')
        if ret:
            raise error.Abort(
                _('cannot retrieve number of commits in %s') % self.path)
        return len(output)

def email(ui, repo, *revs, **opts):
    '''send changesets by email

    By default, diffs are sent in the format generated by
    :hg:`export`, one per message. The series starts with a "[PATCH 0
    of N]" introduction, which describes the series as a whole.

    Each patch email has a Subject line of "[PATCH M of N] ...", using
    the first line of the changeset description as the subject text.
    The message contains two or three parts. First, the changeset
    description.

    With the -d/--diffstat option, if the diffstat program is
    installed, the result of running diffstat on the patch is inserted.

    Finally, the patch itself, as generated by :hg:`export`.

    With the -d/--diffstat or --confirm options, you will be presented
    with a final summary of all messages and asked for confirmation before
    the messages are sent.

    By default the patch is included as text in the email body for
    easy reviewing. Using the -a/--attach option will instead create
    an attachment for the patch. With -i/--inline an inline attachment
    will be created. You can include a patch both as text in the email
    body and as a regular or an inline attachment by combining the
    -a/--attach or -i/--inline with the --body option.

    With -B/--bookmark changesets reachable by the given bookmark are
    selected.

    With -o/--outgoing, emails will be generated for patches not found
    in the destination repository (or only those which are ancestors
    of the specified revisions if any are provided)

    With -b/--bundle, changesets are selected as for --outgoing, but a
    single email containing a binary Mercurial bundle as an attachment
    will be sent. Use the ``patchbomb.bundletype`` config option to
    control the bundle type as with :hg:`bundle --type`.

    With -m/--mbox, instead of previewing each patchbomb message in a
    pager or sending the messages directly, it will create a UNIX
    mailbox file with the patch emails. This mailbox file can be
    previewed with any mail user agent which supports UNIX mbox
    files.

    With -n/--test, all steps will run, but mail will not be sent.
    You will be prompted for an email recipient address, a subject and
    an introductory message describing the patches of your patchbomb.
    Then when all is done, patchbomb messages are displayed.

    In case email sending fails, you will find a backup of your series
    introductory message in ``.hg/last-email.txt``.

    The default behavior of this command can be customized through
    configuration. (See :hg:`help patchbomb` for details)

    Examples::

      hg email -r 3000          # send patch 3000 only
      hg email -r 3000 -r 3001  # send patches 3000 and 3001
      hg email -r 3000:3005     # send patches 3000 through 3005
      hg email 3000             # send patch 3000 (deprecated)

      hg email -o               # send all patches not in default
      hg email -o DEST          # send all patches not in DEST
      hg email -o -r 3000       # send all ancestors of 3000 not in default
      hg email -o -r 3000 DEST  # send all ancestors of 3000 not in DEST

      hg email -B feature       # send all ancestors of feature bookmark

      hg email -b               # send bundle of all patches not in default
      hg email -b DEST          # send bundle of all patches not in DEST
      hg email -b -r 3000       # bundle of all ancestors of 3000 not in default
      hg email -b -r 3000 DEST  # bundle of all ancestors of 3000 not in DEST

      hg email -o -m mbox &&    # generate an mbox file...
        mutt -R -f mbox         # ... and view it with mutt
      hg email -o -m mbox &&    # generate an mbox file ...
        formail -s sendmail \\   # ... and use formail to send from the mbox
          -bm -t < mbox         # ... using sendmail

    Before using this command, you will need to enable email in your
    hgrc. See the [email] section in hgrc(5) for details.
    '''
    opts = pycompat.byteskwargs(opts)

    _charsets = mail._charsets(ui)

    bundle = opts.get('bundle')
    date = opts.get('date')
    mbox = opts.get('mbox')
    outgoing = opts.get('outgoing')
    rev = opts.get('rev')
    bookmark = opts.get('bookmark')

    if not (opts.get('test') or mbox):
        # really sending
        mail.validateconfig(ui)

    if not (revs or rev or outgoing or bundle or bookmark):
        raise error.Abort(
            _('specify at least one changeset with -B, -r or -o'))

    if outgoing and bundle:
        raise error.Abort(
            _("--outgoing mode always on with --bundle;"
              " do not re-specify --outgoing"))
    if rev and bookmark:
        raise error.Abort(_("-r and -B are mutually exclusive"))

    if outgoing or bundle:
        if len(revs) > 1:
            raise error.Abort(_("too many destinations"))
        if revs:
            dest = revs[0]
        else:
            dest = None
        revs = []

    if rev:
        if revs:
            raise error.Abort(_('use only one form to specify the revision'))
        revs = rev
    elif bookmark:
        if bookmark not in repo._bookmarks:
            raise error.Abort(_("bookmark '%s' not found") % bookmark)
        revs = repair.stripbmrevset(repo, bookmark)

    revs = scmutil.revrange(repo, revs)
    if outgoing:
        revs = _getoutgoing(repo, dest, revs)
    if bundle:
        opts['revs'] = [str(r) for r in revs]

    # check if revisions exist on the public destination
    publicurl = repo.ui.config('patchbomb', 'publicurl')
    if publicurl:
        repo.ui.debug('checking that revisions exist in the public repo\n')
        try:
            publicpeer = hg.peer(repo, {}, publicurl)
        except error.RepoError:
            repo.ui.write_err(
                _('unable to access public repo: %s\n') % publicurl)
            raise
        if not publicpeer.capable('known'):
            repo.ui.debug('skipping existence checks: public repo too old\n')
        else:
            out = [repo[r] for r in revs]
            known = publicpeer.known(h.node() for h in out)
            missing = []
            for idx, h in enumerate(out):
                if not known[idx]:
                    missing.append(h)
            if missing:
                if len(missing) > 1:
                    msg = _('public url "%s" is missing %s and %i others')
                    msg %= (publicurl, missing[0], len(missing) - 1)
                else:
                    msg = _('public url "%s" is missing %s')
                    msg %= (publicurl, missing[0])
                revhint = ' '.join('-r %s' % h
                                   for h in repo.set('heads(%ld)', missing))
                hint = _("use 'hg push %s %s'") % (publicurl, revhint)
                raise error.Abort(msg, hint=hint)

    # start
    if date:
        start_time = util.parsedate(date)
    else:
        start_time = util.makedate()

    def genmsgid(id):
        return '<%s.%s@%s>' % (id[:20], int(start_time[0]), socket.getfqdn())

    # deprecated config: patchbomb.from
    sender = (opts.get('from') or ui.config('email', 'from')
              or ui.config('patchbomb', 'from')
              or prompt(ui, 'From', ui.username()))

    if bundle:
        stropts = pycompat.strkwargs(opts)
        bundledata = _getbundle(repo, dest, **stropts)
        bundleopts = stropts.copy()
        bundleopts.pop(r'bundle', None)  # already processed
        msgs = _getbundlemsgs(repo, sender, bundledata, **bundleopts)
    else:
        msgs = _getpatchmsgs(repo, sender, revs, **pycompat.strkwargs(opts))

    showaddrs = []

    def getaddrs(header, ask=False, default=None):
        configkey = header.lower()
        opt = header.replace('-', '_').lower()
        addrs = opts.get(opt)
        if addrs:
            showaddrs.append('%s: %s' % (header, ', '.join(addrs)))
            return mail.addrlistencode(ui, addrs, _charsets, opts.get('test'))

        # not on the command line: fallback to config and then maybe ask
        addr = (ui.config('email', configkey)
                or ui.config('patchbomb', configkey))
        if not addr:
            specified = (ui.hasconfig('email', configkey)
                         or ui.hasconfig('patchbomb', configkey))
            if not specified and ask:
                addr = prompt(ui, header, default=default)
        if addr:
            showaddrs.append('%s: %s' % (header, addr))
            return mail.addrlistencode(ui, [addr], _charsets, opts.get('test'))
        elif default:
            return mail.addrlistencode(ui, [default], _charsets,
                                       opts.get('test'))
        return []

    to = getaddrs('To', ask=True)
    if not to:
        # we can get here in non-interactive mode
        raise error.Abort(_('no recipient addresses provided'))
    cc = getaddrs('Cc', ask=True, default='')
    bcc = getaddrs('Bcc')
    replyto = getaddrs('Reply-To')

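    # --diffstat, like --confirm, forces the final summary and prompt below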
    confirm = ui.configbool('patchbomb', 'confirm')
    confirm |= bool(opts.get('diffstat') or opts.get('confirm'))

    if confirm:
        ui.write(_('\nFinal summary:\n\n'), label='patchbomb.finalsummary')
        ui.write(('From: %s\n' % sender), label='patchbomb.from')
        for addr in showaddrs:
            ui.write('%s\n' % addr, label='patchbomb.to')
        for m, subj, ds in msgs:
            ui.write(('Subject: %s\n' % subj), label='patchbomb.subject')
            if ds:
                ui.write(ds, label='patchbomb.diffstats')
        ui.write('\n')
        if ui.promptchoice(
                _('are you sure you want to send (yn)?'
                  '$$ &Yes $$ &No')):
            raise error.Abort(_('patchbomb canceled'))

    ui.write('\n')

    parent = opts.get('in_reply_to') or None
    # angle brackets may be omitted, they're not semantically part of the msg-id
    if parent is not None:
        if not parent.startswith('<'):
            parent = '<' + parent
        if not parent.endswith('>'):
            parent += '>'

    sender_addr = emailmod.Utils.parseaddr(sender)[1]
    sender = mail.addressencode(ui, sender, _charsets, opts.get('test'))
    sendmail = None
    firstpatch = None
    for i, (m, subj, ds) in enumerate(msgs):
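        # messages without an X-Mercurial-Node header (the introduction or a
        # bundle) make genmsgid(None) raise TypeError; fall back to a generic id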
        try:
            m['Message-Id'] = genmsgid(m['X-Mercurial-Node'])
            if not firstpatch:
                firstpatch = m['Message-Id']
            m['X-Mercurial-Series-Id'] = firstpatch
        except TypeError:
            m['Message-Id'] = genmsgid('patchbomb')
        if parent:
            m['In-Reply-To'] = parent
            m['References'] = parent
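        # subsequent messages reply to the most recent non-patch message
        # (usually the introduction), or to the first message otherwise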
        if not parent or 'X-Mercurial-Node' not in m:
            parent = m['Message-Id']

        m['User-Agent'] = 'Mercurial-patchbomb/%s' % util.version()
        m['Date'] = emailmod.Utils.formatdate(start_time[0], localtime=True)

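        # advance the timestamp by one second so messages sort in send order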
        start_time = (start_time[0] + 1, start_time[1])
        m['From'] = sender
        m['To'] = ', '.join(to)
        if cc:
            m['Cc'] = ', '.join(cc)
        if bcc:
            m['Bcc'] = ', '.join(bcc)
        if replyto:
            m['Reply-To'] = ', '.join(replyto)
        if opts.get('test'):
            ui.status(_('displaying '), subj, ' ...\n')
            ui.pager('email')
            generator = emailmod.Generator.Generator(ui, mangle_from_=False)
            try:
                generator.flatten(m, 0)
                ui.write('\n')
            except IOError as inst:
                if inst.errno != errno.EPIPE:
                    raise
        else:
            if not sendmail:
                sendmail = mail.connect(ui, mbox=mbox)
            ui.status(_('sending '), subj, ' ...\n')
            ui.progress(_('sending'),
                        i,
                        item=subj,
                        total=len(msgs),
                        unit=_('emails'))
            if not mbox:
                # Exim does not remove the Bcc field
                del m['Bcc']
            fp = stringio()
            generator = emailmod.Generator.Generator(fp, mangle_from_=False)
            generator.flatten(m, 0)
            sendmail(sender_addr, to + bcc + cc, fp.getvalue())

    ui.progress(_('writing'), None)
    ui.progress(_('sending'), None)
Example #58
0
def verify(ui, repo, hgctx):
    '''verify that a Mercurial rev matches the corresponding Git rev

    Given a Mercurial revision that has a corresponding Git revision in the map,
    this attempts to answer whether that revision has the same contents as the
    corresponding Git revision.

    '''
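    # strategy: map the hg revision to its git commit, then compare file
    # flags and contents between the hg manifest and the git tree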
    handler = repo.githandler

    gitsha = handler.map_git_get(hgctx.hex())
    if not gitsha:
        # TODO deal better with commits in the middle of octopus merges
        raise hgutil.Abort(
            _('no git commit found for rev %s') % hgctx,
            hint=_('if this is an octopus merge, verify against the last rev'))

    try:
        gitcommit = handler.git.get_object(gitsha)
    except KeyError:
        raise hgutil.Abort(
            _('git equivalent %s for rev %s not found!') % (gitsha, hgctx))
    if not isinstance(gitcommit, Commit):
        raise hgutil.Abort(
            _('git equivalent %s for rev %s is not a commit!') %
            (gitsha, hgctx))

    ui.status(_('verifying rev %s against git commit %s\n') % (hgctx, gitsha))
    failed = False

    # TODO check commit message and other metadata

    dirkind = stat.S_IFDIR

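    # iterating a changectx yields the names of the files it tracks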
    hgfiles = set(hgctx)
    # TODO deal with submodules
    hgfiles.discard('.hgsubstate')
    hgfiles.discard('.hgsub')
    gitfiles = set()

    i = 0
    for gitfile, dummy in diff_tree.walk_trees(handler.git.object_store,
                                               gitcommit.tree, None):
        if gitfile.mode == dirkind:
            continue
        # TODO deal with submodules
        if (gitfile.mode == S_IFGITLINK or gitfile.path == '.hgsubstate'
                or gitfile.path == '.hgsub'):
            continue
        ui.progress('verify', i, total=len(hgfiles))
        i += 1
        gitfiles.add(gitfile.path)

        try:
            fctx = hgctx[gitfile.path]
        except error.LookupError:
            # we'll deal with this at the end
            continue

        hgflags = fctx.flags()
        gitflags = handler.convert_git_int_mode(gitfile.mode)
        if hgflags != gitflags:
            ui.write(
                _("file has different flags: %s (hg '%s', git '%s')\n") %
                (gitfile.path, hgflags, gitflags))
            failed = True
        if fctx.data() != handler.git[gitfile.sha].data:
            ui.write(_('difference in: %s\n') % gitfile.path)
            failed = True

    ui.progress('verify', None, total=len(hgfiles))

    if hgfiles != gitfiles:
        failed = True
        missing = gitfiles - hgfiles
        for f in sorted(missing):
            ui.write(_('file found in git but not hg: %s\n') % f)
        unexpected = hgfiles - gitfiles
        for f in sorted(unexpected):
            ui.write(_('file found in hg but not git: %s\n') % f)

    if failed:
        return 1
    else:
        return 0
Example #59
0
def parsereleasenotesfile(sections, text):
    """Parse text content containing generated release notes."""
    notes = parsedreleasenotes()

    blocks = minirst.parse(text)[0]

    def gatherparagraphsbullets(offset, title=False):
        notefragment = []

        for i in range(offset + 1, len(blocks)):
            block = blocks[i]

            if block['type'] == 'margin':
                continue
            elif block['type'] == 'section':
                break
            elif block['type'] == 'bullet':
                if block['indent'] != 0:
                    raise error.Abort(_('indented bullet lists not supported'))
                if title:
                    lines = [l[1:].strip() for l in block['lines']]
                    notefragment.append(lines)
                    continue
                else:
                    lines = [[l[1:].strip() for l in block['lines']]]

                    for nextblock in blocks[i + 1:]:
                        if nextblock['type'] in ('bullet', 'section'):
                            break
                        if nextblock['type'] == 'paragraph':
                            lines.append(nextblock['lines'])
                    notefragment.append(lines)
                    continue
            elif block['type'] != 'paragraph':
                raise error.Abort(
                    _('unexpected block type in release notes: '
                      '%s') % block['type'])
            if title:
                notefragment.append(block['lines'])

        return notefragment

    currentsection = None
    for i, block in enumerate(blocks):
        if block['type'] != 'section':
            continue

        title = block['lines'][0]

        # TODO the parsing around paragraphs and bullet points needs some
        # work.
        if block['underline'] == '=':  # main section
            name = sections.sectionfromtitle(title)
            if not name:
                raise error.Abort(
                    _('unknown release notes section: %s') % title)

            currentsection = name
            bullet_points = gatherparagraphsbullets(i)
            if bullet_points:
                for para in bullet_points:
                    notes.addnontitleditem(currentsection, para)

        elif block['underline'] == '-':  # sub-section
            if title == BULLET_SECTION:
                bullet_points = gatherparagraphsbullets(i)
                for para in bullet_points:
                    notes.addnontitleditem(currentsection, para)
            else:
                paragraphs = gatherparagraphsbullets(i, True)
                notes.addtitleditem(currentsection, title, paragraphs)
        else:
            raise error.Abort(_('unsupported section type for %s') % title)

    return notes
Example #60
0
def extsetup(ui):
    if ui.configbool('win32text', 'warn', True):
        ui.warn(_("win32text is deprecated: "
                  "http://mercurial.selenic.com/wiki/Win32TextExtension\n"))