Code example #1
File: bugzilla.py  Project: Nurb432/plan9front
    def notify(self, ids, committer):
        '''tell bugzilla to send mail.'''

        self.ui.status(_('telling bugzilla to send mail:\n'))
        (user, userid) = self.get_bugzilla_user(committer)
        for id in ids:
            self.ui.status(_('  bug %s\n') % id)
            cmdfmt = self.ui.config('bugzilla', 'notify', self.default_notify)
            bzdir = self.ui.config('bugzilla', 'bzdir', '/var/www/html/bugzilla')
            try:
                # Backwards-compatible with old notify string, which
                # took one string. This will throw with a new format
                # string.
                cmd = cmdfmt % id
            except TypeError:
                cmd = cmdfmt % {'bzdir': bzdir, 'id': id, 'user': user}
            self.ui.note(_('running notify command %s\n') % cmd)
            fp = util.popen('(%s) 2>&1' % cmd)
            out = fp.read()
            ret = fp.close()
            if ret:
                self.ui.warn(out)
                raise util.Abort(_('bugzilla notify command %s') %
                                 util.explain_exit(ret)[0])
        self.ui.status(_('done\n'))
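The try/except TypeError above lets a single 'bugzilla.notify' setting accept either the old template (one positional %s for the bug id) or the newer template with named fields. A self-contained sketch of that fallback; the helper name and the template strings below are illustrative only, not the extension's real defaults:

def expandnotify(cmdfmt, bugid, user, bzdir):
    """Expand a notify template of either style (illustrative helper)."""
    try:
        # old style: exactly one positional placeholder for the bug id
        return cmdfmt % bugid
    except TypeError:
        # new style: named fields raise TypeError when given a bare value
        return cmdfmt % {'bzdir': bzdir, 'id': bugid, 'user': user}

print(expandnotify('notify-bug %s', 42, 'alice', '/var/www/html/bugzilla'))
print(expandnotify('cd %(bzdir)s && notify-bug %(id)s %(user)s',
                   42, 'alice', '/var/www/html/bugzilla'))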
Code example #2
File: pager.py  Project: vvergu/mercurial
def _runpager(p):
    if not hasattr(os, "fork"):
        sys.stdout = util.popen(p, "wb")
        if sys.stderr.isatty():
            sys.stderr = sys.stdout
        return
    fdin, fdout = os.pipe()
    pid = os.fork()
    if pid == 0:
        os.close(fdin)
        os.dup2(fdout, sys.stdout.fileno())
        if sys.stderr.isatty():
            os.dup2(fdout, sys.stderr.fileno())
        os.close(fdout)
        return
    os.dup2(fdin, sys.stdin.fileno())
    os.close(fdin)
    os.close(fdout)
    try:
        os.execvp("/bin/sh", ["/bin/sh", "-c", p])
    except OSError, e:
        if e.errno == errno.ENOENT:
            # no /bin/sh, try executing the pager directly
            args = shlex.split(p)
            os.execvp(args[0], args)
        else:
            raise
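Where os.fork is unavailable, the example above simply rebinds sys.stdout to a pipe feeding the pager. A standalone sketch of that fallback using only the standard library, with subprocess.Popen in place of Mercurial's util.popen and 'less' as an assumed pager:

import subprocess
import sys

# Minimal sketch: send everything we print through a pager process.
pager = subprocess.Popen(['less'], stdin=subprocess.PIPE,
                         universal_newlines=True)
sys.stdout = pager.stdin          # print() now writes into the pager
print('one line of paged output')
sys.stdout = sys.__stdout__       # restore our real stdout
pager.stdin.close()               # EOF lets the pager exit when done
pager.wait()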
Code example #3
File: common.py  Project: carlgao/lenga
 def _run(self, cmd, *args, **kwargs):
     cmdline = self._cmdline(cmd, *args, **kwargs)
     self.prerun()
     try:
         return util.popen(cmdline)
     finally:
         self.postrun()
Code example #4
File: importer.py  Project: davidshepherd7/dotfiles
 def content(self, rev):
     text = None
     if os.path.isfile(self.rcspath):
         cmd = 'co -kk -q -p1.%d %s' % (rev, util.shellquote(self.rcspath))
         with util.popen(cmd, mode='rb') as fp:
             text = fp.read()
     return text
Code example #5
File: bugzilla.py  Project: sdr01810/intellij-idea-ce
 def notify(self, bugs, committer):
     '''tell bugzilla to send mail.'''
     self.ui.status(_('telling bugzilla to send mail:\n'))
     (user, userid) = self.get_bugzilla_user(committer)
     for id in bugs.keys():
         self.ui.status(_('  bug %s\n') % id)
         cmdfmt = self.ui.config('bugzilla', 'notify', self.default_notify)
         bzdir = self.ui.config('bugzilla', 'bzdir',
                                '/var/www/html/bugzilla')
         try:
             # Backwards-compatible with old notify string, which
             # took one string. This will throw with a new format
             # string.
             cmd = cmdfmt % id
         except TypeError:
             cmd = cmdfmt % {'bzdir': bzdir, 'id': id, 'user': user}
         self.ui.note(_('running notify command %s\n') % cmd)
         fp = util.popen('(%s) 2>&1' % cmd)
         out = fp.read()
         ret = fp.close()
         if ret:
             self.ui.warn(out)
             raise util.Abort(
                 _('bugzilla notify command %s') % util.explainexit(ret)[0])
     self.ui.status(_('done\n'))
Code example #6
File: pager.py  Project: Azult/containers-exercise
def _pagerfork(ui, p):
    if not util.safehasattr(os, 'fork'):
        sys.stdout = util.popen(p, 'wb')
        if ui._isatty(sys.stderr):
            sys.stderr = sys.stdout
        return
    fdin, fdout = os.pipe()
    pid = os.fork()
    if pid == 0:
        os.close(fdin)
        os.dup2(fdout, sys.stdout.fileno())
        if ui._isatty(sys.stderr):
            os.dup2(fdout, sys.stderr.fileno())
        os.close(fdout)
        return
    os.dup2(fdin, sys.stdin.fileno())
    os.close(fdin)
    os.close(fdout)
    try:
        os.execvp('/bin/sh', ['/bin/sh', '-c', p])
    except OSError, e:
        if e.errno == errno.ENOENT:
            # no /bin/sh, try executing the pager directly
            args = shlex.split(p)
            os.execvp(args[0], args)
        else:
            raise
Code example #7
File: pager.py  Project: spraints/for-example
def _pagerfork(ui, p):
    if not util.safehasattr(os, 'fork'):
        sys.stdout = util.popen(p, 'wb')
        if ui._isatty(sys.stderr):
            sys.stderr = sys.stdout
        return
    fdin, fdout = os.pipe()
    pid = os.fork()
    if pid == 0:
        os.close(fdin)
        os.dup2(fdout, sys.stdout.fileno())
        if ui._isatty(sys.stderr):
            os.dup2(fdout, sys.stderr.fileno())
        os.close(fdout)
        return
    os.dup2(fdin, sys.stdin.fileno())
    os.close(fdin)
    os.close(fdout)
    try:
        os.execvp('/bin/sh', ['/bin/sh', '-c', p])
    except OSError, e:
        if e.errno == errno.ENOENT:
            # no /bin/sh, try executing the pager directly
            args = shlex.split(p)
            os.execvp(args[0], args)
        else:
            raise
Code example #8
File: p4.py  Project: davidshepherd7/dotfiles
 def helper():
     stdout = util.popen(cmd, mode='rb')
     for each in loaditer(stdout):
         client_name = each.get('client', None)
         if client_name is not None and client_name == client:
             return True
     return False
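Several of the p4 examples on this page pipe 'p4 -G' (marshalled dictionary output) through util.popen and iterate the stream with loaditer. loaditer itself is not shown here; a minimal sketch of how such an iterator can be written, assuming the stream is a sequence of consecutive marshal-encoded dicts:

import marshal

def loaditer(f):
    """Yield the dictionaries that 'p4 -G' marshals to the stream f."""
    try:
        while True:
            d = marshal.load(f)
            if not d:
                break
            yield d
    except EOFError:
        pass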
Code example #9
File: p4.py  Project: davidshepherd7/dotfiles
def get_filelogs_at_cl(client, clnum):
    cmd = 'p4 --client %s -G fstat -T ' \
         '"depotFile,headAction,headType,headRev" ' \
         '"//%s/..."@%d' % (
         util.shellquote(client),
         util.shellquote(client),
         clnum
         )
    stdout = util.popen(cmd, mode='rb')
    try:
        result = []
        for d in loaditer(stdout):
            if d.get('depotFile'):
                headaction = d['headAction']
                if headaction in ACTION_ARCHIVE or headaction in ACTION_DELETE:
                    continue
                depotfile = d['depotFile']
                filelog = {}
                filelog[clnum] = {
                    'action': d['headAction'],
                    'type': d['headType'],
                }
                result.append(P4Filelog(depotfile, filelog))
        return result
    except Exception:
        raise P4Exception(stdout)
Code example #10
 def _run(self, cmd, *args, **kwargs):
     cmdline = self._cmdline(cmd, *args, **kwargs)
     self.ui.debug('running: %s\n' % (cmdline,))
     self.prerun()
     try:
         return util.popen(cmdline)
     finally:
         self.postrun()
Code example #11
File: git.py  Project: RayFerr000/PLTL
 def gitopen(self, s, err=None):
     if err == subprocess.PIPE:
         (sin, so, se) = util.popen3('GIT_DIR=%s %s' % (self.path, s))
         return so
     elif err == subprocess.STDOUT:
         return self.popen_with_stderr(s)
     else:
         return util.popen('GIT_DIR=%s %s' % (self.path, s), 'rb')
Code example #12
File: common.py  Project: Nurb432/plan9front
 def _run(self, cmd, *args, **kwargs):
     cmdline = self._cmdline(cmd, *args, **kwargs)
     self.ui.debug(_('running: %s\n') % (cmdline,))
     self.prerun()
     try:
         return util.popen(cmdline)
     finally:
         self.postrun()
Code example #13
 def _parse_view(self, path):
     "Read changes affecting the path"
     cmd = 'p4 -G changes -s submitted "%s"' % path
     stdout = util.popen(cmd)
     for d in loaditer(stdout):
         c = d.get("change", None)
         if c:
             self.p4changes[c] = True
Code example #14
File: p4.py  Project: MezzLabs/mercurial
 def _parse_view(self, path):
     "Read changes affecting the path"
     cmd = 'p4 -G changes -s submitted %s' % util.shellquote(path)
     stdout = util.popen(cmd, mode='rb')
     for d in loaditer(stdout):
         c = d.get("change", None)
         if c:
             self.p4changes[c] = True
Code example #15
 def _parse_view(self, path):
     "Read changes affecting the path"
     cmd = 'p4 -G changes -s submitted %s' % util.shellquote(path)
     stdout = util.popen(cmd, mode='rb')
     for d in loaditer(stdout):
         c = d.get("change", None)
         if c:
             self.p4changes[c] = True
Code example #16
 def gitopen(self, s, err=None):
     if err == subprocess.PIPE:
         (sin, so, se) = util.popen3('GIT_DIR=%s %s' % (self.path, s))
         return so
     elif err == subprocess.STDOUT:
         return self.popen_with_stderr(s)
     else:
         return util.popen('GIT_DIR=%s %s' % (self.path, s), 'rb')
Code example #17
File: p4.py  Project: davidshepherd7/dotfiles
def parse_usermap():
    cmd = 'p4 -G users'
    stdout = util.popen(cmd, mode='rb')
    try:
        for d in loaditer(stdout):
            if d.get('User'):
                yield d
    except Exception:
        raise P4Exception(stdout)
Code example #18
File: p4.py  Project: davidshepherd7/dotfiles
def get_latest_cl(client):
    cmd = 'p4 --client %s -G changes -m 1 -s submitted' % (
            util.shellquote(client))
    stdout = util.popen(cmd, mode='rb')
    parsed = marshal.load(stdout)
    cl = parsed.get('change')
    if cl:
        return int(cl)
    return None
Code example #19
File: pager.py  Project: fuzxxl/plan9front
 def pagecmd(orig, ui, options, cmd, cmdfunc):
     p = ui.config("pager", "pager", os.environ.get("PAGER"))
     if p and sys.stdout.isatty() and "--debugger" not in sys.argv:
         attend = ui.configlist("pager", "attend")
         if cmd in attend or (cmd not in ui.configlist("pager", "ignore") and not attend):
             sys.stderr = sys.stdout = util.popen(p, "wb")
             if ui.configbool("pager", "quiet"):
                 signal.signal(signal.SIGPIPE, signal.SIG_DFL)
     return orig(ui, options, cmd, cmdfunc)
Code example #20
File: git.py  Project: MezzLabs/mercurial
 def gitopen(self, s):
     prevgitdir = os.environ.get('GIT_DIR')
     os.environ['GIT_DIR'] = self.path
     try:
         return util.popen(s, 'rb')
     finally:
         if prevgitdir is None:
             del os.environ['GIT_DIR']
         else:
             os.environ['GIT_DIR'] = prevgitdir
Code example #21
 def pagecmd(orig, ui, options, cmd, cmdfunc):
     p = ui.config("pager", "pager", os.environ.get("PAGER"))
     if p and sys.stdout.isatty() and '--debugger' not in sys.argv:
         attend = ui.configlist('pager', 'attend', attended)
         if (cmd in attend or
             (cmd not in ui.configlist('pager', 'ignore') and not attend)):
             sys.stderr = sys.stdout = util.popen(p, "wb")
             if ui.configbool('pager', 'quiet'):
                 signal.signal(signal.SIGPIPE, signal.SIG_DFL)
     return orig(ui, options, cmd, cmdfunc)
Code example #22
File: git.py  Project: ezc/mercurial
 def gitopen(self, s):
     prevgitdir = os.environ.get('GIT_DIR')
     os.environ['GIT_DIR'] = self.path
     try:
         return util.popen(s, 'rb')
     finally:
         if prevgitdir is None:
             del os.environ['GIT_DIR']
         else:
             os.environ['GIT_DIR'] = prevgitdir
Code example #23
 def pagecmd(orig, ui, options, cmd, cmdfunc):
     p = ui.config("pager", "pager", os.environ.get("PAGER"))
     if p and sys.stdout.isatty() and '--debugger' not in sys.argv:
         attend = ui.configlist('pager', 'attend')
         if (cmd in attend or
             (cmd not in ui.configlist('pager', 'ignore') and not attend)):
             sys.stderr = sys.stdout = util.popen(p, "wb")
             if ui.configbool('pager', 'quiet'):
                 signal.signal(signal.SIGPIPE, signal.SIG_DFL)
     return orig(ui, options, cmd, cmdfunc)
Code example #24
File: importer.py  Project: davidshepherd7/dotfiles
 def revisions(self):
     revs = set()
     if os.path.isfile(self.rcspath):
         stdout = util.popen('rlog %s 2>%s'
                             % (util.shellquote(self.rcspath), os.devnull),
                             mode='rb')
         for l in stdout.readlines():
             m = re.match('revision 1.(\d+)', l)
             if m:
                 revs.add(int(m.group(1)))
     return revs
Code example #25
File: p4.py  Project: davidshepherd7/dotfiles
def parse_filelist_at_cl(client, cl=None):
    cmd = 'p4 --client %s -G files //%s/...@%d' %(
            util.shellquote(client),
            util.shellquote(client),
            cl
            )
    stdout = util.popen(cmd, mode='rb')
    for d in loaditer(stdout):
        c = d.get('depotFile', None)
        if c:
            yield d
Code example #26
File: p4.py  Project: davidshepherd7/dotfiles
def parse_filelist(client, startcl=None, endcl=None):
    if startcl is None:
        startcl = 0

    cmd = 'p4 --client %s -G files -a //%s/...%s' % (
            util.shellquote(client),
            util.shellquote(client),
            revrange(startcl, endcl))
    stdout = util.popen(cmd, mode='rb')
    for d in loaditer(stdout):
        c = d.get('depotFile', None)
        if c:
            yield d
Code example #27
File: git.py  Project: agbiotec/galaxy-tools-vcr
 def gitopen(self, s, noerr=False):
     prevgitdir = os.environ.get('GIT_DIR')
     os.environ['GIT_DIR'] = self.path
     try:
         if noerr:
             (stdin, stdout, stderr) = util.popen3(s)
             return stdout
         else:
             return util.popen(s, 'rb')
     finally:
         if prevgitdir is None:
             del os.environ['GIT_DIR']
         else:
             os.environ['GIT_DIR'] = prevgitdir
Code example #28
 def gitopen(self, s, noerr=False):
     prevgitdir = os.environ.get('GIT_DIR')
     os.environ['GIT_DIR'] = self.path
     try:
         if noerr:
             (stdin, stdout, stderr) = util.popen3(s)
             return stdout
         else:
             return util.popen(s, 'rb')
     finally:
         if prevgitdir is None:
             del os.environ['GIT_DIR']
         else:
             os.environ['GIT_DIR'] = prevgitdir
Code example #29
 def gitopen(self, s, err=None):
     prevgitdir = os.environ.get('GIT_DIR')
     os.environ['GIT_DIR'] = self.path
     try:
         if err == subprocess.PIPE:
             (stdin, stdout, stderr) = util.popen3(s)
             return stdout
         elif err == subprocess.STDOUT:
             return self.popen_with_stderr(s)
         else:
             return util.popen(s, 'rb')
     finally:
         if prevgitdir is None:
             del os.environ['GIT_DIR']
         else:
             os.environ['GIT_DIR'] = prevgitdir
Code example #30
File: git.py  Project: RayFerr000/PLTL
 def gitopen(self, s, err=None):
     prevgitdir = os.environ.get('GIT_DIR')
     os.environ['GIT_DIR'] = self.path
     try:
         if err == subprocess.PIPE:
             (stdin, stdout, stderr) = util.popen3(s)
             return stdout
         elif err == subprocess.STDOUT:
             return self.popen_with_stderr(s)
         else:
             return util.popen(s, 'rb')
     finally:
         if prevgitdir is None:
             del os.environ['GIT_DIR']
         else:
             os.environ['GIT_DIR'] = prevgitdir
Code example #31
File: p4.py  Project: ZanderZhang/Andriod-Learning
    def getfile(self, name, rev):
        cmd = 'p4 -G print %s' \
            % util.shellquote("%s#%s" % (self.depotname[name], rev))
        stdout = util.popen(cmd, mode='rb')

        mode = None
        contents = ""
        keywords = None

        for d in loaditer(stdout):
            code = d["code"]
            data = d.get("data")

            if code == "error":
                raise IOError(d["generic"], data)

            elif code == "stat":
                if d.get("action") == "purge":
                    return None, None
                p4type = self.re_type.match(d["type"])
                if p4type:
                    mode = ""
                    flags = (p4type.group(1) or "") + (p4type.group(3) or "")
                    if "x" in flags:
                        mode = "x"
                    if p4type.group(2) == "symlink":
                        mode = "l"
                    if "ko" in flags:
                        keywords = self.re_keywords_old
                    elif "k" in flags:
                        keywords = self.re_keywords

            elif code == "text" or code == "binary":
                contents += data

        if mode is None:
            return None, None

        if keywords:
            contents = keywords.sub("$\\1$", contents)
        if mode == "l" and contents.endswith("\n"):
            contents = contents[:-1]

        return contents, mode
Code example #32
    def getfile(self, name, rev):
        cmd = 'p4 -G print %s' \
            % util.shellquote("%s#%s" % (self.depotname[name], rev))
        stdout = util.popen(cmd, mode='rb')

        mode = None
        contents = ""
        keywords = None

        for d in loaditer(stdout):
            code = d["code"]
            data = d.get("data")

            if code == "error":
                raise IOError(d["generic"], data)

            elif code == "stat":
                if d.get("action") == "purge":
                    return None, None
                p4type = self.re_type.match(d["type"])
                if p4type:
                    mode = ""
                    flags = (p4type.group(1) or "") + (p4type.group(3) or "")
                    if "x" in flags:
                        mode = "x"
                    if p4type.group(2) == "symlink":
                        mode = "l"
                    if "ko" in flags:
                        keywords = self.re_keywords_old
                    elif "k" in flags:
                        keywords = self.re_keywords

            elif code == "text" or code == "binary":
                contents += data

        if mode is None:
            return None, None

        if keywords:
            contents = keywords.sub("$\\1$", contents)
        if mode == "l" and contents.endswith("\n"):
            contents = contents[:-1]

        return contents, mode
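The two getfile listings above strip expanded RCS-style keywords with keywords.sub("$\1$", contents). The re_keywords / re_keywords_old patterns are defined elsewhere on the converter class and are not shown on this page; a small sketch with an assumed pattern shows what the substitution does:

import re

# assumed shape of the keyword pattern; the real converter defines its own
re_keywords = re.compile(
    r'\$(Id|Header|Date|DateTime|Change|File|Revision|Author):[^$\n]*\$')

expanded = '$Id: //depot/foo.c#3 $\nint x;\n'
print(re_keywords.sub(r'$\1$', expanded))   # collapses to '$Id$\nint x;\n'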
Code example #33
File: p4.py  Project: davidshepherd7/dotfiles
def parse_changes(client, startcl=None, endcl=None):
    "Read changes affecting the path"
    cmd = 'p4 --client %s -ztag -G changes -s submitted //%s/...%s' % (
        util.shellquote(client),
        util.shellquote(client),
        revrange(startcl, endcl))

    stdout = util.popen(cmd, mode='rb')
    cur_time = time.time()
    for d in loaditer(stdout):
        c = d.get("change", None)
        oc = d.get("oldChange", None)
        user = d.get("user", None)
        commit_time = d.get("time", None)
        time_diff = (cur_time - int(commit_time)) if commit_time else 0
        if oc:
            yield P4Changelist(int(oc), int(c), user, time_diff)
        elif c:
            yield P4Changelist(int(c), int(c), user, time_diff)
Code example #34
    def getfile(self, name, rev):
        cmd = 'p4 -G print "%s#%s"' % (self.depotname[name], rev)
        stdout = util.popen(cmd)

        mode = None
        contents = ""
        keywords = None

        for d in loaditer(stdout):
            code = d["code"]
            data = d.get("data")

            if code == "error":
                raise IOError(d["generic"], data)

            elif code == "stat":
                p4type = self.re_type.match(d["type"])
                if p4type:
                    mode = ""
                    flags = (p4type.group(1) or "") + (p4type.group(3) or "")
                    if "x" in flags:
                        mode = "x"
                    if p4type.group(2) == "symlink":
                        mode = "l"
                    if "ko" in flags:
                        keywords = self.re_keywords_old
                    elif "k" in flags:
                        keywords = self.re_keywords

            elif code == "text" or code == "binary":
                contents += data

        if mode is None:
            raise IOError(0, "bad stat")

        self.modecache[(name, rev)] = mode

        if keywords:
            contents = keywords.sub("$\\1$", contents)
        if mode == "l" and contents.endswith("\n"):
            contents = contents[:-1]

        return contents
Code example #35
File: p4.py  Project: davidshepherd7/dotfiles
def parse_fstat(clnum, client, filter=None):
    cmd = 'p4 --client %s -G fstat -e %d -T ' \
          '"depotFile,headAction,headType,headRev" "//%s/..."' % (
            util.shellquote(client),
            clnum,
            util.shellquote(client))
    stdout = util.popen(cmd, mode='rb')
    try:
        result = []
        for d in loaditer(stdout):
            if d.get('depotFile') and (filter is None or filter(d)):
                if d['headAction'] in ACTION_ARCHIVE:
                    continue
                result.append({
                    'depotFile': d['depotFile'],
                    'action': d['headAction'],
                    'type': d['headType'],
                })
        return result
    except Exception:
        raise P4Exception(stdout)
Code example #36
File: bugzilla.py  Project: Rokko11/idea-community
    def notify(self, ids, committer):
        """tell bugzilla to send mail."""

        self.ui.status(_("telling bugzilla to send mail:\n"))
        (user, userid) = self.get_bugzilla_user(committer)
        for id in ids:
            self.ui.status(_("  bug %s\n") % id)
            cmdfmt = self.ui.config("bugzilla", "notify", self.default_notify)
            bzdir = self.ui.config("bugzilla", "bzdir", "/var/www/html/bugzilla")
            try:
                # Backwards-compatible with old notify string, which
                # took one string. This will throw with a new format
                # string.
                cmd = cmdfmt % id
            except TypeError:
                cmd = cmdfmt % {"bzdir": bzdir, "id": id, "user": user}
            self.ui.note(_("running notify command %s\n") % cmd)
            fp = util.popen("(%s) 2>&1" % cmd)
            out = fp.read()
            ret = fp.close()
            if ret:
                self.ui.warn(out)
                raise util.Abort(_("bugzilla notify command %s") % util.explain_exit(ret)[0])
        self.ui.status(_("done\n"))
Code example #37
File: git.py  Project: ezc/mercurial
 def gitopen(self, s):
     return util.popen('GIT_DIR=%s %s' % (self.path, s), 'rb')
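This one-line variant relies on a POSIX shell to honor the GIT_DIR=... prefix; the variants in examples #20, #22 and #27 instead set and restore os.environ['GIT_DIR'] around the call, which also works without a shell. That save/restore pattern can be factored into a small context manager; a sketch (the gitdir name is made up for illustration):

import os
from contextlib import contextmanager

@contextmanager
def gitdir(path):
    """Temporarily point GIT_DIR at path, restoring the previous value."""
    prev = os.environ.get('GIT_DIR')
    os.environ['GIT_DIR'] = path
    try:
        yield
    finally:
        if prev is None:
            del os.environ['GIT_DIR']
        else:
            os.environ['GIT_DIR'] = prev

# hypothetical use:
#   with gitdir(self.path):
#       return util.popen(s, 'rb')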
Code example #38
File: p4.py  Project: MezzLabs/mercurial
    def _parse(self, ui, path):
        "Prepare list of P4 filenames and revisions to import"
        ui.status(_('reading p4 views\n'))

        # read client spec or view
        if "/" in path:
            self._parse_view(path)
            if path.startswith("//") and path.endswith("/..."):
                views = {path[:-3]:""}
            else:
                views = {"//": ""}
        else:
            cmd = 'p4 -G client -o %s' % util.shellquote(path)
            clientspec = marshal.load(util.popen(cmd, mode='rb'))

            views = {}
            for client in clientspec:
                if client.startswith("View"):
                    sview, cview = clientspec[client].split()
                    self._parse_view(sview)
                    if sview.endswith("...") and cview.endswith("..."):
                        sview = sview[:-3]
                        cview = cview[:-3]
                    cview = cview[2:]
                    cview = cview[cview.find("/") + 1:]
                    views[sview] = cview

        # list of changes that affect our source files
        self.p4changes = self.p4changes.keys()
        self.p4changes.sort(key=int)

        # list with depot pathnames, longest first
        vieworder = views.keys()
        vieworder.sort(key=len, reverse=True)

        # handle revision limiting
        startrev = self.ui.config('convert', 'p4.startrev', default=0)
        self.p4changes = [x for x in self.p4changes
                          if ((not startrev or int(x) >= int(startrev)) and
                              (not self.rev or int(x) <= int(self.rev)))]

        # now read the full changelists to get the list of file revisions
        ui.status(_('collecting p4 changelists\n'))
        lastid = None
        for change in self.p4changes:
            cmd = "p4 -G describe -s %s" % change
            stdout = util.popen(cmd, mode='rb')
            d = marshal.load(stdout)
            desc = self.recode(d["desc"])
            shortdesc = desc.split("\n", 1)[0]
            t = '%s %s' % (d["change"], repr(shortdesc)[1:-1])
            ui.status(util.ellipsis(t, 80) + '\n')

            if lastid:
                parents = [lastid]
            else:
                parents = []

            date = (int(d["time"]), 0)     # timezone not set
            c = commit(author=self.recode(d["user"]), date=util.datestr(date),
                       parents=parents, desc=desc, branch='',
                       extra={"p4": change})

            files = []
            i = 0
            while ("depotFile%d" % i) in d and ("rev%d" % i) in d:
                oldname = d["depotFile%d" % i]
                filename = None
                for v in vieworder:
                    if oldname.startswith(v):
                        filename = views[v] + oldname[len(v):]
                        break
                if filename:
                    files.append((filename, d["rev%d" % i]))
                    self.depotname[filename] = oldname
                i += 1
            self.changeset[change] = c
            self.files[change] = files
            lastid = change

        if lastid:
            self.heads = [lastid]
Code example #39
File: cvsps.py  Project: Distrotech/mercurial
def createlog(ui, directory=None, root="", rlog=True, cache=None):
    '''Collect the CVS rlog'''

    # Because we store many duplicate commit log messages, reusing strings
    # saves a lot of memory and pickle storage space.
    _scache = {}
    def scache(s):
        "return a shared version of a string"
        return _scache.setdefault(s, s)

    ui.status(_('collecting CVS rlog\n'))

    log = []      # list of logentry objects containing the CVS state

    # patterns to match in CVS (r)log output, by state of use
    re_00 = re.compile('RCS file: (.+)$')
    re_01 = re.compile('cvs \\[r?log aborted\\]: (.+)$')
    re_02 = re.compile('cvs (r?log|server): (.+)\n$')
    re_03 = re.compile("(Cannot access.+CVSROOT)|"
                       "(can't create temporary directory.+)$")
    re_10 = re.compile('Working file: (.+)$')
    re_20 = re.compile('symbolic names:')
    re_30 = re.compile('\t(.+): ([\\d.]+)$')
    re_31 = re.compile('----------------------------$')
    re_32 = re.compile('======================================='
                       '======================================$')
    re_50 = re.compile('revision ([\\d.]+)(\s+locked by:\s+.+;)?$')
    re_60 = re.compile(r'date:\s+(.+);\s+author:\s+(.+);\s+state:\s+(.+?);'
                       r'(\s+lines:\s+(\+\d+)?\s+(-\d+)?;)?'
                       r'(\s+commitid:\s+([^;]+);)?'
                       r'(.*mergepoint:\s+([^;]+);)?')
    re_70 = re.compile('branches: (.+);$')

    file_added_re = re.compile(r'file [^/]+ was (initially )?added on branch')

    prefix = ''   # leading path to strip off what we get from CVS

    if directory is None:
        # Current working directory

        # Get the real directory in the repository
        try:
            prefix = open(os.path.join('CVS','Repository')).read().strip()
            directory = prefix
            if prefix == ".":
                prefix = ""
        except IOError:
            raise logerror(_('not a CVS sandbox'))

        if prefix and not prefix.endswith(os.sep):
            prefix += os.sep

        # Use the Root file in the sandbox, if it exists
        try:
            root = open(os.path.join('CVS','Root')).read().strip()
        except IOError:
            pass

    if not root:
        root = os.environ.get('CVSROOT', '')

    # read log cache if one exists
    oldlog = []
    date = None

    if cache:
        cachedir = os.path.expanduser('~/.hg.cvsps')
        if not os.path.exists(cachedir):
            os.mkdir(cachedir)

        # The cvsps cache pickle needs a uniquified name, based on the
        # repository location. The address may have all sort of nasties
        # in it, slashes, colons and such. So here we take just the
        # alphanumeric characters, concatenated in a way that does not
        # mix up the various components, so that
        #    :pserver:user@server:/path
        # and
        #    /pserver/user/server/path
        # are mapped to different cache file names.
        cachefile = root.split(":") + [directory, "cache"]
        cachefile = ['-'.join(re.findall(r'\w+', s)) for s in cachefile if s]
        cachefile = os.path.join(cachedir,
                                 '.'.join([s for s in cachefile if s]))

    if cache == 'update':
        try:
            ui.note(_('reading cvs log cache %s\n') % cachefile)
            oldlog = pickle.load(open(cachefile))
            for e in oldlog:
                if not (util.safehasattr(e, 'branchpoints') and
                        util.safehasattr(e, 'commitid') and
                        util.safehasattr(e, 'mergepoint')):
                    ui.status(_('ignoring old cache\n'))
                    oldlog = []
                    break

            ui.note(_('cache has %d log entries\n') % len(oldlog))
        except Exception as e:
            ui.note(_('error reading cache: %r\n') % e)

        if oldlog:
            date = oldlog[-1].date    # last commit date as a (time,tz) tuple
            date = util.datestr(date, '%Y/%m/%d %H:%M:%S %1%2')

    # build the CVS commandline
    cmd = ['cvs', '-q']
    if root:
        cmd.append('-d%s' % root)
        p = util.normpath(getrepopath(root))
        if not p.endswith('/'):
            p += '/'
        if prefix:
            # looks like normpath replaces "" by "."
            prefix = p + util.normpath(prefix)
        else:
            prefix = p
    cmd.append(['log', 'rlog'][rlog])
    if date:
        # no space between option and date string
        cmd.append('-d>%s' % date)
    cmd.append(directory)

    # state machine begins here
    tags = {}     # dictionary of revisions on current file with their tags
    branchmap = {} # mapping between branch names and revision numbers
    rcsmap = {}
    state = 0
    store = False # set when a new record can be appended

    cmd = [util.shellquote(arg) for arg in cmd]
    ui.note(_("running %s\n") % (' '.join(cmd)))
    ui.debug("prefix=%r directory=%r root=%r\n" % (prefix, directory, root))

    pfp = util.popen(' '.join(cmd))
    peek = pfp.readline()
    while True:
        line = peek
        if line == '':
            break
        peek = pfp.readline()
        if line.endswith('\n'):
            line = line[:-1]
        #ui.debug('state=%d line=%r\n' % (state, line))

        if state == 0:
            # initial state, consume input until we see 'RCS file'
            match = re_00.match(line)
            if match:
                rcs = match.group(1)
                tags = {}
                if rlog:
                    filename = util.normpath(rcs[:-2])
                    if filename.startswith(prefix):
                        filename = filename[len(prefix):]
                    if filename.startswith('/'):
                        filename = filename[1:]
                    if filename.startswith('Attic/'):
                        filename = filename[6:]
                    else:
                        filename = filename.replace('/Attic/', '/')
                    state = 2
                    continue
                state = 1
                continue
            match = re_01.match(line)
            if match:
                raise logerror(match.group(1))
            match = re_02.match(line)
            if match:
                raise logerror(match.group(2))
            if re_03.match(line):
                raise logerror(line)

        elif state == 1:
            # expect 'Working file' (only when using log instead of rlog)
            match = re_10.match(line)
            assert match, _('RCS file must be followed by working file')
            filename = util.normpath(match.group(1))
            state = 2

        elif state == 2:
            # expect 'symbolic names'
            if re_20.match(line):
                branchmap = {}
                state = 3

        elif state == 3:
            # read the symbolic names and store as tags
            match = re_30.match(line)
            if match:
                rev = [int(x) for x in match.group(2).split('.')]

                # Convert magic branch number to an odd-numbered one
                revn = len(rev)
                if revn > 3 and (revn % 2) == 0 and rev[-2] == 0:
                    rev = rev[:-2] + rev[-1:]
                rev = tuple(rev)

                if rev not in tags:
                    tags[rev] = []
                tags[rev].append(match.group(1))
                branchmap[match.group(1)] = match.group(2)

            elif re_31.match(line):
                state = 5
            elif re_32.match(line):
                state = 0

        elif state == 4:
            # expecting '------' separator before first revision
            if re_31.match(line):
                state = 5
            else:
                assert not re_32.match(line), _('must have at least '
                                                'some revisions')

        elif state == 5:
            # expecting revision number and possibly (ignored) lock indication
            # we create the logentry here from values stored in states 0 to 4,
            # as this state is re-entered for subsequent revisions of a file.
            match = re_50.match(line)
            assert match, _('expected revision number')
            e = logentry(rcs=scache(rcs),
                         file=scache(filename),
                         revision=tuple([int(x) for x in
                                         match.group(1).split('.')]),
                         branches=[],
                         parent=None,
                         commitid=None,
                         mergepoint=None,
                         branchpoints=set())

            state = 6

        elif state == 6:
            # expecting date, author, state, lines changed
            match = re_60.match(line)
            assert match, _('revision must be followed by date line')
            d = match.group(1)
            if d[2] == '/':
                # Y2K
                d = '19' + d

            if len(d.split()) != 3:
                # cvs log dates always in GMT
                d = d + ' UTC'
            e.date = util.parsedate(d, ['%y/%m/%d %H:%M:%S',
                                        '%Y/%m/%d %H:%M:%S',
                                        '%Y-%m-%d %H:%M:%S'])
            e.author = scache(match.group(2))
            e.dead = match.group(3).lower() == 'dead'

            if match.group(5):
                if match.group(6):
                    e.lines = (int(match.group(5)), int(match.group(6)))
                else:
                    e.lines = (int(match.group(5)), 0)
            elif match.group(6):
                e.lines = (0, int(match.group(6)))
            else:
                e.lines = None

            if match.group(7): # cvs 1.12 commitid
                e.commitid = match.group(8)

            if match.group(9): # cvsnt mergepoint
                myrev = match.group(10).split('.')
                if len(myrev) == 2: # head
                    e.mergepoint = 'HEAD'
                else:
                    myrev = '.'.join(myrev[:-2] + ['0', myrev[-2]])
                    branches = [b for b in branchmap if branchmap[b] == myrev]
                    assert len(branches) == 1, ('unknown branch: %s'
                                                % e.mergepoint)
                    e.mergepoint = branches[0]

            e.comment = []
            state = 7

        elif state == 7:
            # read the revision numbers of branches that start at this revision
            # or store the commit log message otherwise
            m = re_70.match(line)
            if m:
                e.branches = [tuple([int(y) for y in x.strip().split('.')])
                                for x in m.group(1).split(';')]
                state = 8
            elif re_31.match(line) and re_50.match(peek):
                state = 5
                store = True
            elif re_32.match(line):
                state = 0
                store = True
            else:
                e.comment.append(line)

        elif state == 8:
            # store commit log message
            if re_31.match(line):
                cpeek = peek
                if cpeek.endswith('\n'):
                    cpeek = cpeek[:-1]
                if re_50.match(cpeek):
                    state = 5
                    store = True
                else:
                    e.comment.append(line)
            elif re_32.match(line):
                state = 0
                store = True
            else:
                e.comment.append(line)

        # When a file is added on a branch B1, CVS creates a synthetic
        # dead trunk revision 1.1 so that the branch has a root.
        # Likewise, if you merge such a file to a later branch B2 (one
        # that already existed when the file was added on B1), CVS
        # creates a synthetic dead revision 1.1.x.1 on B2.  Don't drop
        # these revisions now, but mark them synthetic so
        # createchangeset() can take care of them.
        if (store and
              e.dead and
              e.revision[-1] == 1 and      # 1.1 or 1.1.x.1
              len(e.comment) == 1 and
              file_added_re.match(e.comment[0])):
            ui.debug('found synthetic revision in %s: %r\n'
                     % (e.rcs, e.comment[0]))
            e.synthetic = True

        if store:
            # clean up the results and save in the log.
            store = False
            e.tags = sorted([scache(x) for x in tags.get(e.revision, [])])
            e.comment = scache('\n'.join(e.comment))

            revn = len(e.revision)
            if revn > 3 and (revn % 2) == 0:
                e.branch = tags.get(e.revision[:-1], [None])[0]
            else:
                e.branch = None

            # find the branches starting from this revision
            branchpoints = set()
            for branch, revision in branchmap.iteritems():
                revparts = tuple([int(i) for i in revision.split('.')])
                if len(revparts) < 2: # bad tags
                    continue
                if revparts[-2] == 0 and revparts[-1] % 2 == 0:
                    # normal branch
                    if revparts[:-2] == e.revision:
                        branchpoints.add(branch)
                elif revparts == (1, 1, 1): # vendor branch
                    if revparts in e.branches:
                        branchpoints.add(branch)
            e.branchpoints = branchpoints

            log.append(e)

            rcsmap[e.rcs.replace('/Attic/', '/')] = e.rcs

            if len(log) % 100 == 0:
                ui.status(util.ellipsis('%d %s' % (len(log), e.file), 80)+'\n')

    log.sort(key=lambda x: (x.rcs, x.revision))

    # find parent revisions of individual files
    versions = {}
    for e in sorted(oldlog, key=lambda x: (x.rcs, x.revision)):
        rcs = e.rcs.replace('/Attic/', '/')
        if rcs in rcsmap:
            e.rcs = rcsmap[rcs]
        branch = e.revision[:-1]
        versions[(e.rcs, branch)] = e.revision

    for e in log:
        branch = e.revision[:-1]
        p = versions.get((e.rcs, branch), None)
        if p is None:
            p = e.revision[:-2]
        e.parent = p
        versions[(e.rcs, branch)] = e.revision

    # update the log cache
    if cache:
        if log:
            # join up the old and new logs
            log.sort(key=lambda x: x.date)

            if oldlog and oldlog[-1].date >= log[0].date:
                raise logerror(_('log cache overlaps with new log entries,'
                                 ' re-run without cache.'))

            log = oldlog + log

            # write the new cachefile
            ui.note(_('writing cvs log cache %s\n') % cachefile)
            pickle.dump(log, open(cachefile, 'w'))
        else:
            log = oldlog

    ui.status(_('%d log entries\n') % len(log))

    hook.hook(ui, None, "cvslog", True, log=log)

    return log
Code example #40
def patchbomb(ui, repo, *revs, **opts):
    '''send changesets by email

    By default, diffs are sent in the format generated by
    :hg:`export`, one per message. The series starts with a "[PATCH 0
    of N]" introduction, which describes the series as a whole.

    Each patch email has a Subject line of "[PATCH M of N] ...", using
    the first line of the changeset description as the subject text.
    The message contains two or three parts. First, the changeset
    description.

    With the -d/--diffstat option, if the diffstat program is
    installed, the result of running diffstat on the patch is inserted.

    Finally, the patch itself, as generated by :hg:`export`.

    With the -d/--diffstat or --confirm options, you will be presented
    with a final summary of all messages and asked for confirmation before
    the messages are sent.

    By default the patch is included as text in the email body for
    easy reviewing. Using the -a/--attach option will instead create
    an attachment for the patch. With -i/--inline an inline attachment
    will be created. You can include a patch both as text in the email
    body and as a regular or an inline attachment by combining the
    -a/--attach or -i/--inline with the --body option.

    With -o/--outgoing, emails will be generated for patches not found
    in the destination repository (or only those which are ancestors
    of the specified revisions if any are provided)

    With -b/--bundle, changesets are selected as for --outgoing, but a
    single email containing a binary Mercurial bundle as an attachment
    will be sent.

    With -m/--mbox, instead of previewing each patchbomb message in a
    pager or sending the messages directly, it will create a UNIX
    mailbox file with the patch emails. This mailbox file can be
    previewed with any mail user agent which supports UNIX mbox
    files.

    With -n/--test, all steps will run, but mail will not be sent.
    You will be prompted for an email recipient address, a subject and
    an introductory message describing the patches of your patchbomb.
    Then when all is done, patchbomb messages are displayed. If the
    PAGER environment variable is set, your pager will be fired up once
    for each patchbomb message, so you can verify everything is alright.

    In case email sending fails, you will find a backup of your series
    introductory message in ``.hg/last-email.txt``.

    The default behavior of this command can be customized through
    configuration. (See :hg:`help patchbomb` for details)

    Examples::

      hg email -r 3000          # send patch 3000 only
      hg email -r 3000 -r 3001  # send patches 3000 and 3001
      hg email -r 3000:3005     # send patches 3000 through 3005
      hg email 3000             # send patch 3000 (deprecated)

      hg email -o               # send all patches not in default
      hg email -o DEST          # send all patches not in DEST
      hg email -o -r 3000       # send all ancestors of 3000 not in default
      hg email -o -r 3000 DEST  # send all ancestors of 3000 not in DEST

      hg email -b               # send bundle of all patches not in default
      hg email -b DEST          # send bundle of all patches not in DEST
      hg email -b -r 3000       # bundle of all ancestors of 3000 not in default
      hg email -b -r 3000 DEST  # bundle of all ancestors of 3000 not in DEST

      hg email -o -m mbox &&    # generate an mbox file...
        mutt -R -f mbox         # ... and view it with mutt
      hg email -o -m mbox &&    # generate an mbox file ...
        formail -s sendmail \\   # ... and use formail to send from the mbox
          -bm -t < mbox         # ... using sendmail

    Before using this command, you will need to enable email in your
    hgrc. See the [email] section in hgrc(5) for details.
    '''

    _charsets = mail._charsets(ui)

    bundle = opts.get('bundle')
    date = opts.get('date')
    mbox = opts.get('mbox')
    outgoing = opts.get('outgoing')
    rev = opts.get('rev')
    # internal option used by pbranches
    patches = opts.get('patches')

    if not (opts.get('test') or mbox):
        # really sending
        mail.validateconfig(ui)

    if not (revs or rev or outgoing or bundle or patches):
        raise util.Abort(_('specify at least one changeset with -r or -o'))

    if outgoing and bundle:
        raise util.Abort(
            _("--outgoing mode always on with --bundle;"
              " do not re-specify --outgoing"))

    if outgoing or bundle:
        if len(revs) > 1:
            raise util.Abort(_("too many destinations"))
        if revs:
            dest = revs[0]
        else:
            dest = None
        revs = []

    if rev:
        if revs:
            raise util.Abort(_('use only one form to specify the revision'))
        revs = rev

    if outgoing:
        revs = _getoutgoing(repo, dest, rev)
    if bundle:
        opts['revs'] = revs

    # start
    if date:
        start_time = util.parsedate(date)
    else:
        start_time = util.makedate()

    def genmsgid(id):
        return '<%s.%s@%s>' % (id[:20], int(start_time[0]), socket.getfqdn())

    sender = (opts.get('from') or ui.config('email', 'from')
              or ui.config('patchbomb', 'from')
              or prompt(ui, 'From', ui.username()))

    if patches:
        msgs = _getpatchmsgs(repo, sender, patches, opts.get('patchnames'),
                             **opts)
    elif bundle:
        bundledata = _getbundle(repo, dest, **opts)
        bundleopts = opts.copy()
        bundleopts.pop('bundle', None)  # already processed
        msgs = _getbundlemsgs(repo, sender, bundledata, **bundleopts)
    else:
        _patches = list(_getpatches(repo, revs, **opts))
        msgs = _getpatchmsgs(repo, sender, _patches, **opts)

    showaddrs = []

    def getaddrs(header, ask=False, default=None):
        configkey = header.lower()
        opt = header.replace('-', '_').lower()
        addrs = opts.get(opt)
        if addrs:
            showaddrs.append('%s: %s' % (header, ', '.join(addrs)))
            return mail.addrlistencode(ui, addrs, _charsets, opts.get('test'))

        # not on the command line: fallback to config and then maybe ask
        addr = (ui.config('email', configkey)
                or ui.config('patchbomb', configkey) or '')
        if not addr and ask:
            addr = prompt(ui, header, default=default)
        if addr:
            showaddrs.append('%s: %s' % (header, addr))
            return mail.addrlistencode(ui, [addr], _charsets, opts.get('test'))
        else:
            return default

    to = getaddrs('To', ask=True)
    if not to:
        # we can get here in non-interactive mode
        raise util.Abort(_('no recipient addresses provided'))
    cc = getaddrs('Cc', ask=True, default='') or []
    bcc = getaddrs('Bcc') or []
    replyto = getaddrs('Reply-To')

    confirm = ui.configbool('patchbomb', 'confirm')
    confirm |= bool(opts.get('diffstat') or opts.get('confirm'))

    if confirm:
        ui.write(_('\nFinal summary:\n\n'), label='patchbomb.finalsummary')
        ui.write(('From: %s\n' % sender), label='patchbomb.from')
        for addr in showaddrs:
            ui.write('%s\n' % addr, label='patchbomb.to')
        for m, subj, ds in msgs:
            ui.write(('Subject: %s\n' % subj), label='patchbomb.subject')
            if ds:
                ui.write(ds, label='patchbomb.diffstats')
        ui.write('\n')
        if ui.promptchoice(
                _('are you sure you want to send (yn)?'
                  '$$ &Yes $$ &No')):
            raise util.Abort(_('patchbomb canceled'))

    ui.write('\n')

    parent = opts.get('in_reply_to') or None
    # angle brackets may be omitted, they're not semantically part of the msg-id
    if parent is not None:
        if not parent.startswith('<'):
            parent = '<' + parent
        if not parent.endswith('>'):
            parent += '>'

    sender_addr = email.Utils.parseaddr(sender)[1]
    sender = mail.addressencode(ui, sender, _charsets, opts.get('test'))
    sendmail = None
    firstpatch = None
    for i, (m, subj, ds) in enumerate(msgs):
        try:
            m['Message-Id'] = genmsgid(m['X-Mercurial-Node'])
            if not firstpatch:
                firstpatch = m['Message-Id']
            m['X-Mercurial-Series-Id'] = firstpatch
        except TypeError:
            m['Message-Id'] = genmsgid('patchbomb')
        if parent:
            m['In-Reply-To'] = parent
            m['References'] = parent
        if not parent or 'X-Mercurial-Node' not in m:
            parent = m['Message-Id']

        m['User-Agent'] = 'Mercurial-patchbomb/%s' % util.version()
        m['Date'] = email.Utils.formatdate(start_time[0], localtime=True)

        start_time = (start_time[0] + 1, start_time[1])
        m['From'] = sender
        m['To'] = ', '.join(to)
        if cc:
            m['Cc'] = ', '.join(cc)
        if bcc:
            m['Bcc'] = ', '.join(bcc)
        if replyto:
            m['Reply-To'] = ', '.join(replyto)
        if opts.get('test'):
            ui.status(_('displaying '), subj, ' ...\n')
            ui.flush()
            if 'PAGER' in os.environ and not ui.plain():
                fp = util.popen(os.environ['PAGER'], 'w')
            else:
                fp = ui
            generator = email.Generator.Generator(fp, mangle_from_=False)
            try:
                generator.flatten(m, 0)
                fp.write('\n')
            except IOError, inst:
                if inst.errno != errno.EPIPE:
                    raise
            if fp is not ui:
                fp.close()
        else:
            if not sendmail:
                verifycert = ui.config('smtp', 'verifycert')
                if opts.get('insecure'):
                    ui.setconfig('smtp', 'verifycert', 'loose', 'patchbomb')
                try:
                    sendmail = mail.connect(ui, mbox=mbox)
                finally:
                    ui.setconfig('smtp', 'verifycert', verifycert, 'patchbomb')
            ui.status(_('sending '), subj, ' ...\n')
            ui.progress(_('sending'), i, item=subj, total=len(msgs))
            if not mbox:
                # Exim does not remove the Bcc field
                del m['Bcc']
            fp = cStringIO.StringIO()
            generator = email.Generator.Generator(fp, mangle_from_=False)
            generator.flatten(m, 0)
            sendmail(sender_addr, to + bcc + cc, fp.getvalue())
Code example #41
def patchbomb(ui, repo, *revs, **opts):
    '''send changesets by email

    By default, diffs are sent in the format generated by
    :hg:`export`, one per message. The series starts with a "[PATCH 0
    of N]" introduction, which describes the series as a whole.

    Each patch email has a Subject line of "[PATCH M of N] ...", using
    the first line of the changeset description as the subject text.
    The message contains two or three parts. First, the changeset
    description.

    With the -d/--diffstat option, if the diffstat program is
    installed, the result of running diffstat on the patch is inserted.

    Finally, the patch itself, as generated by :hg:`export`.

    With the -d/--diffstat or -c/--confirm options, you will be presented
    with a final summary of all messages and asked for confirmation before
    the messages are sent.

    By default the patch is included as text in the email body for
    easy reviewing. Using the -a/--attach option will instead create
    an attachment for the patch. With -i/--inline an inline attachment
    will be created.

    With -o/--outgoing, emails will be generated for patches not found
    in the destination repository (or only those which are ancestors
    of the specified revisions if any are provided)

    With -b/--bundle, changesets are selected as for --outgoing, but a
    single email containing a binary Mercurial bundle as an attachment
    will be sent.

    With -m/--mbox, instead of previewing each patchbomb message in a
    pager or sending the messages directly, it will create a UNIX
    mailbox file with the patch emails. This mailbox file can be
    previewed with any mail user agent which supports UNIX mbox
    files.

    With -n/--test, all steps will run, but mail will not be sent.
    You will be prompted for an email recipient address, a subject and
    an introductory message describing the patches of your patchbomb.
    Then when all is done, patchbomb messages are displayed. If the
    PAGER environment variable is set, your pager will be fired up once
    for each patchbomb message, so you can verify everything is alright.

    In case email sending fails, you will find a backup of your series
    introductory message in ``.hg/last-email.txt``.

    Examples::

      hg email -r 3000          # send patch 3000 only
      hg email -r 3000 -r 3001  # send patches 3000 and 3001
      hg email -r 3000:3005     # send patches 3000 through 3005
      hg email 3000             # send patch 3000 (deprecated)

      hg email -o               # send all patches not in default
      hg email -o DEST          # send all patches not in DEST
      hg email -o -r 3000       # send all ancestors of 3000 not in default
      hg email -o -r 3000 DEST  # send all ancestors of 3000 not in DEST

      hg email -b               # send bundle of all patches not in default
      hg email -b DEST          # send bundle of all patches not in DEST
      hg email -b -r 3000       # bundle of all ancestors of 3000 not in default
      hg email -b -r 3000 DEST  # bundle of all ancestors of 3000 not in DEST

      hg email -o -m mbox &&    # generate an mbox file...
        mutt -R -f mbox         # ... and view it with mutt
      hg email -o -m mbox &&    # generate an mbox file ...
        formail -s sendmail \\   # ... and use formail to send from the mbox
          -bm -t < mbox         # ... using sendmail

    Before using this command, you will need to enable email in your
    hgrc. See the [email] section in hgrc(5) for details.
    '''

    _charsets = mail._charsets(ui)

    bundle = opts.get('bundle')
    date = opts.get('date')
    mbox = opts.get('mbox')
    outgoing = opts.get('outgoing')
    rev = opts.get('rev')
    # internal option used by pbranches
    patches = opts.get('patches')

    def getoutgoing(dest, revs):
        '''Return the revisions present locally but not in dest'''
        dest = ui.expandpath(dest or 'default-push', dest or 'default')
        dest, branches = hg.parseurl(dest)
        revs, checkout = hg.addbranchrevs(repo, repo, branches, revs)
        other = hg.peer(repo, opts, dest)
        ui.status(_('comparing with %s\n') % util.hidepassword(dest))
        common, _anyinc, _heads = discovery.findcommonincoming(repo, other)
        nodes = revs and map(repo.lookup, revs) or revs
        o = repo.changelog.findmissing(common, heads=nodes)
        if not o:
            ui.status(_("no changes found\n"))
            return []
        return [str(repo.changelog.rev(r)) for r in o]

    def getpatches(revs):
        for r in scmutil.revrange(repo, revs):
            output = cStringIO.StringIO()
            cmdutil.export(repo, [r], fp=output, opts=patch.diffopts(ui, opts))
            yield output.getvalue().split('\n')

    def getbundle(dest):
        tmpdir = tempfile.mkdtemp(prefix='hg-email-bundle-')
        tmpfn = os.path.join(tmpdir, 'bundle')
        try:
            commands.bundle(ui, repo, tmpfn, dest, **opts)
            fp = open(tmpfn, 'rb')
            data = fp.read()
            fp.close()
            return data
        finally:
            try:
                os.unlink(tmpfn)
            except:
                pass
            os.rmdir(tmpdir)

    if not (opts.get('test') or mbox):
        # really sending
        mail.validateconfig(ui)

    if not (revs or rev or outgoing or bundle or patches):
        raise util.Abort(_('specify at least one changeset with -r or -o'))

    if outgoing and bundle:
        raise util.Abort(
            _("--outgoing mode always on with --bundle;"
              " do not re-specify --outgoing"))

    if outgoing or bundle:
        if len(revs) > 1:
            raise util.Abort(_("too many destinations"))
        dest = revs and revs[0] or None
        revs = []

    if rev:
        if revs:
            raise util.Abort(_('use only one form to specify the revision'))
        revs = rev

    if outgoing:
        revs = getoutgoing(dest, rev)
    if bundle:
        opts['revs'] = revs

    # start
    if date:
        start_time = util.parsedate(date)
    else:
        start_time = util.makedate()

    def genmsgid(id):
        return '<%s.%s@%s>' % (id[:20], int(start_time[0]), socket.getfqdn())

    def getdescription(body, sender):
        if opts.get('desc'):
            body = open(opts.get('desc')).read()
        else:
            ui.write(
                _('\nWrite the introductory message for the '
                  'patch series.\n\n'))
            body = ui.edit(body, sender)
            # Save series description in case sendmail fails
            msgfile = repo.opener('last-email.txt', 'wb')
            msgfile.write(body)
            msgfile.close()
        return body

    def getpatchmsgs(patches, patchnames=None):
        msgs = []

        ui.write(
            _('This patch series consists of %d patches.\n\n') % len(patches))

        # build the intro message, or skip it if the user declines
        if introwanted(opts, len(patches)):
            msg = makeintro(patches)
            if msg:
                msgs.append(msg)

        # are we going to send more than one message?
        numbered = len(msgs) + len(patches) > 1

        # now generate the actual patch messages
        name = None
        for i, p in enumerate(patches):
            if patchnames:
                name = patchnames[i]
            msg = makepatch(ui, repo, p, opts, _charsets, i + 1, len(patches),
                            numbered, name)
            msgs.append(msg)

        return msgs

    def makeintro(patches):
        tlen = len(str(len(patches)))

        flag = opts.get('flag') or ''
        if flag:
            flag = ' ' + ' '.join(flag)
        prefix = '[PATCH %0*d of %d%s]' % (tlen, 0, len(patches), flag)

        subj = (opts.get('subject')
                or prompt(ui, 'Subject: ', rest=prefix, default=''))
        if not subj:
            return None  # skip intro if the user doesn't bother

        subj = prefix + ' ' + subj

        body = ''
        if opts.get('diffstat'):
            # generate a cumulative diffstat of the whole patch series
            diffstat = patch.diffstat(sum(patches, []))
            body = '\n' + diffstat
        else:
            diffstat = None

        body = getdescription(body, sender)
        msg = mail.mimeencode(ui, body, _charsets, opts.get('test'))
        msg['Subject'] = mail.headencode(ui, subj, _charsets, opts.get('test'))
        return (msg, subj, diffstat)

    def getbundlemsgs(bundle):
        subj = (opts.get('subject')
                or prompt(ui, 'Subject:', 'A bundle for your repository'))

        body = getdescription('', sender)
        msg = email.MIMEMultipart.MIMEMultipart()
        if body:
            msg.attach(mail.mimeencode(ui, body, _charsets, opts.get('test')))
        datapart = email.MIMEBase.MIMEBase('application', 'x-mercurial-bundle')
        datapart.set_payload(bundle)
        bundlename = '%s.hg' % opts.get('bundlename', 'bundle')
        datapart.add_header('Content-Disposition',
                            'attachment',
                            filename=bundlename)
        email.Encoders.encode_base64(datapart)
        msg.attach(datapart)
        msg['Subject'] = mail.headencode(ui, subj, _charsets, opts.get('test'))
        return [(msg, subj, None)]

    sender = (opts.get('from') or ui.config('email', 'from')
              or ui.config('patchbomb', 'from')
              or prompt(ui, 'From', ui.username()))

    if patches:
        msgs = getpatchmsgs(patches, opts.get('patchnames'))
    elif bundle:
        msgs = getbundlemsgs(getbundle(dest))
    else:
        msgs = getpatchmsgs(list(getpatches(revs)))

    showaddrs = []

    def getaddrs(header, ask=False, default=None):
        configkey = header.lower()
        opt = header.replace('-', '_').lower()
        addrs = opts.get(opt)
        if addrs:
            showaddrs.append('%s: %s' % (header, ', '.join(addrs)))
            return mail.addrlistencode(ui, addrs, _charsets, opts.get('test'))

        # not on the command line: fallback to config and then maybe ask
        addr = (ui.config('email', configkey)
                or ui.config('patchbomb', configkey) or '')
        if not addr and ask:
            addr = prompt(ui, header, default=default)
        if addr:
            showaddrs.append('%s: %s' % (header, addr))
            return mail.addrlistencode(ui, [addr], _charsets, opts.get('test'))
        else:
            return default

    to = getaddrs('To', ask=True)
    if not to:
        # we can get here in non-interactive mode
        raise util.Abort(_('no recipient addresses provided'))
    cc = getaddrs('Cc', ask=True, default='') or []
    bcc = getaddrs('Bcc') or []
    replyto = getaddrs('Reply-To')

    if opts.get('diffstat') or opts.get('confirm'):
        ui.write(_('\nFinal summary:\n\n'))
        ui.write('From: %s\n' % sender)
        for addr in showaddrs:
            ui.write('%s\n' % addr)
        for m, subj, ds in msgs:
            ui.write('Subject: %s\n' % subj)
            if ds:
                ui.write(ds)
        ui.write('\n')
        if ui.promptchoice(_('are you sure you want to send (yn)?'),
                           (_('&Yes'), _('&No'))):
            raise util.Abort(_('patchbomb canceled'))

    ui.write('\n')

    parent = opts.get('in_reply_to') or None
    # angle brackets may be omitted, they're not semantically part of the msg-id
    if parent is not None:
        if not parent.startswith('<'):
            parent = '<' + parent
        if not parent.endswith('>'):
            parent += '>'

    first = True

    sender_addr = email.Utils.parseaddr(sender)[1]
    sender = mail.addressencode(ui, sender, _charsets, opts.get('test'))
    sendmail = None
    for i, (m, subj, ds) in enumerate(msgs):
        try:
            m['Message-Id'] = genmsgid(m['X-Mercurial-Node'])
        except TypeError:
            m['Message-Id'] = genmsgid('patchbomb')
        if parent:
            m['In-Reply-To'] = parent
            m['References'] = parent
        if first:
            parent = m['Message-Id']
            first = False

        m['User-Agent'] = 'Mercurial-patchbomb/%s' % util.version()
        m['Date'] = email.Utils.formatdate(start_time[0], localtime=True)

        start_time = (start_time[0] + 1, start_time[1])
        m['From'] = sender
        m['To'] = ', '.join(to)
        if cc:
            m['Cc'] = ', '.join(cc)
        if bcc:
            m['Bcc'] = ', '.join(bcc)
        if replyto:
            m['Reply-To'] = ', '.join(replyto)
        if opts.get('test'):
            ui.status(_('Displaying '), subj, ' ...\n')
            ui.flush()
            if 'PAGER' in os.environ and not ui.plain():
                fp = util.popen(os.environ['PAGER'], 'w')
            else:
                fp = ui
            generator = email.Generator.Generator(fp, mangle_from_=False)
            try:
                generator.flatten(m, 0)
                fp.write('\n')
            except IOError, inst:
                if inst.errno != errno.EPIPE:
                    raise
            if fp is not ui:
                fp.close()
        else:
            if not sendmail:
                sendmail = mail.connect(ui, mbox=mbox)
            ui.status(_('Sending '), subj, ' ...\n')
            ui.progress(_('sending'), i, item=subj, total=len(msgs))
            if not mbox:
                # Exim does not remove the Bcc field
                del m['Bcc']
            fp = cStringIO.StringIO()
            generator = email.Generator.Generator(fp, mangle_from_=False)
            generator.flatten(m, 0)
            sendmail(sender_addr, to + bcc + cc, fp.getvalue())
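As the docstring notes, with --test each rendered message is piped to the program named in the PAGER environment variable (via util.popen(os.environ['PAGER'], 'w') above) so the series can be reviewed before anything is sent. util.popen is internal to Mercurial; the sketch below shows the same idea with only the standard library, and the fallback pager name is an assumption added for illustration.

import os
import subprocess

def page(text, default_pager='more'):
    # Pipe text into the user's pager, mirroring the --test preview path above.
    # default_pager is a placeholder; patchbomb itself falls back to plain ui output.
    pager = os.environ.get('PAGER', default_pager)
    proc = subprocess.Popen(pager, shell=True, stdin=subprocess.PIPE)
    try:
        proc.communicate(text.encode('utf-8'))
    except (IOError, OSError):
        # the pager may exit before reading everything (e.g. 'q' in less)
        pass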
Code example #42
0
File: cvsps.py Project: codeskyblue/gobuild-1
    if date:
        # no space between option and date string
        cmd.append('-d>%s' % date)
    cmd.append(directory)

    # state machine begins here
    tags = {}     # dictionary of revisions on current file with their tags
    branchmap = {} # mapping between branch names and revision numbers
    state = 0
    store = False # set when a new record can be appended

    cmd = [util.shellquote(arg) for arg in cmd]
    ui.note(_("running %s\n") % (' '.join(cmd)))
    ui.debug("prefix=%r directory=%r root=%r\n" % (prefix, directory, root))

    pfp = util.popen(' '.join(cmd))
    peek = pfp.readline()
    while True:
        line = peek
        if line == '':
            break
        peek = pfp.readline()
        if line.endswith('\n'):
            line = line[:-1]
        #ui.debug('state=%d line=%r\n' % (state, line))

        if state == 0:
            # initial state, consume input until we see 'RCS file'
            match = re_00.match(line)
            if match:
                rcs = match.group(1)
Code example #43
0
    def _parse(self, ui, path):
        "Prepare list of P4 filenames and revisions to import"
        p4changes = {}
        changeset = {}
        files_map = {}
        copies_map = {}
        localname = {}
        depotname = {}
        heads = []

        ui.status(_('reading p4 views\n'))

        # read client spec or view
        if "/" in path:
            p4changes.update(self._parse_view(path))
            if path.startswith("//") and path.endswith("/..."):
                views = {path[:-3]: ""}
            else:
                views = {"//": ""}
        else:
            cmd = 'p4 -G client -o %s' % util.shellquote(path)
            clientspec = marshal.load(util.popen(cmd, mode='rb'))

            views = {}
            for client in clientspec:
                if client.startswith("View"):
                    sview, cview = clientspec[client].split()
                    p4changes.update(self._parse_view(sview))
                    if sview.endswith("...") and cview.endswith("..."):
                        sview = sview[:-3]
                        cview = cview[:-3]
                    cview = cview[2:]
                    cview = cview[cview.find("/") + 1:]
                    views[sview] = cview

        # list of changes that affect our source files
        p4changes = p4changes.keys()
        p4changes.sort(key=int)

        # list with depot pathnames, longest first
        vieworder = views.keys()
        vieworder.sort(key=len, reverse=True)

        # handle revision limiting
        startrev = self.ui.config('convert', 'p4.startrev', default=0)

        # now read the full changelists to get the list of file revisions
        ui.status(_('collecting p4 changelists\n'))
        lastid = None
        for change in p4changes:
            if startrev and int(change) < int(startrev):
                continue
            if self.revs and int(change) > int(self.revs[0]):
                continue
            if change in self.revmap:
                # Ignore already present revisions, but set the parent pointer.
                lastid = change
                continue

            if lastid:
                parents = [lastid]
            else:
                parents = []

            d = self._fetch_revision(change)
            c = self._construct_commit(d, parents)

            descarr = c.desc.splitlines(True)
            if len(descarr) > 0:
                shortdesc = descarr[0].rstrip('\r\n')
            else:
                shortdesc = '**empty changelist description**'

            t = '%s %s' % (c.rev, repr(shortdesc)[1:-1])
            ui.status(util.ellipsis(t, 80) + '\n')

            files = []
            copies = {}
            copiedfiles = []
            i = 0
            while ("depotFile%d" % i) in d and ("rev%d" % i) in d:
                oldname = d["depotFile%d" % i]
                filename = None
                for v in vieworder:
                    if oldname.lower().startswith(v.lower()):
                        filename = decodefilename(views[v] + oldname[len(v):])
                        break
                if filename:
                    files.append((filename, d["rev%d" % i]))
                    depotname[filename] = oldname
                    if (d.get("action%d" % i) == "move/add"):
                        copiedfiles.append(filename)
                    localname[oldname] = filename
                i += 1

            # Collect information about copied files
            for filename in copiedfiles:
                oldname = depotname[filename]

                flcmd = 'p4 -G filelog %s' \
                      % util.shellquote(oldname)
                flstdout = util.popen(flcmd, mode='rb')

                copiedfilename = None
                for d in loaditer(flstdout):
                    copiedoldname = None

                    i = 0
                    while ("change%d" % i) in d:
                        if (d["change%d" % i] == change
                                and d["action%d" % i] == "move/add"):
                            j = 0
                            while ("file%d,%d" % (i, j)) in d:
                                if d["how%d,%d" % (i, j)] == "moved from":
                                    copiedoldname = d["file%d,%d" % (i, j)]
                                    break
                                j += 1
                        i += 1

                    if copiedoldname and copiedoldname in localname:
                        copiedfilename = localname[copiedoldname]
                        break

                if copiedfilename:
                    copies[filename] = copiedfilename
                else:
                    ui.warn(
                        _("cannot find source for copied file: %s@%s\n") %
                        (filename, change))

            changeset[change] = c
            files_map[change] = files
            copies_map[change] = copies
            lastid = change

        if lastid and len(changeset) > 0:
            heads = [lastid]

        return {
            'changeset': changeset,
            'files': files_map,
            'copies': copies_map,
            'heads': heads,
            'depotname': depotname,
        }
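Both P4 parsers on this page read records produced by 'p4 -G', which emits a stream of Python-marshalled dictionaries on stdout; the loops above consume them through a loaditer helper that is not included in these snippets. A plausible minimal definition (an assumption, not the converter's verbatim code) looks like this:

import marshal

def loaditer(f):
    """Yield the dictionaries marshalled into file-like object f by 'p4 -G'."""
    try:
        while True:
            d = marshal.load(f)
            if not d:
                break
            yield d
    except EOFError:
        pass

Each marshal.load call returns one record and the stream simply ends with EOFError, which is why the filelog and getfile code can drive the helper with a plain for-loop.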
Code example #44
0
    def _parse(self, ui, path):
        "Prepare list of P4 filenames and revisions to import"
        ui.status(_('reading p4 views\n'))

        # read client spec or view
        if "/" in path:
            self._parse_view(path)
            if path.startswith("//") and path.endswith("/..."):
                views = {path[:-3]:""}
            else:
                views = {"//": ""}
        else:
            cmd = 'p4 -G client -o "%s"' % path
            clientspec = marshal.load(util.popen(cmd))

            views = {}
            for client in clientspec:
                if client.startswith("View"):
                    sview, cview = clientspec[client].split()
                    self._parse_view(sview)
                    if sview.endswith("...") and cview.endswith("..."):
                        sview = sview[:-3]
                        cview = cview[:-3]
                    cview = cview[2:]
                    cview = cview[cview.find("/") + 1:]
                    views[sview] = cview

        # list of changes that affect our source files
        self.p4changes = self.p4changes.keys()
        self.p4changes.sort(key=int)

        # list with depot pathnames, longest first
        vieworder = views.keys()
        vieworder.sort(key=len, reverse=True)

        # handle revision limiting
        startrev = self.ui.config('convert', 'p4.startrev', default=0)
        self.p4changes = [x for x in self.p4changes
                          if ((not startrev or int(x) >= int(startrev)) and
                              (not self.rev or int(x) <= int(self.rev)))]

        # now read the full changelists to get the list of file revisions
        ui.status(_('collecting p4 changelists\n'))
        lastid = None
        for change in self.p4changes:
            cmd = "p4 -G describe %s" % change
            stdout = util.popen(cmd)
            d = marshal.load(stdout)

            desc = self.recode(d["desc"])
            shortdesc = desc.split("\n", 1)[0]
            t = '%s %s' % (d["change"], repr(shortdesc)[1:-1])
            ui.status(util.ellipsis(t, 80) + '\n')

            if lastid:
                parents = [lastid]
            else:
                parents = []

            date = (int(d["time"]), 0)     # timezone not set
            c = commit(author=self.recode(d["user"]), date=util.datestr(date),
                        parents=parents, desc=desc, branch='', extra={"p4": change})

            files = []
            i = 0
            while ("depotFile%d" % i) in d and ("rev%d" % i) in d:
                oldname = d["depotFile%d" % i]
                filename = None
                for v in vieworder:
                    if oldname.startswith(v):
                        filename = views[v] + oldname[len(v):]
                        break
                if filename:
                    files.append((filename, d["rev%d" % i]))
                    self.depotname[filename] = oldname
                i += 1
            self.changeset[change] = c
            self.files[change] = files
            lastid = change

        if lastid:
            self.heads = [lastid]
Code example #45
0
 def gitcmd(self, s):
     return util.popen('GIT_DIR=%s %s' % (self.path, s))
Code example #46
0
File: cvsps.py Project: yangdy-buji/idea-community
    if date:
        # no space between option and date string
        cmd.append('-d>%s' % date)
    cmd.append(directory)

    # state machine begins here
    tags = {}     # dictionary of revisions on current file with their tags
    branchmap = {} # mapping between branch names and revision numbers
    state = 0
    store = False # set when a new record can be appended

    cmd = [util.shellquote(arg) for arg in cmd]
    ui.note(_("running %s\n") % (' '.join(cmd)))
    ui.debug("prefix=%r directory=%r root=%r\n" % (prefix, directory, root))

    pfp = util.popen(' '.join(cmd))
    peek = pfp.readline()
    while True:
        line = peek
        if line == '':
            break
        peek = pfp.readline()
        if line.endswith('\n'):
            line = line[:-1]
        #ui.debug('state=%d line=%r\n' % (state, line))

        if state == 0:
            # initial state, consume input until we see 'RCS file'
            match = re_00.match(line)
            if match:
                rcs = match.group(1)
Code example #47
0
def patchbomb(ui, repo, *revs, **opts):
    '''send changesets by email

    By default, diffs are sent in the format generated by hg export,
    one per message. The series starts with a "[PATCH 0 of N]"
    introduction, which describes the series as a whole.

    Each patch email has a Subject line of "[PATCH M of N] ...", using
    the first line of the changeset description as the subject text.
    The message contains two or three parts. First, the changeset
    description. Next, (optionally) if the diffstat program is
    installed and -d/--diffstat is used, the result of running
    diffstat on the patch. Finally, the patch itself, as generated by
    "hg export".

    By default the patch is included as text in the email body for
    easy reviewing. Using the -a/--attach option will instead create
    an attachment for the patch. With -i/--inline an inline attachment
    will be created.

    With -o/--outgoing, emails will be generated for patches not found
    in the destination repository (or only those which are ancestors
    of the specified revisions, if any are provided).

    With -b/--bundle, changesets are selected as for --outgoing, but a
    single email containing a binary Mercurial bundle as an attachment
    will be sent.

    Examples::

      hg email -r 3000          # send patch 3000 only
      hg email -r 3000 -r 3001  # send patches 3000 and 3001
      hg email -r 3000:3005     # send patches 3000 through 3005
      hg email 3000             # send patch 3000 (deprecated)

      hg email -o               # send all patches not in default
      hg email -o DEST          # send all patches not in DEST
      hg email -o -r 3000       # send all ancestors of 3000 not in default
      hg email -o -r 3000 DEST  # send all ancestors of 3000 not in DEST

      hg email -b               # send bundle of all patches not in default
      hg email -b DEST          # send bundle of all patches not in DEST
      hg email -b -r 3000       # bundle of all ancestors of 3000 not in default
      hg email -b -r 3000 DEST  # bundle of all ancestors of 3000 not in DEST

    Before using this command, you will need to enable email in your
    hgrc. See the [email] section in hgrc(5) for details.
    '''

    _charsets = mail._charsets(ui)

    def outgoing(dest, revs):
        '''Return the revisions present locally but not in dest'''
        dest = ui.expandpath(dest or 'default-push', dest or 'default')
        dest, branches = hg.parseurl(dest)
        revs, checkout = hg.addbranchrevs(repo, repo, branches, revs)
        if revs:
            revs = [repo.lookup(rev) for rev in revs]
        other = hg.repository(cmdutil.remoteui(repo, opts), dest)
        ui.status(_('comparing with %s\n') % dest)
        o = repo.findoutgoing(other)
        if not o:
            ui.status(_("no changes found\n"))
            return []
        o = repo.changelog.nodesbetween(o, revs)[0]
        return [str(repo.changelog.rev(r)) for r in o]

    def getpatches(revs):
        for r in cmdutil.revrange(repo, revs):
            output = cStringIO.StringIO()
            patch.export(repo, [r], fp=output, opts=patch.diffopts(ui, opts))
            yield output.getvalue().split('\n')

    def getbundle(dest):
        tmpdir = tempfile.mkdtemp(prefix='hg-email-bundle-')
        tmpfn = os.path.join(tmpdir, 'bundle')
        try:
            commands.bundle(ui, repo, tmpfn, dest, **opts)
            return open(tmpfn, 'rb').read()
        finally:
            try:
                os.unlink(tmpfn)
            except:
                pass
            os.rmdir(tmpdir)

    if not (opts.get('test') or opts.get('mbox')):
        # really sending
        mail.validateconfig(ui)

    if not (revs or opts.get('rev') or opts.get('outgoing')
            or opts.get('bundle') or opts.get('patches')):
        raise util.Abort(_('specify at least one changeset with -r or -o'))

    if opts.get('outgoing') and opts.get('bundle'):
        raise util.Abort(
            _("--outgoing mode always on with --bundle;"
              " do not re-specify --outgoing"))

    if opts.get('outgoing') or opts.get('bundle'):
        if len(revs) > 1:
            raise util.Abort(_("too many destinations"))
        dest = revs and revs[0] or None
        revs = []

    if opts.get('rev'):
        if revs:
            raise util.Abort(_('use only one form to specify the revision'))
        revs = opts.get('rev')

    if opts.get('outgoing'):
        revs = outgoing(dest, opts.get('rev'))
    if opts.get('bundle'):
        opts['revs'] = revs

    # start
    if opts.get('date'):
        start_time = util.parsedate(opts.get('date'))
    else:
        start_time = util.makedate()

    def genmsgid(id):
        return '<%s.%s@%s>' % (id[:20], int(start_time[0]), socket.getfqdn())

    def getdescription(body, sender):
        if opts.get('desc'):
            body = open(opts.get('desc')).read()
        else:
            ui.write(
                _('\nWrite the introductory message for the '
                  'patch series.\n\n'))
            body = ui.edit(body, sender)
        return body

    def getpatchmsgs(patches, patchnames=None):
        jumbo = []
        msgs = []

        ui.write(
            _('This patch series consists of %d patches.\n\n') % len(patches))

        name = None
        for i, p in enumerate(patches):
            jumbo.extend(p)
            if patchnames:
                name = patchnames[i]
            msg = makepatch(ui, repo, p, opts, _charsets, i + 1, len(patches),
                            name)
            msgs.append(msg)

        if len(patches) > 1 or opts.get('intro'):
            tlen = len(str(len(patches)))

            flag = ' '.join(opts.get('flag'))
            if flag:
                subj = '[PATCH %0*d of %d %s]' % (tlen, 0, len(patches), flag)
            else:
                subj = '[PATCH %0*d of %d]' % (tlen, 0, len(patches))
            subj += ' ' + (opts.get('subject')
                           or prompt(ui, 'Subject: ', rest=subj))

            body = ''
            if opts.get('diffstat'):
                d = cdiffstat(ui, _('Final summary:\n'), jumbo)
                if d:
                    body = '\n' + d

            body = getdescription(body, sender)
            msg = mail.mimeencode(ui, body, _charsets, opts.get('test'))
            msg['Subject'] = mail.headencode(ui, subj, _charsets,
                                             opts.get('test'))

            msgs.insert(0, (msg, subj))
        return msgs

    def getbundlemsgs(bundle):
        subj = (opts.get('subject')
                or prompt(ui, 'Subject:', 'A bundle for your repository'))

        body = getdescription('', sender)
        msg = email.MIMEMultipart.MIMEMultipart()
        if body:
            msg.attach(mail.mimeencode(ui, body, _charsets, opts.get('test')))
        datapart = email.MIMEBase.MIMEBase('application', 'x-mercurial-bundle')
        datapart.set_payload(bundle)
        bundlename = '%s.hg' % opts.get('bundlename', 'bundle')
        datapart.add_header('Content-Disposition',
                            'attachment',
                            filename=bundlename)
        email.Encoders.encode_base64(datapart)
        msg.attach(datapart)
        msg['Subject'] = mail.headencode(ui, subj, _charsets, opts.get('test'))
        return [(msg, subj)]

    sender = (opts.get('from') or ui.config('email', 'from')
              or ui.config('patchbomb', 'from')
              or prompt(ui, 'From', ui.username()))

    # internal option used by pbranches
    patches = opts.get('patches')
    if patches:
        msgs = getpatchmsgs(patches, opts.get('patchnames'))
    elif opts.get('bundle'):
        msgs = getbundlemsgs(getbundle(dest))
    else:
        msgs = getpatchmsgs(list(getpatches(revs)))

    def getaddrs(opt, prpt=None, default=None):
        if opts.get(opt):
            return mail.addrlistencode(ui, opts.get(opt), _charsets,
                                       opts.get('test'))

        addrs = (ui.config('email', opt) or ui.config('patchbomb', opt) or '')
        if not addrs and prpt:
            addrs = prompt(ui, prpt, default)

        return mail.addrlistencode(ui, [addrs], _charsets, opts.get('test'))

    to = getaddrs('to', 'To')
    cc = getaddrs('cc', 'Cc', '')
    bcc = getaddrs('bcc')

    ui.write('\n')

    parent = opts.get('in_reply_to') or None
    # angle brackets may be omitted, they're not semantically part of the msg-id
    if parent is not None:
        if not parent.startswith('<'):
            parent = '<' + parent
        if not parent.endswith('>'):
            parent += '>'

    first = True

    sender_addr = email.Utils.parseaddr(sender)[1]
    sender = mail.addressencode(ui, sender, _charsets, opts.get('test'))
    sendmail = None
    for m, subj in msgs:
        try:
            m['Message-Id'] = genmsgid(m['X-Mercurial-Node'])
        except TypeError:
            m['Message-Id'] = genmsgid('patchbomb')
        if parent:
            m['In-Reply-To'] = parent
            m['References'] = parent
        if first:
            parent = m['Message-Id']
            first = False

        m['User-Agent'] = 'Mercurial-patchbomb/%s' % util.version()
        m['Date'] = email.Utils.formatdate(start_time[0], localtime=True)

        start_time = (start_time[0] + 1, start_time[1])
        m['From'] = sender
        m['To'] = ', '.join(to)
        if cc:
            m['Cc'] = ', '.join(cc)
        if bcc:
            m['Bcc'] = ', '.join(bcc)
        if opts.get('test'):
            ui.status(_('Displaying '), subj, ' ...\n')
            ui.flush()
            if 'PAGER' in os.environ:
                fp = util.popen(os.environ['PAGER'], 'w')
            else:
                fp = ui
            generator = email.Generator.Generator(fp, mangle_from_=False)
            try:
                generator.flatten(m, 0)
                fp.write('\n')
            except IOError, inst:
                if inst.errno != errno.EPIPE:
                    raise
            if fp is not ui:
                fp.close()
        elif opts.get('mbox'):
            ui.status(_('Writing '), subj, ' ...\n')
            fp = open(opts.get('mbox'), 'In-Reply-To' in m and 'ab+' or 'wb+')
            generator = email.Generator.Generator(fp, mangle_from_=True)
            # Should be time.asctime(), but Windows prints 2-characters day
            # of month instead of one. Make them print the same thing.
            date = time.strftime('%a %b %d %H:%M:%S %Y',
                                 time.localtime(start_time[0]))
            fp.write('From %s %s\n' % (sender_addr, date))
            generator.flatten(m, 0)
            fp.write('\n\n')
            fp.close()
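With --mbox, the branch above appends each message to the mbox file by hand and writes the 'From sender date' separator line itself (with a workaround for Windows' two-digit day formatting). For comparison only, here is a hedged sketch of appending a message with the standard library's mailbox module, which manages those separators automatically; this is not what patchbomb does.

import mailbox
from email.mime.text import MIMEText

def append_to_mbox(path, subject, body, sender):
    # Append one message to an mbox file, letting the mailbox module write
    # the 'From ' separator that the code above emits manually.
    msg = MIMEText(body)
    msg['Subject'] = subject
    msg['From'] = sender
    box = mailbox.mbox(path)
    try:
        box.add(msg)
        box.flush()
    finally:
        box.close()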
Code example #48
0
 def gitopen(self, s, noerr=False):
     if noerr:
         (sin, so, se) = util.popen3('GIT_DIR=%s %s' % (self.path, s))
         return so
     else:
         return util.popen('GIT_DIR=%s %s' % (self.path, s), 'rb')
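gitcmd and gitopen build a shell command string with GIT_DIR=... prefixed inline before handing it to util.popen. The sketch below shows an equivalent using subprocess, passing the repository location through the child's environment instead of the command string; it illustrates the pattern and is not the extension's actual code.

import os
import subprocess

def gitopen(gitdir, cmd, noerr=False):
    # Run a git command with GIT_DIR pointing at the repository and return its stdout.
    env = dict(os.environ, GIT_DIR=gitdir)
    stderr = subprocess.PIPE if noerr else None  # optionally swallow stderr, like popen3 above
    proc = subprocess.Popen(cmd, shell=True, env=env,
                            stdout=subprocess.PIPE, stderr=stderr)
    return proc.stdout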
Code example #49
0
    def getfile(self, name, rev):
        cmd = 'p4 -G print %s' \
            % util.shellquote("%s#%s" % (self.depotname[name], rev))

        lasterror = None
        while True:
            stdout = util.popen(cmd, mode='rb')

            mode = None
            contents = []
            keywords = None

            for d in loaditer(stdout):
                code = d["code"]
                data = d.get("data")

                if code == "error":
                    # if this is the first time error happened
                    # re-attempt getting the file
                    if not lasterror:
                        lasterror = IOError(d["generic"], data)
                        # this will exit inner-most for-loop
                        break
                    else:
                        raise lasterror

                elif code == "stat":
                    action = d.get("action")
                    if action in ["purge", "delete", "move/delete"]:
                        return None, None
                    p4type = self.re_type.match(d["type"])
                    if p4type:
                        mode = ""
                        flags = ((p4type.group(1) or "") +
                                 (p4type.group(3) or ""))
                        if "x" in flags:
                            mode = "x"
                        if p4type.group(2) == "symlink":
                            mode = "l"
                        if "ko" in flags:
                            keywords = self.re_keywords_old
                        elif "k" in flags:
                            keywords = self.re_keywords

                elif code == "text" or code == "binary":
                    contents.append(data)

                lasterror = None

            if not lasterror:
                break

        if mode is None:
            return None, None

        contents = ''.join(contents)

        if keywords:
            contents = keywords.sub("$\\1$", contents)
        if mode == "l" and contents.endswith("\n"):
            contents = contents[:-1]

        return contents, mode
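getfile refers to self.re_type, self.re_keywords and self.re_keywords_old, which are defined elsewhere on the converter class: re_type parses the p4 file type to derive the mode and keyword flags, and the keyword patterns collapse expanded RCS-style keywords back to their bare form via keywords.sub("$\\1$", contents). The definitions below are hypothetical stand-ins written to match that usage, not the converter's exact patterns.

import re

# hypothetical keyword patterns; group 1 must capture the keyword name so that
# the sub('$\\1$', ...) call above reduces '$Id: ... $' to '$Id$'
re_keywords_old = re.compile(r'\$(Id|Header)[^$\n]*\$')
re_keywords = re.compile(r'\$(Id|Header|Author|DateTime|Date|Change|File|Revision)'
                         r'[^$\n]*\$')

sample = '/* $Id: //depot/foo.c#3 $ */'
print(re_keywords.sub(r'$\1$', sample))   # prints: /* $Id$ */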
Code example #50
0
    def _parse(self):
        if self.changeset is not None:
            return
        self.changeset = {}

        maxrev = 0
        cmd = self.cmd
        if self.rev:
            # TODO: handle tags
            try:
                # patchset number?
                maxrev = int(self.rev)
            except ValueError:
                try:
                    # date
                    util.parsedate(self.rev, ['%Y/%m/%d %H:%M:%S'])
                    cmd = '%s -d "1970/01/01 00:00:01" -d "%s"' % (cmd,
                                                                   self.rev)
                except util.Abort:
                    raise util.Abort(
                        _('revision %s is not a patchset number or date') %
                        self.rev)

        d = os.getcwd()
        try:
            os.chdir(self.path)
            id = None
            state = 0
            filerevids = {}

            if self.builtin:
                # builtin cvsps code
                self.ui.status(_('using builtin cvsps\n'))

                cache = 'update'
                if not self.ui.configbool('convert', 'cvsps.cache', True):
                    cache = None
                db = cvsps.createlog(self.ui, cache=cache)
                db = cvsps.createchangeset(
                    self.ui,
                    db,
                    fuzz=int(self.ui.config('convert', 'cvsps.fuzz', 60)),
                    mergeto=self.ui.config('convert', 'cvsps.mergeto', None),
                    mergefrom=self.ui.config('convert', 'cvsps.mergefrom',
                                             None))

                for cs in db:
                    if maxrev and cs.id > maxrev:
                        break
                    id = str(cs.id)
                    cs.author = self.recode(cs.author)
                    self.lastbranch[cs.branch] = id
                    cs.comment = self.recode(cs.comment)
                    date = util.datestr(cs.date)
                    self.tags.update(dict.fromkeys(cs.tags, id))

                    files = {}
                    for f in cs.entries:
                        files[f.file] = "%s%s" % ('.'.join(
                            [str(x)
                             for x in f.revision]), ['', '(DEAD)'][f.dead])

                    # add current commit to set
                    c = commit(author=cs.author,
                               date=date,
                               parents=[str(p.id) for p in cs.parents],
                               desc=cs.comment,
                               branch=cs.branch or '')
                    self.changeset[id] = c
                    self.files[id] = files
            else:
                # external cvsps
                for l in util.popen(cmd):
                    if state == 0:  # header
                        if l.startswith("PatchSet"):
                            id = l[9:-2]
                            if maxrev and int(id) > maxrev:
                                # ignore everything
                                state = 3
                        elif l.startswith("Date:"):
                            date = util.parsedate(l[6:-1],
                                                  ["%Y/%m/%d %H:%M:%S"])
                            date = util.datestr(date)
                        elif l.startswith("Branch:"):
                            branch = l[8:-1]
                            self.parent[id] = self.lastbranch.get(
                                branch, 'bad')
                            self.lastbranch[branch] = id
                        elif l.startswith("Ancestor branch:"):
                            ancestor = l[17:-1]
                            # figure out the parent later
                            self.parent[id] = self.lastbranch[ancestor]
                        elif l.startswith("Author:"):
                            author = self.recode(l[8:-1])
                        elif l.startswith("Tag:") or l.startswith("Tags:"):
                            t = l[l.index(':') + 1:]
                            t = [ut.strip() for ut in t.split(',')]
                            if (len(t) > 1) or (t[0] and (t[0] != "(none)")):
                                self.tags.update(dict.fromkeys(t, id))
                        elif l.startswith("Log:"):
                            # switch to gathering log
                            state = 1
                            log = ""
                    elif state == 1:  # log
                        if l == "Members: \n":
                            # switch to gathering members
                            files = {}
                            oldrevs = []
                            log = self.recode(log[:-1])
                            state = 2
                        else:
                            # gather log
                            log += l
                    elif state == 2:  # members
                        if l == "\n":  # start of next entry
                            state = 0
                            p = [self.parent[id]]
                            if id == "1":
                                p = []
                            if branch == "HEAD":
                                branch = ""
                            if branch:
                                latest = 0
                                # the last changeset that contains a base
                                # file is our parent
                                for r in oldrevs:
                                    latest = max(filerevids.get(r, 0), latest)
                                if latest:
                                    p = [latest]

                            # add current commit to set
                            c = commit(author=author,
                                       date=date,
                                       parents=p,
                                       desc=log,
                                       branch=branch)
                            self.changeset[id] = c
                            self.files[id] = files
                        else:
                            colon = l.rfind(':')
                            file = l[1:colon]
                            rev = l[colon + 1:-2]
                            oldrev, rev = rev.split("->")
                            files[file] = rev

                            # save some information for identifying branch points
                            oldrevs.append("%s:%s" % (oldrev, file))
                            filerevids["%s:%s" % (rev, file)] = id
                    elif state == 3:
                        # swallow all input
                        continue

            self.heads = self.lastbranch.values()
        finally:
            os.chdir(d)
Code example #51
0
 def _fetch_revision(self, rev):
     """Return an output of `p4 describe` including author, commit date as
     a dictionary."""
     cmd = "p4 -G describe -s %s" % rev
     stdout = util.popen(cmd, mode='rb')
     return marshal.load(stdout)
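The dictionary returned here is the marshalled output of a single 'p4 -G describe -s' run; as the older converter above shows, the useful keys include 'change', 'user', 'time' and 'desc'. Below is a small self-contained sketch of how such a record might be summarized; the helper name and the sample data are made up for illustration.

def summarize_describe(d):
    # Turn one 'p4 -G describe' record into a one-line summary.
    # Key names are taken from the converter code above.
    desc = d.get('desc', '')
    short = desc.splitlines()[0] if desc else '**empty changelist description**'
    return '%s %s (%s)' % (d.get('change', '?'), short, d.get('user', 'unknown'))

print(summarize_describe({'change': '12345', 'user': 'alice',
                          'desc': 'Fix build\n\nDetails follow.',
                          'time': '1500000000'}))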
Code example #52
0
File: rage.py Project: davidshepherd7/dotfiles
def rage(ui, repo, *pats, **opts):
    """collect useful diagnostics for asking help from the source control team

    The rage command collects useful diagnostic information.

    By default, the information will be uploaded to Phabricator and
    instructions about how to ask for help will be printed.
    """
    srcrepo = shareutil.getsrcrepo(repo)

    def format(pair, basic=True):
        if basic:
            fmt = "%s: %s\n"
        else:
            fmt =  "%s:\n---------------------------\n%s\n"
        return fmt % pair

    def hgcmd(func, *args, **opts):
        _repo = repo
        if '_repo' in opts:
            _repo = opts['_repo']
            del opts['_repo']
        ui.pushbuffer(error=True)
        try:
            func(ui, _repo, *args, **opts)
        finally:
            return ui.popbuffer()

    def hgsrcrepofile(filename):
        if srcrepo.vfs.exists(filename):
            return srcrepo.vfs(filename).read()
        else:
            return "File not found: %s" % srcrepo.vfs.join(filename)

    if opts.get('oncall') and opts.get('preview'):
        raise error.Abort('--preview and --oncall cannot be used together')

    basic = [
        ('date', time.ctime()),
        ('unixname', encoding.environ.get('LOGNAME')),
        ('hostname', socket.gethostname()),
        ('repo location', _failsafe(lambda: repo.root)),
        ('active bookmark',
            _failsafe(lambda: bookmarks._readactive(repo, repo._bookmarks))),
        ('hg version', _failsafe(
            lambda: __import__('mercurial.__version__').__version__.version)),
        ('obsstore size', _failsafe(
            lambda: str(repo.vfs.stat('store/obsstore').st_size))),
    ]

    ui._colormode = None

    detailed = [
        ('df -h', _failsafe(lambda: shcmd('df -h', check=False))),
        # smartlog as the user sees it
        ('hg sl (filtered)', _failsafe(lambda: hgcmd(
            smartlog.smartlog, template='{hsl}'))),
        # unfiltered smartlog for recent hidden changesets, including full
        # node identity
        ('hg sl (unfiltered)', _failsafe(lambda: hgcmd(
            smartlog.smartlog, _repo=repo.unfiltered(),
            template='{node}\n{hsl}'))),
        ('first 20 lines of "hg status"',
            _failsafe(lambda:
                '\n'.join(hgcmd(commands.status).splitlines()[:20]))),
        ('hg blackbox -l60',
            _failsafe(lambda: hgcmd(blackbox.blackbox, limit=60))),
        ('hg summary', _failsafe(lambda: hgcmd(commands.summary))),
        ('hg config (local)', _failsafe(lambda: '\n'.join(localconfig(ui)))),
        ('hg sparse',
            _failsafe(
                lambda: hgcmd(
                    sparse.sparse, include=False, exclude=False, delete=False,
                    force=False, enable_profile=False, disable_profile=False,
                    refresh=False, reset=False, import_rules=False,
                    clear_rules=False))),
        ('usechg', _failsafe(usechginfo)),
        ('rpm info', _failsafe(partial(rpminfo, ui))),
        ('klist', _failsafe(lambda: shcmd('klist', check=False))),
        ('ifconfig', _failsafe(lambda: shcmd('ifconfig'))),
        ('airport', _failsafe(
            lambda: shcmd('/System/Library/PrivateFrameworks/Apple80211.' +
                          'framework/Versions/Current/Resources/airport ' +
                          '--getinfo', check=False))),
        ('hg debugobsolete <smartlog>',
            _failsafe(lambda: obsoleteinfo(repo, hgcmd))),
        ('infinitepush backup state',
            _failsafe(lambda: hgsrcrepofile('infinitepushbackupstate'))),
        ('infinitepush backup logs',
            _failsafe(lambda: infinitepushbackuplogs(ui, repo))),
        ('hg config (all)', _failsafe(lambda: hgcmd(commands.config))),
    ]

    if util.safehasattr(repo, 'name'):
        # Add the contents of both local and shared pack directories.
        packlocs = {
            'local': lambda category: shallowutil.getlocalpackpath(
                repo.svfs.vfs.base, category),
            'shared': lambda category: shallowutil.getcachepackpath(repo,
                category),
        }

        for loc, getpath in packlocs.iteritems():
            for category in constants.ALL_CATEGORIES:
                path = getpath(category)
                detailed.append((
                    "%s packs (%s)" % (loc, constants.getunits(category)),
                    "%s:\n%s" %
                    (path, _failsafe(lambda: shcmd("ls -lhS %s" % path)))
                ))

    # This is quite slow, so we don't want to do it by default
    if ui.configbool("rage", "fastmanifestcached", False):
        detailed.append(
            ('hg sl -r "fastmanifestcached()"',
                _failsafe(lambda: hgcmd(smartlog.smartlog,
                          rev=["fastmanifestcached()"]))),
        )

    msg = '\n'.join(map(format, basic)) + '\n' +\
          '\n'.join(map(lambda x: format(x, False), detailed))
    if _failsafeerrors:
        msg += '\n' + '\n'.join(_failsafeerrors)

    if opts.get('preview'):
        ui.write('%s\n' % msg)
        return

    fp = util.popen('arc paste --lang hgrage --title hgrage', 'w')
    fp.write(msg)
    ret = fp.close()
    if ret:
        ui.warn(_('No paste was created.\n'))
        fd, tmpname = tempfile.mkstemp(prefix='hg-rage-')
        with os.fdopen(fd, r'w') as tmpfp:
            tmpfp.write(msg)
            ui.warn(_('Saved contents to %s\n') % tmpname)
    else:
        if opts.get('oncall'):
            createtask(ui, repo, msg)
        else:
            ui.write(_('Please post your problem and the above link at'
                       ' %s for help.\n')
                     % (ui.config('ui', 'supportcontact'),))
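rage pipes its report into 'arc paste' through util.popen(..., 'w') and, when that command fails, falls back to saving the text in a temporary file. Here is a standalone sketch of the same pipe-with-fallback pattern using subprocess; the command string is copied from the code above and everything else is illustrative only.

import os
import subprocess
import tempfile

def paste_or_save(text, cmd='arc paste --lang hgrage --title hgrage'):
    # Try to pipe the report into the external paste command; on failure,
    # write it to a temporary file instead and return that path.
    proc = subprocess.Popen(cmd, shell=True, stdin=subprocess.PIPE)
    proc.communicate(text.encode('utf-8'))
    if proc.returncode == 0:
        return None
    fd, tmpname = tempfile.mkstemp(prefix='hg-rage-')
    with os.fdopen(fd, 'w') as fp:
        fp.write(text)
    return tmpname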
Code example #53
0
def createlog(ui, directory=None, root="", rlog=True, cache=None):
    '''Collect the CVS rlog'''

    # Because we store many duplicate commit log messages, reusing strings
    # saves a lot of memory and pickle storage space.
    _scache = {}

    def scache(s):
        "return a shared version of a string"
        return _scache.setdefault(s, s)

    ui.status(_('collecting CVS rlog\n'))

    log = []  # list of logentry objects containing the CVS state

    # patterns to match in CVS (r)log output, by state of use
    re_00 = re.compile('RCS file: (.+)$')
    re_01 = re.compile('cvs \\[r?log aborted\\]: (.+)$')
    re_02 = re.compile('cvs (r?log|server): (.+)\n$')
    re_03 = re.compile("(Cannot access.+CVSROOT)|"
                       "(can't create temporary directory.+)$")
    re_10 = re.compile('Working file: (.+)$')
    re_20 = re.compile('symbolic names:')
    re_30 = re.compile('\t(.+): ([\\d.]+)$')
    re_31 = re.compile('----------------------------$')
    re_32 = re.compile('======================================='
                       '======================================$')
    re_50 = re.compile('revision ([\\d.]+)(\s+locked by:\s+.+;)?$')
    re_60 = re.compile(r'date:\s+(.+);\s+author:\s+(.+);\s+state:\s+(.+?);'
                       r'(\s+lines:\s+(\+\d+)?\s+(-\d+)?;)?'
                       r'(\s+commitid:\s+([^;]+);)?'
                       r'(.*mergepoint:\s+([^;]+);)?')
    re_70 = re.compile('branches: (.+);$')

    file_added_re = re.compile(r'file [^/]+ was (initially )?added on branch')

    prefix = ''  # leading path to strip of what we get from CVS

    if directory is None:
        # Current working directory

        # Get the real directory in the repository
        try:
            prefix = open(os.path.join('CVS', 'Repository')).read().strip()
            directory = prefix
            if prefix == ".":
                prefix = ""
        except IOError:
            raise logerror(_('not a CVS sandbox'))

        if prefix and not prefix.endswith(os.sep):
            prefix += os.sep

        # Use the Root file in the sandbox, if it exists
        try:
            root = open(os.path.join('CVS', 'Root')).read().strip()
        except IOError:
            pass

    if not root:
        root = os.environ.get('CVSROOT', '')

    # read log cache if one exists
    oldlog = []
    date = None

    if cache:
        cachedir = os.path.expanduser('~/.hg.cvsps')
        if not os.path.exists(cachedir):
            os.mkdir(cachedir)

        # The cvsps cache pickle needs a uniquified name, based on the
        # repository location. The address may have all sort of nasties
        # in it, slashes, colons and such. So here we take just the
        # alphanumeric characters, concatenated in a way that does not
        # mix up the various components, so that
        #    :pserver:user@server:/path
        # and
        #    /pserver/user/server/path
        # are mapped to different cache file names.
        cachefile = root.split(":") + [directory, "cache"]
        cachefile = ['-'.join(re.findall(r'\w+', s)) for s in cachefile if s]
        cachefile = os.path.join(cachedir,
                                 '.'.join([s for s in cachefile if s]))

    if cache == 'update':
        try:
            ui.note(_('reading cvs log cache %s\n') % cachefile)
            oldlog = pickle.load(open(cachefile))
            for e in oldlog:
                if not (util.safehasattr(e, 'branchpoints')
                        and util.safehasattr(e, 'commitid')
                        and util.safehasattr(e, 'mergepoint')):
                    ui.status(_('ignoring old cache\n'))
                    oldlog = []
                    break

            ui.note(_('cache has %d log entries\n') % len(oldlog))
        except Exception as e:
            ui.note(_('error reading cache: %r\n') % e)

        if oldlog:
            date = oldlog[-1].date  # last commit date as a (time,tz) tuple
            date = util.datestr(date, '%Y/%m/%d %H:%M:%S %1%2')

    # build the CVS commandline
    cmd = ['cvs', '-q']
    if root:
        cmd.append('-d%s' % root)
        p = util.normpath(getrepopath(root))
        if not p.endswith('/'):
            p += '/'
        if prefix:
            # looks like normpath replaces "" by "."
            prefix = p + util.normpath(prefix)
        else:
            prefix = p
    cmd.append(['log', 'rlog'][rlog])
    if date:
        # no space between option and date string
        cmd.append('-d>%s' % date)
    cmd.append(directory)

    # state machine begins here
    tags = {}  # dictionary of revisions on current file with their tags
    branchmap = {}  # mapping between branch names and revision numbers
    rcsmap = {}
    state = 0
    store = False  # set when a new record can be appended

    cmd = [util.shellquote(arg) for arg in cmd]
    ui.note(_("running %s\n") % (' '.join(cmd)))
    ui.debug("prefix=%r directory=%r root=%r\n" % (prefix, directory, root))

    pfp = util.popen(' '.join(cmd))
    peek = pfp.readline()
    while True:
        line = peek
        if line == '':
            break
        peek = pfp.readline()
        if line.endswith('\n'):
            line = line[:-1]
        #ui.debug('state=%d line=%r\n' % (state, line))

        if state == 0:
            # initial state, consume input until we see 'RCS file'
            match = re_00.match(line)
            if match:
                rcs = match.group(1)
                tags = {}
                if rlog:
                    filename = util.normpath(rcs[:-2])
                    if filename.startswith(prefix):
                        filename = filename[len(prefix):]
                    if filename.startswith('/'):
                        filename = filename[1:]
                    if filename.startswith('Attic/'):
                        filename = filename[6:]
                    else:
                        filename = filename.replace('/Attic/', '/')
                    state = 2
                    continue
                state = 1
                continue
            match = re_01.match(line)
            if match:
                raise logerror(match.group(1))
            match = re_02.match(line)
            if match:
                raise logerror(match.group(2))
            if re_03.match(line):
                raise logerror(line)

        elif state == 1:
            # expect 'Working file' (only when using log instead of rlog)
            match = re_10.match(line)
            assert match, _('RCS file must be followed by working file')
            filename = util.normpath(match.group(1))
            state = 2

        elif state == 2:
            # expect 'symbolic names'
            if re_20.match(line):
                branchmap = {}
                state = 3

        elif state == 3:
            # read the symbolic names and store as tags
            match = re_30.match(line)
            if match:
                rev = [int(x) for x in match.group(2).split('.')]

                # Convert magic branch number to an odd-numbered one
                revn = len(rev)
                if revn > 3 and (revn % 2) == 0 and rev[-2] == 0:
                    rev = rev[:-2] + rev[-1:]
                rev = tuple(rev)

                if rev not in tags:
                    tags[rev] = []
                tags[rev].append(match.group(1))
                branchmap[match.group(1)] = match.group(2)

            elif re_31.match(line):
                state = 5
            elif re_32.match(line):
                state = 0

        elif state == 4:
            # expecting '------' separator before first revision
            if re_31.match(line):
                state = 5
            else:
                assert not re_32.match(line), _('must have at least '
                                                'some revisions')

        elif state == 5:
            # expecting revision number and possibly (ignored) lock indication
            # we create the logentry here from values stored in states 0 to 4,
            # as this state is re-entered for subsequent revisions of a file.
            match = re_50.match(line)
            assert match, _('expected revision number')
            e = logentry(rcs=scache(rcs),
                         file=scache(filename),
                         revision=tuple(
                             [int(x) for x in match.group(1).split('.')]),
                         branches=[],
                         parent=None,
                         commitid=None,
                         mergepoint=None,
                         branchpoints=set())

            state = 6

        elif state == 6:
            # expecting date, author, state, lines changed
            match = re_60.match(line)
            assert match, _('revision must be followed by date line')
            d = match.group(1)
            if d[2] == '/':
                # Y2K
                d = '19' + d

            if len(d.split()) != 3:
                # cvs log dates always in GMT
                d = d + ' UTC'
            e.date = util.parsedate(d, [
                '%y/%m/%d %H:%M:%S', '%Y/%m/%d %H:%M:%S', '%Y-%m-%d %H:%M:%S'
            ])
            e.author = scache(match.group(2))
            e.dead = match.group(3).lower() == 'dead'

            if match.group(5):
                if match.group(6):
                    e.lines = (int(match.group(5)), int(match.group(6)))
                else:
                    e.lines = (int(match.group(5)), 0)
            elif match.group(6):
                e.lines = (0, int(match.group(6)))
            else:
                e.lines = None

            if match.group(7):  # cvs 1.12 commitid
                e.commitid = match.group(8)

            if match.group(9):  # cvsnt mergepoint
                myrev = match.group(10).split('.')
                if len(myrev) == 2:  # head
                    e.mergepoint = 'HEAD'
                else:
                    myrev = '.'.join(myrev[:-2] + ['0', myrev[-2]])
                    branches = [b for b in branchmap if branchmap[b] == myrev]
                    assert len(branches) == 1, ('unknown branch: %s' %
                                                e.mergepoint)
                    e.mergepoint = branches[0]

            e.comment = []
            state = 7

        elif state == 7:
            # read the revision numbers of branches that start at this revision
            # or store the commit log message otherwise
            m = re_70.match(line)
            if m:
                e.branches = [
                    tuple([int(y) for y in x.strip().split('.')])
                    for x in m.group(1).split(';')
                ]
                state = 8
            elif re_31.match(line) and re_50.match(peek):
                state = 5
                store = True
            elif re_32.match(line):
                state = 0
                store = True
            else:
                e.comment.append(line)

        elif state == 8:
            # store commit log message
            if re_31.match(line):
                cpeek = peek
                if cpeek.endswith('\n'):
                    cpeek = cpeek[:-1]
                if re_50.match(cpeek):
                    state = 5
                    store = True
                else:
                    e.comment.append(line)
            elif re_32.match(line):
                state = 0
                store = True
            else:
                e.comment.append(line)

        # When a file is added on a branch B1, CVS creates a synthetic
        # dead trunk revision 1.1 so that the branch has a root.
        # Likewise, if you merge such a file to a later branch B2 (one
        # that already existed when the file was added on B1), CVS
        # creates a synthetic dead revision 1.1.x.1 on B2.  Don't drop
        # these revisions now, but mark them synthetic so
        # createchangeset() can take care of them.
        if (store and e.dead and e.revision[-1] == 1 and  # 1.1 or 1.1.x.1
                len(e.comment) == 1 and file_added_re.match(e.comment[0])):
            ui.debug('found synthetic revision in %s: %r\n' %
                     (e.rcs, e.comment[0]))
            e.synthetic = True

        if store:
            # clean up the results and save in the log.
            store = False
            e.tags = sorted([scache(x) for x in tags.get(e.revision, [])])
            e.comment = scache('\n'.join(e.comment))

            revn = len(e.revision)
            if revn > 3 and (revn % 2) == 0:
                e.branch = tags.get(e.revision[:-1], [None])[0]
            else:
                e.branch = None

            # find the branches starting from this revision
            branchpoints = set()
            for branch, revision in branchmap.iteritems():
                revparts = tuple([int(i) for i in revision.split('.')])
                if len(revparts) < 2:  # bad tags
                    continue
                if revparts[-2] == 0 and revparts[-1] % 2 == 0:
                    # normal branch
                    if revparts[:-2] == e.revision:
                        branchpoints.add(branch)
                elif revparts == (1, 1, 1):  # vendor branch
                    if revparts in e.branches:
                        branchpoints.add(branch)
            e.branchpoints = branchpoints

            log.append(e)

            rcsmap[e.rcs.replace('/Attic/', '/')] = e.rcs

            if len(log) % 100 == 0:
                ui.status(
                    util.ellipsis('%d %s' % (len(log), e.file), 80) + '\n')

    log.sort(key=lambda x: (x.rcs, x.revision))

    # find parent revisions of individual files
    versions = {}
    for e in sorted(oldlog, key=lambda x: (x.rcs, x.revision)):
        rcs = e.rcs.replace('/Attic/', '/')
        if rcs in rcsmap:
            e.rcs = rcsmap[rcs]
        branch = e.revision[:-1]
        versions[(e.rcs, branch)] = e.revision

    for e in log:
        branch = e.revision[:-1]
        p = versions.get((e.rcs, branch), None)
        if p is None:
            p = e.revision[:-2]
        e.parent = p
        versions[(e.rcs, branch)] = e.revision

    # update the log cache
    if cache:
        if log:
            # join up the old and new logs
            log.sort(key=lambda x: x.date)

            if oldlog and oldlog[-1].date >= log[0].date:
                raise logerror(
                    _('log cache overlaps with new log entries,'
                      ' re-run without cache.'))

            log = oldlog + log

            # write the new cachefile
            ui.note(_('writing cvs log cache %s\n') % cachefile)
            pickle.dump(log, open(cachefile, 'w'))
        else:
            log = oldlog

    ui.status(_('%d log entries\n') % len(log))

    hook.hook(ui, None, "cvslog", True, log=log)

    return log
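The branch handling at the end of this parser relies on CVS's revision-numbering conventions: a revision with an even number of parts greater than two lives on a branch, and a branch tag is recorded as a "magic" revision with a zero in the next-to-last position, so a tag pointing at 1.4.0.2 names a branch that forks from 1.4. Below is a standalone sketch of those checks, with made-up tag names and revisions rather than anything taken from the example above:

def revtuple(s):
    # turn '1.4.2.3' into (1, 4, 2, 3)
    return tuple(int(p) for p in s.split('.'))

def is_branch_revision(rev):
    # branch revisions have an even number of parts, more than two,
    # e.g. (1, 4, 2, 3) is the first revisions' successor on branch 1.4.2
    return len(rev) > 3 and len(rev) % 2 == 0

def branchpoint_of_branch_tag(tagrev):
    # a branch tag such as 1.4.0.2 carries a zero in the next-to-last slot;
    # the branch it names forks from the revision given by the leading
    # parts, here 1.4 -- the same rule the parser applies to branchmap
    if len(tagrev) >= 4 and tagrev[-2] == 0 and tagrev[-1] % 2 == 0:
        return tagrev[:-2]
    return None

# hypothetical symbols section of an RCS file
branchmap = {'RELENG_1': '1.4.0.2', 'VENDOR': '1.1.1'}

assert is_branch_revision(revtuple('1.4.2.3'))
assert branchpoint_of_branch_tag(revtuple('1.4.0.2')) == (1, 4)

# branches forking from revision 1.4, per the same rule used above
forks = [b for b, r in branchmap.items()
         if branchpoint_of_branch_tag(revtuple(r)) == (1, 4)]
assert forks == ['RELENG_1']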
Code example #54
0
File: cvs.py Project: Techlord-RCE/cygwin
    def _parse(self):
        if self.changeset:
            return

        maxrev = 0
        cmd = self.cmd
        if self.rev:
            # TODO: handle tags
            try:
                # patchset number?
                maxrev = int(self.rev)
            except ValueError:
                try:
                    # date
                    util.parsedate(self.rev, ['%Y/%m/%d %H:%M:%S'])
                    cmd = '%s -d "1970/01/01 00:00:01" -d "%s"' % (cmd,
                                                                   self.rev)
                except util.Abort:
                    raise util.Abort(
                        'revision %s is not a patchset number or date' %
                        self.rev)

        d = os.getcwd()
        try:
            os.chdir(self.path)
            id = None
            state = 0
            filerevids = {}
            for l in util.popen(cmd):
                if state == 0:  # header
                    if l.startswith("PatchSet"):
                        id = l[9:-2]
                        if maxrev and int(id) > maxrev:
                            # ignore everything
                            state = 3
                    elif l.startswith("Date"):
                        date = util.parsedate(l[6:-1], ["%Y/%m/%d %H:%M:%S"])
                        date = util.datestr(date)
                    elif l.startswith("Branch"):
                        branch = l[8:-1]
                        self.parent[id] = self.lastbranch.get(branch, 'bad')
                        self.lastbranch[branch] = id
                    elif l.startswith("Ancestor branch"):
                        ancestor = l[17:-1]
                        # figure out the parent later
                        self.parent[id] = self.lastbranch[ancestor]
                    elif l.startswith("Author"):
                        author = self.recode(l[8:-1])
                    elif l.startswith("Tag:") or l.startswith("Tags:"):
                        t = l[l.index(':') + 1:]
                        t = [ut.strip() for ut in t.split(',')]
                        if (len(t) > 1) or (t[0] and (t[0] != "(none)")):
                            self.tags.update(dict.fromkeys(t, id))
                    elif l.startswith("Log:"):
                        # switch to gathering log
                        state = 1
                        log = ""
                elif state == 1:  # log
                    if l == "Members: \n":
                        # switch to gathering members
                        files = {}
                        oldrevs = []
                        log = self.recode(log[:-1])
                        state = 2
                    else:
                        # gather log
                        log += l
                elif state == 2:  # members
                    if l == "\n":  # start of next entry
                        state = 0
                        p = [self.parent[id]]
                        if id == "1":
                            p = []
                        if branch == "HEAD":
                            branch = ""
                        if branch:
                            latest = None
                            # the last changeset that contains a base
                            # file is our parent
                            for r in oldrevs:
                                latest = max(filerevids.get(r, None), latest)
                            if latest:
                                p = [latest]

                        # add current commit to set
                        c = commit(author=author,
                                   date=date,
                                   parents=p,
                                   desc=log,
                                   branch=branch)
                        self.changeset[id] = c
                        self.files[id] = files
                    else:
                        colon = l.rfind(':')
                        file = l[1:colon]
                        rev = l[colon + 1:-2]
                        oldrev, rev = rev.split("->")
                        files[file] = rev

                        # save some information for identifying branch points
                        oldrevs.append("%s:%s" % (oldrev, file))
                        filerevids["%s:%s" % (rev, file)] = id
                elif state == 3:
                    # swallow all input
                    continue

            self.heads = self.lastbranch.values()
        finally:
            os.chdir(d)
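A detail worth calling out in this converter is how the first changeset on a branch is linked to its parent: each member line records an oldrev->newrev transition for a file, the changeset that produced every new revision is remembered in filerevids, and when a later changeset starts from one of those revisions, the newest matching entry becomes its parent. Here is a standalone sketch of that bookkeeping; the member-line format and the sample data are assumptions based on the parser above, not on any cvsps documentation:

def parse_member(line):
    # split a member line of the form '<tab>path:oldrev->newrev'
    colon = line.rfind(':')
    filename = line[1:colon]
    oldrev, newrev = line[colon + 1:].rstrip('\n').split('->')
    return filename, oldrev, newrev

filerevids = {}                        # "rev:file" -> changeset id
filerevids['1.4:src/foo.c'] = 7        # changeset 7 produced foo.c 1.4

# a later changeset (say, the first one on a branch) starts from foo.c 1.4
filename, oldrev, newrev = parse_member('\tsrc/foo.c:1.4->1.4.2.1\n')
base = '%s:%s' % (oldrev, filename)
parent = filerevids.get(base)          # -> 7, the branchpoint changeset
assert parent == 7
filerevids['%s:%s' % (newrev, filename)] = 8   # changeset 8 made 1.4.2.1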
Code example #55
0
    def _parse(self, ui, path):
        "Prepare list of P4 filenames and revisions to import"
        ui.status(_('reading p4 views\n'))

        # read client spec or view
        if "/" in path:
            self._parse_view(path)
            if path.startswith("//") and path.endswith("/..."):
                views = {path[:-3]: ""}
            else:
                views = {"//": ""}
        else:
            cmd = 'p4 -G client -o %s' % util.shellquote(path)
            clientspec = marshal.load(util.popen(cmd, mode='rb'))

            views = {}
            for client in clientspec:
                if client.startswith("View"):
                    sview, cview = clientspec[client].split()
                    self._parse_view(sview)
                    if sview.endswith("...") and cview.endswith("..."):
                        sview = sview[:-3]
                        cview = cview[:-3]
                    cview = cview[2:]
                    cview = cview[cview.find("/") + 1:]
                    views[sview] = cview

        # list of changes that affect our source files
        self.p4changes = self.p4changes.keys()
        self.p4changes.sort(key=int)

        # list with depot pathnames, longest first
        vieworder = views.keys()
        vieworder.sort(key=len, reverse=True)

        # handle revision limiting
        startrev = self.ui.config('convert', 'p4.startrev', default=0)
        self.p4changes = [
            x for x in self.p4changes
            if ((not startrev or int(x) >= int(startrev)) and (
                not self.revs or int(x) <= int(self.revs[0])))
        ]

        # now read the full changelists to get the list of file revisions
        ui.status(_('collecting p4 changelists\n'))
        lastid = None
        for change in self.p4changes:
            cmd = "p4 -G describe -s %s" % change
            stdout = util.popen(cmd, mode='rb')
            d = marshal.load(stdout)
            desc = self.recode(d.get("desc", ""))
            shortdesc = desc.split("\n", 1)[0]
            t = '%s %s' % (d["change"], repr(shortdesc)[1:-1])
            ui.status(util.ellipsis(t, 80) + '\n')

            if lastid:
                parents = [lastid]
            else:
                parents = []

            date = (int(d["time"]), 0)  # timezone not set
            c = common.commit(author=self.recode(d["user"]),
                              date=util.datestr(date,
                                                '%Y-%m-%d %H:%M:%S %1%2'),
                              parents=parents,
                              desc=desc,
                              branch=None,
                              extra={"p4": change})

            files = []
            copies = {}
            copiedfiles = []
            i = 0
            while ("depotFile%d" % i) in d and ("rev%d" % i) in d:
                oldname = d["depotFile%d" % i]
                filename = None
                for v in vieworder:
                    if oldname.lower().startswith(v.lower()):
                        filename = decodefilename(views[v] + oldname[len(v):])
                        break
                if filename:
                    files.append((filename, d["rev%d" % i]))
                    self.depotname[filename] = oldname
                    if (d.get("action%d" % i) == "move/add"):
                        copiedfiles.append(filename)
                    self.localname[oldname] = filename
                i += 1

            # Collect information about copied files
            for filename in copiedfiles:
                oldname = self.depotname[filename]

                flcmd = 'p4 -G filelog %s' \
                      % util.shellquote(oldname)
                flstdout = util.popen(flcmd, mode='rb')

                copiedfilename = None
                for d in loaditer(flstdout):
                    copiedoldname = None

                    i = 0
                    while ("change%d" % i) in d:
                        if (d["change%d" % i] == change
                                and d["action%d" % i] == "move/add"):
                            j = 0
                            while ("file%d,%d" % (i, j)) in d:
                                if d["how%d,%d" % (i, j)] == "moved from":
                                    copiedoldname = d["file%d,%d" % (i, j)]
                                    break
                                j += 1
                        i += 1

                    if copiedoldname and copiedoldname in self.localname:
                        copiedfilename = self.localname[copiedoldname]
                        break

                if copiedfilename:
                    copies[filename] = copiedfilename
                else:
                    ui.warn(
                        _("cannot find source for copied file: %s@%s\n") %
                        (filename, change))

            self.changeset[change] = c
            self.files[change] = files
            self.copies[change] = copies
            lastid = change

        if lastid:
            self.heads = [lastid]
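The marshal.load calls above depend on p4's -G flag, which makes the client write its records as marshalled Python dictionaries rather than text; the filelog output is then drained through a loaditer helper that keeps loading records until the stream runs dry. Below is a standalone sketch of that pattern using subprocess in place of Mercurial's util.popen; the change number and the printed field are illustrative assumptions:

import marshal
import subprocess

def loaditer(stream):
    # yield each marshalled dictionary from a binary stream until EOF
    try:
        while True:
            d = marshal.load(stream)
            if not d:
                break
            yield d
    except EOFError:
        pass

# hypothetical usage; assumes a configured p4 client and changelist 1234
proc = subprocess.Popen('p4 -G describe -s 1234', shell=True,
                        stdout=subprocess.PIPE)
for record in loaditer(proc.stdout):
    # under Python 3 the marshalled keys and values arrive as bytes
    print(record.get(b'desc', b'').decode('utf-8', 'replace'))
proc.wait()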