Example #1
def getdiff(ui, repo, r, parent):
    '''return diff for the specified revision'''
    output = ""
    
    # build a git-style changeset header (similar to cmdutil.export, formerly the patch.export command)
    
    ctx = repo[r.node()]
    node = ctx.node()
    parents = [p.node() for p in ctx.parents() if p]
    branch = ctx.branch()

    if parents:
        prev = parents[0]
    else:
        prev = nullid
    
    output += "# HG changeset patch\n"
    output += "# User %s\n" % ctx.user()
    output += "# Date %d %d\n" % ctx.date()
    output += "#      %s\n" % dateutil.datestr(ctx.date())
    if branch and branch != 'default':
        output += "# Branch %s\n" % branch
    output += "# Node ID %s\n" % hex(node)
    if len(parents) > 1:
        output += "# Parent  %s\n" % hex(parents[1])
    output += "# Parent  %s\n" % hex(parent.node())
    output += ctx.description().rstrip()
    output += "\n\n"
    
    # use a private diffopts instance instead of mutating the shared mdiff.defaultopts
    opts = mdiff.diffopts(git=True)
    for chunk in patch.diff(repo, parent.node(), r.node(), opts=opts):
        output += chunk
    return output
Example #2
    def _parsecatlog(self, data, rev):
        try:
            catlog = mail.parsebytes(data)

            # Commit date
            self.changes[rev].date = dateutil.datestr(
                dateutil.strdate(catlog['Standard-date'],
                                 b'%Y-%m-%d %H:%M:%S'))

            # Commit author
            self.changes[rev].author = self.recode(catlog['Creator'])

            # Commit description
            self.changes[rev].summary = b'\n\n'.join((
                self.recode(catlog['Summary']),
                self.recode(catlog.get_payload()),
            ))
            self.changes[rev].summary = self.recode(self.changes[rev].summary)

            # Commit revision origin when dealing with a branch or tag
            if 'Continuation-of' in catlog:
                self.changes[rev].continuationof = self.recode(
                    catlog['Continuation-of'])
        except Exception:
            raise error.Abort(_(b'could not parse cat-log of %s') % rev)
Example #3
 def _log(self, ui, event, msg, opts):
     default = ui.configdate(b'devel', b'default-date')
     date = dateutil.datestr(default, ui.config(b'blackbox',
                                                b'date-format'))
     user = procutil.getuser()
     pid = b'%d' % procutil.getpid()
     changed = b''
     ctx = self._repo[None]
     parents = ctx.parents()
     rev = b'+'.join([hex(p.node()) for p in parents])
     if ui.configbool(b'blackbox', b'dirty') and ctx.dirty(
             missing=True, merge=False, branch=False):
         changed = b'+'
     if ui.configbool(b'blackbox', b'logsource'):
         src = b' [%s]' % event
     else:
         src = b''
     try:
         fmt = b'%s %s @%s%s (%s)%s> %s'
         args = (date, user, rev, changed, pid, src, msg)
         with loggingutil.openlogfile(
                 ui,
                 self._repo.vfs,
                 name=b'blackbox.log',
                 maxfiles=self._maxfiles,
                 maxsize=self._maxsize,
         ) as fp:
             fp.write(fmt % args)
     except (IOError, OSError) as err:
         # deactivate this to avoid failed logging again
         self._trackedevents.clear()
         ui.debug(b'warning: cannot write to blackbox.log: %s\n' %
                  encoding.strtolocal(err.strerror))
         return
     _lastlogger.logger = self
Example #4
    def _parse(self):
        if self.changeset is not None:
            return
        self.changeset = {}

        maxrev = 0
        if self.revs:
            if len(self.revs) > 1:
                raise error.Abort(_('cvs source does not support specifying '
                                   'multiple revs'))
            # TODO: handle tags
            try:
                # patchset number?
                maxrev = int(self.revs[0])
            except ValueError:
                raise error.Abort(_('revision %s is not a patchset number')
                                 % self.revs[0])

        d = encoding.getcwd()
        try:
            os.chdir(self.path)
            id = None

            cache = 'update'
            if not self.ui.configbool('convert', 'cvsps.cache'):
                cache = None
            db = cvsps.createlog(self.ui, cache=cache)
            db = cvsps.createchangeset(self.ui, db,
                fuzz=int(self.ui.config('convert', 'cvsps.fuzz')),
                mergeto=self.ui.config('convert', 'cvsps.mergeto'),
                mergefrom=self.ui.config('convert', 'cvsps.mergefrom'))

            for cs in db:
                if maxrev and cs.id > maxrev:
                    break
                id = (b"%d" % cs.id)
                cs.author = self.recode(cs.author)
                self.lastbranch[cs.branch] = id
                cs.comment = self.recode(cs.comment)
                if self.ui.configbool('convert', 'localtimezone'):
                    cs.date = makedatetimestamp(cs.date[0])
                date = dateutil.datestr(cs.date, '%Y-%m-%d %H:%M:%S %1%2')
                self.tags.update(dict.fromkeys(cs.tags, id))

                files = {}
                for f in cs.entries:
                    files[f.file] = "%s%s" % ('.'.join([(b"%d" % x)
                                                        for x in f.revision]),
                                              ['', '(DEAD)'][f.dead])

                # add current commit to set
                c = commit(author=cs.author, date=date,
                           parents=[(b"%d" % p.id) for p in cs.parents],
                           desc=cs.comment, branch=cs.branch or '')
                self.changeset[id] = c
                self.files[id] = files

            self.heads = self.lastbranch.values()
        finally:
            os.chdir(d)
Example #5
def _ctx_summary(ctx):
    return [
        b'',
        _(b'changeset: %s') % ctx.hex(),
        _(b'user:      %s') % ctx.user(),
        _(b'date:      %s') % dateutil.datestr(ctx.date()),
        _(b'summary:   %s') % ctx.description().splitlines()[0],
    ]
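
A quick sketch of what the date line above renders to, assuming a Python 3 Mercurial where mercurial.utils.dateutil is importable; the (unixtime, offset) pair is invented for illustration, and datestr() with no explicit format falls back to '%a %b %d %H:%M:%S %Y %1%2'.

from mercurial.utils import dateutil

# hypothetical changeset date: (unixtime, tz offset in seconds) as Mercurial stores it
date = (1250593213, -7200)
print(dateutil.datestr(date))   # -> b'Tue Aug 18 13:00:13 2009 +0200' on Python 3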
Example #6
        def log(self, event, *msg, **opts):
            global lastui
            super(blackboxui, self).log(event, *msg, **opts)

            if '*' not in self.track and event not in self.track:
                return

            if self._bbvfs:
                ui = self
            else:
                # certain ui instances exist outside the context of
                # a repo, so just default to the last blackbox that
                # was seen.
                ui = lastui

            if not ui:
                return
            vfs = ui._bbvfs
            if not vfs:
                return

            repo = getattr(ui, '_bbrepo', None)
            if not lastui or repo:
                lastui = ui
            if getattr(ui, '_bbinlog', False):
                # recursion and failure guard
                return
            ui._bbinlog = True
            default = self.configdate('devel', 'default-date')
            date = dateutil.datestr(default, '%Y/%m/%d %H:%M:%S')
            user = procutil.getuser()
            pid = '%d' % procutil.getpid()
            formattedmsg = msg[0] % msg[1:]
            rev = '(unknown)'
            changed = ''
            if repo:
                ctx = repo[None]
                parents = ctx.parents()
                rev = ('+'.join([hex(p.node()) for p in parents]))
                if (ui.configbool('blackbox', 'dirty') and ctx.dirty(
                        missing=True, merge=False, branch=False)):
                    changed = '+'
            if ui.configbool('blackbox', 'logsource'):
                src = ' [%s]' % event
            else:
                src = ''
            try:
                fmt = '%s %s @%s%s (%s)%s> %s'
                args = (date, user, rev, changed, pid, src, formattedmsg)
                with _openlogfile(ui, vfs) as fp:
                    fp.write(fmt % args)
            except (IOError, OSError) as err:
                self.debug('warning: cannot write to blackbox.log: %s\n' %
                           encoding.strtolocal(err.strerror))
                # do not restore _bbinlog intentionally to avoid failed
                # logging again
            else:
                ui._bbinlog = False
Example #7
 def getcommit(self, rev):
     elt = self.changes[rev]
     dateformat = '%a %b %d %H:%M:%S %Z %Y'
     date = dateutil.strdate(elt.get('local_date'), dateformat)
     desc = elt.findtext('name') + '\n' + elt.findtext('comment', '')
     # etree can return unicode objects for name, comment, and author,
     # so recode() is used to ensure str objects are emitted.
     newdateformat = '%Y-%m-%d %H:%M:%S %1%2'
     return common.commit(author=self.recode(elt.get('author')),
                          date=dateutil.datestr(date, newdateformat),
                          desc=self.recode(desc).strip(),
                          parents=self.parents[rev])
Example #8
 def getcommit(self, rev):
     extra = {}
     certs = self.mtngetcerts(rev)
     if certs.get('suspend') == certs["branch"]:
         extra['close'] = 1
     dateformat = "%Y-%m-%dT%H:%M:%S"
     return common.commit(author=certs["author"],
                          date=dateutil.datestr(
                              dateutil.strdate(certs["date"], dateformat)),
                          desc=certs["changelog"],
                          rev=rev,
                          parents=self.mtnrun("parents", rev).splitlines(),
                          branch=certs["branch"],
                          extra=extra)
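
A rough round-trip sketch of the strdate/datestr pairing used above, assuming a Python 3 Mercurial where mercurial.utils.dateutil is importable; the certificate value is invented for illustration.

from mercurial.utils import dateutil

raw = b'2009-08-18T13:00:13'                            # hypothetical monotone date cert
parsed = dateutil.strdate(raw, b'%Y-%m-%dT%H:%M:%S')    # -> (unixtime, tz offset) tuple
print(dateutil.datestr(parsed, b'%Y-%m-%d %H:%M:%S %1%2'))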
Example #9
    def _construct_commit(self, obj, parents=None):
        """
        Constructs a common.commit object from an unmarshalled
        `p4 describe` output
        """
        desc = self.recode(obj.get(b"desc", b""))
        date = (int(obj[b"time"]), 0)  # timezone not set
        if parents is None:
            parents = []

        return common.commit(
            author=self.recode(obj[b"user"]),
            date=dateutil.datestr(date, b'%Y-%m-%d %H:%M:%S %1%2'),
            parents=parents,
            desc=desc,
            branch=None,
            rev=obj[b'change'],
            extra={b"p4": obj[b'change'], b"convert_revision": obj[b'change']},
        )
Example #10
    def getcommit(self, rev):
        ctx = self._changectx(rev)
        _parents = self._parents(ctx)
        parents = [p.hex() for p in _parents]
        optparents = [
            p.hex() for p in ctx.parents() if p and p not in _parents
        ]
        crev = rev

        return common.commit(author=ctx.user(),
                             date=dateutil.datestr(ctx.date(),
                                                   '%Y-%m-%d %H:%M:%S %1%2'),
                             desc=ctx.description(),
                             rev=crev,
                             parents=parents,
                             optparents=optparents,
                             branch=ctx.branch(),
                             extra=ctx.extra(),
                             sortkey=ctx.rev(),
                             saverev=self.saverev,
                             phase=ctx.phase())
Example #11
def utcdate(date):
    '''Date. Returns a UTC-date in this format: "2009/08/18 11:00:13".
    '''
    dateformat = '%Y/%m/%d %H:%M:%S'
    return dateutil.datestr((date[0], 0), dateformat)
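
A minimal usage sketch of the UTC trick above, assuming a Python 3 Mercurial with mercurial.utils.dateutil available; the timestamp is picked to match the docstring's sample output and the stored offset is hypothetical.

from mercurial.utils import dateutil

date = (1250593213, -7200)   # hypothetical (unixtime, tz offset) pair
# replacing the stored offset with 0 renders the instant in UTC
print(dateutil.datestr((date[0], 0), b'%Y/%m/%d %H:%M:%S'))   # -> b'2009/08/18 11:00:13'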
Example #12
    def send(self, ctx, count, data):
        '''send message.'''

        # Select subscribers by revset
        subs = set()
        for sub, spec in self.subs:
            if spec is None:
                subs.add(sub)
                continue
            revs = self.repo.revs('%r and %d:', spec, ctx.rev())
            if len(revs):
                subs.add(sub)
                continue
        if len(subs) == 0:
            self.ui.debug('notify: no subscribers to selected repo '
                          'and revset\n')
            return

        p = emailparser.Parser()
        try:
            msg = p.parsestr(data)
        except email.Errors.MessageParseError as inst:
            raise error.Abort(inst)

        # store sender and subject
        sender, subject = msg['From'], msg['Subject']
        del msg['From'], msg['Subject']

        if not msg.is_multipart():
            # create fresh mime message from scratch
            # (multipart templates must take care of this themselves)
            headers = msg.items()
            payload = msg.get_payload()
            # for notification prefer readability over data precision
            msg = mail.mimeencode(self.ui, payload, self.charsets, self.test)
            # reinstate custom headers
            for k, v in headers:
                msg[k] = v

        msg['Date'] = dateutil.datestr(format="%a, %d %b %Y %H:%M:%S %1%2")

        # try to make subject line exist and be useful
        if not subject:
            if count > 1:
                subject = _('%s: %d new changesets') % (self.root, count)
            else:
                s = ctx.description().lstrip().split('\n', 1)[0].rstrip()
                subject = '%s: %s' % (self.root, s)
        maxsubject = int(self.ui.config('notify', 'maxsubject'))
        if maxsubject:
            subject = stringutil.ellipsis(subject, maxsubject)
        msg['Subject'] = mail.headencode(self.ui, subject, self.charsets,
                                         self.test)

        # try to make message have proper sender
        if not sender:
            sender = self.ui.config('email', 'from') or self.ui.username()
        if '@' not in sender or '@localhost' in sender:
            sender = self.fixmail(sender)
        msg['From'] = mail.addressencode(self.ui, sender, self.charsets,
                                         self.test)

        msg['X-Hg-Notification'] = 'changeset %s' % ctx
        if not msg['Message-Id']:
            msg['Message-Id'] = ('<hg.%s.%s.%s@%s>' % (ctx, int(
                time.time()), hash(self.repo.root), socket.getfqdn()))
        msg['To'] = ', '.join(sorted(subs))

        msgtext = msg.as_string()
        if self.test:
            self.ui.write(msgtext)
            if not msgtext.endswith('\n'):
                self.ui.write('\n')
        else:
            self.ui.status(
                _('notify: sending %d subscribers %d changes\n') %
                (len(subs), count))
            mail.sendmail(self.ui,
                          stringutil.email(msg['From']),
                          subs,
                          msgtext,
                          mbox=self.mbox)
Example #13
def createlog(ui, directory=None, root="", rlog=True, cache=None):
    '''Collect the CVS rlog'''

    # Because we store many duplicate commit log messages, reusing strings
    # saves a lot of memory and pickle storage space.
    _scache = {}
    def scache(s):
        "return a shared version of a string"
        return _scache.setdefault(s, s)

    ui.status(_('collecting CVS rlog\n'))

    log = []      # list of logentry objects containing the CVS state

    # patterns to match in CVS (r)log output, by state of use
    re_00 = re.compile(b'RCS file: (.+)$')
    re_01 = re.compile(b'cvs \\[r?log aborted\\]: (.+)$')
    re_02 = re.compile(b'cvs (r?log|server): (.+)\n$')
    re_03 = re.compile(b"(Cannot access.+CVSROOT)|"
                       b"(can't create temporary directory.+)$")
    re_10 = re.compile(b'Working file: (.+)$')
    re_20 = re.compile(b'symbolic names:')
    re_30 = re.compile(b'\t(.+): ([\\d.]+)$')
    re_31 = re.compile(b'----------------------------$')
    re_32 = re.compile(b'======================================='
                       b'======================================$')
    re_50 = re.compile(br'revision ([\d.]+)(\s+locked by:\s+.+;)?$')
    re_60 = re.compile(br'date:\s+(.+);\s+author:\s+(.+);\s+state:\s+(.+?);'
                       br'(\s+lines:\s+(\+\d+)?\s+(-\d+)?;)?'
                       br'(\s+commitid:\s+([^;]+);)?'
                       br'(.*mergepoint:\s+([^;]+);)?')
    re_70 = re.compile(b'branches: (.+);$')

    file_added_re = re.compile(br'file [^/]+ was (initially )?added on branch')

    prefix = ''   # leading path to strip off what we get from CVS

    if directory is None:
        # Current working directory

        # Get the real directory in the repository
        try:
            prefix = open(os.path.join('CVS','Repository'), 'rb').read().strip()
            directory = prefix
            if prefix == ".":
                prefix = ""
        except IOError:
            raise logerror(_('not a CVS sandbox'))

        if prefix and not prefix.endswith(pycompat.ossep):
            prefix += pycompat.ossep

        # Use the Root file in the sandbox, if it exists
        try:
            root = open(os.path.join('CVS','Root'), 'rb').read().strip()
        except IOError:
            pass

    if not root:
        root = encoding.environ.get('CVSROOT', '')

    # read log cache if one exists
    oldlog = []
    date = None

    if cache:
        cachedir = os.path.expanduser('~/.hg.cvsps')
        if not os.path.exists(cachedir):
            os.mkdir(cachedir)

        # The cvsps cache pickle needs a uniquified name, based on the
        # repository location. The address may have all sort of nasties
        # in it, slashes, colons and such. So here we take just the
        # alphanumeric characters, concatenated in a way that does not
        # mix up the various components, so that
        #    :pserver:user@server:/path
        # and
        #    /pserver/user/server/path
        # are mapped to different cache file names.
        cachefile = root.split(":") + [directory, "cache"]
        cachefile = ['-'.join(re.findall(br'\w+', s)) for s in cachefile if s]
        cachefile = os.path.join(cachedir,
                                 '.'.join([s for s in cachefile if s]))

    if cache == 'update':
        try:
            ui.note(_('reading cvs log cache %s\n') % cachefile)
            oldlog = pickle.load(open(cachefile, 'rb'))
            for e in oldlog:
                if not (util.safehasattr(e, 'branchpoints') and
                        util.safehasattr(e, 'commitid') and
                        util.safehasattr(e, 'mergepoint')):
                    ui.status(_('ignoring old cache\n'))
                    oldlog = []
                    break

            ui.note(_('cache has %d log entries\n') % len(oldlog))
        except Exception as e:
            ui.note(_('error reading cache: %r\n') % e)

        if oldlog:
            date = oldlog[-1].date    # last commit date as a (time,tz) tuple
            date = dateutil.datestr(date, '%Y/%m/%d %H:%M:%S %1%2')

    # build the CVS commandline
    cmd = ['cvs', '-q']
    if root:
        cmd.append('-d%s' % root)
        p = util.normpath(getrepopath(root))
        if not p.endswith('/'):
            p += '/'
        if prefix:
            # looks like normpath replaces "" by "."
            prefix = p + util.normpath(prefix)
        else:
            prefix = p
    cmd.append(['log', 'rlog'][rlog])
    if date:
        # no space between option and date string
        cmd.append('-d>%s' % date)
    cmd.append(directory)

    # state machine begins here
    tags = {}     # dictionary of revisions on current file with their tags
    branchmap = {} # mapping between branch names and revision numbers
    rcsmap = {}
    state = 0
    store = False # set when a new record can be appended

    cmd = [procutil.shellquote(arg) for arg in cmd]
    ui.note(_("running %s\n") % (' '.join(cmd)))
    ui.debug("prefix=%r directory=%r root=%r\n" % (prefix, directory, root))

    pfp = procutil.popen(' '.join(cmd), 'rb')
    peek = util.fromnativeeol(pfp.readline())
    while True:
        line = peek
        if line == '':
            break
        peek = util.fromnativeeol(pfp.readline())
        if line.endswith('\n'):
            line = line[:-1]
        #ui.debug('state=%d line=%r\n' % (state, line))

        if state == 0:
            # initial state, consume input until we see 'RCS file'
            match = re_00.match(line)
            if match:
                rcs = match.group(1)
                tags = {}
                if rlog:
                    filename = util.normpath(rcs[:-2])
                    if filename.startswith(prefix):
                        filename = filename[len(prefix):]
                    if filename.startswith('/'):
                        filename = filename[1:]
                    if filename.startswith('Attic/'):
                        filename = filename[6:]
                    else:
                        filename = filename.replace('/Attic/', '/')
                    state = 2
                    continue
                state = 1
                continue
            match = re_01.match(line)
            if match:
                raise logerror(match.group(1))
            match = re_02.match(line)
            if match:
                raise logerror(match.group(2))
            if re_03.match(line):
                raise logerror(line)

        elif state == 1:
            # expect 'Working file' (only when using log instead of rlog)
            match = re_10.match(line)
            assert match, _('RCS file must be followed by working file')
            filename = util.normpath(match.group(1))
            state = 2

        elif state == 2:
            # expect 'symbolic names'
            if re_20.match(line):
                branchmap = {}
                state = 3

        elif state == 3:
            # read the symbolic names and store as tags
            match = re_30.match(line)
            if match:
                rev = [int(x) for x in match.group(2).split('.')]

                # Convert magic branch number to an odd-numbered one
                revn = len(rev)
                if revn > 3 and (revn % 2) == 0 and rev[-2] == 0:
                    rev = rev[:-2] + rev[-1:]
                rev = tuple(rev)

                if rev not in tags:
                    tags[rev] = []
                tags[rev].append(match.group(1))
                branchmap[match.group(1)] = match.group(2)

            elif re_31.match(line):
                state = 5
            elif re_32.match(line):
                state = 0

        elif state == 4:
            # expecting '------' separator before first revision
            if re_31.match(line):
                state = 5
            else:
                assert not re_32.match(line), _('must have at least '
                                                'some revisions')

        elif state == 5:
            # expecting revision number and possibly (ignored) lock indication
            # we create the logentry here from values stored in states 0 to 4,
            # as this state is re-entered for subsequent revisions of a file.
            match = re_50.match(line)
            assert match, _('expected revision number')
            e = logentry(rcs=scache(rcs),
                         file=scache(filename),
                         revision=tuple([int(x) for x in
                                         match.group(1).split('.')]),
                         branches=[],
                         parent=None,
                         commitid=None,
                         mergepoint=None,
                         branchpoints=set())

            state = 6

        elif state == 6:
            # expecting date, author, state, lines changed
            match = re_60.match(line)
            assert match, _('revision must be followed by date line')
            d = match.group(1)
            if d[2] == '/':
                # Y2K
                d = '19' + d

            if len(d.split()) != 3:
                # cvs log dates always in GMT
                d = d + ' UTC'
            e.date = dateutil.parsedate(d, ['%y/%m/%d %H:%M:%S',
                                        '%Y/%m/%d %H:%M:%S',
                                        '%Y-%m-%d %H:%M:%S'])
            e.author = scache(match.group(2))
            e.dead = match.group(3).lower() == 'dead'

            if match.group(5):
                if match.group(6):
                    e.lines = (int(match.group(5)), int(match.group(6)))
                else:
                    e.lines = (int(match.group(5)), 0)
            elif match.group(6):
                e.lines = (0, int(match.group(6)))
            else:
                e.lines = None

            if match.group(7): # cvs 1.12 commitid
                e.commitid = match.group(8)

            if match.group(9): # cvsnt mergepoint
                myrev = match.group(10).split('.')
                if len(myrev) == 2: # head
                    e.mergepoint = 'HEAD'
                else:
                    myrev = '.'.join(myrev[:-2] + ['0', myrev[-2]])
                    branches = [b for b in branchmap if branchmap[b] == myrev]
                    assert len(branches) == 1, ('unknown branch: %s'
                                                % e.mergepoint)
                    e.mergepoint = branches[0]

            e.comment = []
            state = 7

        elif state == 7:
            # read the revision numbers of branches that start at this revision
            # or store the commit log message otherwise
            m = re_70.match(line)
            if m:
                e.branches = [tuple([int(y) for y in x.strip().split('.')])
                                for x in m.group(1).split(';')]
                state = 8
            elif re_31.match(line) and re_50.match(peek):
                state = 5
                store = True
            elif re_32.match(line):
                state = 0
                store = True
            else:
                e.comment.append(line)

        elif state == 8:
            # store commit log message
            if re_31.match(line):
                cpeek = peek
                if cpeek.endswith('\n'):
                    cpeek = cpeek[:-1]
                if re_50.match(cpeek):
                    state = 5
                    store = True
                else:
                    e.comment.append(line)
            elif re_32.match(line):
                state = 0
                store = True
            else:
                e.comment.append(line)

        # When a file is added on a branch B1, CVS creates a synthetic
        # dead trunk revision 1.1 so that the branch has a root.
        # Likewise, if you merge such a file to a later branch B2 (one
        # that already existed when the file was added on B1), CVS
        # creates a synthetic dead revision 1.1.x.1 on B2.  Don't drop
        # these revisions now, but mark them synthetic so
        # createchangeset() can take care of them.
        if (store and
              e.dead and
              e.revision[-1] == 1 and      # 1.1 or 1.1.x.1
              len(e.comment) == 1 and
              file_added_re.match(e.comment[0])):
            ui.debug('found synthetic revision in %s: %r\n'
                     % (e.rcs, e.comment[0]))
            e.synthetic = True

        if store:
            # clean up the results and save in the log.
            store = False
            e.tags = sorted([scache(x) for x in tags.get(e.revision, [])])
            e.comment = scache('\n'.join(e.comment))

            revn = len(e.revision)
            if revn > 3 and (revn % 2) == 0:
                e.branch = tags.get(e.revision[:-1], [None])[0]
            else:
                e.branch = None

            # find the branches starting from this revision
            branchpoints = set()
            for branch, revision in branchmap.iteritems():
                revparts = tuple([int(i) for i in revision.split('.')])
                if len(revparts) < 2: # bad tags
                    continue
                if revparts[-2] == 0 and revparts[-1] % 2 == 0:
                    # normal branch
                    if revparts[:-2] == e.revision:
                        branchpoints.add(branch)
                elif revparts == (1, 1, 1): # vendor branch
                    if revparts in e.branches:
                        branchpoints.add(branch)
            e.branchpoints = branchpoints

            log.append(e)

            rcsmap[e.rcs.replace('/Attic/', '/')] = e.rcs

            if len(log) % 100 == 0:
                ui.status(stringutil.ellipsis('%d %s' % (len(log), e.file), 80)
                          + '\n')

    log.sort(key=lambda x: (x.rcs, x.revision))

    # find parent revisions of individual files
    versions = {}
    for e in sorted(oldlog, key=lambda x: (x.rcs, x.revision)):
        rcs = e.rcs.replace('/Attic/', '/')
        if rcs in rcsmap:
            e.rcs = rcsmap[rcs]
        branch = e.revision[:-1]
        versions[(e.rcs, branch)] = e.revision

    for e in log:
        branch = e.revision[:-1]
        p = versions.get((e.rcs, branch), None)
        if p is None:
            p = e.revision[:-2]
        e.parent = p
        versions[(e.rcs, branch)] = e.revision

    # update the log cache
    if cache:
        if log:
            # join up the old and new logs
            log.sort(key=lambda x: x.date)

            if oldlog and oldlog[-1].date >= log[0].date:
                raise logerror(_('log cache overlaps with new log entries,'
                                 ' re-run without cache.'))

            log = oldlog + log

            # write the new cachefile
            ui.note(_('writing cvs log cache %s\n') % cachefile)
            pickle.dump(log, open(cachefile, 'wb'))
        else:
            log = oldlog

    ui.status(_('%d log entries\n') % len(log))

    encodings = ui.configlist('convert', 'cvsps.logencoding')
    if encodings:
        def revstr(r):
            # this is needed, because logentry.revision is a tuple of "int"
            # (e.g. (1, 2) for "1.2")
            return '.'.join(pycompat.maplist(pycompat.bytestr, r))

        for entry in log:
            comment = entry.comment
            for e in encodings:
                try:
                    entry.comment = comment.decode(
                        pycompat.sysstr(e)).encode('utf-8')
                    if ui.debugflag:
                        ui.debug("transcoding by %s: %s of %s\n" %
                                 (e, revstr(entry.revision), entry.file))
                    break
                except UnicodeDecodeError:
                    pass # try next encoding
                except LookupError as inst: # unknown encoding, maybe
                    raise error.Abort(inst,
                                      hint=_('check convert.cvsps.logencoding'
                                             ' configuration'))
            else:
                raise error.Abort(_("no encoding can transcode"
                                    " CVS log message for %s of %s")
                                  % (revstr(entry.revision), entry.file),
                                  hint=_('check convert.cvsps.logencoding'
                                         ' configuration'))

    hook.hook(ui, None, "cvslog", True, log=log)

    return log
Example #14
def debugcvsps(ui, *args, **opts):
    '''Read CVS rlog for current directory or named path in
    repository, and convert the log to changesets based on matching
    commit log entries and dates.
    '''
    opts = pycompat.byteskwargs(opts)
    if opts["new_cache"]:
        cache = "write"
    elif opts["update_cache"]:
        cache = "update"
    else:
        cache = None

    revisions = opts["revisions"]

    try:
        if args:
            log = []
            for d in args:
                log += createlog(ui, d, root=opts["root"], cache=cache)
        else:
            log = createlog(ui, root=opts["root"], cache=cache)
    except logerror as e:
        ui.write("%r\n"%e)
        return

    changesets = createchangeset(ui, log, opts["fuzz"])
    del log

    # Print changesets (optionally filtered)

    off = len(revisions)
    branches = {}    # latest version number in each branch
    ancestors = {}   # parent branch
    for cs in changesets:

        if opts["ancestors"]:
            if cs.branch not in branches and cs.parents and cs.parents[0].id:
                ancestors[cs.branch] = (changesets[cs.parents[0].id - 1].branch,
                                        cs.parents[0].id)
            branches[cs.branch] = cs.id

        # limit by branches
        if opts["branches"] and (cs.branch or 'HEAD') not in opts["branches"]:
            continue

        if not off:
            # Note: trailing spaces on several lines here are needed to have
            #       bug-for-bug compatibility with cvsps.
            ui.write('---------------------\n')
            ui.write(('PatchSet %d \n' % cs.id))
            ui.write(('Date: %s\n' % dateutil.datestr(cs.date,
                                                 '%Y/%m/%d %H:%M:%S %1%2')))
            ui.write(('Author: %s\n' % cs.author))
            ui.write(('Branch: %s\n' % (cs.branch or 'HEAD')))
            ui.write(('Tag%s: %s \n' % (['', 's'][len(cs.tags) > 1],
                                  ','.join(cs.tags) or '(none)')))
            if cs.branchpoints:
                ui.write(('Branchpoints: %s \n') %
                         ', '.join(sorted(cs.branchpoints)))
            if opts["parents"] and cs.parents:
                if len(cs.parents) > 1:
                    ui.write(('Parents: %s\n' %
                             (','.join([(b"%d" % p.id) for p in cs.parents]))))
                else:
                    ui.write(('Parent: %d\n' % cs.parents[0].id))

            if opts["ancestors"]:
                b = cs.branch
                r = []
                while b:
                    b, c = ancestors[b]
                    r.append('%s:%d:%d' % (b or "HEAD", c, branches[b]))
                if r:
                    ui.write(('Ancestors: %s\n' % (','.join(r))))

            ui.write(('Log:\n'))
            ui.write('%s\n\n' % cs.comment)
            ui.write(('Members: \n'))
            for f in cs.entries:
                fn = f.file
                if fn.startswith(opts["prefix"]):
                    fn = fn[len(opts["prefix"]):]
                ui.write('\t%s:%s->%s%s \n' % (
                        fn,
                        '.'.join([b"%d" % x for x in f.parent]) or 'INITIAL',
                        '.'.join([(b"%d" % x) for x in f.revision]),
                        ['', '(DEAD)'][f.dead]))
            ui.write('\n')

        # have we seen the start tag?
        if revisions and off:
            if revisions[0] == (b"%d" % cs.id) or \
                revisions[0] in cs.tags:
                off = False

        # see if we reached the end tag
        if len(revisions) > 1 and not off:
            if revisions[1] == (b"%d" % cs.id) or \
                revisions[1] in cs.tags:
                break
Example #15
def iadd(ui, repo, id=None, comment=0, **opts):
    """Adds a new issue, or comment to an existing issue ID or its comment COMMENT"""

    comment = int(comment)

    # First, make sure issues have a directory
    issues_dir = ui.config('artemis', 'issues', default=default_issues_dir)
    issues_path = os.path.join(repo.root, issues_dir)
    if not os.path.exists(issues_path): os.mkdir(issues_path)

    if id:
        issue_fn, issue_id = _find_issue(ui, repo, id)
        if not issue_fn:
            ui.warn('No such issue\n')
            return
        _create_missing_dirs(issues_path, issue_id)
        mbox = mailbox.Maildir(issue_fn, factory=mailbox.MaildirMessage)
        keys = _order_keys_date(mbox)
        root = keys[0]

    user = ui.username()

    default_issue_text = "From: %s\nDate: %s\n" % (user,
                                                   datestr(format=date_format))
    if not id:
        default_issue_text += "State: %s\n" % default_state
        default_issue_text += "Subject: brief description\n\n"
    else:
        subject = mbox[(comment < len(mbox) and keys[comment])
                       or root]['Subject']
        if not subject.startswith('Re: '): subject = 'Re: ' + subject
        default_issue_text += "Subject: %s\n\n" % subject
    default_issue_text += "Detailed description."

    # Get properties, and figure out if we need an explicit comment
    properties = _get_properties(opts['property'])
    no_comment = id and properties and opts['no_property_comment']
    message = opts['message']

    # Create the text
    if message:
        if not id:
            state_str = 'State: %s\n' % default_state
        else:
            state_str = ''
        issue = "From: %s\nDate: %s\nSubject: %s\n%s" % \
                (user, datestr(format=date_format), message, state_str)
    elif not no_comment:
        issue = ui.edit(default_issue_text, user)

        if issue.strip() == '':
            ui.warn('Empty issue, ignoring\n')
            return
        if issue.strip() == default_issue_text:
            ui.warn('Unchanged issue text, ignoring\n')
            return
    else:
        # Write down a comment about updated properties
        properties_subject = ', '.join(
            ['%s=%s' % (property, value) for (property, value) in properties])

        issue = "From: %s\nDate: %s\nSubject: changed properties (%s)\n" % \
                (user, datestr(format=date_format), properties_subject)

    # Create the message
    msg = mailbox.MaildirMessage(issue)
    if opts['attach']:
        outer = _attach_files(msg, opts['attach'])
    else:
        outer = msg

    # Pick random filename
    if not id:
        issue_fn = issues_path
        while os.path.exists(issue_fn):
            issue_id = _random_id()
            issue_fn = os.path.join(issues_path, issue_id)
        mbox = mailbox.Maildir(issue_fn, factory=mailbox.MaildirMessage)
        keys = _order_keys_date(mbox)
    # else: issue_fn already set

    # Add message to the mailbox
    mbox.lock()
    if id and comment >= len(mbox):
        ui.warn(
            'No such comment number in mailbox, commenting on the issue itself\n'
        )

    if not id:
        outer.add_header(
            'Message-Id',
            "<%s-0-artemis@%s>" % (issue_id, socket.gethostname()))
    else:
        root = keys[0]
        outer.add_header(
            'Message-Id', "<%s-%s-artemis@%s>" %
            (issue_id, _random_id(), socket.gethostname()))
        outer.add_header(
            'References', mbox[(comment < len(mbox) and keys[comment])
                               or root]['Message-Id'])
        outer.add_header(
            'In-Reply-To', mbox[(comment < len(mbox) and keys[comment])
                                or root]['Message-Id'])
    new_bug_path = issue_fn + '/new/' + mbox.add(outer)
    commands.add(ui, repo, new_bug_path)

    # Fix properties in the root message
    if properties:
        root = _find_root_key(mbox)
        msg = mbox[root]
        for property, value in properties:
            if property in msg:
                msg.replace_header(property, value)
            else:
                msg.add_header(property, value)
        mbox[root] = msg

    mbox.close()

    if opts['commit']:
        commands.commit(ui, repo, issue_fn)

    # If adding issue, add the new mailbox to the repository
    if not id:
        ui.status('Added new issue %s\n' % issue_id)
    else:
        _show_mbox(ui, mbox, 0)
Example #16
    def send(self, ctx, count, data):
        '''send message.'''

        # Select subscribers by revset
        subs = set()
        for sub, spec in self.subs:
            if spec is None:
                subs.add(sub)
                continue
            revs = self.repo.revs(b'%r and %d:', spec, ctx.rev())
            if len(revs):
                subs.add(sub)
                continue
        if len(subs) == 0:
            self.ui.debug(
                b'notify: no subscribers to selected repo and revset\n')
            return

        try:
            msg = mail.parsebytes(data)
        except emailerrors.MessageParseError as inst:
            raise error.Abort(inst)

        # store sender and subject
        sender = msg['From']
        subject = msg['Subject']
        if sender is not None:
            sender = mail.headdecode(sender)
        if subject is not None:
            subject = mail.headdecode(subject)
        del msg['From'], msg['Subject']

        if not msg.is_multipart():
            # create fresh mime message from scratch
            # (multipart templates must take care of this themselves)
            headers = msg.items()
            payload = msg.get_payload(decode=pycompat.ispy3)
            # for notification prefer readability over data precision
            msg = mail.mimeencode(self.ui, payload, self.charsets, self.test)
            # reinstate custom headers
            for k, v in headers:
                msg[k] = v

        msg['Date'] = encoding.strfromlocal(
            dateutil.datestr(format=b"%a, %d %b %Y %H:%M:%S %1%2"))

        # try to make subject line exist and be useful
        if not subject:
            if count > 1:
                subject = _(b'%s: %d new changesets') % (self.root, count)
            else:
                s = ctx.description().lstrip().split(b'\n', 1)[0].rstrip()
                subject = b'%s: %s' % (self.root, s)
        maxsubject = int(self.ui.config(b'notify', b'maxsubject'))
        if maxsubject:
            subject = stringutil.ellipsis(subject, maxsubject)
        msg['Subject'] = mail.headencode(self.ui, subject, self.charsets,
                                         self.test)

        # try to make message have proper sender
        if not sender:
            sender = self.ui.config(b'email', b'from') or self.ui.username()
        if b'@' not in sender or b'@localhost' in sender:
            sender = self.fixmail(sender)
        msg['From'] = mail.addressencode(self.ui, sender, self.charsets,
                                         self.test)

        msg['X-Hg-Notification'] = 'changeset %s' % ctx
        if not msg['Message-Id']:
            msg['Message-Id'] = messageid(ctx, self.domain, self.messageidseed)
        if self.reply:
            unfi = self.repo.unfiltered()
            has_node = unfi.changelog.index.has_node
            predecessors = [
                unfi[ctx2] for ctx2 in obsutil.allpredecessors(
                    unfi.obsstore, [ctx.node()])
                if ctx2 != ctx.node() and has_node(ctx2)
            ]
            if predecessors:
                # There is at least one predecessor, so which to pick?
                # Ideally, there is a unique root because changesets have
                # been evolved/rebased one step at a time. In this case,
                # just picking the oldest known changeset provides a stable
                # base. It doesn't help when changesets are folded. Any
                # better solution would require storing more information
                # in the repository.
                pred = min(predecessors, key=lambda ctx: ctx.rev())
                msg['In-Reply-To'] = messageid(pred, self.domain,
                                               self.messageidseed)
        msg['To'] = ', '.join(sorted(subs))

        msgtext = msg.as_bytes() if pycompat.ispy3 else msg.as_string()
        if self.test:
            self.ui.write(msgtext)
            if not msgtext.endswith(b'\n'):
                self.ui.write(b'\n')
        else:
            self.ui.status(
                _(b'notify: sending %d subscribers %d changes\n') %
                (len(subs), count))
            mail.sendmail(
                self.ui,
                emailutils.parseaddr(msg['From'])[1],
                subs,
                msgtext,
                mbox=self.mbox,
            )
Example #17
def svnisodate(date):
    '''Date. Returns a date in this format: "2009-08-18 13:00:13
    +0200 (Tue, 18 Aug 2009)".
    '''
    return dateutil.datestr(date, '%Y-%m-%d %H:%M:%S %1%2 (%a, %d %b %Y)')
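
An illustrative call matching the docstring above, assuming a Python 3 Mercurial with mercurial.utils.dateutil; %1 and %2 are Mercurial's format placeholders for the UTC offset (sign plus hours, and minutes), and the date tuple is hypothetical.

from mercurial.utils import dateutil

date = (1250593213, -7200)   # hypothetical: 11:00:13 UTC stored with a +0200 local zone
print(dateutil.datestr(date, b'%Y-%m-%d %H:%M:%S %1%2 (%a, %d %b %Y)'))
# -> b'2009-08-18 13:00:13 +0200 (Tue, 18 Aug 2009)'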
Example #18
    def send(self, ctx, count, data):
        '''send message.'''

        # Select subscribers by revset
        subs = set()
        for sub, spec in self.subs:
            if spec is None:
                subs.add(sub)
                continue
            revs = self.repo.revs(b'%r and %d:', spec, ctx.rev())
            if len(revs):
                subs.add(sub)
                continue
        if len(subs) == 0:
            self.ui.debug(
                b'notify: no subscribers to selected repo and revset\n')
            return

        try:
            msg = mail.parsebytes(data)
        except emailerrors.MessageParseError as inst:
            raise error.Abort(inst)

        # store sender and subject
        sender = msg[r'From']
        subject = msg[r'Subject']
        if sender is not None:
            sender = mail.headdecode(sender)
        if subject is not None:
            subject = mail.headdecode(subject)
        del msg[r'From'], msg[r'Subject']

        if not msg.is_multipart():
            # create fresh mime message from scratch
            # (multipart templates must take care of this themselves)
            headers = msg.items()
            payload = msg.get_payload(decode=pycompat.ispy3)
            # for notification prefer readability over data precision
            msg = mail.mimeencode(self.ui, payload, self.charsets, self.test)
            # reinstate custom headers
            for k, v in headers:
                msg[k] = v

        msg[r'Date'] = encoding.strfromlocal(
            dateutil.datestr(format=b"%a, %d %b %Y %H:%M:%S %1%2"))

        # try to make subject line exist and be useful
        if not subject:
            if count > 1:
                subject = _(b'%s: %d new changesets') % (self.root, count)
            else:
                s = ctx.description().lstrip().split(b'\n', 1)[0].rstrip()
                subject = b'%s: %s' % (self.root, s)
        maxsubject = int(self.ui.config(b'notify', b'maxsubject'))
        if maxsubject:
            subject = stringutil.ellipsis(subject, maxsubject)
        msg[r'Subject'] = encoding.strfromlocal(
            mail.headencode(self.ui, subject, self.charsets, self.test))

        # try to make message have proper sender
        if not sender:
            sender = self.ui.config(b'email', b'from') or self.ui.username()
        if b'@' not in sender or b'@localhost' in sender:
            sender = self.fixmail(sender)
        msg[r'From'] = encoding.strfromlocal(
            mail.addressencode(self.ui, sender, self.charsets, self.test))

        msg[r'X-Hg-Notification'] = r'changeset %s' % ctx
        if not msg[r'Message-Id']:
            msg[r'Message-Id'] = messageid(ctx, self.domain,
                                           self.messageidseed)
        msg[r'To'] = encoding.strfromlocal(b', '.join(sorted(subs)))

        msgtext = msg.as_bytes() if pycompat.ispy3 else msg.as_string()
        if self.test:
            self.ui.write(msgtext)
            if not msgtext.endswith(b'\n'):
                self.ui.write(b'\n')
        else:
            self.ui.status(
                _(b'notify: sending %d subscribers %d changes\n') %
                (len(subs), count))
            mail.sendmail(
                self.ui,
                emailutils.parseaddr(msg[r'From'])[1],
                subs,
                msgtext,
                mbox=self.mbox,
            )
Example #19
def svnutcdate(date):
    '''Date. Returns a UTC-date in this format: "2009-08-18
    11:00:13Z".
    '''
    dateformat = '%Y-%m-%d %H:%M:%SZ'
    return dateutil.datestr((date[0], 0), dateformat)
Example #20
def _report_commit(ui, repo, ctx):
    domain = ui.config(b'notify_published', b'domain') or ui.config(
        b'notify', b'domain')
    messageidseed = ui.config(b'notify_published',
                              b'messageidseed') or ui.config(
                                  b'notify', b'messageidseed')
    template = ui.config(b'notify_published', b'template')
    spec = formatter.literal_templatespec(template)
    templater = logcmdutil.changesettemplater(ui, repo, spec)
    ui.pushbuffer()
    n = notify.notifier(ui, repo, b'incoming')

    subs = set()
    for sub, spec in n.subs:
        if spec is None:
            subs.add(sub)
            continue
        revs = repo.revs(b'%r and %d:', spec, ctx.rev())
        if len(revs):
            subs.add(sub)
            continue
    if len(subs) == 0:
        ui.debug(
            b'notify_published: no subscribers to selected repo and revset\n')
        return

    templater.show(
        ctx,
        changes=ctx.changeset(),
        baseurl=ui.config(b'web', b'baseurl'),
        root=repo.root,
        webroot=n.root,
    )
    data = ui.popbuffer()

    try:
        msg = mail.parsebytes(data)
    except emailerrors.MessageParseError as inst:
        raise error.Abort(inst)

    msg['In-reply-to'] = notify.messageid(ctx, domain, messageidseed)
    msg['Message-Id'] = notify.messageid(ctx, domain,
                                         messageidseed + b'-published')
    msg['Date'] = encoding.strfromlocal(
        dateutil.datestr(format=b"%a, %d %b %Y %H:%M:%S %1%2"))
    if not msg['From']:
        sender = ui.config(b'email', b'from') or ui.username()
        if b'@' not in sender or b'@localhost' in sender:
            sender = n.fixmail(sender)
        msg['From'] = mail.addressencode(ui, sender, n.charsets, n.test)
    msg['To'] = ', '.join(sorted(subs))

    msgtext = msg.as_bytes() if pycompat.ispy3 else msg.as_string()
    if ui.configbool(b'notify', b'test'):
        ui.write(msgtext)
        if not msgtext.endswith(b'\n'):
            ui.write(b'\n')
    else:
        ui.status(_(b'notify_published: sending mail for %d\n') % ctx.rev())
        mail.sendmail(ui,
                      emailutils.parseaddr(msg['From'])[1],
                      subs,
                      msgtext,
                      mbox=n.mbox)