Example #1
 def _testPhase1(self, d):
     g = self.getStatusPush()
     self.path = g.path
     # Now the slave is connected, trigger a change.
     cm = self.master.change_svc
     c = changes.Change("bob", ["Makefile", "foo/bar.c"], "changed stuff")
     cm.addChange(c)
     d = self.requestBuild("dummy")
     d.addCallback(self._testPhase2)
     return d
Example #2
 def test_constructor_most_recent_change(self):
     chgs = [
         changes.Change('author', [],
                        'comments',
                        branch='branch',
                        revision='2'),
         changes.Change('author', [],
                        'comments',
                        branch='branch',
                        revision='3'),
         changes.Change('author', [],
                        'comments',
                        branch='branch',
                        revision='1'),
     ]
     for ch in chgs:  # mock the DB changeid (aka build number) to match rev
         ch.number = int(ch.revision)
     ss = sourcestamp.SourceStamp(changes=chgs)
     self.assertEquals(ss.revision, '3')
Example #3
    def doBuilderEnvTest(self, branch, cb):
        c = changes.Change("bob", ["Makefile", "foo/bar.c"], "changed",
                           branch=branch)
        self.master.change_svc.addChange(c)

        d = defer.Deferred()
        reactor.callLater(0.5, d.callback, None)
        d.addCallback(cb)

        return d
Example #4
 def _process_changes(self, query):
     change_list = self._parse_changes(query)
     for change in change_list:
         c = changes.Change(
             who=change["author"],
             files=[],  # sucks
             comments=change["description"],
             when=change["pubDate"],
             branch=self.branch)
         self.parent.addChange(c)
     self.lastChange = max(self.lastPoll,
                           *[c["pubDate"] for c in change_list])
Example #5
    def testTestFlag(self):
        m = self.master
        m.loadConfig(config_test_flag)
        m.readConfig = True
        m.startService()

        c = changes.Change("bob", ["Makefile", "foo/bar.c"], "changed stuff")
        m.change_svc.addChange(c)

        d = self.connectSlave()
        d.addCallback(self._testTestFlag_1)
        return d
Example #6
    def setupTest(self, args, dummyclass, checkFn):
        self.clearFlags()
        m = self.master
        m.loadConfig(self.mkConfig(args, dummyclass))
        m.readConfig = True
        m.startService()

        c = changes.Change("bob", ["Makefile", "foo/bar.c"], "changed stuff")
        m.change_svc.addChange(c)

        d = self.connectSlave(builders=['triggerer', 'triggeree'])
        d.addCallback(self.startTimer, 0.5, checkFn)
        return d
Example #7
    def setupTest(self, config, builders, checkFn):
        self.clearFlags()
        m = self.master
        m.loadConfig(config)
        m.readConfig = True
        m.startService()

        c = changes.Change("bob", ["Makefile", "foo/bar.c"], "changed stuff")
        m.change_svc.addChange(c)

        d = self.connectSlave(builders=builders)
        d.addCallback(self.startTimer, 0.5, checkFn)
        return d
Example #8
 def _send(res):
     cm = self.master.change_svc
     c = changes.Change("bob", ["Makefile", "foo/bar.c"],
                        "changed stuff")
     cm.addChange(c)
     # send some build requests
     reqs = []
     ss = SourceStamp(changes=[c])
     for i in range(NB_CHANGES):
         bss = self.control.submitBuildSet(["dummy"],
                                           ss,
                                           reason='Reason %d' % i)
         reqs.append(bss.waitUntilFinished())
     return defer.DeferredList(reqs)
Example #9
    def do_test(self, config, builder_should_run):
        self.master.loadConfig(config)
        self.master.readConfig = True
        self.master.startService()
        d = self.connectSlave()

        # send a change
        cm = self.master.change_svc
        c = changes.Change("bob", ["Makefile", "foo/bar.c"], "changed stuff")
        cm.addChange(c)

        d.addCallback(self._do_test1, builder_should_run)

        return d
Example #10
 def _process_changes(self, results, url):
     """ Take the results of polling the locales list and ftp page
         and send a change to the parent branch if all the locales
         are ready """
     pageContents = results['pageContents']
     locales = results['locales']
     if self.parseContents(pageContents, locales):
         c = changes.Change(who=url,
                            comments="success",
                            files=[],
                            branch=self.branch)
         self.parent.addChange(c)
     #return the locales list for the next ftp poller in the callback chain
     return locales
Example #11
 def processData(self, query):
     change_list = _parse_changes(query, self.lastChange)
     for change in change_list:
         adjustedChangeTime = change["updated"]
         c = changes.Change(
             who=change["author"],
             files=[],  # sucks
             revision=change["changeset"],
             comments=change["link"],
             when=adjustedChangeTime,
             branch=self.branch)
         self.parent.addChange(c)
     if len(change_list) > 0:
         self.lastChange = max(self.lastChange,
                               *[c["updated"] for c in change_list])
Example #12
    def _process_changes(self, query):
        change_list = self._parse_changes(query)

        # Skip calling addChange() if this is the first successful poll.
        if self.lastChange is not None:
            for change in change_list:
                c = changes.Change(revision=change["revision"],
                                   who=change["author"],
                                   files=change["files"],
                                   comments=change["comments"],
                                   when=change["when"],
                                   branch=self.branch)
                self.parent.addChange(c)
        if change_list:
            self.lastChange = change_list[-1]["revision"]
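The parser that fills change_list is not shown here. As a rough sketch, assuming only the keys the loop above reads, each parsed entry would look something like this (hypothetical values):

    # Hypothetical output of self._parse_changes(query); only the keys used
    # above (revision, author, files, comments, when) are assumed.
    change_list = [
        {
            "revision": "1a2b3c",
            "author": "bob",
            "files": ["Makefile", "foo/bar.c"],
            "comments": "changed stuff",
            "when": 1234567890,  # seconds since the epoch
        },
    ]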
Example #13
 def setUp(self):
     self.master = fakemaster.make_master(testcase=self, wantDb=True)
     self.change23 = changes.Change(**dict(  # using **dict(..) forces kwargs
         category='devel',
         repository=u'git://warner',
         codebase=u'mainapp',
         who=u'dustin',
         when=266738404,
         comments=u'fix whitespace',
         project=u'Buildbot',
         branch=u'warnerdb',
         revlink=u'http://warner/0e92a098b',
         properties={'notest': "no"},
         files=[u'master/README.txt', u'worker/README.txt'],
         revision=u'deadbeef'))
     self.change23.number = 23
Example #14
    def _add_change(self, results, rev):
        log.msg('gitpoller: _add_change results: "%s", rev: "%s" in "%s"' %
                (results, rev, self.workdir))

        c = changes.Change(who=self.commitInfo['name'],
                           revision=rev,
                           files=self.commitInfo['files'],
                           comments=self.commitInfo['comments'],
                           when=self.commitInfo['timestamp'],
                           branch=self.branch,
                           category=self.category,
                           project=self.project,
                           repository=self.repourl)
        log.msg('gitpoller: change "%s" in "%s"' % (c, self.workdir))
        self.parent.addChange(c)
        self.lastChange = self.lastPoll
Example #15
 def testMaster(self):
     self.rmtree("basedir")
     os.mkdir("basedir")
     m = master.BuildMaster("basedir")
     m.loadConfig(config_run)
     m.readConfig = True
     m.startService()
     cm = m.change_svc
     c = changes.Change("bob", ["Makefile", "foo/bar.c"], "changed stuff")
     cm.addChange(c)
     # verify that the Scheduler is now waiting
     s = m.allSchedulers()[0]
     self.failUnless(s.timer)
     # halting the service will also stop the timer
     d = defer.maybeDeferred(m.stopService)
     return d
Example #16
    def _add_change(self, results, rev, branch):
        log.msg('gitpoller: _add_change results: "%s", rev: "%s" in "%s"' %
                (results, rev, self.workdir))

        c = changes.Change(
            who=self.commitInfo["name"],
            revision=rev,
            files=self.commitInfo["files"],
            comments=self.commitInfo["comments"],
            when=self.commitInfo["timestamp"],
            branch=branch,
            category=self.category,
            project=self.project,
            repository=self.repourl,
        )
        log.msg('gitpoller: change "%s" in "%s on branch %s"' %
                (c, self.workdir, branch))
        self.parent.addChange(c)
        self.lastChange = self.lastPoll
Example #17
    def _process_describe(self, result, num):
        lines = result.split('\n')
        # SF#1555985: Wade Brainerd reports a stray ^M at the end of the date
        # field. The rstrip() is intended to remove that.
        lines[0] = lines[0].rstrip()
        m = self.describe_header_re.match(lines[0])
        if not m:
            raise P4PollerError("Unexpected 'p4 describe -s' result: %r" %
                                result)
        who = m.group('who')
        when = time.mktime(time.strptime(m.group('when'), self.datefmt))
        comments = ''
        while not lines[0].startswith('Affected files'):
            comments += lines.pop(0) + '\n'
        lines.pop(0)  # affected files

        branch_files = {}  # dict for branch mapped to file(s)
        while lines:
            line = lines.pop(0).strip()
            if not line: continue
            m = self.file_re.match(line)
            if not m:
                raise P4PollerError("Invalid file line: %r" % line)
            path = m.group('path')
            if path.startswith(self.p4base):
                branch, file = self.split_file(path[len(self.p4base):])
                if branch is None and file is None:
                    continue
                if branch in branch_files:
                    branch_files[branch].append(file)
                else:
                    branch_files[branch] = [file]

        for branch in branch_files:
            c = changes.Change(who=who,
                               files=branch_files[branch],
                               comments=comments,
                               revision=str(num),
                               when=when,
                               branch=branch)
            self.parent.addChange(c)

        self.last_change = num
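split_file() is supplied by the poller's configuration and is not part of the code above. A minimal sketch of one possible implementation, assuming a conventional trunk/branches depot layout (illustrative only):

    # Maps a path relative to p4base, e.g. "trunk/src/main.c" or
    # "branches/1.0/src/main.c", to a (branch, file) tuple. Returning
    # (None, None) makes the caller above skip the file.
    def split_file(path):
        pieces = path.split('/')
        if pieces[0] == 'trunk':
            return None, '/'.join(pieces[1:])       # default branch
        if pieces[0] == 'branches' and len(pieces) > 2:
            return pieces[1], '/'.join(pieces[2:])
        return None, None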
Example #18
    def perspective_addChange(self, changedict):
        log.msg("perspective_addChange called")
        pathnames = []
        prefixpaths = None
        for path in changedict['files']:
            if self.prefix:
                if not path.startswith(self.prefix):
                    # this file does not start with the prefix, so ignore it
                    continue
                path = path[len(self.prefix):]
            pathnames.append(path)

        if pathnames:
            change = changes.Change(
                changedict['who'],
                pathnames,
                changedict['comments'],
                branch=changedict.get('branch'),
                revision=changedict.get('revision'),
            )
            self.changemaster.addChange(change)
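The changedict arrives over the PB wire from a change sender, so its exact contents depend on that sender. A minimal sketch of the shape this method reads, with hypothetical values:

    # Only the keys consulted above are assumed: 'who', 'files', 'comments',
    # plus the optional 'branch' and 'revision'.
    changedict = {
        'who': 'bob',
        'files': ['trunk/Makefile', 'trunk/foo/bar.c'],
        'comments': 'changed stuff',
        'branch': 'trunk',
        'revision': '1234',
    }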
Example #19
    def testWatchers(self):
        self.clearFlags()
        m = self.master
        m.loadConfig(self.config_watchable)
        m.readConfig = True
        m.startService()

        c = changes.Change("bob", ["Makefile", "foo/bar.c"], "changed stuff")
        m.change_svc.addChange(c)

        d = self.connectSlave(builders=['a', 'b'])

        def pause(res):
            d = defer.Deferred()
            reactor.callLater(1, d.callback, res)
            return d
        d.addCallback(pause)

        def checkFn(res):
            self.failUnlessEqual(self.getFlag('numCalls'), 1)
        d.addCallback(checkFn)
        return d
Example #20
 def change(self):
     return changes.Change(self.who,
                           self.files,
                           self.comment,
                           when=self.when,
                           branch=self.branch)
Example #21
    def processData(self, query):
        push_data = parse_pushlog_json(query)

        # The payload tells us the most recent push ID. If it is the empty
        # string, the pushlog is empty and there is no data to consume.
        if not push_data['lastpushid']:
            self.emptyRepo = True
            self.lastPushID = None
            if self.verbose:
                log.msg('%s is empty' % self.baseURL)
            return

        # If nothing has changed and we're fully caught up, the remote
        # lastpushid will be the same as self.lastPushID.
        #
        # If the remote lastpushid is less than a previously observed value,
        # this could mean one of the following:
        #
        #    a) Data from the pushlog was removed (perhaps the repo was
        #       stripped)
        #    b) The repo/pushlog was reset.
        #
        # These scenarios should be rare. In both of them, our assumption
        # about the behavior of the pushlog always being monotonically
        # increasing have been invalidated. So we reset state and start
        # again.
        #
        # It's worth noting that a reset repo's pushlog could have *more*
        # entries than the former repo. In this case, this code will fail
        # to detect a reset repo from the pushlog alone.
        if self.lastPushID and push_data['lastpushid'] < self.lastPushID:
            self.emptyRepo = False
            self.lastPushID = None
            log.msg('%s appears to have been reset; clearing state' %
                    self.baseURL)
            return

        # No pushes to process. Exit early.
        if not push_data['pushes']:
            return

        # We want to add at most self.maxChanges changes per push. If
        # mergePushChanges is True, then we'll get up to maxChanges pushes,
        # each with up to maxChanges changes.
        # Go through the list of pushes backwards, since we want to keep the
        # latest ones and possibly discard earlier ones.
        change_list = []
        too_many = False
        for push in reversed(push_data['pushes']):
            # If no changesets in this push, do nothing. This likely
            # occurs when changesets are obsoleted.
            if not push['changesets']:
                continue

            # Used for merging push changes
            c = dict(
                user=push['user'],
                date=push['date'],
                files=[],
                desc="",
                node=None,
                commit_titles=[],
                commit_titles_total_length=0,
            )

            i = 0
            for change in reversed(push['changesets']):
                if self.maxChanges is not None and (len(change_list) >= self.maxChanges or
                                                    i >= self.maxChanges):
                    too_many = True
                    log.msg("%s: got too many changes" % self.baseURL)
                    break

                # Ignore changes not on the specified in-repo branch.
                if self.repo_branch is not None and self.repo_branch != change['branch']:
                    continue

                i += 1

                if self.mergePushChanges:
                    # Collect all the files for this push
                    c['files'].extend(change['files'])
                    # Keep the comments and revision of the last change of this push.
                    # We're going through the changes in reverse order, so we
                    # should use the comments and revision of the first change
                    # in this loop
                    if c['node'] is None:
                        c['desc'] = change['desc'][:500]
                        c['node'] = change['node']

                    title = change['desc'].split('\n', 1)[0]
                    if len(title) > 100:
                        trim_pos = title.rfind(' ', 0, 100)
                        if trim_pos == -1:
                            trim_pos = 100
                        title = title[:trim_pos]
                    # The commit titles are stored in a Change property, which
                    # are limited to 1024 chars in the database (see
                    # change_properties in buildbot/db/scheme/tables.sql). In
                    # order to avoid insert/update failures, we enforce a cap
                    # on the total length with enough room for JSON overhead.
                    if c['commit_titles_total_length'] + len(title) + 5 <= 800:
                        c['commit_titles_total_length'] += len(title) + 5  # for json encoding like , " etc.
                        c['commit_titles'].append(title)
                else:
                    c = dict(
                        user=push['user'],
                        date=push['date'],
                        files=change['files'],
                        desc=change['desc'][:500],
                        node=change['node'],
                        branch=change['branch'],
                    )
                    change_list.append(c)

            if too_many and self.mergePushChanges:
                # Add a dummy change to indicate we had too many changes
                c['files'].extend(['overflow'])

            if self.mergePushChanges and c['node'] is not None:
                change_list.append(c)

        if too_many and not self.mergePushChanges:
            # We add this at the end, and the list gets reversed below. That
            # means this dummy change ends up being the 'first' change of the
            # set, and buildbot chooses the last change as the one to
            # build, so this dummy change doesn't impact which revision
            # gets built.
            c = dict(
                user='******',
                files=['overflow'],
                node=None,
                desc='more than maxChanges(%i) received; ignoring the rest' % self.maxChanges,
                date=time.time(),
            )
            change_list.append(c)

        # Un-reverse the list of changes so they get added in the right order
        change_list.reverse()

        # If we have a lastPushID, we've consumed data already so any changes
        # returned here are new.

        # If the repository was previously empty (indicated by emptyRepo=True),
        # we also want to pay attention to all these pushes.

        # If we don't have a lastPushID and the repository isn't empty, then
        # don't trigger any new builds, and start monitoring for changes
        # from the last push ID.
        if self.lastPushID is not None or self.emptyRepo:
            for change in change_list:
                link = "%s/rev/%s" % (self.baseURL, change["node"])
                # change['desc'] can contain unicode characters that break DB
                # insertion, convert them to '?'
                change['desc'] = change['desc'].encode('ascii', 'replace')
                c = changes.Change(who=change["user"],
                                   files=change["files"],
                                   revision=change["node"],
                                   comments=change["desc"],
                                   revlink=link,
                                   when=change["date"],
                                   branch=self.branch)
                if 'commit_titles' in change:
                    c.properties.setProperty('commit_titles',
                                             change['commit_titles'],
                                             'BaseHgPoller')
                self.changeHook(c)
                self.parent.addChange(c)

        # The repository isn't empty any more!
        self.emptyRepo = False
        # Use the last change found by the poller, regardless of if it's on our
        # branch or not.
        self.lastPushID = push_data['pushes'][-1]['pushid']
        if self.verbose:
            log.msg('last processed push id on %s is %d' %
                    (self.baseURL, self.lastPushID))
Example #22
                if node not in self.previousChange:
                    result.nodes.append(node)
            self.previousChange = oldResults
        else:
            self.previousChange = result.nodes
            return
        
        allBuildDates = []
        for buildNode in result.nodes:
            buildDate = int(buildNode['date'])
            if self.lastChange > buildDate:
                # change too old
                continue
            allBuildDates.append(buildDate)
            # ignore if build is busted
            if buildNode['status'] != 'success':
                continue
            c = changes.Change(who = buildNode['hostname'],
                               files = ['TODO: filename goes here'],
                               comments = buildNode['status'],
                               branch = self.branch,
                               when = buildDate)
            self.parent.addChange(c)
        
        # do not allow repeats - count the last change as the largest
        # build start time that has been seen
        if allBuildDates:
            self.lastChange = max(allBuildDates)
    

Example #23
def _createDummyChange(revision):
    return changes.Change('Committer', ['files'], 'comment', revision=revision)
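A sketch of how a helper like this might be used in a test, assuming the SourceStamp behaviour demonstrated in Example #2 (hypothetical usage, not taken from the source):

    chgs = [_createDummyChange('1'), _createDummyChange('2')]
    for ch in chgs:
        ch.number = int(ch.revision)  # fake the DB changeid, as in Example #2
    ss = sourcestamp.SourceStamp(changes=chgs)
    assert ss.revision == '2'  # the highest-numbered change wins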
Example #24
    def parse(self, m, prefix=None):
        """Parse branch notification messages sent by Launchpad.
        """

        subject = m["subject"]
        match = re.search(r"^\s*\[Branch\s+([^]]+)\]", subject)
        if match:
            repository = match.group(1)
        else:
            repository = None

        # Put these into a dictionary, otherwise we cannot assign them
        # from nested function definitions.
        d = { 'files': [], 'comments': "" }
        gobbler = None
        rev = None
        who = None
        when = util.now()
        def gobble_comment(s):
            d['comments'] += s + "\n"
        def gobble_removed(s):
            d['files'].append('%s REMOVED' % s)
        def gobble_added(s):
            d['files'].append('%s ADDED' % s)
        def gobble_modified(s):
            d['files'].append('%s MODIFIED' % s)
        def gobble_renamed(s):
            match = re.search(r"^(.+) => (.+)$", s)
            if match:
                d['files'].append('%s RENAMED %s' % (match.group(1), match.group(2)))
            else:
                d['files'].append('%s RENAMED' % s)

        lines = list(body_line_iterator(m, True))
        rev = None
        while lines:
            line = lines.pop(0)

            # revno: 101
            match = re.search(r"^revno: ([0-9.]+)", line)
            if match:
                rev = match.group(1)

            # committer: Joe <*****@*****.**>
            match = re.search(r"^committer: (.*)$", line)
            if match:
                who = match.group(1)

            # timestamp: Fri 2009-05-15 10:35:43 +0200
            # datetime.strptime() is supposed to support %z for time zone, but
            # it does not seem to work. So handle the time zone manually.
            match = re.search(r"^timestamp: [a-zA-Z]{3} (\d{4}-\d{2}-\d{2} \d{2}:\d{2}:\d{2}) ([-+])(\d{2})(\d{2})$", line)
            if match:
                datestr = match.group(1)
                tz_sign = match.group(2)
                tz_hours = match.group(3)
                tz_minutes = match.group(4)
                when = parseLaunchpadDate(datestr, tz_sign, tz_hours, tz_minutes)

            if re.search(r"^message:\s*$", line):
                gobbler = gobble_comment
            elif re.search(r"^removed:\s*$", line):
                gobbler = gobble_removed
            elif re.search(r"^added:\s*$", line):
                gobbler = gobble_added
            elif re.search(r"^renamed:\s*$", line):
                gobbler = gobble_renamed
            elif re.search(r"^modified:\s*$", line):
                gobbler = gobble_modified
            elif re.search(r"^  ", line) and gobbler:
                gobbler(line[2:-1]) # Use :-1 to gobble trailing newline

        # Determine the name of the branch.
        branch = None
        if self.branchMap and repository:
            if repository in self.branchMap:
                branch = self.branchMap[repository]
            elif 'lp:' + repository in self.branchMap:
                branch = self.branchMap['lp:' + repository]
        if not branch:
            if self.defaultBranch:
                branch = self.defaultBranch
            else:
                if repository:
                    branch = 'lp:' + repository
                else:
                    branch = None

        #log.msg("parse(): rev=%s who=%s files=%s comments='%s' when=%s branch=%s" % (rev, who, d['files'], d['comments'], time.asctime(time.localtime(when)), branch))
        if rev and who:
            return changes.Change(who, d['files'], d['comments'],
                                  when=when, revision=rev, branch=branch)
        else:
            return None
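branchMap and defaultBranch come from the mail source's configuration and are not shown above. A minimal sketch of what a branchMap might look like, with hypothetical Launchpad branch names (the lookup above accepts keys with or without the 'lp:' prefix):

    branchMap = {
        'lp:~some-team/some-project/trunk': 'trunk',
        '~some-team/some-project/stable': 'stable',
    }
    defaultBranch = 'trunk'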
Example #25
    def parse(self, m, prefix=None):
        """Parse messages sent by the svn 'commit-email.pl' trigger.
        """

        # The mail is sent from the person doing the checkin. Assume that the
        # local username is enough to identify them (this assumes a one-server
        # cvs-over-rsh environment rather than the server-dirs-shared-over-NFS
        # model)
        name, addr = parseaddr(m["from"])
        if not addr:
            return None  # no From means this isn't a commit mail
        at = addr.find("@")
        if at == -1:
            who = addr # might still be useful
        else:
            who = addr[:at]

        # we take the time of receipt as the time of checkin. Not correct (it
        # depends upon the email latency), but it avoids the
        # out-of-order-changes issue. Also syncmail doesn't give us anything
        # better to work with, unless you count pulling the v1-vs-v2
        # timestamp out of the diffs, which would be ugly. TODO: Pulling the
        # 'Date:' header from the mail is a possibility, and
        # email.Utils.parsedate_tz may be useful. It should be configurable,
        # however, because there are a lot of broken clocks out there.
        when = util.now()

        files = []
        comments = ""
        isdir = 0
        lines = list(body_line_iterator(m))
        rev = None
        while lines:
            line = lines.pop(0)

            # "Author: jmason"
            match = re.search(r"^Author: (\S+)", line)
            if match:
                who = match.group(1)

            # "New Revision: 105955"
            match = re.search(r"^New Revision: (\d+)", line)
            if match:
                rev = match.group(1)

            # possible TODO: use "Date: ..." data here instead of time of
            # commit message receipt, above. however, this timestamp is
            # specified *without* a timezone, in the server's local TZ, so to
            # be accurate buildbot would need a config setting to specify the
            # source server's expected TZ setting! messy.

            # this stanza ends with the "Log:"
            if (line == "Log:\n"):
                break

        # commit message is terminated by the file-listing section
        while lines:
            line = lines.pop(0)
            if (line == "Modified:\n" or
                line == "Added:\n" or
                line == "Removed:\n"):
                break
            comments += line
        comments = comments.rstrip() + "\n"

        while lines:
            line = lines.pop(0)
            if line == "\n":
                break
            if line.find("Modified:\n") == 0:
                continue            # ignore this line
            if line.find("Added:\n") == 0:
                continue            # ignore this line
            if line.find("Removed:\n") == 0:
                continue            # ignore this line
            line = line.strip()

            thesefiles = line.split(" ")
            for f in thesefiles:
                if prefix:
                    # insist that the file start with the prefix: we may get
                    # changes we don't care about too
                    if f.startswith(prefix):
                        f = f[len(prefix):]
                    else:
                        log.msg("ignored file from svn commit: prefix '%s' "
                                "does not match filename '%s'" % (prefix, f))
                        continue

                # TODO: figure out how new directories are described, set
                # .isdir
                files.append(f)

        if not files:
            log.msg("no matching files found, ignoring commit")
            return None

        return changes.Change(who, files, comments, when=when, revision=rev)
Example #26
    def parse(self, m, prefix=None):
        """Parse mail sent by the Bonsai cvs loginfo script."""

        # we don't care who the email came from b/c the cvs user is in the
        # msg text

        who = "unknown"
        timestamp = None
        files = []
        lines = list(body_line_iterator(m))

        # read the control lines (what/who/where/file/etc.)
        while lines:
            line = lines.pop(0)
            if line == "LOGCOMMENT\n":
                break
            line = line.rstrip("\n")

            # we'd like to do the following but it won't work if the number of
            # items doesn't match so...
            #   what, timestamp, user, repo, module, file = line.split( '|' )
            items = line.split('|')
            if len(items) < 9:
                # not a valid line (the sticky and branch fields read below
                # are also needed); assume this isn't a bonsai message
                return None

            try:
                # just grab the bottom-most timestamp, they're probably all the
                # same. TODO: I'm assuming this is relative to the epoch, but
                # this needs testing.
                timestamp = int(items[1])
            except ValueError:
                pass

            user = items[2]
            if user:
                who = user

            module = items[4]
            file = items[5]
            if module and file:
                path = "%s/%s" % (module, file)
                files.append(path)
            sticky = items[7]
            branch = items[8]

        # if no files changed, return nothing
        if not files:
            return None

        # read the comments
        comments = ""
        while lines:
            line = lines.pop(0)
            if line == ":ENDLOGCOMMENT\n":
                break
            comments += line
        comments = comments.rstrip() + "\n"

        # return buildbot Change object
        return changes.Change(who, files, comments, when=timestamp,
                              branch=branch)
Example #27
    def parse(self, m, prefix=None):
        """Parse messages sent by the 'syncmail' program, as suggested by the
        sourceforge.net CVS Admin documentation. Syncmail is maintained at
        syncmail.sf.net .
        """
        # pretty much the same as freshcvs mail, not surprising since CVS is
        # the one creating most of the text

        # The mail is sent from the person doing the checkin. Assume that the
        # local username is enough to identify them (this assumes a one-server
        # cvs-over-rsh environment rather than the server-dirs-shared-over-NFS
        # model)
        name, addr = parseaddr(m["from"])
        if not addr:
            return None  # no From means this message isn't from syncmail
        at = addr.find("@")
        if at == -1:
            who = addr # might still be useful
        else:
            who = addr[:at]

        # we take the time of receipt as the time of checkin. Not correct (it
        # depends upon the email latency), but it avoids the
        # out-of-order-changes issue. Also syncmail doesn't give us anything
        # better to work with, unless you count pulling the v1-vs-v2
        # timestamp out of the diffs, which would be ugly. TODO: Pulling the
        # 'Date:' header from the mail is a possibility, and
        # email.Utils.parsedate_tz may be useful. It should be configurable,
        # however, because there are a lot of broken clocks out there.
        when = util.now()

        subject = m["subject"]
        # syncmail puts the repository-relative directory in the subject:
        # mprefix + "%(dir)s %(file)s,%(oldversion)s,%(newversion)s", where
        # 'mprefix' is something that could be added by a mailing list
        # manager.
        # this is the only reasonable way to determine the directory name
        space = subject.find(" ")
        if space != -1:
            directory = subject[:space]
        else:
            directory = subject

        files = []
        comments = ""
        isdir = 0
        branch = None

        lines = list(body_line_iterator(m))
        while lines:
            line = lines.pop(0)

            if (line == "Modified Files:\n" or
                line == "Added Files:\n" or
                line == "Removed Files:\n"):
                break

        while lines:
            line = lines.pop(0)
            if line == "\n":
                break
            if line == "Log Message:\n":
                lines.insert(0, line)
                break
            line = line.lstrip()
            line = line.rstrip()
            # note: syncmail will send one email per directory involved in a
            # commit, with multiple files if they were in the same directory.
            # Unlike freshCVS, it makes no attempt to collect all related
            # commits into a single message.

            # note: syncmail will report a Tag underneath the ... Files: line
            # e.g.:       Tag: BRANCH-DEVEL

            if line.startswith('Tag:'):
                branch = line.split(' ')[-1].rstrip()
                continue

            thesefiles = line.split(" ")
            for f in thesefiles:
                f = directory + "/" + f
                if prefix:
                    # insist that the file start with the prefix: we may get
                    # changes we don't care about too
                    if f.startswith(prefix):
                        f = f[len(prefix):]
                    else:
                        continue
                # TODO: figure out how new directories are described, set
                # .isdir
                files.append(f)

        if not files:
            return None

        while lines:
            line = lines.pop(0)
            if line == "Log Message:\n":
                break
        # message is terminated by "Index:..." (patch) or "--- NEW FILE.."
        # or "--- filename DELETED ---". Sigh.
        while lines:
            line = lines.pop(0)
            if line.find("Index: ") == 0:
                break
            if re.search(r"^--- NEW FILE", line):
                break
            if re.search(r" DELETED ---$", line):
                break
            comments += line
        comments = comments.rstrip() + "\n"

        change = changes.Change(who, files, comments, isdir, when=when,
                                branch=branch)

        return change
Example #28
    def parse(self, m, prefix=None):
        """Parse mail sent by FreshCVS"""

        # FreshCVS sets From: to "user CVS <user>", but the <> part may be
        # modified by the MTA (to include a local domain)
        name, addr = parseaddr(m["from"])
        if not name:
            return None # no From means this message isn't from FreshCVS
        cvs = name.find(" CVS")
        if cvs == -1:
            return None # this message isn't from FreshCVS
        who = name[:cvs]

        # we take the time of receipt as the time of checkin. Not correct,
        # but it avoids the out-of-order-changes issue. See the comment in
        # parseSyncmail about using the 'Date:' header
        when = util.now()

        files = []
        comments = ""
        isdir = 0
        lines = list(body_line_iterator(m))
        while lines:
            line = lines.pop(0)
            if line == "Modified files:\n":
                break
        while lines:
            line = lines.pop(0)
            if line == "\n":
                break
            line = line.rstrip("\n")
            linebits = line.split(None, 1)
            file = linebits[0]
            if prefix:
                # insist that the file start with the prefix: FreshCVS sends
                # changes we don't care about too
                if file.startswith(prefix):
                    file = file[len(prefix):]
                else:
                    continue
            if len(linebits) == 1:
                isdir = 1
            elif linebits[1] == "0 0":
                isdir = 1
            files.append(file)
        while lines:
            line = lines.pop(0)
            if line == "Log message:\n":
                break
        # message is terminated by "ViewCVS links:" or "Index:..." (patch)
        while lines:
            line = lines.pop(0)
            if line == "ViewCVS links:\n":
                break
            if line.find("Index: ") == 0:
                break
            comments += line
        comments = comments.rstrip() + "\n"

        if not files:
            return None

        change = changes.Change(who, files, comments, isdir, when=when)

        return change
Example #29
            buildDate = int(dir)
            if self.lastChanges[url] >= buildDate:
                # change too old
                if not self.forceBuild:
                    continue
                else:
                    self.forceBuild = 0
            self.lastChanges[url] = buildDate
            self.working = self.working + 1
            d = self._get_changes(url + dir + '/')
            d.addBoth(self._process_changes, buildDate)
            d.addBoth(self._finished)

        #if we have a new browser to test, test it
        for buildname, fullpath, buildDate in dateList:
            if url in self.lastChanges:
                if self.lastChanges[url] >= buildDate:
                    # change too old
                    continue
            if forceDate > 0:
                buildDate = forceDate
            else:
                self.lastChanges[url] = buildDate
            c = changes.Change(who = url,
                               comments = "success",
                               files = [fullpath,],
                               branch = self.branch,
                               when = buildDate,)
            self.parent.addChange(c)
            log.msg("found a browser to test (%s)" % (fullpath))
Example #30
    def parse(self, m, prefix=None):
        if m is None:
            # not a mail at all
            return None

        from_header = m['from']
        if '<' in from_header:
            from_email = m['from'].split('<')[1][:-1]
        else:
            from_email = m['from']

        # From is [email protected]
        name, domain = from_email.split("@")

        # If this e-mail is valid, it will come from an svn/src.gnome.org email
        if domain != 'src.gnome.org':
            return None

        # we take the time of receipt as the time of checkin. Not correct, but it
        # avoids the out-of-order-changes issue. See the comment in parseSyncmail
        # about using the 'Date:' header
        when = util.now()

        revision = None
        files = []
        comments = ""
        isdir = 0
        links = []

        subject = m['subject']

        if not subject.startswith('['):
            # not a git message, abort
            return None

        # git message
        revision = m.get('X-Git-Newrev')
        if not revision:
            # not a new git revision, may be a new tag, a new branch, etc.
            return None

        if revision == '0000000000000000000000000000000000000000':
            # probably a deleted branch, ignore
            return None

        if m.get('X-Git-Refname', '').startswith('refs/tags/'):
            # ignore tags
            return None

        try:
            project = subject[1:subject.index(']')]
        except ValueError:
            return None  # old git commit message format; ignored

        if '/' in project:
            # remove the branch part (ex: [anjal/inline-composer-quotes])
            project = project.split('/')[0]

        if ':' in project:
            # remove the patch number part (ex: [anjal: 3/3])
            project = project.split(':')[0]

        if 'Created branch' in subject:
            # new branches don't have to trigger rebuilds
            return None

        if 'Merge branch' in subject:
            comments = subject[subject.index('Merge branch'):]
        elif 'Merge commit' in subject:
            comments = subject[subject.index('Merge commit'):]
        else:
            lines = list(body_line_iterator(m, m['Content-Transfer-Encoding']))
            after_date = False
            in_files = False
            while lines:
                line = lines.pop(0)
                if line.startswith('Date:'):
                    after_date = True
                    continue
                if not after_date:
                    continue
                if (len(line) > 3 and line[0] == ' '
                        and line[1] != ' ' and '|' in line):
                    in_files = True
                if line.startswith('---'):
                    break
                if in_files:
                    if '|' not in line:
                        break
                    files.append(line.split()[0])
                else:
                    comments += line[4:] + '\n'

            comments = unicode(comments.strip(),
                               m.get_content_charset() or 'ascii', 'ignore')

        c = changes.Change(name,
                           files,
                           comments,
                           isdir,
                           revision=revision,
                           links=links,
                           when=when)
        c.project = project
        c.git_module_name = project

        # some modules may have alternate checkouts under different names, look
        # for those, and create appropriate Change objects
        for module in self.modules:
            if hasattr(module, 'branch') and isinstance(
                    module.branch, GitBranch):
                git_module_name = module.branch.module.rsplit('/', 1)[-1]
                if module.name != project and git_module_name == project:
                    change = changes.Change(name,
                                            files,
                                            comments,
                                            isdir,
                                            revision=revision,
                                            links=links,
                                            when=when)
                    change.project = module.name
                    change.git_module_name = git_module_name
                    self.parent.addChange(change)

        return c
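Only a handful of mail fields drive the decisions above. A minimal sketch of a message carrying just the headers this parser consults, built with the standard library email module and hypothetical values:

    import email

    raw = (
        "From: Bob Example <bob@src.gnome.org>\n"
        "Subject: [anjal] fix whitespace\n"
        "X-Git-Newrev: deadbeef0000000000000000000000000000beef\n"
        "X-Git-Refname: refs/heads/master\n"
        "Content-Transfer-Encoding: 8bit\n"
        "\n"
        "commit mail body goes here\n"
    )
    m = email.message_from_string(raw)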