Example No. 1
    def _dovccmdImpl(self, command, args, path, ssh_workdir):
        full_args = []
        full_env = os.environ.copy()

        if self._isSshPrivateKeyNeededForCommand(command):
            key_path = self._getSshPrivateKeyPath(ssh_workdir)
            self._downloadSshPrivateKey(key_path)

            known_hosts_path = None
            if self.sshHostKey is not None or self.sshKnownHosts is not None:
                known_hosts_path = self._getSshKnownHostsPath(ssh_workdir)
                self._downloadSshKnownHosts(known_hosts_path)

            self.adjustCommandParamsForSshPrivateKey(full_args, full_env,
                                                     key_path, None,
                                                     known_hosts_path)

        full_args += [command] + args

        res = yield utils.getProcessOutputAndValue(self.gitbin,
            full_args, path=path, env=full_env)
        (stdout, stderr, code) = res
        stdout = bytes2unicode(stdout, self.encoding)
        stderr = bytes2unicode(stderr, self.encoding)
        if code != 0:
            if code == 128:
                raise GitError('command {} in {} on repourl {} failed with exit code {}: {}'.format(
                               full_args, path, self.repourl, code, stderr))
            raise EnvironmentError('command {} in {} on repourl {} failed with exit code {}: {}'.format(
                                   full_args, path, self.repourl, code, stderr))
        return stdout.strip()
Example No. 2
    def perspective_try(self, branch, revision, patch, repository, project,
                        builderNames, who="", comment="", properties=None):
        log.msg("user %s requesting build on builders %s" % (self.username,
                                                             builderNames))
        if properties is None:
            properties = {}
        # build the intersection of the request and our configured list
        builderNames = self.scheduler.filterBuilderList(builderNames)
        if not builderNames:
            return

        reason = "'try' job"

        if who:
            reason += " by user {}".format(bytes2unicode(who))

        if comment:
            reason += " ({})".format(bytes2unicode(comment))

        sourcestamp = dict(
            branch=branch, revision=revision, repository=repository,
            project=project, patch_level=patch[0], patch_body=patch[1],
            patch_subdir='', patch_author=who or '',
            patch_comment=comment or '', codebase='',
        )           # note: no way to specify patch subdir - #1769

        requested_props = Properties()
        requested_props.update(properties, "try build")
        (bsid, brids) = yield self.scheduler.addBuildsetForSourceStamps(
            sourcestamps=[sourcestamp], reason=reason,
            properties=requested_props, builderNames=builderNames)

        # return a remotely-usable BuildSetStatus object
        bss = RemoteBuildSetStatus(self.scheduler.master, bsid, brids)
        return bss
Example No. 3
    def __init__(self, owner, slug,
                 branch=None,
                 pollInterval=10 * 60,
                 useTimestamps=True,
                 category=None,
                 project='',
                 pullrequest_filter=True,
                 encoding='utf-8',
                 pollAtLaunch=False
                 ):

        self.owner = owner
        self.slug = slug
        self.branch = branch
        base.PollingChangeSource.__init__(
            self, name='/'.join([owner, slug]), pollInterval=pollInterval, pollAtLaunch=pollAtLaunch)
        self.encoding = encoding

        if hasattr(pullrequest_filter, '__call__'):
            self.pullrequest_filter = pullrequest_filter
        else:
            self.pullrequest_filter = (lambda _: pullrequest_filter)

        self.lastChange = time.time()
        self.lastPoll = time.time()
        self.useTimestamps = useTimestamps
        self.category = category if callable(
            category) else bytes2unicode(category)
        self.project = bytes2unicode(project)
        self.initLock = defer.DeferredLock()
Example No. 4
 def addHTMLLog(self, name, html):
     logid = yield self.master.data.updates.addLog(self.stepid,
                                                   util.bytes2unicode(name), 'h')
     _log = self._newLog(name, 'h', logid)
     html = bytes2unicode(html)
     yield _log.addContent(html)
     yield _log.finish()
Example No. 5
 def decode_file(file):
     # git uses octal escape sequences inside quotes for non-ASCII paths
     match = re.match('^"(.*)"$', file)
     if match:
         file = bytes2unicode(match.groups()[0], encoding=self.encoding,
                              errors='unicode_escape')
     return bytes2unicode(file, encoding=self.encoding)
Example No. 6
    def setProperty(self, name, value, source, runtime=False):
        name = util.bytes2unicode(name)
        json.dumps(value)  # Let the exception propagate ...
        source = util.bytes2unicode(source)

        self.properties[name] = (value, source)
        if runtime:
            self.runtime.add(name)
Example No. 7
 def getResultSummary(self):
     src = bytes2unicode(self.src, errors='replace')
     dest = bytes2unicode(self.dest, errors='replace')
     copy = "{} to {}".format(src, dest)
     if self.results == SUCCESS:
         rv = 'Copied ' + copy
     else:
         rv = 'Copying ' + copy + ' failed.'
     return {'step': rv}
Example No. 8
    def __init__(self, repourl, branches=None, branch=None,
                 workdir=None, pollInterval=10 * 60,
                 gitbin='git', usetimestamps=True,
                 category=None, project=None,
                 pollinterval=-2, fetch_refspec=None,
                 encoding='utf-8', name=None, pollAtLaunch=False,
                 buildPushesWithNoCommits=False, only_tags=False,
                 sshPrivateKey=None):

        # for backward compatibility; the parameter used to be spelled with 'i'
        if pollinterval != -2:
            pollInterval = pollinterval

        if name is None:
            name = repourl

        base.PollingChangeSource.__init__(self, name=name,
                                          pollInterval=pollInterval,
                                          pollAtLaunch=pollAtLaunch,
                                          sshPrivateKey=sshPrivateKey)

        if project is None:
            project = ''

        if only_tags and (branch or branches):
            config.error("GitPoller: can't specify only_tags and branch/branches")
        if branch and branches:
            config.error("GitPoller: can't specify both branch and branches")
        elif branch:
            branches = [branch]
        elif not branches:
            if only_tags:
                branches = lambda ref: ref.startswith('refs/tags/')  # noqa: E731
            else:
                branches = ['master']

        self.repourl = repourl
        self.branches = branches
        self.encoding = encoding
        self.buildPushesWithNoCommits = buildPushesWithNoCommits
        self.gitbin = gitbin
        self.workdir = workdir
        self.usetimestamps = usetimestamps
        self.category = category if callable(
            category) else bytes2unicode(category, encoding=self.encoding)
        self.project = bytes2unicode(project, encoding=self.encoding)
        self.changeCount = 0
        self.lastRev = {}
        self.sshPrivateKey = sshPrivateKey
        self.setupGit()

        if fetch_refspec is not None:
            config.error("GitPoller: fetch_refspec is no longer supported. "
                         "Instead, only the given branches are downloaded.")

        if self.workdir is None:
            self.workdir = 'gitpoller-work'
Example No. 9
 def addHTMLLog(self, name, html):
     if self.stepid is None:
         raise BuildStepCancelled
     logid = yield self.master.data.updates.addLog(self.stepid,
                                                   util.bytes2unicode(name), 'h')
     _log = self._newLog(name, 'h', logid)
     html = bytes2unicode(html)
     yield _log.addContent(html)
     yield _log.finish()
Example No. 10
    def setupWorkerBuildirProperty(self, workerforbuilder):
        path_module = workerforbuilder.worker.path_module

        # navigate our way back to the L{buildbot.worker.Worker}
        # object that came from the config, and get its properties
        if workerforbuilder.worker.worker_basedir:
            builddir = path_module.join(
                bytes2unicode(workerforbuilder.worker.worker_basedir),
                bytes2unicode(self.builder.config.workerbuilddir))
            self.setProperty("builddir", builddir, "Worker")
Example No. 11
 def decode(x):
     if isinstance(x, bytes):
         return bytes2unicode(x)
     elif isinstance(x, (list, tuple)):
         return [bytes2unicode(y) for y in x]
     elif isinstance(x, dict):
         newArgs = {}
         for a, b in x.items():
             newArgs[decode(a)] = decode(b)
         return newArgs
     return x
Example No. 12
    def __init__(self, repourl, split_file=None,
                 svnuser=None, svnpasswd=None,
                 pollInterval=10 * 60, histmax=100,
                 svnbin='svn', revlinktmpl='', category=None,
                 project='', cachepath=None, pollinterval=-2,
                 extra_args=None, name=None, pollAtLaunch=False):

        # for backward compatibility; the parameter used to be spelled with 'i'
        if pollinterval != -2:
            pollInterval = pollinterval

        if name is None:
            name = repourl

        base.PollingChangeSource.__init__(self, name=name,
                                          pollInterval=pollInterval,
                                          pollAtLaunch=pollAtLaunch,
                                          svnuser=svnuser, svnpasswd=svnpasswd)

        if repourl.endswith("/"):
            repourl = repourl[:-1]  # strip the trailing slash
        self.repourl = repourl
        self.extra_args = extra_args
        self.split_file = split_file or split_file_alwaystrunk
        self.svnuser = svnuser
        self.svnpasswd = svnpasswd

        self.revlinktmpl = revlinktmpl

        # include environment variables required for ssh-agent auth
        self.environ = os.environ.copy()

        self.svnbin = svnbin
        self.histmax = histmax
        self._prefix = None
        self.category = category if callable(
            category) else util.bytes2unicode(category)
        self.project = util.bytes2unicode(project)

        self.cachepath = cachepath
        if self.cachepath and os.path.exists(self.cachepath):
            try:
                with open(self.cachepath, "r") as f:
                    self.last_change = int(f.read().strip())
                    log.msg("SVNPoller: SVNPoller(%s) setting last_change to %s" % (
                        self.repourl, self.last_change))
                # try writing it, too
                with open(self.cachepath, "w") as f:
                    f.write(str(self.last_change))
            except Exception:
                self.cachepath = None
                log.msg(("SVNPoller: SVNPoller(%s) cache file corrupt or unwriteable; " +
                         "skipping and not using") % self.repourl)
                log.err()
Example No. 13
 def test_getLogLines_bug3101(self):
     # regression test for #3101
     content = self.bug3101Content
     yield self.insertTestData(self.backgroundData + self.bug3101Rows)
     # overall content is the same, with '\n' padding at the end
     expected = bytes2unicode(self.bug3101Content + b'\n')
     self.assertEqual((yield self.db.logs.getLogLines(1470, 0, 99)),
                      expected)
     # try to fetch just one line
     expected = bytes2unicode(content.split(b'\n')[0] + b'\n')
     self.assertEqual((yield self.db.logs.getLogLines(1470, 0, 0)),
                      expected)
Example No. 14
    def _processChanges(self, unused_output):
        """Send info about pulled changes to the master and record current.

        HgPoller does the recording by moving the working dir to the head
        of the branch.
        We don't update the tree (unnecessary work and wasted space);
        instead, we simply store the current rev number in a file.
        Recall that hg rev numbers are local and incremental.
        """
        oid, current = yield self._getCurrentRev()
        # hg log on a range of revisions is never empty
        # also, if a numeric revision does not exist, a node may match.
        # Therefore, we have to check explicitly that branch head > current.
        head = yield self._getHead()
        if head is None:
            return
        elif current is not None and head <= current:
            return
        if current is None:
            # we could have used current = -1 convention as well (as hg does)
            revrange = '%d:%d' % (head, head)
        else:
            revrange = '%d:%s' % (current + 1, head)

        # two passes for hg log make parsing simpler (comments can span multiple lines)
        revListArgs = ['log', '-b', self.branch, '-r', revrange,
                       r'--template={rev}:{node}\n']
        results = yield utils.getProcessOutput(self.hgbin, revListArgs,
                                               path=self._absWorkdir(), env=os.environ, errortoo=False)
        results = results.decode(self.encoding)

        revNodeList = [rn.split(u':', 1) for rn in results.strip().split()]

        log.msg('hgpoller: processing %d changes: %r in %r'
                % (len(revNodeList), revNodeList, self._absWorkdir()))
        for rev, node in revNodeList:
            timestamp, author, files, comments = yield self._getRevDetails(
                node)
            yield self.master.data.updates.addChange(
                author=author,
                revision=text_type(node),
                files=files,
                comments=comments,
                when_timestamp=int(timestamp) if timestamp else None,
                branch=bytes2unicode(self.branch),
                category=bytes2unicode(self.category),
                project=bytes2unicode(self.project),
                repository=bytes2unicode(self.repourl),
                src=u'hg')
            # writing after addChange so that a rev is never missed,
            # but at once to avoid impact from later errors
            yield self._setCurrentRev(rev, oid=oid)
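
The r'--template={rev}:{node}\n' argument above makes hg log emit one rev:node pair per line, which keeps the parsing step trivial. A small illustration of that parsing with made-up output:

    # Example output as produced by --template={rev}:{node}\n
    # (revision numbers and node hashes are made up).
    results = "12:0a1b2c3d4e5f\n13:f5e4d3c2b1a0\n"
    revNodeList = [rn.split(':', 1) for rn in results.strip().split()]
    # -> [['12', '0a1b2c3d4e5f'], ['13', 'f5e4d3c2b1a0']]
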
Example No. 15
    def _get_payload(self, request):
        content = request.content.read()
        content = bytes2unicode(content)
        content_type = request.getHeader(b'Content-Type')
        content_type = bytes2unicode(content_type)
        if content_type.startswith('application/json'):
            payload = json.loads(content)
        else:
            raise ValueError('Unknown content type: {}'
                             .format(content_type))

        log.msg("Payload: {}".format(payload))

        return payload
Example No. 16
    def makeConfiguration(self, request):

        config = {}
        config.update(self.default)
        for k, v in self.ep.config.items():
            if k == 'color_scheme':
                config[k].update(v)
            else:
                config[k] = v

        for k, v in request.args.items():
            k = bytes2unicode(k)
            config[k] = escape(bytes2unicode(v[0]))
        return config
Example No. 17
    def addChange(self, who=None, files=None, comments=None, **kwargs):
        # deprecated in 0.9.0; will be removed in 1.0.0
        log.msg("WARNING: change source is using deprecated "
                "self.master.addChange method; this method will disappear in "
                "Buildbot-1.0.0")
        # handle positional arguments
        kwargs['who'] = who
        kwargs['files'] = files
        kwargs['comments'] = comments

        def handle_deprec(oldname, newname):
            if oldname in kwargs:
                old = kwargs.pop(oldname)
                if old is not None:
                    if kwargs.get(newname) is None:
                        log.msg("WARNING: change source is using deprecated "
                                "addChange parameter '%s'" % oldname)
                        return old
                    raise TypeError("Cannot provide '%s' and '%s' to addChange"
                                    % (oldname, newname))
            return kwargs.get(newname)

        kwargs['author'] = handle_deprec("who", "author")
        kwargs['when_timestamp'] = handle_deprec("when", "when_timestamp")

        # timestamp must be an epoch timestamp now
        if isinstance(kwargs.get('when_timestamp'), datetime.datetime):
            kwargs['when_timestamp'] = datetime2epoch(kwargs['when_timestamp'])

        # unicodify stuff
        for k in ('comments', 'author', 'revision', 'branch', 'category',
                  'revlink', 'repository', 'codebase', 'project'):
            if k in kwargs:
                kwargs[k] = bytes2unicode(kwargs[k])
        if kwargs.get('files'):
            kwargs['files'] = [bytes2unicode(f)
                               for f in kwargs['files']]
        if kwargs.get('properties'):
            kwargs['properties'] = dict((bytes2unicode(k), v)
                                        for k, v in iteritems(kwargs['properties']))

        # pass the converted call on to the data API
        changeid = yield self.data.updates.addChange(**kwargs)

        # and turn that changeid into a change object, since that's what
        # callers expected (and why this method was deprecated)
        chdict = yield self.db.changes.getChange(changeid)
        change = yield changes.Change.fromChdict(self, chdict)
        defer.returnValue(change)
Example No. 18
 def _convert_nonzero_to_failure(res,
                                 command,
                                 args,
                                 path):
     "utility to handle the result of getProcessOutputAndValue"
     (stdout, stderr, code) = res
     stdout = bytes2unicode(stdout, self.encoding)
     stderr = bytes2unicode(stderr, self.encoding)
     if code != 0:
         if code == 128:
             raise GitError('command %s %s in %s on repourl %s failed with exit code %d: %s'
                            % (command, args, path, self.repourl, code, stderr))
         raise EnvironmentError('command %s %s in %s on repourl %s failed with exit code %d: %s'
                                % (command, args, path, self.repourl, code, stderr))
     return stdout.strip()
Example No. 19
    def _get_payload(self, request):
        content = request.content.read()
        content = bytes2unicode(content)

        signature = request.getHeader(_HEADER_SIGNATURE)
        signature = bytes2unicode(signature)

        if not signature and self._strict:
            raise ValueError('Request has no required signature')

        if self._secret and signature:
            try:
                hash_type, hexdigest = signature.split('=')
            except ValueError:
                raise ValueError(
                    'Wrong signature format: {}'.format(signature))

            if hash_type != 'sha1':
                raise ValueError('Unknown hash type: {}'.format(hash_type))

            mac = hmac.new(unicode2bytes(self._secret),
                           msg=unicode2bytes(content),
                           digestmod=sha1)

            def _cmp(a, b):
                try:
                    # try the more secure compare_digest() first
                    from hmac import compare_digest
                    return compare_digest(a, b)
                except ImportError:  # pragma: no cover
                    # and fallback to the insecure simple comparison otherwise
                    return a == b

            if not _cmp(bytes2unicode(mac.hexdigest()), hexdigest):
                raise ValueError('Hash mismatch')

        content_type = request.getHeader(b'Content-Type')

        if content_type == b'application/json':
            payload = json.loads(content)
        elif content_type == b'application/x-www-form-urlencoded':
            payload = json.loads(bytes2unicode(request.args[b'payload'][0]))
        else:
            raise ValueError('Unknown content type: {}'.format(content_type))

        log.msg("Payload: {}".format(payload), logLevel=logging.DEBUG)

        return payload
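
A handler like this expects the signature to be an HMAC-SHA1 over the raw request body, sent as 'sha1=<hexdigest>'. A minimal sketch of how a test client might build a matching request, assuming the GitHub-style X-Hub-Signature header name and a placeholder secret:

    import hmac
    import json
    from hashlib import sha1

    secret = b'webhook-secret'          # placeholder shared secret
    body = json.dumps({'action': 'opened'}).encode('utf-8')

    # "sha1=" + hex digest of HMAC-SHA1 over the raw body, as verified above
    signature = 'sha1=' + hmac.new(secret, msg=body, digestmod=sha1).hexdigest()
    headers = {'X-Hub-Signature': signature,
               'Content-Type': 'application/json'}
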
Example No. 20
    def handleJobFile(self, filename, f):
        try:
            parsed_job = self.parseJob(f)
            builderNames = parsed_job['builderNames']
        except BadJobfile:
            log.msg("%s reports a bad jobfile in %s" % (self, filename))
            log.err()
            return defer.succeed(None)

        # Validate/fixup the builder names.
        builderNames = self.filterBuilderList(builderNames)
        if not builderNames:
            log.msg(
                "incoming Try job did not specify any allowed builder names")
            return defer.succeed(None)

        who = ""
        if parsed_job['who']:
            who = parsed_job['who']

        comment = ""
        if parsed_job['comment']:
            comment = parsed_job['comment']

        sourcestamp = dict(branch=parsed_job['branch'],
                           codebase='',
                           revision=parsed_job['baserev'],
                           patch_body=parsed_job['patch_body'],
                           patch_level=parsed_job['patch_level'],
                           patch_author=who,
                           patch_comment=comment,
                           # TODO: can't set this remotely - #1769
                           patch_subdir='',
                           project=parsed_job['project'],
                           repository=parsed_job['repository'])
        reason = "'try' job"
        if parsed_job['who']:
            reason += " by user {}".format(bytes2unicode(parsed_job['who']))
        properties = parsed_job['properties']
        requested_props = Properties()
        requested_props.update(properties, "try build")

        return self.addBuildsetForSourceStamps(
            sourcestamps=[sourcestamp],
            reason=reason,
            external_idstring=bytes2unicode(parsed_job['jobid']),
            builderNames=builderNames,
            properties=requested_props)
Example No. 21
    def getChanges(self, request):
        """
        Take the logic from the change hook, and then delegate it
        to the proper handler

        We use the buildbot plugin mechanisms to find out about dialects

        and call getChanges()

        the return value is a list of changes

        if DIALECT is unspecified, a sample implementation is provided
        """
        uriRE = re.search(r'^/change_hook/?([a-zA-Z0-9_]*)', bytes2unicode(request.uri))

        if not uriRE:
            log.msg("URI doesn't match change_hook regex: %s" % request.uri)
            raise ValueError(
                "URI doesn't match change_hook regex: %s" % request.uri)

        changes = []
        src = None

        # Was there a dialect provided?
        if uriRE.group(1):
            dialect = uriRE.group(1)
        else:
            dialect = 'base'

        handler = self.makeHandler(dialect)
        changes, src = yield handler.getChanges(request)
        return (changes, src)
Example No. 22
 def getEndpoint(self, request, method, params):
     # note that trailing slashes are not allowed
     request_postpath = tuple(bytes2unicode(p) for p in request.postpath)
     yield self.master.www.assertUserAllowed(request, request_postpath,
                                             method, params)
     ret = yield self.master.data.getEndpoint(request_postpath)
     return ret
Example No. 23
 def checkFields(fields, negOk=False):
     for field in fields:
         k = bytes2unicode(field)
         if k[0] == '-' and negOk:
             k = k[1:]
         if k not in entityType.fieldNames:
             raise BadRequest("no such field '{}'".format(k))
Example No. 24
    def testBasicWorker2_16(self):
        s = transfer.JSONPropertiesDownload("props.json")
        s.build = Mock()
        props = Properties()
        props.setProperty('key1', 'value1', 'test')
        s.build.getProperties.return_value = props
        s.build.getWorkerCommandVersion.return_value = '2.16'
        ss = Mock()
        ss.asDict.return_value = dict(revision="12345")
        s.build.getAllSourceStamps.return_value = [ss]

        s.worker = Mock()
        s.remote = Mock()

        s.start()

        for c in s.remote.method_calls:
            name, command, args = c
            commandName = command[3]
            kwargs = command[-1]
            if commandName == 'downloadFile':
                self.assertEqual(kwargs['slavedest'], 'props.json')
                reader = kwargs['reader']
                data = reader.remote_read(100)
                data = bytes2unicode(data)
                actualJson = json.loads(data)
                expectedJson = dict(sourcestamps=[ss.asDict()], properties={'key1': 'value1'})
                self.assertEqual(actualJson, expectedJson)
                break
        else:
            raise ValueError("No downloadFile command found")
Example No. 25
    def requestAvatar(self, username, mind, interface):
        assert interface == pb.IPerspective
        username = bytes2unicode(username)
        if username not in self.users:
            d = defer.succeed(None)  # no perspective
        else:
            _, afactory = self.users.get(username)
            d = defer.maybeDeferred(afactory, mind, username)

        # check that we got a perspective
        @d.addCallback
        def check(persp):
            if not persp:
                raise ValueError("no perspective for '%s'" % username)
            return persp

        # call the perspective's attached(mind)
        @d.addCallback
        def call_attached(persp):
            d = defer.maybeDeferred(persp.attached, mind)
            d.addCallback(lambda _: persp)  # keep returning the perspective
            return d

        # return the tuple requestAvatar is expected to return
        @d.addCallback
        def done(persp):
            return (pb.IPerspective, persp, lambda: persp.detached(mind))

        return d
Example No. 26
    def stepDone(self, results, step):
        """This method is called when the BuildStep completes. It is passed a
        status object from the BuildStep and is responsible for merging the
        Step's results into those of the overall Build."""

        terminate = False
        text = None
        if isinstance(results, tuple):
            results, text = results
        assert isinstance(results, type(SUCCESS)), "got %r" % (results,)
        summary = yield step.getBuildResultSummary()
        if 'build' in summary:
            text = [summary['build']]
        log.msg(" step '%s' complete: %s (%s)" % (step.name, statusToString(results), text))
        if text:
            self.text.extend(text)
            self.master.data.updates.setBuildStateString(self.buildid,
                                                         bytes2unicode(" ".join(self.text)))
        self.results, terminate = computeResultAndTermination(step, results,
                                                              self.results)
        if not self.conn:
            # force the results to retry if the connection was lost
            self.results = RETRY
            terminate = True
        defer.returnValue(terminate)
Example No. 27
    def test_createEmail_message_with_patch_and_log_containing_unicode(self):
        _, builds = yield self.setupBuildResults(SUCCESS)
        msgdict = create_msgdict()
        patches = [{'body': u'\u00E5\u00E4\u00F6'}]
        logs = yield self.master.data.get(("steps", 50, 'logs'))
        for l in logs:
            l['stepname'] = "fakestep"
            l['content'] = yield self.master.data.get(("logs", l['logid'], 'contents'))

        mn = yield self.setupMailNotifier('*****@*****.**', addLogs=True)
        m = yield mn.createEmail(msgdict, u'builder-n\u00E5me',
                                 u'project-n\u00E5me', SUCCESS,
                                 builds, patches, logs)

        try:
            s = m.as_string()
            # python 2.6 defaults to base64 transfer encoding for utf-8
            if "base64" not in s:
                self.assertIn("Unicode log", s)
            else:  # b64encode and remove '=' padding (hence [:-1])
                logStr = bytes2unicode(base64.b64encode(b"Unicode log")[:-1])
                self.assertIn(logStr, s)

            self.assertIn(
                'Content-Disposition: attachment; filename="fakestep.stdio"', s)
        except UnicodeEncodeError:
            self.fail('Failed to call as_string() on email message.')
Example No. 28
    def __init__(self, repourl, branch='default',
                 workdir=None, pollInterval=10 * 60,
                 hgbin='hg', usetimestamps=True,
                 category=None, project='', pollinterval=-2,
                 encoding='utf-8', name=None, pollAtLaunch=False):

        # for backward compatibility; the parameter used to be spelled with 'i'
        if pollinterval != -2:
            pollInterval = pollinterval

        if name is None:
            name = "%s[%s]" % (repourl, branch)

        self.repourl = repourl
        self.branch = branch
        base.PollingChangeSource.__init__(
            self, name=name, pollInterval=pollInterval, pollAtLaunch=pollAtLaunch)
        self.encoding = encoding
        self.lastChange = time.time()
        self.lastPoll = time.time()
        self.hgbin = hgbin
        self.workdir = workdir
        self.usetimestamps = usetimestamps
        self.category = category if callable(
            category) else bytes2unicode(category)
        self.project = project
        self.commitInfo = {}
        self.initLock = defer.DeferredLock()

        if self.workdir is None:
            config.error("workdir is mandatory for now in HgPoller")
Example No. 29
    def render_GET(self, request):
        def decode(x):
            if isinstance(x, bytes):
                return bytes2unicode(x)
            elif isinstance(x, (list, tuple)):
                return [bytes2unicode(y) for y in x]
            elif isinstance(x, dict):
                newArgs = {}
                for a, b in x.items():
                    newArgs[decode(a)] = decode(b)
                return newArgs
            return x

        args = decode(request.args)
        content_type = request.getHeader(b'content-type')
        if content_type == b"application/json":
            jsonBytes = request.content.read()
            jsonStr = bytes2unicode(jsonBytes)
            args['json_received'] = json.loads(jsonStr)

        data = json.dumps(args)
        data = unicode2bytes(data)
        request.setHeader(b'content-type', b'application/json')
        request.setHeader(b'content-length', intToBytes(len(data)))
        if request.method == b'HEAD':
            return b''
        return data
Example No. 30
        def thd():
            url = self.tokenUri
            data = {'redirect_uri': self.loginUri, 'code': code,
                    'grant_type': self.grantType}
            auth = None
            if self.getTokenUseAuthHeaders:
                auth = (self.clientId, self.clientSecret)
            else:
                data.update(
                    {'client_id': self.clientId, 'client_secret': self.clientSecret})
            data.update(self.tokenUriAdditionalParams)
            response = requests.post(
                url, data=data, auth=auth, verify=self.sslVerify)
            response.raise_for_status()
            responseContent = bytes2unicode(response.content)
            try:
                content = json.loads(responseContent)
            except ValueError:
                content = parse_qs(responseContent)
                for k, v in iteritems(content):
                    content[k] = v[0]
            except TypeError:
                content = responseContent

            session = self.createSessionFromToken(content)
            return self.getUserInfoFromOAuthClient(session)
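
Providers differ in how they return the token: some reply with JSON, others with a form-encoded string, which is why the ValueError fallback above switches to parse_qs. For illustration, that fallback turns a form-encoded body into the same kind of flat dict (values are made up):

    from urllib.parse import parse_qs

    responseContent = 'access_token=abc123&token_type=bearer'
    content = parse_qs(responseContent)
    # parse_qs wraps every value in a list: {'access_token': ['abc123'], ...}
    content = {k: v[0] for k, v in content.items()}
    # -> {'access_token': 'abc123', 'token_type': 'bearer'}
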
Example No. 31
 def test_nonascii(self):
     self.assertRaises(
         UnicodeDecodeError,
         lambda: util.bytes2unicode(b'a\x85', encoding='ascii'))
Example No. 32
    def test_bytes2unicode(self):
        rv1 = util.bytes2unicode(b'abcd')
        rv2 = util.bytes2unicode('efgh')

        self.assertEqual(type(rv1), text_type)
        self.assertEqual(type(rv2), text_type)
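
Together with the non-ASCII test above, this pins down the conversion contract: bytes are decoded with the given encoding (and decoding errors propagate), while text passes through as text. A minimal sketch of a helper with that behavior, written only to illustrate the contract rather than the actual buildbot.util implementation (the None pass-through is an assumption based on call sites such as the category arguments in other examples):

    def bytes_to_unicode(x, encoding='utf-8', errors='strict'):
        # Decode bytes with the requested encoding; leave text (and None) untouched.
        if isinstance(x, bytes):
            return x.decode(encoding, errors)
        if x is None:
            return None
        return str(x)

    assert bytes_to_unicode(b'abcd') == 'abcd'
    assert bytes_to_unicode('efgh') == 'efgh'
    # bytes_to_unicode(b'a\x85', encoding='ascii') raises UnicodeDecodeError
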
Example No. 33
    def create_changes(self, new_logentries):
        changes = []

        for el in new_logentries:
            revision = str(el.getAttribute("revision"))

            revlink = ''

            if self.revlinktmpl and revision:
                revlink = self.revlinktmpl % urlquote_plus(revision)
                revlink = str(revlink)

            log.msg(f"Adding change revision {revision}")
            author = self._get_text(el, "author")
            comments = self._get_text(el, "msg")
            # there is a "date" field, but it provides localtime in the
            # repository's timezone, whereas we care about buildmaster's
            # localtime (since this will get used to position the boxes on
            # the Waterfall display, etc). So ignore the date field, and
            # addChange will fill in with the current time
            branches = {}
            try:
                pathlist = el.getElementsByTagName("paths")[0]
            except IndexError:  # weird, we got an empty revision
                log.msg("ignoring commit with no paths")
                continue

            for p in pathlist.getElementsByTagName("path"):
                kind = p.getAttribute("kind")
                action = p.getAttribute("action")
                path = "".join([t.data for t in p.childNodes])
                if path.startswith("/"):
                    path = path[1:]
                if kind == "dir" and not path.endswith("/"):
                    path += "/"
                where = self._transform_path(path)

                # if 'where' is None, the file was outside any project that
                # we care about and we should ignore it
                if where:
                    branch = where.get("branch", None)
                    filename = where["path"]
                    if branch not in branches:
                        branches[branch] = {
                            'files': [], 'number_of_directories': 0}
                    if filename == "":
                        # root directory of branch
                        branches[branch]['files'].append(filename)
                        branches[branch]['number_of_directories'] += 1
                    elif filename.endswith("/"):
                        # subdirectory of branch
                        branches[branch]['files'].append(filename[:-1])
                        branches[branch]['number_of_directories'] += 1
                    else:
                        branches[branch]['files'].append(filename)

                    if "action" not in branches[branch]:
                        branches[branch]['action'] = action

                    for key in ("repository", "project", "codebase"):
                        if key in where:
                            branches[branch][key] = where[key]

            for branch, info in branches.items():
                action = info['action']
                files = info['files']

                number_of_directories_changed = info['number_of_directories']
                number_of_files_changed = len(files)

                if (action == 'D' and number_of_directories_changed == 1 and
                        number_of_files_changed == 1 and files[0] == ''):
                    log.msg(f"Ignoring deletion of branch '{branch}'")
                else:
                    chdict = dict(
                        author=author,
                        committer=None,
                        # weakly assume filenames are utf-8
                        files=[bytes2unicode(f, 'utf-8', 'replace')
                               for f in files],
                        comments=comments,
                        revision=revision,
                        branch=util.bytes2unicode(branch),
                        revlink=revlink,
                        category=self.category,
                        repository=util.bytes2unicode(
                            info.get('repository', self.repourl)),
                        project=util.bytes2unicode(
                            info.get('project', self.project)),
                        codebase=util.bytes2unicode(
                            info.get('codebase', None)))
                    changes.append(chdict)

        return changes
Example No. 34
 def thd():
     res = requests.get('http://localhost:5000/auth/login')
     content = bytes2unicode(res.content)
     webbrowser.open(content)
Example No. 35
    def parseJob(self, f):
        # jobfiles are serialized build requests. Each is a list of
        # serialized netstrings, in the following order:
        #  format version number:
        #  "1" the original
        #  "2" introduces project and repository
        #  "3" introduces who
        #  "4" introduces comment
        #  "5" introduces properties and JSON serialization of values after
        #      version
        #  jobid: arbitrary string, used to find the buildSet later
        #  branch: branch name, "" for default-branch
        #  baserev: revision, "" for HEAD
        #  patch_level: usually "1"
        #  patch_body: patch to be applied for build
        #  repository
        #  project
        #  who: user requesting build
        #  comment: comment from user about diff and/or build
        #  builderNames: list of builder names
        #  properties: dict of build properties
        p = netstrings.NetstringParser()
        f.seek(0, 2)
        if f.tell() > basic.NetstringReceiver.MAX_LENGTH:
            raise BadJobfile(
                "The patch size is greater that NetStringReceiver.MAX_LENGTH. "
                "Please Set this higher in the master.cfg")
        f.seek(0, 0)
        try:
            p.feed(f.read())
        except basic.NetstringParseError as e:
            raise BadJobfile("unable to parse netstrings") from e
        if not p.strings:
            raise BadJobfile("could not find any complete netstrings")
        ver = bytes2unicode(p.strings.pop(0))

        v1_keys = ['jobid', 'branch', 'baserev', 'patch_level', 'patch_body']
        v2_keys = v1_keys + ['repository', 'project']
        v3_keys = v2_keys + ['who']
        v4_keys = v3_keys + ['comment']
        keys = [v1_keys, v2_keys, v3_keys, v4_keys]
        # v5 introduces properties and uses JSON serialization

        parsed_job = {}

        def extract_netstrings(p, keys):
            for i, key in enumerate(keys):
                parsed_job[key] = bytes2unicode(p.strings[i])

        def postprocess_parsed_job():
            # apply defaults and handle type casting
            parsed_job['branch'] = parsed_job['branch'] or None
            parsed_job['baserev'] = parsed_job['baserev'] or None
            parsed_job['patch_level'] = int(parsed_job['patch_level'])
            for key in 'repository project who comment'.split():
                parsed_job[key] = parsed_job.get(key, '')
            parsed_job['properties'] = parsed_job.get('properties', {})

        if ver <= "4":
            i = int(ver) - 1
            extract_netstrings(p, keys[i])
            parsed_job['builderNames'] = [
                bytes2unicode(s) for s in p.strings[len(keys[i]):]
            ]
            postprocess_parsed_job()
        elif ver == "5":
            try:
                data = bytes2unicode(p.strings[0])
                parsed_job = json.loads(data)
            except ValueError as e:
                raise BadJobfile("unable to parse JSON") from e
            postprocess_parsed_job()
        else:
            raise BadJobfile("unknown version '{}'".format(ver))
        return parsed_job
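
As the comment block above describes, a jobfile is a sequence of netstrings whose first element is the format version. A rough sketch of how a version-5 jobfile could be assembled; the field values and the netstring helper are hypothetical, shown only to make the framing concrete:

    import json

    def netstring(payload):
        # Classic netstring framing: b"<length>:<data>,"
        return b'%d:%s,' % (len(payload), payload)

    job = {                                  # hypothetical v5 job contents
        'jobid': 'abc123', 'branch': 'master', 'baserev': '',
        'patch_level': 1, 'patch_body': '--- a\n+++ b\n',
        'repository': '', 'project': '', 'who': 'dev', 'comment': '',
        'builderNames': ['runtests'], 'properties': {},
    }
    jobfile = netstring(b'5') + netstring(json.dumps(job).encode('utf-8'))
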
Example No. 36
 def valueFromString(self, arg):
     val = util.bytes2unicode(arg)
     if not self.identRe.match(val) or not 0 < len(val) <= self.len:
         raise TypeError
     return val
Example No. 37
 def addHeader(self, data):
     data = bytes2unicode(data)
     self.chunks.append((self.HEADER, data))
     self._delay(lambda: self.asyncLogfile.addHeader(data))
Example No. 38
    def decodeResultSpec(self, request, endpoint):
        reqArgs = request.args

        def checkFields(fields, negOk=False):
            for field in fields:
                k = bytes2unicode(field)
                if k[0] == '-' and negOk:
                    k = k[1:]
                if k not in entityType.fieldNames:
                    raise BadRequest("no such field '{}'".format(k))

        entityType = endpoint.rtype.entityType
        limit = offset = order = fields = None
        filters, properties = [], []
        for arg in reqArgs:
            argStr = bytes2unicode(arg)
            if arg == b'order':
                order = tuple([bytes2unicode(o) for o in reqArgs[arg]])
                checkFields(order, True)
            elif arg == b'field':
                fields = reqArgs[arg]
                checkFields(fields, False)
            elif arg == b'limit':
                try:
                    limit = int(reqArgs[arg][0])
                except Exception:
                    raise BadRequest('invalid limit')
            elif arg == b'offset':
                try:
                    offset = int(reqArgs[arg][0])
                except Exception:
                    raise BadRequest('invalid offset')
            elif arg == b'property':
                try:
                    props = []
                    for v in reqArgs[arg]:
                        if not isinstance(v, (bytes, str)):
                            raise TypeError("Invalid type {} for {}".format(
                                type(v), v))
                        props.append(bytes2unicode(v))
                except Exception:
                    raise BadRequest(
                        'invalid property value for {}'.format(arg))
                properties.append(resultspec.Property(arg, 'eq', props))
            elif argStr in entityType.fieldNames:
                field = entityType.fields[argStr]
                try:
                    values = [field.valueFromString(v) for v in reqArgs[arg]]
                except Exception:
                    raise BadRequest(
                        'invalid filter value for {}'.format(argStr))

                filters.append(resultspec.Filter(argStr, 'eq', values))
            elif '__' in argStr:
                field, op = argStr.rsplit('__', 1)
                args = reqArgs[arg]
                operators = (resultspec.Filter.singular_operators if len(args)
                             == 1 else resultspec.Filter.plural_operators)
                if op in operators and field in entityType.fieldNames:
                    fieldType = entityType.fields[field]
                    try:
                        values = [
                            fieldType.valueFromString(v) for v in reqArgs[arg]
                        ]
                    except Exception:
                        raise BadRequest(
                            'invalid filter value for {}'.format(argStr))
                    filters.append(resultspec.Filter(field, op, values))
            else:
                raise BadRequest(
                    "unrecognized query parameter '{}'".format(argStr))

        # if ordering or filtering is on a field that's not in fields, bail out
        if fields:
            fields = [bytes2unicode(f) for f in fields]
            fieldsSet = set(fields)
            if order and {o.lstrip('-') for o in order} - fieldsSet:
                raise BadRequest("cannot order on un-selected fields")
            for filter in filters:
                if filter.field not in fieldsSet:
                    raise BadRequest("cannot filter on un-selected fields")

        # build the result spec
        rspec = resultspec.ResultSpec(fields=fields,
                                      limit=limit,
                                      offset=offset,
                                      order=order,
                                      filters=filters,
                                      properties=properties)

        # for singular endpoints, only allow fields
        if not endpoint.isCollection:
            if rspec.filters:
                raise BadRequest("this is not a collection")

        return rspec
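
For reference, the decoder above works on the ordinary query-string arguments that Twisted collects into request.args. A request equivalent to ?limit=2&order=-started_at&builderid__eq=10 would arrive roughly as the dict below; the field names are only an assumption about the endpoint's entity type:

    reqArgs = {
        b'limit': [b'2'],              # handled by the 'limit' branch
        b'order': [b'-started_at'],    # descending order on a field
        b'builderid__eq': [b'10'],     # '<field>__<operator>' filter syntax
    }
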
Example No. 39
    def __init__(self, name=None, workername=None, workernames=None,
                 builddir=None, workerbuilddir=None, factory=None,
                 tags=None,
                 nextWorker=None, nextBuild=None, locks=None, env=None,
                 properties=None, collapseRequests=None, description=None,
                 canStartBuild=None, defaultProperties=None
                 ):
        # name is required, and can't start with '_'
        if not name or type(name) not in (bytes, str):
            error("builder's name is required")
            name = '<unknown>'
        elif name[0] == '_' and name not in RESERVED_UNDERSCORE_NAMES:
            error("builder names must not start with an underscore: '{}'".format(name))
        try:
            self.name = util.bytes2unicode(name, encoding="ascii")
        except UnicodeDecodeError:
            error("builder names must be unicode or ASCII")

        # factory is required
        if factory is None:
            error("builder '{}' has no factory".format(name))
        from buildbot.process.factory import BuildFactory
        if factory is not None and not isinstance(factory, BuildFactory):
            error("builder '{}'s factory is not a BuildFactory instance".format(name))
        self.factory = factory

        # workernames can be a single worker name or a list, and should also
        # include workername, if given
        if isinstance(workernames, str):
            workernames = [workernames]
        if workernames:
            if not isinstance(workernames, list):
                error("builder '{}': workernames must be a list or a string".format(name))
        else:
            workernames = []

        if workername:
            if not isinstance(workername, str):
                error(("builder '{}': workername must be a string but it is {}"
                       ).format(name, repr(workername)))
            workernames = workernames + [workername]
        if not workernames:
            error("builder '{}': at least one workername is required".format(name))

        self.workernames = workernames

        # builddir defaults to name
        if builddir is None:
            builddir = safeTranslate(name)
            builddir = bytes2unicode(builddir)
        self.builddir = builddir

        # workerbuilddir defaults to builddir
        if workerbuilddir is None:
            workerbuilddir = builddir
        self.workerbuilddir = workerbuilddir

        # remainder are optional
        if tags:
            if not isinstance(tags, list):
                error("builder '{}': tags must be a list".format(name))
            bad_tags = any((tag for tag in tags if not isinstance(tag, str)))
            if bad_tags:
                error(
                    "builder '{}': tags list contains something that is not a string".format(name))

            if len(tags) != len(set(tags)):
                dupes = " ".join({x for x in tags if tags.count(x) > 1})
                error(
                    "builder '{}': tags list contains duplicate tags: {}".format(name, dupes))
        else:
            tags = []

        self.tags = tags

        self.nextWorker = nextWorker
        if nextWorker and not callable(nextWorker):
            error('nextWorker must be a callable')
        self.nextBuild = nextBuild
        if nextBuild and not callable(nextBuild):
            error('nextBuild must be a callable')
        self.canStartBuild = canStartBuild
        if canStartBuild and not callable(canStartBuild):
            error('canStartBuild must be a callable')

        self.locks = locks or []
        self.env = env or {}
        if not isinstance(self.env, dict):
            error("builder's env must be a dictionary")
        self.properties = properties or {}
        self.defaultProperties = defaultProperties or {}
        self.collapseRequests = collapseRequests

        self.description = description
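
Assuming this constructor belongs to Buildbot's BuilderConfig (the checks above match that class), a typical configuration call supplies at least a name, one worker name, and a factory; everything else falls back to the defaults handled above. A sketch with placeholder names and an assumed import path:

    from buildbot.config import BuilderConfig           # assumed import location
    from buildbot.process.factory import BuildFactory   # as imported in the code above

    factory = BuildFactory()                             # empty factory, illustration only
    builder = BuilderConfig(name='runtests',
                            workernames=['worker1'],
                            factory=factory,
                            tags=['tests'])
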
Example No. 40
 def sendComment(self, pr_url, text):
     path = urlparse(unicode2bytes(pr_url)).path
     payload = {'text': text}
     return self._http.post(
         COMMENT_API_URL.format(path=bytes2unicode(path)), json=payload)
Example No. 41
 def setHost(self, host):
     self.host = bytes2unicode(host)
Example No. 42
    def __init__(self,
                 p4port=None,
                 p4user=None,
                 p4passwd=None,
                 p4base='//',
                 p4bin='p4',
                 split_file=lambda branchfile: (None, branchfile),
                 pollInterval=60 * 10,
                 histmax=None,
                 pollinterval=-2,
                 encoding='utf8',
                 project=None,
                 name=None,
                 use_tickets=False,
                 ticket_login_interval=60 * 60 * 24,
                 server_tz=None,
                 pollAtLaunch=False,
                 revlink=lambda branch, revision: (u''),
                 resolvewho=lambda who: (who)):

        # for backward compatibility; the parameter used to be spelled with 'i'
        if pollinterval != -2:
            pollInterval = pollinterval

        if name is None:
            name = "P4Source:%s:%s" % (p4port, p4base)

        base.PollingChangeSource.__init__(self,
                                          name=name,
                                          pollInterval=pollInterval,
                                          pollAtLaunch=pollAtLaunch)

        if project is None:
            project = ''

        if use_tickets and not p4passwd:
            config.error(
                "You need to provide a P4 password to use ticket authentication"
            )

        if not callable(revlink):
            config.error("You need to provide a valid callable for revlink")

        if not callable(resolvewho):
            config.error("You need to provide a valid callable for resolvewho")

        self.p4port = p4port
        self.p4user = p4user
        self.p4passwd = p4passwd
        self.p4base = p4base
        self.p4bin = p4bin
        self.split_file = split_file
        self.encoding = encoding
        self.project = util.bytes2unicode(project)
        self.use_tickets = use_tickets
        self.ticket_login_interval = ticket_login_interval
        self.revlink_callable = revlink
        self.resolvewho_callable = resolvewho
        self.server_tz = dateutil.tz.gettz(server_tz) if server_tz else None
        if server_tz is not None and self.server_tz is None:
            raise P4PollerError(
                "Failed to get timezone from server_tz string '{}'".format(
                    server_tz))

        self._ticket_passwd = None
        self._ticket_login_counter = 0
Example No. 43
 def test_unicode(self):
     rv = util.bytes2unicode(u'\N{SNOWMAN}', encoding='ascii')
     self.assertEqual((rv, type(rv)), (u'\N{SNOWMAN}', text_type))
Example No. 44
 def setAdmin(self, admin):
     self.admin = bytes2unicode(admin)
Example No. 45
 def addStderr(self, data):
     data = bytes2unicode(data)
     self.chunks.append((self.STDERR, data))
     self._delay(lambda: self.asyncLogfile.addStderr(data))
Example No. 46
    def renderRest(self, request):
        def writeError(msg, errcode=404, jsonrpccode=None):
            if self.debug:
                log.msg("REST error: {}".format(msg))
            request.setResponseCode(errcode)
            request.setHeader(b'content-type', b'text/plain; charset=utf-8')
            msg = bytes2unicode(msg)
            data = json.dumps(dict(error=msg))
            data = unicode2bytes(data)
            request.write(data)

        with self.handleErrors(writeError):
            ep, kwargs = yield self.getEndpoint(request,
                                                bytes2unicode(request.method),
                                                {})

            rspec = self.decodeResultSpec(request, ep)
            data = yield ep.get(rspec, kwargs)
            if data is None:
                msg = ("not found while getting from {} with "
                       "arguments {} and {}").format(repr(ep), repr(rspec),
                                                     str(kwargs))
                msg = unicode2bytes(msg)
                writeError(msg, errcode=404)
                return

            if ep.isRaw:
                self.encodeRaw(data, request)
                return

            # post-process any remaining parts of the resultspec
            data = rspec.apply(data)

            # annotate the result with some metadata
            meta = {}
            if ep.isCollection:
                offset, total = data.offset, data.total
                if offset is None:
                    offset = 0

                # add total, if known
                if total is not None:
                    meta['total'] = total

                # get the real list instance out of the ListResult
                data = data.data
            else:
                data = [data]

            typeName = ep.rtype.plural
            data = {typeName: data, 'meta': meta}

            # set up the content type and formatting options; if the request
            # accepts text/html or text/plain, the JSON will be rendered in a
            # readable, multiline format.

            if b'application/json' in (request.getHeader(b'accept') or b''):
                compact = True
                request.setHeader(b"content-type",
                                  b'application/json; charset=utf-8')
            else:
                compact = False
                request.setHeader(b"content-type",
                                  b'text/plain; charset=utf-8')

            # set up caching
            if self.cache_seconds:
                now = datetime.datetime.utcnow()
                expires = now + datetime.timedelta(seconds=self.cache_seconds)
                expiresBytes = unicode2bytes(
                    expires.strftime("%a, %d %b %Y %H:%M:%S GMT"))
                request.setHeader(b"Expires", expiresBytes)
                request.setHeader(b"Pragma", b"no-cache")

            # filter out blanks if necessary and render the data
            if compact:
                data = json.dumps(data,
                                  default=toJson,
                                  sort_keys=True,
                                  separators=(',', ':'))
            else:
                data = json.dumps(data,
                                  default=toJson,
                                  sort_keys=True,
                                  indent=2)

            if request.method == b"HEAD":
                request.setHeader(b"content-length",
                                  unicode2bytes(str(len(data))))
            else:
                data = unicode2bytes(data)
                request.write(data)
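
The collection branch above wraps the (possibly filtered) list under the resource type's plural name next to a meta block, so a successful JSON response has roughly this shape (names and values are illustrative):

    response = {
        'builders': [                  # ep.rtype.plural for a builders endpoint
            {'builderid': 1, 'name': 'runtests'},
        ],
        'meta': {'total': 1},          # 'total' present only when known
    }
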
Example No. 47
 def valueFromString(self, arg):
     val = util.bytes2unicode(arg)
     return val
Example No. 48
 def gettype(self):
     mimetype, options = cgi.parse_header(bytes2unicode(self.typeheader))
     return mimetype
Example No. 49
    def parse(self, m, prefix=None):
        """Parse messages sent by the 'buildbot-cvs-mail' program.
        """
        # The mail is sent from the person doing the checkin. Assume that the
        # local username is enough to identify them (this assumes a one-server
        # cvs-over-rsh environment rather than the server-dirs-shared-over-NFS
        # model)
        _, addr = parseaddr(m["from"])
        if not addr:
            # no From means this message isn't from buildbot-cvs-mail
            return None
        at = addr.find("@")
        if at == -1:
            author = addr  # might still be useful
        else:
            author = addr[:at]
        author = util.bytes2unicode(author, encoding="ascii")

        # CVS accepts RFC822 dates. buildbot-cvs-mail adds the date as
        # part of the mail header, so use that.
        # This assumes cvs is being accessed via ssh or pserver, so the time
        # will be the CVS server's time.

        # calculate a "revision" based on that timestamp, or the current time
        # if we're unable to parse the date.
        log.msg('Processing CVS mail')
        dateTuple = parsedate_tz(m["date"])
        if dateTuple is None:
            when = util.now()
        else:
            when = mktime_tz(dateTuple)

        theTime = datetime.datetime.utcfromtimestamp(float(when))
        rev = theTime.strftime('%Y-%m-%d %H:%M:%S')

        catRE = re.compile(r'^Category:\s*(\S.*)')
        cvsRE = re.compile(r'^CVSROOT:\s*(\S.*)')
        cvsmodeRE = re.compile(r'^Cvsmode:\s*(\S.*)')
        filesRE = re.compile(r'^Files:\s*(\S.*)')
        modRE = re.compile(r'^Module:\s*(\S.*)')
        pathRE = re.compile(r'^Path:\s*(\S.*)')
        projRE = re.compile(r'^Project:\s*(\S.*)')
        singleFileRE = re.compile(r'(.*) (NONE|\d(\.|\d)+) (NONE|\d(\.|\d)+)')
        tagRE = re.compile(r'^\s+Tag:\s*(\S.*)')
        updateRE = re.compile(r'^Update of:\s*(\S.*)')
        comments = ""
        branch = None
        cvsroot = None
        fileList = None
        files = []
        isdir = 0
        path = None
        project = None

        lines = list(body_line_iterator(m))
        while lines:
            line = lines.pop(0)
            m = catRE.match(line)
            if m:
                category = m.group(1)
                continue
            m = cvsRE.match(line)
            if m:
                cvsroot = m.group(1)
                continue
            m = cvsmodeRE.match(line)
            if m:
                cvsmode = m.group(1)
                continue
            m = filesRE.match(line)
            if m:
                fileList = m.group(1)
                continue
            m = modRE.match(line)
            if m:
                # We don't actually use this
                # module = m.group(1)
                continue
            m = pathRE.match(line)
            if m:
                path = m.group(1)
                continue
            m = projRE.match(line)
            if m:
                project = m.group(1)
                continue
            m = tagRE.match(line)
            if m:
                branch = m.group(1)
                continue
            m = updateRE.match(line)
            if m:
                # We don't actually use this
                # updateof = m.group(1)
                continue
            if line == "Log Message:\n":
                break

        # CVS 1.11 lists files as:
        #   repo/path file,old-version,new-version file2,old-version,new-version
        # Version 1.12 lists files as:
        #   file1 old-version new-version file2 old-version new-version
        #
        # files consists of tuples of 'file-name old-version new-version'
        # The versions are either dotted-decimal version numbers, ie 1.1
        # or NONE. New files are of the form 'NONE NUMBER', while removed
        # files are 'NUMBER NONE'. 'NONE' is a literal string
        # Parsing this instead of the file list in 'Added File:' etc.
        # makes it possible to handle files with embedded spaces, though
        # it could fail if the filename was 'bad 1.1 1.2'.
        # For cvs version 1.11, we expect
        #  my_module new_file.c,NONE,1.1
        #  my_module removed.txt,1.2,NONE
        #  my_module modified_file.c,1.1,1.2
        # While cvs version 1.12 gives us
        #  new_file.c NONE 1.1
        #  removed.txt 1.2 NONE
        #  modified_file.c 1.1 1.2
        # (a standalone sketch of this parsing follows after this example)

        if fileList is None:
            log.msg('CVSMaildirSource Mail with no files. Ignoring')
            return None  # We don't have any files. Email not from CVS

        if cvsmode == '1.11':
            # Please, no repo paths with spaces!
            m = re.search('([^ ]*) ', fileList)
            if m:
                path = m.group(1)
            else:
                log.msg(
                    'CVSMaildirSource can\'t get path from file list. Ignoring mail'
                )
                return None
            fileList = fileList[len(path):].strip()
            singleFileRE = re.compile(
                r'(.+?),(NONE|(?:\d+\.(?:\d+\.\d+\.)*\d+)),(NONE|(?:\d+\.(?:\d+\.\d+\.)*\d+))(?: |$)')  # noqa pylint: disable=line-too-long
        elif cvsmode == '1.12':
            singleFileRE = re.compile(
                r'(.+?) (NONE|(?:\d+\.(?:\d+\.\d+\.)*\d+)) (NONE|(?:\d+\.(?:\d+\.\d+\.)*\d+))(?: |$)')  # noqa pylint: disable=line-too-long
            if path is None:
                raise ValueError(
                    'CVSMaildirSource: cvs 1.12 requires a path. Check the cvs loginfo config'
                )
        else:
            raise ValueError(f'Expected cvsmode 1.11 or 1.12, got: {cvsmode}')

        log.msg(f"CVSMaildirSource processing filelist: {fileList}")
        while fileList:
            m = singleFileRE.match(fileList)
            if m:
                curFile = path + '/' + m.group(1)
                files.append(curFile)
                fileList = fileList[m.end():]
            else:
                log.msg('CVSMaildirSource no files matched regex. Ignoring')
                return None  # bail - we couldn't parse the files that changed
        # Now get comments
        while lines:
            line = lines.pop(0)
            comments += line

        comments = comments.rstrip() + "\n"
        if comments == '\n':
            comments = None
        return ('cvs',
                dict(author=author,
                     committer=None,
                     files=files,
                     comments=comments,
                     isdir=isdir,
                     when=when,
                     branch=branch,
                     revision=rev,
                     category=category,
                     repository=cvsroot,
                     project=project,
                     properties=self.properties))
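
The comment block above describes the two file-list formats the parser has to cope with. The same regex-driven loop, pulled out as a standalone sketch with the regexes copied from the snippet and invented sample data:

import re

# group 1 is the file name, groups 2 and 3 the old and new revisions
# (a dotted-decimal number or the literal string NONE)
RE_CVS_1_11 = re.compile(
    r'(.+?),(NONE|(?:\d+\.(?:\d+\.\d+\.)*\d+)),(NONE|(?:\d+\.(?:\d+\.\d+\.)*\d+))(?: |$)')
RE_CVS_1_12 = re.compile(
    r'(.+?) (NONE|(?:\d+\.(?:\d+\.\d+\.)*\d+)) (NONE|(?:\d+\.(?:\d+\.\d+\.)*\d+))(?: |$)')

def parse_filelist(file_list, regex):
    # consume the string left to right, collecting one file name per match
    files = []
    while file_list:
        m = regex.match(file_list)
        if not m:
            return None  # unparseable; mirrors the "Ignoring" branch above
        files.append(m.group(1))
        file_list = file_list[m.end():]
    return files

# cvs 1.11 separates revisions with commas, cvs 1.12 with spaces
print(parse_filelist('new_file.c,NONE,1.1 removed.txt,1.2,NONE', RE_CVS_1_11))
print(parse_filelist('new_file.c NONE 1.1 modified_file.c 1.1 1.2', RE_CVS_1_12))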
Exemplo n.º 50
0
 def extract_netstrings(p, keys):
     for i, key in enumerate(keys):
         if key == 'patch_body':
             parsed_job[key] = p.strings[i]
         else:
             parsed_job[key] = bytes2unicode(p.strings[i])
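
parsed_job here is not a local variable: the helper writes into a dict from the enclosing scope while walking the decoded netstring fields, keeping patch_body as raw bytes and converting everything else to text. A minimal sketch with a stand-in parser object (FakeParser and the field names are invented for illustration):

from buildbot.util import bytes2unicode

class FakeParser:
    # stand-in for the netstring parser used above: it only needs to
    # expose the decoded payloads, in order, as a list of byte strings
    def __init__(self, strings):
        self.strings = strings

parsed_job = {}

def extract_netstrings(p, keys):
    for i, key in enumerate(keys):
        if key == 'patch_body':
            parsed_job[key] = p.strings[i]  # keep the patch as bytes
        else:
            parsed_job[key] = bytes2unicode(p.strings[i])

extract_netstrings(FakeParser([b'trunk', b'1234', b'--- a\n+++ b\n']),
                   ['branch', 'baserev', 'patch_body'])
print(parsed_job['branch'], parsed_job['baserev'])  # trunk 1234 (as str)
print(type(parsed_job['patch_body']))               # <class 'bytes'>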
Exemplo n.º 51
0
 def extract_netstrings(p, keys):
     for i, key in enumerate(keys):
         parsed_job[key] = bytes2unicode(p.strings[i])
Exemplo n.º 52
0
 def getBuilderIdForName(self, name):
     # buildbot.config should ensure this is already unicode, but it doesn't
     # hurt to check again
     name = bytes2unicode(name)
     return self.master.data.updates.findBuilderId(name)
Exemplo n.º 53
0
 def match(self, ep, action="", options=None):
     if bytes2unicode(action).lower() != "get":
         return defer.succeed(Match(self.master))
     return defer.succeed(None)
Exemplo n.º 54
0
    def _processChanges(self, page):
        # json.loads() no longer accepts an 'encoding' argument (removed in
        # Python 3.9), so decode the page explicitly instead
        result = json.loads(bytes2unicode(page, self.encoding))
        for pr in result['values']:
            branch = pr['source']['branch']['name']
            nr = int(pr['id'])
            # Note that this is a short hash. The full-length hash can be fetched from
            # the commit api resource, but we avoid requesting extra pages until we know
            # the pull request is actually new or updated.
            revision = pr['source']['commit']['hash']

            # check branch
            if not self.branch or branch in self.branch:
                current = yield self._getCurrentRev(nr)

                # compare _short_ hashes to check if the PR has been updated
                if not current or current[0:12] != revision[0:12]:
                    # parse pull request api page (required for the filter)
                    page = yield client.getPage(str(pr['links']['self']['href']))
                    pr_json = json.loads(bytes2unicode(page, self.encoding))

                    # filter pull requests by user function
                    if not self.pullrequest_filter(pr_json):
                        log.msg('pull request does not match filter')
                        continue

                    # access additional information
                    author = pr['author']['display_name']
                    prlink = pr['links']['html']['href']
                    # Get the updated time. Note that the timezone offset is
                    # ignored.
                    if self.useTimestamps:
                        updated = datetime.strptime(
                            pr['updated_on'].split('.')[0],
                            '%Y-%m-%dT%H:%M:%S')
                    else:
                        updated = epoch2datetime(reactor.seconds())
                    title = pr['title']
                    # parse commit api page
                    page = yield client.getPage(str(pr['source']['commit']['links']['self']['href']))
                    commit_json = json.loads(bytes2unicode(page, self.encoding))
                    # use the full-length hash from now on
                    revision = commit_json['hash']
                    revlink = commit_json['links']['html']['href']
                    # parse repo api page
                    page = yield client.getPage(str(pr['source']['repository']['links']['self']['href']))
                    repo_json = json.loads(bytes2unicode(page, self.encoding))
                    repo = repo_json['links']['html']['href']

                    # update database
                    yield self._setCurrentRev(nr, revision)
                    # emit the change
                    yield self.master.data.updates.addChange(
                        author=bytes2unicode(author),
                        revision=bytes2unicode(revision),
                        revlink=bytes2unicode(revlink),
                        comments=u'pull-request #%d: %s\n%s' % (
                            nr, title, prlink),
                        when_timestamp=datetime2epoch(updated),
                        branch=bytes2unicode(branch),
                        category=self.category,
                        project=self.project,
                        repository=bytes2unicode(repo),
                        src=u'bitbucket',
                    )
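
Two details of the loop above are easy to miss: the poller compares only the first twelve characters of the commit hash, because the pull-request listing returns a short hash, and it drops the fractional seconds (and with them the timezone offset) before parsing updated_on. A small sketch of both steps with made-up values:

from datetime import datetime

# short hash from the PR listing vs. the full hash stored on the previous poll
current = '9fec2fa1a54c37a331d3ef1e5e0e2b2db0f4b5c4'
revision = '9fec2fa1a54c'
needs_processing = not current or current[0:12] != revision[0:12]
print(needs_processing)  # False: same commit, nothing to do

# updated_on as returned by the API, e.g. '2015-06-01T12:00:00.123456+00:00';
# everything after the first '.' is cut off before parsing
updated = datetime.strptime('2015-06-01T12:00:00.123456+00:00'.split('.')[0],
                            '%Y-%m-%dT%H:%M:%S')
print(updated)  # 2015-06-01 12:00:00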
Exemplo n.º 55
0
    def _poll(self):
        if self.use_tickets:
            self._ticket_login_counter -= 1
            if self._ticket_login_counter <= 0:
                # Re-acquire the ticket and reset the counter.
                log.msg("P4Poller: (re)acquiring P4 ticket for %s..." %
                        self.p4base)
                protocol = TicketLoginProtocol(
                    self.p4passwd + "\n", self.p4base)
                self._acquireTicket(protocol)
                yield protocol.deferred

                self._ticket_passwd = self._parseTicketPassword(
                    protocol.stdout)
                self._ticket_login_counter = max(
                    self.ticket_login_interval / self.pollInterval, 1)
                if debug_logging:
                    log.msg("P4Poller: got ticket password: %s" %
                            self._ticket_passwd)
                    log.msg(
                        "P4Poller: next ticket acquisition in %d polls" % self._ticket_login_counter)

        args = []
        if self.p4port:
            args.extend(['-p', self.p4port])
        if self.p4user:
            args.extend(['-u', self.p4user])
        if self.p4passwd:
            args.extend(['-P', self._getPasswd()])
        args.extend(['changes'])
        if self.last_change is not None:
            args.extend(
                ['%s...@%d,#head' % (self.p4base, self.last_change + 1)])
        else:
            args.extend(['-m', '1', '%s...' % (self.p4base,)])

        result = yield self._get_process_output(args)
        # decode the result from its designated encoding
        try:
            result = bytes2unicode(result, self.encoding)
        except UnicodeError as ex:
            log.msg(u"{}: cannot fully decode {} in {}".format(
                    ex, repr(result), self.encoding))
            result = bytes2unicode(result, encoding=self.encoding, errors="replace")

        last_change = self.last_change
        changelists = []
        for line in result.split('\n'):
            line = line.strip()
            if not line:
                continue
            m = self.changes_line_re.match(line)
            if not m:
                raise P4PollerError(
                    "Unexpected 'p4 changes' output: %r" % result)
            num = int(m.group('num'))
            if last_change is None:
                # first time through, the poller just gets a "baseline" for where to
                # start on the next poll
                log.msg('P4Poller: starting at change %d' % num)
                self.last_change = num
                return
            changelists.append(num)
        changelists.reverse()  # oldest first

        # Retrieve each sequentially.
        for num in changelists:
            args = []
            if self.p4port:
                args.extend(['-p', self.p4port])
            if self.p4user:
                args.extend(['-u', self.p4user])
            if self.p4passwd:
                args.extend(['-P', self._getPasswd()])
            args.extend(['describe', '-s', str(num)])
            result = yield self._get_process_output(args)

            # decode the result from its designated encoding
            try:
                result = bytes2unicode(result, self.encoding)
            except UnicodeError as ex:
                log.msg(
                    "P4Poller: couldn't decode changelist description: %s" % ex.encoding)
                log.msg("P4Poller: in object: %s" % ex.object)
                log.err("P4Poller: poll failed on %s, %s" %
                        (self.p4port, self.p4base))
                raise

            lines = result.split('\n')
            # SF#1555985: Wade Brainerd reports a stray ^M at the end of the date
            # field. The rstrip() is intended to remove that.
            lines[0] = lines[0].rstrip()
            m = self.describe_header_re.match(lines[0])
            if not m:
                raise P4PollerError(
                    "Unexpected 'p4 describe -s' result: %r" % result)
            who = self.resolvewho_callable(m.group('who'))
            when = datetime.datetime.strptime(m.group('when'), self.datefmt)
            if self.server_tz:
                # Convert from the server's timezone to the local timezone.
                when = when.replace(tzinfo=self.server_tz)
            when = util.datetime2epoch(when)

            comment_lines = []
            lines.pop(0)  # describe header
            lines.pop(0)  # blank line
            while not lines[0].startswith('Affected files'):
                if lines[0].startswith('\t'):  # comment is indented by one tab
                    comment_lines.append(lines.pop(0)[1:])
                else:
                    lines.pop(0)  # discard non comment line
            comments = '\n'.join(comment_lines)

            lines.pop(0)  # affected files
            branch_files = {}  # dict for branch mapped to file(s)
            while lines:
                line = lines.pop(0).strip()
                if not line:
                    continue
                m = self.file_re.match(line)
                if not m:
                    raise P4PollerError("Invalid file line: %r" % line)
                path = m.group('path')
                if path.startswith(self.p4base):
                    branch, file = self.split_file(path[len(self.p4base):])
                    if (branch is None and file is None):
                        continue
                    if branch in branch_files:
                        branch_files[branch].append(file)
                    else:
                        branch_files[branch] = [file]

            for branch in branch_files:
                yield self.master.data.updates.addChange(
                    author=who,
                    files=branch_files[branch],
                    comments=comments,
                    revision=text_type(num),
                    when_timestamp=when,
                    branch=branch,
                    project=self.project,
                    revlink=self.revlink_callable(branch, text_type(num)))

            self.last_change = num
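
Most of _poll is plain-text parsing of 'p4 changes' and 'p4 describe -s' output. Below is a standalone sketch of the describe side; the regexes and the sample output are assumptions modelled on the format the code above expects, not copied from Buildbot:

import datetime
import re

describe_header_re = re.compile(r"Change \d+ by (?P<who>\S+)@\S+ on (?P<when>.+)$")
file_re = re.compile(r"^\.\.\. (?P<path>[^#]+)#\d+ [/\w]+$")
datefmt = '%Y/%m/%d %H:%M:%S'

sample = """Change 1234 by alice@ws1 on 2021/03/04 10:11:12

\tFix the frobnicator

Affected files ...

... //depot/main/frob.c#3 edit
"""

lines = sample.split('\n')
m = describe_header_re.match(lines[0].rstrip())
who = m.group('who')
when = datetime.datetime.strptime(m.group('when'), datefmt)

# comment lines are indented by a single tab; everything else is skipped
comments = '\n'.join(l[1:] for l in lines if l.startswith('\t'))

# affected files appear as '... <depot path>#<rev> <action>'
files = [fm.group('path').strip()
         for fm in (file_re.match(l.strip()) for l in lines)
         if fm]
print(who, when, repr(comments), files)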
Exemplo n.º 56
0
 def test_None(self):
     self.assertEqual(util.bytes2unicode(None, encoding='ascii'), None)
Exemplo n.º 57
0
 def addHTMLLog(name, html):
     _log = logfile.FakeLogFile(name)
     html = bytes2unicode(html)
     _log.addStdout(html)
     return defer.succeed(None)
Exemplo n.º 58
0
def _redirect(master, request):
    url = request.args.get(b"redirect", [b"/"])[0]
    url = bytes2unicode(url)
    return resource.Redirect(master.config.buildbotURL + "#" + url)
Exemplo n.º 59
0
 def renderLogin(self, request):
     session = request.getSession()
     session.user_info = dict(username=bytes2unicode(self.username))
     yield self.master.www.auth.updateUserInfo(request)
     raise _redirect(self.master, request)
Exemplo n.º 60
0
 def test_ascii(self):
     rv = util.bytes2unicode(b'abcd', encoding='ascii')
     self.assertEqual((rv, type(rv)), (u'abcd', text_type))
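
The two bytes2unicode tests (here and in Exemplo n.º 56) pin down the contract the other snippets rely on: None passes through untouched, and both byte and text input come back as str (text_type on Python 3). A minimal sketch of a function with that contract, not Buildbot's actual implementation:

def bytes2unicode(x, encoding='utf-8', errors='strict'):
    # None and existing text pass through; bytes are decoded
    if x is None or isinstance(x, str):
        return x
    return str(x, encoding, errors)

assert bytes2unicode(None, encoding='ascii') is None
assert bytes2unicode(b'abcd', encoding='ascii') == u'abcd'
assert isinstance(bytes2unicode(b'abcd'), str)
assert bytes2unicode(u'caf\u00e9') == u'caf\u00e9'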