Example #1
def prepare_app():
    """
    Prepare a worker application that contains the custom washer commands and
    that can be run without the `buildbot` command script.
    """
    application = service.Application('buildbot-worker')
    master = (GATEWAY
              if conf.Washer.FORCE_GATEWAY else conf.Buildbot.BUILDMASTER)
    worker = Worker(master,
                    conf.Buildbot.BUILDMASTER_PORT,
                    conf.Buildbot.WORKERNAME,
                    conf.Buildbot.WORKERPASS,
                    conf.Buildbot.BASEDIR,
                    conf.Buildbot.KEEPALIVE,
                    umask=None,
                    maxdelay=conf.Buildbot.MAXDELAY,
                    numcpus=None,
                    allow_shutdown=None,
                    maxRetries=None)
    worker.setServiceParent(application)

    class InlineApplication(UnixApplicationRunner):
        def createOrGetApplication(self):
            # Return the application built above instead of loading one from a
            # .tac file, as the stock runner would do.
            nonlocal application
            return application

    options = ServerOptions()
    options["nodaemon"] = not conf.Washer.DAEMON
    options["logfile"] = conf.Washer.LOG_FILE

    commands.register()

    return InlineApplication(options)
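
The runner returned by prepare_app() can then be started directly, without going
through the twistd command line. A minimal sketch, assuming the project-local
conf and commands modules are importable; UnixApplicationRunner.run() uses these
options and starts the Twisted reactor.

if __name__ == '__main__':
    # Build the in-memory application and hand it to the twistd machinery.
    prepare_app().run()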
Example #2
def setup_worker(application: service.Application, id: int) -> None:
    basedir = f"{require_env('BUILDBOT_DIR')}-{id}"
    os.makedirs(basedir, mode=0o700, exist_ok=True)

    master_url = require_env("MASTER_URL")
    hostname = socket.gethostname()
    workername = f"{hostname}-{id}"

    with open(require_env("WORKER_PASSWORD_FILE"), "r",
              encoding="utf-8") as passwd_file:
        passwd = passwd_file.read().strip("\r\n")
    keepalive = 600
    umask = None
    maxdelay = 300
    numcpus = None
    allow_shutdown = None

    s = Worker(
        None,  # buildmaster host is not passed here; the address comes from connection_string
        None,  # buildmaster port, likewise supplied via connection_string
        workername,
        passwd,
        basedir,
        keepalive,
        connection_string=master_url,
        umask=umask,
        maxdelay=maxdelay,
        numcpus=numcpus,
        allow_shutdown=allow_shutdown,
    )
    s.setServiceParent(application)
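
A usage sketch for setup_worker(): one parent Application with several numbered
workers attached to it, as a .tac file would do. WORKER_COUNT is an illustrative
name, not part of the example above; BUILDBOT_DIR, MASTER_URL and
WORKER_PASSWORD_FILE must be present in the environment for require_env() to
succeed.

import os

from twisted.application import service

application = service.Application("buildbot-worker")
for worker_id in range(int(os.environ.get("WORKER_COUNT", "1"))):
    setup_worker(application, worker_id)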
Example #3
    @defer.inlineCallbacks
    def setupConfig(self, config_dict, startWorker=True, **worker_kwargs):
        """
        Set up and start a master configured by the given dictionary.

        @type config_dict: dict
        @param config_dict: The BuildmasterConfig dictionary.
        """
        # mock reactor.stop (which trial *really* doesn't
        # like test code to call!)
        stop = mock.create_autospec(reactor.stop)
        self.patch(reactor, 'stop', stop)

        if startWorker:
            if self.proto == 'pb':
                proto = {"pb": {"port": "tcp:0:interface=127.0.0.1"}}
                workerclass = worker.Worker
            elif self.proto == 'msgpack':
                proto = {"msgpack_experimental_v1": {"port": 0}}
                workerclass = worker.Worker
            elif self.proto == 'null':
                proto = {"null": {}}
                workerclass = worker.LocalWorker
            config_dict['workers'] = [workerclass("local1", password=Interpolate("localpw"),
                                                  missing_timeout=0)]
            config_dict['protocols'] = proto

        m = yield getMaster(self, reactor, config_dict)
        self.master = m
        self.assertFalse(stop.called,
                         "startService tried to stop the reactor; check logs")

        if not startWorker:
            return

        if self.proto in ('pb', 'msgpack'):
            sandboxed_worker_path = os.environ.get("SANDBOXED_WORKER_PATH", None)
            worker_python_version = os.environ.get("WORKER_PYTHON", None)
            if self.proto == 'pb':
                protocol = 'pb'
                dispatcher = list(m.pbmanager.dispatchers.values())[0]
            else:
                protocol = 'msgpack_experimental_v1'
                dispatcher = list(m.msgmanager.dispatchers.values())[0]

                if sandboxed_worker_path is not None and worker_python_version == '2.7':
                    raise SkipTest('MessagePack protocol is not supported on python 2.7 worker')

                # We currently don't handle connection closing cleanly.
                dispatcher.serverFactory.setProtocolOptions(closeHandshakeTimeout=0)

            workerPort = dispatcher.port.getHost().port

            # Create a worker and attach it to the master; it will be started
            # and stopped along with the master.
            worker_dir = FilePath(self.mktemp())
            worker_dir.createDirectory()
            if sandboxed_worker_path is None:
                self.w = Worker(
                    "127.0.0.1", workerPort, "local1", "localpw", worker_dir.path,
                    False, protocol=protocol, **worker_kwargs)
            else:
                self.w = SandboxedWorker(
                    "127.0.0.1", workerPort, "local1", "localpw", worker_dir.path,
                    sandboxed_worker_path, protocol=protocol, **worker_kwargs)
                self.addCleanup(self.w.shutdownWorker)

        elif self.proto == 'null':
            self.w = None

        if self.w is not None:
            yield self.w.setServiceParent(m)

        @defer.inlineCallbacks
        def dump():
            if not self._passed:
                dump = StringIO()
                print("FAILED! dumping build db for debug", file=dump)
                builds = yield self.master.data.get(("builds",))
                for build in builds:
                    yield self.printBuild(build, dump, withLogs=True)

                raise self.failureException(dump.getvalue())
        self.addCleanup(dump)
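
Because setupConfig() keys off self.proto, the same integration test can be reused
across connection protocols by subclassing and overriding that single attribute.
A sketch with illustrative class names (RunMasterBase is shown in the next
example):

class ShellCommandMaster(RunMasterBase):
    proto = "pb"        # worker connects over Perspective Broker


class ShellCommandMasterMsgPack(ShellCommandMaster):
    proto = "msgpack"   # same tests, over the experimental MessagePack protocol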
Example #4
class RunMasterBase(unittest.TestCase):
    proto = "null"

    if Worker is None:
        skip = "buildbot-worker package is not installed"

    @defer.inlineCallbacks
    def setupConfig(self, config_dict, startWorker=True):
        """
        Set up and start a master configured by the given dictionary.

        @type config_dict: dict
        @param config_dict: The BuildmasterConfig dictionary.
        """
        # mock reactor.stop (which trial *really* doesn't
        # like test code to call!)
        stop = mock.create_autospec(reactor.stop)
        self.patch(reactor, 'stop', stop)

        if startWorker:
            if self.proto == 'pb':
                proto = {"pb": {"port": "tcp:0:interface=127.0.0.1"}}
                workerclass = worker.Worker
            elif self.proto == 'null':
                proto = {"null": {}}
                workerclass = worker.LocalWorker
            config_dict['workers'] = [workerclass("local1", "localpw")]
            config_dict['protocols'] = proto

        m = yield getMaster(self, reactor, config_dict)
        self.master = m
        self.assertFalse(stop.called,
                         "startService tried to stop the reactor; check logs")

        if not startWorker:
            return

        if self.proto == 'pb':
            # We find out the worker port automatically
            workerPort = list(itervalues(m.pbmanager.dispatchers))[
                0].port.getHost().port

            # Create a worker and attach it to the master; it will be started
            # and stopped along with the master.
            worker_dir = FilePath(self.mktemp())
            worker_dir.createDirectory()
            self.w = Worker(
                "127.0.0.1", workerPort, "local1", "localpw", worker_dir.path,
                False)
        elif self.proto == 'null':
            self.w = None
        if self.w is not None:
            self.w.startService()
            self.addCleanup(self.w.stopService)

        @defer.inlineCallbacks
        def dump():
            if not self._passed:
                dump = StringIO.StringIO()
                print("FAILED! dumping build db for debug", file=dump)
                builds = yield self.master.data.get(("builds",))
                for build in builds:
                    yield self.printBuild(build, dump, withLogs=True)

                raise self.failureException(dump.getvalue())
        self.addCleanup(dump)

    @defer.inlineCallbacks
    def doForceBuild(self, wantSteps=False, wantProperties=False,
                     wantLogs=False, useChange=False):

        # force a build, and wait until it is finished
        d = defer.Deferred()

        # in order to allow trigger based integration tests
        # we wait until the first started build is finished
        self.firstBuildRequestId = None

        def newCallback(_, data):
            if self.firstBuildRequestId is None:
                self.firstBuildRequestId = data['buildrequestid']
                newConsumer.stopConsuming()

        def finishedCallback(_, data):
            if self.firstBuildRequestId == data['buildrequestid']:
                d.callback(data)

        newConsumer = yield self.master.mq.startConsuming(
            newCallback,
            ('buildrequests', None, 'new'))

        finishedConsumer = yield self.master.mq.startConsuming(
            finishedCallback,
            ('buildrequests', None, 'complete'))

        if useChange is False:
            # use data api to force a build
            yield self.master.data.control("force", {}, ("forceschedulers", "force"))
        else:
            # use data api to force a build, via a new change
            yield self.master.data.updates.addChange(**useChange)

        # wait until we receive the build finished event
        buildrequest = yield d
        builds = yield self.master.data.get(
            ('builds',),
            filters=[resultspec.Filter('buildrequestid', 'eq', [buildrequest['buildrequestid']])])
        # if the build has been retried, there will be several matching builds. We return the last build
        build = builds[-1]
        finishedConsumer.stopConsuming()
        yield self.enrichBuild(build, wantSteps, wantProperties, wantLogs)
        defer.returnValue(build)

    @defer.inlineCallbacks
    def enrichBuild(self, build, wantSteps=False, wantProperties=False, wantLogs=False):
        # enrich the build result, with the step results
        if wantSteps:
            build["steps"] = yield self.master.data.get(("builds", build['buildid'], "steps"))
            # enrich the step result, with the logs results
            if wantLogs:
                build["steps"] = list(build["steps"])
                for step in build["steps"]:
                    step['logs'] = yield self.master.data.get(("steps", step['stepid'], "logs"))
                    step["logs"] = list(step['logs'])
                    for log in step["logs"]:
                        log['contents'] = yield self.master.data.get(("logs", log['logid'], "contents"))

        if wantProperties:
            build["properties"] = yield self.master.data.get(("builds", build['buildid'], "properties"))

    @defer.inlineCallbacks
    def printBuild(self, build, out=sys.stdout, withLogs=False):
        # helper for debugging: print a build
        yield self.enrichBuild(build, wantSteps=True, wantProperties=True, wantLogs=True)
        print("*** BUILD %d *** ==> %s (%s)" % (build['buildid'], build['state_string'],
                                                statusToString(build['results'])), file=out)
        for step in build['steps']:
            print("    *** STEP %s *** ==> %s (%s)" % (step['name'], step['state_string'],
                                                       statusToString(step['results'])), file=out)
            for url in step['urls']:
                print("       url:%s (%s)" %
                      (url['name'], url['url']), file=out)
            for log in step['logs']:
                print("        log:%s (%d)" %
                      (log['name'], log['num_lines']), file=out)
                if step['results'] != SUCCESS or withLogs:
                    self.printLog(log, out)

    def printLog(self, log, out):
        print(" " * 8 + "*********** LOG: %s *********" %
              (log['name'],), file=out)
        if log['type'] == 's':
            for line in log['contents']['content'].splitlines():
                linetype = line[0]
                line = line[1:]
                if linetype == 'h':
                    # cyan
                    line = "\x1b[36m" + line + "\x1b[0m"
                if linetype == 'e':
                    # red
                    line = "\x1b[31m" + line + "\x1b[0m"
                print(" " * 8 + line, file=out)
        else:
            print(log['contents']['content'], file=out)
        print(" " * 8 + "********************************", file=out)
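
A minimal sketch of how these helpers might be driven from a test case: build a
small BuildmasterConfig dictionary, hand it to setupConfig(), force a build, and
inspect the result. The builder name "testy" and the scheduler name "force" are
assumptions, chosen to match the worker name "local1" registered by setupConfig()
and the ("forceschedulers", "force") endpoint used by doForceBuild().

from buildbot.plugins import schedulers, steps, util
from buildbot.process.results import SUCCESS
from twisted.internet import defer


class EchoMaster(RunMasterBase):
    proto = "pb"

    @defer.inlineCallbacks
    def test_echo(self):
        # One builder running a single shell step, plus a force scheduler
        # that doForceBuild() can trigger by name.
        factory = util.BuildFactory()
        factory.addStep(steps.ShellCommand(command='echo hello'))
        config_dict = {
            'schedulers': [schedulers.ForceScheduler(name="force",
                                                     builderNames=["testy"])],
            'builders': [util.BuilderConfig(name="testy",
                                            workernames=["local1"],
                                            factory=factory)],
        }
        yield self.setupConfig(config_dict)
        build = yield self.doForceBuild(wantSteps=True, wantLogs=True)
        self.assertEqual(build['results'], SUCCESS)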