def masterConfig(addFailure=False):
    c = {}
    from buildbot.config import BuilderConfig
    from buildbot.process.factory import BuildFactory
    from buildbot.plugins import steps, schedulers
    c['schedulers'] = [
        schedulers.Triggerable(name="trigsched", builderNames=["build"]),
        schedulers.AnyBranchScheduler(name="sched", builderNames=["testy"])
    ]

    f = BuildFactory()
    f.addStep(steps.ShellCommand(command='echo hello'))
    f.addStep(
        steps.Trigger(schedulerNames=['trigsched'],
                      waitForFinish=True,
                      updateSourceStamp=True))
    f.addStep(steps.ShellCommand(command='echo world'))
    f2 = BuildFactory()
    f2.addStep(steps.ShellCommand(command='echo ola'))
    c['builders'] = [
        BuilderConfig(name="testy", workernames=["local1"], factory=f),
        BuilderConfig(name="build", workernames=["local1"], factory=f2)
    ]
    if addFailure:
        f3 = BuildFactory()
        f3.addStep(steps.ShellCommand(command='false'))
        c['builders'].append(
            BuilderConfig(name="build2", workernames=["local1"], factory=f3))
        c['builders'].append(
            BuilderConfig(name="build3", workernames=["local1"], factory=f2))
        c['schedulers'][0] = schedulers.Triggerable(
            name="trigsched", builderNames=["build", "build2", "build3"])
    return c
def masterConfig():
    c = {}
    from buildbot.config import BuilderConfig
    from buildbot.process.factory import BuildFactory
    from buildbot.plugins import steps, schedulers
    c['schedulers'] = [
        schedulers.Triggerable(name="trigsched", builderNames=["build"]),
        schedulers.AnyBranchScheduler(name="sched", builderNames=["testy"])]

    f = BuildFactory()
    f.addStep(steps.ShellCommand(command='echo hello'))
    f.addStep(steps.Trigger(schedulerNames=['trigsched'],
                            waitForFinish=True,
                            updateSourceStamp=True))
    f.addStep(steps.ShellCommand(command='echo world'))
    f2 = BuildFactory()
    f2.addStep(steps.ShellCommand(command='echo ola'))
    c['builders'] = [
        # 'slavenames' is the pre-0.9 Buildbot spelling; current releases use 'workernames'
        BuilderConfig(name="testy", slavenames=["local1"], factory=f),
        BuilderConfig(name="build", slavenames=["local1"], factory=f2)]
    return c
def setupTriggerConfiguration(triggeredFactory, nextBuild=None):
    c = {}
    c['schedulers'] = [
        schedulers.Triggerable(name="trigsched", builderNames=["triggered"]),
        schedulers.AnyBranchScheduler(name="sched", builderNames=["main"])
    ]

    f = BuildFactory()
    f.addStep(
        steps.Trigger(schedulerNames=['trigsched'],
                      waitForFinish=True,
                      updateSourceStamp=True))
    f.addStep(steps.ShellCommand(command='echo world'))

    mainBuilder = BuilderConfig(name="main",
                                workernames=["local1"],
                                factory=f)

    triggeredBuilderKwargs = {
        'name': "triggered",
        'workernames': ["local1"],
        'factory': triggeredFactory
    }
    if nextBuild is not None:
        triggeredBuilderKwargs['nextBuild'] = nextBuild
    triggeredBuilder = BuilderConfig(**triggeredBuilderKwargs)

    c['builders'] = [mainBuilder, triggeredBuilder]
    return c
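# A minimal hedged sketch, not part of the example above: a 'nextBuild' callable
# that could be passed to setupTriggerConfiguration(). Buildbot calls it with the
# Builder and the list of pending BuildRequest objects and starts the request it
# returns next; the function name is hypothetical.
def pickOldestRequest(builder, requests):
    # returning None tells Buildbot to start nothing on this cycle
    return requests[0] if requests else None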
def masterConfig(num_concurrent, extra_steps=None):
    if extra_steps is None:
        extra_steps = []
    c = {}

    c['schedulers'] = [
        schedulers.ForceScheduler(name="force", builderNames=["testy"])]
    triggereables = []
    for i in range(num_concurrent):
        c['schedulers'].append(
            schedulers.Triggerable(name="trigsched" + str(i),
                                   builderNames=["build"]))
        triggereables.append("trigsched" + str(i))

    f = BuildFactory()
    f.addStep(steps.ShellCommand(command='echo hello'))
    f.addStep(steps.Trigger(schedulerNames=triggereables,
                            waitForFinish=True,
                            updateSourceStamp=True))
    f.addStep(steps.ShellCommand(command='echo world'))
    f2 = BuildFactory()
    f2.addStep(steps.ShellCommand(command='echo ola'))
    for step in extra_steps:
        f2.addStep(step)
    c['builders'] = [
        BuilderConfig(name="testy", workernames=["marathon0"], factory=f),
        BuilderConfig(
            name="build",
            workernames=["marathon" + str(i) for i in range(num_concurrent)],
            factory=f2)]

    url = os.environ.get('BBTEST_MARATHON_URL')
    creds = os.environ.get('BBTEST_MARATHON_CREDS')
    if creds is not None:
        user, password = creds.split(":")
    else:
        user = password = None
    masterFQDN = os.environ.get('masterFQDN')
    marathon_extra_config = {}
    c['workers'] = [
        MarathonLatentWorker('marathon' + str(i), url, user, password,
                             'buildbot/buildbot-worker:master',
                             marathon_extra_config=marathon_extra_config,
                             masterFQDN=masterFQDN)
        for i in range(num_concurrent)
    ]
    # uncomment for debugging when things look locked
    # c['www'] = {'port': 8080}
    # if masterFQDN is forced (proxy case), use the default port 9989;
    # otherwise let the master pick a free port
    if masterFQDN is not None:
        c['protocols'] = {"pb": {"port": "tcp:9989"}}
    else:
        c['protocols'] = {"pb": {"port": "tcp:0"}}
    return c
def masterConfig(num_concurrent, extra_steps=None):
    if extra_steps is None:
        extra_steps = []
    c = {}

    c['schedulers'] = [
        schedulers.ForceScheduler(name="force", builderNames=["testy"])
    ]
    triggereables = []
    for i in range(num_concurrent):
        c['schedulers'].append(
            schedulers.Triggerable(name="trigsched" + str(i),
                                   builderNames=["build"]))
        triggereables.append("trigsched" + str(i))

    f = BuildFactory()
    f.addStep(steps.ShellCommand(command='echo hello'))
    f.addStep(
        steps.Trigger(schedulerNames=triggereables,
                      waitForFinish=True,
                      updateSourceStamp=True))
    f.addStep(steps.ShellCommand(command='echo world'))
    f2 = BuildFactory()
    f2.addStep(steps.ShellCommand(command='echo ola'))
    for step in extra_steps:
        f2.addStep(step)

    c['builders'] = [
        BuilderConfig(name="testy", workernames=["hyper0"], factory=f),
        BuilderConfig(
            name="build",
            workernames=["hyper" + str(i) for i in range(num_concurrent)],
            factory=f2)
    ]
    hyperconfig = workerhyper.Hyper.guess_config()
    if isinstance(hyperconfig, string_types):
        hyperconfig = json.load(open(hyperconfig))
    # dict.items() is not indexable on Python 3; materialise it as a list first
    hyperhost, hyperconfig = list(hyperconfig['clouds'].items())[0]
    masterFQDN = os.environ.get('masterFQDN')
    c['workers'] = [
        HyperLatentWorker('hyper' + str(i), 'passwd', hyperhost,
                          hyperconfig['accesskey'], hyperconfig['secretkey'],
                          'buildbot/buildbot-worker:master',
                          masterFQDN=masterFQDN)
        for i in range(num_concurrent)
    ]
    # uncomment for debugging when things look locked
    # c['www'] = {'port': 8080}
    # if masterFQDN is forced (proxy case), use the default port 9989;
    # otherwise let the master pick a free port
    if masterFQDN is not None:
        c['protocols'] = {"pb": {"port": "tcp:9989"}}
    else:
        c['protocols'] = {"pb": {"port": "tcp:0"}}
    return c
def masterConfig():
    c = {}
    from buildbot.config import BuilderConfig
    from buildbot.process.factory import BuildFactory
    from buildbot.plugins import steps, schedulers
    c['schedulers'] = [
        schedulers.AnyBranchScheduler(name="sched", builderNames=["testy"])
    ]
    triggereables = []
    for i in range(NUM_CONCURRENT):
        c['schedulers'].append(
            schedulers.Triggerable(name="trigsched" + str(i),
                                   builderNames=["build"]))
        triggereables.append("trigsched" + str(i))

    f = BuildFactory()
    f.addStep(steps.ShellCommand(command='echo hello'))
    f.addStep(
        steps.Trigger(schedulerNames=triggereables,
                      waitForFinish=True,
                      updateSourceStamp=True))
    f.addStep(steps.ShellCommand(command='echo world'))
    f2 = BuildFactory()
    f2.addStep(steps.ShellCommand(command='echo ola'))
    c['builders'] = [
        BuilderConfig(name="testy", workernames=["hyper0"], factory=f),
        BuilderConfig(
            name="build",
            workernames=["hyper" + str(i) for i in range(NUM_CONCURRENT)],
            factory=f2)
    ]
    hyperconfig = workerhyper.Hyper.guess_config()
    if isinstance(hyperconfig, string_types):
        hyperconfig = json.load(open(hyperconfig))
    # dict.items() is not indexable on Python 3; materialise it as a list first
    hyperhost, hyperconfig = list(hyperconfig['clouds'].items())[0]
    masterFQDN = os.environ.get('masterFQDN')
    c['workers'] = [
        HyperLatentWorker('hyper' + str(i), 'passwd', hyperhost,
                          hyperconfig['accesskey'], hyperconfig['secretkey'],
                          'buildbot/buildbot-worker:master',
                          masterFQDN=masterFQDN)
        for i in range(NUM_CONCURRENT)
    ]
    # if masterFQDN is forced (proxy case), use the default port 9989;
    # otherwise let the master pick a free port
    if masterFQDN is not None:
        c['protocols'] = {"pb": {"port": "tcp:9989"}}
    else:
        c['protocols'] = {"pb": {"port": "tcp:0"}}
    return c
def masterConfig(num_concurrent, extra_steps=None):
    if extra_steps is None:
        extra_steps = []
    c = {}

    c['schedulers'] = [
        schedulers.ForceScheduler(name="force", builderNames=["testy"])
    ]
    triggereables = []
    for i in range(num_concurrent):
        c['schedulers'].append(
            schedulers.Triggerable(name="trigsched" + str(i),
                                   builderNames=["build"]))
        triggereables.append("trigsched" + str(i))

    f = BuildFactory()
    f.addStep(steps.ShellCommand(command='echo hello'))
    f.addStep(
        steps.Trigger(schedulerNames=triggereables,
                      waitForFinish=True,
                      updateSourceStamp=True))
    f.addStep(steps.ShellCommand(command='echo world'))
    f2 = BuildFactory()
    f2.addStep(steps.ShellCommand(command='echo ola'))
    for step in extra_steps:
        f2.addStep(step)

    c['builders'] = [
        BuilderConfig(name="testy", workernames=["kubernetes0"], factory=f),
        BuilderConfig(
            name="build",
            workernames=["kubernetes" + str(i) for i in range(num_concurrent)],
            factory=f2)
    ]
    masterFQDN = os.environ.get('masterFQDN')
    c['workers'] = [
        kubernetes.KubeLatentWorker(
            'kubernetes' + str(i),
            'buildbot/buildbot-worker',
            kube_config=kubeclientservice.KubeCtlProxyConfigLoader(
                namespace=os.getenv("KUBE_NAMESPACE", "default")),
            masterFQDN=masterFQDN)
        for i in range(num_concurrent)
    ]
    # uncomment for debugging when things look locked
    # c['www'] = {'port': 8080}
    c['protocols'] = {"pb": {"port": "tcp:9989"}}
    return c
for builder in config.triggered_builders:
    schedulers.append(
        sched.ForceScheduler(
            name=builder,
            builderNames=[builder],
            reason=util.StringParameter(
                name="reason",
                label="""Reason (please note the reason for triggering the build and any expectations for the build's outcome):""",
                required=False),
            properties=props_for_builder(builder),
            buttonName="Force Build"))

# the nightly builder triggers various other builders
wait = sched.Triggerable(name="wait",
                         builderNames=config.trigger_builders_wait)
schedulers.append(wait)

nowait = sched.Triggerable(name="nowait",
                           builderNames=config.trigger_builders_nowait)
schedulers.append(nowait)

schedulers.append(
    sched.ForceScheduler(
        name="nightly",
        builderNames=["nightly"],
        buttonName="Start Nightly Build",
        reason=util.StringParameter(
            name="reason",
            label="""Reason (please note the reason for triggering the build and any expectations for the build's outcome):""",
            required=False),
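# Hedged sketch, not part of the configuration above: the "nightly" builder's
# factory would typically fire both Triggerables, blocking on the "wait" group
# and kicking off the "nowait" group asynchronously. The factory name is
# hypothetical; only the scheduler names come from the snippet above.
from buildbot.plugins import steps, util

nightly_factory = util.BuildFactory()
nightly_factory.addStep(steps.Trigger(schedulerNames=["wait"], waitForFinish=True))
nightly_factory.addStep(steps.Trigger(schedulerNames=["nowait"], waitForFinish=False))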
from buildbot.plugins import schedulers

from . import properties

BUILD_PROPERTIES = [
    properties.build_box(),
    properties.build_experimental_features(),
    properties.build_target(),
    properties.ci_url(),
    properties.cmake_flags(),
    properties.host(),
    properties.keep_virtual_machines(),
    properties.old_target(),
    properties.repository_path(),
    properties.run_upgrade_test(),
    properties.try_already_running(),
]

TRIGGERABLE_SCHEDULER = schedulers.Triggerable(
    name="build",
    builderNames=["build"]
)

MANUAL_SCHEDULER = schedulers.ForceScheduler(
    name="build_force",
    buttonName="Force build",
    builderNames=["build"],
    codebases=properties.codebaseParameter(),
    properties=BUILD_PROPERTIES
)

SCHEDULERS = [TRIGGERABLE_SCHEDULER, MANUAL_SCHEDULER]
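# Hedged sketch, not part of the module above: another builder's factory can fire
# the "build" Triggerable by name and hand it properties through steps.Trigger's
# set_properties argument. The calling factory and the "build_target" value are
# assumptions made for illustration.
from buildbot.plugins import steps, util

caller = util.BuildFactory()
caller.addStep(steps.Trigger(schedulerNames=["build"],
                             waitForFinish=True,
                             set_properties={"build_target": "develop"}))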
def get(builders):
    scheds = []

    # pull request schedulers
    scheds.append(
        schedulers.AnyBranchScheduler(
            name="gr_pull_request_handler",
            change_filter=util.ChangeFilter(category='pull',
                                            project="gnuradio/gnuradio"),
            treeStableTimer=None,
            builderNames=[
                b.name for b in builders
                if "control" in b.tags and "gnuradio" in b.tags and "pull" in b.tags
            ]))
    scheds.append(
        schedulers.AnyBranchScheduler(
            name="volk_pull_request_handler",
            change_filter=util.ChangeFilter(category='pull',
                                            project="gnuradio/volk"),
            treeStableTimer=None,
            builderNames=[
                b.name for b in builders
                if "control" in b.tags and "volk" in b.tags and "pull" in b.tags
            ]))

    # push event scheduler
    def filter_for_push(change):
        event = change.properties.getProperty("event")
        project = change.properties.getProperty("project")
        if event == "push":
            return True
        return False

    scheds.append(
        schedulers.AnyBranchScheduler(
            name="commit_push_handler",
            change_filter=util.ChangeFilter(filter_fn=filter_for_push,
                                            project="gnuradio/gnuradio"),
            treeStableTimer=60,
            builderNames=[
                b.name for b in builders
                if "control" in b.tags and "push" in b.tags
            ]))
    scheds.append(
        schedulers.ForceScheduler(
            name="force_pullrequest",
            builderNames=["pull_request_runner"],
            properties=[
                util.StringParameter(name="github.number",
                                     label="GitHub pull request number",
                                     default="",
                                     size=80),
                util.StringParameter(name="github.base.ref",
                                     label="pull request base branch",
                                     default="master",
                                     size=80)
            ],
            codebases=[
                util.CodebaseParameter(
                    "",
                    project=util.FixedParameter(name="project",
                                                default="gnuradio/gnuradio"),
                    repository=util.FixedParameter(
                        name="repository",
                        default="https://github.com/gnuradio/gnuradio.git"),
                    branch=util.StringParameter(
                        name="branch",
                        label="pull request branch",
                        default="refs/pull/<PR#>/merge",
                        size=80),
                    revision=util.FixedParameter(name="revision", default=""))
            ]))
    scheds.append(
        schedulers.ForceScheduler(
            name="force_build",
            builderNames=["repo_push_runner"],
            codebases=[
                util.CodebaseParameter(
                    "",
                    project=util.FixedParameter(name="project",
                                                default="gnuradio/gnuradio"),
                    repository=util.FixedParameter(
                        name="repository",
                        default="https://github.com/gnuradio/gnuradio.git"),
                )
            ]))
    scheds.append(
        schedulers.ForceScheduler(
            name="force_weekly",
            builderNames=["weekly_runner"],
            codebases=[
                util.CodebaseParameter(
                    "",
                    project=util.FixedParameter(name="project",
                                                default="gnuradio/gnuradio"),
                    repository=util.FixedParameter(
                        name="repository",
                        default="https://github.com/gnuradio/gnuradio.git"),
                    branch=util.StringParameter(name="branch",
                                                label="test branch",
                                                default="master",
                                                size=80),
                    revision=util.FixedParameter(name="revision", default=""))
            ]))
    scheds.append(
        schedulers.Nightly(name="timed_weekly",
                           builderNames=["weekly_runner"],
                           codebases={
                               "": {
                                   "repository": "https://github.com/gnuradio/gnuradio.git",
                                   "branch": "master",
                                   "revision": "None"
                               }
                           },
                           dayOfWeek=[0, 4],
                           hour=4,
                           minute=0))
    # caveat: str.lstrip() strips a *set of characters*, not a prefix, so a name
    # like "build_ubuntu" loses more than the literal "build_"/"test_" prefix
    scheds.extend([
        schedulers.Triggerable(name="trigger_" + b.name.lstrip("build_"),
                               builderNames=[b.name])
        for b in builders if "build" in b.tags
    ])
    scheds.extend([
        schedulers.Triggerable(name="trigger_" + b.name.lstrip("test_"),
                               builderNames=[b.name])
        for b in builders if "test" in b.tags
    ])
    return scheds
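# Hedged sketch, not part of the gnuradio configuration above: a prefix-safe
# alternative to lstrip() for deriving trigger scheduler names (Python 3.9+ also
# provides str.removeprefix()). The helper name is hypothetical.
def strip_prefix(name, prefix):
    # remove an exact leading prefix; otherwise return the name unchanged
    return name[len(prefix):] if name.startswith(prefix) else name

# e.g. strip_prefix("build_ubuntu", "build_") == "ubuntu",
# while "build_ubuntu".lstrip("build_") == "ntu" (the leading 'u' and 'b' of
# "ubuntu" are also stripped, since they belong to the character set)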
def getGlobalSchedulers(self, platforms):
    ret = list()
    change_filter = util.ChangeFilter(repository=[self.baseurl, self.giturl],
                                      branch=self.branch)

    # Fetch scheduler (triggered by event source)
    ret.append(
        schedulers.SingleBranchScheduler(
            name=self.names['sch-sb'],
            change_filter=change_filter,
            # Wait for 5 minutes before starting the build
            treeStableTimer=300,
            builderNames=[self.names['bld-fetch']]))

    # Nightly scheduler (started by time)
    # It's triggered after regular builds to take note of the last fetched source
    # Note that the build is not started by the trigger
    # We clean up after it because we just generated a new package
    if self.nightly is not None:
        ret.append(
            schedulers.NightlyTriggerable(
                name=self.names['sch-nightly'],
                branch=self.branch,
                builderNames=[
                    self.names['bld-nightly'],
                    self.names['bld-clean']
                ],
                hour=self.nightly[0],
                minute=self.nightly[1],
                onlyIfChanged=True))

    # All compiling builders
    comp_builders = list(self.names['bld-platform'](p)
                         for p in platforms if p.canBuild(self))

    # Global build scheduler (triggered by fetch build and nightly build)
    ret.append(
        schedulers.Triggerable(name=self.names['sch-build'],
                               builderNames=comp_builders))

    # Force schedulers
    if self.enable_force:
        ret.append(
            schedulers.ForceScheduler(
                name=self.names['sch-force-id-fetch'],
                buttonName=self.names['sch-force-name-fetch'],
                label=self.names['sch-force-name-fetch'],
                reason=util.StringParameter(name="reason",
                                            label="Reason:",
                                            required=True,
                                            size=80),
                builderNames=[self.names['bld-fetch']],
                codebases=[util.CodebaseParameter(codebase='', hide=True)],
                properties=[
                    util.BooleanParameter(name="clean",
                                          label="Clean",
                                          default=False),
                    util.BooleanParameter(name="package",
                                          label="Package",
                                          default=False),
                ]))
        ret.append(
            schedulers.ForceScheduler(
                name=self.names['sch-force-id-build'],
                buttonName=self.names['sch-force-name-build'],
                label=self.names['sch-force-name-build'],
                reason=util.StringParameter(name="reason",
                                            label="Reason:",
                                            required=True,
                                            size=80),
                builderNames=comp_builders,
                codebases=[util.CodebaseParameter(codebase='', hide=True)],
                properties=[
                    util.BooleanParameter(name="clean",
                                          label="Clean",
                                          default=False),
                    util.BooleanParameter(name="package",
                                          label="Package",
                                          default=False),
                ]))
        ret.append(
            schedulers.ForceScheduler(
                name=self.names['sch-force-id-clean'],
                buttonName=self.names['sch-force-name-clean'],
                label=self.names['sch-force-name-clean'],
                reason=util.StringParameter(name="reason", hide=True),
                builderNames=[self.names['bld-clean']],
                codebases=[util.CodebaseParameter(codebase='', hide=True)],
                properties=[
                    util.BooleanParameter(name="dry_run",
                                          label="Dry run",
                                          default=False),
                ]))
    return ret
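# Hedged sketch, not taken from the class above: the fetch builder's factory is
# expected to end with a Trigger step that fires the global build Triggerable so
# every platform builder runs against the freshly fetched source. The factory
# name and the literal scheduler name "build" are assumptions; in the real
# configuration the name comes from self.names['sch-build'].
from buildbot.plugins import steps, util

fetch_factory = util.BuildFactory()
fetch_factory.addStep(steps.Trigger(schedulerNames=["build"],
                                    waitForFinish=False,
                                    alwaysUseLatest=True))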
# Create schedulers and builders for builds
c["builders"] = []
c["schedulers"] = [
    schedulers.SingleBranchScheduler(
        name=config.TRIGGER,
        change_filter=util.ChangeFilter(category="mediasdk"),
        treeStableTimer=config.BUILDBOT_TREE_STABLE_TIMER,
        builderNames=[config.TRIGGER])
]
for builder_name, properties in config.FLOW.get_prepared_builders().items():
    if properties.get('add_triggerable_sheduler', True):
        c["schedulers"].append(
            schedulers.Triggerable(name=builder_name,
                                   builderNames=[builder_name]))
    c["builders"].append(
        util.BuilderConfig(name=builder_name,
                           workernames=get_workers(properties.get("worker")),
                           factory=properties['factory']))

# Push the status of builds to GitHub
c["services"] = [
    reporters.GitHubStatusPush(
        token=config.GITHUB_TOKEN,
        context=util.Interpolate("buildbot/%(prop:buildername)s"),
        startDescription="Started",
        endDescription="Done",
        verbose=True)
]
# Will be useful for implementing build notifications in the future
from buildbot.plugins import schedulers

from maxscale.config import constants
from . import properties

BUILD_ALL_PROPERTIES = [properties.buildBoxCheckboxContainer()] + BUILD_PROPERTIES[1:]

MANUAL_SCHEDULER = schedulers.ForceScheduler(
    name="build_all",
    builderNames=["build_all"],
    buttonName="Build all",
    codebases=properties.codebaseParameter(),
    properties=BUILD_ALL_PROPERTIES)

TRIGGERABLE_SCHEDULER = schedulers.Triggerable(
    name="build_all_triggerable",
    builderNames=["build_all"],
    properties=properties.extractDefaultValues(
        [properties.buildBoxCheckboxContainer()]))

SCHEDULERS = [MANUAL_SCHEDULER, TRIGGERABLE_SCHEDULER]

# Add schedulers for every active branch to be built every night
# The list of branches is defined by constants.NIGHTLY_SCHEDS
# (see maxscale/config/constants.py)
i = 0
for branch in constants.NIGHTLY_SCHEDS:
    nightlyProperties = properties.extractDefaultValues(BUILD_ALL_PROPERTIES)
    nightlyProperties['target'] = branch
    nightlyProperties['host'] = "bb-host"
    nightlyProperties['owners'] = constants.NIGHTLY_MAIL_LIST
    properties.test_set(),
    properties.ci_url(),
    properties.smoke_tests(),
    properties.big_number_of_vms(),
    properties.backend_use_ssl(),
    properties.use_snapshots(),
    properties.use_valgrind(),
    properties.use_callgrind(),
    properties.do_not_revert_virtual_machines(),
    properties.test_template(),
    properties.configuration_to_clone(),
    properties.host(),
    properties.upload_server(),
]

TRIGGERABLE_SCHEDULER = schedulers.Triggerable(
    name="run_test",
    builderNames=["run_test"],
    codebases=constants.MAXSCALE_CODEBASE,
)

MANUAL_SCHEDULER = schedulers.ForceScheduler(
    name="run_test_force",
    buttonName="Run tests",
    builderNames=["run_test"],
    codebases=properties.codebaseParameter(),
    properties=RUN_TEST_PROPERTIES
)

SCHEDULERS = [TRIGGERABLE_SCHEDULER, MANUAL_SCHEDULER]
def masterConfig(num_concurrent, extra_steps=None):
    if extra_steps is None:
        extra_steps = []
    c = {}

    c['schedulers'] = [
        schedulers.ForceScheduler(name="force", builderNames=["testy"])
    ]
    triggereables = []
    for i in range(num_concurrent):
        c['schedulers'].append(
            schedulers.Triggerable(name="trigsched" + str(i),
                                   builderNames=["build"]))
        triggereables.append("trigsched" + str(i))

    f = BuildFactory()
    f.addStep(steps.ShellCommand(command='echo hello'))
    f.addStep(
        steps.Trigger(schedulerNames=triggereables,
                      waitForFinish=True,
                      updateSourceStamp=True))
    f.addStep(steps.ShellCommand(command='echo world'))
    f2 = BuildFactory()
    f2.addStep(steps.ShellCommand(command='echo ola'))
    for step in extra_steps:
        f2.addStep(step)

    c['builders'] = [
        BuilderConfig(name="testy", workernames=["upcloud0"], factory=f),
        BuilderConfig(
            name="build",
            workernames=["upcloud" + str(i) for i in range(num_concurrent)],
            factory=f2)
    ]
    creds = os.environ.get('BBTEST_UPCLOUD_CREDS')
    if creds is not None:
        user, password = creds.split(":")
    else:
        # raising a plain string is invalid Python 3; raise a real exception instead
        raise RuntimeError("Cannot run this test without credentials")
    masterFQDN = os.environ.get('masterFQDN', 'localhost')
    c['workers'] = []
    for i in range(num_concurrent):
        upcloud_host_config = {
            "user_data": """
#!/usr/bin/env bash
groupadd -g 999 buildbot
useradd -u 999 -g buildbot -s /bin/bash -d /buildworker -m buildbot
passwd -l buildbot
apt update
apt install -y git python3 python3-dev python3-pip sudo gnupg curl
pip3 install buildbot-worker service_identity
chown -R buildbot:buildbot /buildworker
cat <<EOF >> /etc/hosts
127.0.1.1 upcloud{}
EOF
cat <<EOF >/etc/sudoers.d/buildbot
buildbot ALL=(ALL) NOPASSWD:ALL
EOF
sudo -H -u buildbot bash -c "buildbot-worker create-worker /buildworker {} upcloud{} pass"
sudo -H -u buildbot bash -c "buildbot-worker start /buildworker"
""".format(i, masterFQDN, i)
        }
        c['workers'].append(
            UpcloudLatentWorker('upcloud' + str(i),
                                api_username=user,
                                api_password=password,
                                image='Debian GNU/Linux 9 (Stretch)',
                                hostconfig=upcloud_host_config,
                                masterFQDN=masterFQDN))
    # uncomment for debugging when things look locked
    # c['www'] = {'port': 8080}
    # if masterFQDN is forced (proxy case), use the default port 9989;
    # otherwise let the master pick a free port
    if masterFQDN is not None:
        c['protocols'] = {"pb": {"port": "tcp:9989"}}
    else:
        c['protocols'] = {"pb": {"port": "tcp:0"}}
    return c
from buildbot.plugins import schedulers

from maxscale.schedulers.create_full_repo import BUILD_PROPERTIES
from . import properties

BUILD_PROPERTIES = [properties.buildBoxCheckboxContainer()] + BUILD_PROPERTIES[1:]

MANUAL_SCHEDULER = schedulers.ForceScheduler(
    name="create_full_repo_all",
    builderNames=["create_full_repo_all"],
    buttonName="Create full repo all",
    codebases=properties.codebaseParameter(),
    properties=BUILD_PROPERTIES
)

TRIGGERABLE_SCHEDULER = schedulers.Triggerable(
    name="create_full_repo_all_triggerable",
    builderNames=["create_full_repo_all"],
    properties=properties.extractDefaultValues([
        properties.buildBoxCheckboxContainer(),
        properties.keep_virtual_machines(),
        properties.ci_url(),
    ])
)

SCHEDULERS = [MANUAL_SCHEDULER, TRIGGERABLE_SCHEDULER]
    properties.database_version(),
    properties.host("bb-host"),
    properties.maxscale_threads(),
    properties.sysbench_threads(),
    properties.perf_cnf_template(),
    properties.perf_port(),
    properties.perf_runtime(),
    properties.use_callgrind(),
]

MANUAL_SCHEDULER = schedulers.ForceScheduler(
    name="run_performance_test",
    buttonName="Force build",
    builderNames=["run_performance_test"],
    properties=PERFORMACE_TEST_PROPERTIES)

# PERIODIC_SCHEDULER = schedulers.Periodic(
#     name="run_performance_test_half_hour",
#     builderNames=["run_performance_test"],
#     periodicBuildTimer=10*60,
#     properties=properties.extractDefaultValues(PERFORMACE_TEST_PROPERTIES)
# )

REPOSITORY_SCHEDULER = schedulers.Triggerable(
    name="run_performance_test_trigger",
    builderNames=["run_performance_test"],
)

# SCHEDULERS = [MANUAL_SCHEDULER, PERIODIC_SCHEDULER, REPOSITORY_SCHEDULER]
SCHEDULERS = [MANUAL_SCHEDULER, REPOSITORY_SCHEDULER]
    if worker_pool is None:
        return ALL_WORKERS_NAMES
    return list(config.WORKERS[worker_pool].keys())


# Create schedulers and builders for builds
c["builders"] = []
c["schedulers"] = [
    schedulers.SingleBranchScheduler(
        name=config.TRIGGER,
        change_filter=util.ChangeFilter(),
        treeStableTimer=config.BUILDBOT_TREE_STABLE_TIMER,
        builderNames=[config.TRIGGER])
]
for builder_name, properties in config.FLOW.get_prepared_builders().items():
    if properties.get('add_triggerable_sheduler', True):
        c["schedulers"].append(
            schedulers.Triggerable(name=builder_name,
                                   builderNames=[builder_name]))
    c["builders"].append(
        util.BuilderConfig(name=builder_name,
                           workernames=get_workers(properties.get("worker")),
                           factory=properties['factory']))


class GitHubStatusPushFilter(reporters.GitHubStatusPush):
    """
    This class extends the filtering options of reporters.GitHubStatusPush
    """

    def filterBuilds(self, build):
        # All builds have the basic 'repository' property
        repository = bb.utils.get_repository_name_by_url(
            build['properties']['repository'][0])
        # Statuses for AUTO_UPDATED_REPOSITORIES are not sent, so as not to affect
        # review requests in these repositories
        # TODO: remove workaround for libva notifications
def build_config() -> dict[str, Any]:
    c = {}
    c["buildbotNetUsageData"] = None

    # configure a janitor which will delete all logs older than one month,
    # and will run on Sundays at noon
    c['configurators'] = [util.JanitorConfigurator(
        logHorizon=timedelta(weeks=4),
        hour=12,
        dayOfWeek=6
    )]

    c["schedulers"] = [
        # build all pushes to master
        schedulers.SingleBranchScheduler(
            name="master",
            change_filter=util.ChangeFilter(branch="master"),
            builderNames=["nix-eval"],
        ),
        # build all pull requests
        schedulers.SingleBranchScheduler(
            name="prs",
            change_filter=util.ChangeFilter(category="pull"),
            builderNames=["nix-eval"],
        ),
        # this is triggered from `nix-eval`
        schedulers.Triggerable(
            name="nix-build",
            builderNames=["nix-build"],
        ),
        # allow manually triggering a nix-build
        schedulers.ForceScheduler(name="force", builderNames=["nix-eval"]),
        # allow manually updating flakes
        schedulers.ForceScheduler(
            name="update-flake",
            builderNames=["nix-update-flake"],
            buttonName="Update flakes",
        ),
        # updates flakes once a week
        schedulers.NightlyTriggerable(
            name="update-flake-weekly",
            builderNames=["nix-update-flake"],
            hour=3,
            minute=0,
            dayOfWeek=6,
        ),
    ]

    github_api_token = read_secret_file("github-token")
    c["services"] = [
        reporters.GitHubStatusPush(
            token=github_api_token,
            # Since we dynamically create build steps, we use
            # `virtual_builder_name` in the web interface so that we can
            # distinguish what has been built
            context=Interpolate("buildbot/%(prop:virtual_builder_name)s"),
        ),
        # Notify on IRC
        NotifyFailedBuilds("irc://buildbot|[email protected]:6667/#xxx"),
    ]

    # Shape of this file:
    # [ { "name": "<worker-name>", "pass": "******", "cores": "<cpu-cores>" } ]
    worker_config = json.loads(read_secret_file("github-workers"))

    credentials = os.environ.get("CREDENTIALS_DIRECTORY", ".")
    enable_cachix = os.path.isfile(os.path.join(credentials, "cachix-token"))

    systemd_secrets = secrets.SecretInAFile(dirname=credentials)
    c["secretsProviders"] = [systemd_secrets]

    c["workers"] = []
    worker_names = []
    for item in worker_config:
        cores = item.get("cores", 0)
        for i in range(cores):
            worker_name = f"{item['name']}-{i}"
            c["workers"].append(worker.Worker(worker_name, item["pass"]))
            worker_names.append(worker_name)

    c["builders"] = [
        # Since all workers run on the same machine, we only assign one of them
        # to do the evaluation. This should prevent excessive memory usage.
        nix_eval_config([worker_names[0]], github_token_secret="github-token"),
        nix_build_config(worker_names, enable_cachix),
        nix_update_flake_config(
            worker_names,
            "TUM-DSE/doctor-cluster-config",
            github_token_secret="github-token",
        ),
    ]

    c["www"] = {
        "port": int(os.environ.get("PORT", "1810")),
        "auth": util.GitHubAuth(
            os.environ.get("GITHUB_OAUTH_ID"),
            read_secret_file("github-oauth-secret")
        ),
        "authz": util.Authz(
            roleMatchers=[
                util.RolesFromGroups(groupPrefix="")  # so we can match on TUM-DSE
            ],
            allowRules=[
                util.AnyEndpointMatcher(role="TUM-DSE", defaultDeny=False),
                util.AnyControlEndpointMatcher(role="TUM-DSE"),
            ],
        ),
        "plugins": dict(waterfall_view={}, console_view={}, grid_view={}),
        "change_hook_dialects": dict(
            github={
                "secret": read_secret_file("github-webhook-secret"),
                "strict": True,
                "token": github_api_token,
                "github_property_whitelist": "*",
            }
        ),
    }

    c["db"] = {"db_url": os.environ.get("DB_URL", "sqlite:///state.sqlite")}
    c["protocols"] = {"pb": {"port": "tcp:9989:interface=\\:\\:"}}
    c["buildbotURL"] = "https://buildbot.dse.in.tum.de/"
    return c
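# Hedged sketch, not taken from the module above: inside nix_eval_config() the
# evaluation step would typically end by firing the "nix-build" Triggerable,
# passing what to build via set_properties. The property names and values here
# are assumptions made for illustration.
from buildbot.plugins import steps

trigger_builds = steps.Trigger(
    schedulerNames=["nix-build"],
    waitForFinish=True,
    set_properties={
        "attr": "checks.x86_64-linux.example",      # hypothetical attribute name
        "drv_path": "/nix/store/...-example.drv",   # hypothetical derivation path
    },
)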
from buildbot.plugins import schedulers

from maxscale.config import constants
from . import properties

BUILD_PROPERTIES = [
    properties.build_box(),
    properties.build_target(),
    properties.ci_url(),
    properties.host(),
    properties.keep_virtual_machines(),
    properties.major_ver()
]

TRIGGERABLE_SCHEDULER = schedulers.Triggerable(
    name="create_full_repo",
    builderNames=["create_full_repo"]
)

MANUAL_SCHEDULER = schedulers.ForceScheduler(
    name="create_full_repo_force",
    buttonName="Force build",
    builderNames=["create_full_repo"],
    codebases=properties.codebaseParameter(),
    properties=BUILD_PROPERTIES
)

SCHEDULERS = [TRIGGERABLE_SCHEDULER, MANUAL_SCHEDULER]
force_builder_names = ["build", *builder_names]

####### SCHEDULERS

# Configure the Schedulers, which decide how to react to incoming changes.
# In this case, just kick off a 'runtests' build.
build_scheduler = schedulers.SingleBranchScheduler(
    name="all",
    change_filter=util.ChangeFilter(repository="https://github.com/scummvm/scummvm"),
    treeStableTimer=5,
    fileIsImportant=file_is_director_related,
    builderNames=["build"],
)

director_scheduler = schedulers.Triggerable(
    name="Director Tests",
    builderNames=builder_names
)

force_scheduler = schedulers.ForceScheduler(
    name="force",
    builderNames=force_builder_names
)

c["schedulers"] = []
c["schedulers"].append(build_scheduler)
c["schedulers"].append(director_scheduler)
c["schedulers"].append(force_scheduler)

####### BUILDERS

# The 'builders' list defines the Builders, which tell Buildbot how to perform a build:
# what steps, and which workers can execute them. Note that any particular build will
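# Hedged sketch, not part of the configuration above: the "build" builder's
# factory would typically finish by firing the "Director Tests" Triggerable so
# the per-test builders run against the artifacts it just produced. The factory
# name is hypothetical; only the scheduler name comes from the snippet above.
from buildbot.plugins import steps, util

build_factory = util.BuildFactory()
build_factory.addStep(steps.Trigger(schedulerNames=["Director Tests"],
                                    waitForFinish=False,
                                    updateSourceStamp=True))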
from buildbot.plugins import schedulers

from . import common
from . import properties

REPOSITORY_SCHEDULER = schedulers.Triggerable(
    name="run_test_snapshot",
    builderNames=["run_test_snapshot"],
)

MANUAL_SCHEDULER = schedulers.ForceScheduler(
    name="run_test_snapshot_force",
    buttonName="Run test snapshots",
    builderNames=["run_test_snapshot"],
    codebases=properties.codebaseParameter(),
    properties=[
        properties.build_name(),
        properties.snapshot_name(),
        properties.build_target(),
        properties.build_box(),
        properties.backend_database(),
        properties.database_version(),
        properties.test_set(),
        properties.ci_url(),
        properties.smoke_tests(),
        properties.big_number_of_vms(),
        properties.backend_use_ssl(),
        properties.test_template(),
        properties.test_branch(),
        properties.host(),
        properties.use_valgrind(),
    ])