def create_schedulers(repos_and_packages):
    """Build the full scheduler list for the given repositories.

    For each repository this creates:
      * a DependencyTreeScheduler reacting to pushes on master,
      * one ForceScheduler per artifact,
      * a SingleBranchScheduler that rebuilds when the artifactsrc yml changes,
    plus a single ForceScheduler for the 'build-artifactsrc-yml' builder.

    Fix: the original created schedulers named 'dependency-tree',
    'artifactsrc-yml' and 'artifactsrc-yml-force' once *per repository*.
    Buildbot requires globally unique scheduler names, so the config was
    rejected as soon as more than one repository was configured.  Names are
    now suffixed with the repository index when several repositories exist
    (the single-repository names are unchanged for backward compatibility),
    and the repository-independent force scheduler is created exactly once.
    """
    multiple = len(repos_and_packages) > 1
    tree_schedulers = []
    yml_schedulers = []
    for idx, repo in enumerate(repos_and_packages):
        # Keep the legacy names in the single-repo case.
        suffix = '-%d' % idx if multiple else ''
        tree_schedulers.append(DependencyTreeScheduler(
            'dependency-tree' + suffix,
            build_dependency_tree=create_build_dependency_tree(repo['artifacts']),
            builds_from_change=builds_from_change_func(repo['artifacts']),
            change_filter=util.ChangeFilter(
                repository=repo['repo'], branch='master')))
        yml_schedulers.append(schedulers.SingleBranchScheduler(
            'artifactsrc-yml' + suffix,
            fileIsImportant=_artifactsrc_needs_rebuilding,
            onlyImportant=True,
            change_filter=util.ChangeFilter(
                repository=repo['repo'], branch='master'),
            builderNames=['build-artifactsrc-yml']))
    # One force scheduler per artifact, across all repositories.
    force_schedulers = [
        schedulers.ForceScheduler(
            artifact['artifact'] + '-force',
            builderNames=[artifact['artifact']],
            buttonName='Force')
        for repo in repos_and_packages
        for artifact in repo['artifacts']
    ]
    # The artifactsrc force scheduler does not depend on the repository:
    # one instance suffices for the shared builder.
    yml_schedulers.append(schedulers.ForceScheduler(
        'artifactsrc-yml-force',
        builderNames=['build-artifactsrc-yml'],
        buttonName='Force'))
    return tree_schedulers + force_schedulers + yml_schedulers
def add_all_schedulers(cfg):
    """Register a ForceScheduler and a per-commit SingleBranchScheduler for
    every (service, branch) combination, appending them to cfg['schedulers'].
    """
    for service in SERVICES:
        for branch_name in get_all_possible_branch_names():
            realm = BRANCH_TO_REALM_MAPPING[branch_name]
            builder = f"{service}_{realm}"
            # The force scheduler pins repository/branch/project so a manual
            # build targets exactly this service on this branch.
            codebase = util.CodebaseParameter(
                "",
                label="Repository",
                branch=util.FixedParameter(name="branch", default=branch_name),
                revision=util.StringParameter(name="revision", default=""),
                repository=util.FixedParameter(
                    name="repository", default=SERVICES[service]['giturl']),
                project=util.FixedParameter(name="project", default=service),
            )
            forced = schedulers.ForceScheduler(
                name=f"force-{service}-{realm}",
                codebases=[codebase],
                builderNames=[builder],
            )
            # treeStableTimer=0 starts the build on every commit immediately.
            on_commit = schedulers.SingleBranchScheduler(
                name=f"commit-{service}-{realm}",
                builderNames=[builder],
                treeStableTimer=0,
                change_filter=util.ChangeFilter(
                    branch=branch_name, project=service),
            )
            cfg['schedulers'].extend((forced, on_commit))
def get_schedulers():
    """Return the two schedulers for the 'runtests' builder: one reacting
    to commits on master and one for manually forced builds."""
    on_commit = schedulers.SingleBranchScheduler(
        name="all",
        change_filter=util.ChangeFilter(branch='master'),
        treeStableTimer=None,
        builderNames=["runtests"])
    manual = schedulers.ForceScheduler(
        name="force",
        builderNames=["runtests"])
    return [on_commit, manual]
def getLntSchedulers(filter_branch="master",
                     repourl="https://github.com/llvm/llvm-lnt.git",
                     **kwargs):
    """Return the schedulers for the LNT documentation builders.

    Fix: the original body referenced `kwargs`, `filter_branch`, `_branch`
    and `_repourl` without defining any of them in the function, raising
    NameError unless identically named module globals happened to exist.
    They are now keyword parameters with defaults, which keeps the existing
    no-argument call sites working.

    Keyword Args:
        filter_branch: branch whose merges trigger the build
            (TODO(review): confirm default against the deployment).
        repourl: LNT git repository URL used by the force scheduler
            (TODO(review): confirm).
        treeStableTimer: optional quiet period forwarded to the
            SingleBranchScheduler.
    """
    project = "lnt"
    lnt_builders = [
        "publish-lnt-sphinx-docs",
    ]
    return [
        schedulers.SingleBranchScheduler(
            name="lnt-scheduler",
            treeStableTimer=kwargs.get('treeStableTimer', None),
            reason="Merge to LNT github {} branch".format(filter_branch),
            builderNames=lnt_builders,
            # NOTE(review): project_fn is documented as a callable predicate;
            # passing the string "lnt" looks suspicious -- confirm intent.
            change_filter=util.ChangeFilter(project_fn=project,
                                            branch=filter_branch)),
        schedulers.ForceScheduler(
            name="force-build-scheduler",
            label="Force Build",
            buttonName="Force Build",
            reason=util.ChoiceStringParameter(
                name="reason",
                label="reason:",
                required=True,
                choices=[
                    "Build a particular revision",
                    "Force clean build",
                    "Narrow down blamelist",
                ],
                default="Build a particular revision"),
            builderNames=lnt_builders,
            codebases=[
                util.CodebaseParameter(
                    codebase="",
                    branch=util.FixedParameter(name="branch",
                                               default=filter_branch),
                    revision=util.StringParameter(name="revision",
                                                  label="revision:",
                                                  size=45,
                                                  default=''),
                    repository=util.FixedParameter(name="repository",
                                                   default=repourl),
                    project=util.FixedParameter(name="project",
                                                default=project))
            ],
            properties=[
                util.BooleanParameter(
                    name="clean",
                    label="Clean source code and build directory",
                    default=False),
                util.BooleanParameter(name="clean_obj",
                                      label="Clean build directory",
                                      default=False)
            ]),
    ]
def schedulers_all_projects(c):
    """Append one SingleBranchScheduler per (repo, builder) pair of every
    configured project to c['schedulers']."""
    for project in projects:
        project_name = project["name"]  # read for parity with callers; unused below
        builder_list = project["builders"]
        for idx, repo in enumerate(project["repos"]):
            builder_name = builder_list[idx]
            # source change hasn't project information, so match on the
            # repository URL and branch instead.
            repo_filter = util.ChangeFilter(
                repository=repo["url"], branch=repo["branch"])
            c['schedulers'].append(
                schedulers.SingleBranchScheduler(
                    name=builder_name,
                    change_filter=repo_filter,
                    treeStableTimer=None,
                    builderNames=[builder_name]))
def getGlobalSchedulers(self, platforms):
    """Return the repository-wide schedulers for this component: the fetch
    scheduler, the optional nightly scheduler, the global build trigger and
    (when self.enable_force is set) the force schedulers.

    `platforms` is filtered through p.canBuild(self) to determine the
    compiling builders.  Returns a list of scheduler objects.
    """
    ret = list()
    # Watch both the base and the git URL on our configured branch.
    change_filter = util.ChangeFilter(
        repository=[self.baseurl, self.giturl], branch=self.branch)
    # Fetch scheduler (triggered by event source)
    ret.append(
        schedulers.SingleBranchScheduler(
            name=self.names['sch-sb'],
            change_filter=change_filter,
            # Wait for 5 minutes before starting build
            treeStableTimer=300,
            builderNames=[self.names['bld-fetch']]))
    # Nightly scheduler (started by time)
    # It's triggered after regular builds to take note of the last fetched source
    # Note that build is not started by trigger
    # We cleanup after it because we just generated a new package
    if self.nightly is not None:
        ret.append(
            schedulers.NightlyTriggerable(
                name=self.names['sch-nightly'],
                branch=self.branch,
                builderNames=[
                    self.names['bld-nightly'],
                    self.names['bld-clean']
                ],
                # self.nightly is indexed as (hour, minute) here -- assumed
                # to be a two-element sequence; confirm at the call sites.
                hour=self.nightly[0],
                minute=self.nightly[1],
                onlyIfChanged=True))
    # All compiling builders
    # NOTE(review): 'bld-platform' is invoked with a *generator* of
    # platforms rather than once per platform; if it maps one platform to
    # one builder name this should likely be a list comprehension
    # [self.names['bld-platform'](p) for p in ...] -- confirm before changing.
    comp_builders = list(
        self.names['bld-platform'](p for p in platforms if p.canBuild(self)))
    # Global build scheduler (triggered by fetch build and nightly build)
    ret.append(
        schedulers.Triggerable(
            name=self.names['sch-build'],
            builderNames=comp_builders))
    # Force schedulers
    if self.enable_force:
        # Manual fetch with optional clean/package flags.
        ret.append(
            schedulers.ForceScheduler(
                name=self.names['sch-force-id-fetch'],
                buttonName=self.names['sch-force-name-fetch'],
                label=self.names['sch-force-name-fetch'],
                reason=util.StringParameter(name="reason",
                                            label="Reason:",
                                            required=True,
                                            size=80),
                builderNames=[self.names['bld-fetch']],
                codebases=[util.CodebaseParameter(codebase='', hide=True)],
                properties=[
                    util.BooleanParameter(name="clean",
                                          label="Clean",
                                          default=False),
                    util.BooleanParameter(name="package",
                                          label="Package",
                                          default=False),
                ]))
        # Manual build on all compiling builders.
        ret.append(
            schedulers.ForceScheduler(
                name=self.names['sch-force-id-build'],
                buttonName=self.names['sch-force-name-build'],
                label=self.names['sch-force-name-build'],
                reason=util.StringParameter(name="reason",
                                            label="Reason:",
                                            required=True,
                                            size=80),
                builderNames=comp_builders,
                codebases=[util.CodebaseParameter(codebase='', hide=True)],
                properties=[
                    util.BooleanParameter(name="clean",
                                          label="Clean",
                                          default=False),
                    util.BooleanParameter(name="package",
                                          label="Package",
                                          default=False),
                ]))
        # Manual cleanup; no reason required, hence the hidden field.
        ret.append(
            schedulers.ForceScheduler(
                name=self.names['sch-force-id-clean'],
                buttonName=self.names['sch-force-name-clean'],
                label=self.names['sch-force-name-clean'],
                reason=util.StringParameter(name="reason", hide=True),
                builderNames=[self.names['bld-clean']],
                codebases=[util.CodebaseParameter(codebase='', hide=True)],
                properties=[
                    util.BooleanParameter(name="dry_run",
                                          label="Dry run",
                                          default=False),
                ]))
    return ret
# Web UI title link and the externally visible base URL of this master.
c["titleURL"] = config.REPO_URL
c["buildbotURL"] = config.BUILDBOT_URL


def get_workers(worker_pool):
    """Return the worker names of `worker_pool`, or all workers when None."""
    if worker_pool is None:
        return ALL_WORKERS_NAMES
    return list(config.WORKERS[worker_pool].keys())


# Create schedulers and builders for builds
c["builders"] = []
c["schedulers"] = [
    # Entry point: react to changes in the "mediasdk" category.
    schedulers.SingleBranchScheduler(
        name=config.TRIGGER,
        change_filter=util.ChangeFilter(category="mediasdk"),
        treeStableTimer=config.BUILDBOT_TREE_STABLE_TIMER,
        builderNames=[config.TRIGGER])
]
# Every flow builder gets a BuilderConfig and, unless it opts out, a
# Triggerable scheduler of the same name.
for builder_name, properties in config.FLOW.get_prepared_builders().items():
    # 'sheduler' is the pre-existing key spelling used by the flow config.
    if properties.get('add_triggerable_sheduler', True):
        c["schedulers"].append(
            schedulers.Triggerable(name=builder_name,
                                   builderNames=[builder_name]))
    c["builders"].append(
        util.BuilderConfig(name=builder_name,
                           workernames=get_workers(properties.get("worker")),
                           factory=properties['factory']))
# Push status of build to the Github
def build_config() -> dict[str, Any]:
    """Assemble and return the complete buildbot master configuration dict."""
    c = {}
    # Opt out of buildbot's phone-home usage reporting.
    c["buildbotNetUsageData"] = None
    # configure a janitor which will delete all logs older than one month,
    # and will run on sundays at noon
    c['configurators'] = [util.JanitorConfigurator(
        logHorizon=timedelta(weeks=4),
        hour=12,
        dayOfWeek=6
    )]
    c["schedulers"] = [
        # build all pushes to master
        schedulers.SingleBranchScheduler(
            name="master",
            change_filter=util.ChangeFilter(branch="master"),
            builderNames=["nix-eval"],
        ),
        # build all pull requests
        schedulers.SingleBranchScheduler(
            name="prs",
            change_filter=util.ChangeFilter(category="pull"),
            builderNames=["nix-eval"],
        ),
        # this is triggered from `nix-eval`
        schedulers.Triggerable(
            name="nix-build",
            builderNames=["nix-build"],
        ),
        # allow to manually trigger a nix-build
        schedulers.ForceScheduler(name="force", builderNames=["nix-eval"]),
        # allow to manually update flakes
        schedulers.ForceScheduler(
            name="update-flake",
            builderNames=["nix-update-flake"],
            buttonName="Update flakes",
        ),
        # updates flakes once a week
        schedulers.NightlyTriggerable(
            name="update-flake-weekly",
            builderNames=["nix-update-flake"],
            hour=3,
            minute=0,
            dayOfWeek=6,
        ),
    ]
    github_api_token = read_secret_file("github-token")
    c["services"] = [
        reporters.GitHubStatusPush(
            token=github_api_token,
            # Since we dynamically create build steps,
            # we use `virtual_builder_name` in the webinterface
            # so that we can distinguish what has been built
            context=Interpolate("buildbot/%(prop:virtual_builder_name)s"),
        ),
        # Notify on irc
        NotifyFailedBuilds("irc://buildbot|[email protected]:6667/#xxx"),
    ]
    # Shape of this file:
    # [ { "name": "<worker-name>", "pass": "******", "cores": "<cpu-cores>" } ]
    worker_config = json.loads(read_secret_file("github-workers"))
    credentials = os.environ.get("CREDENTIALS_DIRECTORY", ".")
    enable_cachix = os.path.isfile(os.path.join(credentials, "cachix-token"))
    systemd_secrets = secrets.SecretInAFile(dirname=credentials)
    c["secretsProviders"] = [systemd_secrets]
    c["workers"] = []
    worker_names = []
    # A worker entry with N cores yields N single-core buildbot workers.
    for item in worker_config:
        cores = item.get("cores", 0)
        for i in range(cores):
            worker_name = f"{item['name']}-{i}"
            c["workers"].append(worker.Worker(worker_name, item["pass"]))
            worker_names.append(worker_name)
    c["builders"] = [
        # Since all workers run on the same machine, we only assign one of them
        # to do the evaluation.  This should prevent excessive memory usage.
        nix_eval_config([worker_names[0]], github_token_secret="github-token"),
        nix_build_config(worker_names, enable_cachix),
        nix_update_flake_config(
            worker_names,
            "TUM-DSE/doctor-cluster-config",
            github_token_secret="github-token",
        ),
    ]
    c["www"] = {
        "port": int(os.environ.get("PORT", "1810")),
        "auth": util.GitHubAuth(
            os.environ.get("GITHUB_OAUTH_ID"),
            read_secret_file("github-oauth-secret")
        ),
        "authz": util.Authz(
            roleMatchers=[
                util.RolesFromGroups(groupPrefix="")  # so we can match on TUM-DSE
            ],
            allowRules=[
                util.AnyEndpointMatcher(role="TUM-DSE", defaultDeny=False),
                util.AnyControlEndpointMatcher(role="TUM-DSE"),
            ],
        ),
        "plugins": dict(waterfall_view={}, console_view={}, grid_view={}),
        "change_hook_dialects": dict(
            github={
                "secret": read_secret_file("github-webhook-secret"),
                "strict": True,
                "token": github_api_token,
                "github_property_whitelist": "*",
            }
        ),
    }
    c["db"] = {"db_url": os.environ.get("DB_URL", "sqlite:///state.sqlite")}
    # Listen on IPv6 any-address; backslashes escape the colons for twisted.
    c["protocols"] = {"pb": {"port": "tcp:9989:interface=\\:\\:"}}
    c["buildbotURL"] = "https://buildbot.dse.in.tum.de/"
    return c
def getSingleBranchSchedulers(builders, explicitly_set_schedulers=None, **kwargs):
    """
    I'm taking over all of not yet assigned builders with the declared
    source code dependencies, and automatically generate a minimum set of
    SingleBranchSchedulers to handle all the declared source code
    dependency combinations.
    """
    builders_with_explicit_schedulers = set()
    if explicitly_set_schedulers:
        # TODO: Make a list of builder names with already set schedulers.
        # builders_with_explicit_schedulers.add(builder)
        pass
    # For the builders created with LLVMBuildFactory or similar,
    # we always use automatic schedulers,
    # unless schedulers already explicitly set.
    builders_with_automatic_schedulers = [
        builder
        for builder in builders
        if builder.name not in builders_with_explicit_schedulers
        if getattr(builder.factory, 'depends_on_projects', None)
    ]
    filter_branch = 'main'
    treeStableTimer = kwargs.get('treeStableTimer', None)
    automatic_schedulers = []
    # Do we have any to take care of?
    if builders_with_automatic_schedulers:
        # Let's reconcile first to get a unique set of dependencies.
        # We need a set of unique sets of dependent projects.
        set_of_dependencies = set([
            frozenset(getattr(b.factory, 'depends_on_projects'))
            for b in builders_with_automatic_schedulers
        ])
        for projects in set_of_dependencies:
            # All builders that declare exactly this dependency set share
            # one scheduler.
            sch_builders = [
                b.name
                for b in builders_with_automatic_schedulers
                if frozenset(getattr(b.factory,
                                     'depends_on_projects')) == projects
            ]
            automatic_scheduler_name = filter_branch + ":" + ",".join(
                sorted(projects))
            automatic_schedulers.append(
                schedulers.SingleBranchScheduler(
                    name=automatic_scheduler_name,
                    treeStableTimer=kwargs.get('treeStableTimer', None),
                    reason="Merge to github %s branch" % filter_branch,
                    builderNames=sch_builders,
                    change_filter=util.ChangeFilter(
                        # The default argument binds `projects` eagerly,
                        # avoiding the late-binding closure pitfall.
                        project_fn= \
                            lambda c, projects_of_interest=frozenset(projects):
                                isProjectOfInterest(c, projects_of_interest),
                        branch=filter_branch)
                )
            )
            log.msg(
                "Generated SingleBranchScheduler: { name='%s'"
                % automatic_scheduler_name,
                ", builderNames=", sch_builders,
                ", change_filter=", projects,
                " (branch: %s)" % filter_branch,
                ", treeStableTimer=%s" % treeStableTimer,
                "}")
    return automatic_schedulers
def ros_testbuild(c, job_name, url, branch, distro, arch, rosdistro, machines,
                  othermirror, keys, source=True, locks=[]):
    """Create a docker-based test-build job for a ROS repository.

    With source=True a GitPoller plus commit/nightly/force schedulers are
    registered; otherwise a GitHub pull-request poller and a single
    commit scheduler.  Either way a builder is added that checks the source
    out, builds it inside a docker container and collects the test results.

    NOTE(review): `locks=[]` is a mutable default argument; it is only
    passed through to BuilderConfig here, so it is benign, but `locks=None`
    with an in-body default would be safer.
    Returns the name of the job created.  `distro`, `arch`, `othermirror`
    and `keys` are unused in the visible body -- confirm against callers.
    """
    # Create a Job for Source
    if source:
        project_name = '_'.join([job_name, rosdistro, 'source_build'])
        c['change_source'].append(
            GitPoller(
                repourl=url,
                name=url,
                branch=branch,
                category=project_name,
                pollAtLaunch=True,
            )
        )
        c['schedulers'].append(
            schedulers.SingleBranchScheduler(
                name=project_name,
                builderNames=[project_name,],
                change_filter=util.ChangeFilter(category=project_name)
            )
        )
        c['schedulers'].append(
            schedulers.Nightly(
                name = project_name+'-nightly-master',
                codebases = {url:{'repository':url,'branch':'master'}},
                builderNames = [project_name,],
                hour=3,
                minute=0,
            )
        )
        c['schedulers'].append(
            schedulers.Nightly(
                name = project_name+'-nightly-develop',
                codebases = {url:{'repository':url,'branch':'develop'}},
                builderNames = [project_name,],
                hour=5,
                minute=0,
            )
        )
        c['schedulers'].append(
            schedulers.ForceScheduler(
                name=project_name+'-force',
                codebases = [util.CodebaseParameter("",
                    branch=util.ChoiceStringParameter(
                        name="branch",
                        # NOTE(review): "devel" here vs the 'develop' branch
                        # used by the nightly scheduler above -- confirm
                        # which branch name is intended.
                        choices=["master", "devel"],
                        default="master"),
                    repository=util.FixedParameter(name="repository", default=url),
                )],
                builderNames=[project_name,],
            )
        )
    else:
        # SSH-style URL "git@host:owner/name.git" -> (owner, name).
        r_owner, r_name = (url.split(':')[1])[:-4].split('/')
        project_name = '_'.join([job_name, rosdistro, 'pr_build'])
        c['change_source'].append(
            GitPRPoller(
                owner=r_owner,
                repo=r_name,
                category=project_name,
                branches=[branch],
                pollInterval=10*60,
                pollAtLaunch=True,
                # NOTE(review): "OathToken" is the stored secret's name;
                # looks like a typo for "OAuthToken" -- rename only together
                # with the secret itself.
                token=util.Secret("OathToken"),
                repository_type='ssh'
            )
        )
        c['schedulers'].append(
            schedulers.SingleBranchScheduler(
                name=project_name,
                builderNames=[project_name,],
                change_filter=util.ChangeFilter(category=project_name)
            )
        )
    # Directory which will be bind-mounted
    binddir = '/tmp/'+project_name
    dockerworkdir = '/tmp/test/'  # NOTE(review): unused in this function
    f = BuildFactory()
    # Remove any old crud in build/src folder
    f.addStep(
        ShellCommand(
            name='rm src',
            command=['rm', '-rf', 'build/src'],
            hideStepIf=success,
            workdir=Interpolate('%(prop:builddir)s')
        )
    )
    # Check out repository (to /build/src)
    f.addStep(
        Git(
            repourl=util.Property('repository', default=url),
            branch=util.Property('branch', default=branch),
            alwaysUseLatest=True,
            mode='full',
            workdir=Interpolate('%(prop:builddir)s/build/src')
        )
    )
    # Download testbuild_docker.py script from master
    # NOTE(review): the FileDownload steps below all reuse the same step name
    # job_name+'-grab-script'; distinct names would make the UI clearer.
    f.addStep(
        FileDownload(
            name=job_name+'-grab-script',
            mastersrc='scripts/testbuild_docker.py',
            workerdest=('testbuild_docker.py'),
            hideStepIf=success
        )
    )
    # Download Dockerfile_test script from master
    f.addStep(
        FileDownload(
            name=job_name+'-grab-script',
            mastersrc='docker_components/Dockerfile_test',
            workerdest=('Dockerfile_test'),
            hideStepIf=success
        )
    )
    # Download docker-compose.py script from master
    f.addStep(
        FileDownload(
            name=job_name+'-grab-script',
            mastersrc='docker_components/docker-compose-test.yaml',
            workerdest=('docker-compose-test.yaml'),
            hideStepIf=success
        )
    )
    f.addStep(
        FileDownload(
            name=job_name+'-grab-script',
            mastersrc='docker_components/rosdep_private.yaml',
            workerdest=('rosdep_private.yaml'),
            hideStepIf=success
        )
    )
    f.addStep(
        FileDownload(
            name=job_name+'-grab-script',
            mastersrc='scripts/docker-container.py',
            workerdest=('docker-container.py'),
            hideStepIf=success
        )
    )
    # create docker work environment
    f.addStep(
        ShellCommand(
            command=['python','docker-container.py', job_name],
            hideStepIf=success,
            workdir=Interpolate('%(prop:builddir)s/build/')
        )
    )
    # Make and run tests in a docker container
    f.addStep(
        ShellCommand(
            name=job_name+'-build',
            command=['docker', 'run',
                     '-v', 'ros-buildbot-docker_deb_repository:/home/package',
                     '--name='+project_name,
                     'scalable-env:'+job_name,
                     'python', '/tmp/build/testbuild_docker.py',
                     binddir, rosdistro],
            descriptionDone=['make and test', job_name]
        )
    )
    # Copy the test results out of the (now stopped) container.
    f.addStep(
        ShellCommand(
            name=job_name+'-copytestresults',
            command=['docker', 'cp', project_name + ':' +binddir + '/testresults', 'testresults'],
            logfiles={'tests': 'testresults'},
            descriptionDone=['testresults', job_name]
        )
    )
    f.addStep(
        ShellCommand(
            name=job_name+'-rm_container',
            command=['docker', 'rm', project_name],
            descriptionDone=['remove docker container', job_name]
        )
    )
    f.addStep(
        ShellCommand(
            name=job_name+'-rm_image',
            command=['docker', 'image', 'rm', 'scalable-env:'+job_name],
            descriptionDone=['remove docker image', job_name]
        )
    )
    c['builders'].append(
        BuilderConfig(
            name=project_name,
            workernames=machines,
            factory=f,
            locks=locks
        )
    )
    # return the name of the job created
    return project_name
def ros_sysbuild(c, job_name, rosdistro, machines, source=True, locks=None):
    """Create the docker-based system-test job for `job_name`/`rosdistro`.

    Registers a change-triggered scheduler and a nightly scheduler, then a
    builder that fetches a rosinstall file, builds a docker image via
    docker-compose, runs the system test in a container, uploads the image
    and removes the container and image again.

    Fixes over the original:
      * the first statement `job_name = sys_name` referenced the undefined
        name `sys_name` (a NameError at call time) and discarded the
        caller's argument -- removed, the parameter is used directly;
      * the `locks=[]` mutable default argument is replaced by `locks=None`
        with an in-body fallback (same behavior for all existing callers).

    `source` is unused in this body -- kept for signature compatibility.
    Returns the name of the job created.
    """
    if locks is None:
        locks = []
    # Create a Job for system test
    project_name = '_'.join([job_name, rosdistro, 'system_build'])
    c['schedulers'].append(
        schedulers.SingleBranchScheduler(
            name=project_name,
            builderNames=[
                project_name,
            ],
            change_filter=util.ChangeFilter(category=project_name)))
    c['schedulers'].append(
        schedulers.Nightly(
            name=project_name + '-nightly-master',
            builderNames=[
                project_name,
            ],
            hour=4,
            minute=0,
        ))
    # Directory which will be bind-mounted
    binddir = '/tmp'
    rosinstall_url = "https://raw.githubusercontent.com/ipa-rwu/scalable_system_setup/master/config/" + job_name + ".rosinstall"
    f = BuildFactory()
    # Remove any old crud in /tmp folder
    f.addStep(
        ShellCommand(name='rm src',
                     command=['rm', '-rf', 'scalable_ws'],
                     hideStepIf=success,
                     workdir=Interpolate('%(prop:builddir)s/build/')))
    # wstool init src .rosinstall
    f.addStep(
        ShellCommand(
            haltOnFailure=True,
            name='wstool_rosintall',
            command=['wstool', 'init', 'src', rosinstall_url],
            hideStepIf=success,
            workdir=Interpolate('%(prop:builddir)s/build/scalable_ws')))
    # Download Dockerfile_sys.py script from master
    f.addStep(
        FileDownload(name=job_name + '-grab-script',
                     mastersrc='docker_components/Dockerfile_sys',
                     workerdest=('Dockerfile_sys'),
                     hideStepIf=success))
    # Download docker-compose-sys.py script from master
    f.addStep(
        FileDownload(name=job_name + '-grab-script',
                     mastersrc='docker_components/docker-compose-sys.yaml',
                     workerdest=('docker-compose-sys.yaml'),
                     hideStepIf=success))
    f.addStep(
        FileDownload(name=job_name + '-grab-script',
                     mastersrc='docker_components/rosdep_private.yaml',
                     workerdest=('rosdep_private.yaml'),
                     hideStepIf=success))
    f.addStep(
        FileDownload(name=job_name + '-grab-script',
                     mastersrc='scripts/docker-container.py',
                     workerdest=('docker-container.py'),
                     hideStepIf=success))
    # NOTE(review): 'uplode' is the file's actual path on the master; fix it
    # there together with this reference, not here alone.
    f.addStep(
        FileDownload(name=job_name + '-grab-script',
                     mastersrc='shell/uplode_docker_image.sh',
                     workerdest=('upload_docker_image.sh'),
                     hideStepIf=success))
    f.addStep(
        FileDownload(name=job_name + '-grab-script',
                     mastersrc='scripts/unique_docker_sys.py',
                     workerdest=('unique_docker_sys.py'),
                     mode=0o755,
                     hideStepIf=success))
    f.addStep(
        FileDownload(name=job_name + '-grab-script',
                     mastersrc='shell/test_sys.sh',
                     workerdest=('test_sys.sh'),
                     mode=0o755,
                     hideStepIf=success))
    # reedit docker-compose-deb.yaml
    f.addStep(
        ShellCommand(haltOnFailure=True,
                     name=job_name + '-reedit-docker-compose',
                     command=[
                         'python', 'unique_docker_sys.py',
                         'docker-compose-sys.yaml',
                         Interpolate(job_name)
                     ],
                     workdir=Interpolate('%(prop:builddir)s/build/'),
                     descriptionDone=['reedit docker-compose', job_name]))
    # Build docker image for creating debian
    f.addStep(
        ShellCommand(haltOnFailure=True,
                     name=job_name + '-create_docker_image',
                     command=[
                         'docker-compose', '-f', 'docker-compose-sys.yaml',
                         'build'
                     ],
                     workdir=Interpolate('%(prop:builddir)s/build/'),
                     descriptionDone=['sourcedeb', job_name]))
    # Make and run tests in a docker container
    f.addStep(
        ShellCommand(name=job_name + '-test_system',
                     command=[
                         'docker', 'run', '--name=' + project_name,
                         'scalable-sys:' + job_name, 'bash',
                         '/usr/local/sbin/test_sys.sh'
                     ],
                     descriptionDone=['make and test', job_name]))
    f.addStep(
        ShellCommand(name=job_name + '-upload_docker_image',
                     command=[
                         'bash', 'upload_docker_image.sh', project_name,
                         binddir, job_name
                     ],
                     descriptionDone=['upload_docker_image', job_name],
                     workdir=Interpolate('%(prop:builddir)s/build/')))
    f.addStep(
        ShellCommand(name=job_name + '-rm_container',
                     command=['docker', 'rm', project_name],
                     descriptionDone=['remove docker container', job_name]))
    f.addStep(
        ShellCommand(
            name=job_name + '-rm_image',
            command=['docker', 'image', 'rm', 'scalable-sys:' + job_name],
            descriptionDone=['remove docker image', job_name]))
    c['builders'].append(
        BuilderConfig(name=project_name,
                      workernames=machines,
                      factory=f,
                      locks=locks))
    # return the name of the job created
    return project_name
from maxscale.config import constants

# Build-configuration parameters shared by the schedulers in this module.
COMMON_PROPERTIES = [
    properties.build_box(),
    properties.cmake_flags(),
    properties.build_experimental_features(),
    properties.backend_database(),
    properties.database_version(),
    properties.ci_url(),
    properties.backend_use_ssl(),
    properties.use_valgrind(),
    properties.appendTestRunId(),
]
# Start from the declared defaults, then override for the daily-test flow.
DEFAULT_PROPERTIES = properties.extractDefaultValues(COMMON_PROPERTIES)
DEFAULT_PROPERTIES['cmake_flags'] = constants.DEFAULT_DAILY_TEST_CMAKE_FLAGS
DEFAULT_PROPERTIES["targetInitMode"] = TargetInitOptions.GENERATE
DEFAULT_PROPERTIES["nameInitMode"] = NameInitOptions.GENERATE
DEFAULT_PROPERTIES["buildHosts"] = ["bb-host"]

# Build and test every push to a branch accepted by check_branch_fn.
CHANGE_SOURCE_SCHEDULER = schedulers.SingleBranchScheduler(
    name="build_and_test_on_push",
    change_filter=util.ChangeFilter(project=constants.MAXSCALE_PRODUCT,
                                    branch_fn=check_branch_fn),
    treeStableTimer=5,
    codebases=constants.MAXSCALE_CODEBASE,
    builderNames=["build_and_test_parall"],
    properties=DEFAULT_PROPERTIES)

# Exported scheduler list consumed by the master configuration.
SCHEDULERS = [CHANGE_SOURCE_SCHEDULER]
        # Continuation of a registration whose opening call starts before
        # this chunk.
        properties={
            "github_repo_owner": "rsyslog",
            "github_repo_name": "librelp",
        },
    ))
# Build every librelp pull request on the full builder matrix.
lc['schedulers'].append(SingleBranchScheduler(
    name="github_librelp",
    change_filter=filter.ChangeFilter(
        category="pull",
        project="rsyslog/librelp"),
    builderNames=[
        "librelp codecov"
        , "librelp freebsd"
        , "librelp build clang-9"
        , "librelp build gcc-8"]
))
# Manual trigger for the same builder set.
lc['schedulers'].append(ForceScheduler(
    name="forceall-librelp",
    builderNames=[
        "librelp codecov"
        , "librelp freebsd"
        , "librelp build clang-9"
        , "librelp build gcc-8"]
))
# build master commits so that CodeCov has references for all commits
lc['schedulers'].append(schedulers.SingleBranchScheduler(
    name='librelp-master-sched',
    change_filter=util.ChangeFilter(project='rsyslog/librelp',
                                    branch='master'),
    # otherwise a PR merge with n commits may start n builders
    treeStableTimer=30,
    builderNames=["librelp codecov"]
))
from buildbot.plugins import schedulers as bbsched
from buildbot.plugins import util
from metabbotcfg import builders

# Module-level scheduler list consumed by the master configuration.
# The local name `schedulers` shadows the plugin module, hence the
# `bbsched` alias above.
schedulers = [
    # Rebuild every builder on each push to master after a short settle time.
    bbsched.SingleBranchScheduler(
        name="all",
        branch='master',
        treeStableTimer=2,
        builderNames=[b['name'] for b in builders.builders]),
    # SingleBranchScheduler(
    #     name="release",
    #     branch='buildbot-0.8.9',
    #     treeStableTimer=10,
    #     builderNames=[b['name'] for b in builders.builders if b['name'] not in ('docs',)])),
    # Manual trigger for any builder.
    bbsched.ForceScheduler(
        name="force",
        codebases=[
            ''
        ],  # ?? {'repository': util.FixedParameter(name="repository", default='git://github.com/buildbot/buildbot.git')},
        # branch=util.ChoiceStringParameter(name="branch", default="master", choices=["master", "eight"]),
        # project=util.FixedParameter(name="project", default=""),
        properties=[],
        builderNames=[b['name'] for b in builders.builders])
]
def make_config(worker_name, worker_password, worker_port, git_repo, branch,
                poll_interval, builder_name, project_name, project_url,
                buildbot_url, buildbot_web_port, buildbot_from_email):
    """Return a complete single-worker buildbot master configuration dict.

    A GitPoller watches `branch` of `git_repo`; a SingleBranchScheduler
    (plus a manual ForceScheduler) drives one builder that checks the
    source out and runs `make check` under direnv.  Failures are reported
    by e-mail from `buildbot_from_email`.
    """
    return {
        'workers': [worker.Worker(worker_name, worker_password)],
        'protocols': {
            'pb': {
                'port': worker_port
            }
        },
        'change_source': [
            changes.GitPoller(
                git_repo,
                workdir='gitpoller-workdir',
                branch=branch,
                pollinterval=poll_interval,
            ),
        ],
        'schedulers': [
            schedulers.SingleBranchScheduler(
                name="all",
                change_filter=util.ChangeFilter(branch=branch),
                treeStableTimer=poll_interval,
                builderNames=[builder_name],
            ),
            schedulers.ForceScheduler(
                name="force",
                builderNames=[builder_name],
            ),
        ],
        'builders': [
            util.BuilderConfig(
                name=builder_name,
                workernames=[worker_name],
                factory=util.BuildFactory([
                    # check out the source
                    steps.Git(repourl=git_repo, mode='incremental'),
                    # run the tests
                    steps.ShellCommand(command=[
                        "direnv",
                        "allow",
                        ".",
                    ], ),
                    steps.ShellCommand(
                        command=[
                            "direnv",
                            "exec",
                            ".",
                            "make",
                            "check",
                        ],
                        env={
                            'NIX_REMOTE': 'daemon',
                        },
                        # If we have to rebuild our dependencies from scratch,
                        # we can go a long time without receiving output from
                        # the compiler. Default timeout is 20 mins, bump to
                        # 1hr.
                        timeout=60 * 60,
                    ),
                ]),
            ),
        ],
        'status': [],
        'title': project_name,
        'titleURL': project_url,
        'buildbotURL': buildbot_url,
        'www': {
            'port': buildbot_web_port,
            'plugins': {
                'waterfall_view': {},
            },
        },
        'db': {
            'db_url': "sqlite:///state.sqlite",
        },
        'services': [
            reporters.MailNotifier(
                fromaddr=buildbot_from_email,
                # TODO(jml): Currently sending mail for all builds. We should
                # send mail under fewer circumstances once we have a better
                # idea about what we actually want.
                #
                # http://buildbot.readthedocs.io/en/latest/manual/cfg-reporters.html?highlight=github#mailnotifier-arguments
                mode='all',
                # XXX: Temporarily hard-code until we can figure out how to
                # get these automatically from commits.
                extraRecipients=[
                    "*****@*****.**",
                    "*****@*****.**",
                ],
            )
        ],
    }
####### BUILDER NAMES lingo_builder = "lingotests (D4)" builder_names = [lingo_builder] builder_names.extend(target.builder_name for target in test_targets) force_builder_names = ["build", *builder_names] ####### SCHEDULERS # Configure the Schedulers, which decide how to react to incoming changes. In this # case, just kick off a 'runtests' build build_scheduler = schedulers.SingleBranchScheduler( name="all", change_filter=util.ChangeFilter(repository="https://github.com/scummvm/scummvm"), treeStableTimer=5, fileIsImportant=file_is_director_related, builderNames=["build"], ) director_scheduler = schedulers.Triggerable( name="Director Tests", builderNames=builder_names ) force_scheduler = schedulers.ForceScheduler( name="force", builderNames=force_builder_names ) c["schedulers"] = [] c["schedulers"].append(build_scheduler) c["schedulers"].append(director_scheduler)
MANUAL_SCHEDULER = schedulers.ForceScheduler( name="build_and_performance_test", label="Build and performance test", builderNames=["build_and_performance_test"], codebases=properties.codebaseParameter(), properties=BUILD_AND_PERFORMANCE_TEST_PROPERTIES) ON_PUSH_PROPERTIES = properties.extractDefaultValues( BUILD_AND_PERFORMANCE_TEST_PROPERTIES) ON_PUSH_PROPERTIES["targetInitMode"] = TargetInitOptions.SET_FROM_BRANCH CHANGE_SOURCE_SCHEDULER = schedulers.SingleBranchScheduler( name="build_and_performance_test_on_push", change_filter=util.ChangeFilter(project='maxscale', branch_fn=check_branch_fn_perf), treeStableTimer=60, codebases=constants.MAXSCALE_CODEBASE, builderNames=["build_and_performance_test"], properties=ON_PUSH_PROPERTIES) SCHEDULERS = [MANUAL_SCHEDULER, CHANGE_SOURCE_SCHEDULER] # Add schedulers for every active branch to be built every night # The list of branches is defined by constants.NIGHTLY_SCHEDS # (see maxscale/config/constants.py) BUILD_INTERVAL = 1 launchTime = 18 for branch in constants.NIGHTLY_SCHEDS: nightlyProperties = properties.extractDefaultValues( BUILD_AND_PERFORMANCE_TEST_PROPERTIES) nightlyProperties["name"] = "nightly_test_{}".format(branch)