def create_schedulers(repos_and_packages):
    """Build the full scheduler list for every repository entry.

    Three groups are concatenated:
      1. one DependencyTreeScheduler per repository, watching master,
      2. one ForceScheduler per artifact builder,
      3. a (SingleBranchScheduler, ForceScheduler) pair per repository for
         rebuilding artifactsrc.yml, flattened into a single list.

    NOTE(review): the schedulers in groups 1 and 3 reuse the fixed names
    'dependency-tree' / 'artifactsrc-yml' / 'artifactsrc-yml-force' for every
    repository; with more than one entry in `repos_and_packages` this yields
    duplicate scheduler names — confirm this is intended.
    """
    return [
        # Group 1: change-driven dependency-tree scheduler per repository.
        DependencyTreeScheduler(
            'dependency-tree',
            build_dependency_tree=create_build_dependency_tree(repo['artifacts']),
            builds_from_change=builds_from_change_func(repo['artifacts']),
            change_filter=util.ChangeFilter(
                repository=repo['repo'], branch='master'))
        for repo in repos_and_packages
    ] + [
        # Group 2: manual trigger for every individual artifact builder.
        schedulers.ForceScheduler(
            artifact['artifact'] + '-force',
            builderNames=[artifact['artifact']],
            buttonName='Force')
        for repo in repos_and_packages
        for artifact in repo['artifacts']
    ] + _flatten([
        # Group 3: rebuild artifactsrc.yml when it changes, plus a manual
        # trigger; pairs are flattened into the result list.
        (schedulers.SingleBranchScheduler(
            'artifactsrc-yml',
            fileIsImportant=_artifactsrc_needs_rebuilding,
            onlyImportant=True,
            change_filter=util.ChangeFilter(
                repository=repo['repo'], branch='master'),
            builderNames=['build-artifactsrc-yml']),
         schedulers.ForceScheduler(
             'artifactsrc-yml-force',
             builderNames=['build-artifactsrc-yml'],
             buttonName='Force'))
        for repo in repos_and_packages
    ])
def initScheduler(self):
    """Attach the static Tester and NPM-deployment schedulers to the
    master configuration and return the finished configuration."""
    tester_properties = {
        'clean': True,
        'test': True,
        'release': False,
        'deploy': False,
        'Linux': True,
        'Windows': True,
        'Android': True
    }
    # CI scheduler: reacts to any branch of the qmake-* projects.
    tester = schedulers.AnyBranchScheduler(
        name='Tester',
        change_filter=util.ChangeFilter(project_re="qmake-*"),
        builderNames=['Tester'],
        properties=tester_properties,
        treeStableTimer=None)
    # Deployment scheduler: only master commits of the npm-Chat project.
    npm_deployer = schedulers.SingleBranchScheduler(
        name='NPM Deployer',
        change_filter=util.ChangeFilter(branch='master', project='npm-Chat'),
        builderNames=['NPM'],
        properties={},
        treeStableTimer=None)
    # NOTE(review): attribute is spelled `shedulers` — presumably defined
    # elsewhere with that spelling; kept as-is.
    self.masterConf['schedulers'] = self.shedulers + [tester, npm_deployer]
    return self.getMasterConf()
def add_all_schedulers(cfg):
    """Register a force scheduler and a commit-driven scheduler for every
    (service, branch) combination in the buildbot config dict `cfg`."""
    for service_name in SERVICES:
        git_url = SERVICES[service_name]['giturl']
        for branch_name in get_all_possible_branch_names():
            realm = BRANCH_TO_REALM_MAPPING[branch_name]
            target_builder = f"{service_name}_{realm}"
            # Manual trigger: branch/repository/project are pinned, only
            # the revision can be chosen freely.
            forced = schedulers.ForceScheduler(
                name=f"force-{service_name}-{realm}",
                codebases=[
                    util.CodebaseParameter(
                        "",
                        label="Repository",
                        branch=util.FixedParameter(name="branch",
                                                   default=branch_name),
                        revision=util.StringParameter(name="revision",
                                                      default=""),
                        repository=util.FixedParameter(name="repository",
                                                       default=git_url),
                        project=util.FixedParameter(name="project",
                                                    default=service_name),
                    )
                ],
                builderNames=[target_builder],
            )
            # Automatic trigger on each commit to this service's branch.
            on_commit = schedulers.SingleBranchScheduler(
                name=f"commit-{service_name}-{realm}",
                builderNames=[target_builder],
                treeStableTimer=0,
                change_filter=util.ChangeFilter(branch=branch_name,
                                                project=service_name),
            )
            cfg['schedulers'].extend([forced, on_commit])
def schedule(c):
    """Wire up the branch, force, and trigger schedulers for PROJECT_NAME
    into the buildbot config dict `c`."""
    # Parameters shown in the force-build dialog.
    option_fields = [
        util.StringParameter(name="force_feature",
                             label="feature-branch to build:",
                             default="", size=80),
        util.BooleanParameter(name="force_build_clean",
                              label="force a make clean",
                              default=False),
        util.BooleanParameter(name="force_complete_rebuild",
                              label="force complete rebuild and fresh git clone",
                              default=False),
    ]
    force_sched = s_force(
        name="force-build-" + PROJECT_NAME,
        cb=FORCE_CODEBASE,
        builders=[PROJECT_NAME],
        properties=[
            util.NestedParameter(name="options",
                                 label="Build Options",
                                 layout="vertical",
                                 fields=option_fields)
        ])
    # Automatic build on matching branches, debounced for 5 minutes.
    branch_sched = s_abranch(
        PROJECT_NAME + '-sched',
        CODEBASE,
        [PROJECT_NAME],
        change_filter=util.ChangeFilter(branch_fn=trigger_branch_match),
        treeStableTimer=5 * 60)
    trigger_sched = s_trigger('trigger-' + PROJECT_NAME, CODEBASE,
                              [PROJECT_NAME])
    c['schedulers'].extend([branch_sched, force_sched, trigger_sched])
def _getBasicSchedulers(props):
    """Build the per-branch scheduler dict: PR scheduler, per-JDK build and
    report schedulers, a quick markdown scheduler, and a packaging scheduler.

    Returns a dict keyed by short names ('prs', '<type><jdk>', 'markdowndb',
    'package') mapping to scheduler objects.

    NOTE(review): `hour={{nightly_build_hour}}` below is not valid at Python
    runtime (a set containing a set) — presumably a template placeholder
    substituted before this file is used; kept verbatim.
    """
    pretty_branch_name = props['branch_pretty']
    # Matches ordinary pushes (category None) to this branch.
    branch_cf = util.ChangeFilter(category=None,
                                  branch_re=props['git_branch_name'])
    schedDict = {}
    schedDict['prs'] = getPullRequestScheduler(props, pretty_branch_name)
    # One scheduler per (build type, JDK) combination.
    for build_type in ["Build", "Reports"]:
        for jdk in common.getJDKBuilds(props):
            sched = _getAnyBranchScheduler(
                name=common.getBuildWithJDK(pretty_branch_name, build_type,
                                            jdk),
                change_filter=branch_cf,
                properties=props,
                builderNames=[
                    common.getBuildWithJDK(pretty_branch_name, build_type,
                                           jdk),
                ])
            schedDict[build_type + str(jdk)] = sched
    # Fast feedback: markdown only (database tests currently disabled).
    sched = _getAnyBranchScheduler(
        name=pretty_branch_name + " Quick Build",
        change_filter=branch_cf,
        properties=props,
        builderNames=[
            pretty_branch_name + " Markdown",
            # pretty_branch_name + " Database Tests"
        ])
    schedDict["markdowndb"] = sched
    if str(props['package_all']).lower() != 'true':
        # Nightly packaging, only when the branch actually changed.
        sched = schedulers.Nightly(
            name=pretty_branch_name + ' Package Generation',
            change_filter=branch_cf,
            hour={{nightly_build_hour}},
            onlyIfChanged=True,
            properties=props,
            builderNames=[
                pretty_branch_name + " Debian Packaging",
                pretty_branch_name + " el7 RPM Packaging",
                pretty_branch_name + " el8 RPM Packaging"
            ])
    else:
        # Package after every successful build with the default (first) JDK.
        defaultJDK = common.getJDKBuilds(props)[0]
        sched = schedulers.Dependent(
            name=pretty_branch_name + " Packaging Generation",
            upstream=schedDict["Build" + str(defaultJDK)],
            properties=props,
            builderNames=[
                pretty_branch_name + " Debian Packaging",
                pretty_branch_name + " el7 RPM Packaging",
                pretty_branch_name + " el8 RPM Packaging"
            ])
    schedDict['package'] = sched
    return schedDict
def get_schedulers():
    """Return the two schedulers for the test builder: one reacting to
    master commits and one for manual triggering."""
    on_master_commit = schedulers.SingleBranchScheduler(
        name="all",
        change_filter=util.ChangeFilter(branch='master'),
        treeStableTimer=None,
        builderNames=["runtests"])
    manual = schedulers.ForceScheduler(name="force",
                                       builderNames=["runtests"])
    return [on_master_commit, manual]
def get_schedulers():
    """Return the SFML schedulers: automatic builds for the upstream repo,
    builds for foreign (forked) repos, and a manual force scheduler."""
    from buildbot.schedulers.basic import AnyBranchScheduler
    from buildbot.schedulers.forcesched import ForceScheduler
    from buildbot.schedulers.forcesched import StringParameter
    from buildbot.schedulers.forcesched import FixedParameter
    from buildbot.plugins import util
    import builders

    builder_names = builders.get_builder_names()

    # Changes coming from the main SFML repository.
    upstream = AnyBranchScheduler(
        name='default',
        reason='main repository source code modification',
        builderNames=builder_names,
        treeStableTimer=20,
        change_filter=util.ChangeFilter(
            repository_fn=lambda repository:
                'github.com/SFML/SFML' in repository),
        properties={'trigger': 'internal'})

    # Changes coming from any other (forked) repository.
    foreign = AnyBranchScheduler(
        name='foreign',
        builderNames=builder_names,
        change_filter=util.ChangeFilter(
            repository_fn=lambda repository:
                'github.com/SFML/SFML' not in repository),
        properties={'trigger': 'external'})

    # Manual trigger with free-form branch/revision/repository/project.
    force = ForceScheduler(
        name='force',
        reason=StringParameter(name="reason", default="manual build",
                               size=100),
        builderNames=builder_names,
        branch=StringParameter(name="branch", default="master", size=100),
        revision=StringParameter(name="revision", default="", size=100),
        repository=StringParameter(
            name="repository",
            default="https://github.com/SFML/SFML.git",
            regex=r"^https://github.com/[\w-]*/[\w-]*\.git$",
            size=100),
        project=StringParameter(name="project", default="SFML", size=100),
        properties=[util.FixedParameter(name="trigger", default="force")])

    return [upstream, foreign, force]
def getLntSchedulers():
    """Return the schedulers for the LNT project: a commit-driven scheduler
    plus a rich force scheduler.

    NOTE(review): `kwargs`, `filter_branch`, `_branch` and `_repourl` are
    free names here — presumably bound in an enclosing scope; confirm they
    are defined wherever this function lives.
    """
    project = "lnt"
    # Builders driven by both schedulers below.
    lnt_builders = [
        "publish-lnt-sphinx-docs",
    ]
    return [
        # Build on every merge to the watched branch.
        schedulers.SingleBranchScheduler(
            name="lnt-scheduler",
            treeStableTimer=kwargs.get('treeStableTimer', None),
            reason="Merge to LNT github {} branch".format(filter_branch),
            builderNames=lnt_builders,
            change_filter=util.ChangeFilter(project_fn=project,
                                            branch=filter_branch)),
        # Manual trigger; branch/repository/project are pinned, the
        # revision and a couple of clean-build flags are user-selectable.
        schedulers.ForceScheduler(
            name="force-build-scheduler",
            label="Force Build",
            buttonName="Force Build",
            reason=util.ChoiceStringParameter(
                name="reason",
                label="reason:",
                required=True,
                choices=[
                    "Build a particular revision",
                    "Force clean build",
                    "Narrow down blamelist",
                ],
                default="Build a particular revision"),
            builderNames=lnt_builders,
            codebases=[
                util.CodebaseParameter(
                    codebase="",
                    branch=util.FixedParameter(name="branch",
                                               default=_branch),
                    revision=util.StringParameter(name="revision",
                                                  label="revision:",
                                                  size=45,
                                                  default=''),
                    repository=util.FixedParameter(name="repository",
                                                   default=_repourl),
                    project=util.FixedParameter(name="project",
                                                default=project))
            ],
            properties=[
                util.BooleanParameter(
                    name="clean",
                    label="Clean source code and build directory",
                    default=False),
                util.BooleanParameter(name="clean_obj",
                                      label="Clean build directory",
                                      default=False)
            ]),
    ]
def schedulers_all_projects(c):
    """Append one SingleBranchScheduler per (repo, builder) pair of every
    configured project to the buildbot config dict `c`."""
    for proj in projects:
        proj_name = proj["name"]
        for idx, repo_cfg in enumerate(proj["repos"]):
            builder_name = proj["builders"][idx]
            # The change source carries no project information, so the
            # filter matches on repository URL and branch instead.
            sched = schedulers.SingleBranchScheduler(
                name=builder_name,
                change_filter=util.ChangeFilter(
                    repository=repo_cfg["url"],
                    branch=repo_cfg["branch"]),
                treeStableTimer=None,
                builderNames=[builder_name])
            c['schedulers'].append(sched)
def get_schedulers():
    """Return the coverity (master-only), default (any branch), and force
    schedulers for SFML."""
    from buildbot.schedulers.basic import SingleBranchScheduler
    from buildbot.schedulers.basic import AnyBranchScheduler
    from buildbot.schedulers.forcesched import ForceScheduler
    from buildbot.schedulers.forcesched import StringParameter
    from buildbot.schedulers.forcesched import FixedParameter
    from buildbot.plugins import util
    import builders

    all_builders = builders.get_builder_names()

    # Coverity analysis only runs on master commits.
    coverity = SingleBranchScheduler(
        name='master',
        reason='main repository source code modification',
        builderNames=['coverity'],
        treeStableTimer=20,
        change_filter=util.ChangeFilter(branch='master'))

    # Regular builds for changes on any branch.
    default = AnyBranchScheduler(
        name='default',
        reason='main repository source code modification',
        builderNames=all_builders,
        treeStableTimer=20)

    # Codebase shown in the force-build dialog; repository is validated
    # against a GitHub URL pattern.
    sfml_codebase = util.CodebaseParameter(
        "",
        label="Codebase",
        branch=StringParameter(name="branch", default="master", size=100),
        revision=StringParameter(name="revision", default="", size=100),
        repository=StringParameter(
            name="repository",
            default="https://github.com/SFML/SFML.git",
            regex=r"^https://github.com/[\w-]*/[\w-]*\.git$",
            size=100),
        project=StringParameter(name="project", default="SFML", size=100),
    )

    force = ForceScheduler(
        name='force',
        reason=StringParameter(name="reason", default="manual build",
                               size=100),
        builderNames=all_builders,
        codebases=[sfml_codebase],
        properties=[util.FixedParameter(name="trigger", default="force")])

    return [coverity, default, force]
def getPullRequestScheduler(props, pretty_branch_name):
    """Build the scheduler that reacts to pull-request changes targeting
    this branch, fanning out to the PR build/report/markdown builders."""
    builder_names = []
    # One PR builder per (build type, JDK) combination.
    for build_type in ['Build', 'Reports']:
        for jdk in common.getJDKBuilds(props):
            builder_names.append(
                f"{pretty_branch_name} Pull Request {build_type} JDK {jdk}")
    builder_names.append(pretty_branch_name + " Pull Request Markdown")
    # builder_names.append(pretty_branch_name + " Pull Request Database Tests")
    # Only changes categorized as pull requests for this branch.
    pr_filter = util.ChangeFilter(category="pull",
                                  branch_re=props['git_branch_name'])
    return _getAnyBranchScheduler(name=pretty_branch_name + " Pull Requests",
                                  builderNames=builder_names,
                                  change_filter=pr_filter)
def setupSchedulers(self, _schedulers, spawner_name, try_name, deploy_name,
                    importantManager, codebases, dep_properties):
    """Register the change-driven spawner scheduler plus the force and
    deploy schedulers into the `_schedulers` list."""
    # Only filter on branch when one is configured for this repository.
    change_criteria = {'repository': self.repository}
    if self.branch is not None:
        change_criteria['branch'] = self.branch
    spawner = schedulers.AnyBranchScheduler(
        name=spawner_name,
        builderNames=[spawner_name],
        change_filter=util.ChangeFilter(**change_criteria),
        onlyImportant=True,
        fileIsImportant=importantManager.fileIsImportant,
        codebases=codebases,
    )
    forced = schedulers.ForceScheduler(
        name="force" + spawner_name,
        builderNames=[spawner_name],
        codebases=self.createCodebaseParams(codebases))
    deploy = schedulers.ForceScheduler(
        name=deploy_name,
        builderNames=[deploy_name],
        codebases=self.createCodebaseParamsForDeploy(codebases),
        properties=dep_properties)
    _schedulers.extend([spawner, forced, deploy])
def builderNames(branch):
    """Return the builder names for *branch* across every architecture and
    toolchain combination ('<branch>:<arch>:<toolchain>')."""
    builders = set()
    for arch in architecture_testing_list:
        for toolchain in arch["toolchain"]:
            builders.add(branch + ':' + arch["name"] + ':' + toolchain["name"])
    return list(builders)


schedulers = []
# NOTE(review): nesting below was reconstructed from collapsed source; the
# force-scheduler loops and the pull-request filter are assumed to sit inside
# the branch loop (their names/filters use `branch`) — confirm against the
# original file.
for branch in branches_list:
    # Commit-driven scheduler covering all builders of this branch.
    schedulers.append(
        SingleBranchScheduler(name=branch,
                              change_filter=util.ChangeFilter(branch=branch),
                              treeStableTimer=None,
                              builderNames=builderNames(branch)))
    # One manual force scheduler per (arch, toolchain) builder.
    for arch in architecture_testing_list:
        for toolchain in arch["toolchain"]:
            schedulers.append(
                ForceScheduler(name="Force_%s_%s_%s" % (branch.replace(
                    ".", "_"), arch["name"], toolchain["name"]),
                               builderNames=[
                                   "%s:%s:%s" % (branch, arch["name"],
                                                 toolchain["name"])
                               ]))
    # add a changefilter for the pull requests
    cf = util.ChangeFilter(category='pull', branch=branch)
    # but only those that are targeted for that branch: reuse the branch
    # check against the PR's base-ref property
    cf.checks["prop:github.base.ref"] = cf.checks['branch']
from maxscale.config import constants

# Build properties shared by the daily-test scheduler; each call returns a
# parameter definition.
COMMON_PROPERTIES = [
    properties.build_box(),
    properties.cmake_flags(),
    properties.build_experimental_features(),
    properties.backend_database(),
    properties.database_version(),
    properties.ci_url(),
    properties.backend_use_ssl(),
    properties.use_valgrind(),
    properties.appendTestRunId(),
]

# Start from the parameters' declared defaults, then override a few values
# specific to the push-triggered daily test run.
DEFAULT_PROPERTIES = properties.extractDefaultValues(COMMON_PROPERTIES)
DEFAULT_PROPERTIES['cmake_flags'] = constants.DEFAULT_DAILY_TEST_CMAKE_FLAGS
DEFAULT_PROPERTIES["targetInitMode"] = TargetInitOptions.GENERATE
DEFAULT_PROPERTIES["nameInitMode"] = NameInitOptions.GENERATE
DEFAULT_PROPERTIES["buildHosts"] = ["bb-host"]

# Build and test on every push to a matching MaxScale branch; a short
# tree-stable timer coalesces rapid pushes.
CHANGE_SOURCE_SCHEDULER = schedulers.SingleBranchScheduler(
    name="build_and_test_on_push",
    change_filter=util.ChangeFilter(project=constants.MAXSCALE_PRODUCT,
                                    branch_fn=check_branch_fn),
    treeStableTimer=5,
    codebases=constants.MAXSCALE_CODEBASE,
    builderNames=["build_and_test_parall"],
    properties=DEFAULT_PROPERTIES)

SCHEDULERS = [CHANGE_SOURCE_SCHEDULER]
properties={ "github_repo_owner": "rsyslog", "github_repo_name": "librelp", }, )) lc['schedulers'].append(SingleBranchScheduler( name="github_librelp", change_filter=filter.ChangeFilter( category="pull", project="rsyslog/librelp"), builderNames=[ "librelp codecov" , "librelp freebsd" , "librelp build clang-9" , "librelp build gcc-8"] )) lc['schedulers'].append(ForceScheduler( name="forceall-librelp", builderNames=[ "librelp codecov" , "librelp freebsd" , "librelp build clang-9" , "librelp build gcc-8"] )) # build master commits so that CodeCov has references for all commits lc['schedulers'].append(schedulers.SingleBranchScheduler(name='librelp-master-sched', change_filter=util.ChangeFilter(project='rsyslog/librelp', branch='master'), treeStableTimer=30, # otherwise a PR merge with n commits my start n builders builderNames=["librelp codecov"] ))
def getGlobalSchedulers(self, platforms):
    """Return the global (non-platform-specific) schedulers for this project.

    Creates, in order:
      * a SingleBranchScheduler that starts the fetch builder on changes,
      * optionally a NightlyTriggerable for nightly builds (+ cleanup),
      * a Triggerable that fans out to the per-platform compile builders,
      * optionally force schedulers for fetch, build and clean.

    Args:
        platforms: iterable of platform objects; only those for which
            ``p.canBuild(self)`` holds get a compile builder name.

    Returns:
        list of buildbot scheduler objects.
    """
    ret = list()

    change_filter = util.ChangeFilter(
        repository=[self.baseurl, self.giturl], branch=self.branch)

    # Fetch scheduler (triggered by event source)
    ret.append(
        schedulers.SingleBranchScheduler(
            name=self.names['sch-sb'],
            change_filter=change_filter,
            # Wait for 5 minutes before starting build
            treeStableTimer=300,
            builderNames=[self.names['bld-fetch']]))

    # Nightly scheduler (started by time)
    # It's triggered after regular builds to take note of the last fetched source
    # Note that build is not started by trigger
    # We cleanup after it because we just generated a new package
    if self.nightly is not None:
        ret.append(
            schedulers.NightlyTriggerable(
                name=self.names['sch-nightly'],
                branch=self.branch,
                builderNames=[
                    self.names['bld-nightly'],
                    self.names['bld-clean']
                ],
                hour=self.nightly[0],
                minute=self.nightly[1],
                onlyIfChanged=True))

    # All compiling builders.
    # FIX: the previous code passed a single generator to the name-mapping
    # callable and list()-ed its one return value
    # (``list(self.names['bld-platform'](p for p in ...))``), which — for a
    # per-platform name mapper — yields the characters of one name instead
    # of one builder name per platform.  Map each buildable platform
    # individually instead.
    comp_builders = [
        self.names['bld-platform'](p)
        for p in platforms if p.canBuild(self)
    ]

    # Global build scheduler (triggered by fetch build and nightly build)
    ret.append(
        schedulers.Triggerable(name=self.names['sch-build'],
                               builderNames=comp_builders))

    # Force schedulers
    if self.enable_force:
        # Manual fetch (with optional clean/package flags).
        ret.append(
            schedulers.ForceScheduler(
                name=self.names['sch-force-id-fetch'],
                buttonName=self.names['sch-force-name-fetch'],
                label=self.names['sch-force-name-fetch'],
                reason=util.StringParameter(name="reason",
                                            label="Reason:",
                                            required=True,
                                            size=80),
                builderNames=[self.names['bld-fetch']],
                codebases=[util.CodebaseParameter(codebase='', hide=True)],
                properties=[
                    util.BooleanParameter(name="clean",
                                          label="Clean",
                                          default=False),
                    util.BooleanParameter(name="package",
                                          label="Package",
                                          default=False),
                ]))
        # Manual compile for all buildable platforms.
        ret.append(
            schedulers.ForceScheduler(
                name=self.names['sch-force-id-build'],
                buttonName=self.names['sch-force-name-build'],
                label=self.names['sch-force-name-build'],
                reason=util.StringParameter(name="reason",
                                            label="Reason:",
                                            required=True,
                                            size=80),
                builderNames=comp_builders,
                codebases=[util.CodebaseParameter(codebase='', hide=True)],
                properties=[
                    util.BooleanParameter(name="clean",
                                          label="Clean",
                                          default=False),
                    util.BooleanParameter(name="package",
                                          label="Package",
                                          default=False),
                ]))
        # Manual cleanup (optionally as a dry run).
        ret.append(
            schedulers.ForceScheduler(
                name=self.names['sch-force-id-clean'],
                buttonName=self.names['sch-force-name-clean'],
                label=self.names['sch-force-name-clean'],
                reason=util.StringParameter(name="reason", hide=True),
                builderNames=[self.names['bld-clean']],
                codebases=[util.CodebaseParameter(codebase='', hide=True)],
                properties=[
                    util.BooleanParameter(name="dry_run",
                                          label="Dry run",
                                          default=False),
                ]))
    return ret
def get(builders):
    """Assemble all GNU Radio CI schedulers.

    Creates pull-request handlers for gnuradio and volk, a push handler,
    force schedulers for PR/build/weekly runs, a timed weekly scheduler,
    and one Triggerable per build/test builder.

    Args:
        builders: iterable of builder configs, each with ``name`` and
            ``tags`` attributes used to select which builders a scheduler
            drives.

    Returns:
        list of buildbot scheduler objects.
    """
    def _strip_prefix(name, prefix):
        # FIX: str.lstrip() strips a *character set*, not a prefix —
        # "build_ubuntu".lstrip("build_") == "ntu" — which mangled the
        # generated trigger-scheduler names.  Remove the literal prefix.
        return name[len(prefix):] if name.startswith(prefix) else name

    scheds = []
    # pull request scheduler
    scheds.append(
        schedulers.AnyBranchScheduler(
            name="gr_pull_request_handler",
            change_filter=util.ChangeFilter(category='pull',
                                            project="gnuradio/gnuradio"),
            treeStableTimer=None,
            builderNames=[
                b.name for b in builders
                if "control" in b.tags and "gnuradio" in b.tags
                and "pull" in b.tags
            ]))
    scheds.append(
        schedulers.AnyBranchScheduler(
            name="volk_pull_request_handler",
            change_filter=util.ChangeFilter(category='pull',
                                            project="gnuradio/volk"),
            treeStableTimer=None,
            builderNames=[
                b.name for b in builders
                if "control" in b.tags and "volk" in b.tags
                and "pull" in b.tags
            ]))

    # push event scheduler
    def filter_for_push(change):
        # Only changes whose webhook event was a GitHub "push".
        return change.properties.getProperty("event") == "push"

    scheds.append(
        schedulers.AnyBranchScheduler(
            name="commit_push_handler",
            change_filter=util.ChangeFilter(filter_fn=filter_for_push,
                                            project="gnuradio/gnuradio"),
            treeStableTimer=60,
            builderNames=[
                b.name for b in builders
                if "control" in b.tags and "push" in b.tags
            ]))
    # Manual PR run: PR number/base branch and merge ref are user-supplied.
    scheds.append(
        schedulers.ForceScheduler(
            name="force_pullrequest",
            builderNames=["pull_request_runner"],
            properties=[
                util.StringParameter(name="github.number",
                                     label="GitHub pull request number",
                                     default="",
                                     size=80),
                util.StringParameter(name="github.base.ref",
                                     label="pull request base branch",
                                     default="master",
                                     size=80)
            ],
            codebases=[
                util.CodebaseParameter(
                    "",
                    project=util.FixedParameter(name="project",
                                                default="gnuradio/gnuradio"),
                    repository=util.FixedParameter(
                        name="repository",
                        default="https://github.com/gnuradio/gnuradio.git"),
                    branch=util.StringParameter(
                        name="branch",
                        label="pull request branch",
                        default="refs/pull/<PR#>/merge",
                        size=80),
                    revision=util.FixedParameter(name="revision", default=""))
            ]))
    # Manual push-style run against the main repository.
    scheds.append(
        schedulers.ForceScheduler(
            name="force_build",
            builderNames=["repo_push_runner"],
            codebases=[
                util.CodebaseParameter(
                    "",
                    project=util.FixedParameter(name="project",
                                                default="gnuradio/gnuradio"),
                    repository=util.FixedParameter(
                        name="repository",
                        default="https://github.com/gnuradio/gnuradio.git"),
                )
            ]))
    # Manual weekly run with a selectable branch.
    scheds.append(
        schedulers.ForceScheduler(
            name="force_weekly",
            builderNames=["weekly_runner"],
            codebases=[
                util.CodebaseParameter(
                    "",
                    project=util.FixedParameter(name="project",
                                                default="gnuradio/gnuradio"),
                    repository=util.FixedParameter(
                        name="repository",
                        default="https://github.com/gnuradio/gnuradio.git"),
                    branch=util.StringParameter(name="branch",
                                                label="test branch",
                                                default="master",
                                                size=80),
                    revision=util.FixedParameter(name="revision", default=""))
            ]))
    # Timed weekly run (Mondays and Fridays at 04:00).
    scheds.append(
        schedulers.Nightly(name="timed_weekly",
                           builderNames=["weekly_runner"],
                           codebases={
                               "": {
                                   "repository":
                                   "https://github.com/gnuradio/gnuradio.git",
                                   "branch": "master",
                                   "revision": "None"
                               }
                           },
                           dayOfWeek=[0, 4],
                           hour=4,
                           minute=0))
    # One Triggerable per build_* and test_* builder; scheduler names drop
    # the builder-name prefix.
    scheds.extend([
        schedulers.Triggerable(name="trigger_" +
                               _strip_prefix(b.name, "build_"),
                               builderNames=[b.name]) for b in builders
        if "build" in b.tags
    ])
    scheds.extend([
        schedulers.Triggerable(name="trigger_" +
                               _strip_prefix(b.name, "test_"),
                               builderNames=[b.name]) for b in builders
        if "test" in b.tags
    ])
    return scheds
c["titleURL"] = config.REPO_URL
c["buildbotURL"] = config.BUILDBOT_URL


def get_workers(worker_pool):
    """Return the worker names of *worker_pool*, or all workers when no
    pool is specified."""
    if worker_pool is None:
        return ALL_WORKERS_NAMES
    return list(config.WORKERS[worker_pool].keys())


# Create schedulers and builders for builds
c["builders"] = []
# Entry-point scheduler: reacts to any change (empty filter) and starts the
# top-level trigger builder, which fans out to the flow's builders below.
c["schedulers"] = [
    schedulers.SingleBranchScheduler(
        name=config.TRIGGER,
        change_filter=util.ChangeFilter(),
        treeStableTimer=config.BUILDBOT_TREE_STABLE_TIMER,
        builderNames=[config.TRIGGER])
]

for builder_name, properties in config.FLOW.get_prepared_builders().items():
    # NOTE(review): key is spelled 'sheduler' — it must match whatever
    # get_prepared_builders() produces, so it is left untouched.
    if properties.get('add_triggerable_sheduler', True):
        c["schedulers"].append(
            schedulers.Triggerable(name=builder_name,
                                   builderNames=[builder_name]))
    c["builders"].append(
        util.BuilderConfig(name=builder_name,
                           workernames=get_workers(properties.get("worker")),
                           factory=properties['factory']))
c["titleURL"] = config.REPO_URL
c["buildbotURL"] = config.BUILDBOT_URL


def get_workers(worker_pool):
    """Return the worker names of *worker_pool*, or all workers when no
    pool is specified."""
    if worker_pool is None:
        return ALL_WORKERS_NAMES
    return list(config.WORKERS[worker_pool].keys())


# Create schedulers and builders for builds
c["builders"] = []
# Entry-point scheduler: only changes categorized 'mediasdk' start the
# top-level trigger builder, which fans out to the flow's builders below.
c["schedulers"] = [
    schedulers.SingleBranchScheduler(
        name=config.TRIGGER,
        change_filter=util.ChangeFilter(category="mediasdk"),
        treeStableTimer=config.BUILDBOT_TREE_STABLE_TIMER,
        builderNames=[config.TRIGGER])
]

for builder_name, properties in config.FLOW.get_prepared_builders().items():
    # NOTE(review): key is spelled 'sheduler' — it must match whatever
    # get_prepared_builders() produces, so it is left untouched.
    if properties.get('add_triggerable_sheduler', True):
        c["schedulers"].append(
            schedulers.Triggerable(name=builder_name,
                                   builderNames=[builder_name]))
    c["builders"].append(
        util.BuilderConfig(name=builder_name,
                           workernames=get_workers(properties.get("worker")),
                           factory=properties['factory']))
# Push status of build to the Github
def build_config() -> dict[str, Any]:
    """Assemble and return the complete buildbot master configuration dict."""
    c = {}
    # Opt out of buildbot's anonymous usage reporting.
    c["buildbotNetUsageData"] = None

    # configure a janitor which will delete all logs older than one month, and will run on sundays at noon
    c['configurators'] = [util.JanitorConfigurator(
        logHorizon=timedelta(weeks=4),
        hour=12,
        dayOfWeek=6
    )]

    c["schedulers"] = [
        # build all pushes to master
        schedulers.SingleBranchScheduler(
            name="master",
            change_filter=util.ChangeFilter(branch="master"),
            builderNames=["nix-eval"],
        ),
        # build all pull requests
        schedulers.SingleBranchScheduler(
            name="prs",
            change_filter=util.ChangeFilter(category="pull"),
            builderNames=["nix-eval"],
        ),
        # this is triggered from `nix-eval`
        schedulers.Triggerable(
            name="nix-build",
            builderNames=["nix-build"],
        ),
        # allow to manually trigger a nix-build
        schedulers.ForceScheduler(name="force", builderNames=["nix-eval"]),
        # allow to manually update flakes
        schedulers.ForceScheduler(
            name="update-flake",
            builderNames=["nix-update-flake"],
            buttonName="Update flakes",
        ),
        # updates flakes once a week (Sunday, 03:00)
        schedulers.NightlyTriggerable(
            name="update-flake-weekly",
            builderNames=["nix-update-flake"],
            hour=3,
            minute=0,
            dayOfWeek=6,
        ),
    ]
    github_api_token = read_secret_file("github-token")
    c["services"] = [
        reporters.GitHubStatusPush(
            token=github_api_token,
            # Since we dynamically create build steps,
            # we use `virtual_builder_name` in the webinterface
            # so that we can distinguish what has been built
            context=Interpolate("buildbot/%(prop:virtual_builder_name)s"),
        ),
        # Notify on irc
        NotifyFailedBuilds("irc://buildbot|[email protected]:6667/#xxx"),
    ]
    # Shape of this file:
    # [ { "name": "<worker-name>", "pass": "******", "cores": "<cpu-cores>" } ]
    worker_config = json.loads(read_secret_file("github-workers"))
    credentials = os.environ.get("CREDENTIALS_DIRECTORY", ".")
    # Cachix uploads are only enabled when its token file is present.
    enable_cachix = os.path.isfile(os.path.join(credentials, "cachix-token"))
    systemd_secrets = secrets.SecretInAFile(dirname=credentials)
    c["secretsProviders"] = [systemd_secrets]
    c["workers"] = []
    worker_names = []
    # One logical buildbot worker per advertised CPU core of each machine.
    for item in worker_config:
        cores = item.get("cores", 0)
        for i in range(cores):
            worker_name = f"{item['name']}-{i}"
            c["workers"].append(worker.Worker(worker_name, item["pass"]))
            worker_names.append(worker_name)
    c["builders"] = [
        # Since all workers run on the same machine, we only assign one of
        # them to do the evaluation. This should prevent excessive memory
        # usage.
        nix_eval_config([worker_names[0]], github_token_secret="github-token"),
        nix_build_config(worker_names, enable_cachix),
        nix_update_flake_config(
            worker_names,
            "TUM-DSE/doctor-cluster-config",
            github_token_secret="github-token",
        ),
    ]
    c["www"] = {
        "port": int(os.environ.get("PORT", "1810")),
        "auth": util.GitHubAuth(
            os.environ.get("GITHUB_OAUTH_ID"),
            read_secret_file("github-oauth-secret")
        ),
        "authz": util.Authz(
            roleMatchers=[
                util.RolesFromGroups(groupPrefix="")  # so we can match on TUM-DSE
            ],
            allowRules=[
                util.AnyEndpointMatcher(role="TUM-DSE", defaultDeny=False),
                util.AnyControlEndpointMatcher(role="TUM-DSE"),
            ],
        ),
        "plugins": dict(waterfall_view={}, console_view={}, grid_view={}),
        "change_hook_dialects": dict(
            github={
                "secret": read_secret_file("github-webhook-secret"),
                "strict": True,
                "token": github_api_token,
                "github_property_whitelist": "*",
            }
        ),
    }
    c["db"] = {"db_url": os.environ.get("DB_URL", "sqlite:///state.sqlite")}
    c["protocols"] = {"pb": {"port": "tcp:9989:interface=\\:\\:"}}
    c["buildbotURL"] = "https://buildbot.dse.in.tum.de/"
    return c
c["title"] = config.BUILDBOT_TITLE
c["titleURL"] = config.REPO_URL
c["buildbotURL"] = config.BUILDBOT_URL


def get_workers(worker_pool):
    """Return the worker names of *worker_pool*, or all workers when no
    pool is specified."""
    if worker_pool is None:
        return ALL_WORKERS_NAMES
    return list(config.WORKERS[worker_pool].keys())


c["builders"] = []
# Entry-point scheduler: only changes categorized 'driver' start the
# top-level trigger builder, which fans out to the flow's builders below.
c["schedulers"] = [
    schedulers.SingleBranchScheduler(
        name=config.TRIGGER,
        change_filter=util.ChangeFilter(category="driver"),
        treeStableTimer=config.BUILDBOT_TREE_STABLE_TIMER,
        builderNames=[config.TRIGGER])
]

for builder_name, properties in config.FLOW.get_prepared_builders().items():
    # NOTE(review): key is spelled 'sheduler' — it must match whatever
    # get_prepared_builders() produces, so it is left untouched.
    if properties.get('add_triggerable_sheduler', True):
        c["schedulers"].append(
            schedulers.Triggerable(name=builder_name,
                                   builderNames=[builder_name]))
    c["builders"].append(
        util.BuilderConfig(name=builder_name,
                           workernames=get_workers(properties.get("worker")),
                           factory=properties['factory']))
# Push status of build to the Github
def ros_sysbuild(c, job_name, rosdistro, machines, source=True, locks=[]):
    """Create the system-test job for a ROS workspace: schedulers, a docker
    based build factory, and the builder config.

    Returns the generated project name.

    NOTE(review): mutable default ``locks=[]`` is shared across calls if it
    is ever mutated — confirm callers always pass their own list.
    NOTE(review): ``sys_name`` below is a free name (not defined in this
    function) and it overwrites the incoming ``job_name`` argument — confirm
    this is intended.
    NOTE(review): the ``source`` parameter is unused here.
    """
    # Create a Job for system test
    job_name = sys_name
    project_name = '_'.join([job_name, rosdistro, 'system_build'])
    # Change-driven scheduler keyed on the job's change category.
    c['schedulers'].append(
        schedulers.SingleBranchScheduler(
            name=project_name,
            builderNames=[
                project_name,
            ],
            change_filter=util.ChangeFilter(category=project_name)))
    # Unconditional nightly run at 04:00.
    c['schedulers'].append(
        schedulers.Nightly(
            name=project_name + '-nightly-master',
            builderNames=[
                project_name,
            ],
            hour=4,
            minute=0,
        ))
    # Directory which will be bind-mounted
    binddir = '/tmp'
    rosinstall_url = "https://raw.githubusercontent.com/ipa-rwu/scalable_system_setup/master/config/" + job_name + ".rosinstall"
    f = BuildFactory()
    # Remove any old crud in /tmp folder
    f.addStep(
        ShellCommand(name='rm src',
                     command=['rm', '-rf', 'scalable_ws'],
                     hideStepIf=success,
                     workdir=Interpolate('%(prop:builddir)s/build/')))
    # wstool init src .rosinstall
    f.addStep(
        ShellCommand(
            haltOnFailure=True,
            name='wstool_rosintall',
            command=['wstool', 'init', 'src', rosinstall_url],
            hideStepIf=success,
            workdir=Interpolate('%(prop:builddir)s/build/scalable_ws')))
    # Download Dockerfile_sys.py script from master
    f.addStep(
        FileDownload(name=job_name + '-grab-script',
                     mastersrc='docker_components/Dockerfile_sys',
                     workerdest=('Dockerfile_sys'),
                     hideStepIf=success))
    # Download docker-compose-sys.py script from master
    f.addStep(
        FileDownload(name=job_name + '-grab-script',
                     mastersrc='docker_components/docker-compose-sys.yaml',
                     workerdest=('docker-compose-sys.yaml'),
                     hideStepIf=success))
    f.addStep(
        FileDownload(name=job_name + '-grab-script',
                     mastersrc='docker_components/rosdep_private.yaml',
                     workerdest=('rosdep_private.yaml'),
                     hideStepIf=success))
    f.addStep(
        FileDownload(name=job_name + '-grab-script',
                     mastersrc='scripts/docker-container.py',
                     workerdest=('docker-container.py'),
                     hideStepIf=success))
    # NOTE(review): master-side path is spelled 'uplode_...' — it must match
    # the actual file on the master, so it is left untouched.
    f.addStep(
        FileDownload(name=job_name + '-grab-script',
                     mastersrc='shell/uplode_docker_image.sh',
                     workerdest=('upload_docker_image.sh'),
                     hideStepIf=success))
    f.addStep(
        FileDownload(name=job_name + '-grab-script',
                     mastersrc='scripts/unique_docker_sys.py',
                     workerdest=('unique_docker_sys.py'),
                     mode=0o755,
                     hideStepIf=success))
    f.addStep(
        FileDownload(name=job_name + '-grab-script',
                     mastersrc='shell/test_sys.sh',
                     workerdest=('test_sys.sh'),
                     mode=0o755,
                     hideStepIf=success))
    # reedit docker-compose-deb.yaml
    f.addStep(
        ShellCommand(haltOnFailure=True,
                     name=job_name + '-reedit-docker-compose',
                     command=[
                         'python', 'unique_docker_sys.py',
                         'docker-compose-sys.yaml',
                         Interpolate(job_name)
                     ],
                     workdir=Interpolate('%(prop:builddir)s/build/'),
                     descriptionDone=['reedit docker-compose', job_name]))
    # Build docker image for creating debian
    f.addStep(
        ShellCommand(haltOnFailure=True,
                     name=job_name + '-create_docker_image',
                     command=[
                         'docker-compose', '-f', 'docker-compose-sys.yaml',
                         'build'
                     ],
                     workdir=Interpolate('%(prop:builddir)s/build/'),
                     descriptionDone=['sourcedeb', job_name]))
    # Make and run tests in a docker container
    f.addStep(
        ShellCommand(name=job_name + '-test_system',
                     command=[
                         'docker', 'run', '--name=' + project_name,
                         'scalable-sys:' + job_name, 'bash',
                         '/usr/local/sbin/test_sys.sh'
                     ],
                     descriptionDone=['make and test', job_name]))
    f.addStep(
        ShellCommand(name=job_name + '-upload_docker_image',
                     command=[
                         'bash', 'upload_docker_image.sh', project_name,
                         binddir, job_name
                     ],
                     descriptionDone=['upload_docker_image', job_name],
                     workdir=Interpolate('%(prop:builddir)s/build/')))
    # Tear down the container and image used for this run.
    f.addStep(
        ShellCommand(name=job_name + '-rm_container',
                     command=['docker', 'rm', project_name],
                     descriptionDone=['remove docker container', job_name]))
    f.addStep(
        ShellCommand(
            name=job_name + '-rm_image',
            command=['docker', 'image', 'rm', 'scalable-sys:' + job_name],
            descriptionDone=['remove docker image', job_name]))
    c['builders'].append(
        BuilderConfig(name=project_name,
                      workernames=machines,
                      factory=f,
                      locks=locks))
    # return the name of the job created
    return project_name
def getSingleBranchSchedulers(builders,
                              explicitly_set_schedulers=None,
                              **kwargs):
    """
    I'm taking over all of not yet assigned builders with the
    declared source code dependencies, and automatically generate
    a minimum set of SingleBranchSchedulers to handle all the declared
    source code dependency combinations.
    """
    builders_with_explicit_schedulers = set()
    if explicitly_set_schedulers:
        # TODO: Make a list of builder names with already set schedulers.
        # builders_with_explicit_schedulers.add(builder)
        pass

    # For the builders created with LLVMBuildFactory or similar,
    # we always use automatic schedulers,
    # unless schedulers already explicitly set.
    builders_with_automatic_schedulers = [
        builder for builder in builders
        if builder.name not in builders_with_explicit_schedulers
        if getattr(builder.factory, 'depends_on_projects', None)
    ]

    filter_branch = 'main'
    treeStableTimer = kwargs.get('treeStableTimer', None)

    automatic_schedulers = []

    # Do we have any to take care of?
    if builders_with_automatic_schedulers:
        # Let's reconcile first to get a unique set of dependencies.
        # We need a set of unique sets of dependent projects.
        set_of_dependencies = set([
            frozenset(getattr(b.factory, 'depends_on_projects'))
            for b in builders_with_automatic_schedulers
        ])

        # One scheduler per unique dependency combination; it drives every
        # builder that declares exactly that combination.
        for projects in set_of_dependencies:
            sch_builders = [
                b.name for b in builders_with_automatic_schedulers
                if frozenset(getattr(b.factory,
                                     'depends_on_projects')) == projects
            ]

            automatic_scheduler_name = filter_branch + ":" + ",".join(
                sorted(projects))

            automatic_schedulers.append(
                schedulers.SingleBranchScheduler(
                    name=automatic_scheduler_name,
                    treeStableTimer=kwargs.get('treeStableTimer', None),
                    reason="Merge to github %s branch" % filter_branch,
                    builderNames=sch_builders,
                    change_filter=util.ChangeFilter(
                        # Bind the current `projects` as a default argument
                        # so each lambda keeps its own set (late-binding
                        # closures would otherwise all see the last value).
                        project_fn= \
                            lambda c, projects_of_interest=frozenset(projects):
                                isProjectOfInterest(c, projects_of_interest),
                        branch=filter_branch)
                )
            )

            log.msg(
                "Generated SingleBranchScheduler: { name='%s'" %
                automatic_scheduler_name, ", builderNames=", sch_builders,
                ", change_filter=", projects,
                " (branch: %s)" % filter_branch,
                ", treeStableTimer=%s" % treeStableTimer, "}")
    return automatic_schedulers
print('processing scheduler ' + scheduler_args['name']) #builderNames scheduler_args['builderNames'] = scheduler_to_builders[ scheduler_args['name']] #trigger if get_scheduler_spec(spec, 'type') == 'time': scheduler_args['month'] = get_scheduler_spec(spec, 'month') scheduler_args['dayOfMonth'] = get_scheduler_spec( spec, 'day_of_month') scheduler_args['dayOfWeek'] = get_scheduler_spec( spec, 'day_of_week') scheduler_args['hour'] = get_scheduler_spec(spec, 'hour') scheduler_args['minute'] = get_scheduler_spec(spec, 'minute') scheduler_args['branch'] = 'master' elif get_scheduler_spec(spec, 'type') == 'commit': scheduler_args['change_filter'] = util.ChangeFilter( branch_re=get_scheduler_spec(spec, 'branch_regex')) #codebases x = [global_repo_urls[constructicon_name]] + list(all_deps) if get_scheduler_spec(spec, 'type') == 'force': scheduler_args['codebases'] = [ forcesched.CodebaseParameter(codebase=i) for i in x ] else: scheduler_args['codebases'] = { i: { 'repository': i, 'branch': get_scheduler_spec(spec, 'branches').get(i, 'master'), } for i in x
def ros_testbuild(c, job_name, url, branch, distro, arch, rosdistro, machines,
                  othermirror, keys, source=True, locks=None):
    """Wire a Docker-based ROS test build into the master config ``c``.

    Depending on ``source``, registers either a source build (git branch
    poller plus nightly and force schedulers) or a pull-request build
    (GitHub PR poller), then creates the builder that runs the test inside
    a Docker container.

    Args:
        c: buildbot master config dict; mutated in place — this function
            appends to c['change_source'], c['schedulers'] and c['builders'].
        job_name: base name used for step names and the Docker image tag.
        url: git repository URL. For PR builds an ssh-style URL of the form
            ``git@host:owner/repo.git`` is expected (parsed for owner/repo).
        branch: branch to poll.
        distro, arch, othermirror, keys: accepted but not used in this body;
            presumably consumed by the downloaded scripts — TODO confirm.
        rosdistro: ROS distribution name; also passed to the test script.
        machines: worker names allowed to run this builder.
        source: True for a source build, False for a pull-request build.
        locks: optional list of builder locks (default: no locks).

    Returns:
        The generated project/builder name.
    """
    # Fix: the original used the mutable default `locks=[]`, which is shared
    # across calls. Normalize None to a fresh list instead.
    if locks is None:
        locks = []

    if source:
        # ---- Source build: poll the given branch directly ----
        project_name = '_'.join([job_name, rosdistro, 'source_build'])
        c['change_source'].append(
            GitPoller(
                repourl=url,
                name=url,
                branch=branch,
                category=project_name,
                pollAtLaunch=True,
            )
        )
        # Build on every change seen by the poller above.
        c['schedulers'].append(
            schedulers.SingleBranchScheduler(
                name=project_name,
                builderNames=[project_name, ],
                change_filter=util.ChangeFilter(category=project_name)
            )
        )
        # Nightly builds: master at 03:00, develop at 05:00.
        c['schedulers'].append(
            schedulers.Nightly(
                name=project_name + '-nightly-master',
                codebases={url: {'repository': url, 'branch': 'master'}},
                builderNames=[project_name, ],
                hour=3,
                minute=0,
            )
        )
        c['schedulers'].append(
            schedulers.Nightly(
                name=project_name + '-nightly-develop',
                codebases={url: {'repository': url, 'branch': 'develop'}},
                builderNames=[project_name, ],
                hour=5,
                minute=0,
            )
        )
        # Manual trigger with a branch choice.
        # NOTE(review): the choice "devel" does not match the nightly
        # scheduler's "develop" branch above — confirm which spelling the
        # repository actually uses.
        c['schedulers'].append(
            schedulers.ForceScheduler(
                name=project_name + '-force',
                codebases=[util.CodebaseParameter(
                    "",
                    branch=util.ChoiceStringParameter(
                        name="branch",
                        choices=["master", "devel"],
                        default="master"),
                    repository=util.FixedParameter(name="repository",
                                                   default=url),
                )],
                builderNames=[project_name, ],
            )
        )
    else:
        # ---- Pull-request build: poll GitHub PRs ----
        # Parse "git@host:owner/repo.git" into owner and repo name.
        r_owner, r_name = (url.split(':')[1])[:-4].split('/')
        project_name = '_'.join([job_name, rosdistro, 'pr_build'])
        c['change_source'].append(
            GitPRPoller(
                owner=r_owner,
                repo=r_name,
                category=project_name,
                branches=[branch],
                pollInterval=10 * 60,
                pollAtLaunch=True,
                token=util.Secret("OathToken"),
                repository_type='ssh'
            )
        )
        c['schedulers'].append(
            schedulers.SingleBranchScheduler(
                name=project_name,
                builderNames=[project_name, ],
                change_filter=util.ChangeFilter(category=project_name)
            )
        )

    # Directory which will be bind-mounted into the test container.
    binddir = '/tmp/' + project_name

    f = BuildFactory()
    # Remove any old crud in the build/src folder.
    f.addStep(
        ShellCommand(
            name='rm src',
            command=['rm', '-rf', 'build/src'],
            hideStepIf=success,
            workdir=Interpolate('%(prop:builddir)s')
        )
    )
    # Check out the repository (to build/src).
    f.addStep(
        Git(
            repourl=util.Property('repository', default=url),
            branch=util.Property('branch', default=branch),
            alwaysUseLatest=True,
            mode='full',
            workdir=Interpolate('%(prop:builddir)s/build/src')
        )
    )
    # Download helper scripts and docker components from the master.
    # NOTE(review): all five downloads share the step name
    # '<job_name>-grab-script'; distinct names would make the web UI clearer,
    # but the names are kept to preserve existing behavior/appearance.
    f.addStep(
        FileDownload(
            name=job_name + '-grab-script',
            mastersrc='scripts/testbuild_docker.py',
            workerdest=('testbuild_docker.py'),
            hideStepIf=success
        )
    )
    f.addStep(
        FileDownload(
            name=job_name + '-grab-script',
            mastersrc='docker_components/Dockerfile_test',
            workerdest=('Dockerfile_test'),
            hideStepIf=success
        )
    )
    f.addStep(
        FileDownload(
            name=job_name + '-grab-script',
            mastersrc='docker_components/docker-compose-test.yaml',
            workerdest=('docker-compose-test.yaml'),
            hideStepIf=success
        )
    )
    f.addStep(
        FileDownload(
            name=job_name + '-grab-script',
            mastersrc='docker_components/rosdep_private.yaml',
            workerdest=('rosdep_private.yaml'),
            hideStepIf=success
        )
    )
    f.addStep(
        FileDownload(
            name=job_name + '-grab-script',
            mastersrc='scripts/docker-container.py',
            workerdest=('docker-container.py'),
            hideStepIf=success
        )
    )
    # Create the docker work environment (builds the scalable-env image).
    f.addStep(
        ShellCommand(
            command=['python', 'docker-container.py', job_name],
            hideStepIf=success,
            workdir=Interpolate('%(prop:builddir)s/build/')
        )
    )
    # Make and run the tests inside a docker container.
    f.addStep(
        ShellCommand(
            name=job_name + '-build',
            command=['docker', 'run',
                     '-v', 'ros-buildbot-docker_deb_repository:/home/package',
                     '--name=' + project_name,
                     'scalable-env:' + job_name,
                     'python', '/tmp/build/testbuild_docker.py',
                     binddir, rosdistro],
            descriptionDone=['make and test', job_name]
        )
    )
    # Copy the test results back out of the (now stopped) container.
    f.addStep(
        ShellCommand(
            name=job_name + '-copytestresults',
            command=['docker', 'cp',
                     project_name + ':' + binddir + '/testresults',
                     'testresults'],
            logfiles={'tests': 'testresults'},
            descriptionDone=['testresults', job_name]
        )
    )
    # Clean up: remove the container, then its image.
    f.addStep(
        ShellCommand(
            name=job_name + '-rm_container',
            command=['docker', 'rm', project_name],
            descriptionDone=['remove docker container', job_name]
        )
    )
    f.addStep(
        ShellCommand(
            name=job_name + '-rm_image',
            command=['docker', 'image', 'rm', 'scalable-env:' + job_name],
            descriptionDone=['remove docker image', job_name]
        )
    )
    c['builders'].append(
        BuilderConfig(
            name=project_name,
            workernames=machines,
            factory=f,
            locks=locks
        )
    )
    # Return the name of the job created.
    return project_name
def make_config(worker_name, worker_password, worker_port, git_repo, branch,
                poll_interval, builder_name, project_name, project_url,
                buildbot_url, buildbot_web_port, buildbot_from_email):
    """Return a complete BuildmasterConfig dict for a single-worker setup.

    A GitPoller watches ``branch`` of ``git_repo``; a SingleBranchScheduler
    builds every stable change and a ForceScheduler allows manual builds.
    The single builder checks out the source and runs ``make check`` through
    direnv with a one-hour timeout.

    Args:
        worker_name, worker_password, worker_port: worker credentials and
            the PB protocol port.
        git_repo: repository URL to poll and build.
        branch: branch to watch.
        poll_interval: seconds between polls; also reused as treeStableTimer.
        builder_name: name of the single builder.
        project_name, project_url: title/titleURL shown in the web UI.
        buildbot_url: externally visible URL of this master.
        buildbot_web_port: port for the www plugin.
        buildbot_from_email: From: address for mail notifications.
    """
    return {
        'workers': [worker.Worker(worker_name, worker_password)],
        'protocols': {
            'pb': {
                'port': worker_port
            }
        },
        'change_source': [
            changes.GitPoller(
                git_repo,
                workdir='gitpoller-workdir',
                branch=branch,
                # Fix: GitPoller's parameter is camelCase `pollInterval`
                # (matching the GitPRPoller usage elsewhere in this file);
                # the all-lowercase spelling is the legacy 0.8-era name.
                pollInterval=poll_interval,
            ),
        ],
        'schedulers': [
            schedulers.SingleBranchScheduler(
                name="all",
                change_filter=util.ChangeFilter(branch=branch),
                treeStableTimer=poll_interval,
                builderNames=[builder_name],
            ),
            schedulers.ForceScheduler(
                name="force",
                builderNames=[builder_name],
            ),
        ],
        'builders': [
            util.BuilderConfig(
                name=builder_name,
                workernames=[worker_name],
                factory=util.BuildFactory([
                    # check out the source
                    steps.Git(repourl=git_repo, mode='incremental'),
                    # allow the project's direnv environment, then run the
                    # tests through it
                    steps.ShellCommand(command=[
                        "direnv",
                        "allow",
                        ".",
                    ], ),
                    steps.ShellCommand(
                        command=[
                            "direnv",
                            "exec",
                            ".",
                            "make",
                            "check",
                        ],
                        env={
                            'NIX_REMOTE': 'daemon',
                        },
                        # If we have to rebuild our dependencies from scratch,
                        # we can go a long time without receiving output from
                        # the compiler. Default timeout is 20 mins, bump to
                        # 1hr.
                        timeout=60 * 60,
                    ),
                ]),
            ),
        ],
        # NOTE(review): legacy buildbot-eight key, empty here — confirm the
        # target buildbot version still tolerates it.
        'status': [],
        'title': project_name,
        'titleURL': project_url,
        'buildbotURL': buildbot_url,
        'www': {
            'port': buildbot_web_port,
            'plugins': {
                'waterfall_view': {},
            },
        },
        'db': {
            'db_url': "sqlite:///state.sqlite",
        },
        'services': [
            reporters.MailNotifier(
                fromaddr=buildbot_from_email,
                # TODO(jml): Currently sending mail for all builds. We should
                # send mail under fewer circumstances once we have a better
                # idea about what we actually want.
                #
                # http://buildbot.readthedocs.io/en/latest/manual/cfg-reporters.html?highlight=github#mailnotifier-arguments
                mode='all',
                # XXX: Temporarily hard-code until we can figure out how to
                # get these automatically from commits.
                extraRecipients=[
                    "*****@*****.**",
                    "*****@*****.**",
                ],
            )
        ],
    }
def getPushChangeFilter(self):
    """Return a ChangeFilter for push events on this repository.

    The filter always matches on the repository; the branch constraint is
    added only when ``self.branch`` is set.
    """
    if self.branch is None:
        return util.ChangeFilter(repository=self.repository)
    return util.ChangeFilter(repository=self.repository, branch=self.branch)
def getTryChangeFilter(self):
    """Return a ChangeFilter matching 'pull' (try) changes on this repository."""
    return util.ChangeFilter(repository=self.repository, category='pull')
####### BUILDER NAMES

# One builder for the lingo tests plus one per test target; the force
# scheduler can additionally kick off the main "build" builder.
lingo_builder = "lingotests (D4)"
builder_names = [lingo_builder] + [target.builder_name for target in test_targets]
force_builder_names = ["build"] + builder_names

####### SCHEDULERS

# Configure the Schedulers, which decide how to react to incoming changes.
# Pushes to the ScummVM repository trigger the main "build" builder, but
# only when a Director-related file changed.
build_scheduler = schedulers.SingleBranchScheduler(
    name="all",
    change_filter=util.ChangeFilter(repository="https://github.com/scummvm/scummvm"),
    treeStableTimer=5,
    fileIsImportant=file_is_director_related,
    builderNames=["build"],
)

# Triggerable scheduler fanning out over the per-target test builders.
director_scheduler = schedulers.Triggerable(
    name="Director Tests",
    builderNames=builder_names,
)

# Manual kick-off for any builder, including the main build.
force_scheduler = schedulers.ForceScheduler(
    name="force",
    builderNames=force_builder_names,
)

c["schedulers"] = [build_scheduler]
def getSchedulers(props):
    """Assemble the full scheduler list for one release branch.

    Starts from the branch's basic schedulers, then layers on dependent
    deploy, force, tag/release and packaging schedulers according to the
    branch's ``deploy_env`` and ``package_all`` flags.

    Args:
        props: per-branch property dict; keys read here include
            'branch_pretty', 'deploy_env', 'pkg_major_version' and
            'package_all'.

    Returns:
        A list of buildbot scheduler objects.
    """
    pretty_branch_name = props['branch_pretty']
    sched_dict = _getBasicSchedulers(props)
    scheduler_list = list(sched_dict.values())
    # Deploy branches: run Ansible Deploy automatically after the package
    # scheduler's builds succeed.
    if props['deploy_env']:
        scheduler_list.append(
            schedulers.Dependent(
                name=pretty_branch_name + " Ansible Deploy",
                upstream=sched_dict['package'],
                properties=props,
                builderNames=[pretty_branch_name + " Ansible Deploy"]))
    # Force scheduler covering every per-JDK build variant.
    forceBuildNames = [
        common.getBuildWithJDK(pretty_branch_name, "Build", jdk)
        for jdk in common.getJDKBuilds(props)
    ]
    forceBuild = _getForceScheduler(props, "ForceBuild", forceBuildNames)
    scheduler_list.append(forceBuild)
    # Release builds fire on version tags for this branch's major version.
    tag_sched = schedulers.AnyBranchScheduler(
        name=pretty_branch_name + " Release",
        # Note: The branch_regex here is matching something like "11.1", so
        # we use the major version (11), plus a static .*
        change_filter=util.ChangeFilter(
            category='tag',
            branch_re=props['pkg_major_version'] + ".*"),
        properties=props,
        builderNames=[pretty_branch_name + " Release"])
    scheduler_list.append(tag_sched)
    # Second force scheduler for the remaining builders (reports, docs,
    # packaging, and optionally deploy).
    # NOTE(review): unlike forceBuildNames above, this call passes
    # pretty_branch_name to getJDKBuilds — confirm the asymmetry is
    # intentional.
    forceBuilders = [
        common.getBuildWithJDK(pretty_branch_name, "Reports", jdk)
        for jdk in common.getJDKBuilds(props, pretty_branch_name)
    ]
    forceBuilders.extend([
        pretty_branch_name + " Markdown",
        # pretty_branch_name + " Database Tests",
        pretty_branch_name + " Debian Packaging",
        pretty_branch_name + " el7 RPM Packaging",
        pretty_branch_name + " el8 RPM Packaging"
    ])
    if props['deploy_env']:
        forceBuilders.append(pretty_branch_name + " Ansible Deploy")
    forceOther = _getForceScheduler(props, "ForceBuildOther", forceBuilders)
    scheduler_list.append(forceOther)
    # Chain packaging off a forced build, and (for deploy branches) chain
    # deployment off that packaging.
    if props['package_all']:
        forcePackage = schedulers.Dependent(
            name=pretty_branch_name + " Force Packaging Generation",
            upstream=forceBuild,
            properties=props,
            builderNames=[
                pretty_branch_name + " Debian Packaging",
                pretty_branch_name + " el7 RPM Packaging",
                pretty_branch_name + " el8 RPM Packaging"
            ])
        scheduler_list.append(forcePackage)
        # This block references forcePackage, so it can only run when
        # package_all is set as well.
        if props['deploy_env']:
            forceAnsible = schedulers.Dependent(
                name=pretty_branch_name + " Force Ansible Deploy",
                upstream=forcePackage,
                properties=props,
                builderNames=[pretty_branch_name + " Ansible Deploy"])
            scheduler_list.append(forceAnsible)
    return scheduler_list