def getClone():
    """Return a checkout step doing a fresh, full clone of the templated repo.

    Failure of this step halts and flunks the build, since nothing useful can
    happen without a source tree.
    """
    return steps.GitHub(
        name="Clone/Checkout",
        repourl="{{ source_repo_url }}",
        mode='full',
        method='fresh',
        haltOnFailure=True,
        flunkOnFailure=True,
    )
def build_volk_PR():
    """Build factory for VOLK pull requests.

    Checks the PR source out into ``volk``, stages it under the shared
    pull-request source base directory and triggers the configured builders.
    """

    def _hide_ok(results, s):
        # Hide housekeeping steps from the UI once they skip or succeed.
        return results == SKIPPED or results == SUCCESS

    # Destination where the PR checkout is staged for downstream builders.
    staged_src = util.Interpolate(
        os.path.join(_PULL_SRC_BASE, "%(prop:github.number)s",
                     "%(prop:github.base.ref)s"))

    factory = util.BuildFactory()
    factory.addStep(
        steps.MakeDirectory(name="create src directory", dir="volk"))
    factory.addStep(
        steps.GitHub(name="fetch PR source",
                     repourl=util.Property("repository"),
                     mode="full",
                     method="fresh",
                     submodules=True,
                     retryFetch=True,
                     clobberOnFailure=True,
                     workdir="volk"))
    factory.addStep(
        steps.RemoveDirectory(dir=staged_src, hideStepIf=_hide_ok))
    factory.addStep(
        steps.CopyDirectory(name="copy src to srcdir",
                            src="volk",
                            dest=staged_src,
                            hideStepIf=_hide_ok))

    # load builders.json with definitions on how to build things
    parent_path = os.path.dirname(__file__)
    with open(os.path.join(parent_path, "volk_builders.json"),
              "r") as builders_file:
        build_config = json.load(builders_file)

    factory.addStep(
        custom_steps.BuildTrigger(
            name="trigger the right builders",
            build_config=build_config,
            schedulerNames=["trigger"],
            runner="pull",
            set_properties={
                "pr_base":
                util.Property("github.base.ref"),
                "src_dir":
                util.Interpolate(
                    os.path.join(_PULL_SRC_BASE, "%(prop:github.number)s"))
            },
            test_merge=False,
            updateSourceStamp=False,
            waitForFinish=True))
    return factory
def nix_eval_config(
    worker_names: list[str], github_token_secret: str
) -> util.BuilderConfig:
    """
    Uses nix-eval-jobs to evaluate hydraJobs from flake.nix in parallel.
    For each evaluated attribute a new build pipeline is started. If all
    builds succeed and the build was for a PR opened by the flake update bot,
    this PR is merged.
    """
    # Check out the flake source.
    checkout = steps.GitHub(
        repourl=util.Property("repository"), method="clean", submodules=True
    )
    # Evaluate .#hydraJobs with 8 parallel evaluation workers.
    evaluate = NixEvalCommand(
        env={},
        name="Eval flake",
        command=[
            "nix",
            "run",
            "github:nix-community/nix-eval-jobs",
            "--",
            "--workers",
            "8",
            "--gc-roots-dir",
            # FIXME: don't hardcode this
            "/var/lib/buildbot-worker/gcroot",
            "--flake",
            ".#hydraJobs",
        ],
        haltOnFailure=True,
    )
    # Merge flake-update pull requests if CI succeeds
    merge = MergePr(
        name="Merge pull-request",
        env=dict(GITHUB_TOKEN=util.Secret(github_token_secret)),
        github_token_secret=util.Secret(github_token_secret),
        base_branches=["master"],
        owners=["doctor-cluster-bot"],
        command=[
            "gh",
            "pr",
            "merge",
            "--repo",
            util.Property("project"),
            "--rebase",
            util.Property("pullrequesturl"),
        ],
    )

    factory = util.BuildFactory()
    for step in (checkout, evaluate, merge):
        factory.addStep(step)

    return util.BuilderConfig(
        name="nix-eval",
        workernames=worker_names,
        factory=factory,
        properties=dict(virtual_builder_name="nix-eval"),
    )
def build_PR():
    """Build factory for pull requests.

    Checks the PR source out into ``src``, packs it into a per-PR ``.tar.xz``
    archive under the pull-request source base and triggers the configured
    builders with that archive.
    """

    def _hide_ok(results, s):
        # Hide housekeeping steps from the UI once they skip or succeed.
        return results == SKIPPED or results == SUCCESS

    # Per-PR source archive consumed by the triggered builders.
    src_archive = util.Interpolate(
        os.path.join(_PULL_SRC_BASE, "%(prop:github.number)s.tar.xz"))

    factory = util.BuildFactory()
    factory.addStep(
        steps.MakeDirectory(name="create src directory", dir="src"))
    factory.addStep(
        steps.GitHub(name="fetch PR source",
                     repourl=util.Property("repository"),
                     mode="full",
                     method="fresh",
                     submodules=True,
                     retryFetch=True,
                     clobberOnFailure=True,
                     workdir="src"))
    factory.addStep(
        steps.ShellCommand(name="remove old source archive",
                           command=["rm", "-rf", src_archive],
                           workdir="src",
                           hideStepIf=_hide_ok))
    factory.addStep(
        steps.ShellCommand(name="create source archive",
                           command=["tar", "cJf", src_archive, "."],
                           workdir="src",
                           hideStepIf=_hide_ok))

    # load builders.json with definitions on how to build things
    parent_path = os.path.dirname(__file__)
    with open(os.path.join(parent_path, "builders.json"),
              "r") as builders_file:
        build_config = json.load(builders_file)

    factory.addStep(
        custom_steps.BuildTrigger(
            name="trigger the right builders",
            build_config=build_config,
            schedulerNames=["trigger"],
            runner="pull",
            set_properties={
                "pr_base": util.Property("github.base.ref"),
                "src_archive": src_archive,
            },
            updateSourceStamp=False,
            waitForFinish=True))
    return factory
def make_builder_config(repo_url, name, worker_name, config, lock,
                        snapshots_dir, snapshots_url, snapshots_default_max):
    """Build a BuilderConfig that checks out, configures, compiles and tests
    the project, optionally packaging and publishing snapshot archives.

    :param repo_url: git URL the worker checks out
    :param name: builder name
    :param worker_name: worker this builder is bound to
    :param config: per-worker config dict (warning pattern, suppressions, ...)
    :param lock: master lock, acquired in "exclusive" mode for each build
    :param snapshots_dir: directory snapshots are published to (or None)
    :param snapshots_url: public URL prefix for snapshots (or None)
    :param snapshots_default_max: default number of snapshots to keep
    """
    # Normalize the publish locations to end with a slash so the string
    # concatenations below are well-formed.
    # FIX: was `is not "/"` — identity comparison against a str literal
    # (SyntaxWarning since CPython 3.8, interning-dependent); use `!=`.
    if snapshots_dir and snapshots_dir[-1] != "/":
        snapshots_dir += "/"
    if snapshots_url and snapshots_url[-1] != "/":
        snapshots_url += "/"

    builder = util.BuildFactory()
    builder.addStep(
        steps.SetProperties(name="Worker Config File",
                            properties=config,
                            hideStepIf=True))
    builder.addStep(
        steps.SetPropertiesFromEnv(
            variables=["WORKER_HOST", "WORKER_REPO_DIR"], hideStepIf=True))
    # TODO: use `reference` to a common volume instead? or make the builder
    # dependent on another fetch-only builder so only one builder tries to pull it?
    builder.addStep(
        steps.GitHub(repourl=repo_url,
                     workdir=Property("WORKER_REPO_DIR", None),
                     logEnviron=False,
                     getDescription={
                         "always": True,
                         "tags": True
                     }))
    builder.addStep(
        FileExistsSetProperty(name="config.mk Existence Check",
                              property="already_configured",
                              file="%s/config.mk" % builder.workdir,
                              hideStepIf=True))

    compilation_environment = Property("env", {})

    # Only (re)configure when config.mk was not found above.
    builder.addStep(
        steps.Configure(command=compute_configure,
                        env=compilation_environment,
                        doStepIf=is_not_configured))
    builder.addStep(
        steps.SetPropertyFromCommand(name="Python (Worker)",
                                     property="cpu_count",
                                     command=["python", "-c", GET_CPU_COUNT],
                                     flunkOnFailure=False,
                                     warnOnFailure=True,
                                     hideStepIf=True,
                                     description="getting CPU count",
                                     descriptionDone="got CPU count"))

    # In at least Buildbot 0.9.12, warningPattern and suppressionList are not
    # renderable, so just get the properties from the config file immediately
    compiler_warning_pattern = config.get(
        "compiler_warning_pattern",
        r"^([^:]+):(\d+):(?:\d+:)? "
        r"[Ww]arning: (.*)$")
    compiler_warning_extractor = steps.Compile.warnExtractFromRegexpGroups
    compiler_suppression_file = Property("compiler_suppression_file", None)
    compiler_suppression_list = config.get("compiler_suppression_list", None)

    builder.addStep(
        steps.Compile(command=["make",
                               Interpolate("-j%(prop:cpu_count:~1)s")],
                      env=compilation_environment,
                      warningPattern=compiler_warning_pattern,
                      warningExtractor=compiler_warning_extractor,
                      suppressionFile=compiler_suppression_file,
                      suppressionList=compiler_suppression_list))
    builder.addStep(
        steps.Test(command=[
            "make",
            Interpolate("%(prop:can_run_tests:"
                        "#?|test|test/runner)s")
        ],
                   env=compilation_environment,
                   warningPattern=compiler_warning_pattern,
                   warningExtractor=compiler_warning_extractor,
                   suppressionFile=compiler_suppression_file,
                   suppressionList=compiler_suppression_list,
                   haltOnFailure=True,
                   flunkOnFailure=True))

    # Snapshot publishing is only wired up when both location pieces exist.
    if snapshots_dir is not None and snapshots_url is not None:
        builder.addStep(
            steps.SetProperty(name="Computed By %s" % path.basename(__file__),
                              property="package_name",
                              value=compute_package_name,
                              hideStepIf=True,
                              doStepIf=should_package))
        builder.addStep(
            Package(package_name=Property("package_name"),
                    package_format=Property("package_archive_format"),
                    make_target=Property("package_make_target"),
                    package_directory=Property("package_directory", None),
                    strip_binaries=Property("package_strip_binaries", None),
                    env=compilation_environment,
                    doStepIf=should_package))

        source_path = Property("package_filename")
        target_path = Interpolate("%s%%(prop:package_filename)s" %
                                  snapshots_dir)
        target_url = Interpolate("%s%%(prop:package_filename)s" %
                                 snapshots_url)
        # This is not an ideal target link calculation since the archive format
        # in package_filename might be fixed up by the Package step, but here
        # only None is converted into tar.xz, which is not exactly the same
        target_link = Interpolate(
            "%s%%(prop:buildername)s-latest."
            "%%(prop:package_archive_format:-tar.xz)s" % snapshots_dir)

        builder.addStep(
            CleaningFileUpload(name="publish",
                               workersrc=source_path,
                               masterdest=target_path,
                               url=target_url,
                               clean=True,
                               doStepIf=should_package))
        builder.addStep(
            steps.MasterShellCommand(
                name="update latest archive",
                command=["ln", "-sf", target_path, target_link],
                logEnviron=False,
                doStepIf=should_package))
        builder.addStep(
            MasterCleanSnapshots(
                name="clean old snapshots",
                workdir=snapshots_dir,
                file_prefix=Interpolate("%(prop:buildername)s-"),
                num_to_keep=Property("num_snapshots_to_keep",
                                     snapshots_default_max),
                doStepIf=should_package))

    return util.BuilderConfig(name=name,
                              workername=worker_name,
                              collapseRequests=True,
                              factory=builder,
                              nextBuild=pick_next_build,
                              locks=[lock.access("exclusive")])
def build_weekly():
    """Build factory for the weekly run (staged-directory variant).

    Checks out the branch, stages the tree under the weekly source base,
    records a ``merge_<branch>`` property and triggers all builders.
    """
    # Where the checkout is staged for the triggered builders.
    staged_src = util.Interpolate(
        os.path.join(_WEEKLY_SRC_BASE, "%(prop:branch)s",
                     "%(prop:commit-description)s"))

    factory = util.BuildFactory()
    factory.addStep(
        steps.MakeDirectory(name="create src directory", dir="src"))
    factory.addStep(
        steps.GitHub(name="fetch PR source",
                     repourl=util.Property("repository"),
                     mode="full",
                     method="fresh",
                     submodules=True,
                     clobberOnFailure=True,
                     getDescription=True,
                     workdir="src"))
    factory.addStep(steps.RemoveDirectory(dir=staged_src))
    factory.addStep(
        steps.CopyDirectory(name="copy src to srcdir",
                            src="src",
                            dest=staged_src))
    factory.addStep(
        steps.SetProperty(
            property=util.Interpolate("merge_%(prop:branch)s"),
            value=True,
            hideStepIf=True,
        ))

    # load builders.json with definitions on how to build things
    parent_path = os.path.dirname(__file__)
    with open(os.path.join(parent_path, "builders.json"),
              "r") as builders_file:
        build_config = json.load(builders_file)

    # now we have all necessary merge properties together,
    # we can actually kickoff builds for them
    factory.addStep(
        custom_steps.BuildTrigger(
            name="trigger all builders",
            build_config=build_config,
            schedulerNames=["trigger"],
            runner="time",
            set_properties={
                "src_dir": staged_src,
                "got_revision": util.Property("got_revision"),
            },
            updateSourceStamp=True,
            waitForFinish=True))
    return factory
def build_weekly():
    """Build factory for the weekly run (source-archive variant).

    Checks out the branch, packs the tree into a per-commit ``.tar.xz``
    under the weekly source base and triggers all builders with it.
    """

    def _hide_ok(results, s):
        # Hide housekeeping steps from the UI once they skip or succeed.
        return results == SKIPPED or results == SUCCESS

    # Per-branch/per-commit source archive consumed by triggered builders.
    src_archive = util.Interpolate(
        os.path.join(_WEEKLY_SRC_BASE, "%(prop:branch)s",
                     "%(prop:commit-description)s.tar.xz"))

    factory = util.BuildFactory()
    factory.addStep(
        steps.MakeDirectory(name="create src directory", dir="src"))
    factory.addStep(
        steps.GitHub(name="fetch PR source",
                     repourl=util.Property("repository"),
                     mode="full",
                     method="fresh",
                     submodules=True,
                     clobberOnFailure=True,
                     getDescription=True,
                     workdir="src"))
    factory.addStep(
        steps.ShellCommand(name="remove old source archive",
                           command=["rm", "-rf", src_archive],
                           workdir="src",
                           hideStepIf=_hide_ok))
    factory.addStep(
        steps.ShellCommand(name="create source archive",
                           command=["tar", "cJf", src_archive, "."],
                           workdir="src",
                           hideStepIf=_hide_ok))

    # load builders.json with definitions on how to build things
    parent_path = os.path.dirname(__file__)
    with open(os.path.join(parent_path, "builders.json"),
              "r") as builders_file:
        build_config = json.load(builders_file)

    # now we have all necessary merge properties together,
    # we can actually kickoff builds for them
    factory.addStep(
        custom_steps.BuildTrigger(
            name="trigger all builders",
            build_config=build_config,
            schedulerNames=["trigger"],
            runner="time",
            set_properties={
                "src_archive": src_archive,
                "got_revision": util.Property("got_revision"),
            },
            updateSourceStamp=True,
            waitForFinish=True))
    return factory
def make_builder_config(repo_url, name, worker_name, config, lock,
                        snapshots_dir, snapshots_url, snapshots_default_max):
    """Build a BuilderConfig that checks out, configures, compiles and tests
    the project, optionally packaging/publishing per-branch snapshot archives
    and debug-symbol archives.

    :param repo_url: git URL the worker checks out
    :param name: builder name
    :param worker_name: worker this builder is bound to
    :param config: per-worker config dict (warning pattern, suppressions, ...)
    :param lock: master lock, acquired in "counting" mode for each build
    :param snapshots_dir: directory snapshots are published to (or None)
    :param snapshots_url: public URL prefix for snapshots (or None)
    :param snapshots_default_max: default number of snapshots to keep
    """
    builder = util.BuildFactory()
    builder.addStep(
        steps.SetProperties(name="Worker Config File",
                            properties=config,
                            hideStepIf=True))
    builder.addStep(
        steps.SetPropertiesFromEnv(
            variables=["WORKER_HOST", "WORKER_REPO_DIR"], hideStepIf=True))
    # TODO: use `reference` to a common volume instead? or make the builder
    # dependent on another fetch-only builder so only one builder tries to pull it?
    builder.addStep(
        steps.GitHub(repourl=repo_url,
                     workdir=Property("WORKER_REPO_DIR", None),
                     logEnviron=False,
                     getDescription={
                         "always": True,
                         "tags": True
                     }))
    builder.addStep(
        FileExistsSetProperty(name="config.mk Existence Check",
                              property="already_configured",
                              file="%s/config.mk" % builder.workdir,
                              hideStepIf=True))

    compilation_environment = Property("env", {})

    builder.addStep(
        steps.Configure(command=compute_configure,
                        env=compilation_environment,
                        doStepIf=ConfigChecker().needs_configuration))
    builder.addStep(
        steps.SetPropertyFromCommand(name="Python (Worker)",
                                     property="cpu_count",
                                     command=["python", "-c", GET_CPU_COUNT],
                                     flunkOnFailure=False,
                                     warnOnFailure=True,
                                     hideStepIf=True,
                                     description="getting CPU count",
                                     descriptionDone="got CPU count"))

    # In at least Buildbot 0.9.12, warningPattern and suppressionList are not
    # renderable, so just get the properties from the config file immediately
    compiler_warning_pattern = config.get(
        "compiler_warning_pattern",
        r"^([^:]+):(\d+):(?:\d+:)? "
        r"[Ww]arning: (.*)$")
    compiler_warning_extractor = steps.Compile.warnExtractFromRegexpGroups
    compiler_suppression_file = Property("compiler_suppression_file", None)
    compiler_suppression_list = config.get("compiler_suppression_list", None)

    builder.addStep(
        steps.Compile(command=["make",
                               Interpolate("-j%(prop:cpu_count:~1)s")],
                      env=compilation_environment,
                      warningPattern=compiler_warning_pattern,
                      warningExtractor=compiler_warning_extractor,
                      suppressionFile=compiler_suppression_file,
                      suppressionList=compiler_suppression_list))
    builder.addStep(
        steps.Test(command=[
            "make",
            Interpolate("%(prop:can_run_tests:"
                        "#?|test|test/runner)s")
        ],
                   env=compilation_environment,
                   warningPattern=compiler_warning_pattern,
                   warningExtractor=compiler_warning_extractor,
                   suppressionFile=compiler_suppression_file,
                   suppressionList=compiler_suppression_list,
                   haltOnFailure=True,
                   flunkOnFailure=True))

    # Snapshot publishing is only wired up when both location pieces exist.
    if snapshots_dir is not None and snapshots_url is not None:
        # Normalize the publish locations to end with a slash so the string
        # concatenations below are well-formed.
        # FIX: was `is not "/"` — identity comparison against a str literal
        # (SyntaxWarning since CPython 3.8, interning-dependent); use `!=`.
        if snapshots_dir and snapshots_dir[-1] != "/":
            snapshots_dir += "/"
        if snapshots_url and snapshots_url[-1] != "/":
            snapshots_url += "/"
        # Publish into a per-branch subdirectory; the %(prop:branch)s part is
        # resolved later by Interpolate.
        snapshots_dir = "%s%%(prop:branch)s/" % snapshots_dir
        snapshots_url = "%s%%(prop:branch)s/" % snapshots_url

        builder.addStep(
            steps.SetProperty(name="Computed By %s" % path.basename(__file__),
                              property="package_name",
                              value=compute_package_name,
                              hideStepIf=True,
                              doStepIf=should_package))
        builder.addStep(
            Package(package_name=Property("package_name"),
                    package_files=Property("package_files", None),
                    package_format=Property("package_archive_format"),
                    make_target=Property("package_make_target"),
                    split_debug_package=Property("split_debug_package", True),
                    extra_files=Property("package_extra_files", None),
                    package_script=Interpolate(config.get(
                        "package_script", "")),
                    env=compilation_environment,
                    doStepIf=should_package))

        latest_link = Interpolate(
            "%s%%(prop:buildername)s-latest."
            "%%(prop:package_archive_format:-tar.xz)s" % snapshots_dir)
        make_uploader_steps(builder=builder,
                            snapshots_dir=snapshots_dir,
                            snapshots_url=snapshots_url,
                            publish_name="archive",
                            property_name="package_filename",
                            latest_link=latest_link,
                            do_step_if=should_package)

        latest_link = Interpolate(
            "%s%%(prop:buildername)s"
            "-latest-debug-symbols.tar.xz" % snapshots_dir)
        make_uploader_steps(builder=builder,
                            snapshots_dir=snapshots_dir,
                            snapshots_url=snapshots_url,
                            publish_name="debug archive",
                            property_name="debug_package_filename",
                            latest_link=latest_link,
                            do_step_if=should_package_debug)

        builder.addStep(
            MasterCleanSnapshots(
                name="clean old snapshots",
                workdir=Interpolate(snapshots_dir),
                file_prefix=Interpolate("%(prop:buildername)s-"),
                num_to_keep=Property("num_snapshots_to_keep",
                                     snapshots_default_max),
                secondary_file_suffix="-debug-symbols",
                file_extensions=r"\.(?:tar(?:\.[xg]z)?|[a-z]{3,4})$",
                doStepIf=should_package,
                hideStepIf=True))

    return util.BuilderConfig(name=name,
                              workername=worker_name,
                              collapseRequests=True,
                              factory=builder,
                              nextBuild=pick_next_build,
                              locks=[lock.access("counting")])
"""Build Factory to configure, compile and build the scummvm binary.""" from typing import Dict, Any import os.path from buildbot.plugins import steps, util default_step_kwargs: Dict[str, Any] = {"logEnviron": False} build_factory = util.BuildFactory() # check out the source checkout_step = steps.GitHub( repourl="git://github.com/scummvm/scummvm.git", mode="incremental", **default_step_kwargs, ) build_factory.addStep(checkout_step) # run the tests (note that this will require that 'trial' is installed) build_factory.addStep( steps.Configure( command=[ "./configure", "--disable-all-engines", "--enable-engine=director", ], env={"CXX": "ccache g++"}, **default_step_kwargs, )) build_factory.addStep(steps.Compile(command=["make"], **default_step_kwargs))
def build_PR():
    """Build factory for pull requests (mergeability variant).

    Checks the PR out into ``src``, stages it under the pull source base,
    records the merge property, creates a ``test_merge_<base>`` branch, runs
    the mergeability sequences for each maintained branch and finally
    triggers the configured builders.
    """

    def _hide_ok(results, s):
        # Hide housekeeping steps from the UI once they skip or succeed.
        return results == SKIPPED or results == SUCCESS

    # Destination where the PR checkout is staged for downstream builders.
    staged_src = util.Interpolate(
        os.path.join(_BASEPATH, "pull", "%(prop:github.number)s",
                     "%(prop:github.base.ref)s"))

    factory = util.BuildFactory()
    factory.addStep(
        steps.MakeDirectory(name="create src directory", dir="src"))
    factory.addStep(
        steps.GitHub(name="fetch PR source",
                     repourl=util.Property("repository"),
                     mode="full",
                     method="fresh",
                     submodules=True,
                     retryFetch=True,
                     clobberOnFailure=True,
                     workdir="src"))
    factory.addStep(
        steps.SetProperty(
            name="set merge property",
            property=util.Interpolate("merge_%(prop:github.base.ref)s"),
            value=True,
            hideStepIf=_hide_ok))
    factory.addStep(
        steps.ShellCommand(
            name="create test_merge branch for further steps",
            command=[
                "git", "branch", "-f",
                util.Interpolate("test_merge_%(prop:github.base.ref)s")
            ],
            workdir="src"))
    factory.addStep(steps.RemoveDirectory(dir=staged_src,
                                          hideStepIf=_hide_ok))
    factory.addStep(
        steps.CopyDirectory(name="copy src to srcdir",
                            src="src",
                            dest=staged_src,
                            hideStepIf=_hide_ok))

    # Mergeability checks: each maintained branch against its merge base.
    for branch, base in (("master", "maint"), ("next", "master"),
                         ("python3", "next")):
        factory.addSteps(
            sequences.mergeability_sequence(branch, base, _PULL_SRC_BASE))

    # load builders.json with definitions on how to build things
    parent_path = os.path.dirname(__file__)
    with open(os.path.join(parent_path, "builders.json"),
              "r") as builders_file:
        build_config = json.load(builders_file)

    factory.addStep(
        custom_steps.BuildTrigger(
            name="trigger the right builders",
            build_config=build_config,
            schedulerNames=["trigger"],
            runner="pull",
            set_properties={
                "pr_base":
                util.Property("github.base.ref"),
                "src_dir":
                util.Interpolate(
                    os.path.join(_PULL_SRC_BASE, "%(prop:github.number)s"))
            },
            updateSourceStamp=False,
            waitForFinish=True))
    return factory