def initTargetProperty():
    """
    Sets 'target' property of the build to:
    - <branch>-buildbot-<starttime> if it isn't set yet or property
      'targetInitMode' is TargetInitOptions.GENERATE;
    - <branch> if property 'targetInitMode' is TargetInitOptions.SET_FROM_BRANCH.

    :return: list of steps
    """
    def shouldGenerate(step):
        # Generate a fresh target when nothing was provided at all, or when
        # generation was explicitly requested.
        mode = step.build.getProperty('targetInitMode')
        target = step.build.getProperty('target')
        return (target is None and mode is None) \
            or mode == TargetInitOptions.GENERATE

    def shouldCopyBranch(step):
        mode = step.build.getProperty('targetInitMode')
        return mode == TargetInitOptions.SET_FROM_BRANCH

    def hideIfSkipped(results, s):
        return results == SKIPPED

    generateStep = steps.SetProperty(
        name=util.Interpolate("Set 'target' property"),
        property="target",
        value=util.Interpolate(
            "%(prop:branch)s-buildbot-%(kw:startTime)s",
            startTime=getFormattedDateTime("%Y-%b-%d-%H-%M-%S")),
        doStepIf=shouldGenerate,
        hideStepIf=hideIfSkipped)
    copyBranchStep = steps.SetProperty(
        name=util.Interpolate("Set 'target' property"),
        property="target",
        value=util.Property("branch"),
        doStepIf=shouldCopyBranch,
        hideStepIf=hideIfSkipped)
    return [generateStep, copyBranchStep]
def setLocale():
    """Pick a random locale for testing and store it in the LANG property."""
    candidateLocales = ["en_US.utf8", "de_DE.utf8", "es_ES.utf8",
                        "fr_FR.utf8"]
    return steps.SetProperty(
        name="Generate locale for testing",
        property="LANG",
        value=random.choice(candidateLocales),
        flunkOnFailure=True,
        haltOnFailure=True)
def getShortBuildRevision():
    """Return a step that stores the short tarball revision in the
    'short_revision' property (computed by the _getShortBuildRevision
    renderable)."""
    return steps.SetProperty(
        name="Get build tarball short revision",
        property="short_revision",
        value=_getShortBuildRevision,
        flunkOnFailure=True,
        haltOnFailure=True)
def createBuildSteps():
    """Assemble the build steps: configure MDBCI_VM_PATH, derive the MDBCI
    configuration path from it, then destroy the virtual machine.

    :return: list of steps
    """
    configPathStep = steps.SetProperty(
        name="Set mdbci configuration path",
        property="mdbciConfig",
        value=util.Interpolate("%(prop:MDBCI_VM_PATH)s/%(prop:name)s"))
    return (common.configureMdbciVmPathProperty()
            + [configPathStep]
            + common.destroyVirtualMachine())
def configureMdbciVmPathProperty():
    """Configure the MDBCI_VM_PATH property"""
    # Start from the steps that resolve the worker's HOME directory, then
    # derive the VM path from it.
    stepList = getWorkerHomeDirectory()
    stepList.append(
        steps.SetProperty(
            name="Set MDBCI_VM_PATH property to $HOME/vms",
            property="MDBCI_VM_PATH",
            value=util.Interpolate("%(prop:HOME)s/vms")))
    return stepList
def setTimezone():
    """Generate a random UTC offset for testing and store it in the TZ
    property as 'UTC±HH:MM'.

    Hours cover the real-world UTC offset range (-12..+14); minutes are one
    of the quarter-hour offsets used by actual timezones.

    :return: a SetProperty step writing the offset to 'TZ'
    """
    offsetHour = random.randint(-12, 14)
    # The choices are already two characters wide; no zfill needed.
    offsetMin = random.choice(["00", "15", "30", "45"])
    # '%+03d' always emits a sign and zero-pads the hour to two digits.
    # The previous str().zfill(2) produced 'UTC-5:00' for negative
    # single-digit hours because the '-' sign consumed one of the two
    # columns, while positive hours came out as 'UTC+05:00'.
    tz = "UTC%+03d:%s" % (offsetHour, offsetMin)
    return steps.SetProperty(
        property="TZ",
        value=tz,
        flunkOnFailure=True,
        haltOnFailure=True,
        name="Generate timezone offset for testing")
def initNameProperty():
    """
    Sets 'name' property of the build to:
    - <branch>-buildbot-<starttime> if it isn't set yet or property
      'nameInitMode' is NameInitOptions.GENERATE;
    - <name> if property 'nameInitMode' is NameInitOptions.KEEP_ORIGINAL.

    :return: list of steps
    """
    def shouldGenerate(step):
        # Generate a fresh name when none was provided at all, or when
        # generation was explicitly requested.
        mode = step.build.getProperty('nameInitMode')
        return (step.build.getProperty('name') is None and mode is None) \
            or mode == NameInitOptions.GENERATE

    generateStep = steps.SetProperty(
        name=util.Interpolate("Set 'name' property"),
        property="name",
        value=util.Interpolate(
            "%(prop:branch)s-buildbot-%(kw:startTime)s",
            startTime=getFormattedDateTime("%Y-%b-%d-%H-%M-%S")),
        doStepIf=shouldGenerate,
        hideStepIf=lambda results, s: results == SKIPPED)
    return [generateStep]
def createRunTestSnapshotSteps():
    """Assemble the step list for running tests against a snapshot.

    :return: list of steps
    """
    # run_test_snapshot.sh script does not take 'name' argument, but instead
    # defines its own which consists of environmental variables
    # ${box}-${product}-${version}-permanent. This step overwrites property
    # 'name' which is set from scheduler to match 'name' in script.
    overwriteNameStep = steps.SetProperty(
        property="name",
        value=util.Interpolate(
            "%(prop:box)s-%(prop:product)s-%(prop:version)s-permanent"))
    commonPropertiesStep = steps.SetProperties(
        properties=run_test.configureCommonProperties)
    return ([overwriteNameStep]
            + common.configureMdbciVmPathProperty()
            + common.cloneRepository()
            + [commonPropertiesStep]
            + common.remoteRunScriptAndLog()
            + common.parseCtestLog()
            + common.findCoredump()
            + common.writeBuildsResults()
            + common.showTestResult(alwaysRun=True)
            + common.removeSnapshotLock()
            + common.removeLock()
            + common.cleanBuildDir())
def __init__(self, pkgbuilddir: str, group: str, pkg_base: str,
             properties: dict):
    """Assemble the build pipeline for one Arch package.

    :param pkgbuilddir: root directory holding per-group package sources
    :param group: package group; "community" and "packages" keep their
        files under an extra "trunk" subdirectory
    :param pkg_base: package base name, appended to the workdir path
    :param properties: srcinfo-derived dict; keys read here: "gpg_sign",
        "sshdir", "depends", "src_names", "install", "git_tag",
        "git_revision", "pkg_names"
    """
    super().__init__()
    gpg_sign = properties["gpg_sign"]
    sshdir = properties["sshdir"]
    # Master-side directory containing PKGBUILD and sources for this package.
    workdir = f"{pkgbuilddir}/{group}/{pkg_base}"
    if group in ("community", "packages"):
        workdir += "/trunk"
    # set initial properties
    self.addStep(
        steps.SetProperties(name="set properties from srcinfo",
                            properties=properties))
    # find dependencies
    depends = properties["depends"]
    if depends is not None:
        for depends_name in depends:
            # FindDependency reads the 'depends_name' property set just
            # before it; the pair must stay adjacent.
            self.addSteps([
                steps.SetProperty(
                    name=f"set depends_name to {depends_name}",
                    property="depends_name",
                    value=depends_name,
                    hideStepIf=True,
                ),
                FindDependency(name=f"find {depends_name}"),
            ])
    # download build files
    self.addStep(
        steps.FileDownload(
            name="download PKGBUILD",
            mastersrc=f"{workdir}/PKGBUILD",
            workerdest="PKGBUILD",
        ))
    for src_name in properties["src_names"]:
        self.addStep(
            steps.FileDownload(
                name=f"download {src_name}",
                mastersrc=f"{workdir}/{src_name}",
                workerdest=src_name,
            ))
    # Optional .install script referenced by the PKGBUILD.
    install = properties["install"]
    if install:
        self.addStep(
            steps.FileDownload(
                name=f"download {install}",
                mastersrc=f"{workdir}/{install}",
                workerdest=install,
            ))
    # update pkgver, pkgrel
    self.addSteps([SetPkgver(), SetPkgrel(), Updpkgsums()])
    # update git tag revision
    if properties["git_tag"]:
        self.addStep(SetTagRevision())
    # update git commit revision
    if properties["git_revision"]:
        self.addStep(SetCommitRevision())
    # build
    self.addStep(ArchBuild())
    # update properties: regenerate .SRCINFO, push the (possibly bumped)
    # PKGBUILD/.SRCINFO back to the master, and reload properties from it
    self.addSteps([
        Srcinfo(),
        steps.FileUpload(
            name="upload updated PKGBUILD",
            workersrc="PKGBUILD",
            masterdest=f"{workdir}/PKGBUILD",
        ),
        steps.FileUpload(
            name="upload updated .SRCINFO",
            workersrc=".SRCINFO",
            masterdest=f"{workdir}/.SRCINFO",
        ),
        steps.SetProperties(
            name="refresh properties from srcinfo",
            properties=ArchBuildUtil.srcinfo,
        ),
    ])
    # upload and optionally sign packages
    for pkg_name in properties["pkg_names"]:
        # The upload/move/sign/repo-add steps below all resolve the current
        # package via the 'pkg_name' property set first in this group.
        self.addSteps([
            steps.SetProperty(
                name=f"set pkg_name to {pkg_name}",
                property="pkg_name",
                value=pkg_name,
                hideStepIf=True,
            ),
            steps.FileUpload(
                name=f"upload {pkg_name}",
                workersrc=ArchBuildUtil.pkg,
                masterdest=ArchBuildUtil.pkg_masterdest,
            ),
            MovePackage(name=f"move {pkg_name}"),
        ])
        if gpg_sign:
            self.addSteps([
                GpgSign(name=f"sign {pkg_name}"),
                steps.FileDownload(
                    name=f"download {pkg_name} sig",
                    mastersrc=ArchBuildUtil.sig_mastersrc,
                    workerdest=ArchBuildUtil.sig_workerdest,
                ),
            ])
        # update repository
        self.addStep(RepoAdd(name=f"add {pkg_name}"))
    # synchronize repository
    if sshdir:
        self.addStep(CreateSshfsDirectory())
        self.addStep(MountPkgbuildCom(env=ArchBuildUtil.ssh_agent))
        self.addStep(RepoSync(env=ArchBuildUtil.ssh_agent))
        self.addStep(UnmountPkgbuildCom())
    # cleanup
    self.addStep(Cleanup())
def make_builder_config(repo_url, name, worker_name, config, lock,
                        snapshots_dir, snapshots_url, snapshots_default_max):
    """Create a BuilderConfig that fetches, configures, builds, tests and
    (optionally) packages and publishes snapshot archives.

    :param repo_url: git repository to build
    :param name: builder name
    :param worker_name: worker the builder runs on
    :param config: dict of worker-specific settings, also set as properties
    :param lock: master lock, acquired exclusively for the whole build
    :param snapshots_dir: directory snapshots are published to; publishing
        steps are skipped when this or snapshots_url is None
    :param snapshots_url: public URL prefix matching snapshots_dir
    :param snapshots_default_max: fallback number of snapshots to keep
    :return: util.BuilderConfig
    """
    # Normalize to a trailing slash so later Interpolates can just append a
    # filename. BUG FIX: the original used `is not "/"`, an identity
    # comparison against a string literal that only works via CPython
    # interning and raises SyntaxWarning on Python >= 3.8; use `!=`.
    if snapshots_dir and snapshots_dir[-1] != "/":
        snapshots_dir += "/"
    if snapshots_url and snapshots_url[-1] != "/":
        snapshots_url += "/"
    builder = util.BuildFactory()
    builder.addStep(
        steps.SetProperties(name="Worker Config File",
                            properties=config,
                            hideStepIf=True))
    builder.addStep(
        steps.SetPropertiesFromEnv(
            variables=["WORKER_HOST", "WORKER_REPO_DIR"], hideStepIf=True))
    # TODO: use `reference` to a common volume instead? or make the builder
    # dependent on another fetch-only builder so only one builder tries to pull it?
    builder.addStep(
        steps.GitHub(repourl=repo_url,
                     workdir=Property("WORKER_REPO_DIR", None),
                     logEnviron=False,
                     getDescription={
                         "always": True,
                         "tags": True
                     }))
    builder.addStep(
        FileExistsSetProperty(name="config.mk Existence Check",
                              property="already_configured",
                              file="%s/config.mk" % builder.workdir,
                              hideStepIf=True))
    compilation_environment = Property("env", {})
    builder.addStep(
        steps.Configure(command=compute_configure,
                        env=compilation_environment,
                        doStepIf=is_not_configured))
    builder.addStep(
        steps.SetPropertyFromCommand(name="Python (Worker)",
                                     property="cpu_count",
                                     command=["python", "-c", GET_CPU_COUNT],
                                     flunkOnFailure=False,
                                     warnOnFailure=True,
                                     hideStepIf=True,
                                     description="getting CPU count",
                                     descriptionDone="got CPU count"))
    # In at least Buildbot 0.9.12, warningPattern and suppressionList are not
    # renderable, so just get the properties from the config file immediately
    compiler_warning_pattern = config.get(
        "compiler_warning_pattern",
        r"^([^:]+):(\d+):(?:\d+:)? [Ww]arning: (.*)$")
    compiler_warning_extractor = steps.Compile.warnExtractFromRegexpGroups
    compiler_suppression_file = Property("compiler_suppression_file", None)
    compiler_suppression_list = config.get("compiler_suppression_list", None)
    builder.addStep(
        steps.Compile(command=["make",
                               Interpolate("-j%(prop:cpu_count:~1)s")],
                      env=compilation_environment,
                      warningPattern=compiler_warning_pattern,
                      warningExtractor=compiler_warning_extractor,
                      suppressionFile=compiler_suppression_file,
                      suppressionList=compiler_suppression_list))
    builder.addStep(
        steps.Test(command=[
            "make",
            Interpolate("%(prop:can_run_tests:"
                        "#?|test|test/runner)s")
        ],
                   env=compilation_environment,
                   warningPattern=compiler_warning_pattern,
                   warningExtractor=compiler_warning_extractor,
                   suppressionFile=compiler_suppression_file,
                   suppressionList=compiler_suppression_list,
                   haltOnFailure=True,
                   flunkOnFailure=True))
    if snapshots_dir is not None and snapshots_url is not None:
        builder.addStep(
            steps.SetProperty(name="Computed By %s" % path.basename(__file__),
                              property="package_name",
                              value=compute_package_name,
                              hideStepIf=True,
                              doStepIf=should_package))
        builder.addStep(
            Package(package_name=Property("package_name"),
                    package_format=Property("package_archive_format"),
                    make_target=Property("package_make_target"),
                    package_directory=Property("package_directory", None),
                    strip_binaries=Property("package_strip_binaries", None),
                    env=compilation_environment,
                    doStepIf=should_package))
        source_path = Property("package_filename")
        target_path = Interpolate("%s%%(prop:package_filename)s" %
                                  snapshots_dir)
        target_url = Interpolate("%s%%(prop:package_filename)s" %
                                 snapshots_url)
        # This is not an ideal target link calculation since the archive format
        # in package_filename might be fixed up by the Package step, but here
        # only None is converted into tar.xz, which is not exactly the same
        target_link = Interpolate("%s%%(prop:buildername)s-latest."
                                  "%%(prop:package_archive_format:-tar.xz)s" %
                                  snapshots_dir)
        builder.addStep(
            CleaningFileUpload(name="publish",
                               workersrc=source_path,
                               masterdest=target_path,
                               url=target_url,
                               clean=True,
                               doStepIf=should_package))
        builder.addStep(
            steps.MasterShellCommand(
                name="update latest archive",
                command=["ln", "-sf", target_path, target_link],
                logEnviron=False,
                doStepIf=should_package))
        builder.addStep(
            MasterCleanSnapshots(
                name="clean old snapshots",
                workdir=snapshots_dir,
                file_prefix=Interpolate("%(prop:buildername)s-"),
                num_to_keep=Property("num_snapshots_to_keep",
                                     snapshots_default_max),
                doStepIf=should_package))
    return util.BuilderConfig(name=name,
                              workername=worker_name,
                              collapseRequests=True,
                              factory=builder,
                              nextBuild=pick_next_build,
                              locks=[lock.access("exclusive")])
def build_weekly():
    """Factory for the weekly build: clone the source, stage it under the
    weekly source tree, set the merge property and trigger all builders."""
    # Path template shared by the remove/copy/trigger steps below.
    src_dir_fmt = os.path.join(_WEEKLY_SRC_BASE, "%(prop:branch)s",
                               "%(prop:commit-description)s")

    factory = util.BuildFactory()
    factory.addStep(
        steps.MakeDirectory(name="create src directory", dir="src"))
    factory.addStep(
        steps.GitHub(name="fetch PR source",
                     repourl=util.Property("repository"),
                     mode="full",
                     method="fresh",
                     submodules=True,
                     clobberOnFailure=True,
                     getDescription=True,
                     workdir="src"))
    factory.addStep(steps.RemoveDirectory(dir=util.Interpolate(src_dir_fmt)))
    factory.addStep(
        steps.CopyDirectory(name="copy src to srcdir",
                            src="src",
                            dest=util.Interpolate(src_dir_fmt)))
    factory.addStep(
        steps.SetProperty(
            property=util.Interpolate("merge_%(prop:branch)s"),
            value=True,
            hideStepIf=True,
        ))

    # load builders.json with definitions on how to build things
    builders_path = os.path.join(os.path.dirname(__file__), "builders.json")
    with open(builders_path, "r") as builders_file:
        build_config = json.load(builders_file)

    # now we have all necessary merge properties together,
    # we can actually kickoff builds for them
    factory.addStep(
        custom_steps.BuildTrigger(
            name="trigger all builders",
            build_config=build_config,
            schedulerNames=["trigger"],
            runner="time",
            set_properties={
                "src_dir": util.Interpolate(src_dir_fmt),
                "got_revision": util.Property("got_revision"),
            },
            updateSourceStamp=True,
            waitForFinish=True))
    return factory
def getBuildPipeline():
    """Build the factory that packages Opencast as RPMs and publishes them
    to the unstable el/<version> yum repository on S3.

    :return: util.BuildFactory
    """
    rpmsClone = steps.Git(
        repourl="{{ source_rpm_repo_url }}",
        branch=util.Interpolate("%(prop:rpmspec_override:-%(prop:branch)s)s"),
        alwaysUseLatest=True,
        shallow=True,
        mode="full",
        method="clobber",
        flunkOnFailure=True,
        haltOnFailure=True,
        name="Cloning rpm packaging configs")

    rpmsVersion = steps.SetPropertyFromCommand(
        command="git rev-parse HEAD",
        property="rpm_script_rev",
        flunkOnFailure=True,
        warnOnFailure=True,
        haltOnFailure=True,
        workdir="build",
        name="Get rpm script revision")

    # Full package version: <major>.git<short revision>-<buildnumber>.
    rpmsFullVersion = steps.SetProperty(
        property="rpm_version",
        value=util.Interpolate(
            "%(prop:pkg_major_version)s.git%(prop:short_revision)s-%(prop:buildnumber)s"))

    rpmsSetup = common.shellSequence(
        commands=[
            common.shellArg(
                # We're using a string here rather than an arg array since we
                # need the shell functions
                command='echo -e "%_topdir `pwd`" > ~/.rpmmacros',
                logname="rpmdev-setup"),
        ],
        workdir="build/rpmbuild",
        name="Fetch built artifacts and build prep")

    rpmsFetch = common.syncAWS(
        pathFrom="s3://{{ s3_public_bucket }}/builds/{{ builds_fragment }}",
        pathTo="rpmbuild/SOURCES",
        name="Fetch build from S3")

    rpmsPrep = common.shellSequence(
        commands=[
            common.shellArg(
                command=[
                    'sed', '-i',
                    util.Interpolate(
                        's/define srcversion .*$/define srcversion %(prop:pkg_major_version)s.%(prop:pkg_minor_version)s/g'),
                    util.Interpolate('opencast.spec')
                ],
                logname='version'),
            common.shellArg(
                command=[
                    'rpmdev-bumpspec', '-u', '"Buildbot <*****@*****.**>"',
                    '-c',
                    util.Interpolate(
                        'Opencast revision %(prop:got_revision)s, packaged with RPM scripts version %(prop:rpm_script_rev)s'
                    ),
                    util.Interpolate('opencast.spec')
                ],
                logname='rpmdev-bumpspec'),
            common.shellArg(
                command=[
                    'sed', '-i',
                    # Raw string fixes the invalid "\(" escape the non-raw
                    # original relied on (DeprecationWarning since 3.6,
                    # SyntaxWarning since 3.12); byte content is unchanged.
                    util.Interpolate(
                        r"s/\(Version: *\) .*/\1 %(prop:pkg_major_version)s.git%(prop:short_revision)s/"),
                    util.Interpolate('opencast.spec')
                ],
                # NOTE(review): duplicates the first sed's logname
                # 'version' — confirm whether that is intentional.
                logname='version'),
            common.shellArg(
                command=[
                    'sed', '-i',
                    util.Interpolate(
                        's/2%%{?dist}/%(prop:buildnumber)s%%{?dist}/g'),
                    util.Interpolate('opencast.spec')
                ],
                logname='buildnumber'),
            common.shellArg(
                command=['rm', '-f', 'BUILD/opencast/build/revision.txt'],
                logname="cleanup")
        ],
        workdir="build/rpmbuild/SPECS",
        name="Prepping rpms")

    rpmsBuild = common.shellSequence(
        commands=getRPMBuilds,
        workdir="build/rpmbuild/SPECS",
        name="Build rpms")

    # Note: We're using a string here because using the array disables shell
    # globbing!
    rpmsUpload = common.syncAWS(
        pathFrom="rpmbuild/RPMS/noarch",
        pathTo="s3://{{ s3_public_bucket }}/repo/rpms/unstable/el/%(prop:el_version)s/noarch/",
        name="Upload rpms to S3")

    # Keep only the five newest builds (keyed off the allinone package's
    # version field) in the unstable repo.
    rpmsPrune = common.shellCommand(
        command=util.Interpolate(
            "ls -t /builder/s3/repo/rpms/unstable/el/%(prop:el_version)s/noarch | grep allinone | tail -n +6 | cut -f 4 -d '-' | while read version; do rm -f /builder/s3/repo/rpms/unstable/el/%(prop:el_version)s/noarch/*$version; done"),
        name=util.Interpolate(
            "Pruning %(prop:pkg_major_version)s unstable repository"))

    repoMetadata = common.shellCommand(
        command=['createrepo', '.'],
        workdir=util.Interpolate(
            "/builder/s3/repo/rpms/unstable/el/%(prop:el_version)s/noarch"),
        name="Building repository")

    f_package_rpms = util.BuildFactory()
    f_package_rpms.addStep(common.getPreflightChecks())
    f_package_rpms.addStep(rpmsClone)
    f_package_rpms.addStep(rpmsVersion)
    f_package_rpms.addStep(common.getLatestBuildRevision())
    f_package_rpms.addStep(common.getShortBuildRevision())
    f_package_rpms.addStep(rpmsFullVersion)
    f_package_rpms.addStep(rpmsSetup)
    f_package_rpms.addStep(rpmsFetch)
    f_package_rpms.addStep(rpmsPrep)
    f_package_rpms.addStep(common.loadSigningKey())
    f_package_rpms.addStep(rpmsBuild)
    f_package_rpms.addStep(common.unloadSigningKey())
    f_package_rpms.addStep(rpmsUpload)
    f_package_rpms.addStep(common.deployS3fsSecrets())
    f_package_rpms.addStep(common.mountS3fs())
    f_package_rpms.addStep(rpmsPrune)
    f_package_rpms.addStep(repoMetadata)
    f_package_rpms.addStep(common.unmountS3fs())
    f_package_rpms.addStep(common.cleanupS3Secrets())
    f_package_rpms.addStep(common.getClean())
    return f_package_rpms
def make_builder_config(repo_url, name, worker_name, config, lock,
                        snapshots_dir, snapshots_url, snapshots_default_max):
    """Create a BuilderConfig that fetches, configures, builds, tests and
    (optionally) packages and publishes per-branch snapshot archives,
    including split debug symbols.

    :param repo_url: git repository to build
    :param name: builder name
    :param worker_name: worker the builder runs on
    :param config: dict of worker-specific settings, also set as properties
    :param lock: master lock, acquired in counting mode
    :param snapshots_dir: directory snapshots are published to; publishing
        steps are skipped when this or snapshots_url is None
    :param snapshots_url: public URL prefix matching snapshots_dir
    :param snapshots_default_max: fallback number of snapshots to keep
    :return: util.BuilderConfig
    """
    builder = util.BuildFactory()
    builder.addStep(
        steps.SetProperties(name="Worker Config File",
                            properties=config,
                            hideStepIf=True))
    builder.addStep(
        steps.SetPropertiesFromEnv(
            variables=["WORKER_HOST", "WORKER_REPO_DIR"], hideStepIf=True))
    # TODO: use `reference` to a common volume instead? or make the builder
    # dependent on another fetch-only builder so only one builder tries to pull it?
    builder.addStep(
        steps.GitHub(repourl=repo_url,
                     workdir=Property("WORKER_REPO_DIR", None),
                     logEnviron=False,
                     getDescription={
                         "always": True,
                         "tags": True
                     }))
    builder.addStep(
        FileExistsSetProperty(name="config.mk Existence Check",
                              property="already_configured",
                              file="%s/config.mk" % builder.workdir,
                              hideStepIf=True))
    compilation_environment = Property("env", {})
    builder.addStep(
        steps.Configure(command=compute_configure,
                        env=compilation_environment,
                        doStepIf=ConfigChecker().needs_configuration))
    builder.addStep(
        steps.SetPropertyFromCommand(name="Python (Worker)",
                                     property="cpu_count",
                                     command=["python", "-c", GET_CPU_COUNT],
                                     flunkOnFailure=False,
                                     warnOnFailure=True,
                                     hideStepIf=True,
                                     description="getting CPU count",
                                     descriptionDone="got CPU count"))
    # In at least Buildbot 0.9.12, warningPattern and suppressionList are not
    # renderable, so just get the properties from the config file immediately
    compiler_warning_pattern = config.get(
        "compiler_warning_pattern",
        r"^([^:]+):(\d+):(?:\d+:)? [Ww]arning: (.*)$")
    compiler_warning_extractor = steps.Compile.warnExtractFromRegexpGroups
    compiler_suppression_file = Property("compiler_suppression_file", None)
    compiler_suppression_list = config.get("compiler_suppression_list", None)
    builder.addStep(
        steps.Compile(command=["make",
                               Interpolate("-j%(prop:cpu_count:~1)s")],
                      env=compilation_environment,
                      warningPattern=compiler_warning_pattern,
                      warningExtractor=compiler_warning_extractor,
                      suppressionFile=compiler_suppression_file,
                      suppressionList=compiler_suppression_list))
    builder.addStep(
        steps.Test(command=[
            "make",
            Interpolate("%(prop:can_run_tests:"
                        "#?|test|test/runner)s")
        ],
                   env=compilation_environment,
                   warningPattern=compiler_warning_pattern,
                   warningExtractor=compiler_warning_extractor,
                   suppressionFile=compiler_suppression_file,
                   suppressionList=compiler_suppression_list,
                   haltOnFailure=True,
                   flunkOnFailure=True))
    if snapshots_dir is not None and snapshots_url is not None:
        # Normalize to a trailing slash, then append the per-branch
        # subdirectory. BUG FIX: the original used `is not "/"`, an identity
        # comparison against a string literal that only works via CPython
        # interning and raises SyntaxWarning on Python >= 3.8; use `!=`.
        if snapshots_dir and snapshots_dir[-1] != "/":
            snapshots_dir += "/"
        if snapshots_url and snapshots_url[-1] != "/":
            snapshots_url += "/"
        snapshots_dir = "%s%%(prop:branch)s/" % snapshots_dir
        snapshots_url = "%s%%(prop:branch)s/" % snapshots_url
        builder.addStep(
            steps.SetProperty(name="Computed By %s" % path.basename(__file__),
                              property="package_name",
                              value=compute_package_name,
                              hideStepIf=True,
                              doStepIf=should_package))
        builder.addStep(
            Package(package_name=Property("package_name"),
                    package_files=Property("package_files", None),
                    package_format=Property("package_archive_format"),
                    make_target=Property("package_make_target"),
                    split_debug_package=Property("split_debug_package", True),
                    extra_files=Property("package_extra_files", None),
                    package_script=Interpolate(config.get(
                        "package_script", "")),
                    env=compilation_environment,
                    doStepIf=should_package))
        latest_link = Interpolate("%s%%(prop:buildername)s-latest."
                                  "%%(prop:package_archive_format:-tar.xz)s" %
                                  snapshots_dir)
        make_uploader_steps(builder=builder,
                            snapshots_dir=snapshots_dir,
                            snapshots_url=snapshots_url,
                            publish_name="archive",
                            property_name="package_filename",
                            latest_link=latest_link,
                            do_step_if=should_package)
        latest_link = Interpolate("%s%%(prop:buildername)s"
                                  "-latest-debug-symbols.tar.xz" %
                                  snapshots_dir)
        make_uploader_steps(builder=builder,
                            snapshots_dir=snapshots_dir,
                            snapshots_url=snapshots_url,
                            publish_name="debug archive",
                            property_name="debug_package_filename",
                            latest_link=latest_link,
                            do_step_if=should_package_debug)
        builder.addStep(
            MasterCleanSnapshots(
                name="clean old snapshots",
                workdir=Interpolate(snapshots_dir),
                file_prefix=Interpolate("%(prop:buildername)s-"),
                num_to_keep=Property("num_snapshots_to_keep",
                                     snapshots_default_max),
                secondary_file_suffix="-debug-symbols",
                file_extensions=r"\.(?:tar(?:\.[xg]z)?|[a-z]{3,4})$",
                doStepIf=should_package,
                hideStepIf=True))
    return util.BuilderConfig(name=name,
                              workername=worker_name,
                              collapseRequests=True,
                              factory=builder,
                              nextBuild=pick_next_build,
                              locks=[lock.access("counting")])
def build_push():
    """Factory for push builds: clone the source, pack it into a tar.xz
    archive, set the merge property and trigger all builders."""
    # Archive path template shared by the remove/create/trigger steps below.
    archive_fmt = os.path.join(_PUSH_SRC_BASE, "%(prop:branch)s",
                               "%(prop:commit-description)s.tar.xz")

    def hide_ok(results, s):
        # Hide skipped and successful runs to keep the build page compact.
        return results == SKIPPED or results == SUCCESS

    factory = util.BuildFactory()
    factory.addStep(
        steps.MakeDirectory(name="create src directory", dir="src"))
    factory.addStep(
        steps.GitHub(name="fetch PR source",
                     repourl=util.Property("repository"),
                     mode="full",
                     method="fresh",
                     submodules=True,
                     clobberOnFailure=True,
                     getDescription=True,
                     workdir="src"))
    factory.addStep(
        steps.ShellCommand(
            name="remove old source archive",
            command=["rm", "-rf", util.Interpolate(archive_fmt)],
            workdir="src",
            hideStepIf=hide_ok))
    factory.addStep(
        steps.ShellCommand(
            name="create source archive",
            command=["tar", "cJf", util.Interpolate(archive_fmt), "."],
            workdir="src",
            hideStepIf=hide_ok))
    factory.addStep(
        steps.SetProperty(
            property=util.Interpolate("merge_%(prop:branch)s"),
            value=True,
            hideStepIf=True,
        ))

    # load builders.json with definitions on how to build things
    builders_path = os.path.join(os.path.dirname(__file__), "builders.json")
    with open(builders_path, "r") as builders_file:
        build_config = json.load(builders_file)

    # now we have all necessary merge properties together,
    # we can actually kickoff builds for them
    factory.addStep(
        custom_steps.BuildTrigger(
            name="trigger all builders",
            build_config=build_config,
            schedulerNames=["trigger"],
            runner="push",
            set_properties={
                "src_archive": util.Interpolate(archive_fmt)
            },
            updateSourceStamp=True,
            waitForFinish=True))
    return factory
def mergeability_sequence(branch, prev_branch, src_base):
    """Build the steps that test whether the merged prev_branch tree merges
    cleanly into `branch`, and publish the merged tree under `src_base`.

    Every step is gated on property merge_<prev_branch> being truthy, i.e.
    on the previous merge in the chain having succeeded.

    :param branch: branch to merge into
    :param prev_branch: branch whose test_merge_<prev_branch> result is merged
    :param src_base: base directory the merged tree is rsynced to
    :return: list of steps
    """
    def condition(step):
        # Only run if the previous branch in the chain merged successfully.
        return step.getProperty("merge_"+prev_branch, False)

    fetch_base = steps.ShellCommand(
        name="fetch "+branch+" from upstream",
        command=[
            "git", "fetch", "-t",
            util.Property("repository"), branch
        ],
        workdir="src",
        hideStepIf=lambda results, s: results == SKIPPED or results == SUCCESS,
        doStepIf=condition
    )
    create_new_worktree = steps.ShellCommand(
        name="checkout test_merge_"+branch+" branch",
        command=[
            "git", "checkout", "-B", "test_merge_"+branch, "FETCH_HEAD"
        ],
        workdir="src",
        hideStepIf=lambda results, s: results == SKIPPED,
        doStepIf=condition
    )
    merge_new_changes = steps.ShellCommand(
        name="merge test_merge_"+prev_branch+" into test_merge_"+branch,
        command=[
            "git", "merge", "--no-edit", "test_merge_"+prev_branch
        ],
        # A failed merge must not fail the build; it only leaves
        # merge_<branch> unset so dependent steps get skipped.
        flunkOnFailure=False,
        workdir="src",
        hideStepIf=lambda results, s: results == SKIPPED,
        doStepIf=condition
    )

    @util.renderer
    def set_merge_prop(step):
        # NOTE(review): assumes the merge step is exactly two executed
        # steps back when this renderer runs — fragile if a step is ever
        # inserted between merge_new_changes and set_merge_property.
        if step.build.executedSteps[-2].results == SUCCESS:
            return True
        return False

    set_merge_property = steps.SetProperty(
        name="Set merge_"+branch+" to True if merge was successful",
        property="merge_"+branch,
        value=set_merge_prop,
        hideStepIf=lambda results, s: results == SKIPPED or results == SUCCESS,
        doStepIf=condition)
    sync_submodule = steps.ShellCommand(
        name="sync submodules on test_merge_"+branch,
        command=[
            "git", "submodule", "update", "--init", "--recursive"
        ],
        workdir="src",
        hideStepIf=lambda results, s: results == SKIPPED or results == SUCCESS,
        doStepIf=condition
    )
    make_src_base = steps.ShellCommand(
        name="create src_base directory for test_merge_"+branch,
        command=[
            "mkdir", "-p",
            util.Interpolate(
                os.path.join(src_base, "%(prop:github.number)s", branch))
        ],
        workdir="src",
        hideStepIf=lambda results, s: results == SKIPPED or results == SUCCESS,
        doStepIf=condition
    )
    copy_src = steps.ShellCommand(
        name="copy test_merge_"+branch+" src dir to common src dir",
        command=[
            "rsync", "--delete", "-rLz", "./",
            util.Interpolate(
                os.path.join(src_base, "%(prop:github.number)s", branch))
        ],
        workdir="src",
        hideStepIf=lambda results, s: results == SKIPPED or results == SUCCESS,
        doStepIf=condition,
    )
    result = []
    result.append(fetch_base)
    result.append(create_new_worktree)
    result.append(merge_new_changes)
    result.append(set_merge_property)
    result.append(sync_submodule)
    result.append(make_src_base)
    result.append(copy_src)
    return result
def build_PR():
    """Factory for pull-request builds: fetch the PR, stage its source tree
    and test mergeability into the follow-up branches before triggering the
    right builders."""
    # @util.renderer
    # def check_mergeable(props):
    #     mergeable = props.getProperty("github.mergeable", False)
    #     return mergeable
    # check_mergeables = steps.Assert(
    #     check_mergeable,
    #     name="check if PR was mergeable",
    #     haltOnFailure=True
    # )

    def hide_ok(results, s):
        # Hide skipped and successful runs to keep the build page compact.
        return results == SKIPPED or results == SUCCESS

    # Staging directory for this PR's source tree.
    pull_dir_fmt = os.path.join(_BASEPATH, "pull", "%(prop:github.number)s",
                                "%(prop:github.base.ref)s")

    factory = util.BuildFactory()
    factory.addStep(
        steps.MakeDirectory(name="create src directory", dir="src"))
    factory.addStep(
        steps.GitHub(name="fetch PR source",
                     repourl=util.Property("repository"),
                     mode="full",
                     method="fresh",
                     submodules=True,
                     retryFetch=True,
                     clobberOnFailure=True,
                     workdir="src"))
    factory.addStep(
        steps.SetProperty(
            name="set merge property",
            property=util.Interpolate("merge_%(prop:github.base.ref)s"),
            value=True,
            hideStepIf=hide_ok,
        ))
    factory.addStep(
        steps.ShellCommand(
            name="create test_merge branch for further steps",
            command=[
                "git", "branch", "-f",
                util.Interpolate("test_merge_%(prop:github.base.ref)s")
            ],
            workdir="src"))
    factory.addStep(
        steps.RemoveDirectory(dir=util.Interpolate(pull_dir_fmt),
                              hideStepIf=hide_ok))
    factory.addStep(
        steps.CopyDirectory(name="copy src to srcdir",
                            src="src",
                            dest=util.Interpolate(pull_dir_fmt),
                            hideStepIf=hide_ok))
    # Chain mergeability checks: maint -> master -> next -> python3.
    factory.addSteps(
        sequences.mergeability_sequence("master", "maint", _PULL_SRC_BASE))
    factory.addSteps(
        sequences.mergeability_sequence("next", "master", _PULL_SRC_BASE))
    factory.addSteps(
        sequences.mergeability_sequence("python3", "next", _PULL_SRC_BASE))

    # load builders.json with definitions on how to build things
    builders_path = os.path.join(os.path.dirname(__file__), "builders.json")
    with open(builders_path, "r") as builders_file:
        build_config = json.load(builders_file)

    factory.addStep(
        custom_steps.BuildTrigger(
            name="trigger the right builders",
            build_config=build_config,
            schedulerNames=["trigger"],
            runner="pull",
            set_properties={
                "pr_base": util.Property("github.base.ref"),
                "src_dir": util.Interpolate(
                    os.path.join(_PULL_SRC_BASE, "%(prop:github.number)s"))
            },
            updateSourceStamp=False,
            waitForFinish=True))
    return factory