def determineBuildId():
    """Return a step that auto-populates the ``buildId`` property.

    The step is gated by ``shouldGenerateBuildId`` and takes its property
    values from the ``generateBuildId`` renderer.
    """
    set_build_id = steps.SetProperties(
        name="Automatically set buildId property",
        doStepIf=shouldGenerateBuildId,
        properties=generateBuildId,
    )
    return set_build_id
def determineTestRunName():
    """Return a step that appends the test run id to the build's name.

    The step is gated by ``shouldAppendTestRunId`` and takes its property
    values from the ``generateTestRunId`` renderer.
    """
    set_run_name = steps.SetProperties(
        name="Add test run id to the name of the build",
        doStepIf=shouldAppendTestRunId,
        properties=generateTestRunId,
    )
    return set_run_name
def __init__(self, pkgbuilddir: str, group: str, pkg_base: str, properties: dict):
    """Assemble the full build pipeline for one Arch Linux package.

    Steps are added strictly in dependency order: property setup,
    dependency resolution, file downloads, version bumps, the build
    itself, property/file refresh, per-package upload/sign/repo-add,
    optional repository sync over sshfs, and cleanup.

    :param pkgbuilddir: root directory holding the PKGBUILD tree on the master
    :param group: package group (e.g. "community", "packages")
    :param pkg_base: base name of the package directory
    :param properties: srcinfo-derived build properties; the keys read here
        are gpg_sign, sshdir, depends, src_names, install, git_tag,
        git_revision and pkg_names
    """
    super().__init__()
    gpg_sign = properties["gpg_sign"]
    sshdir = properties["sshdir"]
    # Master-side working directory for this package's build files.
    workdir = f"{pkgbuilddir}/{group}/{pkg_base}"
    # Official repos keep the live PKGBUILD under a trunk/ subdirectory.
    if group in ("community", "packages"):
        workdir += "/trunk"
    # set initial properties
    self.addStep(
        steps.SetProperties(name="set properties from srcinfo", properties=properties))
    # find dependencies
    depends = properties["depends"]
    if depends is not None:
        for depends_name in depends:
            # depends_name is set as a (hidden) property so FindDependency
            # can render it; one pair of steps per dependency.
            self.addSteps([
                steps.SetProperty(
                    name=f"set depends_name to {depends_name}",
                    property="depends_name",
                    value=depends_name,
                    hideStepIf=True,
                ),
                FindDependency(name=f"find {depends_name}"),
            ])
    # download build files
    self.addStep(
        steps.FileDownload(
            name="download PKGBUILD",
            mastersrc=f"{workdir}/PKGBUILD",
            workerdest="PKGBUILD",
        ))
    # Additional source files listed in the srcinfo (patches, configs, ...).
    for src_name in properties["src_names"]:
        self.addStep(
            steps.FileDownload(
                name=f"download {src_name}",
                mastersrc=f"{workdir}/{src_name}",
                workerdest=src_name,
            ))
    # Optional .install script referenced by the PKGBUILD.
    install = properties["install"]
    if install:
        self.addStep(
            steps.FileDownload(
                name=f"download {install}",
                mastersrc=f"{workdir}/{install}",
                workerdest=install,
            ))
    # update pkgver, pkgrel
    self.addSteps([SetPkgver(), SetPkgrel(), Updpkgsums()])
    # update git tag revision
    if properties["git_tag"]:
        self.addStep(SetTagRevision())
    # update git commit revision
    if properties["git_revision"]:
        self.addStep(SetCommitRevision())
    # build
    self.addStep(ArchBuild())
    # update properties: push the (possibly rewritten) PKGBUILD/.SRCINFO back
    # to the master and re-read properties from the refreshed srcinfo.
    self.addSteps([
        Srcinfo(),
        steps.FileUpload(
            name="upload updated PKGBUILD",
            workersrc="PKGBUILD",
            masterdest=f"{workdir}/PKGBUILD",
        ),
        steps.FileUpload(
            name="upload updated .SRCINFO",
            workersrc=".SRCINFO",
            masterdest=f"{workdir}/.SRCINFO",
        ),
        steps.SetProperties(
            name="refresh properties from srcinfo",
            properties=ArchBuildUtil.srcinfo,
        ),
    ])
    # upload and optionally sign packages
    for pkg_name in properties["pkg_names"]:
        # pkg_name is set as a hidden property so the renderables in
        # ArchBuildUtil (pkg, pkg_masterdest, ...) resolve per package.
        self.addSteps([
            steps.SetProperty(
                name=f"set pkg_name to {pkg_name}",
                property="pkg_name",
                value=pkg_name,
                hideStepIf=True,
            ),
            steps.FileUpload(
                name=f"upload {pkg_name}",
                workersrc=ArchBuildUtil.pkg,
                masterdest=ArchBuildUtil.pkg_masterdest,
            ),
            MovePackage(name=f"move {pkg_name}"),
        ])
        if gpg_sign:
            # Sign on the master, then fetch the detached signature back to
            # the worker so repo-add can pick it up.
            self.addSteps([
                GpgSign(name=f"sign {pkg_name}"),
                steps.FileDownload(
                    name=f"download {pkg_name} sig",
                    mastersrc=ArchBuildUtil.sig_mastersrc,
                    workerdest=ArchBuildUtil.sig_workerdest,
                ),
            ])
        # update repository
        self.addStep(RepoAdd(name=f"add {pkg_name}"))
    # synchronize repository (mount the remote dir over sshfs, sync, unmount)
    if sshdir:
        self.addStep(CreateSshfsDirectory())
        self.addStep(MountPkgbuildCom(env=ArchBuildUtil.ssh_agent))
        self.addStep(RepoSync(env=ArchBuildUtil.ssh_agent))
        self.addStep(UnmountPkgbuildCom())
    # cleanup
    self.addStep(Cleanup())
def make_builder_config(repo_url, name, worker_name, config, lock, snapshots_dir,
                        snapshots_url, snapshots_default_max):
    """Create a BuilderConfig that checks out, configures, builds and tests
    the project, optionally packaging and publishing snapshot archives.

    :param repo_url: Git repository URL to build from
    :param name: builder name
    :param worker_name: worker this builder runs on
    :param config: worker config mapping (warning pattern/suppressions, ...)
    :param lock: master lock; acquired exclusively by this builder
    :param snapshots_dir: master-side directory for snapshot archives, or None
    :param snapshots_url: public URL prefix for snapshots, or None
    :param snapshots_default_max: default number of snapshots to keep
    :return: a util.BuilderConfig wired to the assembled factory
    """
    # BUG FIX: the original used `is not "/"`, which compares object
    # identity rather than equality — whether it "worked" depended on
    # CPython string interning (and it raises a SyntaxWarning on 3.8+).
    # Equality (`!=`) is the correct test for a trailing slash.
    if snapshots_dir and snapshots_dir[-1] != "/":
        snapshots_dir += "/"
    if snapshots_url and snapshots_url[-1] != "/":
        snapshots_url += "/"
    builder = util.BuildFactory()
    builder.addStep(
        steps.SetProperties(name="Worker Config File",
                            properties=config,
                            hideStepIf=True))
    builder.addStep(
        steps.SetPropertiesFromEnv(
            variables=["WORKER_HOST", "WORKER_REPO_DIR"], hideStepIf=True))
    # TODO: use `reference` to a common volume instead? or make the builder
    # dependent on another fetch-only builder so only one builder tries to pull it?
    builder.addStep(
        steps.GitHub(repourl=repo_url,
                     workdir=Property("WORKER_REPO_DIR", None),
                     logEnviron=False,
                     getDescription={
                         "always": True,
                         "tags": True
                     }))
    # A pre-existing config.mk means the tree is already configured; the
    # Configure step below is skipped in that case.
    builder.addStep(
        FileExistsSetProperty(name="config.mk Existence Check",
                              property="already_configured",
                              file="%s/config.mk" % builder.workdir,
                              hideStepIf=True))
    compilation_environment = Property("env", {})
    builder.addStep(
        steps.Configure(command=compute_configure,
                        env=compilation_environment,
                        doStepIf=is_not_configured))
    builder.addStep(
        steps.SetPropertyFromCommand(name="Python (Worker)",
                                     property="cpu_count",
                                     command=["python", "-c", GET_CPU_COUNT],
                                     flunkOnFailure=False,
                                     warnOnFailure=True,
                                     hideStepIf=True,
                                     description="getting CPU count",
                                     descriptionDone="got CPU count"))
    # In at least Buildbot 0.9.12, warningPattern and suppressionList are not
    # renderable, so just get the properties from the config file immediately
    compiler_warning_pattern = config.get(
        "compiler_warning_pattern",
        r"^([^:]+):(\d+):(?:\d+:)? [Ww]arning: (.*)$")
    compiler_warning_extractor = steps.Compile.warnExtractFromRegexpGroups
    compiler_suppression_file = Property("compiler_suppression_file", None)
    compiler_suppression_list = config.get("compiler_suppression_list", None)
    builder.addStep(
        steps.Compile(command=["make",
                               Interpolate("-j%(prop:cpu_count:~1)s")],
                      env=compilation_environment,
                      warningPattern=compiler_warning_pattern,
                      warningExtractor=compiler_warning_extractor,
                      suppressionFile=compiler_suppression_file,
                      suppressionList=compiler_suppression_list))
    # can_run_tests selects between the two make test targets.
    builder.addStep(
        steps.Test(command=[
            "make",
            Interpolate("%(prop:can_run_tests:#?|test|test/runner)s")
        ],
                   env=compilation_environment,
                   warningPattern=compiler_warning_pattern,
                   warningExtractor=compiler_warning_extractor,
                   suppressionFile=compiler_suppression_file,
                   suppressionList=compiler_suppression_list,
                   haltOnFailure=True,
                   flunkOnFailure=True))
    # Packaging/publishing only happens when both a snapshots directory and
    # a public URL were configured.
    if snapshots_dir is not None and snapshots_url is not None:
        builder.addStep(
            steps.SetProperty(name="Computed By %s" % path.basename(__file__),
                              property="package_name",
                              value=compute_package_name,
                              hideStepIf=True,
                              doStepIf=should_package))
        builder.addStep(
            Package(package_name=Property("package_name"),
                    package_format=Property("package_archive_format"),
                    make_target=Property("package_make_target"),
                    package_directory=Property("package_directory", None),
                    strip_binaries=Property("package_strip_binaries", None),
                    env=compilation_environment,
                    doStepIf=should_package))
        source_path = Property("package_filename")
        target_path = Interpolate("%s%%(prop:package_filename)s" %
                                  snapshots_dir)
        target_url = Interpolate("%s%%(prop:package_filename)s" %
                                 snapshots_url)
        # This is not an ideal target link calculation since the archive format
        # in package_filename might be fixed up by the Package step, but here
        # only None is converted into tar.xz, which is not exactly the same
        target_link = Interpolate(
            "%s%%(prop:buildername)s-latest."
            "%%(prop:package_archive_format:-tar.xz)s" % snapshots_dir)
        builder.addStep(
            CleaningFileUpload(name="publish",
                               workersrc=source_path,
                               masterdest=target_path,
                               url=target_url,
                               clean=True,
                               doStepIf=should_package))
        builder.addStep(
            steps.MasterShellCommand(
                name="update latest archive",
                command=["ln", "-sf", target_path, target_link],
                logEnviron=False,
                doStepIf=should_package))
        builder.addStep(
            MasterCleanSnapshots(
                name="clean old snapshots",
                workdir=snapshots_dir,
                file_prefix=Interpolate("%(prop:buildername)s-"),
                num_to_keep=Property("num_snapshots_to_keep",
                                     snapshots_default_max),
                doStepIf=should_package))
    return util.BuilderConfig(name=name,
                              workername=worker_name,
                              collapseRequests=True,
                              factory=builder,
                              nextBuild=pick_next_build,
                              locks=[lock.access("exclusive")])
def createTestFactory():
    """Build the factory that provisions VMs, runs the MaxScale test suite,
    collects/parses logs, publishes them, and tears everything down."""
    f = util.BuildFactory()
    # --- environment and identification -----------------------------------
    f.addSteps(common.configureMdbciVmPathProperty())
    f.addStep(common.determineBuildId())
    f.addStep(common.determineTestRunName())
    f.addStep(
        steps.SetProperties(
            name="Configure build properties",
            properties=configureCommonProperties))
    # --- checkout and repository generation -------------------------------
    f.addSteps(common.cloneRepository())
    f.addStep(common.generateMdbciRepositoryForTarget())
    # --- run the tests on the remote VM -----------------------------------
    f.addSteps(
        common.remoteRunScriptAndLog(
            name="Run MaxScale tests",
            scriptName="run_test_vm.sh",
            logFile=util.Property("buildLogFile"),
            resultFile=util.Property("resultFile")))
    # --- result collection (always runs, even after failures) -------------
    f.addSteps(
        common.downloadAndRunScript(
            name="Parse ctest results log and save it to logs directory",
            scriptName="parse_ctest_log.py",
            args=[
                util.Property("buildLogFile"),
                "--output-log-file",
                util.Interpolate(
                    "%(prop:builddir)s/results_%(prop:buildnumber)s"),
                "--human-readable",
                "--only-failed",
                "--output-log-json-file",
                util.Property("jsonResultsFile"),
                "--ctest-sublogs-path",
                util.Interpolate(
                    "%(prop:builddir)s/%(prop:buildername)s-%(prop:buildnumber)s/ctest_sublogs"
                ),
                "--store-directory",
                util.Interpolate(
                    "%(prop:HOME)s/LOGS/results_%(prop:buildnumber)s/LOGS"),
            ],
            alwaysRun=True))
    f.addSteps(
        common.downloadAndRunScript(
            name="Find core dumps and record information into the file",
            scriptName="coredump_finder.py",
            args=[
                "--directory",
                util.Property("logDirectory"),
                "--remote-prefix",
                util.Interpolate("%(kw:server)s%(prop:testId)s/",
                                 server=constants.CI_SERVER_LOGS_URL),
                "--output-file",
                util.Property("coreDumpsLog"),
            ],
            haltOnFailure=False,
            flunkOnFailure=False,
            alwaysRun=True))
    f.addSteps(common.writeBuildsResults())
    # Echo the summarized results (and any core-dump info) into the step log.
    f.addStep(
        common.StdoutShellCommand(
            name="test_result",
            command=util.Interpolate(
                "cat %(prop:builddir)s/results_%(prop:buildnumber)s %(prop:coreDumpsLog)s"
            ),
            alwaysRun=True))
    # --- log publication (best effort) ------------------------------------
    f.addStep(
        common.rsyncViaSsh(
            name="Rsync test logs to the logs server",
            local=util.Property("logDirectory"),
            remote=util.Interpolate(
                "%(prop:upload_server)s:/srv/repository/bb-logs/Maxscale/%(prop:testId)s/"
            ),
            alwaysRun=True,
            flunkOnFailure=False))
    f.addStep(
        common.runSshCommand(
            name="Fix permissions on remote server",
            host=util.Property("upload_server"),
            command=[
                "chmod", "777", "-R",
                util.Interpolate(
                    "/srv/repository/bb-logs/Maxscale/%(prop:testId)s/")
            ],
            alwaysRun=True,
            flunkOnFailure=False))
    # --- teardown ----------------------------------------------------------
    f.addStep(
        steps.ShellCommand(
            name="Remove logs from worker host",
            command=["rm", "-rf", util.Property("logDirectory")],
            alwaysRun=True))
    f.addSteps(
        common.destroyAllConfigurations(
            util.Interpolate("%(prop:HOME)s/%(prop:name)s_vms")))
    f.addSteps(common.cleanBuildDir())
    return f
def make_builder_config(repo_url, name, worker_name, config, lock, snapshots_dir,
                        snapshots_url, snapshots_default_max):
    """Create a BuilderConfig that checks out, configures, builds and tests
    the project, packaging per-branch snapshot (and debug-symbol) archives.

    :param repo_url: Git repository URL to build from
    :param name: builder name
    :param worker_name: worker this builder runs on
    :param config: worker config mapping (warning pattern/suppressions, ...)
    :param lock: master lock; acquired in counting mode by this builder
    :param snapshots_dir: master-side directory for snapshot archives, or None
    :param snapshots_url: public URL prefix for snapshots, or None
    :param snapshots_default_max: default number of snapshots to keep
    :return: a util.BuilderConfig wired to the assembled factory
    """
    builder = util.BuildFactory()
    builder.addStep(
        steps.SetProperties(name="Worker Config File",
                            properties=config,
                            hideStepIf=True))
    builder.addStep(
        steps.SetPropertiesFromEnv(
            variables=["WORKER_HOST", "WORKER_REPO_DIR"], hideStepIf=True))
    # TODO: use `reference` to a common volume instead? or make the builder
    # dependent on another fetch-only builder so only one builder tries to pull it?
    builder.addStep(
        steps.GitHub(repourl=repo_url,
                     workdir=Property("WORKER_REPO_DIR", None),
                     logEnviron=False,
                     getDescription={
                         "always": True,
                         "tags": True
                     }))
    # A pre-existing config.mk means the tree is already configured; the
    # Configure step below is skipped in that case.
    builder.addStep(
        FileExistsSetProperty(name="config.mk Existence Check",
                              property="already_configured",
                              file="%s/config.mk" % builder.workdir,
                              hideStepIf=True))
    compilation_environment = Property("env", {})
    builder.addStep(
        steps.Configure(command=compute_configure,
                        env=compilation_environment,
                        doStepIf=ConfigChecker().needs_configuration))
    builder.addStep(
        steps.SetPropertyFromCommand(name="Python (Worker)",
                                     property="cpu_count",
                                     command=["python", "-c", GET_CPU_COUNT],
                                     flunkOnFailure=False,
                                     warnOnFailure=True,
                                     hideStepIf=True,
                                     description="getting CPU count",
                                     descriptionDone="got CPU count"))
    # In at least Buildbot 0.9.12, warningPattern and suppressionList are not
    # renderable, so just get the properties from the config file immediately
    compiler_warning_pattern = config.get(
        "compiler_warning_pattern",
        r"^([^:]+):(\d+):(?:\d+:)? [Ww]arning: (.*)$")
    compiler_warning_extractor = steps.Compile.warnExtractFromRegexpGroups
    compiler_suppression_file = Property("compiler_suppression_file", None)
    compiler_suppression_list = config.get("compiler_suppression_list", None)
    builder.addStep(
        steps.Compile(command=["make",
                               Interpolate("-j%(prop:cpu_count:~1)s")],
                      env=compilation_environment,
                      warningPattern=compiler_warning_pattern,
                      warningExtractor=compiler_warning_extractor,
                      suppressionFile=compiler_suppression_file,
                      suppressionList=compiler_suppression_list))
    # can_run_tests selects between the two make test targets.
    builder.addStep(
        steps.Test(command=[
            "make",
            Interpolate("%(prop:can_run_tests:#?|test|test/runner)s")
        ],
                   env=compilation_environment,
                   warningPattern=compiler_warning_pattern,
                   warningExtractor=compiler_warning_extractor,
                   suppressionFile=compiler_suppression_file,
                   suppressionList=compiler_suppression_list,
                   haltOnFailure=True,
                   flunkOnFailure=True))
    # Packaging/publishing only happens when both a snapshots directory and
    # a public URL were configured.
    if snapshots_dir is not None and snapshots_url is not None:
        # BUG FIX: the original used `is not "/"`, which compares object
        # identity rather than equality — whether it "worked" depended on
        # CPython string interning (and it raises a SyntaxWarning on 3.8+).
        # Equality (`!=`) is the correct test for a trailing slash.
        if snapshots_dir and snapshots_dir[-1] != "/":
            snapshots_dir += "/"
        if snapshots_url and snapshots_url[-1] != "/":
            snapshots_url += "/"
        # Snapshots are grouped per branch; these strings still contain the
        # %(prop:branch)s placeholder and are rendered later by Interpolate.
        snapshots_dir = "%s%%(prop:branch)s/" % snapshots_dir
        snapshots_url = "%s%%(prop:branch)s/" % snapshots_url
        builder.addStep(
            steps.SetProperty(name="Computed By %s" % path.basename(__file__),
                              property="package_name",
                              value=compute_package_name,
                              hideStepIf=True,
                              doStepIf=should_package))
        builder.addStep(
            Package(package_name=Property("package_name"),
                    package_files=Property("package_files", None),
                    package_format=Property("package_archive_format"),
                    make_target=Property("package_make_target"),
                    split_debug_package=Property("split_debug_package", True),
                    extra_files=Property("package_extra_files", None),
                    package_script=Interpolate(config.get(
                        "package_script", "")),
                    env=compilation_environment,
                    doStepIf=should_package))
        latest_link = Interpolate(
            "%s%%(prop:buildername)s-latest."
            "%%(prop:package_archive_format:-tar.xz)s" % snapshots_dir)
        make_uploader_steps(builder=builder,
                            snapshots_dir=snapshots_dir,
                            snapshots_url=snapshots_url,
                            publish_name="archive",
                            property_name="package_filename",
                            latest_link=latest_link,
                            do_step_if=should_package)
        latest_link = Interpolate("%s%%(prop:buildername)s"
                                  "-latest-debug-symbols.tar.xz" %
                                  snapshots_dir)
        make_uploader_steps(builder=builder,
                            snapshots_dir=snapshots_dir,
                            snapshots_url=snapshots_url,
                            publish_name="debug archive",
                            property_name="debug_package_filename",
                            latest_link=latest_link,
                            do_step_if=should_package_debug)
        builder.addStep(
            MasterCleanSnapshots(
                name="clean old snapshots",
                workdir=Interpolate(snapshots_dir),
                file_prefix=Interpolate("%(prop:buildername)s-"),
                num_to_keep=Property("num_snapshots_to_keep",
                                     snapshots_default_max),
                secondary_file_suffix="-debug-symbols",
                file_extensions=r"\.(?:tar(?:\.[xg]z)?|[a-z]{3,4})$",
                doStepIf=should_package,
                hideStepIf=True))
    return util.BuilderConfig(name=name,
                              workername=worker_name,
                              collapseRequests=True,
                              factory=builder,
                              nextBuild=pick_next_build,
                              locks=[lock.access("counting")])
def _make_factory(name, ms):
    """Build the compile/test/push factory for one microservice.

    :param name: project/service name (used for the container name)
    :param ms: microservice config mapping; keys read here are giturl,
        fail_on_tests and run_tests
    :return: the assembled util.BuildFactory
    """
    f = util.BuildFactory()
    # Sync Git
    f.addStep(
        steps.Git(repourl=ms['giturl'],
                  method='clobber',
                  mode='full',
                  shallow=False,
                  haltOnFailure=True,
                  name='git sync'))
    # TODO: login to dockerhub
    f.addStep(
        steps.SetPropertyFromCommand(
            name="set property from make version",
            command=["make", "version", "--always-make"],
            property="project_version",
            haltOnFailure=True))
    # BUG FIX: the branch placeholder was written as `$(prop:branch)s`.
    # Buildbot's Interpolate only substitutes `%(prop:...)s` placeholders,
    # so the `$` form was passed through literally and the branch name
    # never appeared in the version/tag string.
    version_specific_str = '%(prop:project_version)s-%(prop:branch)s-%(prop:buildnumber)s-' + str(
        DATABASE_VERSION)
    version_specifier = util.Interpolate('VERSION=' + version_specific_str)
    commit_hash_specifier = util.Interpolate('COMMIT_HASH=' +
                                             '%(prop:got_revision)s')
    # Compile
    f.addStep(
        steps.ShellCommand(
            name="compile",
            command=[
                "make", "build", version_specifier, commit_hash_specifier
            ],
            haltOnFailure=True,
        ))
    # Run tests; failures are warnings unless fail_on_tests is set.
    f.addStep(
        steps.ShellCommand(
            name="run tests",
            command=["make", "test", version_specifier, commit_hash_specifier],
            warnOnFailure=not ms['fail_on_tests'],
            haltOnFailure=ms['fail_on_tests'],
            doStepIf=ms['run_tests'],
        ))
    # Build image and push to Docker registry (deploy branches only)
    f.addStep(
        steps.ShellCommand(
            name="push docker image to registry",
            haltOnFailure=True,
            command=["make", "push", version_specifier, commit_hash_specifier],
            doStepIf=_is_deploy_branch,
        ))
    # NOTE(review): r'\/' yields a literal backslash before the slash in
    # container_name — presumably escaped for a downstream regex/sed
    # consumer; confirm, otherwise a plain '/' is intended.
    f.addStep(
        steps.SetProperties(name="set container properties",
                            properties={
                                'container_name': REGISTRY + r'\/' + name,
                                'container_tag':
                                util.Interpolate(version_specific_str),
                                'project_name': name,
                            }))
    # TODO: add actual k8s deployment step
    # TODO: add liveness check step
    return f