def masterConfig():
    # Buildbot master config for an integration test: a single "testy"
    # builder whose only step starts a long sleep and then interrupts itself.
    c = {}
    from buildbot.plugins import schedulers, steps, util

    class SleepAndInterrupt(steps.ShellSequence):
        # NOTE(review): relies on `defer` and `asyncSleep` being in scope at
        # module level (imports not visible in this chunk) — confirm.
        @defer.inlineCallbacks
        def run(self):
            # Windows has no `sleep`; `waitfor` on a never-signaled event
            # approximates a 100-second sleep.
            if self.worker.worker_system == "nt":
                sleep = "waitfor SomethingThatIsNeverHappening /t 100 >nul 2>&1"
            else:
                sleep = ["sleep", "100"]
            d = self.runShellSequence([util.ShellArg(sleep)])
            # Give the command a moment to start, then interrupt it and
            # return whatever result the interrupted sequence yields.
            yield asyncSleep(1)
            self.interrupt("just testing")
            res = yield d
            return res

    c['schedulers'] = [
        schedulers.ForceScheduler(name="force", builderNames=["testy"])
    ]
    f = util.BuildFactory()
    f.addStep(SleepAndInterrupt())
    c['builders'] = [
        util.BuilderConfig(name="testy", workernames=["local1"], factory=f)
    ]
    return c
def mkcoveragefactory():
    # Build factory that runs the buildbot/buildslave test suites under
    # coverage inside an isolated virtualenv and publishes the HTML report
    # to the buildbot.buildbot.net web root.
    f = util.BuildFactory()
    f.addSteps([
        gitStep,
        # Sandbox keeps coverage/mock (and editable installs of both trees)
        # out of the metabuildbot's own environment.
        VirtualenvSetup(name='virtualenv setup',
                        no_site_packages=True,
                        virtualenv_packages=[
                            'coverage', 'mock', '--editable=master',
                            '--editable=slave'
                        ],
                        virtualenv_dir='sandbox',
                        haltOnFailure=True),
        # NOTE(review): `coverage run` uses --rcfile=common/coveragerc while
        # `coverage html` uses --rcfile=.coveragerc — confirm the mismatch
        # is intentional.
        steps.ShellCommand(command=textwrap.dedent("""
            PYTHON=sandbox/bin/python;
            sandbox/bin/coverage run --rcfile=common/coveragerc \
                sandbox/bin/trial buildbot.test buildslave.test \
                || exit 1;
            sandbox/bin/coverage html -i --rcfile=.coveragerc \
                -d /home/buildbot/www/buildbot.buildbot.net/static/coverage \
                || exit 1;
            chmod -R a+rx /home/buildbot/www/buildbot.buildbot.net/static/coverage || exit 1
        """),
                           usePTY=False,
                           description='coverage',
                           descriptionDone='coverage',
                           name='coverage report')
    ])
    return f
def test_gentoo_sources():
    """Build factory for testing gentoo-sources changes: wipe the workdir,
    fetch the pull-request repo and the Ghelper tool, then run the
    stabilization command.

    :return: util.BuildFactory
    """
    factory = util.BuildFactory()
    # Fix: "enviroment" -> "environment" in the user-visible step texts.
    factory.addStep(
        steps.ShellCommand(description="Cleaning environment",
                           descriptionDone='Cleaned environment',
                           name='Clean environment',
                           command=["/bin/bash", "-c", "umask 022; rm -rf *"],
                           logEnviron=False,
                           timeout=2400))
    # Shallow incremental fetch of the PR source.
    factory.addStep(
        steps.GitHub(name="Fetching repository",
                     repourl=pull_repourl,
                     logEnviron=False,
                     mode='incremental',
                     workdir="build/gentoo",
                     shallow=50))
    # Ghelper location/branch come from the master's environment; always
    # track the latest revision.
    factory.addStep(
        steps.GitHub(name="Fetching Ghelper",
                     repourl=os.getenv("GHELPER_REPOURL"),
                     branch=os.getenv("GHELPER_BRANCH"),
                     mode='incremental',
                     logEnviron=False,
                     alwaysUseLatest=True,
                     workdir="build/ghelper"))
    factory.addStep(
        steps.ShellCommand(name="Stabilizing package",
                           command=filterFiles,
                           logEnviron=False,
                           workdir="build/ghelper/"))
    return factory
def createBuildFactory():
    """
    Creates build factory containing steps which triggers build scheduler
    for each chosen box
    """
    # Properties forwarded from this build into every triggered build.
    forwarded_properties = [
        "branch",
        "build_box_checkbox_container",
        "build_experimental",
        "ci_url",
        "cmake_flags",
        "do_not_destroy_vm",
        "host",
        "old_target",
        "owners",
        "repository",
        "run_upgrade_test",
        "target",
        "try_already_running",
        "version",
    ]
    trigger = common.BuildAllTrigger(
        name="build_all",
        schedulerNames=['build'],
        waitForFinish=True,
        copy_properties=forwarded_properties,
        set_properties={"virtual_builder_name": "build"})
    factory = util.BuildFactory()
    factory.addStep(trigger)
    return factory
def mkdocsfactory():
    # Build factory that builds the Buildbot docs inside an isolated
    # virtualenv and deploys the result to the buildbot.net web root.
    f = util.BuildFactory()
    f.addSteps([
        gitStep,
        steps.FileDownload(mastersrc="virtualenv.py",
                           slavedest="virtualenv.py",
                           flunkOnFailure=True),
        # run docs tools in their own virtualenv, otherwise we end up
        # documenting the version of Buildbot running the metabuildbot!
        VirtualenvSetup(name='virtualenv setup',
                        no_site_packages=True,
                        virtualenv_packages=[
                            'sphinx==1.2.2', '--editable=master',
                            '--editable=slave'
                        ],
                        virtualenv_dir='sandbox',
                        haltOnFailure=True),
        # manual
        steps.ShellCommand(command=util.Interpolate(
            textwrap.dedent("""\
                source sandbox/bin/activate &&
                make docs
                """)),
                           name="create docs"),
        # Unpack the generated docs tarball into the web tree, fix
        # permissions, and inject the analytics tracking snippet.
        steps.ShellCommand(command=textwrap.dedent("""\
            export VERSION=latest &&
            tar -C /home/buildbot/www/buildbot.net/buildbot/docs -zvxf master/docs/docs.tgz &&
            chmod -R a+rx /home/buildbot/www/buildbot.net/buildbot/docs/latest &&
            find /home/buildbot/www/buildbot.net/buildbot/docs/latest -name '*.html' | xargs python /home/buildbot/www/buildbot.net/buildbot/add-tracking.py
            """),
                           name="docs to web",
                           flunkOnFailure=True,
                           haltOnFailure=True)
    ])
    return f
def build_coverage():
    """Factory: configure with CMake, build with make, run the coverage
    target, and upload the results to codecov."""

    @util.renderer
    def render_make_command(props):
        # Prepend "make" to any extra options carried in the make_opts property.
        return ["make"] + props.getProperty("make_opts", [])

    codecov_upload = util.Interpolate(
        "bash <(curl -s https://codecov.io/bash) -t " + tokens.codecovToken +
        " -C %(prop:revision)s -f coverage.info.cleaned")

    factory = util.BuildFactory()
    factory.addSteps([
        steps.RemoveDirectory("build"),
        steps.MakeDirectory("build"),
        steps.CMake(path=util.Property("src_dir"),
                    definitions=util.Property("cmake_defs", {}),
                    options=util.Property("cmake_opts", []),
                    workdir="build",
                    env=env),
        steps.Compile(command=render_make_command, workdir="build", env=env),
        steps.ShellCommand(command=["make", "coverage"], workdir="build"),
        steps.ShellCommand(command=["bash", "-c", codecov_upload],
                           workdir="build"),
    ])
    return factory
def __getBasePipeline():
    """Base site-report pipeline: preflight, clone, worker prep, locale and
    timezone setup, build, whitespace checks, then the Maven site report."""
    tab_check = common.shellArg(
        command=util.Interpolate(
            "(! grep -rnP '\t' modules assemblies pom.xml etc --include=pom.xml)"
        ),
        haltOnFailure=False,
        logname='Tab Check')
    trailing_space_check = common.shellArg(
        command=util.Interpolate(
            "(! grep -rn ' $' modules assemblies pom.xml etc --include=pom.xml)"
        ),
        haltOnFailure=False,
        logname='End Of Line Space Check')
    checkSpaces = common.shellSequence(
        commands=[tab_check, trailing_space_check],
        workdir="build/docs/guides",
        name="Formatting checks")
    # Maven goals for the aggregated site report (checkstyle disabled,
    # frontend profile switched off).
    reports = [
        'site', 'site:stage', '-Daggregate=true', '-Dcheckstyle.skip=true',
        '-P', 'none,!frontend'
    ]
    site = common.getBuild(override=reports, name="Build site report")
    f_build = util.BuildFactory()
    f_build.addSteps([
        common.getPreflightChecks(),
        common.getClone(),
        common.getWorkerPrep(),
        common.setTimezone(),
        common.setLocale(),
        common.getBuild(),
        checkSpaces,
        site,
    ])
    return f_build
def createBuildFactory():
    """Factory that sets build properties, clones the repository, builds and
    publishes MDBCI, then cleans the build directory."""
    factory = util.BuildFactory()
    factory.addStep(steps.SetProperties(properties=configureBuildProperties))
    for clone_step in common.cloneRepository():
        factory.addStep(clone_step)
    factory.addStep(buildMdbci())
    factory.addStep(publishMdbci())
    for cleanup_step in common.cleanBuildDir():
        factory.addStep(cleanup_step)
    return factory
def build_volk_PR():
    # Factory for VOLK pull requests: fetch the PR source, stage it into a
    # per-PR directory under _PULL_SRC_BASE, then trigger the matrix of
    # child builders described in volk_builders.json.
    create_src = steps.MakeDirectory(name="create src directory", dir="volk")
    clone_step = steps.GitHub(name="fetch PR source",
                              repourl=util.Property("repository"),
                              mode="full",
                              method="fresh",
                              submodules=True,
                              retryFetch=True,
                              clobberOnFailure=True,
                              workdir="volk")
    # Remove any stale copy of this PR's source before staging the new one.
    rm_src_dir = steps.RemoveDirectory(
        dir=util.Interpolate(
            os.path.join(_PULL_SRC_BASE, "%(prop:github.number)s",
                         "%(prop:github.base.ref)s")),
        # Only show this step in the UI when it neither skipped nor succeeded.
        hideStepIf=lambda results, s: results == SKIPPED or results == SUCCESS,
    )
    copy_src = steps.CopyDirectory(
        name="copy src to srcdir",
        src="volk",
        dest=util.Interpolate(
            os.path.join(_PULL_SRC_BASE, "%(prop:github.number)s",
                         "%(prop:github.base.ref)s"),
        ),
        hideStepIf=lambda results, s: results == SKIPPED or results == SUCCESS,
    )
    # load builders.json with definitions on how to build things
    parent_path = os.path.dirname(__file__)
    with open(os.path.join(parent_path, "volk_builders.json"),
              "r") as builders_file:
        build_config = json.loads(builders_file.read())
    trigger_builds = custom_steps.BuildTrigger(
        name="trigger the right builders",
        build_config=build_config,
        schedulerNames=["trigger"],
        runner="pull",
        set_properties={
            "pr_base": util.Property("github.base.ref"),
            "src_dir": util.Interpolate(
                os.path.join(_PULL_SRC_BASE, "%(prop:github.number)s"))
        },
        test_merge=False,
        updateSourceStamp=False,
        waitForFinish=True)
    factory = util.BuildFactory()
    factory.addStep(create_src)
    factory.addStep(clone_step)
    factory.addStep(rm_src_dir)
    factory.addStep(copy_src)
    factory.addStep(trigger_builds)
    return factory
def __getBasePipeline():
    """Base pipeline: preflight checks, clone, then locale and timezone setup."""
    pipeline = util.BuildFactory()
    pipeline.addSteps([
        common.getPreflightChecks(),
        common.getClone(),
        common.setLocale(),
        common.setTimezone(),
    ])
    return pipeline
def _make_factory():
    """Pipeline for wwdicebot: sync the repo, compile, test, build/push the
    Docker image, and (on the deploy branch) apply the k8s manifest."""
    version_specifier = util.Interpolate(
        'VERSION=%(prop:branch)s-%(prop:buildnumber)s')
    factory = util.BuildFactory()
    factory.addSteps([
        # Sync git
        steps.Git(repourl="https://github.com/klaital/wwdice",
                  method='clobber',
                  mode='full',
                  shallow=True,
                  haltOnFailure=True,
                  name='git sync'),
        # Build binary
        steps.ShellCommand(
            name='compile binary',
            command=['make', 'wwdicebot', version_specifier],
            env={'GOOS': 'linux'},
            haltOnFailure=True,
        ),
        # Run tests
        steps.ShellCommand(
            name='run tests',
            command=['make', 'test'],
            haltOnFailure=True,
        ),
        # TODO: Run linters
        # Build Docker image
        steps.ShellCommand(
            name='build and push docker image',
            command=['make', 'wwdicebot-push'],
            haltOnFailure=True,
        ),
        # Update k8s deployment
        steps.ShellCommand(name='push to home cluster',
                           command=[
                               'kubectl', '--kubeconfig',
                               'wwdicebot_kubeconfig', 'apply', '-f',
                               'cmd/wwdicebot/k8s.yaml'
                           ],
                           haltOnFailure=True,
                           doStepIf=_is_deploy_branch),
    ])
    # TODO: add liveness check to see if the new version is actually deployed and reachable
    return factory
def __getBasePipeline():
    # Docs pipeline: install npm/pip dependencies, run the markdown checks,
    # build the docs, and generate per-directory markdown commands.
    npm_install = common.shellSequence(commands=[
        common.shellArg(command=['npm', 'install'], logname='npm_install'),
    ],
                                       workdir="build/docs/guides",
                                       name="Running npm install",
                                       haltOnFailure=True)
    # Non-fatal formatting check; failures are logged but don't stop the build.
    npmCheck = common.shellSequence(
        commands=[
            common.shellArg(command=['npm', 'test'],
                            haltOnFailure=False,
                            logname='markdown-cli'),
        ],
        workdir="build/docs/guides",
        name="Check Markdown doc formatting with markdown-cli",
        haltOnFailure=False)
    pip_install = common.shellSequence(commands=[
        common.shellArg(command=[
            'python3', '-m', 'pip', 'install', '-r', 'requirements.txt'
        ],
                        haltOnFailure=False,
                        logname='markdown-cli'),
    ],
                                       workdir="build/docs/guides",
                                       name="Running pip install",
                                       haltOnFailure=True)
    # The build script needs a UTF-8 locale and the user-local bin on PATH.
    build = common.shellCommand(command=['./.style-and-markdown-build.sh'],
                                name="Running tests and building docs",
                                env={
                                    "LC_ALL": "en_US.UTF-8",
                                    "LANG": "en_US.UTF-8",
                                    "OC_CTYPE": "en_US.UTF-8",
                                    "PATH": "/builder/.local/bin:${PATH}"
                                },
                                haltOnFailure=False,
                                flunkOnFailure=True)
    # Enumerate doc directories so per-doc commands can be generated.
    markdown = GenerateMarkdownCommands(command='ls -d */',
                                        name="Determining available docs",
                                        workdir="build/docs/guides",
                                        haltOnFailure=True,
                                        flunkOnFailure=True)
    f_build = util.BuildFactory()
    f_build.addStep(common.getClone())
    f_build.addStep(npm_install)
    f_build.addStep(npmCheck)
    f_build.addStep(pip_install)
    f_build.addStep(build)
    f_build.addStep(markdown)
    return f_build
def create_builders(repos_and_packages):
    """Create BuilderConfigs for every artifact in every repo, plus a
    builder that processes the repo's artifactsrc.yml.

    :param repos_and_packages: iterable of dicts with 'repo' and 'artifacts'
    :return: list of util.BuilderConfig
    """
    ret = []
    for repo in repos_and_packages:
        for artifact in repo['artifacts']:
            # Fix: Python 2 print statement -> print() function (the
            # statement form is a syntax error on Python 3).
            print('building builder for artifact', artifact['artifact'])
            build_factory = util.BuildFactory()
            build_factory.addStep(
                MonorepoGitStep(artifact['path'],
                                repourl=repo['repo'],
                                mode='full',
                                method='copy'))
            # Fix: None comparison should use `is not None`, not `!=`.
            has_assets = artifact.get('assets', None) is not None
            if artifact['type'] == 'npm':
                add_npm_build_steps(build_factory, has_assets)
                ret.append(
                    util.BuilderConfig(
                        name=artifact['artifact'],
                        workernames=["build-npm"],
                        factory=build_factory,
                        properties={'owners': artifact.get('owners', [])}))
            elif artifact['type'] == 'docker-npm':
                add_docker_npm_build_steps(build_factory,
                                           artifact['artifact'], has_assets)
                ret.append(
                    util.BuilderConfig(
                        name=artifact['artifact'],
                        workernames=["build-docker-npm"],
                        factory=build_factory,
                        properties={'owners': artifact.get('owners', [])}))
        # One artifactsrc.yml builder per repo (uses this repo's URL).
        # NOTE(review): the builder name is fixed — with multiple repos this
        # would produce duplicate builder names; confirm only one repo is used.
        artifactsrc_yml_build_factory = util.BuildFactory()
        add_artifactsrc_yml_build_steps(artifactsrc_yml_build_factory,
                                        repo['repo'])
        ret.append(
            util.BuilderConfig(name='build-artifactsrc-yml',
                               workernames=["build-artifactsrc-yml"],
                               factory=artifactsrc_yml_build_factory))
    return ret
def __getBasePipeline():
    """Base pipeline: clone, then run test + upgrade steps against each
    supported database engine, and finally clean up."""
    # (engine label, port) pairs each getting a test and an upgrade step.
    db_targets = (
        ("maria", "3307"),
        ("mysql5.6", "3308"),
        ("mysql5.7", "3309"),
    )
    f_build = util.BuildFactory()
    f_build.addStep(common.getClone())
    for engine, port in db_targets:
        f_build.addStep(generateDBTestStep(engine, port))
        f_build.addStep(generateDBUpgradeStep(engine, port))
    f_build.addStep(common.getClean())
    return f_build
def dynamic_factory(self, default_factory, build_specification):
    """
    Create buildbot factory with one dynamic step.
    All factories will be created by this method.

    :param default_factory: other method of this class; the generator that
        produces the real steps at build time
    :param build_specification: dict describing what to build
    :return: BuildFactory
    """
    factory = util.BuildFactory()
    # Hide debug information about build steps for production mode
    factory.addStep(StepsGenerator(default_factory=default_factory,
                                   build_specification=build_specification,
                                   hideStepIf=self.mode == bb.utils.Mode.PRODUCTION_MODE))
    return factory
def get_builders():
    """Return the builder configurations (a single 'runtests' builder)."""
    test_factory = util.BuildFactory()
    # check out the source
    test_factory.addStep(
        steps.Git(repourl='git://github.com/buildbot/hello-world.git',
                  mode='incremental'))
    # run the tests (note that this will require that 'trial' is installed)
    test_factory.addStep(
        steps.ShellCommand(command=["trial", "hello"],
                           env={"PYTHONPATH": "."}))
    runtests = util.BuilderConfig(name="runtests",
                                  workernames=["default"],
                                  factory=test_factory)
    return [runtests]
def build_coverity():
    """Factory that builds GNU Radio under cov-build and submits the
    resulting analysis archive to Coverity Scan."""
    factory = util.BuildFactory()
    factory.addSteps([
        steps.RemoveDirectory("build"),
        steps.RemoveDirectory("src"),
        steps.MakeDirectory("build"),
        steps.FileDownload(mastersrc=util.Property("src_archive"),
                           workerdest="src.tar.xz",
                           workdir="src"),
        steps.ShellCommand(name="Extract source archive",
                           command=["tar", "xJf", "src.tar.xz"],
                           workdir="src"),
        steps.CMake(path="../src/",
                    definitions=util.Property("cmake_defs", {}),
                    options=util.Property("cmake_opts", []),
                    workdir="build",
                    env=env),
        # Wrap make in cov-build so Coverity can capture the build graph.
        steps.Compile(command=[
            "cov-build", "--dir", "cov-int", "make", "-j", "16", "-l", "32"
        ],
                      workdir="build",
                      env=env),
        steps.ShellCommand(
            command=["tar", "czvf", "gnuradio.tgz", "cov-int"],
            workdir="build"),
        # Upload the archive to the Coverity Scan service.
        steps.ShellCommand(command=[
            "curl", "--form", "token=" + tokens.coverityToken, "--form",
            "[email protected]", "--form", "[email protected]",
            "--form",
            util.Interpolate("version=%(prop:revision)s"), "--form",
            util.Interpolate(
                "description=\"Weekly Buildbot submission for %(prop:branch)s branch \""
            ), "https://scan.coverity.com/builds?project=GNURadio"
        ],
                           workdir="build"),
    ])
    return factory
def build_and_test():
    """Factory: unpack a source archive, configure with CMake, build with
    make, then run ctest with per-build exclusions (qtgui always excluded).

    :return: util.BuildFactory
    """
    remove_build = steps.RemoveDirectory("build")
    remove_src = steps.RemoveDirectory("src")
    create_build = steps.MakeDirectory("build")
    download_src_archive = steps.FileDownload(
        mastersrc=util.Property("src_archive"),
        workerdest="src.tar.xz",
        workdir="src")
    extract_src_archive = steps.ShellCommand(
        name="Extract source archive",
        command=["tar", "xJf", "src.tar.xz"],
        workdir="src")
    cmake_step = steps.CMake(path="../src/",
                             definitions=util.Property("cmake_defs", {}),
                             options=util.Property("cmake_opts", []),
                             workdir="build",
                             env=env)

    @util.renderer
    def join_make_opts(props):
        # Prepend "make" to any extra options from the make_opts property.
        make_opts = props.getProperty("make_opts", [])
        return ["make"] + make_opts

    make_step = steps.Compile(command=join_make_opts, workdir="build", env=env)

    @util.renderer
    def parse_test_excludes(props):
        command = ["ctest", "--output-on-failure", "--timeout", "120"]
        # Fix: copy the property value instead of calling .append() on it —
        # mutating the list returned by getProperty grows the stored
        # property on every render.  The old `if excludes is not None`
        # check was dead code: the list always contained at least "qtgui".
        excludes = list(props.getProperty('test_excludes', [])) + ["qtgui"]
        command += ["-E", "|".join(excludes)]
        return command

    test_step = steps.Test(command=parse_test_excludes, workdir="build")
    factory = util.BuildFactory()
    factory.addStep(remove_build)
    factory.addStep(remove_src)
    factory.addStep(create_build)
    factory.addStep(download_src_archive)
    factory.addStep(extract_src_archive)
    factory.addStep(cmake_step)
    factory.addStep(make_step)
    factory.addStep(test_step)
    return factory
def createCreateFullRepoFactory():
    """
    Creates create factory containing steps which triggers create_full_repo
    scheduler for each chosen box
    """
    # Properties copied into each triggered create_full_repo build.
    forwarded_properties = [
        "branch", "host", "owners", "repository", "version", "target",
        "do_not_destroy_vm", "ci_url", "major_ver",
        "build_box_checkbox_container"
    ]
    trigger = common.BuildAllTrigger(
        name="create_full_repo_all",
        schedulerNames=['create_full_repo'],
        waitForFinish=True,
        copy_properties=forwarded_properties,
        set_properties={"virtual_builder_name": "create_full_repo"})
    factory = util.BuildFactory()
    factory.addStep(trigger)
    return factory
def masterConfig():
    """Minimal master config used by reconfiguration tests; counts how many
    times it has been evaluated via the num_reconfig module global."""
    global num_reconfig
    num_reconfig += 1
    from buildbot.plugins import schedulers, steps, util

    config = {}
    config['schedulers'] = [
        schedulers.ForceScheduler(name="force", builderNames=["testy"])
    ]
    build_factory = util.BuildFactory()
    build_factory.addStep(
        steps.SetPropertyFromCommand(property="test",
                                     command=["echo", "foo"]))
    config['builders'] = [
        util.BuilderConfig(name="testy",
                           workernames=["local1"],
                           factory=build_factory)
    ]
    return config
def nix_build_config(
    worker_names: list[str], enable_cachix: bool
) -> util.BuilderConfig:
    """
    Builds one nix flake attribute.
    """
    factory = util.BuildFactory()
    build_step = NixBuildCommand(
        env={},
        name="Build flake attr",
        command=[
            "nix",
            "build",
            "-L",
            "--out-link",
            util.Interpolate("result-%(prop:attr)s"),
            util.Property("drv_path"),
        ],
        haltOnFailure=True,
    )
    factory.addStep(build_step)
    if enable_cachix:
        # Push the build result to the configured cachix cache.
        cachix_step = steps.ShellCommand(
            name="Upload cachix",
            env=dict(CACHIX_AUTH_TOKEN=util.Secret("cachix-token")),
            command=[
                "cachix",
                "push",
                util.Secret("cachix-name"),
                util.Interpolate("result-%(prop:attr)s"),
            ],
        )
        factory.addStep(cachix_step)
    factory.addStep(
        UpdateBuildOutput(name="Update build output", branches=["master"]))
    return util.BuilderConfig(
        name="nix-build",
        workernames=worker_names,
        properties=[],
        collapseRequests=False,
        env={},
        factory=factory,
    )
def generate_builder(target: TestTarget) -> BuilderConfig:
    """Create a builder that downloads test assets and generates the
    start-movie commands for one test target."""
    factory = util.BuildFactory()
    factory.addStep(download_step)
    generate_step = GenerateStartMovieCommands(
        directory=target.directory,
        game_id=target.game_id,
        debugflags=target.debugflags,
        name=f"Generate commands: {target.builder_name}",
        command=[
            "cat",
            os.path.join(target.directory, "test_scripts.txt"),
        ],
        haltOnFailure=True,
        **default_step_kwargs,
    )
    factory.addStep(generate_step)
    return BuilderConfig(name=target.builder_name,
                         workernames=["director-worker"],
                         factory=factory)
def error(message):
    # Register a placeholder builder + force-scheduler that surfaces a
    # configuration error for the current constructicon/builder pair.
    # builder_name may not be defined at every call site; fall back to the
    # constructicon name alone (EAFP via NameError).
    try:
        name = constructicon_name + '-' + builder_name
    except NameError:
        name = constructicon_name
    # Uniquify with a global error counter so repeated errors don't collide
    # on builder names.
    global errors
    name += '-uniquifier-{}'.format(errors)
    errors += 1
    all_builders.append(
        util.BuilderConfig(name=name,
                           description=git_state + ' error: ' + message,
                           slavenames=['none'],
                           factory=util.BuildFactory()))
    all_schedulers.append(
        ForceScheduler(
            name=name + '-force',
            builderNames=[name],
        ))
    log.msg('builder {}: {}'.format(name, message))
def xmippBundleFactory(groupId):
    # Factory that discovers and runs the Xmipp test stages (program tests
    # and function tests) from the Xmipp home directory.
    # NOTE(review): `groupId` is unused in this body — presumably kept for a
    # uniform factory signature; confirm against callers.
    xmippTestSteps = util.BuildFactory()
    xmippTestSteps.workdir = settings.SDEVEL_XMIPP_HOME
    env = {
        "SCIPION_HOME": util.Property("SCIPION_HOME"),
        "SCIPION_LOCAL_CONFIG": util.Property("SCIPION_LOCAL_CONFIG"),
        "LD_LIBRARY_PATH": LD_LIBRARY_PATH,
        "EM_ROOT": settings.EM_ROOT
    }
    # Capture the environment after sourcing xmipp.bashrc into the 'env'
    # property so the generated test stages run with it.
    xmippTestSteps.addStep(
        SetProperty(command=["bash", "-c", "source build/xmipp.bashrc; env"],
                    extract_fn=glob2list,
                    env=env))
    # Discover program tests from `./xmipp test --show` output.
    xmippTestSteps.addStep(
        GenerateStagesCommand(
            command=["./xmipp", "test", "--show"],
            name="Generate test stages for Xmipp programs",
            description="Generating test stages for Xmipp programs",
            descriptionDone="Generate test stages for Xmipp programs",
            haltOnFailure=False,
            pattern='./xmipp test (.*)',
            rootName=settings.XMIPP_CMD,
            timeout=settings.timeOutExecute,
            blacklist=settings.SCIPION_TESTS_BLACKLIST,
            env=util.Property('env')))
    # Discover function tests (different regex) from the same command.
    xmippTestSteps.addStep(
        GenerateStagesCommand(
            command=["./xmipp", "test", "--show"],
            name="Generate test stages for Xmipp functions",
            description="Generating test stages for Xmipp functions",
            descriptionDone="Generate test stages for Xmipp functions",
            haltOnFailure=False,
            timeout=settings.timeOutExecute,
            pattern='xmipp_test_(.*)',
            rootName=settings.XMIPP_CMD,
            blacklist=settings.SCIPION_TESTS_BLACKLIST,
            env=util.Property('env')))
    return xmippTestSteps
def build_and_test():
    """Factory: configure a prepared source dir with CMake, build with make,
    and run ctest, honoring an optional on-disk test-exclusion file.

    :return: util.BuildFactory
    """
    remove_build = steps.RemoveDirectory("build")
    create_build = steps.MakeDirectory("build")
    cmake_step = steps.CMake(path=util.Property("src_dir"),
                             definitions=util.Property("cmake_defs", {}),
                             options=util.Property("cmake_opts", []),
                             workdir="build",
                             env=env)

    @util.renderer
    def join_make_opts(props):
        # Prepend "make" to any extra options from the make_opts property.
        make_opts = props.getProperty("make_opts", [])
        return ["make"] + make_opts

    make_step = steps.Compile(command=join_make_opts, workdir="build", env=env)

    def parse_exclude_file(rc, stdout, stderr):
        # stdout is the JSON list of test names to exclude.
        exclude_tests = json.loads(stdout)
        return {"test_excludes": exclude_tests}

    load_exclude_file = steps.SetPropertyFromCommand(
        command=["cat", os.path.join("/config", "test_excludes.json")],
        extract_fn=parse_exclude_file,
        # Fix: the lambda parameter was named `steps`, shadowing the
        # buildbot `steps` module inside the lambda body.
        doStepIf=lambda step: step.getProperty("exclude_file", False))

    @util.renderer
    def parse_test_excludes(props):
        command = ["ctest", "--output-on-failure", "--timeout", "10"]
        excludes = props.getProperty("test_excludes", None)
        if excludes is not None:
            command += ["-E", "|".join(excludes)]
        return command

    test_step = steps.Test(command=parse_test_excludes, workdir="build")
    factory = util.BuildFactory()
    factory.addStep(remove_build)
    factory.addStep(create_build)
    factory.addStep(load_exclude_file)
    factory.addStep(cmake_step)
    factory.addStep(make_step)
    factory.addStep(test_step)
    return factory
def createBuildFactory():
    """Factory that runs the shared build steps and then triggers the
    create_full_repo_all scheduler, forwarding build metadata."""
    # Properties copied from this build into the triggered builds.
    forwarded_properties = [
        "branch",
        "repository",
        "host",
        "owners",
        "version",
    ]
    trigger_step = steps.Trigger(
        name="Call the 'create_full_repo_all' scheduler",
        schedulerNames=['create_full_repo_all_triggerable'],
        waitForFinish=True,
        copy_properties=forwarded_properties,
        set_properties={
            "major_ver": getMajorVersion,
            "target": constructTargetString
        })
    factory = util.BuildFactory()
    factory.addSteps(createBuildSteps())
    factory.addStep(trigger_step)
    return factory
def mksimplefactory(test_master=True):
    """Build factory running the slave test suite and, when test_master is
    true, the master test suite as well."""
    factory = util.BuildFactory()
    factory.addStep(gitStep)
    # use workdir instead of testpath because setuptools sticks its own eggs (including
    # the running version of buildbot) into sys.path *before* PYTHONPATH, but includes
    # "." in sys.path even before the eggs
    factory.addStep(
        steps.Trial(workdir="build/slave",
                    testpath=".",
                    tests='buildslave.test',
                    usePTY=False,
                    name='test slave'))
    if test_master:
        factory.addStep(
            steps.Trial(workdir="build/master",
                        testpath=".",
                        tests='buildbot.test',
                        usePTY=False,
                        name='test master'))
    return factory
def mklintyfactory():
    # Build factory that runs the lint toolchain (pyflakes, pylint, pep8)
    # over both the master and slave source trees.
    f = util.BuildFactory()
    f.addSteps([
        gitStep,
        # run linty tools in their own virtualenv, so we can control
        # the version of Buildbot running the metabuildbot!
        VirtualenvSetup(name='virtualenv setup',
                        no_site_packages=True,
                        virtualenv_packages=[
                            'pyflakes', 'pylint==1.1.0', 'pep8==1.4.6',
                            '--editable=master', '--editable=slave'
                        ],
                        virtualenv_dir='sandbox',
                        haltOnFailure=True),
        steps.PyFlakes(command="sandbox/bin/pyflakes master/buildbot",
                       name="pyflakes - master",
                       flunkOnFailure=True),
        steps.PyFlakes(command="sandbox/bin/pyflakes slave/buildslave",
                       name="pyflakes - slave",
                       flunkOnFailure=True),
        steps.ShellCommand(
            command="sandbox/bin/pylint --rcfile common/pylintrc buildbot",
            name="pylint - master",
            flunkOnFailure=True),
        steps.ShellCommand(
            command="sandbox/bin/pylint --rcfile common/pylintrc buildslave",
            name="pylint - slave",
            flunkOnFailure=True),
        steps.ShellCommand(
            command="sandbox/bin/pep8 --config common/pep8rc master/buildbot",
            name="pep8 - master",
            flunkOnFailure=True),
        steps.ShellCommand(
            command="sandbox/bin/pep8 --config common/pep8rc slave/buildslave",
            name="pep8 - slave",
            flunkOnFailure=True),
    ])
    return f
def createBuildfactory():
    """Factory that clones the repo, logs into the Docker registry, and
    builds/pushes the MaxScale docker image for the target version.

    :return: util.BuildFactory
    """
    factory = util.BuildFactory()
    factory.addSteps(common.cloneRepository())
    factory.addStep(steps.ShellCommand(
        # Fix: the step name interpolated the misspelled property
        # 'dockerRegistryUkrl'; use 'dockerRegistryUrl' as in the command.
        name=util.Interpolate(
            "Register in the Docker Registry %(prop:dockerRegistryUrl)s"),
        command=["docker", "login",
                 util.Property("dockerRegistryUrl"),
                 "--username", constants.DOCKER_REGISTRY_USER_NAME,
                 "--password", util.Secret("dockerRegistryPassword")
                 ],
        haltOnFailure=True
    ))
    factory.addSteps(common.downloadAndRunScript(
        name=util.Interpolate("Build docker image for %(prop:target)s"),
        scriptName="build_maxscale_docker_image.py",
        args=[
            "--product", util.Property("mdbciProductName"),
            "--product-version", util.Property("target"),
            "--name", util.Property("dockerProductName"),
            "--tag", util.Property("target"),
            "--registry", util.Property("dockerRegistryUrl")
        ],
        workdir=util.Interpolate("%(prop:builddir)s/build/maxscale/"),
    ))
    return factory
def createBuildFactory():
    """Assemble a build factory from the shared build steps."""
    factory = util.BuildFactory()
    factory.addSteps(createBuildSteps())
    return factory