def getFactory():
    """Build a factory that clones Qt5 with submodules, reads the configure
    help output, then appends the per-platform build steps."""
    factory = base.getFactory()
    # Full/fresh checkout so no stale artifacts survive between builds.
    factory.addStep(steps.Git(
        repourl="https://github.com/qt/qt5.git",
        branch=util.Interpolate('%(prop:branch)s'),
        mode='full',
        method='fresh',
        submodules=True,
        name='git operations',
        description='operations of git like pull clone fetch',
    ))
    # getHelp produces the command that reads configure's help text,
    # used later to assemble the configure invocation.
    factory.addStep(steps.ShellCommand(
        command=getHelp,
        name='read help',
        haltOnFailure=True,
        description='read help for generate the configure command',
    ))
    factory.addSteps(linuxSteps())
    factory.addSteps(windowsSteps())
    factory.addSteps(androidSteps())
    return factory
def __init__(self):
    """Factory: checkout, install the latest nightly Rust, run the pairing
    benchmarks, and (when the 'publish' property is set) POST the results
    to Codespeed."""
    # util.BuildFactory is an old-style class, so call the initializer
    # directly instead of using super().
    util.BuildFactory.__init__(self, [
        steps.Git(
            repourl='{{ pairing_git_source }}',
            mode='incremental',
        ),
        sh('git', 'clean', '-dfx', name='git clean'),
        # -N: only re-download when the server copy is newer.
        sh('wget', '-N',
           'https://static.rust-lang.org/dist/rust-nightly-x86_64-unknown-linux-gnu.tar.gz',
           name='download latest nightly Rust'),
        sh('tar', 'xzf', 'rust-nightly-x86_64-unknown-linux-gnu.tar.gz',
           name='extract Rust'),
        sh('./rust-nightly-x86_64-unknown-linux-gnu/install.sh',
           '--prefix=./rust-nightly',
           name='install Rust'),
        steps.ShellCommand(
            command=['rustc', '--version'],
            env={'PATH': ['./rust-nightly/bin', '${PATH}']},
            name='rustc version',
        ),
        CargoBenchRunner(
            'Pairing', 'pairing', ['--features', 'u128-support'],
            env={'PATH': ['./rust-nightly/bin', '${PATH}']},
            name='cargo bench',
        ),
        # Only runs when the build has publish=True; hidden when skipped.
        steps.POST(
            urllib.parse.urljoin(CODESPEED_URL, '/result/add/json/'),
            data={'json': getPerfJson},
            auth=('buildbot', CODESPEED_PASS),
            verify=CODESPEED_CERT,
            doStepIf=lambda s: s.getProperty('publish', False),
            hideStepIf=lambda results, s: results == util.SKIPPED,
        ),
    ])
def __init__(self, channel, options, *args, **kwargs):
    """Flatpak build factory for one release channel.

    channel: channel identifier, used to pick channel-<channel>.yaml.
    options: mapping; 'gpg-key' names the key used to sign the export.
    """
    util.BuildFactory.__init__(self, *args, **kwargs)
    channel_file = 'channel-%s.yaml' % channel
    build_command = [
        './flatpak-build', '--repo=repo',
        '--channel=' + channel_file, '--jobs=1', '--export',
        '--gpg-homedir=flatpak-gpg',
        '--gpg-sign=' + options['gpg-key'],
    ]
    self.addSteps([
        steps.ShellCommand(
            name='install tools',
            haltOnFailure=True,
            logEnviron=False,
            command=['sudo', 'dnf', 'install', '-y', 'flatpak',
                     'flatpak-builder', 'python3-PyYAML'],
        ),
        FlatpakGPGStep(name='setup gpg keys'),
        steps.Git(
            name='checkout sources',
            codebase=util.Property('codebase'),
            repourl=util.Property('repository'),
            branch=util.Property('branch'),
            mode='incremental',
            submodules=True,
            shallow=True,
        ),
        FlatpakPullStep(name='pull from master'),
        steps.ShellCommand(
            name='build',
            haltOnFailure=True,
            command=build_command,
        ),
        FlatpakRefStep(name='copy flatpakref files', channel=channel),
        FlatpakSyncStep(name='sync repo'),
    ])
def getFactory():
    """Build a factory: checkout quasarAppCoin, run qmake and `make deploy`,
    then copy the distro output to the shared folder."""
    factory = base.getFactory()
    # Fixed: the original bound this list to the name `list`, shadowing the
    # builtin; use a descriptive local name instead.
    build_steps = [
        steps.Git(
            repourl='https://github.com/QuasarApp/quasarAppCoin.git',
            branch=util.Interpolate('%(prop:Branch)s'),
            mode='incremental',
            submodules=True,
        ),
        steps.ShellCommand(command=['qmake']),
        steps.ShellCommand(command=['make', 'deploy']),
        steps.CopyDirectory(
            src="build/Distro",
            dest="~/shared/quasarAppCoin/",
        ),
    ]
    factory.addSteps(build_steps)
    return factory
def _add_pyside_setup_repo(self, factory):
    """Append steps that build a standalone pyside wheel and publish it."""
    repo = "pyside-setup"
    repo_url = "https://github.com/ivanalejandro0/" + repo + ".git"
    venv_name = "virtualenv_wheels"
    # Put the wheel-building virtualenv's bin dir first on PATH.
    venv_env = {'PATH': "../" + venv_name + '/bin' + ':${PATH}'}
    publish_cmd = self._publish_pyside_command('`ls -t *.whl | head -1`')

    factory.addSteps([
        steps.ShellCommand(
            command=['rm', '-rf', repo],
            workdir='.',
            env=venv_env,
            name="Remove previous pyside"),
        steps.Git(
            repourl=repo_url,
            branch="master",
            workdir=repo,
            mode='full',
            method='clobber',
            shallow=True,
            haltOnFailure=True,
            name="Pull " + repo_url),
        steps.ShellCommand(
            command=['python', 'setup.py', 'bdist_wheel', '--standalone'],
            workdir=repo,
            env=venv_env,
            name="Wheel for " + repo),
        steps.ShellCommand(
            command=publish_cmd,
            workdir=repo + '/dist/',
            name="Publish pyside"),
    ])
def addGetSourcecodeForProject(self, project, name=None, src_dir=None,
                               **kwargs):
    """Add a checkout step for a standalone repository named llvm-<project>.

    `project` names a repo that is not part of the monorepo; this is not
    enforced here. A caller-supplied `repourl` kwarg overrides the default
    URL; `workdir` is always ignored in favour of `src_dir`.
    """
    repourl = kwargs.pop('repourl', None) \
        or self.repourl_prefix + "llvm-%s.git" % project
    step_name = name or 'Checkout %s' % project
    # Check out to the given directory if any; otherwise this is a part
    # of the unified source tree.
    if src_dir is None:
        src_dir = 'llvm-%s' % project
    # We always check out into src_dir, so drop any workdir the caller gave.
    kwargs.pop('workdir', None)
    self.addStep(
        steps.Git(name=step_name,
                  repourl=repourl,
                  progress=True,
                  workdir=util.WithProperties(src_dir),
                  **kwargs))
def addGetSourcecodeSteps(self, **kwargs):
    """Add the single checkout step for the llvm-project monorepo."""
    checkout = steps.Git(
        name='Checkout the source code',
        repourl=self.repourl_prefix + "llvm-project.git",
        progress=True,
        workdir=util.WithProperties(self.monorepo_dir),
        **kwargs)
    self.addStep(checkout)
def _make_factory():
    """Factory for wwdicebot: sync, compile, test, dockerize, deploy."""
    version_flag = util.Interpolate(
        'VERSION=%(prop:branch)s-%(prop:buildnumber)s')
    f = util.BuildFactory()
    # Sync git
    f.addStep(steps.Git(
        repourl="https://github.com/klaital/wwdice",
        method='clobber',
        mode='full',
        shallow=True,
        haltOnFailure=True,
        name='git sync'))
    pipeline = [
        # Build binary (cross-compiled for linux)
        steps.ShellCommand(
            name='compile binary',
            command=['make', 'wwdicebot', version_flag],
            env={'GOOS': 'linux'},
            haltOnFailure=True),
        # Run tests
        steps.ShellCommand(
            name='run tests',
            command=['make', 'test'],
            haltOnFailure=True),
        # TODO: Run linters
        # Build Docker image
        steps.ShellCommand(
            name='build and push docker image',
            command=['make', 'wwdicebot-push'],
            haltOnFailure=True),
        # Update k8s deployment (only on the deploy branch)
        steps.ShellCommand(
            name='push to home cluster',
            command=['kubectl', '--kubeconfig', 'wwdicebot_kubeconfig',
                     'apply', '-f', 'cmd/wwdicebot/k8s.yaml'],
            haltOnFailure=True,
            doStepIf=_is_deploy_branch),
    ]
    for step in pipeline:
        f.addStep(step)
    # TODO: add liveness check to see if the new version is actually
    # deployed and reachable
    return f
def make_bundler_builder(self):
    """Return the BuilderConfig that builds a Bitmask bundle from the
    latest SUMO tarball and publishes it."""
    repo = "bitmask_bundler"
    repo_url = "https://github.com/leapcode/" + repo + ".git"
    workdir = "build"
    repo_dir = workdir + "/" + repo
    out_dir = "bundler_output"
    out_path = workdir + '/' + out_dir
    sumo = "leap.bitmask-latest-SUMO.tar.gz"
    publish = self._publish_bundle_command('`ls -t *.tar.gz | head -1`')

    factory = BuildFactory()
    factory.addSteps([
        steps.Git(repourl=repo_url,
                  branch="develop",
                  workdir=repo_dir,
                  mode='full',
                  method='clobber',
                  shallow=True,
                  haltOnFailure=True,
                  name="Pull " + repo_url),
        # Recreate a clean output directory for each run.
        steps.ShellCommand(command="rm -rf " + out_dir,
                           workdir=workdir,
                           name="Remove previous bundler dir"),
        steps.ShellCommand(command="mkdir " + out_dir,
                           workdir=workdir,
                           name="Create bundler dir"),
        steps.ShellCommand(command="cp bundle_pyinstaller.sh ../" + out_dir,
                           workdir=repo_dir,
                           haltOnFailure=True,
                           name="Copy bundle_pyinstaller"),
        steps.ShellCommand(command="mkdir files",
                           workdir=out_path,
                           name="Create auxiliary folder"),
        steps.ShellCommand(
            command="wget http://lizard.leap.se/sumo-tarball/%s" % sumo,
            workdir=out_path,
            haltOnFailure=True,
            name="Download sumo"),
        steps.ShellCommand(command="./bundle_pyinstaller.sh " + sumo,
                           workdir=out_path,
                           name="Create bundle"),
        steps.ShellCommand(command=publish,
                           workdir=out_path,
                           name="Publish bundle"),
    ])
    return BuilderConfig(name="builder_bundler",
                         slavenames=self.slaves.leap_names(),
                         factory=factory)
def getFactory():
    """Factory that checks out buildbot's hello-world and runs its
    trial-based test suite."""
    factory = base.getFactory()
    checkout = steps.Git(
        repourl='git://github.com/buildbot/hello-world.git',
        mode='incremental')
    run_tests = steps.ShellCommand(
        command=["trial", "hello"],
        env={"PYTHONPATH": "."})
    factory.addStep(checkout)
    factory.addStep(run_tests)
    return factory
def __init__(self, build_steps):
    """
    Takes a list of Buildbot steps.
    Prefer using DynamicServoFactory to using this class directly.
    """
    checkout = steps.Git(repourl=SERVO_REPO, mode="full", method="clobber")
    # util.BuildFactory is an old-style class so we cannot use super()
    # but must hardcode the superclass here
    util.BuildFactory.__init__(self, [checkout] + build_steps)
def cloneRepository():
    """Clone MaxScale repository using default configuration options"""
    step_name = util.Interpolate(
        "Clone repository '%(prop:repository)s', branch '%(prop:branch)s'")
    clone = steps.Git(name=step_name,
                      repourl=util.Property('repository'),
                      branch=util.Property('branch'),
                      mode='incremental',
                      haltOnFailure=True)
    return [clone]
def steps_build_common(env, config=None):
    """Initial steps shared by kernel build jobs: DNS caching workaround,
    tools and kernel checkouts, compiler/kernel version properties, and
    the make-config step."""
    st = []
    # OpenStack machines have frequent github.com name resolution failures:
    # fatal: unable to access 'https://github.com/krzk/tools.git/': Could not resolve host: github.com
    # Cache the address first.
    st.append(steps.ShellCommand(
        command=util.Interpolate(
            '%(prop:builddir:-~/)s/tools/buildbot/name-resolve-fixup.sh'),
        haltOnFailure=False,
        warnOnFailure=True,
        flunkOnFailure=False,
        name='Cache DNS addresses (workaround)'))
    st.append(steps.Git(
        repourl='https://github.com/krzk/tools.git',
        name='Clone krzk tools sources',
        mode='incremental',
        alwaysUseLatest=True,
        branch='master',
        getDescription=False,
        workdir='tools',
        haltOnFailure=True,
        env=util.Property('git_env')))
    # Run full/fresh checkout to get rid of any old DTBs or binaries from
    # KBUILD_OUTPUT. For example when compiling stable kernel without
    # given DTB, old DTB from linux-next might remain.
    # Removal of them is necessary for boot tests so they do not re-use
    # wrong binaries... and anyway it is nice to test clean build.
    st.append(steps.Git(
        repourl=repo_git_kernel_org,
        name='Clone the sources',
        mode='full',
        method='fresh',
        haltOnFailure=True,
        env=util.Property('git_env')))
    st.append(steps.SetPropertyFromCommand(
        command='${CROSS_COMPILE}gcc --version | head -n 1',
        property='gcc_version',
        haltOnFailure=True,
        env=env,
        name='Set property: gcc version'))
    st.append(steps.SetPropertyFromCommand(
        command=[util.Interpolate(CMD_MAKE), '-s', 'kernelversion'],
        property='kernel_version',
        haltOnFailure=True,
        env=env,
        name='Set property: kernel version'))
    st.append(step_make_config(env, config))
    return st
def get_builders():
    """Return the builder configurations: a single 'runtests' builder that
    checks out hello-world and runs trial on it."""
    factory = util.BuildFactory()
    # check out the source
    factory.addStep(steps.Git(
        repourl='git://github.com/buildbot/hello-world.git',
        mode='incremental'))
    # run the tests (note that this will require that 'trial' is installed)
    factory.addStep(steps.ShellCommand(
        command=["trial", "hello"],
        env={"PYTHONPATH": "."}))
    runtests = util.BuilderConfig(name="runtests",
                                  workernames=["default"],
                                  factory=factory)
    return [runtests]
def createBuildSteps():
    """Steps that check out the MaxScale docker repo and build/push the
    MaxScale Docker image."""
    checkout = steps.Git(
        repourl=util.Property("maxscale_docker_repository"),
        branch=util.Property("maxscale_docker_repository_branch"),
        mode="full",
    )
    image_steps = common.downloadAndRunScript(
        "build_maxscale_docker_image.py",
        args=[
            "--repository",
            util.Interpolate(
                "%(prop:ci_url)s/%(prop:target)s/mariadb-maxscale/ubuntu"),
            "--tag", util.Property("target"),
            "--name", util.Property("docker_product_name"),
            "--registry", util.Property("docker_registry_url"),
        ],
        workdir=util.Interpolate("%(prop:builddir)s/build/maxscale/"),
    )
    return [checkout, *image_steps]
def __init__(self, flavor: str,
             buildmaster_setup: clipos.buildmaster.SetupSettings,
             buildbot_worker_version: Optional[str] = None):
    """Factory that rebuilds the Dockerized CLIP OS worker image for the
    given flavor."""
    # Initialize Build factory from parent class:
    super().__init__()

    # Fetch the current configuration repository (which also holds the
    # Dockerfiles for the build workers environments):
    self.addStep(steps.Git(
        name="git",
        description="fetch/synchronize the CLIP OS buildbot Git repository",
        repourl=util.Property("repository"),
        branch=util.Property("branch"),
        mode="full",        # there's no need to keep previous build artifacts
        method="clobber",   # obliterate everything beforehand
    ))

    # Resolve where the flavor's Dockerfile lives and what to tag the image.
    flavor_settings = clipos.workers.DockerLatentWorker.FLAVORS[flavor]
    dockerfile_dir = os.path.join(self.workdir,
                                  flavor_settings['docker_build_context'])
    image_tag = clipos.workers.DockerLatentWorker.docker_image_tag(flavor)

    # use current version if not specified in the props
    if not buildbot_worker_version:
        buildbot_worker_version = str(buildbot.version)

    self.addStep(steps.ShellCommand(
        name="docker build",
        description="build the Dockerized CLIP OS build environment image",
        command=[
            "docker", "build",
            "--no-cache",   # do not use cache to ensure up-to-date images
            "--rm",         # remove intermediate containers
            "--tag", image_tag,
            "--build-arg",
            "BUILDBOT_WORKER_VERSION={}".format(buildbot_worker_version),
            ".",            # docker build requires the path to its context
        ],
        workdir=dockerfile_dir,
        env={
            "DOCKER_HOST": buildmaster_setup.docker_host_uri,
        },
    ))
def getFactory():
    """Checkout sources, stop the running npm service, reinstall
    dependencies, and start the new version."""
    def not_stop_force(step):
        # Most steps are skipped when a "stop" force build was requested;
        # 'npm stop' itself always runs.
        return not isStopForce(step)

    factory = base.getFactory()
    factory.addStep(steps.Git(
        repourl=util.Interpolate('%(prop:repository)s'),
        branch=util.Interpolate('%(prop:branch)s'),
        mode='full',
        method='fresh',
        submodules=True,
        name='git operations',
        description='operations of git like pull clone fetch',
        doStepIf=not_stop_force,
    ))
    factory.addStep(steps.ShellCommand(
        command=['npm', "stop"],
        haltOnFailure=True,
        name='npm stop',
        description='stop old version',
    ))
    factory.addStep(steps.ShellCommand(
        command=['npm', "i"],
        doStepIf=not_stop_force,
        haltOnFailure=True,
        name='npm install',
        description='install all dependecies',
    ))
    factory.addStep(steps.ShellCommand(
        command=['npm', "start"],
        doStepIf=not_stop_force,
        haltOnFailure=True,
        name='npm start',
        description='install new versio to server',
    ))
    return factory
def __init__(self):
    """Checkout + clean, record the CPU count, run pre-build/build steps,
    then fetch the Zcash network parameters."""
    initial_steps = [
        steps.Git(
            repourl=git_source,
            mode='incremental',
        ),
        sh('git', 'clean', '-dfx', name='git clean'),
    ]
    util.BuildFactory.__init__(self, initial_steps)
    self.addStep(steps.SetPropertyFromCommand(command=nproc,
                                              property="numcpus"))
    self._addPreBuildSteps()
    self._addBuildSteps()
    # Ensures the worker has the params; usually a no-op
    self.addStep(sh('./zcutil/fetch-params.sh', '--testnet',
                    haltOnFailure=True,
                    locks=[params_lock.access('exclusive')]))
def add_artifactsrc_yml_build_steps(build_factory, repo):
    """Add steps that regenerate artifactsrc.yml, commit and push it when
    it changed, then reconfigure the master."""
    build_factory.addStep(steps.Git(repourl=repo, mode='incremental'))
    build_factory.addStep(
        ShellStepWithName(
            'generate artifacts.yml',
            # BUG FIX: the existence test used to check /tmp/artifacts.yml
            # while removing /tmp/artifactsrc.yml, so a stale generated file
            # was never cleaned up. Test and remove the same file.
            command=
            'if [ -e /tmp/artifactsrc.yml ]; then rm -f /tmp/artifactsrc.yml; fi'
            + ' && node /home/buildbot/artifactsrc-yml-generator artifactsrc.yml /tmp/artifactsrc.yml'
        ))
    build_factory.addStep(
        ShellStepWithName('copy artifactsrc.yml',
                          command='cp /tmp/artifactsrc.yml artifactsrc.yml'))
    build_factory.addStep(
        ShellStepWithName(
            'commit changed artifactsrc.yml',
            # Commit fails harmlessly when nothing changed.
            command=
            'git commit -am "Dependencies changed" || echo Dependencies not changed'
        ))
    build_factory.addStep(
        ShellStepWithName('push changed artifactsrc.yml',
                          command='git pull && git push'))
    build_factory.addStep(ReconfigMaster())
def __init__(self, arch):
    """Checkout, install the latest nightly Rust for *arch*, and run
    `cargo test --release` with that toolchain on PATH."""
    nightly = 'rust-nightly-%s' % arch
    tarball = '%s.tar.gz' % nightly
    util.BuildFactory.__init__(self, [
        steps.Git(
            repourl='{{ sapling_git_source }}',
            mode='incremental',
        ),
        sh('git', 'clean', '-dfx', name='git clean'),
        # -N: only re-download when the server copy is newer.
        sh('wget', '-N',
           'https://static.rust-lang.org/dist/%s' % tarball,
           name='download latest nightly Rust'),
        sh('tar', 'xzf', tarball, name='extract Rust'),
        sh('./%s/install.sh' % nightly, '--prefix=./rust-nightly',
           name='install Rust'),
        steps.ShellCommand(
            command=['rustc', '--version'],
            env={'PATH': ['./rust-nightly/bin', '${PATH}']},
            name='rustc version',
        ),
        steps.ShellCommand(
            command=['cargo', 'test', '--release'],
            env={'PATH': ['./rust-nightly/bin', '${PATH}']},
            name='cargo test',
        ),
    ])
def getFactory():
    """Checkout sources, run the per-platform builds, then copy the result
    to the shared folder and fix its permissions."""
    factory = base.getFactory()
    factory.addStep(steps.Git(
        repourl=util.Interpolate('%(prop:repository)s'),
        branch=util.Interpolate('%(prop:branch)s'),
        mode='full',
        method='fresh',
        submodules=True,
        name='git operations',
        description='operations of git like pull clone fetch',
    ))
    for platform_steps in (LinuxSteps(), WinSteps(), AndroidSteps()):
        factory.addSteps(platform_steps)
    # Deploy only when isDeploy() says so for this build.
    factory.addStep(steps.CopyDirectory(
        src=util.Interpolate('build/%(prop:copyFolder)s'),
        dest=destDir,
        doStepIf=isDeploy,
        name='copy buildet files',
        description='copy buildet files to shared folder',
    ))
    factory.addStep(steps.ShellCommand(
        command=permission,
        name='set permission',
        haltOnFailure=True,
        description='set permission for shared folder',
    ))
    return factory
def nix_update_flake_config(
    worker_names: list[str], projectname: str, github_token_secret: str
) -> util.BuilderConfig:
    """
    Updates the flake an opens a PR for it.
    """
    clone_url = util.Interpolate(
        f"https://*****:*****@github.com/{projectname}"
    )
    # Identity used for the flake.lock update commit.
    git_identity = dict(
        GIT_AUTHOR_NAME="buildbot",
        GIT_AUTHOR_EMAIL="*****@*****.**",
        GIT_COMMITTER_NAME="buildbot",
        GIT_COMMITTER_EMAIL="*****@*****.**",
    )
    factory = util.BuildFactory()
    factory.addStep(steps.Git(
        repourl=clone_url,
        method="clean",
        submodules=True,
        haltOnFailure=True,
    ))
    factory.addStep(steps.ShellCommand(
        name="Update flakes",
        env=git_identity,
        command=[
            "nix", "flake", "update",
            "--commit-lock-file",
            "--commit-lockfile-summary", "flake.lock: Update",
        ],
        haltOnFailure=True,
    ))
    factory.addStep(steps.ShellCommand(
        name="Force-Push to update_flake_lock branch",
        command=[
            "git", "push", "--force", "origin",
            "HEAD:refs/heads/update_flake_lock",
        ],
        haltOnFailure=True,
    ))
    factory.addStep(CreatePr(
        name="Create pull-request",
        env=dict(GITHUB_TOKEN=util.Secret(github_token_secret)),
        command=[
            "gh", "pr", "create",
            "--repo", projectname,
            "--title", "flake.lock: Update",
            "--body", "Automatic buildbot update",
            "--head", "refs/heads/update_flake_lock",
            "--base", "master",
        ],
    ))
    return util.BuilderConfig(
        name="nix-update-flake",
        workernames=worker_names,
        factory=factory,
        properties=dict(virtual_builder_name="nix-update-flake"),
    )
def __init__(self, Environ):
    """Define the reusable step lists (checkout/configure/make/test) shared
    by the Quantum ESPRESSO and Wannier90 test-farm builders.

    Environ: environment mapping passed to every shell step.
    """
    # Max number of running builds
    build_lock = WorkerLock('build', maxCount=2,
                            maxCountForWorker={'farmer-worker1': 2})
    # All repositories used by the builders.
    all_repos = {
        'quantum_espresso': {
            'repository': 'https://gitlab.com/QEF/q-e.git',
            'branch': 'develop',
        },
        'quantum_espresso_GPU': {
            'repository': 'https://gitlab.com/QEF/q-e-gpu.git',
            'branch': 'gpu-develop',
        },
        'wannier90': {
            'repository': 'https://github.com/wannier-developers/wannier90.git',
            'branch': 'develop',
        },
    }

    def checkout(repo_key, step_name, workdir=None):
        # One-entry step list: Git checkout of the given repository.
        kwargs = dict(name=step_name,
                      method="copy",
                      repourl=all_repos[repo_key]["repository"],
                      branch=all_repos[repo_key]["branch"],
                      haltOnFailure=True,
                      alwaysUseLatest=True)
        if workdir is not None:
            kwargs["workdir"] = workdir
        return [steps.Git(**kwargs)]

    def cmd(step_name, command, workdir="build", halt=True, done=None):
        # One-entry step list: a lock-counted ShellCommand in *workdir*.
        return [ShellCommand(name=step_name,
                             command=command,
                             env=Environ,
                             workdir=workdir,
                             locks=[build_lock.access('counting')],
                             haltOnFailure=halt,
                             descriptionDone=[done or step_name])]

    def suite(step_name, target, done):
        # Test-suite run; failures are reported but do not halt the build.
        return cmd(step_name, ["make", target],
                   workdir="build/test-suite", halt=False, done=done)

    ############################################################################
    # QE code
    ############################################################################
    self.checkout_qe = checkout('quantum_espresso', "checkout_qe")
    # NOTE: the GPU checkout step also carries the name "checkout_qe",
    # matching the original configuration.
    self.checkout_qe_GPU = checkout('quantum_espresso_GPU', "checkout_qe")

    self.configure_qe = cmd("configure_qe", ["./configure"])
    self.configure_qe_hdf5 = cmd(
        "configure_qe_hdf5",
        ["./configure", "--with-hdf5=/home/buildbot/local/hdf5-104-gcc102"])
    self.configure_qe_serial = cmd(
        "configure_qe_serial", ["./configure", "--enable-parallel=no"])
    self.configure_qe_mp = cmd(
        "configure_qe_mp",
        ["./configure", "--enable-openmp", "--enable-parallel"])
    self.configure_qe_GPU = cmd(
        "configure_qe_GPU",
        ["./configure", "--with-cuda=/opt/pgi/linux86-64/2019/cuda/10.1/",
         "--with-cuda-runtime=10.1", "--with-cuda-cc=60",
         "--with-scalapack=no", "--enable-openmp"])

    # Rewrite FFLAGS in make.inc to strict debugging flags.
    self.debug_flags = cmd("debug_flags", Interpolate(
        'sed -i "s/FFLAGS = -O3 -g/FFLAGS = -g -Wall -fbounds-check -frange-check -finit-integer=987654321 -finit-real=nan -finit-logical=true -finit-character=64/g" ENVIRONMENT'
        .replace('ENVIRONMENT', 'make.inc')))
    # NOTE(review): this sed replaces TESTCODE_NPROCS=4 with itself (a no-op)
    # — presumably a placeholder for tuning the process count; confirm intent.
    self.env_qe1 = cmd("env_qe1", Interpolate(
        'sed -i "s/TESTCODE_NPROCS=4/TESTCODE_NPROCS=4/g" ENVIRONMENT'),
        workdir="build/test-suite/")
    self.env_qe2 = cmd("env_qe2", Interpolate(
        'echo "export OMP_NUM_THREADS=1" >> ENVIRONMENT'),
        workdir="build/test-suite/")

    self.make_pw = cmd("make_pw",
                       ["make", "-j", "4", "pwall", "cp", "ld1", "hp"])
    self.make_pw_GPU = cmd("make_pw_GPU", ["make", "-j", "4", "pw"],
                           done="make_pw")
    self.make_ph = cmd("make_ph", ["make", "ph"])
    self.make_epw0 = cmd("make_epw0", ["make"], workdir="build/EPW/src/",
                         done="make_epw")
    self.make_epw = cmd("make_epw", ["make", "epw"])
    self.make_lr = cmd("make_lr", ["make", "-j", "8", "lrmods"])

    self.test_clean = cmd("test_clean", ["make", "clean"],
                          workdir="build/test-suite", halt=False)
    # Unconditional cleanup: deliberately no name/env/locks, never flunks.
    self.clean = [ShellCommand(command=["make", "veryclean"],
                               alwaysRun=True,
                               flunkOnFailure=False,
                               workdir="build")]
    self.test0 = cmd("test_prolog", ["make", "prolog"],
                     workdir="build/test-suite", halt=False,
                     done="make prolog")

    self.test_para_PW = suite("PW_para", "run-tests-pw-parallel",
                              "PW para tests")
    self.test_serial_PW = suite("PW_serial", "run-tests-pw-serial",
                                "PW serial tests")
    self.test_para_CP = suite("CP_para", "run-tests-cp-parallel",
                              "CP para tests")
    self.test_serial_CP = suite("CP_serial", "run-tests-cp-serial",
                                "CP serial tests")
    self.test_para_PH = suite("PH_para", "run-tests-ph-parallel",
                              "PH para tests")
    self.test_serial_PH = suite("PH_serial", "run-tests-ph-serial",
                                "PH serial tests")
    self.test_para_EPW = suite("EPW_para", "run-tests-epw-parallel",
                               "EPW para tests")
    self.test_serial_EPW = suite("EPW_serial", "run-tests-epw-serial",
                                 "EPW serial tests")
    self.test_para_HP = suite("HP_para", "run-tests-hp-parallel",
                              "HP para tests")
    self.test_serial_HP = suite("HP_serial", "run-tests-hp-serial",
                                "HP serial tests")

    # (A long commented-out SternheimerGW (SGW) step section was removed
    # here; recover it from version control if those builders return.)

    ############################################################################
    # Wannier code
    ############################################################################
    self.checkout_wannier = checkout('wannier90', "checkout_wannier",
                                     workdir="build/WAN")

    def wan_config(inc_file):
        # Install a farm-specific make.inc into the Wannier90 tree.
        return cmd("cp_config",
                   ["cp", "test-suite/config/TestFarm/" + inc_file,
                    "make.inc"],
                   workdir="build/WAN")

    self.cpconfig = wan_config("farmer_gcc640_serial.inc")
    self.cpgcc730 = wan_config("farmer_gcc730_openmpi1107.inc")
    self.cpintel17 = wan_config("farmer_intel17_openmpi313.inc")
    self.cpintel17i = wan_config("farmer_intel17_impi.inc")
    self.cpintel18 = wan_config("farmer_intel18_openmpi313.inc")
    self.cppgi18 = wan_config("farmer_pgi18_mvapich23b.inc")

    self.clean_wannier = cmd("clean_wannier", ["make", "clean"],
                             workdir="build/WAN")
    self.clean_tests = cmd("clean_tests", ["python", "clean_tests"],
                           workdir="build/WAN/test-suite")
    self.make_wannier = cmd("make_wannier", ["make"], workdir="build/WAN")
    self.make_wannier2 = cmd("make_wannier2",
                             ["make", "default", "w90chk2chk"],
                             workdir="build/WAN")
    self.test_wannier_serial = cmd("test_wannier_seq",
                                   ["./run_tests", "--category=default"],
                                   workdir="build/WAN/test-suite")
    self.test_wannier_para = cmd(
        "test_wannier_para",
        ["./run_tests", "--category=default", "--numprocs=4"],
        workdir="build/WAN/test-suite")
def getBuildPipeline():
    """Assemble the Ansible deployment pipeline as a BuildFactory.

    Checks out the ansible scripts, records their revision, installs the
    galaxy dependencies, fetches and locks down the deploy key, generates
    the install/deploy/ingest command steps for the selected environment,
    and always removes the key again at the end.
    """
    # The same probe command drives install, deploy and ingest generation.
    env_probe = "ls {{ buildbot_config }}/envs/ | grep %(prop:deploy_env)s"
    deploy_key = "%(prop:builddir)s/%(prop:deploy_env)s"

    pipeline = [
        steps.Git(
            repourl="{{ ansible_scripts_url }}",
            branch=util.Property('branch'),
            alwaysUseLatest=True,
            mode="full",
            method="fresh"),
        steps.SetPropertyFromCommand(
            command="git rev-parse HEAD",
            property="ansible_script_rev",
            flunkOnFailure=True,
            warnOnFailure=True,
            haltOnFailure=True,
            workdir="build",
            name="Get ansible script revision"),
        common.shellCommand(
            command=['ansible-galaxy', 'install', '-r', 'requirements.yml'],
            name="Installing Ansible dependencies"),
        common.copyAWS(
            pathFrom="s3://{{ s3_private_bucket }}/{{ groups['master'][0] }}/env/%(prop:deploy_env)s",
            pathTo="%(prop:builddir)s/%(prop:deploy_env)s",
            name="Fetching deploy key"),
        # ssh refuses keys that are world-readable, so tighten the mode.
        common.shellCommand(
            command=['chmod', '600', util.Interpolate(deploy_key)],
            name="Fixing deploy key permissions"),
        GenerateInstallCommands(
            command=util.Interpolate(env_probe),
            name="Determining install targets",
            haltOnFailure=True,
            flunkOnFailure=True),
        GenerateDeployCommands(
            command=util.Interpolate(env_probe),
            name="Determining deploy targets",
            haltOnFailure=True,
            flunkOnFailure=True),
        common.shellCommand(
            command=["sleep", "300"],
            name="Sleeping to let Opencast finish starting up"),
        # No -u here: this runs in the same directory as the checked-out
        # ansible scripts, whose group_vars/all.yml specifies ansible_user.
        GenerateIngestCommands(
            command=util.Interpolate(env_probe),
            name="Determining ingest targets",
            haltOnFailure=True,
            flunkOnFailure=True),
        # Always remove the deploy key, even when an earlier step failed.
        common.shellCommand(
            command=['rm', '-rf', util.Interpolate(deploy_key)],
            alwaysRun=True,
            name="Cleanup"),
    ]

    factory = util.BuildFactory()
    for build_step in pipeline:
        factory.addStep(build_step)
    return factory
import textwrap import itertools from buildbot.plugins import steps from buildbot.plugins import util from metabbotcfg.common import GIT_URL from metabbotcfg.slaves import slaves, get_slaves, names _PACKAGE_STASH = 'http://ftp.buildbot.net/pub/metabuildbot/python-packages/' builders = [] # slaves seem to have a hard time fetching from github, so retry gitStep = steps.Git(repourl=GIT_URL, mode='full', method='fresh', retryFetch=True) ####### Custom Steps # only allow one VirtualenvSetup to run on a slave at a time. This prevents # collisions in the shared package cache. veLock = util.SlaveLock('veLock') class VirtualenvSetup(steps.ShellCommand): def __init__(self, virtualenv_dir='sandbox', virtualenv_python='python', virtualenv_packages=[], no_site_packages=False,
####### BUILDERS # The 'builders' list defines the Builders, which tell Buildbot how to perform a build: # what steps, and which workers can execute them. Note that any particular build will # only take place on one worker. factory = util.BuildFactory() # change workdir for each projects factory.workdir = util.Interpolate('%(src::project)s') # check out the source factory.addStep( steps.Git(name="source checkout", logEnviron=False, repourl=util.Interpolate( '%(kw:github_base_url)s/%(src::project)s.git', github_base_url=Config.get("github", "base_url")), mode='incremental', haltOnFailure=True)) # pull latest image for build cache factory.addStep( steps.ShellCommand(name="docker pull latest(for cache, ignore error)", logEnviron=False, command=["docker", "pull", getLatestImage], haltOnFailure=False, warnOnFailure=False, flunkOnFailure=False)) # docker build factory.addStep(
def run(self):
    """Run this step's remote command, then schedule checkout/build steps.

    After executing the step's own shell command, inspects the optional
    'options' build property to decide whether a complete rebuild is
    required and injects the generated step list right after the current
    step via addStepsAfterCurrentStep.
    """
    # Execute the step's own remote shell command first (generator style:
    # presumably wrapped by inlineCallbacks — decorator not visible here).
    command = yield self.makeRemoteShellCommand()
    yield self.runCommand(command)
    # 'options' is an optional build property; when present it must carry
    # 'force_complete_rebuild' (a missing key would raise KeyError —
    # NOTE(review): looks like schedulers always supply it; confirm).
    force_complete_rebuild = None
    if self.hasProperty('options'):
        options = self.getProperty('options')
        force_complete_rebuild = options['force_complete_rebuild']
    buildsteps = []
    # One <REPO>_ROOT definition per configured repository; REPOS, define,
    # ip and P are module-level project helpers not visible in this chunk.
    for repo in REPOS:
        buildsteps.append(
            define(
                str(repo).upper() + '_ROOT',
                ip(REPOS[repo]['checkout_dir'])))
    if force_complete_rebuild:
        buildsteps.append(define('FORCE_COMPLETE_REBUILD', 'true'))
        # Wipe the previous build tree before the fresh clobber checkouts.
        buildsteps.append(
            steps.ShellCommand(name='Delete old build directory',
                               command=['rm', '-rf', 'build'],
                               workdir=ip(CHECKOUT_BASE_DIR)))
        for repo in REPOS:
            # Prefer the clone URL when the codebase provides one.
            if 'repository_clone_url' in codebases[repo].keys():
                url = codebases[repo]['repository_clone_url']
            else:
                url = codebases[repo]['repository']
            branch = REPOS[repo]['default_branch']
            # mode='full'/method='clobber': discard any existing checkout.
            buildsteps.append(
                steps.Git(repourl=url,
                          branch=branch,
                          codebase=repo,
                          name="checkout: {0}".format(url),
                          description="checkout: {0}@{1}".format(
                              url, branch),
                          timeout=1200,
                          progress=True,
                          submodules=True,
                          workdir=P(str(repo).upper() + '_ROOT'),
                          mode='full',
                          method='clobber'))
    else:
        self.build.addStepsAfterCurrentStep(
            [define('FORCE_COMPLETE_REBUILD', 'false')])
        # Same per-repo checkouts as above, but with the default (
        # non-clobbering) Git mode so existing working copies are reused.
        for repo in REPOS:
            if 'repository_clone_url' in codebases[repo].keys():
                url = codebases[repo]['repository_clone_url']
            else:
                url = codebases[repo]['repository']
            branch = REPOS[repo]['default_branch']
            buildsteps.append(
                steps.Git(repourl=url,
                          branch=branch,
                          codebase=repo,
                          name="checkout: {0}".format(url),
                          description="checkout: {0}@{1}".format(
                              url, branch),
                          timeout=1200,
                          progress=True,
                          submodules=True,
                          workdir=P(str(repo).upper() + '_ROOT')))
    buildsteps.append(
        steps.ShellCommand(name='Create build directory',
                           command=['mkdir', '-p', 'build'],
                           workdir=ip(CHECKOUT_BASE_DIR),
                           hideStepIf=True))
    self.build.addStepsAfterCurrentStep(buildsteps)
    defer.returnValue(command.results())
from buildbot.plugins import util, steps

from .helpers.steps import venv_step, service_step

# Interpreter inside the worker's virtualenv, used for management commands.
PYTHON_EX = "/var/buildbot/workers/backoffice_api/venv/bin/python3"

# Checkout, switch to the production config, refresh the virtualenv, run
# migrations, then (re)start the service.
_backoffice_steps = [
    steps.Git(
        repourl='[email protected]:CentraleFitness/backoffice-server.git',
        mode='incremental'),
    steps.ShellCommand(
        command=["mv", "config/config_prod.py", "config/config.py"]),
    venv_step('backoffice_api', 'BackofficeApi'),
    steps.ShellCommand(
        command=[PYTHON_EX, "manage.py", "migrate"]),
    service_step('backoffice_api', pidfile="/var/run/backoffice_api.pid"),
]

backoffice_api_builder = util.BuilderConfig(
    name='BackofficeApi',
    workername='backoffice_api',
    factory=util.BuildFactory(_backoffice_steps),
)
def __init__(self, *args, **kwargs):
    """Factory that builds and publishes a nightly Liri OS live image.

    Updates the build container, checks out the kickstart sources,
    flattens the kickstart, runs livecd-creator, then publishes the ISO
    and its checksum under /repo/images/nightly and prunes images older
    than a week.
    """
    util.BuildFactory.__init__(self, *args, **kwargs)
    iso_title = 'Liri OS'
    fedora_release = '30'
    build_steps = [
        ImagePropertiesStep(name='set properties'),
        steps.ShellCommand(
            name='update container',
            haltOnFailure=True,
            command=['dnf', 'update', '-y'],
        ),
        steps.ShellCommand(
            name='install tools',
            haltOnFailure=True,
            command=[
                'dnf', 'install', '-y', 'git', 'spin-kickstarts',
                'pykickstart', 'livecd-tools'
            ],
        ),
        steps.Git(
            name='checkout sources',
            codebase=util.Property('codebase'),
            repourl=util.Property('repository'),
            branch=util.Property('branch'),
            mode='incremental',
            submodules=True,
            shallow=True,
        ),
        # Resolve all %include directives into a single kickstart file.
        steps.ShellCommand(
            name='ksflatten',
            haltOnFailure=True,
            command=[
                'ksflatten',
                Interpolate('--config=%(prop:product)s-livecd.ks'),
                '-o', 'livecd.ks'
            ],
        ),
        steps.RemoveDirectory(
            name='clean cache',
            dir='/build/cache',
            doStepIf=IsCacheDisabled,
        ),
        # Image creation is slow; allow up to an hour.
        steps.ShellCommand(
            name='build image',
            haltOnFailure=True,
            timeout=3600,
            command=[
                'livecd-creator',
                '--releasever=' + fedora_release,
                '--config=livecd.ks',
                Interpolate('--fslabel=%(prop:imgname)s'),
                '--title', iso_title,
                Interpolate('--product=%(prop:product)s'),
                '--cache=/build/cache'
            ],
        ),
        steps.ShellCommand(
            name='checksum',
            haltOnFailure=True,
            command=[
                'bash', '-c',
                Interpolate(
                    'sha256sum -b --tag %(prop:isofilename)s > /repo/images/nightly/%(prop:checksumfilename)s'
                )
            ],
        ),
        steps.ShellCommand(
            name='move file',
            command=[
                'mv',
                Interpolate('%(prop:isofilename)s'),
                '/repo/images/nightly/'
            ],
        ),
        # Keep only the last week of nightly images.
        steps.ShellCommand(
            name='remove old images',
            command=[
                'bash', '-c',
                'find /repo/images/nightly -type f -mtime +7 -exec rm {} \;'
            ],
        ),
    ]
    self.addSteps(build_steps)
from buildbot.plugins import util, steps

_VITRINE_REPO = '[email protected]:CentraleFitness/site_vitrine.git'


def _vitrine_factory(install_cmd, build_cmd):
    """Check out the showcase site, build it, then restart it via forever."""
    return util.BuildFactory([
        steps.Git(repourl=_VITRINE_REPO, mode='incremental'),
        steps.ShellCommand(command=install_cmd),
        steps.ShellCommand(command=build_cmd),
        steps.ShellCommand(command=["forever", "stop", "bin/www"]),
        steps.ShellCommand(command=["forever", "start", "bin/www"]),
    ])


# yarn-based pipeline.
site_vitrine_builder = util.BuilderConfig(
    name='SiteVitrine',
    workername='site_vitrine',
    factory=_vitrine_factory(["yarn"], ["yarn", "build"]),
)

# npm-based alternative on the same worker.
site_vitrine_builder_alt = util.BuilderConfig(
    name='SiteVitrine_npm',
    workername='site_vitrine',
    factory=_vitrine_factory(["npm", "install"], ["npm", "run", "build"]),
)