def catkin_ws_steps(job_name, repo, arch, oscode, distro):
    """Build a factory that cleans, checks out and binary-builds a catkin workspace.

    NOTE(review): ``arch``, ``oscode`` and ``distro`` are not used by any of the
    visible steps — confirm whether callers rely on them.
    """
    workspace_factory = BuildFactory()
    steps = [
        # Wipe the previous build directory completely.
        RemoveDirectory(
            name=job_name + '-clean',
            dir=Interpolate('%(prop:builddir)s'),
        ),
        # Releases are tagged and not branched, so track the configured branch;
        # alwaysUseLatest avoids broken builds when schedulers send a wrong tag/rev.
        Git(
            repourl=repo["url"],
            branch=repo["branch"],
            alwaysUseLatest=True,
            mode='full',  # clean out old versions
        ),
        # Build all packages in catkin_ws; if one package fails, the build fails.
        ShellCommand(
            haltOnFailure=True,
            name=job_name + '-buildbinary',
            # Current workdir is the git source dir.
            command=['bloom-local-deb'],
            descriptionDone=['binarydeb', job_name],
        ),
    ]
    for step in steps:
        workspace_factory.addStep(step)
    return workspace_factory
def addSimpleBuilder(self, name, buildername, category, repourl, builderconfig, sos, sbranch, bparams):
    """Private. Add a single builder on the given OS type for the given repo and branch."""
    factory = BuildFactory()
    # Full fresh copy of the repository, including submodules and tag metadata.
    factory.addStep(Git(repourl=repourl,
                        mode='full',
                        submodules=True,
                        method='copy',
                        branch=sbranch,
                        getDescription={'tags': True}))
    # Optionally pin the checkout to an explicit tag from the builder config.
    if "tag" in builderconfig and not(builderconfig["tag"] is None):
        # NOTE(review): .encode() returns bytes on Python 3 — presumably this
        # config targets Python 2; confirm before porting.
        stag = builderconfig["tag"].encode('ascii', 'ignore')
        factory.addStep(ShellCommand(
            command=['git', 'checkout', stag],
            workdir="build",
            description="checkout tag"))
    # Delegate all knowledge of how to build to a script called buildshim in the
    # project directory.  Call this script in nine standardized steps; it should
    # ignore steps that it doesn't need.  Pass the step name as the first arg,
    # and if params was given in the json for this builder, pass that as the
    # second arg.
    for step in ["patch", "install_deps", "configure", "compile", "check", "package", "upload", "compile_extra", "uninstall_deps"]:
        factory.addStep(ShellCommand(command=["./buildshim", step, bparams],
                                     description=step,
                                     haltOnFailure=True))
    # Register the builder on every slave known for this OS type.
    self['builders'].append(
        BuilderConfig(name=buildername,
                      slavenames=self._os2slaves[sos],
                      factory=factory,
                      category=category))
    return factory
def create_factory(config, mode="incremental"):
    """Return a BuildFactory whose only step checks out config["repo"]."""
    checkout = Git(repourl=config["repo"], alwaysUseLatest=True, mode=mode)
    build_factory = BuildFactory()
    build_factory.addStep(checkout)
    return build_factory
def createLinuxCIFactory():
    """CI factory for Linux: clobber-clone, restore paket packages, build the solution."""
    checkout = Git(
        description="fetching sources",
        descriptionDone="sources",
        haltOnFailure=True,
        repourl=repositoryUri,
        mode='full',
        method='clobber',
    )
    restore_packages = ShellCommand(
        description="fetching packages",
        descriptionDone="packages",
        haltOnFailure=True,
        command=["mono", "paket.exe", "restore"],
        workdir=workingDirectory,
    )
    build_solution = ShellCommand(
        description="building",
        descriptionDone="build",
        haltOnFailure=True,
        command=["xbuild", "CorvusAlba.ToyFactory.Linux.sln"],
        workdir=workingDirectory,
    )
    ci_factory = BuildFactory()
    for step in (checkout, restore_packages, build_solution):
        ci_factory.addStep(step)
    return ci_factory
def stepSources(self):
    """Add a checkout step per codebase, then a conditional merge step per codebase."""

    def make_merge_condition(codebase):
        # The merge step runs only when the <codebase>_merge source stamp
        # actually names a repository.
        def condition(step):
            stamp = step.build.getSourceStamp(codebase + '_merge')
            return (stamp is not None) and (stamp.repository != '')
        return condition

    # First fetch every codebase.
    for codebase in self.codebases:
        self.addStep(
            Git(
                name='Fetch ' + codebase,
                workdir=codebase,
                repourl=Interpolate("%%(src:%s:repository)s" % codebase),
                codebase=codebase,
                mode='full',
                method='clean',
                retryFetch=True,
                retry=(300, 5),
                haltOnFailure=True,
                getDescription={'always': True},
            ))

    # Then pull the optional test branch into each checkout.
    for codebase in self.codebases:
        self.addStep(
            ShellCommand(
                name="Merge %s with test branch" % codebase,
                command=Interpolate(
                    'git pull -v "%%(src:%s_merge:repository)s" "%%(src:%s_merge:branch)s"'
                    % (codebase, codebase)),
                workdir=codebase,
                description='merge ' + codebase,
                descriptionDone='merge ' + codebase,
                doStepIf=make_merge_condition(codebase),
                haltOnFailure=True))
def update_repositories(platform):
    """Return the Git steps that refresh the local Nim and csources clones."""
    nim_update = Git(name="Update Local Nim Repository",
                     descriptionSuffix=' Local Nim Repository',
                     repourl=nim_git_url,
                     codebase=repositories[nim_git_url],
                     workdir=str(platform.nim_dir),
                     **common_git_parameters)
    # csources additionally forces the latest revision.
    csources_update = Git(name="Update Local CSources Repository",
                          descriptionSuffix=' Local CSources Repository',
                          repourl=csources_git_url,
                          codebase=repositories[csources_git_url],
                          workdir=str(platform.csources_dir),
                          alwaysUseLatest=True,
                          **common_git_parameters)
    return [nim_update, csources_update]
def configure(self, config_dict):
    """Populate *config_dict* with defaults plus the __spawner/__runner builders.

    Existing keys in config_dict are never overwritten (setdefault throughout).
    """
    c = self.config = config_dict
    # Respect $PORT for the web UI (container/PaaS style), default 8010.
    PORT = int(os.environ.get('PORT', 8010))
    c.setdefault(
        'buildbotURL',
        os.environ.get('buildbotURL', "http://localhost:%d/" % (PORT, )))
    # Optional external database URL.
    db_url = os.environ.get('BUILDBOT_DB_URL')
    if db_url is not None:
        self.config.setdefault('db', {'db_url': db_url})
    # minimalistic config to activate new web UI
    c.setdefault(
        'www',
        dict(
            port=PORT,
            plugins=dict(console_view=True, waterfall_view=True),
        ))
    c.setdefault('protocols', {'pb': {'port': 9989}})
    c.setdefault('builders', [])
    c.setdefault('schedulers', [])
    # Define the builder for the spawner job: shallow checkout then spawn.
    f = factory.BuildFactory()
    f.addStep(
        Git(repourl=Property("repository"),
            codebase=Property("codebase"),
            name='git',
            shallow=1))
    f.addStep(SpawnerStep())
    self.config['builders'].append(
        BuilderConfig(name='__spawner',
                      workernames=self.get_spawner_workers(),
                      collapseRequests=False,
                      factory=f))
    self.config['schedulers'].append(
        AnyCodeBaseScheduler(name='__spawner', builderNames=['__spawner']))
    # Define the builder for the runner job, fired via the Triggerable below.
    f = factory.BuildFactory()
    f.addStep(RunnerStep())
    self.config['builders'].append(
        BuilderConfig(name='__runner',
                      workernames=self.get_runner_workers(),
                      collapseRequests=False,
                      factory=f))
    self.config['schedulers'].append(
        Triggerable(
            name='__runner',
            builderNames=['__runner'],
            codebases={},
        ))
def get_buildsteps(working_dir):
    """Assemble the rollout build: checkout, unit tests under coverage, lint.

    NOTE(review): ``slave_python`` and ``env`` are built but never used in
    the visible steps — possibly leftovers from the commented-out virtualenv
    steps below; confirm before removing.
    """
    build_steps = BuildFactory()
    repo = Git(repourl="https://github.com/mlakewood/Buildbot-rollout.git", branch='master')
    virt_env = working_dir + '/virt/lib'
    slave_python = working_dir + '/virt/bin/python'
    env = {"LD_LIBRARY_PATH": virt_env}
    build_steps.addStep(repo)
    # # Remove the Virtual Environment from the last run
    # command = "rm -Rf %s/virt" % (working_dir)
    # build_steps.addStep(ShellCommand(workdir=working_dir, description="Clear Virtualenv", command=command.split(" ")))
    # # Create the virtual environment for the build
    # command = "virtualenv virt"
    # build_steps.addStep(ShellCommand(workdir=working_dir, description="Create Virtualenv", command=command.split(" ")))
    # # Pip install the python packages from requirements.txt
    # command = '%s/virt/bin/pip install -r requirements.txt' % working_dir
    # build_steps.addStep(ShellCommand(workdir=working_dir, description="Install packages", command=command.split(" ")))
    # Run the tests through coverage to get test coverage at the same time
    command = "../virt/bin/coverage run --include=src -m unittest discover -vf tests"
    build_steps.addStep(
        ShellCommand(workdir=working_dir + '/rollout',
                     description="rollout Unit Tests",
                     command=command.split(" ")))
    # Output the coverage report
    command = "virt/bin/coverage report --omit=*tests* -m"
    build_steps.addStep(
        ShellCommand(workdir=working_dir,
                     description="API Unit Test Coverage Report",
                     command=command.split(" ")))
    # Run pylint against the rollout package with its own rcfile.
    command = "pylint %s/rollout --rcfile=rollout/.pylintrc" % (working_dir)
    build_steps.addStep(
        PyLint(workdir=working_dir,
               description="API pylint",
               command=command.split(" ")))
    # jslint is passed as a single shell string (glob expansion needed).
    command = "./jslint js/*"
    build_steps.addStep(
        ShellCommand(workdir=working_dir + '/rollout',
                     description="Insight JSLint code",
                     command=command))
    return build_steps
def addRepository(self, factory, project=None, repository=None, branch=None, **kwargs):
    """Append a halting, tag-describing Git checkout step for *repository* to *factory*."""
    # Default to master when no branch was given.
    if not branch:
        branch = "master"
    kwargs["repourl"] = repository
    kwargs["branch"] = branch
    kwargs["codebase"] = project
    kwargs["haltOnFailure"] = True
    kwargs["flunkOnFailure"] = True
    kwargs["getDescription"] = {'tags': True, 'always': True}
    factory.addStep(Git(**kwargs))
def ros_debbuild(c, job_name, packages, url, distro, arch, rosdistro, version, machines, othermirror, keys, trigger_pkgs = None):
    """Create the deb-build factory for a released ROS repository.

    Cleans the workdir, checks out the release repo, refreshes the cowbuilder
    and, for each package, checks out its bloom debian branch and downloads
    the source-deb build script.

    NOTE(review): ``gbp_args``, ``deb_name`` and ``final_name`` are computed
    but not used in the visible portion of this function.
    """
    gbp_args = ['-uc', '-us', '--git-ignore-branch', '--git-ignore-new',
                '--git-verbose', '--git-dist='+distro, '--git-arch='+arch]
    f = BuildFactory()
    # Remove the build directory.
    f.addStep(
        RemoveDirectory(
            name = job_name+'-clean',
            dir = Interpolate('%(prop:workdir)s'),
            hideStepIf = success,
        )
    )
    # Check out the repository master branch, since releases are tagged and not branched
    f.addStep(
        Git(
            repourl = url,
            branch = 'master',
            alwaysUseLatest = True,  # this avoids broken builds when schedulers send wrong tag/rev
            mode = 'full'  # clean out old versions
        )
    )
    # Update the cowbuilder
    f.addStep(
        ShellCommand(
            command = ['cowbuilder-update.py', distro, arch] + keys,
            hideStepIf = success
        )
    )
    # Need to build each package in order
    for package in packages:
        debian_pkg = 'ros-'+rosdistro+'-'+package.replace('_','-')  # debian package name (ros-groovy-foo)
        branch_name = 'debian/'+debian_pkg+'_%(prop:release_version)s_'+distro  # release branch from bloom
        deb_name = debian_pkg+'_%(prop:release_version)s'+distro
        final_name = debian_pkg+'_%(prop:release_version)s-%(prop:datestamp)s'+distro+'_'+arch+'.deb'
        # Check out the proper tag. Use --force to delete changes from previous deb stamping
        f.addStep(
            ShellCommand(
                haltOnFailure = True,
                name = package+'-checkout',
                command = ['git', 'checkout', Interpolate(branch_name), '--force'],
                hideStepIf = success
            )
        )
        # Download script for building the source deb
        f.addStep(
            FileDownload(
                name = job_name+'-grab-build-source-deb-script',
                mastersrc = 'scripts/build_source_deb.py',
                slavedest = Interpolate('%(prop:workdir)s/build_source_deb.py'),
                # FIX: bare octal 0755 is a syntax error on Python 3;
                # 0o755 is the same mode and valid on Python 2.6+ and 3.x.
                mode = 0o755,
                hideStepIf = success
            )
        )
def Factory(slave, branch):
    """Build the AMX Mod X build factory for *slave*, checking out *branch*."""
    slave_info = Slaves[slave]
    slave_os = slave_info['os']  # renamed locally to avoid shadowing the os module
    paths = PathBuilder(slave_os)

    def buildbot_script(name):
        # All helper scripts live under support/buildbot in the checkout.
        return paths.join('support', 'buildbot', name)

    bootstrap_argv = ['perl', buildbot_script('bootstrap.pl')]
    build_argv = ['perl', buildbot_script('startbuild.pl')]
    upload_argv = [
        'perl',
        buildbot_script('package.pl'),
        paths.join('..', '..', 'amxxdrop_info'),
    ]
    if 'compiler' in slave_info:
        compiler = slave_info['compiler']
        bootstrap_argv.append(compiler)
        build_argv.append(compiler)

    bf = factory.BuildFactory()
    bf.addStep(Git(
        repourl = 'https://github.com/alliedmodders/amxmodx.git',
        branch = branch,
        mode = 'incremental',
        submodules = True
    ))
    bf.addStep(ShellCommand(
        name = "bootstrap",
        command = bootstrap_argv,
        description = "bootstrapping",
        descriptionDone = "bootstrapped"
    ))
    bf.addStep(ShellCommand(
        name = 'build',
        command = build_argv,
        description = 'compiling',
        descriptionDone = 'compiled',
        timeout = 2400
    ))
    bf.addStep(ShellCommand(
        name = 'upload',
        command = upload_argv,
        description = 'packaging',
        descriptionDone = 'uploaded'
    ))
    if slave_os == 'windows':
        # Windows builds also publish PDBs to the symbol store, serialized
        # by an exclusive lock.
        bf.addStep(ShellCommand(
            name = "symstore",
            command = ['perl', buildbot_script('symstore.pl')],
            description = "symstore",
            descriptionDone = "symstore",
            locks = [pdb_lock.access('exclusive')]
        ))
    return bf
def __new__(cls, **kwargs):
    """Dispatch to the concrete VCS step selected by the ``vcs_type`` keyword."""
    # pop() both reads and removes the key (KeyError if missing, as before).
    vcs_type: AnyStr = kwargs.pop("vcs_type")
    if vcs_type == "git":
        return Git(**kwargs)
    if vcs_type in ("hg", "mercurial"):
        return Mercurial(**kwargs)
    raise NotImplementedError(
        f"VCS step does not support type '{vcs_type}'")
def startVC(self, branch, revision, patch):
    """
    * If a branch name starts with /branches/, cut it off before referring
      to it in git commands (via mungeBranch).
    * If a "git_revision" property is provided in the latest Change, use it
      instead of the base revision number.
    """
    branch = mungeBranch(branch)
    stamp = self.build.getSourceStamp(self.getRepository())
    changes = stamp.changes
    if changes:
        props = changes[-1].properties
        try:
            revision = props["git_revision"]
        except KeyError:
            pass
    return Git.startVC(self, branch, revision, patch)
def make_vagrant_unit_factory(branch):
    """
    This returns the factory that runs the Vagrant unit tests.
    """
    checkout = Git(repourl="git://github.com/mitchellh/vagrant.git",
                   branch=branch,
                   mode="full",
                   method="fresh",
                   shallow=True)
    unit_factory = BuildFactory()
    for step in (checkout, buildsteps.Bundler(), buildsteps.UnitTests()):
        unit_factory.addStep(step)
    return unit_factory
def create_checkout_step(project):
    """Return the source-checkout step for *project*; only git is supported."""
    repo_type = project.repo_type
    # Guard clause: anything but git is rejected up front.
    if repo_type != "git":
        raise Exception("Repository type '" + str(repo_type) + "' not supported.")
    label = 'Checkout'
    authed_url = set_url_auth(project.repo_url, project.repo_user,
                              project.repo_password)
    return Git(repourl=authed_url,
               mode='incremental',
               submodules=True,
               name=label,
               description=label,
               descriptionDone=label,
               hideStepIf=ShowStepIfSuccessful)
def make_vagrant_acceptance_factory(branch):
    """
    This returns a build factory that knows how to run the Vagrant
    acceptance tests.
    """
    checkout = Git(repourl="git://github.com/mitchellh/vagrant.git",
                   branch=branch,
                   mode="full",
                   method="fresh",
                   shallow=True)
    acceptance_factory = BuildFactory()
    for step in (checkout,
                 buildsteps.Bundler(),
                 buildsteps.AcceptanceBoxes(),
                 buildsteps.AcceptanceConfig(),
                 buildsteps.AcceptanceTests()):
        acceptance_factory.addStep(step)
    return acceptance_factory
def _add_step_source_command(self, step):
    """
    Add a source step.
    """
    # Use 'incremental' when migrating to latest git step.
    git_kwargs = {
        'name': 'get code for ' + self._project.name,
        'mode': step.get('mode', 'incremental'),
        'repourl': self._project.repo,
        'branch': step.get('branch', None),
        'shallow': False,
        'config': step.get('config', None),
    }
    self.addStep(Git(**git_kwargs))
def update_utility_scripts(platform):
    """ Updates the utility scripts used by other steps. """
    # Start from the shared git defaults, but never abort the build if this
    # maintenance checkout fails.
    common_parameters = common_git_parameters.copy()
    common_parameters['haltOnFailure'] = False
    # NOTE(review): if common_git_parameters also defines hideStepIf /
    # flunkOnWarnings / flunkOnFailure, the explicit keywords below would
    # raise a duplicate-keyword TypeError — confirm against its definition.
    return [
        Git(name="Update Utility Scripts",
            descriptionSuffix=' Utility Scripts',
            repourl=scripts_git_url,
            codebase=repositories[scripts_git_url],
            workdir=str(platform.scripts_dir),
            alwaysUseLatest=True,
            hideStepIf=False,
            flunkOnWarnings=False,
            flunkOnFailure=False,
            **common_parameters)
    ]
def get_clone_step():
    """Return the single full-clobber clone step for the job's repository."""
    from buildbot.steps.source.git import Git
    from buildbot.process.properties import Interpolate
    # Verbose git tracing, but keep the (noisy) environment out of the log.
    git_env = {
        'GIT_CURL_VERBOSE': '1',
        'GIT_TRACE': '1'
    }
    clone = Git(description=['cloning'],
                descriptionDone=['clone'],
                repourl=Interpolate('%(prop:repository)s'),
                mode='full',
                shallow=True,
                method='clobber',
                retry=(1, 120),
                progress=True,
                env=git_env,
                logEnviron=False)
    return [clone]
def _make_factory_step_generator(project_name, project_git_uri, make_command=None, workdir="/srv/buildbot"):
    """Return the standard fetch / make / upload step list for a project.

    project_name     Label used in step names and the upload path.
    project_git_uri  Repository URL for the incremental checkout.
    make_command     Make target to run; when None a bare ``make`` (default
                     target) is run instead of the previously-broken
                     ``make None``.
    workdir          NOTE(review): currently unused by any step — confirm
                     whether callers rely on it.
    """
    # FIX: with the default make_command=None the old code produced the
    # command ['make', None] (ShellCommand rejects None) and the step name
    # "Executing <name>: 'make None'".
    if make_command is None:
        make_invocation = ["make"]
        shell_name = "Executing %s: 'make'" % project_name
    else:
        make_invocation = ["make", make_command]
        shell_name = "Executing %s: 'make %s'" % (project_name, make_command)
    make_factory_steps = [
        Git(
            name="Executing %s content fetch" % project_name,
            repourl=project_git_uri,
            mode='incremental'
        ),
        ShellCommand(
            name=shell_name,
            command=make_invocation
        ),
        DirectoryUpload(
            slavesrc="build",
            # FIX: %(kw:project_name)s requires the keyword to be passed to
            # Interpolate; it was missing, so rendering would fail.
            masterdest=Interpolate(
                "/srv/output/%(kw:project_name)s/%(src::branch)s",
                project_name=project_name,
            )
        )
    ]
    return make_factory_steps
def get_clone_step():
    """Return the shallow (depth 16) clobber clone step with tag description."""
    from buildbot.steps.source.git import Git
    from buildbot.steps.master import SetProperty
    from buildbot.process.properties import Interpolate
    # Verbose git tracing, but keep the (noisy) environment out of the log.
    git_env = {
        'GIT_CURL_VERBOSE': '1',
        'GIT_TRACE': '1'
    }
    clone = Git(description=['cloning'],
                descriptionDone=['clone'],
                hideStepIf=skipped_or_success,
                repourl=Interpolate('%(prop:repository)s'),
                mode='full',
                shallow=16,
                method='clobber',
                getDescription={'tags': True},
                retry=(1, 120),
                progress=True,
                env=git_env,
                logEnviron=False)
    return [clone]
def installTwistedTrunk():
    """Steps that check out Twisted trunk and pip-install it into the virtualenv."""
    checkout = Git(
        repourl=TWISTED_GIT,
        mode='full',
        method='fresh',
        codebase='twisted',
        workdir='Twisted',
        alwaysUseLatest=True,
    )
    install = ShellCommand(
        name='install-twisted-trunk',
        description=['installing', 'twisted', 'trunk'],
        descriptionDone=['install', 'twisted', 'trunk'],
        # Install strictly from the local wheel index, never from PyPI.
        command=[
            virtualenvBinary('pip'),
            "install",
            "--no-index",
            '--use-wheel',
            "-f", "http://data.hybridcluster.net/python/",
            "."
        ],
        workdir='Twisted',
        haltOnFailure=True,
    )
    return [checkout, install]
def getFactory(codebase, useSubmodules=True, mergeForward=False):
    """Factory that checks out *codebase*, optionally merges forward and
    refreshes submodules."""
    repourl = GITHUB + b"/" + codebase
    # Collect the steps first, then add them all at once.
    steps = [
        Git(repourl=repourl,
            submodules=useSubmodules,
            mode='full',
            method='fresh',
            codebase=codebase),
    ]
    if mergeForward:
        steps.append(MergeForward(repourl=repourl, codebase=codebase))
    if useSubmodules:
        # Work around http://trac.buildbot.net/ticket/2155
        steps.append(
            ShellCommand(command=["git", "submodule", "update", "--init"],
                         description=["updating", "git", "submodules"],
                         descriptionDone=["update", "git", "submodules"],
                         name="update-git-submodules"))
    checkout_factory = BuildFactory()
    for step in steps:
        checkout_factory.addStep(step)
    return checkout_factory
"-v", Interpolate("%(prop:builddir)s/build/:/build/:rw"), "-w", "/build/", "-e", Interpolate("PYTHONPATH=/build/%s", common.outputdir), "-e", Interpolate("LD_LIBRARY_PATH=/build/%s/lib", common.outputdir), "-e", Interpolate("PATH=/build/%s/bin", common.outputdir), Interpolate("%(prop:suite)s-%(prop:arch)s"), setarch, "/usr/bin/python3", "tests/build_samples.py" ] changelog_msg = Interpolate("Automatic build %(prop:buildnumber)s by builder %(prop:buildername)s") # Build steps shared by all builders. build_steps = [ Git(config.git_url, getDescription={'match': 'v*'}), # Patch a bug in 1.10.7 ShellCommand(command=["sed", "-i", "s/\"version\": SDK\\[\"PYTHONVERSION\"\\]\\[6:\\],/\"version\": SDK[\"PYTHONVERSION\"][6:].rstrip('dmu'),/", "makepanda/makepandacore.py"]), # Decode the version number from the dtool/PandaVersion.pp file. SetPropertyFromCommand("version", command=[ "python3", "makepanda/getversion.py", buildtype_flag], haltOnFailure=True), # Delete the built dir, if requested. ShellCommand(name="clean", command=get_clean_command(), haltOnFailure=False, doStepIf=lambda step:step.getProperty("clean", False)), # These steps fill in properties used to determine upstream_version. ] + whl_version_steps + [
def ros_branch_build(c, job_name, packages, url, branch, distro, arch, rosdistro, machines, othermirror, keys, trigger_pkgs=None):
    """Create the branch-build factory: changelog/release prep via catkin and
    bloom, then per-package debian-branch checkout and source-deb scripting.

    NOTE(review): ``gbp_args``, ``spec_list``, ``deb_name`` and ``final_name``
    are computed but not used in the visible portion of this function.
    """
    gbp_args = [
        '-uc', '-us', '--git-ignore-branch', '--git-ignore-new',
        '--git-verbose', '--git-dist=' + distro, '--git-arch=' + arch
    ]
    # Load the build spec that ships next to this module.
    with open(os.path.dirname(os.path.realpath(__file__)) + "/spec.yaml") as file:
        spec_list = yaml.full_load(file)
    f = BuildFactory()
    # Remove the build directory.
    f.addStep(
        RemoveDirectory(
            name=job_name + '-clean',
            dir=Interpolate('%(prop:workdir)s'),
            hideStepIf=success,
        ))
    # Check out the repository master branch, since releases are tagged and not branched
    f.addStep(
        Git(
            repourl=url,
            branch=branch,
            alwaysUseLatest=True,  # this avoids broken builds when schedulers send wrong tag/rev
            mode='full',  # clean out old versions
            getDescription={'tags': True}))
    # Update the cowbuilder
    f.addStep(
        ShellCommand(command=['cowbuilder-update.py', distro, arch] + keys,
                     hideStepIf=success))
    # Generate the changelog for the package
    f.addStep(
        ShellCommand(haltOnFailure=True,
                     name='catkin_generate_changelog',
                     command=['catkin_generate_changelog', '-y'],
                     descriptionDone=['catkin_generate_changelog']))
    # Add all files including untracked ones
    f.addStep(
        ShellCommand(haltOnFailure=True,
                     name='add_changelogs',
                     command=['git', 'add', '.'],
                     descriptionDone=['add_changelogs']))
    # Commit the changelog after updating it
    f.addStep(
        ShellCommand(haltOnFailure=True,
                     name='update_changelogs',
                     command=['git', 'commit', '-m', '\"Updated changelogs\"'],
                     descriptionDone=['update_changelogs']))
    # Prepare the release without pushing it
    f.addStep(
        ShellCommand(haltOnFailure=True,
                     name='catkin_prepare_release',
                     command=[
                         'catkin_prepare_release', '--bump', 'minor',
                         '--no-push', '-y'
                     ],
                     descriptionDone=['catkin_prepare_release']))
    # Generate the bloom release track, then the debian metadata.
    f.addStep(
        ShellCommand(
            haltOnFailure=True,
            name='git_bloom_generate_release',
            command=['git-bloom-generate', '-y', 'rosrelease', rosdistro],
        ))
    f.addStep(
        ShellCommand(
            haltOnFailure=True,
            name='git_bloom_generate_debian',
            command=[
                'git-bloom-generate', '-y', 'rosdebian', '-a', '-p', 'release',
                rosdistro
            ],
        ))
    # Get the tag number for the lastest commit
    f.addStep(
        SetPropertyFromCommand(
            command="git describe --tags",
            property="release_version",
            name='latest_tag',
        ))
    # Need to build each package in order
    for package in packages:
        debian_pkg = 'ros-' + rosdistro + '-' + package.replace(
            '_', '-')  # debian package name (ros-groovy-foo)
        branch_name = 'debian/' + debian_pkg + '_%(prop:release_version)s-0_' + distro
        deb_name = debian_pkg + '_%(prop:release_version)s-0' + distro
        final_name = debian_pkg + '_%(prop:release_version)s-%(prop:datestamp)s' + distro + '_' + arch + '.deb'
        # Check out the proper tag. Use --force to delete changes from previous deb stamping
        f.addStep(
            ShellCommand(haltOnFailure=True,
                         name=package + '-checkout',
                         command=[
                             'git', 'checkout', Interpolate(branch_name),
                             '--force'
                         ],
                         hideStepIf=success))
        # A hack for generating the debian folder so we could build the lastest commit of the specified branch
        # f.addStep(
        #     ShellCommand(
        #         haltOnFailure = True,
        #         name = package+'-bloom_generate',
        #         command= ['bloom-generate', 'rosdebian'],
        #         descriptionDone = ['bloom_generate', package]
        #     )
        # )
        # Download script for building the source deb
        f.addStep(
            FileDownload(
                name=job_name + '-grab-build-source-deb-script',
                mastersrc='scripts/build_source_deb.py',
                slavedest=Interpolate('%(prop:workdir)s/build_source_deb.py'),
                # FIX: bare octal 0755 is a syntax error on Python 3;
                # 0o755 is the same mode and valid on Python 2.6+ and 3.x.
                mode=0o755,
                hideStepIf=success))
def ros_testbuild(c, job_name, url, branch, distro, arch, rosdistro, machines, othermirror, keys, token=None):
    """Wire up a test build: a poller (PR-based when *token* is given,
    branch-based otherwise), a scheduler, and a cowbuilder test factory.
    Returns the generated project/builder name.
    """
    # Change source is either GitPoller or GitPRPoller
    # TODO: make this configurable for svn/etc
    project_name = ''
    if token:
        # With a GitHub token: poll pull requests and report status back.
        project_name = '_'.join([job_name, rosdistro, 'prtestbuild'])
        c['change_source'].append(
            GitPRPoller(
                name=rosdistro + "_pr_poller",
                repourl=url,  # this may pose some problems
                project=project_name,
                token=token,
                pollInterval=15))
        # parse repo_url git@github:author/repo.git to repoOwner, repoName
        r_owner, r_name = (url.split(':')[1])[:-4].split('/')
        c['status'].append(
            status.GitHubStatus(token=token, repoOwner=r_owner,
                                repoName=r_name))
    else:
        # Without a token: plain branch polling.
        project_name = '_'.join([job_name, rosdistro, 'testbuild'])
        c['change_source'].append(
            NamedGitPoller(repourl=url,
                           name=rosdistro,
                           branch=branch,
                           project=project_name))
    c['schedulers'].append(
        basic.SingleBranchScheduler(
            name=project_name,
            builderNames=[
                project_name,
            ],
            change_filter=ChangeFilter(project=project_name)))
    # Directory which will be bind-mounted
    binddir = '/tmp/' + project_name
    f = BuildFactory()
    # Remove any old crud in /tmp folder
    f.addStep(ShellCommand(command=['rm', '-rf', binddir], hideStepIf=success))
    # Check out repository (to /tmp)
    f.addStep(
        Git(repourl=util.Property('repository', default=url),
            branch=util.Property('branch', default=branch),
            alwaysUseLatest=True,
            mode='full',
            workdir=binddir + '/src/' + job_name))
    # Download testbuild.py script from master
    f.addStep(
        FileDownload(name=job_name + '-grab-script',
                     mastersrc='scripts/testbuild.py',
                     slavedest=Interpolate('%(prop:workdir)s/testbuild.py'),
                     hideStepIf=success))
    # Update the cowbuilder
    f.addStep(
        ShellCommand(command=['cowbuilder-update.py', distro, arch] + keys,
                     hideStepIf=success))
    # Make and run tests in a cowbuilder
    f.addStep(
        TestBuild(name=job_name + '-build',
                  command=[
                      'sudo', 'cowbuilder', '--execute',
                      Interpolate('%(prop:workdir)s/testbuild.py'),
                      '--distribution', distro, '--architecture', arch,
                      '--bindmounts', binddir,
                      '--basepath', '/var/cache/pbuilder/base-' + distro + '-' + arch + '.cow',
                      '--override-config', '--othermirror', othermirror,
                      '--', binddir, rosdistro
                  ],
                  logfiles={'tests': binddir + '/testresults'},
                  descriptionDone=['make and test', job_name]))
    c['builders'].append(
        BuilderConfig(name=project_name, slavenames=machines, factory=f))
    # return the name of the job created
    return project_name
def createPoclFactory( environ=None,
                       repository='https://github.com/pocl/pocl.git',
                       branch='master',
                       buildICD=True,
                       llvm_dir='/usr/',
                       icd_dir='/usr/',
                       tests_dir=None,
                       config_opts='',
                       pedantic=True,
                       tcedir='',
                       f=None,
                       cmake=False ):
    """
    Create a buildbot factory object that builds pocl.

    environ     Dictionary: The environment variables to append to the build.
                PATH and LD_LIBRARY_PATH will be added from llvm_dir (if given).
    repository  String: the repo to build from. defaults to pocl on github
    branch      String: the branch in 'repository' to build from. default to master
    buildICD    Bool: if false, the ICD extension is not built.
    llvm_dir    String: LLVM installation dir. I.e. without the 'bin/' or 'lib/'.
    icd_dir     String: ICD loader installation dir. We expect here to be a ICD loader
                that understands the OCL_ICD_VENDORS parameter, i.e. ocl-icd or
                patched Khronos loader.
                NOTE(review): icd_dir is currently unused in this function.
    tests_dir   String: Path where the external testsuite packages can be copied from.
                ('cp' is used, so they need to be on the same filesystem).
                NOTE: currently only a placeholder - not tested on the public buildbot
    config_opts String: extra options to pass to ./configure
    cmake       Bool: use CMake instead of autotools to build pocl
    """
    # FIX: environ previously defaulted to a shared mutable {}; use the
    # None-sentinel idiom (behavior is unchanged since it is only copied).
    if environ is None:
        environ = {}
    # Multiple slaves that pend on a lock seem to pend after they modified
    # environ, so always work on a copy.
    myenviron = environ.copy()

    if 'PATH' in myenviron:
        myenviron['PATH'] = llvm_dir+"/bin/:"+myenviron['PATH']+":${PATH}"
    else:
        myenviron['PATH'] = llvm_dir+"/bin/:${PATH}"
    if 'LD_LIBRARY_PATH' in myenviron:
        # FIX: this previously concatenated myenviron['PATH'] into
        # LD_LIBRARY_PATH instead of the existing LD_LIBRARY_PATH value.
        myenviron['LD_LIBRARY_PATH'] = llvm_dir+"/lib/:"+myenviron['LD_LIBRARY_PATH']+":${LD_LIBRARY_PATH}"
    else:
        myenviron['LD_LIBRARY_PATH'] = llvm_dir+"/lib/:${LD_LIBRARY_PATH}"

    if tcedir:
        myenviron['PATH'] = tcedir+"/bin/:"+myenviron['PATH']
        myenviron['LD_LIBRARY_PATH'] = tcedir+"/lib/:"+myenviron['LD_LIBRARY_PATH']

    if f is None:
        f = factory.BuildFactory()

    f.addStep(
        Git(
            repourl=repository,
            mode=Property('git_mode'),
            branch=branch
        )
    )

    if not cmake:
        f.addStep(ShellCommand(
            command=["./autogen.sh"],
            haltOnFailure=True,
            name="autoconfig",
            env=myenviron,
            description="autoconfiging",
            descriptionDone="autoconf"))

    if tests_dir is not None:
        # Copy external test packages into the tree; 'cp' complains if the
        # source is missing, so rc 1 is also treated as success (kludge).
        f.addStep(ShellCommand(
            haltOnFailure=True,
            command=["cp", "-u", tests_dir+AMD_test_pkg,
                     "examples/AMD/"+AMD_test_pkg],
            name="copy AMD",
            description="copying",
            descriptionDone="copied AMD",
            decodeRC={0:SUCCESS,1:SUCCESS}
        ))
        f.addStep(ShellCommand(
            haltOnFailure=False,
            command=["cp", "-u", tests_dir+ViennaCL_test_pkg,
                     "examples/ViennaCL/"+ViennaCL_test_pkg],
            name="copy ViennaCL",
            description="copying",
            descriptionDone="copied ViennaCL",
            decodeRC={0:SUCCESS,1:SUCCESS}
        ))

    if cmake:
        f.addStep(
            ShellCommand(
                command=["cmake", "."],
                env=myenviron,
                haltOnFailure=True,
                name="CMake",
                description="cmaking",
                descriptionDone="cmade"))
    else:
        configOpts=config_opts.split(' ')
        if pedantic==True:
            configOpts = configOpts + ['--enable-pedantic']
        if buildICD==False:
            configOpts = configOpts + ['--disable-icd']
        f.addStep(ShellCommand(
            command=["./configure"] + configOpts,
            haltOnFailure=True,
            name="configure pocl",
            env=myenviron,
            description="configureing",
            descriptionDone="configure"))

    f.addStep(Compile(env=myenviron))

    if tests_dir is not None and not cmake:
        f.addStep(ShellCommand(
            command=["make", "prepare-examples"],
            haltOnFailure=True,
            name="prepare examples",
            env=myenviron,
            description="preparing",
            descriptionDone="prepare"))

    if tcedir:
        f.addStep(ShellCommand(
            command=["./tools/scripts/run_tta_tests"],
            haltOnFailure=True,
            name="checks",
            env=myenviron,
            description="testing",
            descriptionDone="tests",
            logfiles={"test.log": "tests/testsuite.log"},
            timeout=60*60))
    else:
        f.addStep(ShellCommand(
            command=["make", "check"],
            haltOnFailure=True,
            name="checks",
            env=myenviron,
            description="testing",
            descriptionDone="tests",
            logfiles={"test.log": "tests/testsuite.log"},
            # blas3 alone takes 15-20 min.
            timeout=60*60))

    return f

#######
## LLVM/clang builder
##
# srcdir - LLVM source diectory
# builddir - LLVM build dir
# installdir - final LLVM install directory
# test_install_dir - the LLVM install dir pocl_build tests against
def createLLVMFactory(srcdir, builddir, installdir, test_install_dir):
    """Build LLVM+clang from SVN trunk, install to a test dir, run the pocl
    build against it, then install LLVM for real."""
    f = factory.BuildFactory()
    f.addStep(
        SVN(
            name='svn-llvm',
            mode='update',
            baseURL='http://llvm.org/svn/llvm-project/llvm/',
            defaultBranch='trunk',
            workdir=srcdir))
    f.addStep(
        SVN(
            name='svn-clang',
            mode='update',
            baseURL='http://llvm.org/svn/llvm-project/cfe/',
            defaultBranch='trunk',
            workdir='%s/tools/clang' % srcdir))
    f.addStep(
        ShellCommand(
            command=[
                '%s/configure' % srcdir,
                '--prefix=' + installdir,
                '--enable-optimized',
                '--enable-targets=host',
                '--enable-shared'],
            workdir=builddir,
            haltOnFailure=True,
            name="configure",
            descriptionDone='configure',
            description='configuring'))
    f.addStep(
        ShellCommand(
            command=['make', '-j', '4'],
            workdir=builddir,
            haltOnFailure=True,
            name="compile",
            descriptionDone='compile',
            description='compiling'))
    f.addStep(
        ShellCommand(
            command=['make', 'check'],
            workdir=builddir,
            name='check',
            descriptionDone='check',
            haltOnFailure=True,
            description='checking'))
    # Stage the install into the test dir that pocl builds against.
    f.addStep(
        ShellCommand(
            command=['make', 'install'],
            env={'DESTDIR':test_install_dir},
            workdir=builddir,
            haltOnFailure=True,
            name='install',
            descriptionDone='install',
            description='installing'))
    # Chain the pocl build onto this factory, pointed at the staged LLVM.
    f=createPoclFactory(
        llvm_dir=test_install_dir+installdir,
        f=f)
    # Finally install LLVM to its real prefix.
    f.addStep(
        ShellCommand(
            command=['make', 'install'],
            workdir=builddir,
            haltOnFailure=True,
            name='install',
            descriptionDone='install',
            description='installing'))
    return f
# what steps, and which slaves can execute them. Note that any particular build will # only take place on one slave. from buildbot.process.factory import BuildFactory from buildbot.steps.source.git import Git from buildbot.steps.shell import ShellCommand, Configure, Compile from buildbot.steps.master import MasterShellCommand from buildbot.steps.transfer import DirectoryUpload from buildbot.config import BuilderConfig c['builders'] = [] f = BuildFactory() # Check out the source. f.addStep(Git(repourl="git://github.com/Forkk/forkk.net", mode="incremental")) # f.addStep(ShellCommand(name="init-sandbox", # command=["cabal", "sandbox", "init"], # description=["initializing", "sandbox"], # descriptionDone=["initialize", "sandbox"])) # Build the site. f.addStep(Compile( name="compile", command=["ghc", "--make", "-threaded", "site.hs"], description=["compiling", "site", "builder"], descriptionDone=["compile", "site", "builder"] )) f.addStep(Compile( name="build-site",
def ros_docbuild(c, job_name, url, branch, rosdistro, machines, trigger_pkgs=None):
    """Create the doc-build job: check out the repo, build docs inside a
    docker-compose image, copy them out, clean up, and register the
    builder/scheduler. Returns the created builder name.
    """
    # Directory which will be bind-mounted
    binddir = job_name + '_' + rosdistro + '_docbuild'
    f = BuildFactory()
    # Remove any old crud in /tmp folder
    f.addStep(ShellCommand(command=['rm', '-rf', binddir], hideStepIf=success))
    # Check out repository (to /tmp)
    f.addStep(
        Git(repourl=url,
            branch=branch,
            alwaysUseLatest=True,
            mode='full'
            #workdir = binddir+'/src/'+job_name+'/'
            ))
    # Download the doc-build scripts and docker configs from the master.
    f.addStep(
        FileDownload(name=job_name + '-grab-script',
                     mastersrc='scripts/docbuild.py',
                     workerdest=Interpolate('%(prop:builddir)s/docbuild.py'),
                     hideStepIf=success))
    f.addStep(
        FileDownload(
            name=job_name + '-grab-script',
            mastersrc='scripts/unique_docker_doc.py',
            workerdest=Interpolate('%(prop:builddir)s/unique_docker_doc.py'),
            hideStepIf=success))
    f.addStep(
        FileDownload(
            name=job_name + '-grab-script',
            mastersrc='docker_components/Dockerfile_doc',
            workerdest=Interpolate('%(prop:builddir)s/Dockerfile_doc'),
            hideStepIf=success))
    f.addStep(
        FileDownload(name=job_name + '-grab-script',
                     mastersrc='docker_components/docker-compose-doc.yaml',
                     workerdest=Interpolate(
                         '%(prop:builddir)s/docker-compose-doc.yaml'),
                     hideStepIf=success))
    # Rewrite docker-compose-doc.yaml so container/image names are unique per job.
    f.addStep(
        ShellCommand(
            haltOnFailure=True,
            name=job_name + '-reedit-docker-compose',
            command=[
                'python', 'unique_docker_doc.py',
                Interpolate('%(prop:builddir)s/docker-compose-doc.yaml'),
                Interpolate(job_name)
            ],
            workdir=Interpolate('%(prop:builddir)s'),
            descriptionDone=['reedit docker-compose', job_name]))
    # Build docker image for creating doc
    f.addStep(
        ShellCommand(
            # haltOnFailure = True,
            name=job_name + '-create_docker',
            command=[
                'docker-compose', '-f',
                Interpolate('%(prop:builddir)s/docker-compose-doc.yaml'),
                'build'
            ],
            workdir=Interpolate('%(prop:builddir)s'),
            descriptionDone=['create_doc', job_name]))
    # Run the doc build inside the container.
    f.addStep(
        ShellCommand(
            # haltOnFailure=True,
            name=job_name + '-create_doc',
            command=[
                'docker', 'run',
                # '-v', 'ros-repository-docker_deb_repository:/home/package',
                '--name', Interpolate('doc_' + job_name),
                Interpolate('scalable-doc:' + job_name),
                'python', '/root/docbuild.py', '/tmp/', rosdistro
            ],
            descriptionDone=['create doc', job_name]))
    # Copy the generated docs out of the (stopped) container.
    f.addStep(
        ShellCommand(name=job_name + '-copydocs',
                     command=[
                         'docker', 'cp',
                         Interpolate('doc_' + job_name + ':' + '/tmp/docs'),
                         '/docs'
                     ],
                     workdir=Interpolate('%(prop:builddir)s'),
                     descriptionDone=['copydocs', job_name]))
    # rm container
    f.addStep(
        ShellCommand(name=job_name + '-rm_container',
                     command=['docker', 'rm', Interpolate('doc_' + job_name)],
                     descriptionDone=['remove docker container', job_name]))
    # rm image
    f.addStep(
        ShellCommand(name=job_name + '-rm_image',
                     command=[
                         'docker', 'image', 'rm',
                         Interpolate('scalable-doc:' + job_name)
                     ],
                     descriptionDone=['remove docker image', job_name]))
    # Trigger downstream doc jobs if needed
    if trigger_pkgs != None:
        f.addStep(
            Trigger(schedulerNames=[
                t.replace('_', '-') + '-' + rosdistro + '-doctrigger'
                for t in trigger_pkgs
            ],
                    waitForFinish=False,
                    alwaysRun=True))
    # Create trigger so other jobs can fire this one
    c['schedulers'].append(
        triggerable.Triggerable(name=job_name.replace('_', '-') + '-' +
                                rosdistro + '-doctrigger',
                                builderNames=[
                                    job_name + '_' + rosdistro + '_docbuild',
                                ]))
    # Add builder config
    c['builders'].append(
        BuilderConfig(name=job_name + '_' + rosdistro + '_docbuild',
                      workernames=machines,
                      factory=f))
    # return the name of the job created
    return job_name + '_' + rosdistro + '_docbuild'
name='download customization configuration', haltOnFailure=True, command=[ 'wget', util.Interpolate( 'http://192.168.250.160:6543/build_order/%(prop:build_order_id)s/configuration' ), '-O', util.Interpolate('%(prop:workdir)s\\customization_config.zip') ], workdir=util.Interpolate('%(prop:workdir)s')) # 1. check out the source ngq_get_src_bld_step = Git(name='checkout ngq', repourl=ngq_repourl, branch=ngq_branch_for_everyday, mode='full', method='clobber', submodules=True, workdir='ngq_src', timeout=1800) ngq_get_src_rel_step = Git( name='checkout ngq', repourl=ngq_repourl, branch=util.Interpolate('%(prop:release_ngq_branch)s'), mode='full', method='clobber', submodules=True, workdir='ngq_src', timeout=1800) installer_get_step = Git(name='checkout installer', repourl=installer_repourl, branch='4ngq_autobuild',
def __init__(self, **kwargs):
    """Forward all keyword arguments unchanged to the Git step initializer."""
    Git.__init__(self, **kwargs)
def startVC(self, branch, revision, patch):
    """Start the checkout, refusing to build anything on an unsafe (PR) branch."""
    if isSafeBranch(branch):
        return Git.startVC(self, branch, revision, patch)
    raise ValueError("No building on pull requests.")