def start(self):
    """Render the stored waitForFinish value for this build, run the
    trigger, then restore the unrendered value for later reuse."""
    rendered = self.build.getProperties().render(self.myWaitForFinish)
    # The base Trigger only honours a plain bool, so coerce the rendered
    # value via its string form.
    self.waitForFinish = str(rendered) == "True"
    Trigger.start(self)
    # Put the original (possibly renderable) value back on the step.
    self.waitForFinish = self.myWaitForFinish
def __init__(self, schedulers_and_properties, **kwargs):
    """Trigger step whose real scheduler set comes from
    schedulers_and_properties; the "dummy" scheduler name below is only
    a placeholder for the base-class constructor."""
    self.schedulers_and_properties = schedulers_and_properties
    Trigger.__init__(
        self,
        schedulerNames=["dummy"],
        waitForFinish=True,
        updateSourceStamp=False,
        **kwargs)
def __init__(self, scheduler, **kwargs):
    """Blocking trigger of a single named scheduler."""
    # Default the step name unless the caller supplied one.
    kwargs.setdefault('name', 'trigger')
    self.config = None
    Trigger.__init__(self, schedulerNames=[scheduler],
                     waitForFinish=True, **kwargs)
def __init__(self, partitionFunction, **kwargs):
    """Trigger step that fans out builds according to *partitionFunction*.

    Raises a buildbot config error when no partition function is given.
    """
    if not partitionFunction:
        # Fixed typo in the user-facing message: "parition" -> "partition".
        config.error(
            "You must specify a partition function for the partition trigger"
        )
    self.partitionFunction = partitionFunction
    Trigger.__init__(self, **kwargs)
def start(self):
    """Start the trigger, first attaching an HTML log that links to each
    triggered builder.  Python 2 code: uses StringIO and ``print >>``."""
    # Add a log linking to the triggered builders, if supplied.
    if self.triggeredBuilders:
        logText = StringIO.StringIO()
        for builder in self.triggeredBuilders:
            # NOTE(review): the ../../ relative path assumes the waterfall
            # URL layout of this master -- confirm against the web root.
            print >>logText, ('<b><a href="../../../../../../%s">%s'
                              '</a></b><br>' % (builder, builder))
        self.addHTMLLog('triggered builds', str(logText.getvalue()))
    # Dispatch to the super class.
    Trigger.start(self)
def trigger(self, job_or_jobs):
    """Adds a build step which triggers execution of another job.

    Accepts either a single job or a list of jobs; a single job is
    normalized to a one-element list so both cases share one code path.
    """
    # isinstance() is the idiomatic type test and also accepts subclasses
    # of list, unlike the original `type(x) is list` comparison.
    jobs = job_or_jobs if isinstance(job_or_jobs, list) else [job_or_jobs]
    self.add_step(Trigger(
        schedulerNames=[scheduler_name(j, 'trigger') for j in jobs],
        waitForFinish=True))
def test_get_last_step_build_requests_return_brids():
    """get_last_step_build_requests() must surface the brids recorded by
    the most recent Trigger step of the build."""
    reduce_step = steps.ReduceTriggerProperties(reducefn=lambda: None)
    trigger_step = Trigger(waitForFinish=True, schedulerNames=["NA"])
    sentinel = object()
    trigger_step.brids = sentinel

    class FakeBuild:
        executedSteps = [trigger_step, reduce_step]

    reduce_step.build = FakeBuild()
    assert reduce_step.get_last_step_build_requests() is sentinel
def test_get_last_step_build_requests_return_brids():
    """The reduce step reports the brids of the last executed Trigger."""
    step = steps.ReduceTriggerProperties(reducefn=lambda: None)
    trig = Trigger(waitForFinish=True, schedulerNames=["NA"])
    trig.brids = object()

    class FakeBuild:
        executedSteps = [trig, step]

    step.build = FakeBuild()
    assert step.get_last_step_build_requests() is trig.brids
def __init__(self, scheduler, **kwargs):
    """Blocking trigger of *scheduler*; any failure halts and flunks."""
    kwargs.setdefault('name', 'trigger')
    self.config = None
    Trigger.__init__(
        self,
        schedulerNames=[scheduler],
        waitForFinish=True,
        haltOnFailure=True,
        flunkOnFailure=True,
        updateSourceStamp=False,
        alwaysUseLatest=False,
        sourceStamps=[],
        **kwargs)
def __init__(self, scheduler, **kwargs):
    """Trigger *scheduler* and wait for it; failures stop the build."""
    if "name" not in kwargs:
        kwargs['name'] = 'trigger'
    self.config = None
    Trigger.__init__(self,
                     schedulerNames=[scheduler],
                     waitForFinish=True,
                     haltOnFailure=True,
                     flunkOnFailure=True,
                     sourceStamps=[],
                     updateSourceStamp=False,
                     alwaysUseLatest=False,
                     **kwargs)
def __init__(self, scheduler: str, jobs: list[dict[str, str]], **kwargs):
    """Blocking trigger of *scheduler* for the given job descriptions."""
    kwargs.setdefault("name", "trigger")
    self.jobs = jobs
    self.config = None
    Trigger.__init__(
        self,
        schedulerNames=[scheduler],
        waitForFinish=True,
        haltOnFailure=True,
        flunkOnFailure=True,
        updateSourceStamp=False,
        alwaysUseLatest=False,
        sourceStamps=[],
        **kwargs,
    )
# make_dolphin_debian_build: builds Dolphin on Debian/Linux -- git checkout,
# cmake+ninja configure/build, unit tests, then (for "fifoci_golden" modes)
# triggers the FifoCI linux scheduler: "pr-fifoci-lin" for PR builds, or
# TriggerIfBranch on master otherwise.  `mode` is a comma-separated flag list.
# NOTE(review): original line breaks were lost in extraction; the source is
# left byte-identical below rather than re-indented by guesswork.
def make_dolphin_debian_build(mode="normal"): f = BuildFactory() mode = mode.split(",") debug = "debug" in mode pr = "pr" in mode fifoci_golden = "fifoci_golden" in mode f.addStep( GitNoBranch(repourl="https://github.com/dolphin-emu/dolphin.git", progress=True, mode="incremental")) f.addStep( ShellCommand(command=["mkdir", "-p", "build"], logEnviron=False, description="mkbuilddir", descriptionDone="mkbuilddir")) cmake_cmd = ["cmake", "..", "-GNinja"] if debug: cmake_cmd.append("-DFASTLOG=ON") cmake_cmd.append("-DDISTRIBUTOR=dolphin-emu.org") f.addStep( ShellCommand(command=cmake_cmd, workdir="build/build", description="configuring", descriptionDone="configure", haltOnFailure=True)) f.addStep( Compile(command=["ninja"], workdir="build/build", description="building", descriptionDone="build", haltOnFailure=True)) f.addStep( Test(command=["ninja", "unittests"], workdir="build/build", description="testing", descriptionDone="test", haltOnFailure=True)) if fifoci_golden: if pr: f.addStep( Trigger(schedulerNames=["pr-fifoci-lin"], copy_properties=[ "pr_id", "repo", "headrev", "branchname", "shortrev" ])) else: f.addStep( TriggerIfBranch(schedulerNames=["fifoci-lin"], branchList=["master"], copy_properties=["shortrev"])) return f
def build_triggerer(c, distro, arch, rosdistro, machines, ordered_repos):
    """Create a builder that fires the per-repo debtrigger schedulers in
    waves: each inner list of *ordered_repos* is triggered together
    (waitForFinish=True), so wave N finishes before wave N+1 starts.

    Registers the builder in c['builders'] and returns its name.
    """
    f = BuildFactory()
    for repos in ordered_repos:
        f.addStep(
            Trigger(schedulerNames=[
                # Scheduler names use '-' while job names use '_'.
                t.replace('_', '-') + '-' + rosdistro + '-' + distro + '-' +
                arch + '-debtrigger' for t in repos
            ],
                    waitForFinish=True,
                    alwaysRun=True))
    # Add to builders
    c['builders'].append(
        BuilderConfig(name='build_triggerer' + '_' + rosdistro + '_' +
                      distro + '_' + arch,
                      slavenames=machines,
                      factory=f))
    return 'build_triggerer' + '_' + rosdistro + '_' + distro + '_' + arch
def start(self):
    """Run the trigger only when some change carries a 'branchname'
    property listed in self.branchList; otherwise mark the step as a
    successful no-op."""
    branch_match = any(
        ch.properties.getProperty("branchname", None) in self.branchList
        for ss in self.build.getAllSourceStamps() if ss.changes
        for ch in ss.changes)
    if branch_match:
        return Trigger.start(self)
    # No matching branch: finish successfully with an explanatory note.
    self.running = True
    self.step_status.setText(["(no branch match)"])
    self.end(SUCCESS)
    return
def _add_step_sequential_group(self, step):
    """
    Run all builders from group one after another.

    *step* is the raw step-configuration dict; one waiting Trigger is
    added per member builder of the target group.
    """
    set_properties = step.get('set_properties', {})
    copy_properties = step.get('copy_properties', [])
    self._update_github_status(step, set_properties)
    target_group = step['target']
    for target in self._project.getGroupMembersBuilderNames(target_group):
        # Use a distinct name: the original rebound `step` inside the
        # loop, shadowing the configuration-dict parameter.
        trigger_step = Trigger(
            schedulerNames=[target],
            waitForFinish=True,
            updateSourceStamp=True,
            set_properties=set_properties,
            copy_properties=copy_properties,
        )
        self.addStep(trigger_step)
def _add_step_parallel_group(self, step):
    """
    Run all builders from group in parallel.
    """
    properties_to_set = step.get('set_properties', {})
    properties_to_copy = step.get('copy_properties', [])
    self._update_github_status(step, properties_to_set)
    target_group = step['target']
    members = self._project.getGroupMembersBuilderNames(target_group)
    # A single Trigger with every member fires them all at once.
    parallel_trigger = Trigger(
        schedulerNames=members,
        waitForFinish=True,
        updateSourceStamp=True,
        set_properties=properties_to_set,
        copy_properties=properties_to_copy,
        haltOnFailure=True,
        flunkOnFailure=True,
    )
    self.addStep(parallel_trigger)
def __init__(self, target_builder_names, steps):
    """Factory that triggers all target builders, forwarding a
    'force_<name>' property for every optional step description."""
    super(ParallelFactory, self).__init__()
    copy_properties = ['test']
    for step in steps:
        name = step.get('name', None)
        if not name:
            continue
        optional = step.get('optional', False)
        if optional:
            copy_properties.append('force_' + name)
    # NOTE(review): source formatting was lost; the single Trigger below is
    # assumed to sit AFTER the loop (one trigger total, consistent with the
    # class name ParallelFactory) -- confirm against the original file.
    self.addStep(
        Trigger(
            schedulerNames=target_builder_names,
            waitForFinish=True,
            updateSourceStamp=True,
            set_properties={},
            copy_properties=copy_properties,
            haltOnFailure=True,
            flunkOnFailure=True,
        ))
# launchpad_debbuild: fetches a source package with dget, builds it inside a
# cowbuilder chroot for (distro, arch), uploads the resulting .deb binaries to
# the master, includes them in the apt repo via reprepro-include.bash, and
# optionally fires follow-up triggers without waiting (waitForFinish=False).
# Registers the builder in c['builders'] and returns its name.
# NOTE(review): Python 2 code (octal literal 0777); original line breaks were
# lost in extraction, so the source is left byte-identical below.
def launchpad_debbuild(c, package, version, binaries, url, distro, arch, machines, othermirror, keys, trigger_names = None): f = BuildFactory() # Grab the source package f.addStep( ShellCommand( haltOnFailure = True, name = package+'-getsourcedeb', command = ['dget', '--allow-unauthenticated', url] ) ) # download hooks f.addStep( FileDownload( name = package+'-grab-hooks', mastersrc = 'hooks/D05deps', slavedest = Interpolate('%(prop:workdir)s/hooks/D05deps'), hideStepIf = success, mode = 0777 # make this executable for the cowbuilder ) ) # Update the cowbuilder f.addStep( ShellCommand( command = ['cowbuilder-update.py', distro, arch] + keys, hideStepIf = success ) ) # Build it f.addStep( ShellCommand( haltOnFailure = True, name = package+'-build', command = ['cowbuilder', '--build', package+'_'+version+'.dsc', '--distribution', distro, '--architecture', arch, '--basepath', '/var/cache/pbuilder/base-'+distro+'-'+arch+'.cow', '--buildresult', Interpolate('%(prop:workdir)s'), '--hookdir', Interpolate('%(prop:workdir)s/hooks'), '--othermirror', othermirror, '--override-config'], env = {'DIST': distro}, descriptionDone = ['built binary debs', ] ) ) # Upload debs for deb_arch in binaries.keys(): for deb_name in binaries[deb_arch]: debian_pkg = deb_name+'_'+version+'_'+deb_arch+'.deb' f.addStep( FileUpload( name = deb_name+'-upload', slavesrc = Interpolate('%(prop:workdir)s/'+debian_pkg), masterdest = Interpolate('binarydebs/'+debian_pkg), hideStepIf = success ) ) # Add the binarydeb using reprepro updater script on master f.addStep( MasterShellCommand( name = deb_name+'-include', command = ['reprepro-include.bash', deb_name, Interpolate(debian_pkg), distro, deb_arch], descriptionDone = ['updated in apt', debian_pkg] ) ) # Trigger if needed if trigger_names != None: f.addStep( Trigger(schedulerNames = trigger_names, waitForFinish = False) ) # Add to builders c['builders'].append( BuilderConfig( name = package+'_'+distro+'_'+arch+'_debbuild', slavenames = machines, factory 
= f ) ) # return name of builder created return package+'_'+distro+'_'+arch+'_debbuild'
# ros_docbuild: checks out a ROS repo into a bind-mounted /tmp dir, runs
# docbuild.py inside a cowbuilder chroot, uploads the generated docs to the
# master, then fires per-package doctrigger schedulers (waitForFinish=False)
# and registers the matching Triggerable scheduler and builder config.
# Returns the created builder's name.
# NOTE(review): original line breaks were lost in extraction, so the source
# is left byte-identical below.
def ros_docbuild(c, job_name, url, branch, distro, arch, rosdistro, machines, othermirror, keys, trigger_pkgs = None): # Directory which will be bind-mounted binddir = '/tmp/'+job_name+'_'+rosdistro+'_docbuild' f = BuildFactory() # Remove any old crud in /tmp folder f.addStep( ShellCommand( command = ['rm', '-rf', binddir], hideStepIf = success ) ) # Check out repository (to /tmp) f.addStep( Git( repourl = url, branch = branch, alwaysUseLatest = True, mode = 'full', workdir = binddir+'/src/'+job_name+'/' ) ) # Download testbuild.py script from master f.addStep( FileDownload( name = job_name+'-grab-script', mastersrc = 'scripts/docbuild.py', slavedest = Interpolate('%(prop:workdir)s/docbuild.py'), hideStepIf = success ) ) # Update the cowbuilder f.addStep( ShellCommand( command = ['cowbuilder-update.py', distro, arch] + keys, hideStepIf = success ) ) # Build docs in a cowbuilder f.addStep( ShellCommand( haltOnFailure = True, name = job_name+'-docbuild', command = ['cowbuilder', '--execute', Interpolate('%(prop:workdir)s/docbuild.py'), '--distribution', distro, '--architecture', arch, '--bindmounts', binddir, '--basepath', '/var/cache/pbuilder/base-'+distro+'-'+arch+'.cow', '--override-config', '--othermirror', othermirror, '--', binddir, rosdistro], env = {'DIST': distro}, descriptionDone = ['built docs', ] ) ) # Upload docs to master f.addStep( DirectoryUpload( name = job_name+'-upload', slavesrc = binddir+'/docs', masterdest = 'docs/' + rosdistro, hideStepIf = success ) ) # Trigger if needed if trigger_pkgs != None: f.addStep( Trigger( schedulerNames = [t.replace('_','-')+'-'+rosdistro+'-doctrigger' for t in trigger_pkgs], waitForFinish = False, alwaysRun=True ) ) # Create trigger c['schedulers'].append( triggerable.Triggerable( name = job_name.replace('_','-')+'-'+rosdistro+'-doctrigger', builderNames = [job_name+'_'+rosdistro+'_docbuild',] ) ) # Add builder config c['builders'].append( BuilderConfig( name = job_name+'_'+rosdistro+'_docbuild', slavenames = 
machines, factory = f ) ) # return the name of the job created return job_name+'_'+rosdistro+'_docbuild'
# NOTE(review): continuation fragment -- the enclosing function's `def` is
# not in this view, so the code is left byte-identical.  It finishes a deb
# build: cleans the working copy, fires per-package debtrigger schedulers
# (waitForFinish=False), registers the Triggerable scheduler, and starts the
# BuilderConfig registration (completed outside this fragment).
descriptionDone = ['updated in apt', package] ) ) f.addStep( ShellCommand( name = package+'-clean', command = ['rm', '-rf', 'debian/'+debian_pkg], hideStepIf = success ) ) # Trigger if needed if trigger_pkgs != None: f.addStep( Trigger( schedulerNames = [t.replace('_','-')+'-'+rosdistro+'-'+distro+'-'+arch+'-debtrigger' for t in trigger_pkgs], waitForFinish = False, alwaysRun=True ) ) # Create trigger c['schedulers'].append( triggerable.Triggerable( name = job_name.replace('_','-')+'-'+rosdistro+'-'+distro+'-'+arch+'-debtrigger', builderNames = [job_name+'_'+rosdistro+'_'+distro+'_'+arch+'_debbuild',] ) ) # Add to builders c['builders'].append( BuilderConfig( name = job_name+'_'+rosdistro+'_'+distro+'_'+arch+'_debbuild', properties = {'release_version' : version},
# createTarballFactory: builds a Lustre release tarball from a Gerrit
# checkout (autogen -> configure --enable-dist -> make dist), uploads it to
# the master, then fires the "package-builders" scheduler without waiting,
# forwarding the 'tarball' and 'category' properties.
# NOTE(review): original line breaks were lost in extraction, so the source
# (including its own docstring) is left byte-identical below.
def createTarballFactory(gerrit_repo): """ Generates a build factory for a tarball generating builder. Returns: BuildFactory: Build factory with steps for generating tarballs. """ bf = util.BuildFactory() # are we building a tag or a patchset? bf.addStep(SetProperty( property='category', value=buildCategory, hideStepIf=hide_except_error)) # update dependencies bf.addStep(ShellCommand( command=dependencyCommand, decodeRC={0 : SUCCESS, 1 : FAILURE, 2 : WARNINGS, 3 : SKIPPED }, haltOnFailure=True, logEnviron=False, doStepIf=do_step_installdeps, hideStepIf=hide_if_skipped, description=["installing dependencies"], descriptionDone=["installed dependencies"])) # Pull the patch from Gerrit bf.addStep(Gerrit( repourl=gerrit_repo, workdir="build/lustre", mode="full", method="fresh", retry=[60,60], timeout=3600, logEnviron=False, getDescription=True, haltOnFailure=True, description=["cloning"], descriptionDone=["cloned"])) # make tarball bf.addStep(ShellCommand( command=['sh', './autogen.sh'], haltOnFailure=True, description=["autogen"], descriptionDone=["autogen"], workdir="build/lustre")) bf.addStep(Configure( command=['./configure', '--enable-dist'], workdir="build/lustre")) bf.addStep(ShellCommand( command=['make', 'dist'], haltOnFailure=True, description=["making dist"], descriptionDone=["make dist"], workdir="build/lustre")) # upload it to the master bf.addStep(SetPropertyFromCommand( command=['sh', '-c', 'echo *.tar.gz'], property='tarball', workdir="build/lustre", hideStepIf=hide_except_error, haltOnFailure=True)) bf.addStep(FileUpload( workdir="build/lustre", slavesrc=util.Interpolate("%(prop:tarball)s"), masterdest=tarballMasterDest, url=tarballUrl)) # trigger our builders to generate packages bf.addStep(Trigger( schedulerNames=["package-builders"], copy_properties=['tarball', 'category'], waitForFinish=False)) return bf
# getGlobalBuilders: returns the fetch builder (git checkout + optional
# patches) that triggers the per-platform scheduler (waiting), plus -- when a
# nightly schedule exists -- a nightly builder that re-triggers the same
# scheduler with clean/package properties forced on.  The extra no-wait
# "Updating source stamp" trigger tells the nightly scheduler which revision
# was fetched.
# NOTE(review): original line breaks were lost in extraction, so the source
# is left byte-identical below.
def getGlobalBuilders(self): ret = list() f = factory.BuildFactory() f.useProgress = False f.addStep( Git( mode="incremental", workdir=".", repourl=self.giturl, branch=self.branch, locks=[self.lock_src.access("exclusive")], )) if len(self.PATCHES): f.addStep( steps.Patch( patches=self.PATCHES, workdir=".", locks=[self.lock_src.access("exclusive")], )) if self.nightly is not None: # Trigger nightly scheduler to let it know the source stamp f.addStep( Trigger(name="Updating source stamp", hideStepIf=(lambda r, s: r == results.SUCCESS), schedulerNames=["nightly-{0}".format(self.name)])) f.addStep( Trigger(name="Building all platforms", schedulerNames=[self.name], copy_properties=['got_revision', 'clean', 'package'], updateSourceStamp=True, waitForFinish=True)) ret.append( BuilderConfig( name="fetch-{0}".format(self.name), # This is specific workername='fetcher', workerbuilddir="/data/src/{0}".format(self.name), factory=f, tags=["fetch"], )) if self.nightly is not None: f = factory.BuildFactory() f.addStep( Trigger(schedulerNames=[self.name], copy_properties=['got_revision'], updateSourceStamp=True, waitForFinish=True, set_properties={ 'clean': True, 'package': True })) ret.append( BuilderConfig( name="nightly-{0}".format(self.name), # TODO: Fix this workername='fetcher', workerbuilddir="/data/triggers/nightly-{0}".format( self.name), factory=f, tags=["nightly"], locks=[self.lock_src.access("counting")])) return ret
# Minimal stand-in for a Build object: exposes only the executedSteps list
# that the code under test inspects.  `step` comes from the enclosing test
# scope (this class is a fragment of a larger test function).
class FakeBuild:
    executedSteps = [
        Trigger(waitForFinish=False, schedulerNames=["NA"]),
        step
    ]
def run(self):
    """Load this step's configuration, then delegate to Trigger.run.

    Runs as a Twisted inlineCallbacks-style generator.
    """
    self.config = yield self.getStepConfig()
    result = yield Trigger.run(self)
    defer.returnValue(result)
def __init__(self, name, triggeredBuilders=None, **kwargs):
    """Named trigger step that remembers which builders it triggers.

    triggeredBuilders: list of builder names (defaults to an empty list).
    """
    # Use a None sentinel instead of a mutable default argument, which
    # would be shared across every instance of the step.
    if triggeredBuilders is None:
        triggeredBuilders = []
    Trigger.__init__(self, **kwargs)
    self.name = name
    self.triggeredBuilders = triggeredBuilders
    self.addFactoryArguments(name = name, triggeredBuilders = triggeredBuilders)
def __init__(self, waitForFinish=False, *args, **kwargs):
    """Keep the raw (possibly renderable) waitForFinish value so start()
    can render it per build before triggering."""
    self.myWaitForFinish = waitForFinish
    Trigger.__init__(self, waitForFinish=waitForFinish, *args, **kwargs)
# ros_debbuild (cowbuilder variant): for each package, checks out its bloom
# release branch, builds source and binary debs with git-buildpackage inside
# a cowbuilder chroot, date-stamps the changelog, uploads debs to the master
# and includes them in apt via reprepro.  Afterwards it fires per-package
# debtrigger schedulers without waiting, registers the Triggerable scheduler
# and the BuilderConfig, and returns the builder name.
# NOTE(review): Python 2 code (octal literal 0777); original line breaks were
# lost in extraction (the "Check out the proper tag." comment is split across
# source lines), so the source is left byte-identical below.
def ros_debbuild(c, job_name, packages, url, distro, arch, rosdistro, version, machines, othermirror, keys, trigger_pkgs=None): gbp_args = [ '-uc', '-us', '--git-ignore-branch', '--git-ignore-new', '--git-verbose', '--git-dist=' + distro, '--git-arch=' + arch ] f = BuildFactory() # Remove the build directory. f.addStep( RemoveDirectory( name=job_name + '-clean', dir=Interpolate('%(prop:workdir)s'), hideStepIf=success, )) # Check out the repository master branch, since releases are tagged and not branched f.addStep( Git( repourl=url, branch='master', alwaysUseLatest= True, # this avoids broken builds when schedulers send wrong tag/rev mode='full' # clean out old versions )) # Update the cowbuilder f.addStep( ShellCommand(command=['cowbuilder-update.py', distro, arch] + keys, hideStepIf=success)) # Need to build each package in order for package in packages: debian_pkg = 'ros-' + rosdistro + '-' + package.replace( '_', '-') # debian package name (ros-groovy-foo) branch_name = 'debian/' + debian_pkg + '_%(prop:release_version)s_' + distro # release branch from bloom deb_name = debian_pkg + '_%(prop:release_version)s' + distro final_name = debian_pkg + '_%(prop:release_version)s-%(prop:datestamp)s' + distro + '_' + arch + '.deb' # Check out the proper tag. 
Use --force to delete changes from previous deb stamping f.addStep( ShellCommand(haltOnFailure=True, name=package + '-checkout', command=[ 'git', 'checkout', Interpolate(branch_name), '--force' ], hideStepIf=success)) # Build the source deb f.addStep( ShellCommand(haltOnFailure=True, name=package + '-buildsource', command=['git-buildpackage', '-S'] + gbp_args, descriptionDone=['sourcedeb', package])) # Upload sourcedeb to master (currently we are not actually syncing these with a public repo) f.addStep( FileUpload( name=package + '-uploadsource', slavesrc=Interpolate('%(prop:workdir)s/' + deb_name + '.dsc'), masterdest=Interpolate('sourcedebs/' + deb_name + '.dsc'), hideStepIf=success)) # Stamp the changelog, in a similar fashion to the ROS buildfarm f.addStep( SetPropertyFromCommand(command="date +%Y%m%d-%H%M-%z", property="datestamp", name=package + '-getstamp', hideStepIf=success)) f.addStep( ShellCommand( haltOnFailure=True, name=package + '-stampdeb', command=[ 'git-dch', '-a', '--ignore-branch', '--verbose', '-N', Interpolate('%(prop:release_version)s-%(prop:datestamp)s' + distro) ], descriptionDone=[ 'stamped changelog', Interpolate('%(prop:release_version)s'), Interpolate('%(prop:datestamp)s') ])) # download hooks f.addStep( FileDownload( name=package + '-grab-hooks', mastersrc='hooks/D05deps', slavedest=Interpolate('%(prop:workdir)s/hooks/D05deps'), hideStepIf=success, mode=0777 # make this executable for the cowbuilder )) # build the binary from the git working copy f.addStep( ShellCommand( haltOnFailure=True, name=package + '-buildbinary', command=[ 'git-buildpackage', '--git-pbuilder', '--git-export=WC', Interpolate('--git-export-dir=%(prop:workdir)s') ] + gbp_args, env={ 'DIST': distro, 'GIT_PBUILDER_OPTIONS': Interpolate( '--hookdir %(prop:workdir)s/hooks --override-config'), 'OTHERMIRROR': othermirror }, descriptionDone=['binarydeb', package])) # Upload binarydeb to master f.addStep( FileUpload(name=package + '-uploadbinary', 
slavesrc=Interpolate('%(prop:workdir)s/' + final_name), masterdest=Interpolate('binarydebs/' + final_name), hideStepIf=success)) # Add the binarydeb using reprepro updater script on master f.addStep( MasterShellCommand(name=package + 'includedeb', command=[ 'reprepro-include.bash', debian_pkg, Interpolate(final_name), distro, arch ], descriptionDone=['updated in apt', package])) # Trigger if needed if trigger_pkgs != None: f.addStep( Trigger(schedulerNames=[ t.replace('_', '-') + '-' + rosdistro + '-' + distro + '-' + arch + '-debtrigger' for t in trigger_pkgs ], waitForFinish=False, alwaysRun=True)) # Create trigger c['schedulers'].append( triggerable.Triggerable( name=job_name.replace('_', '-') + '-' + rosdistro + '-' + distro + '-' + arch + '-debtrigger', builderNames=[ job_name + '_' + rosdistro + '_' + distro + '_' + arch + '_debbuild', ])) # Add to builders c['builders'].append( BuilderConfig(name=job_name + '_' + rosdistro + '_' + distro + '_' + arch + '_debbuild', properties={'release_version': version}, slavenames=machines, factory=f)) # return name of builder created return job_name + '_' + rosdistro + '_' + distro + '_' + arch + '_debbuild'
def __init__(self, scheduler, **kwargs):
    """Trigger the given scheduler and block until it finishes."""
    kwargs.setdefault('name', 'trigger')
    self.config = None
    Trigger.__init__(self,
                     schedulerNames=[scheduler],
                     waitForFinish=True,
                     **kwargs)
# Top-level builder wiring for JaCoCo: the ITs factory builds the trunk with
# Maven against JDK 1.5/1.6/1.7 and then fires the 'JaCoCo_Deploy' scheduler
# (default Trigger semantics: does not wait); the deploy factory runs `mvn
# deploy` and scp's the doc zips to SourceForge.
# NOTE(review): original line breaks were lost in extraction, so the source
# is left byte-identical below.
builders.append(MyBuilderConfig( name = "System_Update", factory = system_update_factory )) # JoCoCo # TODO: IBM JDK, different Maven versions jacoco_its_factory = BuildFactory(steps = [ SVN(svnurl = "http://eclemma.svn.sourceforge.net/svnroot/eclemma/jacoco/trunk"), Maven(description = "jdk 1.5", command = "mvn -V -e --file org.jacoco.build/pom.xml clean install -Djdk.version=1.5"), Maven(description = "jdk 1.6", command = "mvn -V -e --file org.jacoco.build/pom.xml clean install -Djdk.version=1.6"), Maven(description = "jdk 1.7", command = "mvn -V -e --file org.jacoco.build/pom.xml clean install -Djdk.version=1.7"), TreeSize(), Trigger(schedulerNames = ['JaCoCo_Deploy']) ]) # TODO: site at SourceForge jacoco_deploy_factory = BuildFactory(steps = [ SVN(svnurl = "http://eclemma.svn.sourceforge.net/svnroot/eclemma/jacoco/trunk"), Maven(description = "deploy", command = "mvn -V -e --file org.jacoco.build/pom.xml clean deploy -Djdk.version=1.5"), ShellCommand(command = "scp org.jacoco.doc/target/jacoco-*.zip mandrikov,[email protected]:/home/frs/project/e/ec/eclemma/07_JaCoCo/trunk"), TreeSize() ]) builders.append(MyBuilderConfig( name = "JaCoCo_ITs_Linux", factory = jacoco_its_factory ))
# Module-level step definitions for an eAthena build: configure (32/64-bit),
# compile targets (all/txt/sql, plus a VS10 rebuild on Windows), a blocking
# trigger of the Ubuntu 12.04 x64 test scheduler, and gdb-wrapped map-server
# test runs that flag [Error]/[Warning] output.
# NOTE(review): the first token closes a call whose start is outside this
# view, and original line breaks were lost; the source is left byte-identical.
logEnviron=False) step_configure = Configure(command=["./configure"], logEnviron=False) step_configure_64 = Configure(command=["./configure", "--enable-64bit"], logEnviron=False) step_compile_all = Compile(command=["make", "clean", "all"], logEnviron=False) step_compile_txt = Compile(command=["make", "clean", "txt"], description="compiling txt", descriptionDone="compile txt", logEnviron=False) step_compile_sql = Compile(command=["make", "clean", "sql"], description="compiling sql", descriptionDone="compile sql", logEnviron=False) step_compile_VS10 = Compile( command=["devenv.com", "eAthena-10.sln", "/REBUILD"], logEnviron=False) step_trigger_tests = Trigger( waitForFinish=True, schedulerNames=["test-Ubuntu-12.04-x64-scheduler"]) step_test_txt = Test(command=[ "gdb", "map-server", "-ex=run --run-once", "-ex=bt full", "-ex=kill", "-ex=quit" ], warningPattern="\[(Error|Warning)\]", description="testing txt", descriptionDone="test txt", logEnviron=False) step_test_sql = Test(command=[ "gdb", "map-server_sql", "-ex=run --run-once", "-ex=bt full", "-ex=kill", "-ex=quit" ], warningPattern="\[(Error|Warning)\]", description="testing sql", descriptionDone="test sql",
# ros_debbuild (docker variant): per package, checks out the bloom release
# branch, downloads docker-compose/Dockerfile/build scripts from the master,
# builds the deb inside a docker image, publishes it to the local apt
# repository container, and removes the container/image.  Then it fires
# per-package debtrigger schedulers without waiting, registers the
# Triggerable scheduler and BuilderConfig (with locks), and returns the
# builder name.
# NOTE(review): original line breaks were lost in extraction (the "Check out
# the proper tag." comment is split across source lines), so the source is
# left byte-identical below.
def ros_debbuild(c, job_name, packages, url, distro, arch, rosdistro, version, machines, othermirror, keys, trigger_pkgs=None, locks=[]): gbp_args = [ '-uc', '-us', '--git-ignore-branch', '--git-ignore-new', '--git-verbose', '--git-dist=' + distro, '--git-arch=' + arch ] f = BuildFactory() # Remove the build directory. f.addStep( RemoveDirectory( name=job_name + '-clean', dir=Interpolate('%(prop:builddir)s'), hideStepIf=success, )) # Check out the repository master branch, since releases are tagged and not branched f.addStep( Git( repourl=url, branch='master', alwaysUseLatest= True, # this avoids broken builds when schedulers send wrong tag/rev mode='full' # clean out old versions )) # Need to build each package in order for package in packages: debian_pkg = 'ros-' + rosdistro + '-' + package.replace( '_', '-') # debian package name (ros-groovy-foo) branch_name = 'debian/' + debian_pkg + '_%(prop:release_version)s_' + distro # release branch from bloom debian/ros-groovy-foo_0.0.1_kinetic deb_name = debian_pkg + '_%(prop:release_version)s' + distro final_name = debian_pkg + '_%(prop:release_version)s' + distro + '_' + arch + '.deb' # final_name = debian_pkg+'_%(prop:release_version)s-%(prop:datestamp)s'+distro+'_'+arch+'.deb' # Check out the proper tag. 
Use --force to delete changes from previous deb stamping f.addStep( ShellCommand(haltOnFailure=True, name=package + '-checkout', command=[ 'git', 'checkout', Interpolate(branch_name), '--force' ], hideStepIf=success)) # Download script for building the source deb f.addStep( FileDownload(name=job_name + '-grab-docker-compose-debian', mastersrc='docker_components/docker-compose-deb.yaml', workerdest=Interpolate( '%(prop:builddir)s/docker-compose-deb.yaml'), mode=0o755, hideStepIf=success)) f.addStep( FileDownload( name=job_name + '-grab-dockerfile-debian', mastersrc='docker_components/Dockerfile_deb', workerdest=Interpolate('%(prop:builddir)s/Dockerfile_deb'), mode=0o755, hideStepIf=success)) f.addStep( FileDownload( name=job_name + '-grab-build-deb-shell', mastersrc='shell/builddebian.sh', workerdest=Interpolate('%(prop:builddir)s/builddebian.sh'), mode=0o755, hideStepIf=success)) f.addStep( FileDownload(name=job_name + '-grab-rosdep-private', mastersrc='docker_components/rosdep_private.yaml', workerdest=Interpolate( '%(prop:builddir)s/rosdep_private.yaml'), mode=0o755, hideStepIf=success)) f.addStep( FileDownload(name=job_name + '-grab-rosdep-private', mastersrc='scripts/unique_docker_deb.py', workerdest=Interpolate( '%(prop:builddir)s/unique_docker_deb.py'), mode=0o755, hideStepIf=success)) # reedit docker-compose-deb.yaml f.addStep( ShellCommand( haltOnFailure=True, name=package + '-reedit-docker-compose', command=[ 'python', 'unique_docker_deb.py', Interpolate('%(prop:builddir)s/docker-compose-deb.yaml'), Interpolate(package) ], workdir=Interpolate('%(prop:builddir)s'), descriptionDone=['reedit docker-compose', package])) # Build docker image for creating debian f.addStep( ShellCommand( #haltOnFailure = True, name=package + '-buildsource', command=[ 'docker-compose', '-f', Interpolate('%(prop:builddir)s/docker-compose-deb.yaml'), 'build' ], workdir=Interpolate('%(prop:builddir)s'), descriptionDone=['sourcedeb', package])) # build debian package f.addStep( 
ShellCommand( #haltOnFailure=True, name=job_name + '-build', command=[ 'docker', 'run', '-v', 'ros-buildbot-docker_deb_repository:/home/package', '--name', Interpolate(package), Interpolate('scalable-deb:' + package), 'bash', '/usr/local/sbin/builddeb.sh' ], descriptionDone=['build debian package', job_name])) # update to local repository f.addStep( ShellCommand(name=job_name + '-upload', command=[ 'docker', 'exec', '-e', Interpolate('package=' + debian_pkg + '*'), 'local-repository', 'bash', '/tmp/debian-upload.sh' ], descriptionDone=['release package', job_name])) # rm container f.addStep( ShellCommand(name=job_name + '-rm_container', command=['docker', 'rm', Interpolate(package)], descriptionDone=['remove docker container', job_name])) # rm image f.addStep( ShellCommand(name=job_name + '-rm_image', command=[ 'docker', 'image', 'rm', Interpolate('scalable-deb:' + package) ], descriptionDone=['remove docker image', job_name])) # Trigger if needed if trigger_pkgs != None: f.addStep( Trigger(schedulerNames=[ t.replace('_', '-') + '-' + rosdistro + '-' + distro + '-' + arch + '-debtrigger' for t in trigger_pkgs ], waitForFinish=False, alwaysRun=True)) # Create trigger c['schedulers'].append( triggerable.Triggerable( name=job_name.replace('_', '-') + '-' + rosdistro + '-' + distro + '-' + arch + '-debtrigger', builderNames=[ job_name + '_' + rosdistro + '_' + distro + '_' + arch + '_debbuild', ])) # Add to builders c['builders'].append( BuilderConfig(name=job_name + '_' + rosdistro + '_' + distro + '_' + arch + '_debbuild', properties={'release_version': version}, workernames=machines, factory=f, locks=locks)) # return name of builder created return job_name + '_' + rosdistro + '_' + distro + '_' + arch + '_debbuild'
# ros_docbuild (docker variant): checks out the repo, downloads the doc-build
# scripts/Dockerfile from the master, builds a per-job docker image, runs
# docbuild.py inside it, copies the generated docs out, cleans up the
# container and image, then fires per-package doctrigger schedulers without
# waiting and registers the Triggerable scheduler and BuilderConfig.
# Returns the created builder's name.
# NOTE(review): original line breaks were lost in extraction, so the source
# is left byte-identical below.
def ros_docbuild(c, job_name, url, branch, rosdistro, machines, trigger_pkgs=None): # Directory which will be bind-mounted binddir = job_name + '_' + rosdistro + '_docbuild' f = BuildFactory() # Remove any old crud in /tmp folder f.addStep(ShellCommand(command=['rm', '-rf', binddir], hideStepIf=success)) # Check out repository (to /tmp) f.addStep( Git(repourl=url, branch=branch, alwaysUseLatest=True, mode='full' #workdir = binddir+'/src/'+job_name+'/' )) # Download script from master f.addStep( FileDownload(name=job_name + '-grab-script', mastersrc='scripts/docbuild.py', workerdest=Interpolate('%(prop:builddir)s/docbuild.py'), hideStepIf=success)) f.addStep( FileDownload( name=job_name + '-grab-script', mastersrc='scripts/unique_docker_doc.py', workerdest=Interpolate('%(prop:builddir)s/unique_docker_doc.py'), hideStepIf=success)) f.addStep( FileDownload( name=job_name + '-grab-script', mastersrc='docker_components/Dockerfile_doc', workerdest=Interpolate('%(prop:builddir)s/Dockerfile_doc'), hideStepIf=success)) f.addStep( FileDownload(name=job_name + '-grab-script', mastersrc='docker_components/docker-compose-doc.yaml', workerdest=Interpolate( '%(prop:builddir)s/docker-compose-doc.yaml'), hideStepIf=success)) # reedit docker-compose-doc.yaml f.addStep( ShellCommand( haltOnFailure=True, name=job_name + '-reedit-docker-compose', command=[ 'python', 'unique_docker_doc.py', Interpolate('%(prop:builddir)s/docker-compose-doc.yaml'), Interpolate(job_name) ], workdir=Interpolate('%(prop:builddir)s'), descriptionDone=['reedit docker-compose', job_name])) # Build docker image for creating doc f.addStep( ShellCommand( # haltOnFailure = True, name=job_name + '-create_docker', command=[ 'docker-compose', '-f', Interpolate('%(prop:builddir)s/docker-compose-doc.yaml'), 'build' ], workdir=Interpolate('%(prop:builddir)s'), descriptionDone=['create_doc', job_name])) # creating doc in docker f.addStep( ShellCommand( # haltOnFailure=True, name=job_name + '-create_doc', command=[ 
'docker', 'run', # '-v', 'ros-repository-docker_deb_repository:/home/package', '--name', Interpolate('doc_' + job_name), Interpolate('scalable-doc:' + job_name), 'python', '/root/docbuild.py', '/tmp/', rosdistro ], descriptionDone=['create doc', job_name])) f.addStep( ShellCommand(name=job_name + '-copydocs', command=[ 'docker', 'cp', Interpolate('doc_' + job_name + ':' + '/tmp/docs'), '/docs' ], workdir=Interpolate('%(prop:builddir)s'), descriptionDone=['copydocs', job_name])) # rm container f.addStep( ShellCommand(name=job_name + '-rm_container', command=['docker', 'rm', Interpolate('doc_' + job_name)], descriptionDone=['remove docker container', job_name])) # rm image f.addStep( ShellCommand(name=job_name + '-rm_image', command=[ 'docker', 'image', 'rm', Interpolate('scalable-doc:' + job_name) ], descriptionDone=['remove docker image', job_name])) # Trigger if needed if trigger_pkgs != None: f.addStep( Trigger(schedulerNames=[ t.replace('_', '-') + '-' + rosdistro + '-doctrigger' for t in trigger_pkgs ], waitForFinish=False, alwaysRun=True)) # Create trigger c['schedulers'].append( triggerable.Triggerable(name=job_name.replace('_', '-') + '-' + rosdistro + '-doctrigger', builderNames=[ job_name + '_' + rosdistro + '_docbuild', ])) # Add builder config c['builders'].append( BuilderConfig(name=job_name + '_' + rosdistro + '_docbuild', workernames=machines, factory=f)) # return the name of the job created return job_name + '_' + rosdistro + '_docbuild'
# NOTE(review): fragment -- the first statements belong to a method whose
# `def` is outside this view, and original line breaks were lost, so the
# source is left byte-identical.  It uploads the built tarball to the master,
# then fires the blocking "sourceforge-upload" scheduler, forwarding the
# file/path/datestamp properties; `_step_AdditionalProperties` is a no-op
# hook and `_step_Archive` starts building a tar command (body may continue
# past this view).  Python 2 code (octal literal 0644).
# upload the tarball (to the build master) self.addStep( FileUpload(slavesrc=WithProperties("%(filename)s"), masterdest=WithProperties("%(filename)s"), mode=0644, haltOnFailure=True)) # tell the master to upload the file to sourceforge self.addStep( Trigger(schedulerNames=["sourceforge-upload"], waitForFinish=True, set_properties={ "masterdir": WithProperties("%(masterdir)s"), "target-os": WithProperties("%(target-os)s"), "filename": WithProperties("%(filename)s"), "destname": WithProperties("%(destname)s"), "datestamp": WithProperties("%(datestamp:-)s"), "path": WithProperties("%(path:-)s"), "is_nightly": WithProperties("%(is_nightly:-)s") })) def _step_AdditionalProperties(self): pass def _step_Archive(self): command = [ "tar", "cjf", WithProperties("../../%(filename)s"), "--owner", "0", "--group", "0", "--checkpoint", "--exclude=.svn", "." ]
# makeHomebrewRecipeCreationFactory: builds the Flocker sdist, uploads it to
# the master, generates a Homebrew recipe from the sdist URL via
# admin.homebrew, uploads the .rb file, then fires the
# 'trigger/created-homebrew' scheduler without waiting, forwarding
# merge_target so the Mac test build merges against the same revision.
# NOTE(review): original line breaks were lost in extraction, so the source
# (including its own docstring) is left byte-identical below.
def makeHomebrewRecipeCreationFactory(): """Create the Homebrew recipe from a source distribution. This is separate to the recipe testing, to allow it to be done on a non-Mac platform. Once complete, this triggers the Mac testing. """ factory = getFlockerFactory(python="python2.7") factory.addSteps(installDependencies()) factory.addSteps(check_version()) # Create suitable names for files hosted on Buildbot master. sdist_file = Interpolate('Flocker-%(prop:version)s.tar.gz') sdist_path = resultPath('python', discriminator=sdist_file) sdist_url = resultURL('python', discriminator=sdist_file, isAbsolute=True) recipe_file = Interpolate('Flocker%(kw:revision)s.rb', revision=flockerRevision) recipe_path = resultPath('homebrew', discriminator=recipe_file) recipe_url = resultURL('homebrew', discriminator=recipe_file) # Build source distribution factory.addStep( ShellCommand(name='build-sdist', description=["building", "sdist"], descriptionDone=["build", "sdist"], command=[ virtualenvBinary('python'), "setup.py", "sdist", ], haltOnFailure=True)) # Upload source distribution to master factory.addStep( FileUpload( name='upload-sdist', slavesrc=Interpolate('dist/Flocker-%(prop:version)s.tar.gz'), masterdest=sdist_path, url=sdist_url, )) # Build Homebrew recipe from source distribution URL factory.addStep( ShellCommand( name='make-homebrew-recipe', description=["building", "recipe"], descriptionDone=["build", "recipe"], command=[ virtualenvBinary('python'), "-m", "admin.homebrew", # We use the Git commit SHA for the version here, since # admin.homebrew doesn't handle the version generated by # arbitrary commits. 
"--flocker-version", flockerRevision, "--sdist", sdist_url, "--output-file", recipe_file ], haltOnFailure=True)) # Upload new .rb file to BuildBot master factory.addStep( FileUpload( name='upload-homebrew-recipe', slavesrc=recipe_file, masterdest=recipe_path, url=recipe_url, )) # Trigger the homebrew-test build factory.addStep( Trigger( name='trigger/created-homebrew', schedulerNames=['trigger/created-homebrew'], set_properties={ # lint_revision is the commit that was merged against, # if we merged forward, so have the triggered build # merge against it as well. 'merge_target': Property('lint_revision') }, updateSourceStamp=True, waitForFinish=False, )) return factory
def trigger(**kwargs):
    """Factory for a Trigger step that waits for completion by default.

    Any explicit waitForFinish in *kwargs* overrides the default.
    """
    return Trigger(waitForFinish=kwargs.pop("waitForFinish", True), **kwargs)