def test_compare_different_name(self):
    """Two Triggerable schedulers that differ only in ``name`` must not
    compare equal.

    Note: uses ``assertNotEqual``; the ``assertNotEquals`` alias is
    deprecated and was removed in Python 3.12.
    """
    codebase = self.defaultCodebase()
    self.assertNotEqual(
        triggerable.Triggerable(
            name="runtests-Dependencies-1",
            codebases=codebase,
            builderNames=["runtests"]),
        triggerable.Triggerable(
            name="runtests-Dependencies",
            codebases=codebase,
            builderNames=["runtests"]),
    )
def test_compare_different_builders(self):
    """Two Triggerable schedulers that differ only in ``builderNames``
    must not compare equal.

    Note: uses ``assertNotEqual``; the ``assertNotEquals`` alias is
    deprecated and was removed in Python 3.12.
    """
    repository = self.defaultCodebase()
    self.assertNotEqual(
        triggerable.Triggerable(
            name="runtests-Dependencies",
            codebases=repository,
            builderNames=["runtests"]),
        triggerable.Triggerable(
            name="runtests-Dependencies",
            codebases=repository,
            builderNames=["runtests-1"]),
    )
def get_schedulers():
    """Yield all schedulers for the phased build pipeline.

    Produces, in order:
      * one ``AnyBranchScheduler`` per phase (the first phase reacts to
        tracked-project source changes; later phases react to the change
        category emitted by the preceding phase),
      * one ``Triggerable`` per stage of each phase (normal and, where
        present, experimental stages), and
      * a final ``AnyBranchScheduler`` for the 'Validated Build' builder.
    """
    first_phase = phases[0]
    last_phase = phases[-1]  # NOTE(review): appears unused in this function — confirm before removing

    # The VC scheduler initiates the first phase.
    # Each phase, in turn, triggers the next phase,
    # until the final phase.
    for phase in phases:
        # Default filter: react to changes in tracked projects.
        my_filter = ChangeFilter(filter_fn=_tracked_projects)
        phase_name = 'phase%d' % phase['number']
        if phase == first_phase:
            # First phase waits longer for the tree to settle.
            delay=120
        else:
            delay=15
            # NOTE(review): reconstructed from flattened source — later phases
            # appear to be driven by category rather than tracked projects;
            # confirm this assignment belongs inside the else branch.
            my_filter = ChangeFilter(category = phase_name)
        yield basic.AnyBranchScheduler(
            name = phase_name,
            treeStableTimer=delay,
            change_filter = my_filter,
            builderNames = ['phase%d - %s' % (phase['number'], phase['name'])],
        )

    # Add triggers for initiating the builds in each phase.
    for phase in phases:
        # Split the phase builders into separate stages.
        split_stages = get_phase_stages(phase)
        for i, (normal, experimental) in enumerate(split_stages):
            # Add the normal trigger, if used.
            if normal:
                yield triggerable.Triggerable(
                    name = 'phase%d-stage%d' % (phase['number'], i),
                    builderNames = [b['name'] for b in normal])
            # Add the experimental trigger, if used.
            if experimental:
                yield triggerable.Triggerable(
                    name = 'phase%d-stage%d-experimental' % (phase['number'], i),
                    builderNames = [b['name'] for b in experimental])

    # Add a final trigger to trigger the validated build scheduler.
    phase_name = 'GoodBuild'
    my_filter = ChangeFilter(category = phase_name)
    yield basic.AnyBranchScheduler(
        name = phase_name,
        treeStableTimer=5,
        builderNames = ['Validated Build',],
        change_filter = my_filter,
    )
def makeScheduler(self, **kwargs):
    """Create and attach a Triggerable ('n' -> builder 'b'), driving its
    waiter timer from the test clock. Extra kwargs go to Triggerable."""
    scheduler = triggerable.Triggerable(name='n', builderNames=['b'], **kwargs)
    attached = self.attachScheduler(scheduler, self.OBJECTID)
    # Replace the real reactor so tests can advance time deterministically.
    attached._updateWaiters._reactor = self.clock
    return attached
def makeScheduler(self, overrideBuildsetMethods=False, **kwargs):
    """Create and attach a Triggerable ('n' -> builder 'b', builder id 77).

    ``overrideBuildsetMethods`` is forwarded to ``attachScheduler``;
    extra kwargs go to the Triggerable constructor.
    """
    # The fake database needs the target builder to exist.
    self.master.db.insertTestData([fakedb.Builder(id=77, name='b')])
    scheduler = triggerable.Triggerable(name='n', builderNames=['b'], **kwargs)
    return self.attachScheduler(
        scheduler,
        self.OBJECTID,
        self.SCHEDULERID,
        overrideBuildsetMethods=overrideBuildsetMethods)
def makeScheduler(self, **kwargs):
    """Create and attach a Triggerable ('n' -> builder 'b', builder id 77),
    with its waiter timer driven by the test clock."""
    # Register builder 77 in the fake database first.
    self.master.db.insertTestData([fakedb.Builder(id=77, name='b')])
    trig = triggerable.Triggerable(name='n', builderNames=['b'], **kwargs)
    attached = self.attachScheduler(trig, self.OBJECTID)
    # Swap in the test clock so timing is controllable from the test.
    attached._updateWaiters._reactor = self.clock
    return attached
def test_compare_different_codebases(self):
    """Two Triggerable schedulers that differ only in ``codebases`` must
    not compare equal.

    Note: uses ``assertNotEqual``; the ``assertNotEquals`` alias is
    deprecated and was removed in Python 3.12.
    """
    codebase = self.defaultCodebase()
    codebase_buildbot = {'buildbot': {
        'repository': 'git://github.com/buildbot/buildbot.git',
        'display_repository': 'git://github.com/buildbot/buildbot.git',
        'display_name': 'buildbot',
        'branch': 'master',
        'project': 'general'
    }
    }
    self.assertNotEqual(
        triggerable.Triggerable(
            name="runtests-Dependencies",
            codebases=codebase,
            builderNames=["runtests"]),
        triggerable.Triggerable(
            name="runtests-Dependencies",
            codebases=codebase_buildbot,
            builderNames=["runtests"]),
    )
def getXmippSchedulers(groupId):
    """Return the Triggerable and Force schedulers for one Xmipp group.

    Every scheduler name gets a Triggerable plus a matching
    ForceScheduler; PROD and SDEVEL groups add extra scheduler names.
    """
    names = [XMIPP_TESTS + groupId]
    if groupId == PROD_GROUP_ID:
        names += [XMIPP_INSTALL_PREFIX + groupId]
    if groupId == SDEVEL_GROUP_ID:
        # SDEVEL additionally runs the bundle tests and the docs build.
        names.append(XMIPP_BUNDLE_TESTS + groupId)
        names.append(XMIPP_DOCS_PREFIX + groupId)

    result = []
    for schedName in names:
        result.append(
            triggerable.Triggerable(name=schedName, builderNames=[schedName]))
        # Each builder also gets a manual (forced) entry point.
        result.append(
            ForceScheduler(name='%s%s' % (FORCE_BUILDER_PREFIX, schedName),
                           builderNames=[schedName]))
    return result
spec_list["local_repo_path"], 's3://{s3_bucket}'.format( s3_bucket=spec_list["s3_bucket"]) ])) # Trigger if needed # if trigger_pkgs != None: # f.addStep( # Trigger( # schedulerNames = [t.replace('_','-')+'-'+rosdistro+'-'+distro+'-'+arch+'-debtrigger' for t in trigger_pkgs], # waitForFinish = False, # alwaysRun=True # ) # ) # Create trigger c['schedulers'].append( triggerable.Triggerable( name=job_name.replace('_', '-') + '-' + rosdistro + '-' + distro + '-' + arch + '-debtrigger', builderNames=[ job_name + '_' + rosdistro + '_' + distro + '_' + arch + '_debbuild', ])) # Add to builders c['builders'].append( BuilderConfig(name=job_name + '_' + rosdistro + '_' + distro + '_' + arch + '_debbuild', slavenames=machines, factory=f)) # return name of builder created return job_name + '_' + rosdistro + '_' + distro + '_' + arch + '_debbuild'
def ros_docbuild(c, job_name, url, branch, rosdistro, machines, trigger_pkgs=None):
    """Configure a docker-based documentation build job.

    Adds a builder and a Triggerable scheduler to the Buildbot config ``c``
    that checks out ``url``/``branch``, builds docs inside a docker image,
    copies them out, and cleans up the container/image afterwards.

    :param c: the Buildbot master config dict (mutated: 'schedulers', 'builders')
    :param job_name: base name for the job; also used in docker names
    :param url: git repository URL
    :param branch: git branch to build
    :param rosdistro: ROS distribution name (e.g. 'kinetic')
    :param machines: worker names allowed to run this builder
    :param trigger_pkgs: downstream packages whose doc jobs are triggered
        after this one (or None for no downstream triggering)
    :returns: the name of the builder created
    """
    # Directory which will be bind-mounted
    binddir = job_name + '_' + rosdistro + '_docbuild'

    f = BuildFactory()

    # Remove any old crud in /tmp folder
    f.addStep(ShellCommand(command=['rm', '-rf', binddir], hideStepIf=success))

    # Check out repository (to /tmp)
    f.addStep(
        Git(repourl=url,
            branch=branch,
            alwaysUseLatest=True,
            mode='full'
            #workdir = binddir+'/src/'+job_name+'/'
            ))

    # Download script from master
    # NOTE(review): the next four FileDownload steps all share the step name
    # job_name + '-grab-script' — consider unique names for readable logs.
    f.addStep(
        FileDownload(name=job_name + '-grab-script',
                     mastersrc='scripts/docbuild.py',
                     workerdest=Interpolate('%(prop:builddir)s/docbuild.py'),
                     hideStepIf=success))
    f.addStep(
        FileDownload(
            name=job_name + '-grab-script',
            mastersrc='scripts/unique_docker_doc.py',
            workerdest=Interpolate('%(prop:builddir)s/unique_docker_doc.py'),
            hideStepIf=success))
    f.addStep(
        FileDownload(
            name=job_name + '-grab-script',
            mastersrc='docker_components/Dockerfile_doc',
            workerdest=Interpolate('%(prop:builddir)s/Dockerfile_doc'),
            hideStepIf=success))
    f.addStep(
        FileDownload(name=job_name + '-grab-script',
                     mastersrc='docker_components/docker-compose-doc.yaml',
                     workerdest=Interpolate(
                         '%(prop:builddir)s/docker-compose-doc.yaml'),
                     hideStepIf=success))

    # reedit docker-compose-doc.yaml
    # (rewrites the compose file so container/image names are unique per job)
    f.addStep(
        ShellCommand(
            haltOnFailure=True,
            name=job_name + '-reedit-docker-compose',
            command=[
                'python', 'unique_docker_doc.py',
                Interpolate('%(prop:builddir)s/docker-compose-doc.yaml'),
                Interpolate(job_name)
            ],
            workdir=Interpolate('%(prop:builddir)s'),
            descriptionDone=['reedit docker-compose', job_name]))

    # Build docker image for creating doc
    f.addStep(
        ShellCommand(
            # haltOnFailure = True,
            name=job_name + '-create_docker',
            command=[
                'docker-compose', '-f',
                Interpolate('%(prop:builddir)s/docker-compose-doc.yaml'),
                'build'
            ],
            workdir=Interpolate('%(prop:builddir)s'),
            descriptionDone=['create_doc', job_name]))

    # creating doc in docker
    f.addStep(
        ShellCommand(
            # haltOnFailure=True,
            name=job_name + '-create_doc',
            command=[
                'docker', 'run',
                # '-v', 'ros-repository-docker_deb_repository:/home/package',
                '--name', Interpolate('doc_' + job_name),
                Interpolate('scalable-doc:' + job_name), 'python',
                '/root/docbuild.py', '/tmp/', rosdistro
            ],
            descriptionDone=['create doc', job_name]))

    # Copy the generated docs out of the (stopped) container.
    f.addStep(
        ShellCommand(name=job_name + '-copydocs',
                     command=[
                         'docker', 'cp',
                         Interpolate('doc_' + job_name + ':' + '/tmp/docs'),
                         '/docs'
                     ],
                     workdir=Interpolate('%(prop:builddir)s'),
                     descriptionDone=['copydocs', job_name]))

    # rm container
    f.addStep(
        ShellCommand(name=job_name + '-rm_container',
                     command=['docker', 'rm', Interpolate('doc_' + job_name)],
                     descriptionDone=['remove docker container', job_name]))

    # rm image
    f.addStep(
        ShellCommand(name=job_name + '-rm_image',
                     command=[
                         'docker', 'image', 'rm',
                         Interpolate('scalable-doc:' + job_name)
                     ],
                     descriptionDone=['remove docker image', job_name]))

    # Trigger if needed
    if trigger_pkgs != None:
        f.addStep(
            Trigger(schedulerNames=[
                t.replace('_', '-') + '-' + rosdistro + '-doctrigger'
                for t in trigger_pkgs
            ],
                    waitForFinish=False,
                    alwaysRun=True))

    # Create trigger
    c['schedulers'].append(
        triggerable.Triggerable(name=job_name.replace('_', '-') + '-' +
                                rosdistro + '-doctrigger',
                                builderNames=[
                                    job_name + '_' + rosdistro + '_docbuild',
                                ]))

    # Add builder config
    c['builders'].append(
        BuilderConfig(name=job_name + '_' + rosdistro + '_docbuild',
                      workernames=machines,
                      factory=f))

    # return the name of the job created
    return job_name + '_' + rosdistro + '_docbuild'
def addSimpleProject(self, name, category, repourl, builderconfigs):
    """Private. Add a project which builds when the source changes or
    when Force is clicked.

    For each entry in ``builderconfigs`` a chain of builders is created:
    the first OS in the config's '>'-separated "os" field builds on source
    changes, and each subsequent OS is triggered when the previous one
    finishes. A single ForceScheduler covering all builders and (if not
    already present) a GitPoller changesource for ``repourl`` are added.

    :param name: project name, used as prefix for builder names
    :param category: builder category (passed to addSimpleBuilder)
    :param repourl: git repository URL
    :param builderconfigs: list of dicts with keys "branch", "os" and
        optionally "params" and "suffix"
    """
    # FACTORIES
    # FIXME: get list of steps from buildshim here
    #factory = BuildFactory()
    # check out the source
    # This fails with git-1.8 and up unless you specify the branch, so do this down lower where we now the branch
    #factory.addStep(Git(repourl=repourl, mode='full', method='copy'))
    #    for step in ["patch", "install_deps", "configure", "compile", "check", "package", "upload", "uninstall_deps"]:
    #        factory.addStep(ShellCommand(command=["../../srclink/" + name + "/buildshim", step], description=step))

    # BUILDERS AND SCHEDULERS
    # For each builder in config file, see what OS they want to
    # run on, and assign them to suitable slaves.
    # Also create a force scheduler that knows about all the builders.
    branchnames = []
    buildernames = []
    for builderconfig in builderconfigs:
        bparams = ''
        if "params" in builderconfig:
            bparams = builderconfig["params"].encode('ascii', 'ignore')
        bsuffix = ''
        if "suffix" in builderconfig:
            bsuffix = builderconfig["suffix"].encode('ascii', 'ignore')
        sbranch = builderconfig["branch"].encode('ascii', 'ignore')
        if sbranch not in branchnames:
            branchnames.append(sbranch)
        # "os" is a '>'-separated chain; reversed so .pop() yields them in order.
        sosses = builderconfig["os"].encode('ascii', 'ignore').split('>')
        sosses.reverse()
        # The first OS in the list triggers when there's a source change
        sos = sosses.pop()
        buildername = name + '-' + sos + '-' + sbranch + bsuffix
        factory = self.addSimpleBuilder(name, buildername, category, repourl,
                                        builderconfig, sos, sbranch, bparams)
        self['schedulers'].append(
            SingleBranchScheduler(
                name=buildername,
                change_filter=filter.ChangeFilter(branch=sbranch,
                                                  repository=repourl),
                treeStableTimer=1 *
                60,  # Set this just high enough so you don't swamp the slaves, or to None if you don't want changes batched
                builderNames=[buildername]))
        buildernames.append(buildername)
        # The rest of the OSes in the list, if any, are triggered when the
        # previous OS in the list finishes
        while len(sosses) > 0:
            prev_factory = factory
            sos = sosses.pop()
            buildername = name + '-' + sos + '-' + sbranch + bsuffix
            factory = self.addSimpleBuilder(name, buildername, category,
                                            repourl, builderconfig, sos,
                                            sbranch, bparams)
            self['schedulers'].append(
                triggerable.Triggerable(name=buildername,
                                        builderNames=[buildername]))
            # Chain: the previous OS's build triggers this one.
            prev_factory.addStep(
                trigger.Trigger(schedulerNames=[buildername],
                                waitForFinish=False))

    # One force scheduler knows about every builder created above.
    self['schedulers'].append(
        ForceScheduler(
            name=name + "-force",
            builderNames=buildernames,
            branch=FixedParameter(name="branch", default=""),
            revision=FixedParameter(name="revision", default=""),
            repository=FixedParameter(name="repository", default=""),
            project=FixedParameter(name="project", default=""),
            properties=[],
        ))

    # CHANGESOURCES
    # It's a git git git git git world
    already = False
    for cs in self['change_source']:
        if cs.repourl == repourl:
            log.msg(
                "There's already a changesource for %s. Hope it has the branch you wanted."
                % cs.repourl)
            already = True
    if not already:
        self['change_source'].append(
            # Fuzz the interval to avoid slamming the git server and hitting the MaxStartups or MaxSessions limits
            # If you hit them, twistd.log will have lots of "ssh_exchange_identification: Connection closed by remote host" errors
            # See http://trac.buildbot.net/ticket/2480
            GitPoller(repourl,
                      branches=branchnames,
                      workdir='gitpoller-workdir-' + name,
                      pollinterval=60 + random.uniform(-10, 10)))
def makeScheduler(self, **kwargs):
    """Create and attach a minimal Triggerable scheduler ('n' -> builder 'b').

    Fix: extra keyword arguments are now forwarded to the Triggerable
    constructor — previously they were accepted but silently dropped,
    unlike the sibling ``makeScheduler`` helpers in this file.
    """
    sched = self.attachScheduler(
        triggerable.Triggerable(name='n', builderNames=['b'], **kwargs),
        self.SCHEDULERID)
    return sched
def ros_docbuild(c, job_name, url, branch, distro, arch, rosdistro, machines, othermirror, keys, trigger_pkgs = None):
    """Configure a cowbuilder-based documentation build job.

    Adds a builder and a Triggerable scheduler to the Buildbot config ``c``
    that checks out ``url``/``branch`` into a bind-mounted directory, builds
    the docs inside a cowbuilder chroot, and uploads them to the master.

    :param c: the Buildbot master config dict (mutated: 'schedulers', 'builders')
    :param job_name: base name for the job
    :param url: git repository URL
    :param branch: git branch to build
    :param distro: Debian/Ubuntu distribution for the cowbuilder base
    :param arch: architecture for the cowbuilder base
    :param rosdistro: ROS distribution name
    :param machines: slave names allowed to run this builder
    :param othermirror: extra apt mirror line passed to cowbuilder
    :param keys: extra apt keys for cowbuilder-update.py
    :param trigger_pkgs: downstream packages whose doc jobs are triggered
        after this one (or None)
    :returns: the name of the job created
    """
    # Directory which will be bind-mounted
    binddir = '/tmp/'+job_name+'_'+rosdistro+'_docbuild'

    f = BuildFactory()

    # Remove any old crud in /tmp folder
    f.addStep(
        ShellCommand(
            command = ['rm', '-rf', binddir],
            hideStepIf = success
        )
    )

    # Check out repository (to /tmp)
    f.addStep(
        Git(
            repourl = url,
            branch = branch,
            alwaysUseLatest = True,
            mode = 'full',
            workdir = binddir+'/src/'+job_name+'/'
        )
    )

    # Download testbuild.py script from master
    f.addStep(
        FileDownload(
            name = job_name+'-grab-script',
            mastersrc = 'scripts/docbuild.py',
            slavedest = Interpolate('%(prop:workdir)s/docbuild.py'),
            hideStepIf = success
        )
    )

    # Update the cowbuilder
    f.addStep(
        ShellCommand(
            command = ['cowbuilder-update.py', distro, arch] + keys,
            hideStepIf = success
        )
    )

    # Build docs in a cowbuilder
    f.addStep(
        ShellCommand(
            haltOnFailure = True,
            name = job_name+'-docbuild',
            command = ['cowbuilder', '--execute', Interpolate('%(prop:workdir)s/docbuild.py'),
                       '--distribution', distro, '--architecture', arch,
                       '--bindmounts', binddir,
                       '--basepath', '/var/cache/pbuilder/base-'+distro+'-'+arch+'.cow',
                       '--override-config', '--othermirror', othermirror,
                       '--', binddir, rosdistro],
            env = {'DIST': distro},
            descriptionDone = ['built docs', ]
        )
    )

    # Upload docs to master
    f.addStep(
        DirectoryUpload(
            name = job_name+'-upload',
            slavesrc = binddir+'/docs',
            masterdest = 'docs/' + rosdistro,
            hideStepIf = success
        )
    )

    # Trigger if needed
    if trigger_pkgs != None:
        f.addStep(
            Trigger(
                schedulerNames = [t.replace('_','-')+'-'+rosdistro+'-doctrigger' for t in trigger_pkgs],
                waitForFinish = False,
                alwaysRun=True
            )
        )

    # Create trigger
    c['schedulers'].append(
        triggerable.Triggerable(
            name = job_name.replace('_','-')+'-'+rosdistro+'-doctrigger',
            builderNames = [job_name+'_'+rosdistro+'_docbuild',]
        )
    )

    # Add builder config
    c['builders'].append(
        BuilderConfig(
            name = job_name+'_'+rosdistro+'_docbuild',
            slavenames = machines,
            factory = f
        )
    )

    # return the name of the job created
    return job_name+'_'+rosdistro+'_docbuild'
def ros_debbuild(c, job_name, packages, url, distro, arch, rosdistro, version, machines, othermirror, keys, trigger_pkgs=None, locks=[]):
    # NOTE(review): ``locks=[]`` is a mutable default argument; safe only if
    # never mutated in place — confirm, or switch to ``locks=None`` idiom.
    """Configure a docker-based Debian package build job.

    Adds a builder and a Triggerable scheduler to the Buildbot config ``c``
    that checks out the release repository, then for each package in
    ``packages`` builds a Debian package inside a docker image, uploads it
    to the local repository container, and cleans up.

    :param c: the Buildbot master config dict (mutated: 'schedulers', 'builders')
    :param job_name: base name for the job
    :param packages: package names to build, in dependency order
    :param url: git release repository URL
    :param distro: Debian/Ubuntu distribution name
    :param arch: target architecture
    :param rosdistro: ROS distribution name
    :param version: release version (stored as the 'release_version' property)
    :param machines: worker names allowed to run this builder
    :param othermirror: extra apt mirror (unused in the docker build path — confirm)
    :param keys: apt keys (unused in the docker build path — confirm)
    :param trigger_pkgs: downstream packages to trigger after this job (or None)
    :param locks: Buildbot locks applied to the builder
    :returns: the name of the builder created
    """
    # Arguments for git-buildpackage; kept for parity with the cowbuilder job.
    gbp_args = [
        '-uc', '-us', '--git-ignore-branch', '--git-ignore-new',
        '--git-verbose', '--git-dist=' + distro, '--git-arch=' + arch
    ]
    f = BuildFactory()
    # Remove the build directory.
    f.addStep(
        RemoveDirectory(
            name=job_name + '-clean',
            dir=Interpolate('%(prop:builddir)s'),
            hideStepIf=success,
        ))
    # Check out the repository master branch, since releases are tagged and not branched
    f.addStep(
        Git(
            repourl=url,
            branch='master',
            alwaysUseLatest=
            True,  # this avoids broken builds when schedulers send wrong tag/rev
            mode='full'  # clean out old versions
        ))
    # Need to build each package in order
    for package in packages:
        debian_pkg = 'ros-' + rosdistro + '-' + package.replace(
            '_', '-')  # debian package name (ros-groovy-foo)
        branch_name = 'debian/' + debian_pkg + '_%(prop:release_version)s_' + distro  # release branch from bloom debian/ros-groovy-foo_0.0.1_kinetic
        deb_name = debian_pkg + '_%(prop:release_version)s' + distro
        final_name = debian_pkg + '_%(prop:release_version)s' + distro + '_' + arch + '.deb'
        # final_name = debian_pkg+'_%(prop:release_version)s-%(prop:datestamp)s'+distro+'_'+arch+'.deb'
        # Check out the proper tag. Use --force to delete changes from previous deb stamping
        f.addStep(
            ShellCommand(haltOnFailure=True,
                         name=package + '-checkout',
                         command=[
                             'git', 'checkout',
                             Interpolate(branch_name), '--force'
                         ],
                         hideStepIf=success))
        # Download script for building the source deb
        f.addStep(
            FileDownload(name=job_name + '-grab-docker-compose-debian',
                         mastersrc='docker_components/docker-compose-deb.yaml',
                         workerdest=Interpolate(
                             '%(prop:builddir)s/docker-compose-deb.yaml'),
                         mode=0o755,
                         hideStepIf=success))
        f.addStep(
            FileDownload(
                name=job_name + '-grab-dockerfile-debian',
                mastersrc='docker_components/Dockerfile_deb',
                workerdest=Interpolate('%(prop:builddir)s/Dockerfile_deb'),
                mode=0o755,
                hideStepIf=success))
        f.addStep(
            FileDownload(
                name=job_name + '-grab-build-deb-shell',
                mastersrc='shell/builddebian.sh',
                workerdest=Interpolate('%(prop:builddir)s/builddebian.sh'),
                mode=0o755,
                hideStepIf=success))
        # NOTE(review): the next two steps share the name
        # job_name + '-grab-rosdep-private' — consider unique step names.
        f.addStep(
            FileDownload(name=job_name + '-grab-rosdep-private',
                         mastersrc='docker_components/rosdep_private.yaml',
                         workerdest=Interpolate(
                             '%(prop:builddir)s/rosdep_private.yaml'),
                         mode=0o755,
                         hideStepIf=success))
        f.addStep(
            FileDownload(name=job_name + '-grab-rosdep-private',
                         mastersrc='scripts/unique_docker_deb.py',
                         workerdest=Interpolate(
                             '%(prop:builddir)s/unique_docker_deb.py'),
                         mode=0o755,
                         hideStepIf=success))
        # reedit docker-compose-deb.yaml
        # (rewrites the compose file so container/image names are unique per package)
        f.addStep(
            ShellCommand(
                haltOnFailure=True,
                name=package + '-reedit-docker-compose',
                command=[
                    'python', 'unique_docker_deb.py',
                    Interpolate('%(prop:builddir)s/docker-compose-deb.yaml'),
                    Interpolate(package)
                ],
                workdir=Interpolate('%(prop:builddir)s'),
                descriptionDone=['reedit docker-compose', package]))
        # Build docker image for creating debian
        f.addStep(
            ShellCommand(
                #haltOnFailure = True,
                name=package + '-buildsource',
                command=[
                    'docker-compose', '-f',
                    Interpolate('%(prop:builddir)s/docker-compose-deb.yaml'),
                    'build'
                ],
                workdir=Interpolate('%(prop:builddir)s'),
                descriptionDone=['sourcedeb', package]))
        # build debian package
        f.addStep(
            ShellCommand(
                #haltOnFailure=True,
                name=job_name + '-build',
                command=[
                    'docker', 'run', '-v',
                    'ros-buildbot-docker_deb_repository:/home/package',
                    '--name',
                    Interpolate(package),
                    Interpolate('scalable-deb:' + package), 'bash',
                    '/usr/local/sbin/builddeb.sh'
                ],
                descriptionDone=['build debian package', job_name]))
        # update to local repository
        f.addStep(
            ShellCommand(name=job_name + '-upload',
                         command=[
                             'docker', 'exec', '-e',
                             Interpolate('package=' + debian_pkg + '*'),
                             'local-repository', 'bash',
                             '/tmp/debian-upload.sh'
                         ],
                         descriptionDone=['release package', job_name]))
        # rm container
        f.addStep(
            ShellCommand(name=job_name + '-rm_container',
                         command=['docker', 'rm',
                                  Interpolate(package)],
                         descriptionDone=['remove docker container',
                                          job_name]))
        # rm image
        f.addStep(
            ShellCommand(name=job_name + '-rm_image',
                         command=[
                             'docker', 'image', 'rm',
                             Interpolate('scalable-deb:' + package)
                         ],
                         descriptionDone=['remove docker image', job_name]))
    # Trigger if needed
    if trigger_pkgs != None:
        f.addStep(
            Trigger(schedulerNames=[
                t.replace('_', '-') + '-' + rosdistro + '-' + distro + '-' +
                arch + '-debtrigger' for t in trigger_pkgs
            ],
                    waitForFinish=False,
                    alwaysRun=True))
    # Create trigger
    c['schedulers'].append(
        triggerable.Triggerable(
            name=job_name.replace('_', '-') + '-' + rosdistro + '-' + distro +
            '-' + arch + '-debtrigger',
            builderNames=[
                job_name + '_' + rosdistro + '_' + distro + '_' + arch +
                '_debbuild',
            ]))
    # Add to builders
    c['builders'].append(
        BuilderConfig(name=job_name + '_' + rosdistro + '_' + distro + '_' +
                      arch + '_debbuild',
                      properties={'release_version': version},
                      workernames=machines,
                      factory=f,
                      locks=locks))
    # return name of builder created
    return job_name + '_' + rosdistro + '_' + distro + '_' + arch + '_debbuild'
# Scheduler wiring: nightly system update, weekly integration tests,
# per-checkin builds, and a triggerable deploy job.
schedulers = []

# Refresh the build system every night at 01:00.
nightly = timed.Nightly(name="Nightly",
                        branch=None,
                        builderNames=["System_Update"],
                        minute=0,
                        hour=1)
schedulers.append(nightly)

# Run the Linux integration tests once a week, Sunday at 02:00.
weekly = timed.Nightly(
    name="Weekly",
    branch=None,
    minute=0,
    hour=2,
    dayOfWeek=7,  # 0 = Monday, 7 = Sunday
    builderNames=["JaCoCo_ITs_Linux"])
schedulers.append(weekly)

# Build shortly after every check-in once the tree is stable.
checkin_scheduler = basic.SingleBranchScheduler(
    name="JaCoCo_Checkin",
    branch=None,
    treeStableTimer=1,
    builderNames=[]  # TODO
)
schedulers.append(checkin_scheduler)

# Deploy runs only when explicitly triggered by another build.
deploy_scheduler = triggerable.Triggerable(name="JaCoCo_Deploy",
                                           builderNames=["JaCoCo_Deploy"])
schedulers.append(deploy_scheduler)
def ros_debbuild(c, job_name, packages, url, distro, arch, rosdistro, version, machines, othermirror, keys, trigger_pkgs=None):
    """Configure a cowbuilder-based Debian package build job.

    Adds a builder and a Triggerable scheduler to the Buildbot config ``c``
    that checks out the release repository, then for each package in
    ``packages`` builds source and binary debs with git-buildpackage inside
    a cowbuilder chroot, uploads them to the master, and registers the
    binary in the apt repository via reprepro.

    :param c: the Buildbot master config dict (mutated: 'schedulers', 'builders')
    :param job_name: base name for the job
    :param packages: package names to build, in dependency order
    :param url: git release repository URL
    :param distro: Debian/Ubuntu distribution name
    :param arch: target architecture
    :param rosdistro: ROS distribution name
    :param version: release version (stored as the 'release_version' property)
    :param machines: slave names allowed to run this builder
    :param othermirror: extra apt mirror line for the pbuilder environment
    :param keys: apt keys for cowbuilder-update.py
    :param trigger_pkgs: downstream packages to trigger after this job (or None)
    :returns: the name of the builder created
    """
    gbp_args = [
        '-uc', '-us', '--git-ignore-branch', '--git-ignore-new',
        '--git-verbose', '--git-dist=' + distro, '--git-arch=' + arch
    ]
    f = BuildFactory()
    # Remove the build directory.
    f.addStep(
        RemoveDirectory(
            name=job_name + '-clean',
            dir=Interpolate('%(prop:workdir)s'),
            hideStepIf=success,
        ))
    # Check out the repository master branch, since releases are tagged and not branched
    f.addStep(
        Git(
            repourl=url,
            branch='master',
            alwaysUseLatest=
            True,  # this avoids broken builds when schedulers send wrong tag/rev
            mode='full'  # clean out old versions
        ))
    # Update the cowbuilder
    f.addStep(
        ShellCommand(command=['cowbuilder-update.py', distro, arch] + keys,
                     hideStepIf=success))
    # Need to build each package in order
    for package in packages:
        debian_pkg = 'ros-' + rosdistro + '-' + package.replace(
            '_', '-')  # debian package name (ros-groovy-foo)
        branch_name = 'debian/' + debian_pkg + '_%(prop:release_version)s_' + distro  # release branch from bloom
        deb_name = debian_pkg + '_%(prop:release_version)s' + distro
        final_name = debian_pkg + '_%(prop:release_version)s-%(prop:datestamp)s' + distro + '_' + arch + '.deb'
        # Check out the proper tag. Use --force to delete changes from previous deb stamping
        f.addStep(
            ShellCommand(haltOnFailure=True,
                         name=package + '-checkout',
                         command=[
                             'git', 'checkout',
                             Interpolate(branch_name), '--force'
                         ],
                         hideStepIf=success))
        # Build the source deb
        f.addStep(
            ShellCommand(haltOnFailure=True,
                         name=package + '-buildsource',
                         command=['git-buildpackage', '-S'] + gbp_args,
                         descriptionDone=['sourcedeb', package]))
        # Upload sourcedeb to master (currently we are not actually syncing these with a public repo)
        f.addStep(
            FileUpload(
                name=package + '-uploadsource',
                slavesrc=Interpolate('%(prop:workdir)s/' + deb_name + '.dsc'),
                masterdest=Interpolate('sourcedebs/' + deb_name + '.dsc'),
                hideStepIf=success))
        # Stamp the changelog, in a similar fashion to the ROS buildfarm
        f.addStep(
            SetPropertyFromCommand(command="date +%Y%m%d-%H%M-%z",
                                   property="datestamp",
                                   name=package + '-getstamp',
                                   hideStepIf=success))
        f.addStep(
            ShellCommand(
                haltOnFailure=True,
                name=package + '-stampdeb',
                command=[
                    'git-dch', '-a', '--ignore-branch', '--verbose', '-N',
                    Interpolate('%(prop:release_version)s-%(prop:datestamp)s' +
                                distro)
                ],
                descriptionDone=[
                    'stamped changelog',
                    Interpolate('%(prop:release_version)s'),
                    Interpolate('%(prop:datestamp)s')
                ]))
        # download hooks
        f.addStep(
            FileDownload(
                name=package + '-grab-hooks',
                mastersrc='hooks/D05deps',
                slavedest=Interpolate('%(prop:workdir)s/hooks/D05deps'),
                hideStepIf=success,
                mode=0777  # make this executable for the cowbuilder
            ))
        # build the binary from the git working copy
        f.addStep(
            ShellCommand(
                haltOnFailure=True,
                name=package + '-buildbinary',
                command=[
                    'git-buildpackage', '--git-pbuilder', '--git-export=WC',
                    Interpolate('--git-export-dir=%(prop:workdir)s')
                ] + gbp_args,
                env={
                    'DIST': distro,
                    'GIT_PBUILDER_OPTIONS':
                    Interpolate(
                        '--hookdir %(prop:workdir)s/hooks --override-config'),
                    'OTHERMIRROR': othermirror
                },
                descriptionDone=['binarydeb', package]))
        # Upload binarydeb to master
        f.addStep(
            FileUpload(name=package + '-uploadbinary',
                       slavesrc=Interpolate('%(prop:workdir)s/' + final_name),
                       masterdest=Interpolate('binarydebs/' + final_name),
                       hideStepIf=success))
        # Add the binarydeb using reprepro updater script on master
        f.addStep(
            MasterShellCommand(name=package + 'includedeb',
                               command=[
                                   'reprepro-include.bash', debian_pkg,
                                   Interpolate(final_name), distro, arch
                               ],
                               descriptionDone=['updated in apt', package]))
    # Trigger if needed
    if trigger_pkgs != None:
        f.addStep(
            Trigger(schedulerNames=[
                t.replace('_', '-') + '-' + rosdistro + '-' + distro + '-' +
                arch + '-debtrigger' for t in trigger_pkgs
            ],
                    waitForFinish=False,
                    alwaysRun=True))
    # Create trigger
    c['schedulers'].append(
        triggerable.Triggerable(
            name=job_name.replace('_', '-') + '-' + rosdistro + '-' + distro +
            '-' + arch + '-debtrigger',
            builderNames=[
                job_name + '_' + rosdistro + '_' + distro + '_' + arch +
                '_debbuild',
            ]))
    # Add to builders
    c['builders'].append(
        BuilderConfig(name=job_name + '_' + rosdistro + '_' + distro + '_' +
                      arch + '_debbuild',
                      properties={'release_version': version},
                      slavenames=machines,
                      factory=f))
    # return name of builder created
    return job_name + '_' + rosdistro + '_' + distro + '_' + arch + '_debbuild'