def setProperty(self, propname, value, source):
    """Overridden to store got revision per codebase, when one is set.

    With an empty codebase this defers straight to the base class;
    otherwise the property becomes a dict keyed by codebase name.
    """
    if self.codebase == '':
        # No codebase configured: plain scalar property.
        return SetPropertyFromCommand.setProperty(self, propname, value, source)
    # Merge this codebase's value into the existing per-codebase mapping.
    per_codebase = self.getProperty(propname, {})
    per_codebase[self.codebase] = value
    return SetPropertyFromCommand.setProperty(self, propname, per_codebase, source)
def getText(self, cmd, results):
    """Render the step's status text.

    Returns ``"<package> version <x>"`` when the command extracted a
    version, otherwise defers to the base-class rendering.
    """
    if 'version' in self.property_changes:
        return ["%s version %s" % (self.package,
                                   self.property_changes['version'])]
    # Bug fix: the base-class result was previously computed but not
    # returned, so this method silently yielded None in the fallback case.
    return SetPropertyFromCommand.getText(self, cmd, results)
def __init__(self, python, package, **kw):
    """Configure a step that runs ``python -c`` to capture *package*'s version.

    The probe source comes from the class attribute ``src`` with the
    package name substituted in; extraction is done by ``_extractVersion``.
    """
    step_name = "check-%s-version" % package
    step_description = "checking %s version" % package
    probe_source = self.src % dict(package=package)
    SetPropertyFromCommand.__init__(
        self,
        name=step_name,
        description=step_description,
        command=[python, '-c', probe_source],
        extract_fn=self._extractVersion,
        **kw)
    self.package = package
def __init__(self, python, package, **kw):
    """Configure a step that runs ``python -c`` (with PYTHONPATH preset)
    to capture *package*'s version via ``_extractVersion``.
    """
    probe_source = self.src % dict(package=package)
    SetPropertyFromCommand.__init__(
        self,
        name="check-%s-version" % package,
        description="checking %s version" % package,
        # NOTE(review): ';' is the Windows PYTHONPATH separator, and the
        # 'src' entry is duplicated — presumably targets Windows workers;
        # confirm before changing.
        env={"PYTHONPATH": "src;src"},
        command=[python, '-c', probe_source],
        extract_fn=self._extractVersion,
        **kw)
    self.package = package
def __init__(self, builder, **kwargs):
    # Step that sets the 'abi_base_file' property: lists the ABI dump
    # archives available on the worker and picks the first candidate that
    # appears in the listing, else falls back to a version-based default.
    def extractor(rc, stdout, stderr):
        # rc/stdout come from the `ls` command configured below.
        if rc == 0:
            for fname in self.getCandidates():
                if fname in stdout:
                    print 'ABI: found', fname
                    return {'abi_base_file':'/opt/build-worker/abi/%s' % fname}
        # No candidate found (or `ls` failed): fall back to a baseline
        # dump chosen by branch.
        if isBranch34(builder):
            print 'ABI: fallback to 3.4.6'
            return {'abi_base_file':'/opt/build-worker/abi/dump-3.4.6.abi.tar.gz'}
        else:
            print 'ABI: fallback to 4.1.0'
            return {'abi_base_file':'/opt/build-worker/abi/dump-4.1.0.abi.tar.gz'}
    # builder.envCmd is assumed to be a shell prefix ending appropriately
    # for direct concatenation — TODO confirm.
    cmd = builder.envCmd + 'ls -1 /opt/build-worker/abi/*.abi.tar.gz'
    SetPropertyFromCommand.__init__(self, workdir='build', command=cmd, extract_fn=extractor, **kwargs)
def steps_odoo_port_reservation(configurator, options, environ=()):
    """Return steps for port reservation.

    The chosen port is stored in ``openerp_port`` property.

    Available manifest file options:

      :odoo.http-port-min: minimal value for the HTTP port (defaults to 6069)
      :odoo.http-port-max: maximal value for the HTTP port (defaults to 7068)
      :odoo.http-port-step: increment value for the HTTP port (defaults to 5)

    .. note:: the port-max default used to be documented as 7069, but the
       command below passes '7068'; the docstring now matches the code.
    """
    return (
        # Ship the reservation helper script to the worker.
        FileDownload(
            mastersrc=os.path.join(BUILD_UTILS_PATH, 'port_reserve.py'),
            slavedest='port_reserve.py'),
        # Run it under an exclusive lock so concurrent builds cannot
        # reserve the same port; the script's output becomes the property.
        SetPropertyFromCommand(
            property='openerp_port',
            description=['Port', 'reservation'],
            locks=[port_lock.access('exclusive')],
            command=[
                'python', 'port_reserve.py',
                '--port-min=' + options.get('odoo.http-port-min', '6069'),
                '--port-max=' + options.get('odoo.http-port-max', '7068'),
                '--step=' + options.get('odoo.http-port-step', '5'),
            ]))
def getCmakeWithNinjaWithMSVCBuildFactory(
        depends_on_projects=None,
        llvm_srcdir=None,
        obj_dir=None,
        checks=None,
        install_dir=None,
        clean=False,
        extra_configure_args=None,
        # VS tools environment variable if using MSVC. For example,
        # %VS140COMNTOOLS% selects the 2015 toolchain.
        vs=None,
        target_arch=None,
        env=None,
        **kwargs):
    """Return a CMake+Ninja LLVM build factory running inside an MSVC
    environment captured from the requested Visual Studio toolchain.

    The VS environment is captured once into the ``vs_env`` property and
    then used as the env for the cmake/ninja steps.
    """
    assert not env, "Can't have custom builder env vars with MSVC build"

    # Make a local copy of the configure args, as we are going to modify that.
    if extra_configure_args:
        cmake_args = extra_configure_args[:]
    else:
        cmake_args = list()

    if checks is None:
        checks = ['check-all']

    f = getLLVMBuildFactoryAndSourcecodeSteps(
        depends_on_projects=depends_on_projects,
        llvm_srcdir=llvm_srcdir,
        obj_dir=obj_dir,
        install_dir=install_dir,
        **kwargs)  # Pass through all the extra arguments.

    # Run the VS environment probe and stash its output in 'vs_env'.
    f.addStep(
        SetPropertyFromCommand(
            command=builders_util.getVisualStudioEnvironment(vs, target_arch),
            extract_fn=builders_util.extractVSEnvironment))
    # From here on, all build steps run with the captured VS environment.
    env = util.Property('vs_env')

    # Clean when the 'clean'/'clean_obj' property is set or clean=True.
    cleanBuildRequested = lambda step: step.build.getProperty(
        "clean", default=step.build.getProperty("clean_obj")) or clean

    addCmakeSteps(f,
                  generator='Ninja',
                  cleanBuildRequested=cleanBuildRequested,
                  obj_dir=f.obj_dir,
                  install_dir=f.install_dir,
                  extra_configure_args=cmake_args,
                  env=env,
                  **kwargs)

    addNinjaSteps(f,
                  obj_dir=obj_dir,
                  checks=checks,
                  install_dir=f.install_dir,
                  env=env,
                  **kwargs)

    return f
def get_vs_env_step():
    """Build the step list that captures the MSVC environment.

    Runs ``vcvarsall.bat`` for the configured architecture, dumps the
    resulting environment with ``set``, and hands the output to
    ``extract_vs_paths`` for property extraction.
    """
    from buildbot.steps.shell import SetPropertyFromCommand
    from buildbot.process.properties import Interpolate

    capture_step = SetPropertyFromCommand(
        env={'PATH': Interpolate('%(prop:toolchain_path)s%(prop:PATH)s')},
        command=Interpolate(
            'vcvarsall.bat %(prop:architecture)s > nul && set'),
        extract_fn=extract_vs_paths)
    return [capture_step]
def testGoodStep(self):
    """setupBuild() must accept a factory holding a valid SetPropertyFromCommand."""
    factory = BuildFactory()
    step = SetPropertyFromCommand(command=["echo", "value"], property="propname")
    factory.addStep(step)
    stamp = SourceStamp()
    request = FakeBuildRequest("Testing", {stamp.repository: stamp}, None)
    build = factory.newBuild([request])
    build.build_status = FakeBuildStatus()
    build.slavebuilder = FakeSlaveBuilder()
    # The test passes as long as no exception propagates from here.
    build.setupBuild(None)
def testGoodStep(self):
    """setupBuild() must succeed for a factory with a valid SetPropertyFromCommand."""
    factory = BuildFactory()
    factory.addStep(
        SetPropertyFromCommand(command=["echo", "value"], property="propname"))
    stamp = mock.Mock(name="sourcestamp")
    stamp.repository = 'repo'
    stamp.changes = []
    stamp.patch = stamp.patch_info = None
    request = FakeBuildRequest("Testing", {stamp.repository: stamp}, None)
    build = factory.newBuild([request])
    build.master = mock.Mock(name='master')
    build.build_status = FakeBuildStatus()
    build.slavebuilder = FakeSlaveBuilder()
    # The test passes as long as no exception propagates from here.
    build.setupBuild(None)
def check_version():
    """Return a step that records ``setup.py --version`` in the ``version``
    property, using the virtualenv's Python.
    """
    quiet_env = {
        # Ignore warnings
        # In particular, setuptools warns about normalization.
        # Normalizing '..' to '..' normalized_version, ..
        'PYTHONWARNINGS': 'ignore',
    }
    version_step = SetPropertyFromCommand(
        command=[virtualenvBinary('python'), "setup.py", "--version"],
        name='check-version',
        description=['checking', 'version'],
        descriptionDone=['check', 'version'],
        property='version',
        env=quiet_env,
    )
    return [version_step]
def ros_debbuild(c, job_name, packages, url, distro, arch, rosdistro, version,
                 machines, othermirror, keys, trigger_pkgs=None):
    """Register a builder that builds and publishes Debian packages for the
    given ROS release repository, one package at a time, and returns the
    new builder's name.

    ``c`` is the buildmaster config dict (schedulers/builders are appended
    to it as a side effect).
    """
    gbp_args = [
        '-uc', '-us', '--git-ignore-branch', '--git-ignore-new',
        '--git-verbose', '--git-dist=' + distro, '--git-arch=' + arch
    ]
    f = BuildFactory()
    # Remove the build directory.
    f.addStep(
        RemoveDirectory(
            name=job_name + '-clean',
            dir=Interpolate('%(prop:workdir)s'),
            hideStepIf=success,
        ))
    # Check out the repository master branch, since releases are tagged and not branched
    f.addStep(
        Git(
            repourl=url,
            branch='master',
            alwaysUseLatest=True,  # this avoids broken builds when schedulers send wrong tag/rev
            mode='full'  # clean out old versions
        ))
    # Update the cowbuilder
    f.addStep(
        ShellCommand(command=['cowbuilder-update.py', distro, arch] + keys,
                     hideStepIf=success))
    # Need to build each package in order
    for package in packages:
        debian_pkg = 'ros-' + rosdistro + '-' + package.replace(
            '_', '-')  # debian package name (ros-groovy-foo)
        branch_name = 'debian/' + debian_pkg + '_%(prop:release_version)s_' + distro  # release branch from bloom
        deb_name = debian_pkg + '_%(prop:release_version)s' + distro
        final_name = debian_pkg + '_%(prop:release_version)s-%(prop:datestamp)s' + distro + '_' + arch + '.deb'
        # Check out the proper tag. Use --force to delete changes from
        # previous deb stamping.
        f.addStep(
            ShellCommand(haltOnFailure=True,
                         name=package + '-checkout',
                         command=[
                             'git', 'checkout', Interpolate(branch_name),
                             '--force'
                         ],
                         hideStepIf=success))
        # Build the source deb
        f.addStep(
            ShellCommand(haltOnFailure=True,
                         name=package + '-buildsource',
                         command=['git-buildpackage', '-S'] + gbp_args,
                         descriptionDone=['sourcedeb', package]))
        # Upload sourcedeb to master (currently we are not actually syncing these with a public repo)
        f.addStep(
            FileUpload(
                name=package + '-uploadsource',
                slavesrc=Interpolate('%(prop:workdir)s/' + deb_name + '.dsc'),
                masterdest=Interpolate('sourcedebs/' + deb_name + '.dsc'),
                hideStepIf=success))
        # Stamp the changelog, in a similar fashion to the ROS buildfarm
        f.addStep(
            SetPropertyFromCommand(command="date +%Y%m%d-%H%M-%z",
                                   property="datestamp",
                                   name=package + '-getstamp',
                                   hideStepIf=success))
        f.addStep(
            ShellCommand(
                haltOnFailure=True,
                name=package + '-stampdeb',
                command=[
                    'git-dch', '-a', '--ignore-branch', '--verbose', '-N',
                    Interpolate('%(prop:release_version)s-%(prop:datestamp)s' +
                                distro)
                ],
                descriptionDone=[
                    'stamped changelog',
                    Interpolate('%(prop:release_version)s'),
                    Interpolate('%(prop:datestamp)s')
                ]))
        # download hooks
        f.addStep(
            FileDownload(
                name=package + '-grab-hooks',
                mastersrc='hooks/D05deps',
                slavedest=Interpolate('%(prop:workdir)s/hooks/D05deps'),
                hideStepIf=success,
                mode=0777  # make this executable for the cowbuilder
            ))
        # build the binary from the git working copy
        f.addStep(
            ShellCommand(
                haltOnFailure=True,
                name=package + '-buildbinary',
                command=[
                    'git-buildpackage', '--git-pbuilder', '--git-export=WC',
                    Interpolate('--git-export-dir=%(prop:workdir)s')
                ] + gbp_args,
                env={
                    'DIST': distro,
                    'GIT_PBUILDER_OPTIONS':
                    Interpolate(
                        '--hookdir %(prop:workdir)s/hooks --override-config'),
                    'OTHERMIRROR': othermirror
                },
                descriptionDone=['binarydeb', package]))
        # Upload binarydeb to master
        f.addStep(
            FileUpload(name=package + '-uploadbinary',
                       slavesrc=Interpolate('%(prop:workdir)s/' + final_name),
                       masterdest=Interpolate('binarydebs/' + final_name),
                       hideStepIf=success))
        # Add the binarydeb using reprepro updater script on master
        # NOTE(review): unlike the other steps, this name has no '-'
        # separator between package and 'includedeb' — likely unintended.
        f.addStep(
            MasterShellCommand(name=package + 'includedeb',
                               command=[
                                   'reprepro-include.bash', debian_pkg,
                                   Interpolate(final_name), distro, arch
                               ],
                               descriptionDone=['updated in apt', package]))
    # Trigger dependent package builds if needed
    if trigger_pkgs != None:
        f.addStep(
            Trigger(schedulerNames=[
                t.replace('_', '-') + '-' + rosdistro + '-' + distro + '-' +
                arch + '-debtrigger' for t in trigger_pkgs
            ],
                    waitForFinish=False,
                    alwaysRun=True))
    # Create trigger
    c['schedulers'].append(
        triggerable.Triggerable(
            name=job_name.replace('_', '-') + '-' + rosdistro + '-' + distro +
            '-' + arch + '-debtrigger',
            builderNames=[
                job_name + '_' + rosdistro + '_' + distro + '_' + arch +
                '_debbuild',
            ]))
    # Add to builders
    c['builders'].append(
        BuilderConfig(name=job_name + '_' + rosdistro + '_' + distro + '_' +
                      arch + '_debbuild',
                      properties={'release_version': version},
                      slavenames=machines,
                      factory=f))
    # return name of builder created
    return job_name + '_' + rosdistro + '_' + distro + '_' + arch + '_debbuild'
props['buildnumber']) # Steps to figure out which .whl version to use. whl_version_steps = [ # Get the point of last merge between this commit and master. # Buildbot is very selective about its fetches, so we need to make # sure we update the origin/master ref ourselves. ShellCommand(name="update-ref", command=[ "git", "fetch", "origin", "+refs/heads/master:refs/remotes/origin/master" ]), SetPropertyFromCommand("merge-base", command=[ "git", "merge-base", "origin/master", Property("got_revision") ], haltOnFailure=True), # Count the number of commits between the last release and the last merge. SetPropertyFromCommand("commit-index", command=["git", "rev-list", "--count", refspec], haltOnFailure=True), ] # Steps to publish the rtdist. publish_rtdist_steps = [ # Upload the stage directory. DirectoryUpload(slavesrc="built/stage", masterdest=rtdist_staging_dir, haltOnFailure=True),
dist_flags, "--osxtarget", Property("osxtarget"), "--no-gles", "--no-gles2", "--no-egl", "--version", Property("version"), ] build_steps = [ Git(config.git_url, getDescription={'match': 'v*'}), # Decode the version number from the dtool/PandaVersion.pp file. SetPropertyFromCommand( "version", command=[python_executable, "makepanda/getversion.py", buildtype_flag], haltOnFailure=True), # Run makepanda - give it enough timeout (1h) Compile(command=build_cmd, timeout=1 * 60 * 60, env={ "MAKEPANDA_THIRDPARTY": "/Users/buildbot/thirdparty", "MAKEPANDA_SDKS": "/Users/buildbot/sdks", "PYTHONPATH": python_path }, haltOnFailure=True), ] build_publish_whl_steps = whl_version_steps + [ SetPropertyFromCommand(
"--python-libdir", python_libdir, common_flags, "--outputdir", built_dir, "--wheel", "--version", whl_version, ] build_steps = [ Git(config.git_url, getDescription={'match': 'v*'}), # Decode the version number from the dtool/PandaVersion.pp file. SetPropertyFromCommand("version", command=["python", "makepanda/getversion.py"], haltOnFailure=True), # Steps to figure out which .whl version to use. ] + whl_version_steps + [ # Download the Dockerfile for this distribution. FileDownload(mastersrc=Interpolate("dockerfiles/manylinux1-%(prop:arch)s"), slavedest="Dockerfile", workdir="context"), # And the build scripts. FileDownload(mastersrc="build_scripts/build.sh", slavedest="build_scripts/build.sh", workdir="context"), FileDownload(mastersrc="build_scripts/build_utils.sh",
def getCmakeWithMSVCBuildFactory(
        clean=True,                 # False for incremental builds.
        depends_on_projects=None,   # List of projects to listen.
        cmake_cache=None,           # Path to a cmake cache file.
        extra_configure_args=None,  # Extra CMake args if any.
        llvm_srcdir=None,           # Source code root directory.
        obj_dir=None,               # Build tree root directory.
        install_dir=None,           # Directory to install the results to.
        checks=None,                # List of checks to test the build.
        checks_on_target=None,      # [(<name>,[<command tokens>])] array of
                                    # check name and command to run on target.
        jobs=None,                  # Restrict a degree of parallelism.
                                    # NOTE(review): 'jobs' is not referenced
                                    # in this body — confirm intent.
        env=None,                   # Environmental variables for all steps.
        # VS tools environment variable if using MSVC.
        # For example, "autodetect" to auto detect, %VS140COMNTOOLS% to select
        # the VS 2015 toolchain, or empty string if environment is already set.
        vs=None,
        **kwargs):
    """Return a build factory that builds LLVM host tools with CMake/Ninja
    under an MSVC environment, runs the requested checks (optionally on a
    target device) and installs the results.
    """
    if vs is None:
        # We autodetect Visual Studio, unless otherwise is requested.
        vs = "autodetect"

    if install_dir is None:
        install_dir = 'install'

    # Prepare environmental variables. Set here all env we want for all steps.
    merged_env = {
        'TERM': 'dumb'  # Make sure Clang doesn't use color escape sequences.
    }
    if env is not None:
        # NOTE(review): vs defaults to "autodetect" above, so it is always
        # truthy here and a custom env always trips this assert — confirm
        # whether vs='' callers were the intended exception.
        assert not vs, "Cannot have custom builder env vars with VS setup."
        # Overwrite pre-set items with the given ones, so user can set anything.
        merged_env.update(env)

    # Make a local copy of the configure args, as we are going to modify that.
    if extra_configure_args:
        cmake_args = extra_configure_args[:]
    else:
        cmake_args = list()

    if depends_on_projects is None:
        depends_on_projects = [
            'llvm',
            'compiler-rt',
            'clang',
            'clang-tools-extra',
            'libunwind',
            'libcxx',
            'libcxxabi',
            'lld',
        ]

    if checks is None:
        # Check only host-side tools. Target-side tests should run on a target.
        checks = [
            "check-llvm",
            "check-clang",
            "check-lld",
        ]

    source_remove_requested = lambda step: step.build.getProperty("clean")
    clean_build_requested = lambda step: \
        clean or \
        step.build.getProperty("clean", \
                               default=step.build.getProperty("clean_obj") \
                               )

    f = LLVMBuildFactory(depends_on_projects=depends_on_projects,
                         llvm_srcdir=llvm_srcdir,
                         obj_dir=obj_dir,
                         install_dir=install_dir,
                         cleanBuildRequested=clean_build_requested,
                         **kwargs)  # Pass through all the extra arguments.

    # Remove the source code tree if requested.
    # NOTE: Somehow RemoveDirectory buildbot command often fails on Windows,
    # as somthing keeps a lock. We use rm command instead realying on a shell
    # to support that.
    f.addStep(
        ShellCommand(name='clean-%s-dir' % f.monorepo_dir,
                     command=['rm', '-rf', f.monorepo_dir],
                     warnOnFailure=True,
                     haltOnFailure=False,
                     flunkOnFailure=False,
                     description='Remove the source code',
                     workdir='.',
                     env=merged_env,
                     doStepIf=source_remove_requested))

    # Get the source code.
    f.addGetSourcecodeSteps(**kwargs)

    # Remove the build directory if requested.
    f.addStep(
        ShellCommand(name='clean-%s-dir' % f.obj_dir,
                     command=['rm', '-rf', f.obj_dir],
                     warnOnFailure=True,
                     haltOnFailure=False,
                     flunkOnFailure=False,
                     description='Remove build directory',
                     workdir='.',
                     env=merged_env,
                     doStepIf=clean_build_requested))

    if vs:
        # Configure MSVC environment if requested.
        f.addStep(
            SetPropertyFromCommand(
                command=builders_util.getVisualStudioEnvironment(vs, None),
                extract_fn=builders_util.extractVSEnvironment))
        # All subsequent steps run inside the captured VS environment.
        merged_env = Property('vs_env')

    # Since this is a build of a cross toolchain, we build only the host-side
    # tools first by the host system compiler. Libraries will be cross-compiled.
    # NOTE(review): the trailing comma after append(...) makes this statement
    # a 1-tuple expression — harmless, but likely unintended.
    cmake_args.append(
        InterpolateToPosixPath('-DLLVM_AR=%(builddir)s/' + f.obj_dir +
                               '/bin/llvm-ar.exe')),
    CmakeCommand.applyDefaultOptions(cmake_args, [
        ('-G', 'Ninja'),
        ('-DLLVM_ENABLE_PROJECTS=', 'llvm;clang;clang-tools-extra;lld'),
        ('-DCMAKE_BUILD_TYPE=', 'Release'),
        ('-DCMAKE_CXX_FLAGS=', '-D__OPTIMIZE__'),
        ('-DLLVM_ENABLE_ASSERTIONS=', 'ON'),
        ('-DLLVM_LIT_ARGS=', '-v -vv'),
    ])

    if install_dir:
        install_dir_rel = LLVMBuildFactory.pathRelativeTo(
            install_dir, f.obj_dir)
        CmakeCommand.applyRequiredOptions(cmake_args, [
            ('-DCMAKE_INSTALL_PREFIX=', install_dir_rel),
        ])

        # Remove the install directory if requested.
        f.addStep(
            ShellCommand(name='clean-%s-dir' % install_dir,
                         command=['rm', '-rf', install_dir],
                         warnOnFailure=True,
                         haltOnFailure=False,
                         flunkOnFailure=False,
                         description='Remove install directory',
                         workdir='.',
                         env=merged_env,
                         doStepIf=clean_build_requested))

    src_dir_rel = LLVMBuildFactory.pathRelativeTo(f.llvm_srcdir, f.obj_dir)

    # Add given cmake cache at the very end.
    if cmake_cache:
        cmake_args.append('-C%s' % cmake_cache)

    f.addStep(
        CmakeCommand(
            name="cmake-configure",
            haltOnFailure=True,
            description=["Cmake", "configure"],
            options=cmake_args,
            path=src_dir_rel,
            workdir=f.obj_dir,
            env=merged_env,
            **kwargs  # Pass through all the extra arguments.
        ))

    f.addStep(
        NinjaCommand(
            name="build-%s" % f.monorepo_dir,
            haltOnFailure=True,
            description=["Build", f.monorepo_dir],
            workdir=f.obj_dir,
            env=merged_env,
            **kwargs  # Pass through all the extra arguments.
        ))

    # Test the components if requested one check at a time.
    for check in checks:
        f.addStep(
            LitTestCommand(
                haltOnFailure=False,  # We want to test as much as we could.
                name='test-%s' % check,
                command=["ninja", WithProperties(check)],
                description=[
                    "Testing", "just", "built", "components", "for", check
                ],
                descriptionDone=[
                    "Test", "just", "built", "components", "for", check,
                    "completed"
                ],
                env=merged_env,
                **kwargs  # Pass through all the extra arguments.
            ))

    # Run commands on a target if requested.
    if checks_on_target:
        for check, cmd in checks_on_target:
            f.addStep(
                LitTestCommand(
                    haltOnFailure=False,  # We want to test as much as we could.
                    name='test-%s' % check,
                    command=cmd,
                    description=[
                        "Testing", "just", "built", "components", "for", check
                    ],
                    descriptionDone=[
                        "Test", "just", "built", "components", "for", check,
                        "completed"
                    ],
                    env=merged_env,
                    **kwargs  # Pass through all the extra arguments.
                ))

    # Install just built components
    if install_dir:
        f.addStep(
            NinjaCommand(
                name="install-all",
                haltOnFailure=True,
                targets=["install"],
                description=["Install", "just", "built", "components"],
                workdir=f.obj_dir,
                env=merged_env,
                **kwargs  # Pass through all the extra arguments.
            ))

    return f
def ros_branch_build(c, job_name, packages, url, branch, distro, arch,
                     rosdistro, machines, othermirror, keys):
    """Build Debian packages for *packages* from a specific branch of a ROS
    repository: checkout, changelog generation, bloom release/debian
    generation, then per-package checkout and changelog stamping.

    NOTE(review): this definition ends after the per-package download steps
    with no builder registration or return — it looks truncated in this
    view; confirm against the full file.
    """
    gbp_args = ['-uc', '-us', '--git-ignore-branch', '--git-ignore-new',
                '--git-verbose', '--git-dist=' + distro, '--git-arch=' + arch]
    f = BuildFactory()
    # Remove the build directory.
    f.addStep(
        RemoveDirectory(
            name=job_name + '-clean',
            dir=Interpolate('%(prop:workdir)s'),
            hideStepIf=success,
        )
    )
    # Pulling the repo
    f.addStep(
        Git(
            repourl=url,
            branch='HEAD',
            alwaysUseLatest=True,  # this avoids broken builds when schedulers send wrong tag/rev
            mode='full',  # clean out old versions
            getDescription={'tags': True}
        )
    )
    # Check out the repository branch/commit/tag
    f.addStep(
        ShellCommand(
            haltOnFailure=True,
            name='checkout: ' + branch,
            command=['git', 'checkout', branch],
        )
    )
    # get the short commit hash
    f.addStep(
        SetPropertyFromCommand(
            command="git rev-parse --short HEAD",
            property="commit_hash",
            name='commit-short-hash',
            hideStepIf=success
        )
    )
    # get the time stamp
    # NOTE(review): '%Y%M%d%H' uses %M (minute); %m (month) was probably
    # intended — confirm before relying on the stamp's ordering.
    f.addStep(
        SetPropertyFromCommand(
            command='date +%Y%M%d%H',
            property='date_stamp',
            name='date-stamp',
            hideStepIf=success
        )
    )
    # Update the cowbuilder
    f.addStep(
        ShellCommand(
            command=['cowbuilder-update.py', distro, arch] + keys,
            hideStepIf=success
        )
    )
    # Generate the changelog for the package
    f.addStep(
        ShellCommand(
            haltOnFailure=True,
            name='catkin_generate_changelog',
            command=['catkin_generate_changelog', '-y'],
            descriptionDone=['catkin_generate_changelog']
        )
    )
    # Add all files including untracked ones
    f.addStep(
        ShellCommand(
            haltOnFailure=True,
            name='add_changelogs',
            command=['git', 'add', '.'],
            descriptionDone=['add_changelogs']
        )
    )
    # Commit the changelog after updating it
    f.addStep(
        ShellCommand(
            haltOnFailure=True,
            name='update_changelogs',
            command=['git', 'commit', '-m', '\"Updated changelogs\"'],
            descriptionDone=['update_changelogs']
        )
    )
    # Prepare the release without pushing it
    # Set very big number to avoid conflicts with available tags
    f.addStep(
        ShellCommand(
            haltOnFailure=True,
            name='catkin_prepare_release',
            command=['catkin_prepare_release', '--version', '100.0.0',
                     '--no-push', '-y'],
            descriptionDone=['catkin_prepare_release']
        )
    )
    #
    f.addStep(
        ShellCommand(
            haltOnFailure=True,
            name='git_bloom_generate_release',
            command=['git-bloom-generate', '-y', 'rosrelease', rosdistro],
        )
    )
    f.addStep(
        ShellCommand(
            haltOnFailure=True,
            name='git_bloom_generate_debian',
            command=['git-bloom-generate', '-y', 'rosdebian', '-a', '-p',
                     'release', rosdistro],
        )
    )
    # Get the tag number for the lastest commit
    f.addStep(
        SetPropertyFromCommand(
            command="git describe --tags",
            property="release_version",
            name='latest_tag',
        )
    )
    # Need to build each package in order
    for package in packages:
        debian_pkg = 'ros-' + rosdistro + '-' + package.replace('_', '-')  # debian package name (ros-groovy-foo)
        branch_name = 'debian/' + debian_pkg + '_%(prop:release_version)s-0_' + distro
        deb_name = debian_pkg + '_%(prop:release_version)s-0' + distro
        final_name = debian_pkg + '_%(prop:release_version)s-%(prop:date_stamp)s-%(prop:commit_hash)s-' + distro + '_' + arch + '.deb'
        # Check out the proper tag. Use --force to delete changes from
        # previous deb stamping.
        f.addStep(
            ShellCommand(
                haltOnFailure=True,
                name=package + '-checkout',
                command=['git', 'checkout', Interpolate(branch_name),
                         '--force'],
                hideStepIf=success
            )
        )
        # Stamp the changelog
        f.addStep(
            ShellCommand(
                haltOnFailure=True,
                name=package + '-stampdeb',
                command=['gbp', 'dch', '-a', '--ignore-branch', '--verbose',
                         '-N',
                         Interpolate('%(prop:release_version)s-%(prop:date_stamp)s-%(prop:commit_hash)s-' + distro)],
                descriptionDone=['stamped changelog',
                                 Interpolate('%(prop:release_version)s'),
                                 Interpolate('%(prop:date_stamp)s'),
                                 Interpolate('%(prop:commit_hash)s')]
            )
        )
        # download hooks
        f.addStep(
            FileDownload(
                name=package + '-grab-hooks',
                mastersrc='hooks/D05deps',
                slavedest=Interpolate('%(prop:workdir)s/hooks/D05deps'),
                hideStepIf=success,
                mode=0777  # make this executable for the cowbuilder
            )
        )
        # Download script for building the binary deb
        f.addStep(
            FileDownload(
                name=job_name + '-grab-build-binary-deb-script',
                mastersrc='scripts/build_binary_deb.py',
                slavedest=Interpolate('%(prop:workdir)s/build_binary_deb.py'),
                mode=0755,
                hideStepIf=success
            )
        )
def functional(configurator, options, buildout_slave_path, environ=()):
    """Reserve a port, start openerp, launch testing commands, stop openerp.

    Available manifest file options:

      :functional.commands: whitespace separated list of scripts to launch.
                            Each of them must accept two arguments: ``port``
                            and ``db_name``
      :functional.parts: buildout parts to install to get the commands to
                         work
      :functional.wait: time (in seconds) to wait for the server to be
                        ready for functional testing after starting up
                        (defaults to 30s)
    """
    steps = []

    # Install any extra buildout parts the functional commands rely on.
    buildout_parts = options.get('functional.parts', '').split()
    if buildout_parts:
        steps.append(ShellCommand(
            command=['bin/buildout', '-c', buildout_slave_path,
                     WithProperties('buildout:eggs-directory='
                                    '%(builddir)s/../buildout-caches/eggs'),
                     'install'] + buildout_parts,
            name="functional tools",
            description=['install', 'functional', 'buildout', 'parts'],
            descriptionDone=['installed', 'functional', 'buildout', 'parts'],
            haltOnFailure=True,
            env=environ,
        ))

    # Reserve a free HTTP port; the result is stored in 'openerp_port'.
    steps.append(FileDownload(
        mastersrc=os.path.join(BUILD_UTILS_PATH, 'port_reserve.py'),
        slavedest='port_reserve.py'))
    steps.append(SetPropertyFromCommand(
        property='openerp_port',
        description=['Port', 'reservation'],
        locks=[port_lock.access('exclusive')],
        command=['python', 'port_reserve.py', '--port-min=9069',
                 '--port-max=11069', '--step=5']))

    # Remove leftovers from a previous run.
    steps.append(ShellCommand(
        command=['rm', '-f', WithProperties('%(workdir)s/openerp.pid')],
        name='cleanup',
        description='clean pid file',
        descriptionDone='cleaned pid file',
        haltOnFailure=True,
        env=environ,
    ))
    steps.append(ShellCommand(command=['rm', '-f', 'server-functional.log'],
                              name="Log cleanup",
                              descriptionDone=['Cleaned', 'logs'],
                              ))

    # Start the server in the background on the reserved port.
    buildout_part = options.get('buildout-part', DEFAULT_BUILDOUT_PART)
    steps.append(ShellCommand(
        command=['/sbin/start-stop-daemon',
                 '--pidfile', WithProperties('%(workdir)s/openerp.pid'),
                 '--exec',
                 WithProperties(
                     '%(workdir)s/build/' + options.get(
                         'start-command', 'bin/start_' + buildout_part)),
                 '--background',
                 '--make-pidfile', '-v', '--start', '--',
                 '--xmlrpc-port', Property('openerp_port'),
                 WithProperties('--logfile=%(workdir)s/build/'
                                'server-functional.log')],
        name='start',
        description=['starting', 'application'],
        descriptionDone=['application', 'started'],
        haltOnFailure=True,
        env=environ,
    ))

    # Give the server time to finish starting up.
    steps.append(ShellCommand(
        description=['Wait'],
        command=['sleep', options.get('functional.wait', '30')]))

    # Run each functional command against the reserved port and testing db.
    # NOTE(review): options.get('functional.commands') is None when the
    # option is absent, which raises AttributeError on .split() —
    # presumably the option is mandatory; confirm with the manifest schema.
    steps.extend(ShellCommand(
        command=[cmd, Property('openerp_port'), Property('testing_db')],
        name=cmd.rsplit('/')[-1],
        description="running %s" % cmd,
        descriptionDone="ran %s" % cmd,
        flunkOnFailure=True,
        haltOnFailure=False,
        logfiles=dict(server='server-functional.log'),
        env=environ) for cmd in options.get('functional.commands').split())

    # Stop the server.
    # Bug fix: this step was named 'start' (copy-paste from the start step
    # above, yielding two identically-named steps) and its description had
    # a typo ('stoping').
    steps.append(ShellCommand(
        command=['/sbin/start-stop-daemon',
                 '--pidfile', WithProperties('%(workdir)s/openerp.pid'),
                 '--stop', '--oknodo', '--retry', '5'],
        name='stop',
        description='stopping openerp',
        descriptionDone='openerp stopped',
        haltOnFailure=True,
        env=environ,
    ))
    return steps
def ros_branch_build(c, job_name, packages, url, branch, distro, arch,
                     rosdistro, machines, othermirror, keys,
                     trigger_pkgs=None):
    """Build Debian packages for *packages* from the given repository branch,
    bumping the minor version via catkin/bloom before building.

    NOTE(review): the visible body ends after the per-package source-deb
    download step with no builder registration or return — it looks
    truncated in this view; confirm against the full file. ``spec_list``
    is also unused in the visible span.
    """
    gbp_args = [
        '-uc', '-us', '--git-ignore-branch', '--git-ignore-new',
        '--git-verbose', '--git-dist=' + distro, '--git-arch=' + arch
    ]
    # Load the builder specification shipped next to this module.
    with open(os.path.dirname(os.path.realpath(__file__)) +
              "/spec.yaml") as file:
        spec_list = yaml.full_load(file)
    f = BuildFactory()
    # Remove the build directory.
    f.addStep(
        RemoveDirectory(
            name=job_name + '-clean',
            dir=Interpolate('%(prop:workdir)s'),
            hideStepIf=success,
        ))
    # Check out the repository master branch, since releases are tagged and not branched
    f.addStep(
        Git(
            repourl=url,
            branch=branch,
            alwaysUseLatest=True,  # this avoids broken builds when schedulers send wrong tag/rev
            mode='full',  # clean out old versions
            getDescription={'tags': True}))
    # Update the cowbuilder
    f.addStep(
        ShellCommand(command=['cowbuilder-update.py', distro, arch] + keys,
                     hideStepIf=success))
    # Generate the changelog for the package
    f.addStep(
        ShellCommand(haltOnFailure=True,
                     name='catkin_generate_changelog',
                     command=['catkin_generate_changelog', '-y'],
                     descriptionDone=['catkin_generate_changelog']))
    # Add all files including untracked ones
    f.addStep(
        ShellCommand(haltOnFailure=True,
                     name='add_changelogs',
                     command=['git', 'add', '.'],
                     descriptionDone=['add_changelogs']))
    # Commit the changelog after updating it
    f.addStep(
        ShellCommand(haltOnFailure=True,
                     name='update_changelogs',
                     command=['git', 'commit', '-m', '\"Updated changelogs\"'],
                     descriptionDone=['update_changelogs']))
    # Prepare the release without pushing it
    f.addStep(
        ShellCommand(haltOnFailure=True,
                     name='catkin_prepare_release',
                     command=[
                         'catkin_prepare_release', '--bump', 'minor',
                         '--no-push', '-y'
                     ],
                     descriptionDone=['catkin_prepare_release']))
    #
    f.addStep(
        ShellCommand(
            haltOnFailure=True,
            name='git_bloom_generate_release',
            command=['git-bloom-generate', '-y', 'rosrelease', rosdistro],
        ))
    f.addStep(
        ShellCommand(
            haltOnFailure=True,
            name='git_bloom_generate_debian',
            command=[
                'git-bloom-generate', '-y', 'rosdebian', '-a', '-p', 'release',
                rosdistro
            ],
        ))
    # Get the tag number for the lastest commit
    f.addStep(
        SetPropertyFromCommand(
            command="git describe --tags",
            property="release_version",
            name='latest_tag',
        ))
    # Need to build each package in order
    for package in packages:
        debian_pkg = 'ros-' + rosdistro + '-' + package.replace(
            '_', '-')  # debian package name (ros-groovy-foo)
        branch_name = 'debian/' + debian_pkg + '_%(prop:release_version)s-0_' + distro
        deb_name = debian_pkg + '_%(prop:release_version)s-0' + distro
        final_name = debian_pkg + '_%(prop:release_version)s-%(prop:datestamp)s' + distro + '_' + arch + '.deb'
        # Check out the proper tag. Use --force to delete changes from previous deb stamping
        f.addStep(
            ShellCommand(haltOnFailure=True,
                         name=package + '-checkout',
                         command=[
                             'git', 'checkout', Interpolate(branch_name),
                             '--force'
                         ],
                         hideStepIf=success))
        # A hack for generating the debian folder so we could build the lastest commit of the specified branch
        # f.addStep(
        #     ShellCommand(
        #         haltOnFailure = True,
        #         name = package+'-bloom_generate',
        #         command= ['bloom-generate', 'rosdebian'],
        #         descriptionDone = ['bloom_generate', package]
        #     )
        # )
        # Download script for building the source deb
        f.addStep(
            FileDownload(
                name=job_name + '-grab-build-source-deb-script',
                mastersrc='scripts/build_source_deb.py',
                slavedest=Interpolate('%(prop:workdir)s/build_source_deb.py'),
                mode=0755,
                hideStepIf=success))
setarch, "/usr/bin/python3", "tests/build_samples.py" ] changelog_msg = Interpolate("Automatic build %(prop:buildnumber)s by builder %(prop:buildername)s") # Build steps shared by all builders. build_steps = [ Git(config.git_url, getDescription={'match': 'v*'}), # Patch a bug in 1.10.7 ShellCommand(command=["sed", "-i", "s/\"version\": SDK\\[\"PYTHONVERSION\"\\]\\[6:\\],/\"version\": SDK[\"PYTHONVERSION\"][6:].rstrip('dmu'),/", "makepanda/makepandacore.py"]), # Decode the version number from the dtool/PandaVersion.pp file. SetPropertyFromCommand("version", command=[ "python3", "makepanda/getversion.py", buildtype_flag], haltOnFailure=True), # Delete the built dir, if requested. ShellCommand(name="clean", command=get_clean_command(), haltOnFailure=False, doStepIf=lambda step:step.getProperty("clean", False)), # These steps fill in properties used to determine upstream_version. ] + whl_version_steps + [ # Download the Dockerfile for this distribution. FileDownload(mastersrc=Interpolate("dockerfiles/%(prop:suite)s-%(prop:arch)s"), workerdest="Dockerfile", workdir="context"), # Make sure the base distribution is up-to-date. ShellCommand(command=cloudimg_cmd, workdir="context"),
def __init__(self, codebase='', **kwargs):
    """Step that captures ``git rev-parse HEAD`` into the ``got_revision``
    property, optionally keyed by *codebase*.
    """
    self.codebase = codebase
    rev_command = ["git", "rev-parse", "HEAD"]
    SetPropertyFromCommand.__init__(
        self,
        command=rev_command,
        property="got_revision",
        **kwargs)
def createTarballFactory(gerrit_repo):
    """Generates a build factory for a tarball generating builder.

    The factory records the build category, installs dependencies, checks
    the change out from Gerrit, runs autogen/configure/``make dist``,
    uploads the resulting tarball to the master and finally triggers the
    package builders.

    Returns:
        BuildFactory: Build factory with steps for generating tarballs.
    """
    factory = util.BuildFactory()

    steps = (
        # Are we building a tag or a patchset? Recorded as 'category'.
        SetProperty(
            property='category',
            value=buildCategory,
            hideStepIf=hide_except_error),
        # Bring the worker's build dependencies up to date.
        ShellCommand(
            command=dependencyCommand,
            decodeRC={0: SUCCESS, 1: FAILURE, 2: WARNINGS, 3: SKIPPED},
            haltOnFailure=True,
            logEnviron=False,
            doStepIf=do_step_installdeps,
            hideStepIf=hide_if_skipped,
            description=["installing dependencies"],
            descriptionDone=["installed dependencies"]),
        # Pull the patch from Gerrit.
        Gerrit(
            repourl=gerrit_repo,
            workdir="build/lustre",
            mode="full",
            method="fresh",
            retry=[60, 60],
            timeout=3600,
            logEnviron=False,
            getDescription=True,
            haltOnFailure=True,
            description=["cloning"],
            descriptionDone=["cloned"]),
        # Generate the build system, configure for dist, make the tarball.
        ShellCommand(
            command=['sh', './autogen.sh'],
            haltOnFailure=True,
            description=["autogen"],
            descriptionDone=["autogen"],
            workdir="build/lustre"),
        Configure(
            command=['./configure', '--enable-dist'],
            workdir="build/lustre"),
        ShellCommand(
            command=['make', 'dist'],
            haltOnFailure=True,
            description=["making dist"],
            descriptionDone=["make dist"],
            workdir="build/lustre"),
        # Capture the generated tarball's file name in the 'tarball' property.
        SetPropertyFromCommand(
            command=['sh', '-c', 'echo *.tar.gz'],
            property='tarball',
            workdir="build/lustre",
            hideStepIf=hide_except_error,
            haltOnFailure=True),
        # Upload it to the master.
        FileUpload(
            workdir="build/lustre",
            slavesrc=util.Interpolate("%(prop:tarball)s"),
            masterdest=tarballMasterDest,
            url=tarballUrl),
        # Trigger our builders to generate packages.
        Trigger(
            schedulerNames=["package-builders"],
            copy_properties=['tarball', 'category'],
            waitForFinish=False),
    )
    for step in steps:
        factory.addStep(step)
    return factory
# NOTE(review): fragment opens inside an f.addStep(ShellCommand(command=[...]))
# call whose beginning lies outside this view.
            rosdistro, package,
            Interpolate('%(prop:release_version)s'),
            Interpolate('%(prop:workdir)s')
        ] + gbp_args,
        descriptionDone=['sourcedeb', package]))

# Upload sourcedeb to master (currently we are not actually syncing these
# with a public repo)
f.addStep(
    FileUpload(
        name=package + '-uploadsource',
        slavesrc=Interpolate('%(prop:workdir)s/' + deb_name + '.dsc'),
        masterdest=Interpolate('sourcedebs/' + deb_name + '.dsc'),
        hideStepIf=success))

# Stamp the changelog, in a similar fashion to the ROS buildfarm:
# first capture a datestamp property from `date`, ...
f.addStep(
    SetPropertyFromCommand(command="date +%Y%m%d-%H%M-%z",
                           property="datestamp",
                           name=package + '-getstamp',
                           hideStepIf=success))
# ... then append it to the version with git-buildpackage's dch helper.
f.addStep(
    ShellCommand(
        haltOnFailure=True,
        name=package + '-stampdeb',
        command=[
            'gbp', 'dch', '-a', '--ignore-branch', '--verbose', '-N',
            Interpolate('%(prop:release_version)s-%(prop:datestamp)s' + distro)
        ],
        descriptionDone=[
            'stamped changelog',
            Interpolate('%(prop:release_version)s'),
            Interpolate('%(prop:datestamp)s')
        ]))
def createWindowsDevFactory():
    """Build factory for the Windows development build.

    Clones the repository, restores paket packages, stamps a build
    version into the 'buildPostfix' property, compiles the solution with
    msbuild, archives the binaries and uploads the archive to the master.

    NOTE(review): no ``return f`` is visible in this fragment -- the
    function may continue past this view.
    """
    f = BuildFactory()
    f.addStep(
        Git(
            description="fetching sources",
            descriptionDone="sources",
            haltOnFailure=True,
            repourl=repositoryUri,
            mode='full',
            method='clobber',
        ))
    # Restore package dependencies with paket.
    f.addStep(
        ShellCommand(description="fetching packages",
                     descriptionDone="packages",
                     haltOnFailure=True,
                     command=["paket.exe", "restore"],
                     workdir=workingDirectory))
    # Patch the version (base 0.1.4 plus the build number) and capture the
    # script's output in the 'buildPostfix' property for later file naming.
    f.addStep(
        SetPropertyFromCommand(description="setting version",
                               descriptionDone="version",
                               haltOnFailure=True,
                               command=[
                                   "racket",
                                   "c:\\build-tools\\patch-version.rkt", "-p",
                                   "windows", "-v", "0.1.4", "-b",
                                   Property("buildnumber")
                               ],
                               property="buildPostfix",
                               workdir=workingDirectory))
    f.addStep(
        ShellCommand(description="building",
                     descriptionDone="build",
                     haltOnFailure=True,
                     command=["msbuild", "CorvusAlba.ToyFactory.Windows.sln"],
                     workdir=workingDirectory))
    # Pack the build output into a versioned tarball.
    f.addStep(
        ShellCommand(
            description="archiving",
            descriptionDone="archive",
            haltOnFailure=True,
            command=[
                "tar", "-zcvf",
                Interpolate("toy-factory-%(prop:buildPostfix)s.tar.gz"),
                "../bin"
            ],
            workdir=workingDirectory))
    # Upload the tarball to the build master.
    # NOTE(review): '~' in masterdest is not shell-expanded; presumably the
    # master resolves it -- verify against the FileUpload step docs.
    f.addStep(
        FileUpload(
            description="uploading",
            descriptionDone="upload",
            haltOnFailure=True,
            mode=0644,  # NOTE: Python 2 octal literal (0o644 in Python 3)
            slavesrc=Interpolate("toy-factory-%(prop:buildPostfix)s.tar.gz"),
            masterdest=Interpolate(
                "~/builds/toy-factory-%(prop:buildPostfix)s.tar.gz"),
            workdir=workingDirectory))
# NOTE(review): fragment opens inside a call begun above this view,
# and is cut off mid-call at the end.
            props['buildnumber'])

# Steps to figure out which .whl version to use.
whl_version_steps = [
    # Get the point of last merge between this commit and master.
    # Buildbot is very selective about its fetches, so we need to make
    # sure we update the origin/master ref ourselves.
    ShellCommand(name="update-ref", command=[
        "git", "fetch", "origin",
        "+refs/heads/master:refs/remotes/origin/master"
    ]),

    # 'merge-base' property: last common commit with origin/master.
    SetPropertyFromCommand("merge-base", command=[
        "git", "merge-base", "origin/master", Property("got_revision")
    ], haltOnFailure=True),

    # Count the number of commits between the last release and the last merge.
    SetPropertyFromCommand("commit-index",
                           command=["git", "rev-list", "--count", refspec],
                           haltOnFailure=True),

    # Count the number of commits on the current branch.
    SetPropertyFromCommand(
        "divergence",
        command=[
            "git", "rev-list", "--count",
            Interpolate("%(prop:merge-base)s..%(prop:got_revision)s")
        ],
# NOTE(review): fragment opens inside a command list started above this view.
    "makepanda\\makepackage.py",
    "--verbose",
    "--lzma",
    "--version",
    Property("version"),
    "--outputdir",
    outputdir,
]

build_steps = [
    Git(config.git_url, getDescription={'match': 'v*'}),

    # Decode the version number from the dtool/PandaVersion.pp file.
    SetPropertyFromCommand("version", command=[
        get_python_executable("cp37-cp37m"),
        "makepanda/getversion.py",
        buildtype_flag
    ], haltOnFailure=True),

    # Delete the built dir, if requested ('clean' property); every failure
    # mode of the Windows rmdir is deliberately ignored.
    ShellCommand(name="clean",
                 command=["rmdir", "/S", "/Q", outputdir, outputdir_cp34],
                 haltOnFailure=False,
                 flunkOnFailure=False,
                 warnOnFailure=False,
                 flunkOnWarnings=False,
                 warnOnWarnings=False,
                 doStepIf=lambda step: step.getProperty("clean", False)),
]

# Shared steps that compute the .whl version properties.
build_steps += whl_version_steps