hideStepIf=success)) # Download script for building the source deb f.addStep( FileDownload( name=job_name + '-grab-build-source-deb-script', mastersrc='scripts/build_source_deb.py', slavedest=Interpolate('%(prop:workdir)s/build_source_deb.py'), mode=0755, hideStepIf=success)) # Build the source deb f.addStep( ShellCommand( haltOnFailure=True, name=package + '-buildsource', command=[ Interpolate('%(prop:workdir)s/build_source_deb.py'), rosdistro, package, Interpolate('%(prop:release_version)s') ] + gbp_args, descriptionDone=['sourcedeb', package])) # Upload sourcedeb to master (currently we are not actually syncing these with a public repo) f.addStep( FileUpload( name=package + '-uploadsource', slavesrc=Interpolate('%(prop:workdir)s/' + deb_name + '.dsc'), masterdest=Interpolate('sourcedebs/' + deb_name + '.dsc'), hideStepIf=success)) # Stamp the changelog, in a similar fashion to the ROS buildfarm f.addStep( SetPropertyFromCommand(command="date +%Y%m%d-%H%M-%z", property="datestamp",
def getLLDBRemoteTestSteps(f, bindir, build_type, remote_config, env):
    """Append steps that run the LLDB test suite against a remote target.

    Deploys lldb-server to the remote (via rsync for linux, adb for android),
    launches it in platform mode, runs the tests, then terminates the server
    and cleans the remote directory.

    Args:
        f: the BuildFactory to extend.
        bindir: binary directory, forwarded to getLLDBCmakeAndCompileSteps.
        build_type: build configuration, forwarded to the compile steps.
        remote_config: config object with test_archs, test_compilers,
            platform ('linux' or 'android') and host_arch attributes.
        env: environment dict applied to every remote step.

    Returns:
        The (possibly replaced) BuildFactory.
    """
    if None in (remote_config.test_archs, remote_config.test_compilers):
        return f
    # only supports linux and android as remote target at this time
    if remote_config.platform not in ('linux', 'android'):
        return f
    llvm_builddir = "build"
    stepDesc = remote_config.platform + "-" + remote_config.host_arch

    # get hostname of the slave so the remote can call back to it
    f.addStep(
        SetProperty(name="get hostname",
                    command=["hostname"],
                    property="slave_hostname",
                    description="set slave hostname",
                    workdir="."))

    # get configuration of remote target
    # config file should be placed under builddir on builder machine
    # file name: remote_cfg.json
    # content: json format with keys [remote_platform]-[remote_arch]
    # the value for each key defines "remote_host", "remote_port",
    # "remote_dir", "toolchain", "deviceId"
    # example: {"android-i386": {"remote_host":"localhost",
    #                            "remote_port":"5430",
    #                            "remote_dir":"/data/local/tmp/lldb",
    #                            "toolchain_build":"/home/lldb_build/Toolchains/i386-android-toolchain-21",
    #                            "toolchain_test":"/home/lldb_build/Toolchains/i386-android-toolchain-16",
    #                            "deviceid":"XXXXXXX"},
    def getRemoteCfg(rc, stdout, stderr):
        return json.loads(stdout)[stepDesc]

    f.addStep(
        SetProperty(name="get remote target " + stepDesc,
                    command="cat remote_cfg.json",
                    extract_fn=getRemoteCfg,
                    description="get remote target",
                    workdir="."))
    # Deploy lldb-server to the remote target.
    # BUGFIX: the original compared strings with `is`, which tests object
    # identity and only happens to work when CPython interns both literals.
    if remote_config.platform == 'linux':
        shellcmd = ['ssh', WithProperties('%(remote_host)s')]
        hostname = '%(slave_hostname)s'
        launchcmd = shellcmd + ['screen', '-d', '-m']
        terminatecmd = shellcmd + ['pkill', 'lldb-server']
        cleandircmd = WithProperties(
            'ssh %(remote_host)s rm -r %(remote_dir)s/*')
        f.addStep(
            ShellCommand(name="rsync lldb-server",
                         command=[
                             'rsync', '-havL', 'bin/lldb-server',
                             WithProperties('%(remote_host)s:%(remote_dir)s')
                         ],
                         description="rsync lldb-server " + stepDesc,
                         haltOnFailure=True,
                         env=env,
                         workdir='%s' % llvm_builddir))
        f.addStep(
            ShellCommand(name="rsync python2.7",
                         command=[
                             'rsync', '-havL', 'lib/python2.7',
                             WithProperties('%(remote_host)s:%(remote_dir)s')
                         ],
                         description="rsync python2.7 " + stepDesc,
                         haltOnFailure=True,
                         env=env,
                         workdir='%s' % llvm_builddir))
    elif remote_config.platform == 'android':
        shellcmd = ['adb', '-s', WithProperties('%(deviceid)s'), 'shell']
        hostname = '127.0.0.1'
        launchcmd = ['screen', '-d', '-m'] + shellcmd + [
            WithProperties("TMPDIR=%(remote_dir)s/tmp")
        ]
        terminatecmd = 'ps | grep lldb-server | awk \'{print $2}\' | xargs'
        terminatecmd = WithProperties('adb -s %(deviceid)s shell ' +
                                      terminatecmd +
                                      ' adb -s %(deviceid)s shell kill')
        cleandircmd = WithProperties(
            'adb -s %(deviceid)s shell rm -rf %(remote_dir)s/*')
        # compile lldb-server for target platform
        f = getLLDBCmakeAndCompileSteps(f, 'gcc', build_type, ['lldb-server'],
                                        bindir, remote_config.platform,
                                        remote_config.host_arch, env)
        f.addStep(
            ShellCommand(name="adb push lldb-server " + stepDesc,
                         command=[
                             'adb', '-s',
                             WithProperties('%(deviceid)s'), 'push',
                             remote_config.platform + '-' +
                             remote_config.host_arch + '/bin/lldb-server',
                             WithProperties('%(remote_dir)s/')
                         ],
                         description="lldb-server",
                         env=env,
                         haltOnFailure=True,
                         workdir='%s' % llvm_builddir))
        f.addStep(
            ShellCommand(name="Build fingerprint " + stepDesc,
                         command=[
                             'adb', '-s',
                             WithProperties('%(deviceid)s'), 'shell',
                             'getprop', 'ro.build.fingerprint'
                         ],
                         description="get build fingerprint",
                         env=env,
                         haltOnFailure=False,
                         workdir='%s' % llvm_builddir))
    # launch lldb-server
    f.addStep(
        ShellCommand(
            name="launch lldb-server " + stepDesc,
            command=launchcmd + [
                WithProperties('%(remote_dir)s/lldb-server'), 'platform',
                '--listen',
                WithProperties(hostname + ':%(remote_port)s'), '--server'
            ],
            description="launch lldb-server on remote host",
            env=env,
            haltOnFailure=True,
            workdir='%s' % llvm_builddir))
    # test steps
    f = getLLDBTestSteps(f, bindir, remote_config.test_archs,
                         remote_config.test_compilers, remote_config.platform,
                         '%(remote_host)s', '%(remote_port)s',
                         '%(remote_dir)s', env)
    # terminate lldb-server on remote host
    f.addStep(
        ShellCommand(name="terminate lldb-server " + stepDesc,
                     command=terminatecmd,
                     description="terminate lldb-server",
                     env=env,
                     workdir='%s' % llvm_builddir))
    # clean remote test directory
    f.addStep(
        ShellCommand(name="clean remote dir " + stepDesc,
                     command=cleandircmd,
                     description="clean remote dir",
                     env=env))
    return f
def getLLDBWindowsCMakeBuildFactory(
        clean=False,
        cmake='cmake',
        jobs="%(jobs)s",
        # Source directory containing a built python
        python_source_dir=r'C:/Python35',
        # Default values for VS devenv and build configuration
        vs=r"""%VS140COMNTOOLS%""",
        config='Release',
        target_arch='x86',
        extra_cmake_args=None,
        test=False,
        install=False):
    """Build (and optionally install/test) LLDB on Windows via CMake + Ninja.

    Args:
        clean: always remove the build dir when True; otherwise the build's
            "clean" property decides.
        cmake: currently unused — the configure step hard-codes "cmake";
            kept for interface compatibility.
        jobs: ninja parallelism (buildbot property interpolation string).
        python_source_dir: PYTHON_HOME passed to CMake.
        vs: VS tools environment variable used to set up the MSVC env.
        config: CMAKE_BUILD_TYPE value.
        target_arch: architecture given to the VS environment script.
        extra_cmake_args: optional extra arguments appended to the cmake line.
        test / install: truthy enables the respective step; the special value
            'ignoreFail' additionally keeps a failure from flunking the build.

    Returns:
        The configured BuildFactory.
    """
    ############# PREPARING
    f = buildbot.process.factory.BuildFactory()

    # Determine Slave Environment and Set MSVC environment.
    f.addStep(
        SetProperty(command=getVisualStudioEnvironment(vs, target_arch),
                    extract_fn=extractSlaveEnvironment))

    f = getLLDBSource(f, 'llvm')

    build_cmd = ['ninja']
    install_cmd = ['ninja', 'install']
    test_cmd = ['ninja', 'check-lldb']

    if jobs:
        build_cmd.append(WithProperties("-j%s" % jobs))
        install_cmd.append(WithProperties("-j%s" % jobs))
        test_cmd.append(WithProperties("-j%s" % jobs))

    # Global configurations
    build_dir = 'build'

    # get full path to build directory
    f.addStep(
        SetProperty(name="get_builddir",
                    command=["pwd"],
                    property="builddir",
                    description="set build dir",
                    workdir=build_dir))

    ############# CLEANING
    cleanBuildRequested = lambda step: step.build.getProperty("clean") or clean
    f.addStep(
        RemoveDirectory(name='clean ' + build_dir,
                        dir=build_dir,
                        haltOnFailure=False,
                        flunkOnFailure=False,
                        doStepIf=cleanBuildRequested))

    cmake_cmd = [
        "cmake", "-G", "Ninja", "../llvm", "-DCMAKE_BUILD_TYPE=" + config,
        "-DPYTHON_HOME=" + python_source_dir,
        "-DCMAKE_INSTALL_PREFIX=../install",
        "-DLLDB_TEST_COMPILER=\"%(builddir)s/bin/clang.exe\""
    ]
    if extra_cmake_args:
        cmake_cmd += extra_cmake_args

    # Note: ShellCommand does not pass the params with special symbols right.
    # The " ".join is a workaround for this bug.
    f.addStep(
        ShellCommand(name="cmake-configure",
                     description=["cmake configure"],
                     command=WithProperties(" ".join(cmake_cmd)),
                     haltOnFailure=True,
                     warnOnWarnings=True,
                     workdir=build_dir,
                     env=Property('slave_env')))

    f.addStep(
        WarningCountingShellCommand(name='build',
                                    command=build_cmd,
                                    haltOnFailure=True,
                                    description='ninja build',
                                    workdir=build_dir,
                                    env=Property('slave_env')))

    # Renamed from the misleading `ignoreInstallFail`: True here means a
    # failure DOES flunk the build (i.e. install != 'ignoreFail').
    flunkOnInstallFail = bool(install != 'ignoreFail')
    f.addStep(
        ShellCommand(name='install',
                     command=install_cmd,
                     flunkOnFailure=flunkOnInstallFail,
                     description='ninja install',
                     workdir=build_dir,
                     doStepIf=bool(install),
                     env=Property('slave_env')))

    # Same inversion fix as above: True means test failures flunk the build.
    flunkOnTestFail = bool(test != 'ignoreFail')
    f.addStep(
        ShellCommand(name='test',
                     command=test_cmd,
                     flunkOnFailure=flunkOnTestFail,
                     timeout=2400,
                     description='ninja test',
                     workdir=build_dir,
                     doStepIf=bool(test),
                     env=Property('slave_env')))

    return f
def cleanSVNSourceTree(f, srcdir='llvm'):
    """Append a step that purges ignored and unversioned files from an SVN
    checkout.

    The step runs `svn status --no-ignore`, keeps lines flagged I (ignored)
    or ? (unversioned), strips the status columns, and removes each path.

    Args:
        f: the BuildFactory to extend.
        srcdir: working directory of the SVN checkout (default 'llvm').

    Returns:
        The same BuildFactory, for chaining.
    """
    purge_script = ("svn status --no-ignore | grep '^[I?]' | cut -c 9- | "
                    "while IFS= read -r f; do echo \"$f\"; rm -rf \"$f\"; done")
    step_name = 'clean svn source %s' % srcdir
    f.addStep(
        ShellCommand(name=step_name,
                     command=purge_script,
                     description="clean SVN source tree",
                     workdir='%s' % srcdir))
    return f
def getLLDBBuildFactory(triple,
                        useTwoStage=False,
                        make='make',
                        jobs='%(jobs)s',
                        extra_configure_args=None,
                        env=None,
                        *args,
                        **kwargs):
    """Build and test LLDB with the autoconf build (llvm + clang checked out
    at the revision LLDB's build-llvm.pl pins).

    Args:
        triple: target triple passed as --build= to configure (optional).
        useTwoStage: accepted for interface compatibility; not used here.
        make: make binary to invoke.
        jobs: parallelism (buildbot property interpolation string).
        extra_configure_args: extra args appended to the configure command.
        env: environment dict for configure/compile/test steps.

    Returns:
        The configured BuildFactory.
    """
    # BUGFIX: the defaults were the mutable literals `[]` and `{}`, which are
    # shared across calls in Python; normalize from None instead.
    if extra_configure_args is None:
        extra_configure_args = []
    if env is None:
        env = {}

    llvm_srcdir = "llvm.src"
    llvm_objdir = "llvm.obj"

    f = buildbot.process.factory.BuildFactory()

    # Determine the build directory.
    f.addStep(
        SetProperty(name="get_builddir",
                    command=["pwd"],
                    property="builddir",
                    description="set build dir",
                    workdir="."))

    # Find out what version of llvm and clang are needed to build this version
    # of lldb. Right now we will assume they use the same version.
    # XXX - could this be done directly on the master instead of the slave?
    f.addStep(
        SetProperty(
            command=
            'svn cat http://llvm.org/svn/llvm-project/lldb/trunk/scripts/build-llvm.pl | grep ^our.*llvm_revision | cut -d \\" -f 2',
            property='llvmrev'))

    # The SVN build step provides no mechanism to check out a specific revision
    # based on a property, so just run the commands directly here.
    svn_co = ['svn', 'checkout', '--force']
    svn_co += ['--revision', WithProperties('%(llvmrev)s')]

    # build llvm svn checkout command
    svn_co_llvm = svn_co + \
        [WithProperties('http://llvm.org/svn/llvm-project/llvm/trunk@%(llvmrev)s'),
         llvm_srcdir]
    # build clang svn checkout command
    svn_co_clang = svn_co + \
        [WithProperties('http://llvm.org/svn/llvm-project/cfe/trunk@%(llvmrev)s'),
         '%s/tools/clang' % llvm_srcdir]

    f.addStep(
        ShellCommand(name='svn-llvm',
                     command=svn_co_llvm,
                     haltOnFailure=True,
                     workdir='.'))
    f.addStep(
        ShellCommand(name='svn-clang',
                     command=svn_co_clang,
                     haltOnFailure=True,
                     workdir='.'))
    f.addStep(
        SVN(name='svn-lldb',
            mode='update',
            baseURL='http://llvm.org/svn/llvm-project/lldb/',
            defaultBranch='trunk',
            always_purge=True,
            workdir='%s/tools/lldb' % llvm_srcdir))

    # Run configure
    config_args = [
        WithProperties("%%(builddir)s/%s/configure" % llvm_srcdir),
        "--disable-bindings",
        "--without-llvmgcc",
        "--without-llvmgxx",
    ]
    if triple:
        config_args += ['--build=%s' % triple]
    config_args += extra_configure_args

    f.addStep(
        Configure(name='configure',
                  command=config_args,
                  env=env,
                  workdir=llvm_objdir))

    f.addStep(
        WarningCountingShellCommand(
            name="compile",
            command=['nice', '-n', '10', make,
                     WithProperties("-j%s" % jobs)],
            env=env,
            haltOnFailure=True,
            workdir=llvm_objdir))

    # Test.
    f.addStep(
        LitTestCommand(name="test lldb",
                       command=['nice', '-n', '10', make],
                       description="test lldb",
                       env=env,
                       workdir='%s/tools/lldb/test' % llvm_objdir))

    return f
def createLLVMFactory(srcdir, builddir, installdir, test_install_dir):
    """Create a factory that checks out, builds, checks and installs LLVM
    (with clang), then runs the pocl build against a DESTDIR test install
    before performing the final install.

    Args:
        srcdir: LLVM source checkout directory.
        builddir: out-of-tree build directory.
        installdir: configure --prefix for the final install.
        test_install_dir: DESTDIR used for the intermediate test install.

    Returns:
        The configured BuildFactory.
    """
    fac = factory.BuildFactory()

    # Fetch LLVM trunk and clang (as tools/clang inside the LLVM tree).
    fac.addStep(
        SVN(name='svn-llvm',
            mode='update',
            baseURL='http://llvm.org/svn/llvm-project/llvm/',
            defaultBranch='trunk',
            workdir=srcdir))
    fac.addStep(
        SVN(name='svn-clang',
            mode='update',
            baseURL='http://llvm.org/svn/llvm-project/cfe/',
            defaultBranch='trunk',
            workdir='%s/tools/clang' % srcdir))

    # Configure an optimized, host-only, shared build.
    configure_cmd = [
        '%s/configure' % srcdir,
        '--prefix=' + installdir,
        '--enable-optimized',
        '--enable-targets=host',
        '--enable-shared',
    ]
    fac.addStep(
        ShellCommand(command=configure_cmd,
                     workdir=builddir,
                     haltOnFailure=True,
                     name="configure",
                     descriptionDone='configure',
                     description='configuring'))

    fac.addStep(
        ShellCommand(command=['make', '-j', '4'],
                     workdir=builddir,
                     haltOnFailure=True,
                     name="compile LLVM",
                     descriptionDone='compiled LLVM',
                     description='compiling LLVM'))

    fac.addStep(
        ShellCommand(command=['make', 'check'],
                     workdir=builddir,
                     name='LLVM check',
                     descriptionDone='checked LLVM',
                     haltOnFailure=True,
                     description='checking LLVM'))

    # Stage into a throwaway DESTDIR so pocl can be tested against this build.
    fac.addStep(
        ShellCommand(command=['make', 'install'],
                     env={'DESTDIR': test_install_dir},
                     workdir=builddir,
                     haltOnFailure=True,
                     name='install for test',
                     descriptionDone='install',
                     description='installing'))

    fac = createPoclFactory(llvm_dir=test_install_dir + installdir,
                            pedantic=False,
                            f=fac)

    # Real install into the configured prefix.
    fac.addStep(
        ShellCommand(command=['make', 'install'],
                     workdir=builddir,
                     haltOnFailure=True,
                     name='install final',
                     descriptionDone='install',
                     description='installing'))

    return fac
def createPackageBuildFactory():
    """Generate a build factory for a lustre tarball builder.

    Pipeline: fetch + unpack the source tarball, install dependencies,
    optionally build spl/zfs, configure and make lustre, collect the
    deliverables, build a package repo from them, upload it to the master,
    and finally clean the workdir.

    Returns:
        BuildFactory: Build factory with steps for a lustre tarball builder.
    """
    lustre_dir = "build/lustre"
    # Shared return-code mapping for the dependency/zfs helper scripts.
    rc_map = {0: SUCCESS, 1: FAILURE, 2: WARNINGS, 3: SKIPPED}

    fac = util.BuildFactory()

    # download our tarball and extract it
    fac.addStep(FileDownload(
        workdir=lustre_dir,
        slavedest=util.Interpolate("%(prop:tarball)s"),
        mastersrc=tarballMasterDest))
    fac.addStep(ShellCommand(
        workdir=lustre_dir,
        command=["tar", "-xvzf",
                 util.Interpolate("%(prop:tarball)s"),
                 "--strip-components=1"],
        haltOnFailure=True,
        logEnviron=False,
        lazylogfiles=True,
        description=["extracting tarball"],
        descriptionDone=["extract tarball"]))

    # update dependencies
    fac.addStep(ShellCommand(
        command=dependencyCommand,
        decodeRC=rc_map,
        haltOnFailure=True,
        logEnviron=False,
        doStepIf=do_step_installdeps,
        hideStepIf=hide_if_skipped,
        description=["installing dependencies"],
        descriptionDone=["installed dependencies"]))

    # build spl and zfs if necessary
    fac.addStep(ShellCommand(
        command=buildzfsCommand,
        decodeRC=rc_map,
        haltOnFailure=True,
        logEnviron=False,
        doStepIf=do_step_zfs,
        hideStepIf=hide_if_skipped,
        description=["building spl and zfs"],
        descriptionDone=["built spl and zfs"]))

    # Build Lustre
    fac.addStep(ShellCommand(
        workdir=lustre_dir,
        command=configureCmd,
        haltOnFailure=True,
        logEnviron=False,
        hideStepIf=hide_if_skipped,
        lazylogfiles=True,
        description=["configuring lustre"],
        descriptionDone=["configure lustre"]))
    fac.addStep(ShellCommand(
        workdir=lustre_dir,
        command=makeCmd,
        haltOnFailure=True,
        logEnviron=False,
        hideStepIf=hide_if_skipped,
        lazylogfiles=True,
        description=["making lustre"],
        descriptionDone=["make lustre"]))

    # Build Products
    fac.addStep(ShellCommand(
        workdir=lustre_dir,
        command=collectProductsCmd,
        haltOnFailure=True,
        logEnviron=False,
        doStepIf=do_step_collectpacks,
        hideStepIf=hide_if_skipped,
        lazylogfiles=True,
        description=["collect deliverables"],
        descriptionDone=["collected deliverables"]))

    # Build repo
    fac.addStep(ShellCommand(
        workdir="build/lustre/deliverables",
        command=buildRepoCmd,
        haltOnFailure=True,
        logEnviron=False,
        doStepIf=do_step_buildrepo,
        hideStepIf=hide_if_skipped,
        lazylogfiles=True,
        description=["building repo"],
        descriptionDone=["build repo"]))

    # Upload repo to master
    fac.addStep(DirectoryUpload(
        workdir=lustre_dir,
        doStepIf=do_step_collectpacks,
        hideStepIf=hide_if_skipped,
        slavesrc="deliverables",
        masterdest=repoMasterDest,
        url=repoUrl))

    # Cleanup
    fac.addStep(ShellCommand(
        workdir="build",
        command=["sh", "-c", "rm -rvf ./* /tmp/rpmbuild-*"],
        haltOnFailure=True,
        logEnviron=False,
        lazylogfiles=True,
        alwaysRun=True,
        description=["cleaning up"],
        descriptionDone=["clean up"]))

    return fac
# * from buildbot.process.factory import BuildFactory from buildbot.steps.source.github import GitHub from buildbot.steps.shell import ShellCommand from buildbot.steps.shell import Configure from master_includes import appendSchedulers from master_includes import appendBuilders # --- liblogging factory settings factoryLiblogging = BuildFactory() factoryLiblogging.addStep( GitHub(repourl=repoGitUrl, mode='full', retryFetch=True)) factoryLiblogging.addStep( ShellCommand(command=["autoreconf", "--force", "--verbose", "--install"])) factoryLiblogging.addStep( ShellCommand(command=["./configure", "--prefix=/usr"], logfiles={"config.log": "config.log"})) factoryLiblogging.addStep(ShellCommand(command=["make"])) factoryLiblogging.addStep( ShellCommand(command=["make", "check", "V=0"], logfiles={"test-suite.log": "tests/test-suite.log"}, lazylogfiles=True, maxTime=3600)) factoryLibloggingDebian = BuildFactory() factoryLibloggingDebian.addStep( GitHub(repourl=repoGitUrl, mode='full', retryFetch=True)) factoryLibloggingDebian.addStep( ShellCommand(command=["autoreconf", "--force", "--verbose", "--install"]))
def getOpenMPCMakeBuildFactory(
        jobs='%(jobs)s',  # Number of concurrent jobs.
        clean=True,  # "clean" step is requested if true
        env=None,  # Environmental variables for all steps.
        ompt=False,  # Whether to enable the OpenMP Tools Interface.
        test=True,  # Test the built libraries.
        depends_on_projects=None,
        **kwargs):
    """Create a monorepo-based factory that configures, builds and
    (optionally) tests the OpenMP runtime with CMake + Ninja.

    Returns the configured build factory.
    """
    # Prepare environmental variables. Set here all env we want everywhere.
    merged_env = {
        'TERM': 'dumb'  # Make sure Clang doesn't use color escape sequences.
    }
    # Overwrite pre-set items with the given ones, so user can set anything.
    if env is not None:
        merged_env.update(env)

    llvm_srcdir = 'llvm.src'
    llvm_builddir = 'llvm.build'

    cleanBuildRequested = lambda step: clean or step.build.getProperty(
        "clean", default=step.build.getProperty("clean_obj"))

    if depends_on_projects is None:
        # Monorepo configuration requires llvm and clang to get cmake work.
        depends_on_projects = ['llvm', 'clang', 'openmp']

    f = UnifiedTreeBuilder.getLLVMBuildFactoryAndSourcecodeSteps(
        depends_on_projects=depends_on_projects,
        llvm_srcdir=llvm_srcdir,
        obj_dir=llvm_builddir,
        cleanBuildRequested=cleanBuildRequested,
        env=merged_env,
        **kwargs)  # Pass through all the extra arguments.

    f.addStep(
        ShellCommand(name='clean',
                     command=['rm', '-rf', f.obj_dir],
                     warnOnFailure=True,
                     description=['clean'],
                     doStepIf=cleanBuildRequested,
                     workdir='.',
                     env=merged_env))

    # Configure LLVM and OpenMP (and Clang, if requested).
    cmake_args = ['cmake', '-G', 'Ninja']
    cmake_args += [
        '-DCMAKE_BUILD_TYPE=Release', '-DLLVM_ENABLE_ASSERTIONS=ON'
    ]
    if ompt:
        cmake_args += ['-DLIBOMP_OMPT_SUPPORT=ON']
    if test:
        lit_args = '-vv --show-unsupported --show-xfail -j %s' % jobs
        cmake_args += [WithProperties('-DLLVM_LIT_ARGS=%s' % lit_args)]

    CmakeCommand.applyRequiredOptions(cmake_args, [
        ('-DLLVM_ENABLE_PROJECTS=', ";".join(f.depends_on_projects)),
    ])

    # Add llvm-lit and clang (if built) to PATH
    merged_env.update({
        'PATH':
        WithProperties('%(workdir)s/' + llvm_builddir + '/bin:${PATH}')
    })

    src_dir = LLVMBuildFactory.pathRelativeTo(f.llvm_srcdir, f.obj_dir)

    f.addStep(
        CmakeCommand(
            name='configure-openmp',
            description=['configure', 'openmp'],
            options=cmake_args,
            path=src_dir,
            env=merged_env,
            workdir=f.obj_dir,
            haltOnFailure=True,
            **kwargs  # Pass through all the extra arguments.
        ))

    # Build OpenMP runtime libraries.
    f.addStep(
        NinjaCommand(name='compile-openmp',
                     description='compile openmp',
                     workdir=f.obj_dir,
                     env=merged_env,
                     haltOnFailure=True))

    # Test OpenMP runtime libraries, if requested.
    if test:
        # NOTE: the original repeated the identical merged_env PATH update
        # here; it was a no-op duplicate of the one above and was removed.
        ninja_test_args = ['ninja', WithProperties('-j %s' % jobs)]
        f.addStep(
            LitTestCommand(name='test-openmp',
                           command=ninja_test_args + ['check-openmp'],
                           description='test openmp',
                           workdir=f.obj_dir,
                           env=merged_env,
                           haltOnFailure=True))

    return f
from buildbot_config.settings.settings import BRANCH, PROJECT_NAME, PROJECT_CODE_URL

# SECURITY NOTE(review): these are placeholder values, but real credentials
# would be interpolated into a shell command string below and hence visible
# in build logs and `ps` output — prefer svn's --config-option / netrc.
svn_username = '******'
svn_password = '******'

nickname = 'sqlite'
name = 'slave-%s' % nickname
builder_name = 'builder-%s' % nickname

# slave
# NOTE(review): BuildSlave/BuildFactory/ShellCommand are not imported in this
# fragment; presumably imported earlier in the master config.
slave = BuildSlave(name, "%spassword" % name)  # password derived from slave name

# builder: update the checkout, refresh the Python environment, then reset
# the database and run the test suite from the project directory.
factory = BuildFactory()
factory.addStep(
    ShellCommand(
        command=
        "svn update --username %s --password %s --trust-server-cert --non-interactive"
        % (svn_username, svn_password),
        workdir=PROJECT_CODE_URL))
# Pip install and update to environment which run this buildbot
factory.addStep(
    ShellCommand(command=[
        "pip", "install", "--upgrade", "--requirement=setup/requirements.txt"
    ],
                 workdir=PROJECT_CODE_URL))
factory.addStep(
    ShellCommand(command=["pip", "freeze"], workdir=PROJECT_CODE_URL))
factory.addStep(
    ShellCommand(command=["/bin/bash", "reset_db"], workdir=PROJECT_CODE_URL))
factory.addStep(
    ShellCommand(command=["/bin/bash", "runtests"], workdir=PROJECT_CODE_URL))
def get_cmake_step(link, type, options=None):
    """Create the SFML CMake-configure ShellCommand for one build variant.

    Args:
        link: 'static' -> static libs, anything else -> shared.
        type: 'debug' -> Debug build, anything else -> Release. (Shadows the
            builtin `type`; parameter name kept for caller compatibility.)
        options: optional list of feature flags; recognized values are
            'frameworks', 'newSDK', 'android', 'ios' and 'scan-build'.

    Returns:
        A configured ShellCommand step.
    """
    from buildbot.process.properties import Interpolate
    from buildbot.steps.shell import ShellCommand

    # BUGFIX: the default was the mutable literal `[]`, shared across calls.
    if options is None:
        options = []

    build_type = ('-DCMAKE_BUILD_TYPE=Debug'
                  if type == 'debug' else '-DCMAKE_BUILD_TYPE=Release')
    shared_libs = ('-DBUILD_SHARED_LIBS=FALSE'
                   if link == 'static' else '-DBUILD_SHARED_LIBS=TRUE')

    build_target = ''
    build_sdk = ''
    if 'frameworks' in options:
        build_frameworks = '-DSFML_BUILD_FRAMEWORKS=TRUE'
        suffix = [link, type, 'frameworks']
    else:
        build_frameworks = '-DSFML_BUILD_FRAMEWORKS=FALSE'
        suffix = [link, type]
    if 'newSDK' in options:
        # the SDK is set by CMake
        # NOTE(review): if 'newSDK' and 'android' are both given, the two -D
        # flags are concatenated into one malformed argument; they look
        # mutually exclusive in practice — confirm with callers.
        build_target += '-DCMAKE_OSX_DEPLOYMENT_TARGET=10.11'
        suffix.append('10.11')
    if 'android' in options:
        build_target += '-DANDROID_ABI=armeabi-v7a'
        build_sdk += '-DCMAKE_TOOLCHAIN_FILE=../cmake/toolchains/android.toolchain.cmake'
    if 'ios' in options:
        build_sdk += '-DCMAKE_TOOLCHAIN_FILE=../cmake/toolchains/iOS.toolchain.cmake'

    configure_command = [
        'cmake', '-G',
        Interpolate('%(prop:generator)s'), '-DSFML_BUILD_EXAMPLES=TRUE',
        Interpolate('-DCMAKE_INSTALL_PREFIX=%(prop:workdir)s/install'),
        Interpolate(
            '-DCMAKE_INSTALL_FRAMEWORK_PREFIX=%(prop:workdir)s/install/Library/Frameworks'
        ), build_type, shared_libs, build_frameworks, build_sdk,
        build_target, '..'
    ]

    if 'scan-build' in options:
        # Prefix the whole configure with the static analyzer wrapper.
        configure_command.insert(0, 'scan-build')

    return ShellCommand(
        name='cmake',
        description=['configuring'],
        descriptionSuffix=suffix,
        descriptionDone=['configure'],
        # Run for analyzer/mobile variants unconditionally; otherwise only on
        # osx builders (and skip static links there).
        doStepIf=lambda step: ('scan-build' in options) or
        ('android' in options) or ('ios' in options) or (
            ((not options) or ('osx' in step.build.getProperty('buildername')))
            and (link != 'static' or not ('osx' in step.build.getProperty(
                'buildername')))),
        hideStepIf=skipped,
        workdir=Interpolate('%(prop:workdir)s/build/build'),
        command=configure_command,
        env={
            'PATH': Interpolate('%(prop:toolchain_path)s%(prop:PATH)s'),
            'INCLUDE': Interpolate('%(prop:vc_include)s'),
            'LIB': Interpolate('%(prop:vc_lib)s'),
            'LIBPATH': Interpolate('%(prop:vc_libpath)s')
        },
        want_stdout=True,
        want_stderr=True,
        logEnviron=False)
def _getClangCMakeBuildFactory(
        clean=True,
        test=True,
        cmake='cmake',
        jobs=None,
        # VS tools environment variable if using MSVC. For example,
        # %VS120COMNTOOLS% selects the 2013 toolchain.
        vs=None,
        vs_target_arch='x86',
        # Multi-stage compilation
        useTwoStage=False,
        testStage1=True,
        stage1_config='Release',
        stage2_config='Release',
        # Test-suite
        runTestSuite=False,
        nt_flags=None,
        testsuite_flags=None,
        submitURL=None,
        testerName=None,
        # Environmental variables for all steps.
        env=None,
        extra_cmake_args=None,
        # Extra repositories
        checkout_clang_tools_extra=True,
        checkout_compiler_rt=True,
        checkout_lld=True,
        checkout_libcxx=False,
        checkout_test_suite=False,
        # Upload artifacts to Google Cloud Storage (for the llvmbisect tool)
        stage1_upload_directory=None,
        # Use a lower compression level to generate the build-cache package faster
        # default is 6 according to documentation
        xz_compression_factor=6,
        use_pixz_compression=False,
        # Triggers
        trigger_after_stage1=None):
    """Build (and optionally two-stage bootstrap, test, package and
    benchmark) Clang with CMake + Ninja.

    Stage 1 builds Clang with the host compiler; if `useTwoStage`, stage 2
    rebuilds Clang with the stage-1 compiler.  If `runTestSuite`, the LLVM
    test-suite is run under LNT using the newest built compiler.  Artifacts
    can be uploaded to GCS via `stage1_upload_directory`.
    Returns the configured LLVMBuildFactory.
    """
    ############# PREPARING
    # Normalize mutable defaults (lists/dicts must not be default literals).
    if nt_flags is None:
        nt_flags = []
    if testsuite_flags is None:
        testsuite_flags = []
    if env is None:
        env = {}
    if extra_cmake_args is None:
        extra_cmake_args = []
    if trigger_after_stage1 is None:
        trigger_after_stage1 = []

    # Clean either when the builder says so (clean=True) or when the build
    # carries a "clean" / "clean_obj" property.
    clean_build_requested = lambda step: \
        step.build.getProperty(
            "clean",
            default=step.build.getProperty("clean_obj")
        ) or clean

    # We *must* checkout at least Clang+LLVM
    depends_on_projects = ['llvm', 'clang']
    if checkout_clang_tools_extra:
        depends_on_projects.append('clang-tools-extra')
    if checkout_compiler_rt:
        depends_on_projects.append('compiler-rt')
    if checkout_lld:
        depends_on_projects.append('lld')
    if checkout_libcxx:
        depends_on_projects.append('libcxx')
        depends_on_projects.append('libcxxabi')
        depends_on_projects.append('libunwind')

    f = LLVMBuildFactory(depends_on_projects=depends_on_projects,
                         llvm_srcdir='llvm')

    # Checkout the latest code for LNT
    # and the test-suite separately. Le's do this first,
    # so we wouldn't poison got_revision property.
    if runTestSuite or checkout_test_suite:
        f.addGetSourcecodeForProject(project='lnt',
                                     src_dir='test/lnt',
                                     alwaysUseLatest=True)
        f.addGetSourcecodeForProject(project='test-suite',
                                     src_dir='test/test-suite',
                                     alwaysUseLatest=True)

    # Then get the LLVM source code revision this particular build is for.
    f.addGetSourcecodeSteps()

    # If jobs not defined, Ninja will choose a suitable value
    jobs_cmd = []
    lit_args = "'-v"
    if jobs is not None:
        jobs_cmd = ["-j" + str(jobs)]
        lit_args += " -j" + str(jobs) + "'"
    else:
        lit_args += "'"
    ninja_cmd = ['ninja'] + jobs_cmd
    ninja_install_cmd = ['ninja', 'install'] + jobs_cmd
    ninja_check_cmd = ['ninja', 'check-all'] + jobs_cmd

    # Global configurations
    stage1_build = 'stage1'
    stage1_install = 'stage1.install'
    stage2_build = 'stage2'
    stage2_install = 'stage2.install'

    # Set up VS environment, if appropriate.
    if vs:
        f.addStep(
            SetProperty(command=builders_util.getVisualStudioEnvironment(
                vs, vs_target_arch),
                        extract_fn=builders_util.extractSlaveEnvironment))
        assert not env, "Can't have custom builder env vars with VS"
        env = Property('slave_env')

    ############# CLEANING
    f.addStep(
        ShellCommand(name='clean stage 1',
                     command=['rm', '-rf', stage1_build],
                     warnOnFailure=True,
                     haltOnFailure=False,
                     flunkOnFailure=False,
                     description='cleaning stage 1',
                     descriptionDone='clean',
                     workdir='.',
                     doStepIf=clean_build_requested))

    ############# STAGE 1
    CmakeCommand.applyRequiredOptions(extra_cmake_args, [
        ('-DLLVM_ENABLE_PROJECTS=', ";".join(f.depends_on_projects)),
    ])
    rel_src_dir = LLVMBuildFactory.pathRelativeToBuild(f.llvm_srcdir,
                                                       stage1_build)
    # Configure only when build.ninja does not already exist (incremental).
    f.addStep(
        ShellCommand(
            name='cmake stage 1',
            command=[
                cmake, "-G", "Ninja", rel_src_dir,
                "-DCMAKE_BUILD_TYPE=" + stage1_config,
                "-DLLVM_ENABLE_ASSERTIONS=True",
                "-DLLVM_LIT_ARGS=" + lit_args,
                "-DCMAKE_INSTALL_PREFIX=../" + stage1_install
            ] + extra_cmake_args,
            haltOnFailure=True,
            description='cmake stage 1',
            workdir=stage1_build,
            doStepIf=FileDoesNotExist("build.ninja"),
            env=env))

    f.addStep(
        WarningCountingShellCommand(name='build stage 1',
                                    command=ninja_cmd,
                                    haltOnFailure=True,
                                    description='ninja all',
                                    workdir=stage1_build,
                                    env=env))

    if test and testStage1:
        # Only halt on a stage-1 check failure when nothing later would run.
        haltOnStage1Check = not useTwoStage and not runTestSuite
        f.addStep(
            LitTestCommand(name='ninja check 1',
                           command=ninja_check_cmd,
                           haltOnFailure=haltOnStage1Check,
                           description=["checking stage 1"],
                           descriptionDone=["stage 1 checked"],
                           workdir=stage1_build,
                           env=env))

    if useTwoStage or runTestSuite or stage1_upload_directory:
        f.addStep(
            ShellCommand(name='clean stage 1 install',
                         command=['rm', '-rf', stage1_install],
                         warnOnFailure=True,
                         haltOnFailure=False,
                         flunkOnFailure=False,
                         description='cleaning stage 1 install',
                         descriptionDone='clean',
                         workdir='.'))
        f.addStep(
            ShellCommand(name='install stage 1',
                         command=ninja_install_cmd,
                         description='ninja install',
                         workdir=stage1_build,
                         env=env))

    if stage1_upload_directory:
        addGCSUploadSteps(f,
                          'stage 1',
                          stage1_install,
                          stage1_upload_directory,
                          env,
                          gcs_url_property='stage1_package_gcs_url',
                          use_pixz_compression=use_pixz_compression,
                          xz_compression_factor=xz_compression_factor)

    # Compute the cmake define flag to set the C and C++ compiler to clang. Use
    # clang-cl if we used MSVC for stage1.
    if not vs:
        cc = 'clang'
        cxx = 'clang++'
    else:
        cc = 'clang-cl.exe'
        cxx = 'clang-cl.exe'

    ############# STAGE 2
    if useTwoStage:
        # We always cleanly build the stage 2. If the compiler has been
        # changed on the stage 1, we cannot trust any of the intermediate file
        # from the old compiler. And if the stage 1 compiler is the same, we
        # should not build in the first place.
        f.addStep(
            ShellCommand(name='clean stage 2',
                         command=['rm', '-rf', stage2_build],
                         warnOnFailure=True,
                         description='cleaning stage 2',
                         descriptionDone='clean',
                         workdir='.'))

        # Set the compiler using the CC and CXX environment variables to work around
        # backslash string escaping bugs somewhere between buildbot and cmake. The
        # env.exe helper is required to run the tests, so hopefully it's already on
        # PATH.
        rel_src_dir = LLVMBuildFactory.pathRelativeToBuild(
            f.llvm_srcdir, stage2_build)
        cmake_cmd2 = [
            'env',
            WithProperties('CC=%(workdir)s/' + stage1_install + '/bin/' + cc),
            WithProperties('CXX=%(workdir)s/' + stage1_install + '/bin/' +
                           cxx), cmake, "-G", "Ninja", rel_src_dir,
            "-DCMAKE_BUILD_TYPE=" + stage2_config,
            "-DLLVM_ENABLE_ASSERTIONS=True", "-DLLVM_LIT_ARGS=" + lit_args,
            "-DCMAKE_INSTALL_PREFIX=../" + stage2_install
        ] + extra_cmake_args

        f.addStep(
            ShellCommand(name='cmake stage 2',
                         command=cmake_cmd2,
                         haltOnFailure=True,
                         description='cmake stage 2',
                         workdir=stage2_build,
                         env=env))

        f.addStep(
            WarningCountingShellCommand(name='build stage 2',
                                        command=ninja_cmd,
                                        haltOnFailure=True,
                                        description='ninja all',
                                        workdir=stage2_build,
                                        env=env))

        if test:
            f.addStep(
                LitTestCommand(name='ninja check 2',
                               command=ninja_check_cmd,
                               haltOnFailure=not runTestSuite,
                               description=["checking stage 2"],
                               descriptionDone=["stage 2 checked"],
                               workdir=stage2_build,
                               env=env))

    ############# TEST SUITE
    ## Test-Suite (stage 2 if built, stage 1 otherwise)
    if runTestSuite:
        compiler_path = stage1_install
        if useTwoStage:
            compiler_path = stage2_install
            f.addStep(
                ShellCommand(name='clean stage 2 install',
                             command=['rm', '-rf', stage2_install],
                             warnOnFailure=True,
                             description='cleaning stage 2 install',
                             descriptionDone='clean',
                             workdir='.'))
            f.addStep(
                ShellCommand(name='install stage 2',
                             command=ninja_install_cmd,
                             description='ninja install 2',
                             workdir=stage2_build,
                             env=env))

        # Get generated python, lnt
        python = WithProperties('%(workdir)s/test/sandbox/bin/python')
        lnt = WithProperties('%(workdir)s/test/sandbox/bin/lnt')
        lnt_setup = WithProperties('%(workdir)s/test/lnt/setup.py')

        # Paths
        sandbox = WithProperties('%(workdir)s/test/sandbox')
        test_suite_dir = WithProperties('%(workdir)s/test/test-suite')

        # Get latest built Clang (stage1 or stage2)
        cc = WithProperties('%(workdir)s/' + compiler_path + '/bin/' + cc)
        cxx = WithProperties('%(workdir)s/' + compiler_path + '/bin/' + cxx)

        # LNT Command line (don't pass -jN. Users need to pass both --threads
        # and --build-threads in nt_flags/test_suite_flags to get the same effect)
        use_runtest_testsuite = len(nt_flags) == 0
        if not use_runtest_testsuite:
            test_suite_cmd = [
                python, lnt, 'runtest', 'nt', '--no-timestamp', '--sandbox',
                sandbox, '--test-suite', test_suite_dir, '--cc', cc, '--cxx',
                cxx
            ]
            # Append any option provided by the user
            test_suite_cmd.extend(nt_flags)
        else:
            lit = WithProperties('%(workdir)s/' + stage1_build +
                                 '/bin/llvm-lit')
            test_suite_cmd = [
                python, lnt, 'runtest', 'test-suite', '--no-timestamp',
                '--sandbox', sandbox, '--test-suite', test_suite_dir, '--cc',
                cc, '--cxx', cxx, '--use-lit', lit
            ]
            # Append any option provided by the user
            test_suite_cmd.extend(testsuite_flags)

        # Only submit if a URL has been specified
        if submitURL is not None:
            if not isinstance(submitURL, list):
                submitURL = [submitURL]
            for url in submitURL:
                test_suite_cmd.extend(['--submit', url])

        # lnt runtest test-suite doesn't understand --no-machdep-info:
        if testerName and not use_runtest_testsuite:
            test_suite_cmd.extend(['--no-machdep-info', testerName])

        # CC and CXX are needed as env for build-tools
        test_suite_env = copy.deepcopy(env)
        test_suite_env['CC'] = cc
        test_suite_env['CXX'] = cxx

        # Steps to prepare, build and run LNT
        f.addStep(
            ShellCommand(name='clean sandbox',
                         command=['rm', '-rf', 'sandbox'],
                         haltOnFailure=True,
                         description='removing sandbox directory',
                         workdir='test',
                         env=env))
        f.addStep(
            ShellCommand(name='recreate sandbox',
                         command=['virtualenv', 'sandbox'],
                         haltOnFailure=True,
                         description='recreating sandbox',
                         workdir='test',
                         env=env))
        f.addStep(
            ShellCommand(name='setup lit',
                         command=[python, lnt_setup, 'develop'],
                         haltOnFailure=True,
                         description='setting up LNT in sandbox',
                         workdir='test/sandbox',
                         env=env))
        f.addStep(
            LitTestCommand(name='test-suite',
                           command=test_suite_cmd,
                           haltOnFailure=True,
                           description=['running the test suite'],
                           workdir='test/sandbox',
                           logfiles={
                               'configure.log': 'build/configure.log',
                               'build-tools.log': 'build/build-tools.log',
                               'test.log': 'build/test.log',
                               'report.json': 'build/report.json'
                           },
                           env=test_suite_env))

    return f
def addGCSUploadSteps(f,
                      package_name,
                      install_prefix,
                      gcs_directory,
                      env,
                      gcs_url_property=None,
                      use_pixz_compression=False,
                      xz_compression_factor=6):
    """ Add steps to upload to the Google Cloud Storage bucket.

    f - The BuildFactory to modify.
    package_name - The name of this package for the descriptions (e.g.
                   'stage 1')
    install_prefix - The directory the build has been installed to.
    gcs_directory - The subdirectory of the bucket root to upload to. This
                    should match the builder name.
    env - The environment to use. Set BOTO_CONFIG to use a configuration
          file in a non-standard location, and BUCKET to use a different GCS
          bucket.
    gcs_url_property - Property to assign the GCS url to.
    use_pixz_compression - Use parallel pixz instead of xz for packaging.
    xz_compression_factor - xz compression level (ignored with pixz).
    """
    gcs_url_fmt = ('gs://%(gcs_bucket)s/%(gcs_directory)s/'
                   'clang-r%(got_revision)s-t%(now)s-b%(buildnumber)s.tar.xz')
    time_fmt = '%Y-%m-%d_%H-%M-%S'
    output_file_name = '../install.tar.xz'

    gcs_url = \
        WithProperties(
            gcs_url_fmt,
            gcs_bucket=lambda _: env.get('BUCKET', 'llvm-build-artifacts'),
            gcs_directory=lambda _: gcs_directory,
            now=lambda _: datetime.utcnow().strftime(time_fmt))

    if gcs_url_property:
        f.addStep(
            SetProperty(name="record GCS url for " + package_name,
                        command=['echo', gcs_url],
                        property=gcs_url_property))

    if use_pixz_compression:
        # tweak the xz compression level to generate packages faster
        tar_command = ['tar', '-Ipixz', '-cvf', output_file_name, '.']
    else:
        xz_command = 'xz -{0}'.format(xz_compression_factor)
        tar_command = ['tar', '-I', xz_command, '-cvf', output_file_name, '.']

    f.addStep(
        ShellCommand(name='package ' + package_name,
                     command=tar_command,
                     description='packaging ' + package_name + '...',
                     workdir=install_prefix,
                     env=env))

    f.addStep(
        ShellCommand(
            name='upload ' + package_name + ' to storage bucket',
            # Reuse output_file_name (was a second hard-coded copy of the
            # same path).
            command=['gsutil', 'cp', output_file_name, gcs_url],
            # BUGFIX: the description was missing the space before 'to'.
            description=('uploading ' + package_name +
                         ' to storage bucket ...'),
            workdir=install_prefix,
            env=env))
def ros_debbuild(c, job_name, packages, url, distro, arch, rosdistro, version, machines, othermirror, keys, trigger_pkgs=None): gbp_args = [ '-uc', '-us', '--git-ignore-branch', '--git-ignore-new', '--git-verbose', '--git-dist=' + distro, '--git-arch=' + arch, '-j4' ] f = BuildFactory() # Remove the build directory. f.addStep( RemoveDirectory( name=job_name + '-clean', dir=Interpolate('%(prop:workdir)s'), hideStepIf=success, )) # Check out the repository master branch, since releases are tagged and not branched f.addStep( Git( repourl=url, branch='master', alwaysUseLatest= True, # this avoids broken builds when schedulers send wrong tag/rev mode='full' # clean out old versions )) # Update the cowbuilder f.addStep( ShellCommand(command=['cowbuilder-update.py', distro, arch] + keys, hideStepIf=success)) # Need to build each package in order for package in packages: debian_pkg = 'ros-' + rosdistro + '-' + package.replace( '_', '-') # debian package name (ros-groovy-foo) branch_name = 'debian/' + debian_pkg + '_%(prop:release_version)s_' + distro # release branch from bloom deb_name = debian_pkg + '_%(prop:release_version)s' + distro final_name = debian_pkg + '_%(prop:release_version)s-%(prop:datestamp)s' + distro + '_' + arch + '.deb' # Check out the proper tag. Use --force to delete changes from previous deb stamping f.addStep( ShellCommand(haltOnFailure=True, name=package + '-checkout', command=[ 'git', 'checkout', Interpolate(branch_name), '--force' ], hideStepIf=success)) # Download script for building the source deb f.addStep( FileDownload( name=job_name + '-grab-build-source-deb-script', mastersrc='scripts/build_source_deb.py', slavedest=Interpolate('%(prop:workdir)s/build_source_deb.py'), mode=0755, hideStepIf=success))
configOpts = configOpts + ['--disable-kernel-cache'] f.addStep(ShellCommand( command=["./configure"] + configOpts, haltOnFailure=True, name="configure pocl", env=myenviron, description="configureing", descriptionDone="configure")) f.addStep(Compile(env=myenviron )) if tests_dir!=None and not cmake: f.addStep(ShellCommand(command=["make", "prepare-examples"], haltOnFailure=True, name="prepare examples", env=myenviron, description="preparing", descriptionDone="prepare")) if tcedir: f.addStep(ShellCommand(command=["./tools/scripts/run_tta_tests"], haltOnFailure=True, name="checks", env=myenviron, description="testing", descriptionDone="tests", logfiles={"test.log": logfile}, timeout=60*60)) else: f.addStep(ShellCommand(command=["make", "check"],
def getLLDBuildFactory(
        clean = True,
        jobs = None,
        extra_configure_args = None,
        env = None):
    """Create a factory that checks out, cmake-configures, builds and
    tests LLD with make."""
    # Fill in defaults for the optional arguments.
    jobs = "%(jobs)s" if jobs is None else jobs
    configure_args = [] if extra_configure_args is None else extra_configure_args

    # Base environment; entries given by the caller win over these.
    # TERM=dumb: be cautious and disable color output from all tools.
    merged_env = {'CC': "clang", 'CXX': "clang++", 'TERM': 'dumb'}
    merged_env.update(env or {})

    f = LLVMBuildFactory(
            depends_on_projects=['llvm', 'lld'],
            llvm_srcdir="llvm.src",
            llvm_objdir="llvm.obj")

    # Check out LLVM and LLD.
    f.addSVNSteps()

    # Wipe the object directory when either the factory default or the
    # build's "clean" property asks for it.
    def cleanBuildRequested(step):
        return step.build.getProperty("clean") or clean

    f.addStep(RemoveDirectory(name='clean ' + f.llvm_objdir,
                              dir=f.llvm_objdir,
                              haltOnFailure=False,
                              flunkOnFailure=False,
                              doStepIf=cleanBuildRequested))

    # Configure with cmake, but only when no cache file exists yet.
    f.addStep(CmakeCommand(name="cmake-configure",
                           description=["cmake configure"],
                           haltOnFailure=True,
                           options=configure_args,
                           path="../%s" % f.llvm_srcdir,
                           env=merged_env,
                           workdir=f.llvm_objdir,
                           doStepIf=FileDoesNotExist(
                               "./%s/CMakeCache.txt" % f.llvm_objdir)))

    # Build LLD at a low scheduling priority.
    f.addStep(ShellCommand(name="build_Lld",
                           command=['nice', '-n', '10',
                                    'make',
                                    WithProperties("-j%s" % jobs)],
                           haltOnFailure=True,
                           description=["build lld"],
                           env=merged_env,
                           workdir=f.llvm_objdir))

    # Run the LLD test suite.
    f.addStep(ShellCommand(name="test_lld",
                           command=["make", "lld-test"],
                           haltOnFailure=True,
                           description=["test lld"],
                           env=merged_env,
                           workdir=f.llvm_objdir))
    return f
def createPoclFactory(
        environ={},
        repository='https://github.com/pocl/pocl.git',
        branch='master',
        buildICD=True,
        llvm_dir='/usr/',
        icd_dir='/usr/',
        tests_dir=None,
        config_opts='',
        pedantic=True,
        tcedir='',
        f=None,
        cmake=False,
        cache_dir=None
        ):
    """
    Create a buildbot factory object that builds pocl.

    environ     Dictionary: The environment variables to append to the build.
                PATH and LD_LIBRARY_PATH will be added from llvm_dir (if
                given).  NOTE: the mutable default is safe only because the
                dict is copied below and never mutated in place.
    repository  String: the repo to build from. defaults to pocl on github
    branch      String: the branch in 'repository' to build from. default to
                master
    buildICD    Bool: if false, the ICD extension is not built.
    llvm_dir    String: LLVM installation dir. I.e. without the 'bin/' or
                'lib/'.
    icd_dir     String: ICD loader installation dir. We expect here to be a
                ICD loader that understand the OCL_ICD_VENDORS parameter,
                i.e. ocl-icd or patched Khronos loader.
    tests_dir   String: Path where the extenral testsuite packages can be
                copied from. ('cp' is used, so they need to be on the same
                filesystem).  NOTE: currently only a placeholder - not tested
                on the public buildbot
    config_opts String: extra options to pass to ./configure
    cmake       Bool: use CMake instead of autotools to build pocl
    cache_dir   String: Set the pocl kernel cache to this dir. If not set,
                the kcache is disabled.
    """
    myenviron = environ.copy()

    # Prepend LLVM's bin/ and lib/ to the search paths.
    if 'PATH' in myenviron.keys():
        myenviron['PATH'] = llvm_dir+"/bin/:"+myenviron['PATH']+":${PATH}"
    else:
        myenviron['PATH'] = llvm_dir+"/bin/:${PATH}"
    if 'LD_LIBRARY_PATH' in myenviron.keys():
        # BUGFIX: this used to splice myenviron['PATH'] into LD_LIBRARY_PATH.
        myenviron['LD_LIBRARY_PATH'] = \
            llvm_dir+"/lib/:"+myenviron['LD_LIBRARY_PATH']+":${LD_LIBRARY_PATH}"
    else:
        myenviron['LD_LIBRARY_PATH'] = llvm_dir+"/lib/:${LD_LIBRARY_PATH}"

    # TCE tools, if a TCE installation was given.
    if tcedir:
        myenviron['PATH'] = tcedir+"/bin/:"+myenviron['PATH']
        myenviron['LD_LIBRARY_PATH'] = tcedir+"/lib/:"+myenviron['LD_LIBRARY_PATH']

    # The kernel cache is enabled only when an explicit cache dir is given.
    if cache_dir:
        myenviron['POCL_BUILD_KERNEL_CACHE'] = '1'
        myenviron['POCL_CACHE_DIR'] = cache_dir
    else:
        myenviron['POCL_BUILD_KERNEL_CACHE'] = '0'

    # The two build systems put the test-suite log in different places.
    if cmake:
        logfile = "Testing/Temporary/LastTest.log"
    else:
        logfile = "tests/testsuite.log"

    if f is None:
        f = factory.BuildFactory()

    f.addStep(
        Git(
            repourl=repository,
            mode=Property('git_mode'),
            ignore_ignores=True,
            branch=branch
            ))

    # clear last test round's kernel cache.
    # NB: if you run two slave builds on the same machine, this
    # will not work!
    if cache_dir:
        f.addStep(
            ShellCommand(
                command=['rm', '-rf', cache_dir],
                haltOnFailure=True,
                name='clean kcache',
                description='cleaning kcache',
                descriptionDone='cleaned kcache'
                ))

    if not cmake:
        f.addStep(ShellCommand(
                command=["./autogen.sh"],
                haltOnFailure=True,
                name="autoconfig",
                env=myenviron,
                description="autoconfiging",
                descriptionDone="autoconf"))

    if tests_dir is not None:
        # kludge around 'cp' always complaining if source is missing:
        # rc 1 is accepted as success.
        f.addStep(ShellCommand(
                haltOnFailure=True,
                command=["cp", "-u", tests_dir+AMD_test_pkg,
                         "examples/AMD/"+AMD_test_pkg],
                name="copy AMD",
                description="copying",
                descriptionDone="copied AMD",
                decodeRC={0: SUCCESS, 1: SUCCESS}
                ))
        f.addStep(ShellCommand(
                haltOnFailure=False,
                command=["cp", "-u", tests_dir+ViennaCL_test_pkg,
                         "examples/ViennaCL/"+ViennaCL_test_pkg],
                name="copy ViennaCL",
                description="copying",
                descriptionDone="copied ViennaCL",
                decodeRC={0: SUCCESS, 1: SUCCESS}
                ))

    if cmake:
        f.addStep(
            ShellCommand(
                command=["cmake", "."],
                env=myenviron,
                haltOnFailure=True,
                name="CMake",
                description="cmaking",
                descriptionDone="cmade"))
    else:
        configOpts = config_opts.split(' ')
        if pedantic:
            configOpts = configOpts + ['--enable-pedantic']
        if not buildICD:
            configOpts = configOpts + ['--disable-icd']
        # BUGFIX: this was 'if cache_dir=None:' (a syntax error).  Per the
        # docstring, the kernel cache is disabled when no cache dir is set.
        if not cache_dir:
            configOpts = configOpts + ['--disable-kernel-cache']
        f.addStep(ShellCommand(
                command=["./configure"] + configOpts,
                haltOnFailure=True,
                name="configure pocl",
                env=myenviron,
                description="configureing",
                descriptionDone="configure"))
def getHtmlDocsBuildFactory(
        depends_on_projects=None,
        clean=False,
        env=None,
        **kwargs):
    """Build the HTML (Sphinx) docs of each project and rsync them out."""
    # Default to the documentation of every known project.
    if depends_on_projects is None:
        _depends_on_projects = llvm_docs.keys()
    else:
        # Copy the list so the caller's argument is left untouched.
        _depends_on_projects = depends_on_projects[:]

    # Base environment for every step; the caller's entries override ours.
    # TERM=dumb: be cautious and disable color output from all tools.
    merged_env = {'TERM': 'dumb'}
    if env is not None:
        merged_env.update(env)

    # HTML Sphinx documentation builds in tree, each in its own directory.
    # For that, make sure the obj_dir is the same as llvm_srcdir.
    src_dir = kwargs.pop('llvm_srcdir', '.')
    f = factory.LLVMBuildFactory(
        clean=clean,
        depends_on_projects=_depends_on_projects,
        llvm_srcdir=src_dir,
        obj_dir=src_dir,
        **kwargs)  # Pass through all the extra arguments.

    for project in llvm_docs:
        # A fresh checkout guarantees no stale generated files sneak into
        # the newly built documentation.
        checkout_step = steps.Git(
            name='Checkout the {} source code'.format(project),
            repourl=f.repourl_prefix + "llvm-{}.git".format(project),
            mode='full',
            method='fresh',
            progress=True,
            workdir=util.WithProperties(project),
            env=merged_env,
            **kwargs)
        f.addStep(checkout_step)

        target, build_path, local_path, remote_path = llvm_docs[project]
        build_dir = util.WithProperties(
            "{}".format("/".join([project, build_path])))

        # Build the documentation target.
        build_step = steps.WarningCountingShellCommand(
            name="Build {} documentation".format(project),
            command=[build_cmd, target],
            haltOnFailure=True,
            workdir=build_dir,
            env=merged_env,
            **kwargs)
        f.addStep(build_step)

        # Publish just built documentation
        publish_step = ShellCommand(
            name="Publish {}".format(project),
            description=[
                "Publish", "just", "built", "documentation", "for",
                "{}".format(project)
            ],
            command=[
                'rsync',
                '-vrl',
                '--delete',
                '--force',
                '--delay-updates',
                '--delete-delay',
                '--ignore-times',
                '--checksum',
                '-p',
                '--chmod=Du=rwx,Dg=rwx,Do=rx,Fu=rw,Fg=rw,Fo=r',
                "{}".format(local_path),
                "lists.llvm.org:web/{}".format(remote_path),
            ],
            workdir=build_dir,
            env=merged_env,
        )
        f.addStep(publish_step)

    return f
def createTarballFactory(gerrit_repo):
    """ Generates a build factory for a tarball generating builder.
    Returns:
        BuildFactory: Build factory with steps for generating tarballs.
    """
    f = util.BuildFactory()

    # Record whether this build is for a tag or a patchset.
    f.addStep(SetProperty(
        property='category',
        value=buildCategory,
        hideStepIf=hide_except_error))

    # Bring the build dependencies up to date first.
    f.addStep(ShellCommand(
        command=dependencyCommand,
        decodeRC={0: SUCCESS, 1: FAILURE, 2: WARNINGS, 3: SKIPPED},
        haltOnFailure=True,
        logEnviron=False,
        doStepIf=do_step_installdeps,
        hideStepIf=hide_if_skipped,
        description=["installing dependencies"],
        descriptionDone=["installed dependencies"]))

    # Fetch the change under test from Gerrit.
    f.addStep(Gerrit(
        repourl=gerrit_repo,
        workdir="build/lustre",
        mode="full",
        method="fresh",
        retry=[60, 60],
        timeout=3600,
        logEnviron=False,
        getDescription=True,
        haltOnFailure=True,
        description=["cloning"],
        descriptionDone=["cloned"]))

    # Generate the configure script, then configure for 'make dist'.
    f.addStep(ShellCommand(
        command=['sh', './autogen.sh'],
        haltOnFailure=True,
        description=["autogen"],
        descriptionDone=["autogen"],
        workdir="build/lustre"))
    f.addStep(Configure(
        command=['./configure', '--enable-dist'],
        workdir="build/lustre"))

    # Roll the source tarball.
    f.addStep(ShellCommand(
        command=['make', 'dist'],
        haltOnFailure=True,
        description=["making dist"],
        descriptionDone=["make dist"],
        workdir="build/lustre"))

    # Remember the tarball's file name, then ship it to the master.
    f.addStep(SetPropertyFromCommand(
        command=['sh', '-c', 'echo *.tar.gz'],
        property='tarball',
        workdir="build/lustre",
        hideStepIf=hide_except_error,
        haltOnFailure=True))
    f.addStep(FileUpload(
        workdir="build/lustre",
        slavesrc=util.Interpolate("%(prop:tarball)s"),
        masterdest=tarballMasterDest,
        url=tarballUrl))

    # Kick off the package builders against the fresh tarball.
    f.addStep(Trigger(
        schedulerNames=["package-builders"],
        copy_properties=['tarball', 'category'],
        waitForFinish=False))

    return f
# Buildbot imports plus shared master helpers.
from buildbot.process.factory import BuildFactory
from buildbot.steps.source.github import GitHub
from buildbot.steps.shell import ShellCommand
from buildbot.steps.shell import Configure
from buildbot.plugins import schedulers, util
from buildbot.config import BuilderConfig
from buildbot.schedulers.basic import SingleBranchScheduler
from buildbot.schedulers.forcesched import ForceScheduler
from buildbot.changes import filter
from master_includes import appendSchedulers
from master_includes import appendBuilders

# standard build steps
#
# Shared "make check" step: runs the librelp test suite with verbose output
# and collects tests/test-suite.log; maxTime/timeout guard against hung tests.
# NOTE(review): the step name says "distcheck (gtls only)" although the
# command runs a plain "make check" -- confirm the name is intentional.
librelp_make_check=ShellCommand(command=["make", "-j4", "check", "VERBOSE=1"],
    env={'UNDER_CI':'YES'},
    logfiles={"test-suite.log": "tests/test-suite.log"},
    lazylogfiles=True,
    maxTime=3600,
    timeout=300,
    name="distcheck (gtls only)")
# Dump every individual test log (except librelp's own) to stdout so failures
# can be diagnosed from the waterfall; always exits 0 so it never fails a build.
librelp_gather_check_logs=ShellCommand(command=["bash", "-c", "ls -l tests; find . -name '*.log'; cat $(find $(find . -name tests) -name \"*.log\" ! -name \"librelp*\"); exit 0"],
    haltOnFailure=False,
    name="gather individual test logs")

# --- librelp default factory settings
# repoGitUrl is presumably defined earlier in the master config -- verify.
factoryLibrelp= BuildFactory()
factoryLibrelp.addStep(GitHub(repourl=repoGitUrl, mode='full', retryFetch=True))
# Kill any receiver processes left over from a previous (crashed) test run;
# exits 0 so a missing process does not fail the build.
factoryLibrelp.addStep(ShellCommand(command=["bash", "-c", "ps aux|grep receive; killall lt-receive; ps aux|grep receive; exit 0"], name="process cleanup"))
factoryLibrelp.addStep(ShellCommand(command=["autoreconf", "--force", "--verbose", "--install"], name="autoreconf", description="autoreconf running", descriptionDone="autoreconf done"))
# NO tls: build and test with both TLS backends disabled.
factoryLibrelp.addStep(ShellCommand(command=["./configure", "--disable-tls", "--disable-tls-openssl"], name="configure (no tls)", logfiles={"config.log": "config.log"}))
factoryLibrelp.addStep(ShellCommand(command=["make"], name="make"))
factoryLibrelp.addStep(librelp_make_check)
factoryLibrelp.addStep(librelp_gather_check_logs)
# gtls: rebuild from clean with GnuTLS enabled and OpenSSL disabled.
factoryLibrelp.addStep(ShellCommand(command=["make", "clean"], name="cleanup for next test"))
factoryLibrelp.addStep(ShellCommand(command=["./configure", "--enable-tls", "--disable-tls-openssl"], name="configure (gtls only)", logfiles={"config.log": "config.log"}))
def getPollyBuildFactory(clean=False,
                         install=False,
                         make='make',
                         jobs=None,
                         checkAll=False,
                         extraCmakeArgs=[]):
    """Check out, build and test LLVM+Clang+Polly from SVN with make."""
    llvm_srcdir = "llvm.src"
    llvm_objdir = "llvm.obj"
    llvm_instdir = "llvm.inst"
    polly_srcdir = '%s/tools/polly' % llvm_srcdir
    clang_srcdir = '%s/tools/clang' % llvm_srcdir

    # Every make invocation shares the same -j flag when one is given.
    jobs_cmd = [] if jobs is None else ["-j" + str(jobs)]
    build_cmd = [make] + jobs_cmd
    install_cmd = [make, 'install'] + jobs_cmd
    check_all_cmd = [make, 'check-all'] + jobs_cmd
    check_polly_cmd = [make, 'check-polly'] + jobs_cmd

    cmake_install = \
        ["-DCMAKE_INSTALL_PREFIX=../%s" % llvm_instdir] if install else []

    f = buildbot.process.factory.BuildFactory()
    # Determine the build directory.
    f.addStep(
        buildbot.steps.shell.SetProperty(name="get_builddir",
                                         command=["pwd"],
                                         property="builddir",
                                         description="set build dir",
                                         workdir="."))
    # Get LLVM, Clang and Polly from their SVN repositories.
    for step_name, repo_url, src_dir in (
            ('svn-llvm', 'http://llvm.org/svn/llvm-project/llvm/', llvm_srcdir),
            ('svn-clang', 'http://llvm.org/svn/llvm-project/cfe/', clang_srcdir),
            ('svn-polly', 'http://llvm.org/svn/llvm-project/polly/', polly_srcdir)):
        f.addStep(
            SVN(name=step_name,
                mode='update',
                baseURL=repo_url,
                defaultBranch='trunk',
                workdir=src_dir))
    # Start from a pristine build directory when asked to.
    if clean:
        f.addStep(
            ShellCommand(name='clean-build-dir',
                         command=['rm', '-rf', llvm_objdir],
                         warnOnFailure=True,
                         description=["clean build dir"],
                         workdir='.'))
    # Configure with cmake inside a dedicated object directory.
    f.addStep(
        ShellCommand(name="create-build-dir",
                     command=["mkdir", "-p", llvm_objdir],
                     haltOnFailure=False,
                     description=["create build dir"],
                     workdir="."))
    cmakeCommand = [
        "cmake", "../%s" % llvm_srcdir,
        "-DCMAKE_COLOR_MAKEFILE=OFF",
        "-DPOLLY_TEST_DISABLE_BAR=ON",
        "-DCMAKE_BUILD_TYPE=Release",
    ] + cmake_install + extraCmakeArgs
    f.addStep(
        ShellCommand(name="cmake-configure",
                     command=cmakeCommand,
                     haltOnFailure=False,
                     description=["cmake configure"],
                     workdir=llvm_objdir))
    # Compile.
    f.addStep(
        ShellCommand(name="build",
                     command=build_cmd,
                     haltOnFailure=True,
                     description=["build"],
                     workdir=llvm_objdir))
    # Install into a scratch prefix, from a clean dir when requested.
    if install and clean:
        f.addStep(
            ShellCommand(name='clean-install-dir',
                         command=['rm', '-rf', llvm_instdir],
                         haltOnFailure=False,
                         description=["clean install dir"],
                         workdir='.'))
    if install:
        f.addStep(
            ShellCommand(name="install",
                         command=install_cmd,
                         haltOnFailure=False,
                         description=["install"],
                         workdir=llvm_objdir))
    # Run either the full LLVM test suite or just the Polly tests.
    if checkAll:
        check_cmd, check_name, check_desc = check_all_cmd, "check_all", ["check all"]
    else:
        check_cmd, check_name, check_desc = check_polly_cmd, "check_polly", ["check polly"]
    f.addStep(
        ShellCommand(name=check_name,
                     command=check_cmd,
                     haltOnFailure=False,
                     description=check_desc,
                     workdir=llvm_objdir))
    return f
def packaging(configurator, options, buildout_slave_path, environ=()):
    """Post download steps for packaging, meant for hg-versioned buildouts.

    Extraction is made from src/ to dist/, then the buildout dir is renamed
    as build/ to let the testing proceed.  It ends by returning the new cfg
    file name : release.cfg

    This takes care of creating the tarball and preparing the buildmaster
    to get the upload.

    Side-effect: totally disables the 'auto-watch' option, so that the
    main testing, that runs on the extracted code, does not try to
    introspect live repos on it.

    Options:

    :packaging.root-dir: the root directory in master into which all artifacts
                         will be uploaded (possibly for several different
                         buildouts). The separation of this option is meant to
                         allow for different scopes (e.g, use only one
                         ``root_dir`` value for a whole ``MANIFEST.cfg`` file
                         in a ``DEFAULT`` section)
    :packaging.upload-dir: subdirectory, relative to ``packaging.root-dir``
                           where artifacts will be uploaded.
    :packaging.prefix: prefix for the artifact name. Tag name will be appended
                       to it.
    :packaging.parts: buildout parts to extract in the tarball.
    :packaging.base-url: URL corresponding to ``packaging.root-dir``, for
                         display in the waterfall.
    """
    # NOTE(review): 'environ' is unused in this function body -- confirm
    # whether it is still needed by callers.
    options['auto-watch'] = 'false'
    steps = []

    master_dir = os.path.join(options['packaging.root-dir'],
                              options['packaging.upload-dir'])
    # creation of upload dir beforehand to control perms
    # see https://openerp.anybox.fr/anytracker/anybox/ticket/2496
    # option -p of mkdir is set to avoid a failure if directory
    # already exists. It is NOT meant to create parent directories:
    # the -m option applies only to the innermost directory
    # (FIX: the two lines above had lost their leading '#' and were parsed
    # as code.)
    steps.append(
        MasterShellCommand(
            command=["mkdir", "-p", "-m", "755",
                     WithProperties(master_dir)],
            haltOnFailure=True,
            hideStepIf=True,
        ))

    steps.append(
        ShellCommand(command=['rm', '-rf', 'build', 'dist'],
                     description="cleaning",
                     workdir='.'))

    cache = '%(builddir)s/../buildout-caches'  # lame duplication
    eggs_cache = cache + '/eggs'
    openerp_cache = cache + '/openerp'
    archive_name_interp = options['packaging.prefix'] + '-%(buildout-tag)s'
    # Export the tagged source tree into dist/.
    steps.append(
        ShellCommand(command=[
            'hg', 'archive',
            WithProperties('../dist/' + archive_name_interp)
        ],
                     description=["Archive", "buildout"],
                     haltOnFailure=True,
                     workdir='./src'))

    parts = options.get('packaging.parts').split()
    steps.extend(
        configurator.steps_unibootstrap(
            buildout_slave_path,
            options,
            eggs_cache,
            workdir='./src',
            dump_options_to=WithProperties('../dist/' + archive_name_interp +
                                           '/bootstrap.ini')))
    # Run the buildout so its downloads are present for extraction.
    steps.append(
        ShellCommand(command=[
            'bin/buildout', '-c', buildout_slave_path,
            WithProperties('buildout:eggs-directory=' + eggs_cache),
            WithProperties('buildout:openerp-downloads-'
                           'directory=' + openerp_cache), 'install'
        ] + parts,
                     description=["buildout", "install"],
                     workdir='./src',
                     haltOnFailure=True))

    # Offline (-o) run that only extracts each part's downloads into dist/.
    extract_cmd = [
        'bin/buildout', '-o', '-c', buildout_slave_path,
        WithProperties('buildout:eggs-directory=' + eggs_cache),
        WithProperties('buildout:openerp-downloads-'
                       'directory=' + openerp_cache),
    ]
    extract_cmd.extend(
        WithProperties(('%s:extract-downloads-to=../dist/' % part) +
                       archive_name_interp) for part in parts)
    steps.append(
        ShellCommand(
            command=extract_cmd,
            description=["Extract", "buildout", "downloads"],
            workdir='./src',
            haltOnFailure=True,
        ))

    # Tarball + md5 of the extracted tree.
    steps.append(
        ShellCommand(command=[
            'tar', 'cjf',
            WithProperties(archive_name_interp + '.tar.bz2'),
            WithProperties(archive_name_interp)
        ],
                     description=["tar"],
                     haltOnFailure=True,
                     workdir='./dist'))
    steps.append(
        ShellCommand(
            command=WithProperties('md5sum ' + archive_name_interp +
                                   '.tar.bz2 > ' + archive_name_interp +
                                   '.tar.bz2.md5'),
            description=["md5"],
            warnOnFailure=False,
            workdir='./dist'))
    # Rename the buildout dir to build/ so the main testing can proceed.
    steps.append(
        ShellCommand(
            workdir='.',
            command=[
                'mv',
                WithProperties('./dist/' + archive_name_interp), 'build'
            ],
        ))
    return 'release.cfg', steps
def getLLDBCMakeBuildFactory(
        clean=False,
        jobs="%(jobs)s",
        # Source directory containing a built python
        python_source_dir=None,
        # Default values for VS devenv and build configuration
        vs=None,
        config='Release',
        target_arch='x86',
        extra_cmake_args=None,
        test=False,
        testTimeout=2400,
        install=False):
    """Ninja/CMake build of llvm+clang+lldb+lld with optional MSVC
    environment capture, install and check-lldb steps."""

    ############# PREPARING
    obj_dir = 'build'

    f = LLVMBuildFactory(
            depends_on_projects=["llvm", "clang", "lldb", "lld"],
            obj_dir=obj_dir)

    # On Windows, capture the MSVC environment of the requested VS version.
    if vs:
        f.addStep(SetProperty(
            command=getVisualStudioEnvironment(vs, target_arch),
            extract_fn=extractSlaveEnvironment))

    f.addGetSourcecodeSteps()

    # Every ninja invocation honours the same -j property when one is given.
    def ninja_cmd(*targets):
        cmd = ['ninja'] + list(targets)
        if jobs:
            cmd.append(WithProperties("-j%s" % jobs))
        return cmd

    build_cmd = ninja_cmd()
    install_cmd = ninja_cmd('install')
    test_cmd = ninja_cmd('check-lldb')

    ############# CLEANING
    def cleanBuildRequested(step):
        return clean or step.build.getProperty(
            "clean", default=step.build.getProperty("clean_obj"))

    f.addStep(RemoveDirectory(name='clean ' + obj_dir,
                              dir=obj_dir,
                              haltOnFailure=False,
                              flunkOnFailure=False,
                              doStepIf=cleanBuildRequested))

    rel_src_dir = LLVMBuildFactory.pathRelativeToBuild(f.llvm_srcdir, f.obj_dir)
    cmake_options = [
        "-G", "Ninja",
        "-DCMAKE_BUILD_TYPE=" + config,
        "-DCMAKE_INSTALL_PREFIX=../install",
        "-DLLVM_ENABLE_PROJECTS=%s" % ";".join(f.depends_on_projects),
    ]
    if python_source_dir:
        cmake_options.append("-DPYTHON_HOME=" + python_source_dir)
    if extra_cmake_args:
        cmake_options += extra_cmake_args

    f.addStep(CmakeCommand(name="cmake-configure",
                           description=["cmake configure"],
                           haltOnFailure=True,
                           options=cmake_options,
                           path=rel_src_dir,
                           env=Property('slave_env'),
                           workdir=obj_dir))

    f.addStep(WarningCountingShellCommand(name='build',
                                          command=build_cmd,
                                          haltOnFailure=True,
                                          description='ninja build',
                                          workdir=obj_dir,
                                          env=Property('slave_env')))

    # Passing install='ignoreFail' runs the step without flunking on failure.
    flunk_on_install_fail = bool(install != 'ignoreFail')
    f.addStep(ShellCommand(name='install',
                           command=install_cmd,
                           flunkOnFailure=flunk_on_install_fail,
                           description='ninja install',
                           workdir=obj_dir,
                           doStepIf=bool(install),
                           env=Property('slave_env')))

    # Likewise test='ignoreFail' runs check-lldb without flunking.
    flunk_on_test_fail = bool(test != 'ignoreFail')
    f.addStep(ShellCommand(name='test',
                           command=test_cmd,
                           flunkOnFailure=flunk_on_test_fail,
                           timeout=testTimeout,
                           description='ninja test',
                           workdir=obj_dir,
                           doStepIf=bool(test),
                           env=Property('slave_env')))

    return f
# Note that the *same* configuration objects are used for both runs of the
# master. This is a more strenuous test than strictly required, since a master
# will generally re-execute master.cfg on startup. However, it's good form and
# will help to flush out any bugs that may otherwise be difficult to find.
c = BuildmasterConfig = {}
c['workers'] = [Worker("local1", "localpw")]
c['protocols'] = {'pb': {'port': 'tcp:0'}}
# FIX: 'change_source' used to be assigned twice (an empty list immediately
# overwritten by a bare PBChangeSource()); keep the list form so it matches
# the other list-valued entries below.
c['change_source'] = [PBChangeSource()]
c['schedulers'] = []
c['schedulers'].append(
    AnyBranchScheduler(name="all",
                       change_filter=ChangeFilter(project_re='^testy/'),
                       treeStableTimer=1 * 60,
                       builderNames=[
                           'testy',
                       ]))
c['schedulers'].append(ForceScheduler(name="force", builderNames=["testy"]))
f1 = BuildFactory()
f1.addStep(ShellCommand(command='echo hi'))
c['builders'] = []
c['builders'].append(
    BuilderConfig(name="testy", workernames=["local1"], factory=f1))
c['status'] = []
c['title'] = "test"
c['titleURL'] = "test"
c['buildbotURL'] = "http://localhost:8010/"
c['db'] = {'db_url': "sqlite:///state.sqlite"}
def getLLDBTestSteps(f,
                     bindir,
                     test_archs,
                     test_compilers,
                     remote_platform=None,
                     remote_host=None,
                     remote_port=None,
                     remote_dir=None,
                     env=None):
    """Append an lldb dotest step for every (compiler, arch) combination.

    When remote_platform is set, tests run against a remote platform
    (lldb-server / adb URL); otherwise they run locally.  Returns f.
    """
    # Skip test steps if no test compiler or arch is specified
    if None in (test_archs, test_compilers):
        return f
    llvm_srcdir = "llvm"
    llvm_builddir = "build"
    if env is None:
        env = {}
    flunkTestFailure = True
    extraTestFlag = ''
    # TODO: for now, run tests with 8 threads and without mi tests on android
    # come back when those issues are addressed
    testenv = dict(env)
    for compiler in test_compilers:
        # find full path for top of tree clang
        if compiler == 'totclang':
            compilerPath = bindir + '/clang'
        # BUGFIX: was "remote_platform is 'android'"; identity ('is')
        # comparison against a string literal is unreliable -- use equality.
        elif remote_platform == 'android':
            compilerPath = os.path.join('%(toolchain_test)s', 'bin', compiler)
        else:
            compilerPath = compiler
        for arch in test_archs:
            DOTEST_OPTS = ''.join([
                '--executable ' + bindir + '/lldb ',
                '-A %s ' % arch,
                '-C %s ' % compilerPath,
                '-s lldb-test-traces-%s-%s ' % (compiler, arch),
                '-u CXXFLAGS ',
                '-u CFLAGS ',
                '--channel ',
                '"gdb-remote packets" ',
                '--channel ',
                '"lldb all"'
            ])
            testname = "local"
            if remote_platform is not None:
                urlStr = 'connect://%(remote_host)s:%(remote_port)s'
                # BUGFIX: same 'is' -> '==' change as above.
                if remote_platform == 'android':
                    # i386/x86_64 are the only android archs that are expected
                    # to pass at this time
                    flunkTestFailure = arch in ('i386', 'x86_64')
                    testenv['LLDB_TEST_THREADS'] = '8'
                    extraTestFlag = ' -m'
                    urlStr = 'adb://%(deviceid)s:%(remote_port)s'
                    # for Android, remove all forwarded ports before running test
                    # it is noticed that forwarded socket connections were not
                    # cleaned for certain crashed tests
                    # clean it here to avoid too many "LISTEN" ports left on slave
                    f.addStep(
                        ShellCommand(
                            name="remove port forwarding %s" % arch,
                            command=['adb', 'forward', '--remove-all'],
                            description="Remove port forwarding",
                            env=env,
                            haltOnFailure=False,
                            workdir='%s' % llvm_builddir))
                DOTEST_OPTS += ''.join([
                    ' --platform-name remote-' + remote_platform,
                    ' --platform-url ' + urlStr,
                    ' --platform-working-dir %(remote_dir)s',
                    ' --env OS=' + remote_platform.title()
                ])
                testname = "remote-" + remote_platform
            DOTEST_OPTS += extraTestFlag
            f.addStep(
                LitTestCommand(
                    name="test lldb %s (%s-%s)" % (testname, compiler, arch),
                    command=[
                        '../%s/tools/lldb/test/dosep.py' % llvm_srcdir,
                        '--options',
                        WithProperties(DOTEST_OPTS)
                    ],
                    description="test lldb",
                    parseSummaryOnly=True,
                    flunkOnFailure=flunkTestFailure,
                    warnOnFailure=flunkTestFailure,
                    workdir='%s' % llvm_builddir,
                    timeout=1800,
                    env=testenv))
    # Drop any test artifacts left in the SVN checkout.
    f = cleanSVNSourceTree(f, '%s/tools/lldb/test' % llvm_srcdir)
    return f
def getClangWithLTOBuildFactory(
        depends_on_projects = None,
        clean = False,
        jobs = None,
        extra_configure_args = None,
        compare_last_2_stages = True,
        lto = None, # The string gets passed to -flto flag as is. Like -flto=thin.
        env = None):
    """Multi-stage (bootstrap) clang build with LTO.

    Stage 0 builds with the system compiler, stage 1 builds the staged
    compiler (staged_compiler_idx == 1), and every later stage rebuilds
    with that staged compiler with LTO enabled.  Optionally compares the
    last two stages' clang binaries and, on a mismatch, their tablegen'ed
    .inc files.
    """
    # Set defaults
    if depends_on_projects:
        depends_on_projects = list(depends_on_projects)
    else:
        # By default we link with LLD.
        depends_on_projects = ['llvm', 'clang', 'lld']

    if lto is None:
        lto = 'ON'

    if jobs is None:
        jobs = "%(jobs)s"

    if extra_configure_args is None:
        extra_configure_args = []
    else:
        # Copy so later in-place applyRequiredOptions calls do not leak
        # back to the caller's list.
        extra_configure_args = list(extra_configure_args)

    # Make sure CMAKE_INSTALL_PREFIX and -G are not specified
    # in the extra_configure_args. We set them internally as needed.
    # TODO: assert extra_configure_args.
    install_prefix_specified = any(a.startswith('-DCMAKE_INSTALL_PREFIX=')
                                   for a in extra_configure_args)
    assert not install_prefix_specified, "Please do not explicitly specify the install prefix for multi-stage build."

    # Prepare environmental variables. Set here all env we want everywhere.
    merged_env = {
        'TERM' : 'dumb' # Be cautious and disable color output from all tools.
    }
    if env is not None:
        # Overwrite pre-set items with the given ones, so user can set anything.
        merged_env.update(env)

    f = LLVMBuildFactory(
            depends_on_projects=depends_on_projects,
            llvm_srcdir="llvm.src",
            stage_objdirs=[
                "build/stage1",
                "build/stage2",
                "build/stage3",
                "build/stage4",
            ],
            stage_installdirs=[
                "install/stage1",
                "install/stage2",
                "install/stage3",
                "install/stage4",
            ],
            staged_compiler_idx = 1)

    cleanBuildRequested = lambda step: step.build.getProperty("clean") or clean

    # Do a clean checkout if requested.
    f.addStep(RemoveDirectory(name='clean-src-dir',
                              dir=f.llvm_srcdir,
                              haltOnFailure=False,
                              flunkOnFailure=False,
                              doStepIf=cleanBuildRequested,
                              ))

    # Get the source code.
    f.addSVNSteps()

    # Build with the system compiler first
    _addSteps4SystemCompiler(f,
                             stage_idx=0,
                             clean=cleanBuildRequested,
                             jobs=jobs,
                             extra_configure_args=extra_configure_args,
                             env=merged_env)

    # Then build the compiler we would use for the bootstrap.
    _addSteps4StagedCompiler(f,
                             stage_idx=1,
                             jobs=jobs,
                             extra_configure_args=extra_configure_args,
                             env=merged_env)

    # Build all the remaining stages with exactly the same configuration.
    # NOTE: these mutate extra_configure_args AFTER stages 0/1 were
    # registered, so only the later stages get the LTO/LLD flags.
    CmakeCommand.applyRequiredOptions(extra_configure_args, [
        ('-DLLVM_ENABLE_LTO=', lto),
        ])

    # If we build LLD, we would link with LLD.
    # Otherwise we link with the system linker.
    if 'lld' in depends_on_projects:
        CmakeCommand.applyRequiredOptions(extra_configure_args, [
            ('-DLLVM_ENABLE_LLD=', 'ON'),
            ])

    # The rest are test stages, which depend on the staged compiler we are ultimately after.
    s = f.staged_compiler_idx + 1
    staged_install = f.stage_installdirs[f.staged_compiler_idx]
    for i in range(s, len(f.stage_objdirs[s:]) + s):
        configure_args = extra_configure_args[:]

        # Archive/ranlib with the staged compiler's llvm-ar/llvm-ranlib so
        # LTO bitcode objects are handled correctly.
        configure_args.append(
            WithProperties(
                "-DCMAKE_AR=%(workdir)s/" + staged_install + "/bin/llvm-ar"
            ))
        configure_args.append(
            WithProperties(
                "-DCMAKE_RANLIB=%(workdir)s/" + staged_install + "/bin/llvm-ranlib"
            ))

        _addSteps4StagedCompiler(f,
                                 stage_idx=i,
                                 use_stage_idx=f.staged_compiler_idx,
                                 jobs=jobs,
                                 extra_configure_args=configure_args,
                                 env=merged_env)

    if compare_last_2_stages:
        # Compare the compilers built on the last 2 stages if requested.
        diff_command = [
            "diff",
            "-q",
            f.stage_installdirs[-2] + "/bin/clang",
            f.stage_installdirs[-1] + "/bin/clang",
        ]
        f.addStep(
            ShellCommand(
                name="compare-compilers",
                description=[
                    "compare",
                    "stage%d" % (len(f.stage_installdirs)-1),
                    "and",
                    "stage%d" % len(f.stage_installdirs),
                    "compilers",
                    ],
                haltOnFailure=False,
                command=WithProperties(" ".join(diff_command)),
                workdir=".",
                env=merged_env
            )
        )

        # Only if the compare-compilers step has failed.
        def _prevStepFailed(step):
            # Inspect the result of the immediately preceding step.
            steps = step.build.getStatus().getSteps()
            prev_step = steps[-2]
            (result, _) = prev_step.getResults()
            return (result == FAILURE)

        dir1 = f.stage_objdirs[-2]
        dir2 = f.stage_objdirs[-1]
        inc_pattern = "-type f -not -name *.inc -printf '%f\n'"
        find_cmd = "find %s %s" % (dir1, dir2)
        diff_cmd = "diff -ru %s %s -x '*.tmp*' -X -" % (dir1, dir2)

        # Note: Use a string here as we want the command executed by a shell.
        diff_tablegen_inc_files_command = "%s %s | %s" % (find_cmd, inc_pattern, diff_cmd)

        f.addStep(
            ShellCommand(
                name="compare-tablegen-inc-files",
                description=[
                    "compare",
                    "stage%d" % (len(f.stage_installdirs)-1),
                    "and",
                    "stage%d" % len(f.stage_installdirs),
                    "Tablegen inc files",
                    ],
                command=diff_tablegen_inc_files_command,
                workdir=".",
                env=merged_env,
                doStepIf=_prevStepFailed,
            )
        )

    return f
def getLLDBUbuntuCMakeBuildFactory(build_compiler,
                                   build_type,
                                   local_test_archs=None,
                                   local_test_compilers=None,
                                   remote_configs=None,
                                   jobs='%(jobs)s',
                                   env=None):
    """Generate factory steps for an Ubuntu CMake builder.

    Arguments:
    build_compiler       -- compiler name as a string, e.g. 'clang'; the
                            compiler used to build binaries for the host
                            platform
    build_type           -- 'Debug' or 'Release'; defines the build type for
                            the host platform as well as any remote platform
    local_test_archs     -- list of architectures, e.g. ['i386', 'x86_64'],
                            to run local tests against; if None, local tests
                            are not executed
    local_test_compilers -- list of compilers, e.g. ['clang', 'gcc4.8.2'],
                            to run local tests with; if None, local tests
                            are not executed
    remote_configs       -- list of RemoteConfig objects, e.g.
                            [RemoteConfig(...)]; if None, remote tests are
                            not executed
    jobs                 -- number of threads for the compilation step,
                            e.g. 40; defaults to the jobs number defined
                            during slave creation
    env                  -- environment variables passed to shell commands
    """
    if env is None:
        env = {}

    llvm_srcdir = "llvm"
    llvm_builddir = "build"
    # Buildbot property pattern; expanded per build via WithProperties below.
    bindir = '%(builddir)s/' + llvm_builddir + '/bin'

    f = buildbot.process.factory.BuildFactory()

    # Determine the build directory.
    f.addStep(
        SetProperty(name="get_builddir",
                    command=["pwd"],
                    property="builddir",
                    description="set build dir",
                    workdir="."))
    # Determine the binary directory of *-tblgen.
    f.addStep(
        SetProperty(name="get tblgen dir",
                    command=["echo", WithProperties(bindir)],
                    property="tblgen_bindir",
                    description="set tblgen dir",
                    workdir="."))
    # Get the source code.
    f = getLLDBSource(f, llvm_srcdir)
    # Clean the build folder before configuring.
    f.addStep(
        ShellCommand(name="clean",
                     command="rm -rf *",
                     description="clear build folder",
                     env=env,
                     workdir='%s' % llvm_builddir))
    # Configure and compile for the host (linux/x86_64); no extra CMake args.
    f = getLLDBCmakeAndCompileSteps(f, build_compiler, build_type, [], bindir,
                                    'linux', 'x86_64', env)
    # TODO: it would be good to check that architectures listed in
    # local_test_archs are compatible with the host architecture.
    # For now, the caller of this function should make sure that each target
    # architecture is supported by the builder machine.

    # Add local test steps.
    f = getLLDBTestSteps(f, bindir, local_test_archs, local_test_compilers)
    # Remote test steps, one set per remote configuration.
    if remote_configs is not None:
        for config in remote_configs:
            f = getLLDBRemoteTestSteps(f, bindir, build_type, config, env)
    # Archive test traces.
    f = archiveLLDBTestTraces(f, "build/lldb-test-traces-*")
    return f
def getCmakeWithMSVCBuildFactory(
        clean=True,  # False for incremental builds.
        depends_on_projects=None,  # List of projects to listen.
        cmake_cache=None,  # Path to a cmake cache file.
        extra_configure_args=None,  # Extra CMake args if any.
        llvm_srcdir=None,  # Source code root directory.
        obj_dir=None,  # Build tree root directory.
        install_dir=None,  # Directory to install the results to.
        checks=None,  # List of checks to test the build.
        checks_on_target=None,  # [(<name>,[<command tokens>])] array of
        # check name and command to run on target.
        jobs=None,  # Restrict a degree of parallelism.
        env=None,  # Environmental variables for all steps.
        # VS tools environment variable if using MSVC.
        # For example, "autodetect" to auto detect, %VS140COMNTOOLS% to select
        # the VS 2015 toolchain, or empty string if environment is already set.
        vs=None,
        **kwargs):
    """Return a build factory that builds a cross toolchain with MSVC.

    Host-side tools are built first with the host system compiler; libraries
    are cross-compiled.  Only host-side checks run here; target-side commands
    may be supplied via checks_on_target.

    NOTE(review): the 'jobs' argument is accepted but not referenced anywhere
    in this function body -- confirm whether it should be forwarded to the
    Ninja steps.
    """
    if vs is None:
        # We autodetect Visual Studio, unless otherwise is requested.
        vs = "autodetect"

    if install_dir is None:
        install_dir = 'install'

    # Prepare environmental variables. Set here all env we want for all steps.
    merged_env = {
        'TERM': 'dumb'  # Make sure Clang doesn't use color escape sequences.
    }
    if env is not None:
        # NOTE(review): since vs defaults to "autodetect", a caller must pass
        # vs="" explicitly before it can supply custom env vars.
        assert not vs, "Cannot have custom builder env vars with VS setup."
        # Overwrite pre-set items with the given ones, so user can set anything.
        merged_env.update(env)

    # Make a local copy of the configure args, as we are going to modify that.
    if extra_configure_args:
        cmake_args = extra_configure_args[:]
    else:
        cmake_args = list()

    if depends_on_projects is None:
        depends_on_projects = [
            'llvm',
            'compiler-rt',
            'clang',
            'clang-tools-extra',
            'libunwind',
            'libcxx',
            'libcxxabi',
            'lld',
        ]

    if checks is None:
        # Check only host-side tools. Target-side tests should run on a target.
        checks = [
            "check-llvm",
            "check-clang",
            "check-lld",
        ]

    # The "clean" property forces a full source removal; a clean build is
    # requested either statically (clean=True) or via build properties.
    source_remove_requested = lambda step: step.build.getProperty("clean")
    clean_build_requested = lambda step: \
        clean or \
        step.build.getProperty("clean", \
            default=step.build.getProperty("clean_obj") \
        )

    f = LLVMBuildFactory(depends_on_projects=depends_on_projects,
                         llvm_srcdir=llvm_srcdir,
                         obj_dir=obj_dir,
                         install_dir=install_dir,
                         cleanBuildRequested=clean_build_requested,
                         **kwargs)  # Pass through all the extra arguments.

    # Remove the source code tree if requested.
    # NOTE: Somehow RemoveDirectory buildbot command often fails on Windows,
    # as something keeps a lock. We use an rm command instead, relying on a
    # shell to support that.
    f.addStep(
        ShellCommand(name='clean-%s-dir' % f.monorepo_dir,
                     command=['rm', '-rf', f.monorepo_dir],
                     warnOnFailure=True,
                     haltOnFailure=False,
                     flunkOnFailure=False,
                     description='Remove the source code',
                     workdir='.',
                     env=merged_env,
                     doStepIf=source_remove_requested))

    # Get the source code.
    f.addGetSourcecodeSteps(**kwargs)

    # Remove the build directory if requested.
    f.addStep(
        ShellCommand(name='clean-%s-dir' % f.obj_dir,
                     command=['rm', '-rf', f.obj_dir],
                     warnOnFailure=True,
                     haltOnFailure=False,
                     flunkOnFailure=False,
                     description='Remove build directory',
                     workdir='.',
                     env=merged_env,
                     doStepIf=clean_build_requested))

    if vs:
        # Configure MSVC environment if requested.
        f.addStep(
            SetPropertyFromCommand(
                command=builders_util.getVisualStudioEnvironment(vs, None),
                extract_fn=builders_util.extractVSEnvironment))
        merged_env = Property('vs_env')

    # Since this is a build of a cross toolchain, we build only the host-side
    # tools first by the host system compiler. Libraries will be cross-compiled.
    # FIX: the original statement carried a stray trailing comma, turning it
    # into a useless one-element tuple expression; the comma is removed.
    cmake_args.append(
        InterpolateToPosixPath('-DLLVM_AR=%(builddir)s/' + f.obj_dir +
                               '/bin/llvm-ar.exe'))

    CmakeCommand.applyDefaultOptions(cmake_args, [
        ('-G', 'Ninja'),
        ('-DLLVM_ENABLE_PROJECTS=', 'llvm;clang;clang-tools-extra;lld'),
        ('-DCMAKE_BUILD_TYPE=', 'Release'),
        ('-DCMAKE_CXX_FLAGS=', '-D__OPTIMIZE__'),
        ('-DLLVM_ENABLE_ASSERTIONS=', 'ON'),
        ('-DLLVM_LIT_ARGS=', '-v -vv'),
    ])

    if install_dir:
        install_dir_rel = LLVMBuildFactory.pathRelativeTo(
            install_dir, f.obj_dir)
        CmakeCommand.applyRequiredOptions(cmake_args, [
            ('-DCMAKE_INSTALL_PREFIX=', install_dir_rel),
        ])

        # Remove the install directory if requested.
        f.addStep(
            ShellCommand(name='clean-%s-dir' % install_dir,
                         command=['rm', '-rf', install_dir],
                         warnOnFailure=True,
                         haltOnFailure=False,
                         flunkOnFailure=False,
                         description='Remove install directory',
                         workdir='.',
                         env=merged_env,
                         doStepIf=clean_build_requested))

    src_dir_rel = LLVMBuildFactory.pathRelativeTo(f.llvm_srcdir, f.obj_dir)

    # Add given cmake cache at the very end.
    if cmake_cache:
        cmake_args.append('-C%s' % cmake_cache)

    f.addStep(
        CmakeCommand(
            name="cmake-configure",
            haltOnFailure=True,
            description=["Cmake", "configure"],
            options=cmake_args,
            path=src_dir_rel,
            workdir=f.obj_dir,
            env=merged_env,
            **kwargs  # Pass through all the extra arguments.
        ))

    f.addStep(
        NinjaCommand(
            name="build-%s" % f.monorepo_dir,
            haltOnFailure=True,
            description=["Build", f.monorepo_dir],
            workdir=f.obj_dir,
            env=merged_env,
            **kwargs  # Pass through all the extra arguments.
        ))

    # Test the components if requested, one check at a time.
    for check in checks:
        f.addStep(
            LitTestCommand(
                haltOnFailure=False,  # We want to test as much as we could.
                name='test-%s' % check,
                command=["ninja", WithProperties(check)],
                description=[
                    "Testing", "just", "built", "components", "for", check
                ],
                descriptionDone=[
                    "Test", "just", "built", "components", "for", check,
                    "completed"
                ],
                env=merged_env,
                **kwargs  # Pass through all the extra arguments.
            ))

    # Run commands on a target if requested.
    if checks_on_target:
        for check, cmd in checks_on_target:
            f.addStep(
                LitTestCommand(
                    haltOnFailure=False,  # We want to test as much as we could.
                    name='test-%s' % check,
                    command=cmd,
                    description=[
                        "Testing", "just", "built", "components", "for", check
                    ],
                    descriptionDone=[
                        "Test", "just", "built", "components", "for", check,
                        "completed"
                    ],
                    env=merged_env,
                    **kwargs  # Pass through all the extra arguments.
                ))

    # Install just built components.
    if install_dir:
        f.addStep(
            NinjaCommand(
                name="install-all",
                haltOnFailure=True,
                targets=["install"],
                description=["Install", "just", "built", "components"],
                workdir=f.obj_dir,
                env=merged_env,
                **kwargs  # Pass through all the extra arguments.
            ))

    return f
def getLLDBxcodebuildFactory(use_cc=None,
                             build_type='Debug',
                             remote_configs=None,
                             env=None):
    """Return a build factory that builds and tests LLDB via xcodebuild.

    Arguments:
    use_cc         -- path to the compiler used for the test suite; if None,
                      it is discovered via 'xcrun -find clang'
    build_type     -- xcodebuild configuration, e.g. 'Debug' or 'Release'
    remote_configs -- list of RemoteConfig objects; if None, remote tests
                      are not executed
    env            -- environment variables (accepted but not referenced in
                      this function body)
    """
    if env is None:
        env = {}
    f = buildbot.process.factory.BuildFactory()
    f.addStep(
        SetProperty(name='get_builddir',
                    command=['pwd'],
                    property='builddir',
                    description='set build dir',
                    workdir='.'))
    lldb_srcdir = 'lldb'
    OBJROOT = '%(builddir)s/' + lldb_srcdir + '/build'
    f.addStep(
        SetProperty(name='get_bindir',
                    command=[
                        'echo',
                        WithProperties('%(builddir)s/' + lldb_srcdir +
                                       '/build/' + build_type)
                    ],
                    property='lldb_bindir',
                    description='set bin dir',
                    workdir='.'))
    # Cleaning out the build directory is vital for codesigning.
    # NOTE(review): the step names below keep the historical 'buid' typo;
    # renaming them would change step identity on the status pages.
    f.addStep(
        ShellCommand(name='clean.lldb-buid',
                     command=['rm', '-rf', WithProperties(OBJROOT)],
                     haltOnFailure=True,
                     workdir=WithProperties('%(builddir)s')))
    f.addStep(
        ShellCommand(name='clean.llvm-buid',
                     command=['rm', '-rf',
                              '%s/llvm-build' % lldb_srcdir],
                     haltOnFailure=True,
                     workdir=WithProperties('%(builddir)s')))
    f.addStep(
        ShellCommand(name='clean.test trace',
                     command='rm -rf build/*',
                     haltOnFailure=False,
                     flunkOnFailure=False,
                     workdir='.'))
    # Remove the symbolic link to lldb, otherwise xcodebuild will have a
    # circular dependency.
    f.addStep(
        ShellCommand(name='remove symbolic link lldb',
                     command=['rm', lldb_srcdir + '/llvm/tools/lldb'],
                     haltOnFailure=False,
                     flunkOnFailure=False,
                     workdir=WithProperties('%(builddir)s')))
    f.addStep(
        SVN(name='svn-lldb',
            mode='update',
            baseURL='http://llvm.org/svn/llvm-project/lldb/',
            defaultBranch='trunk',
            workdir=lldb_srcdir))
    f.addStep(
        SVN(name='svn-llvm',
            mode='update',
            baseURL='http://llvm.org/svn/llvm-project/llvm/',
            defaultBranch='trunk',
            workdir='%s/llvm' % lldb_srcdir))
    f.addStep(
        SVN(name='svn-clang',
            mode='update',
            baseURL='http://llvm.org/svn/llvm-project/cfe/',
            defaultBranch='trunk',
            workdir='%s/llvm/tools/clang' % lldb_srcdir))
    # Set up the keychain for codesign.
    # In order for the codesigning to work inside of buildbot, security must be
    # called to unlock the keychain, which requires a password.
    # A special keychain is set up for this purpose, so as to not compromise
    # the login password of the buildslave.
    # This means the special keychain is set as the default and unlocked
    # prior to building the sources.
    f.addStep(
        ShellCommand(name='check.keychain',
                     command=['security', 'default-keychain'],
                     haltOnFailure=True,
                     workdir=WithProperties('%(builddir)s')))
    f.addStep(
        ShellCommand(
            name='find.certificate',
            command=['security', 'find-certificate', '-c', 'lldb_codesign'],
            haltOnFailure=True,
            workdir=WithProperties('%(builddir)s')))
    # Building the sources
    #
    buildcmd = ' '.join([
        'xcrun', 'xcodebuild', '-target', 'desktop', '-configuration',
        build_type, 'SYMROOT=' + OBJROOT, 'OBJROOT=' + OBJROOT
    ])
    # "cmd || cmd": retry the build once on failure.
    f.addStep(
        ShellCommand(name='lldb-build',
                     command=WithProperties(buildcmd + " || " + buildcmd),
                     haltOnFailure=True,
                     workdir=lldb_srcdir))
    # Testing
    #
    if not use_cc:
        use_cc = '/Applications/Xcode.app/Contents/Developer/Toolchains/'
        use_cc += 'XcodeDefault.xctoolchain/usr/bin/clang'
        f.addStep(
            SetProperty(name='set.cc',
                        command=['xcrun', '-find', 'clang'],
                        property='use_cc',
                        description='set cc',
                        workdir=lldb_srcdir))
    else:
        f.addStep(
            SetProperty(name='set.cc',
                        command=['echo', use_cc],
                        property='use_cc',
                        description='set cc',
                        workdir=lldb_srcdir))
    DOTEST_OPTS = ' '.join([
        '--executable', '%(lldb_bindir)s/lldb', '--framework',
        '%(lldb_bindir)s/LLDB.framework', '-A', 'x86_64', '-C', 'clang', '-s',
        '../../build/lldb-test-traces'
    ])
    f.addStep(
        LitTestCommand(
            name='lldb-test',
            command=['./dosep.py', '--options', WithProperties(DOTEST_OPTS)],
            haltOnFailure=False,
            workdir='%s/test' % lldb_srcdir,
            env={'DYLD_FRAMEWORK_PATH': WithProperties('%(lldb_bindir)s')}))
    # Remote test steps
    if remote_configs is not None:
        # Source structure to use cmake command.
        f.addStep(
            SetProperty(
                name='get tblgen bindir',
                command=[
                    'echo',
                    WithProperties(
                        '%(builddir)s/' + lldb_srcdir +
                        '/llvm-build/Release+Asserts/x86_64/Release+Asserts/bin'
                    )
                ],
                property='tblgen_bindir',
                description='set tblgen binaries dir',
                workdir='.'))
        f = getSymbLinkSteps(f, lldb_srcdir)
        for config in remote_configs:
            f = getLLDBRemoteTestSteps(
                f,
                '%(lldb_bindir)s',
                build_type,
                config,
                env={'DYLD_FRAMEWORK_PATH': WithProperties('%(lldb_bindir)s')})
    # Compress and upload the test log.
    f = archiveLLDBTestTraces(f, "build/lldb-test-traces*")
    # Results go in a directory named according to the date and time of the
    # test run, e.g.:
    #
    # 2012-10-16-11_26_48/Failure-x86_64-_Applications_Xcode.app_Contents_Developer_Toolchains_XcodeDefault.xctoolchain_usr_bin_clang-TestLogging.LogTestCase.test_with_dsym.log
    #
    # Possible results are ExpectedFailure, Failure, SkippedTest,
    # UnexpectedSuccess, and Error.
    # FIX: removed a duplicated, unreachable 'return f' that followed this one.
    return f
def getSphinxDocsBuildFactory(
        llvm_html=False,  # Build LLVM HTML documentation
        llvm_man=False,  # Build LLVM man pages
        clang_html=False,  # Build Clang HTML documentation
        clang_tools_html=False,  # Build Clang Extra Tools HTML documentation
        lld_html=False,  # Build LLD HTML documentation
        libcxx_html=False,  # Build Libc++ HTML documentation
        libunwind_html=False,  # Build libunwind HTML documentation
        lldb_html=False  # Build LLDB HTML documentation
):
    """Return a build factory that builds Sphinx documentation.

    Each boolean flag enables the checkout (where needed) and the
    corresponding Ninja docs target for one subproject.  All subprojects are
    checked out in-tree under a single LLVM working copy.
    """
    f = buildbot.process.factory.BuildFactory()

    llvm_srcdir = 'llvm/src'
    llvm_objdir = 'llvm/build'
    # In-tree locations of each subproject under the LLVM checkout.
    clang_srcdir = llvm_srcdir + '/tools/clang'
    clang_tools_srcdir = llvm_srcdir + '/tools/clang/tools/extra'
    lld_srcdir = llvm_srcdir + '/tools/lld'
    lldb_srcdir = llvm_srcdir + '/tools/lldb'
    libcxx_srcdir = llvm_srcdir + '/projects/libcxx'
    libcxxabi_srcdir = llvm_srcdir + '/projects/libcxxabi'
    libunwind_srcdir = llvm_srcdir + '/projects/libunwind'

    # Get LLVM. This is essential for all builds
    # because we build all subprojects in tree.
    f.addStep(
        SVN(name='svn-llvm',
            mode='update',
            baseURL='http://llvm.org/svn/llvm-project/llvm/',
            defaultBranch='trunk',
            workdir=llvm_srcdir))

    # NOTE(review): lld_html triggers a Clang checkout here -- confirm
    # whether that dependency is intentional (lldb_html does not).
    if clang_html or clang_tools_html or lld_html:
        f.addStep(
            SVN(name='svn-clang',
                mode='update',
                baseURL='http://llvm.org/svn/llvm-project/cfe/',
                defaultBranch='trunk',
                workdir=clang_srcdir))

    if clang_tools_html:
        f.addStep(
            SVN(name='svn-clang-tools',
                mode='update',
                baseURL='http://llvm.org/svn/llvm-project/clang-tools-extra/',
                defaultBranch='trunk',
                workdir=clang_tools_srcdir))

    if lld_html:
        f.addStep(
            SVN(name='svn-lld',
                mode='update',
                baseURL='http://llvm.org/svn/llvm-project/lld/',
                defaultBranch='trunk',
                workdir=lld_srcdir))

    if lldb_html:
        f.addStep(
            SVN(name='svn-lldb',
                mode='update',
                baseURL='http://llvm.org/svn/llvm-project/lldb/',
                defaultBranch='trunk',
                workdir=lldb_srcdir))

    if libcxx_html:
        # libcxx docs also need the libcxxabi sources checked out.
        f.addStep(
            SVN(name='svn-libcxx',
                mode='update',
                baseURL='http://llvm.org/svn/llvm-project/libcxx/',
                defaultBranch='trunk',
                workdir=libcxx_srcdir))
        f.addStep(
            SVN(name='svn-libcxxabi',
                mode='update',
                baseURL='http://llvm.org/svn/llvm-project/libcxxabi/',
                defaultBranch='trunk',
                workdir=libcxxabi_srcdir))

    if libunwind_html:
        f.addStep(
            SVN(name='svn-libunwind',
                mode='update',
                baseURL='http://llvm.org/svn/llvm-project/libunwind/',
                defaultBranch='trunk',
                workdir=libunwind_srcdir))

    # Use CMake to configure; the second WithProperties argument interpolates
    # the 'workdir' property into the '%s' placeholder.
    cmakeCommand = [
        "cmake",
        WithProperties('%s/' + llvm_srcdir, 'workdir'),
        '-G',
        'Ninja',
        '-DLLVM_ENABLE_SPHINX:BOOL=ON',
        '-DSPHINX_OUTPUT_HTML:BOOL=ON',
        '-DSPHINX_OUTPUT_MAN:BOOL=ON',
        '-DLLVM_TEMPORARILY_ALLOW_OLD_TOOLCHAIN=ON',
        '-DLLDB_INCLUDE_TESTS=OFF',
    ]
    f.addStep(
        ShellCommand(name="cmake-configure",
                     command=cmakeCommand,
                     description=["cmake configure"],
                     workdir=llvm_objdir))

    if llvm_html:
        f.addStep(
            NinjaCommand(name="docs-llvm-html",
                         haltOnFailure=True,
                         description=["Build LLVM Sphinx HTML documentation"],
                         workdir=llvm_objdir,
                         targets=['docs-llvm-html']))

    if llvm_man:
        f.addStep(
            NinjaCommand(name="docs-llvm-man",
                         haltOnFailure=True,
                         description=["Build LLVM Sphinx man pages"],
                         workdir=llvm_objdir,
                         targets=['docs-llvm-man']))

    if clang_html:
        f.addStep(
            NinjaCommand(name="docs-clang-html",
                         haltOnFailure=True,
                         description=["Build Clang Sphinx HTML documentation"],
                         workdir=llvm_objdir,
                         targets=['docs-clang-html']))

    if clang_tools_html:
        f.addStep(
            NinjaCommand(
                name="docs-clang-tools-html",
                haltOnFailure=True,
                description=[
                    "Build Clang Extra Tools Sphinx HTML documentation"
                ],
                workdir=llvm_objdir,
                targets=['docs-clang-tools-html']))

    if lld_html:
        f.addStep(
            NinjaCommand(name="docs-lld-html",
                         haltOnFailure=True,
                         description=["Build LLD Sphinx HTML documentation"],
                         workdir=llvm_objdir,
                         targets=['docs-lld-html']))

    if lldb_html:
        f.addStep(
            NinjaCommand(name="docs-lldb-html",
                         haltOnFailure=True,
                         description=["Build LLDB Sphinx HTML documentation"],
                         workdir=llvm_objdir,
                         targets=['docs-lldb-html']))

    if libcxx_html:
        f.addStep(
            NinjaCommand(
                name="docs-libcxx-html",
                haltOnFailure=True,
                description=["Build Libc++ Sphinx HTML documentation"],
                workdir=llvm_objdir,
                targets=['docs-libcxx-html']))

    if libunwind_html:
        f.addStep(
            NinjaCommand(
                name="docs-libunwind-html",
                haltOnFailure=True,
                description=["Build libunwind Sphinx HTML documentation"],
                workdir=llvm_objdir,
                targets=['docs-libunwind-html']))

    return f