# Fetch the source archive from the master, unless a non-empty src_url
# property tells the worker to obtain the source some other way.
self.addStep(
    FileDownload(name="src-download",
                 # Skip the download when a non-empty src_url property exists.
                 doStepIf=lambda step: ("src_url" not in step.build.getProperties()) or (step.getProperty("src_url") == ""),
                 mastersrc=WithProperties("%(src_archive)s"),
                 slavedest=WithProperties("%(src_archive)s"),
                 # BUG FIX: was the Python 2 octal literal 0644, which is a
                 # SyntaxError on Python 3. 0o644 keeps the same permissions.
                 mode=0o644,
                 haltOnFailure=True))
# Unpack the source archive via the mingw makefile.
self.addStep(
    Compile(name="src-extract",
            description=["source", "extract"],
            descriptionDone=["source", "extracted"],
            command=["make", "-f", "mingw-makefile", "src-extract"],
            env={
                "TARGET_ARCH": WithProperties("%(target_arch)s"),
                "SRC_ARCHIVE": WithProperties("%(src_archive)s")
            }))
# Install mingw headers
self.addStep(
    Compile(name="mingw-headers-install",
            description=["mingw headers", "install"],
            descriptionDone=["mingw headers", "installed"],
            command=["make", "-f", "mingw-makefile", "headers-install"],
            env={
                "SRC_ARCHIVE": WithProperties("%(src_archive)s"),
                "TARGET_ARCH": WithProperties("%(target_arch)s")
            }))
# Decode the version number from the dtool/PandaVersion.pp file. SetPropertyFromCommand("version", command=[ python_executable, "makepanda/getversion.py", buildtype_flag], haltOnFailure=True), ] whl_steps = [ SetPropertyFromCommand("python-abi", command=[ python_executable, "-c", "import makewheel;print(makewheel.get_abi_tag())"], workdir="build/makepanda", haltOnFailure=True), ] + whl_version_steps build_steps = [ # Run makepanda - give it enough timeout (6h) since some steps take ages Compile(command=build_cmd, timeout=6*60*60, env={"MAKEPANDA_THIRDPARTY": "C:\\thirdparty", "MAKEPANDA_SDKS": "C:\\sdks"}, haltOnFailure=True), ] publish_exe_steps = [ FileUpload(slavesrc=exe_filename, masterdest=exe_upload_filename, mode=0o664, haltOnFailure=True), MakeTorrent(exe_upload_filename), SeedTorrent(exe_upload_filename), ] publish_sdk_steps = [ # Upload the wheel. FileUpload(slavesrc=whl_filename, masterdest=whl_upload_filename, mode=0o664, haltOnFailure=True),
def createPoclFactory(environ={},
                      repository='https://github.com/pocl/pocl.git',
                      branch='master',
                      buildICD=True,
                      llvm_dir='/usr/',
                      icd_dir='/usr/',
                      tests_dir='',
                      config_opts='',
                      pedantic=True):
    """
    Create a buildbot factory object that builds pocl.

    environ     Dictionary:   The environment variables to append to the
                              build. PATH and LD_LIBRARY_PATH will be added
                              from llvm_dir (if given). The dictionary passed
                              in is not modified.
    repository  String: the repo to build from. defaults to pocl on github
    branch      String: the branch in 'repository' to build from. default to
                        master
    buildICD    Bool:   if false, the ICD extension is not built.
    llvm_dir    String: LLVM installation dir. I.e. without the 'bin/' or
                        'lib/'.
    icd_dir     String: ICD loader installation dir. We expect here to be a
                        ICD loader that understand the OCL_ICD_VENDORS
                        parameter, i.e. ocl-icd or patched Khronos loader.
    tests_dir   String: Path where the external testsuite packages can be
                        copied from. ('cp' is used, so they need to be on the
                        same filesystem).
                        NOTE: currently only a placeholder - not tested on the
                        public buildbot
    config_opts String: extra options to pass to ./configure

    Returns a buildbot BuildFactory.
    """
    # BUG FIX: the function used to write into 'environ' directly. With the
    # mutable default argument ({}), settings leaked between calls, and a
    # caller-supplied dict was mutated as a side effect. Work on a copy.
    environ = dict(environ)
    environ['PATH'] = llvm_dir + "/bin/:${PATH}"
    environ['LD_LIBRARY_PATH'] = llvm_dir + "/lib/:${LD_LIBRARY_PATH}"

    f = factory.BuildFactory()
    f.addStep(
        source.Git(
            repourl=repository,
            # mode='update',  # rm -rf the build tree. Have this only when
            #                 # changing branches during releases
            mode='clobber',
            branch=branch))
    f.addStep(
        ShellCommand(command=["./autogen.sh"],
                     haltOnFailure=True,
                     name="autoconfig",
                     env=environ,
                     description="autoconfiging",
                     descriptionDone="autoconf"))
    # Stage the external testsuite tarballs into the source tree.
    # NOTE: these steps run even when tests_dir is '' (the default); the
    # decodeRC kludge below keeps a failing 'cp' from failing the build.
    f.addStep(
        ShellCommand(
            haltOnFailure=True,
            command=[
                "cp", "-u", tests_dir + AMD_test_pkg,
                "examples/AMD/" + AMD_test_pkg
            ],
            name="copy AMD",
            description="copying",
            descriptionDone="copied AMD",
            # kludge around 'cp' always complaining if source is missing
            decodeRC={
                0: SUCCESS,
                1: SUCCESS
            }))
    f.addStep(
        ShellCommand(haltOnFailure=False,
                     command=[
                         "cp", "-u", tests_dir + ViennaCL_test_pkg,
                         "examples/ViennaCL/" + ViennaCL_test_pkg
                     ],
                     name="copy ViennaCL",
                     description="copying",
                     descriptionDone="copied ViennaCL",
                     decodeRC={
                         0: SUCCESS,
                         1: SUCCESS
                     }))

    # Assemble ./configure arguments from the caller's options.
    configOpts = config_opts.split(' ')
    if pedantic:
        configOpts = configOpts + ['--enable-pedantic']
    if not buildICD:
        configOpts = configOpts + ['--disable-icd']

    f.addStep(
        ShellCommand(command=["./configure"] + configOpts,
                     haltOnFailure=True,
                     name="configure",
                     env=environ,
                     description="configureing",
                     descriptionDone="configure"))
    f.addStep(Compile(env=environ))

    # enable this later
    ttacheck = False
    if ttacheck:
        f.addStep(
            ShellCommand(command=["./tools/scripts/run_tta_tests"],
                         haltOnFailure=True,
                         name="checks",
                         env=environ,
                         description="testing",
                         descriptionDone="tests",
                         logfiles={"test.log": "tests/testsuite.log"},
                         timeout=60 * 60))
    else:
        f.addStep(
            ShellCommand(
                command=["make", "check"],
                # for beagle, continue to clean it up
                # haltOnFailure=True,
                name="checks",
                env=environ,
                description="testing",
                descriptionDone="tests",
                logfiles={"test.log": "tests/testsuite.log"},
                # blas3 alone takes 15-20 min.
                timeout=60 * 60))

    # Keep this here for a reference, if we want to record the benchmarking
    # progress at some point in time
    # Benchmark only the vanilla pocl
    # if do_benchmark and baseurl=='lp:' and defaultbranch=='pocl':
    #     f.addStep(
    #         ShellCommand(
    #             haltOnFailure=True,
    #             env=environ,
    #             command=['./tools/scripts/benchmark.py', '--lightweight', '-o', 'benchmark_log.txt' ],
    #             logfiles = {'log.txt': 'benchmark_log.txt'},
    #             name = 'benchmark',
    #             description='benchmarking',
    #             descriptionDone='benchmarked',
    #             # 4hour timeout - PPC runs for a *long* time
    #             timeout=60*60*4))
    #     f.addStep(
    #         ShellCommand(
    #             command=[
    #                 'scp',
    #                 'benchmark_log.txt',
    #                 WithProperties("marvin:/var/www/pocl_benchmarks/benchmark-"+processor+"-r%(got_revision)s.txt")],
    #             name = 'copy benchmark',
    #             description='copying',
    #             descriptionDone='copied'))
    return f
def CreateLinuxChromeFactory():
    """Run chrome tests with the latest dynamorio.

    TODO(rnk): Run drmemory, not dynamorio.

    We use a build of chrome produced weekly from a known good revision on the
    same slave.
    """
    cr_src = '../../linux-cr-builder/build/src'
    ret = factory.BuildFactory()
    ret.addStep(
        SVN(svnurl=dr_svnurl,
            workdir='dynamorio',
            mode='update',
            name='Checkout DynamoRIO'))

    # Build a release DynamoRIO once per run.
    # If we need to execute 32-bit children, we'll need a full exports package.
    ret.addStep(
        Configure(command=['cmake', '..', '-DDEBUG=OFF'],
                  workdir='dynamorio/build',
                  name='Configure release DynamoRIO'))
    ret.addStep(
        Compile(command=['make', '-j5'],
                workdir='dynamorio/build',
                name='Compile release DynamoRIO'))

    # Don't follow python children.  This should speed up net_unittests, which
    # spawns a bunch of simple http servers to talk to.
    ret.addStep(
        ShellCommand(
            command=['bin64/drconfig', '-reg', 'python', '-norun', '-v'],
            workdir='dynamorio/build',
            name='don\'t follow python',
            description='don\'t follow python',
            descriptionDone='don\'t follow python'))

    # Per-suite extra arguments (gtest filters for known-bad tests, plus the
    # content_shell dump mode).
    extra_test_args = {
        'browser_tests': ['--gtest_filter=AutofillTest.BasicFormFill'],
        'net_unittests': [
            '--gtest_filter=-CertDatabaseNSSTest.ImportCACertHierarchy*'
        ],
        'remoting_unittests': [
            '--gtest_filter='
            '-VideoFrameCapturerTest.Capture:'
            'DesktopProcessTest.DeathTest'
        ],
        # crbug.com/308273: this test is flaky
        'base_unittests': [
            '--gtest_filter=-TraceEventTestFixture.TraceContinuousSampling'
        ],
        'content_shell': ['-dump-render-tree',
                          'file:///home/chrome-bot/bb.html'],
    }

    # Chromium tests, each run under drrun.
    for test in LINUX_CHROME_TESTS:
        cmd = [
            'xvfb-run', '-a', '../dynamorio/build/bin64/drrun',
            '-stderr_mask', '12',  # Show DR crashes
            '--', cr_src + '/out/Release/' + test
        ] + extra_test_args.get(test, [])
        # We used to md5 the output, but that's too brittle.  Just dump it to
        # stdout so humans can verify it.  The return code will tell us if we
        # crash.
        # TODO(rnk): We should run some selection of layout tests if we want
        # to verify output.
        ret.addStep(
            Test(command=cmd,
                 env={'CHROME_DEVEL_SANDBOX': '/opt/chromium/chrome_sandbox'},
                 name=test,
                 descriptionDone=test,
                 description=test))

    return ret
] build_steps = [ Git(config.git_url, getDescription={'match': 'v*'}), # Decode the version number from the dtool/PandaVersion.pp file. SetPropertyFromCommand( "version", command=[python_executable, "makepanda/getversion.py", buildtype_flag], haltOnFailure=True), # Run makepanda - give it enough timeout (1h) Compile(command=build_cmd, timeout=1 * 60 * 60, env={ "MAKEPANDA_THIRDPARTY": "/Users/buildbot/thirdparty", "MAKEPANDA_SDKS": "/Users/buildbot/sdks", "PYTHONPATH": python_path }, haltOnFailure=True), ] build_publish_whl_steps = whl_version_steps + [ SetPropertyFromCommand( "python-abi", command=[ python_executable, "-c", "import makewheel;print(makewheel.get_abi_tag())" ], workdir="build/makepanda", haltOnFailure=True),
def make_dolphin_win_build(build_type, mode="normal"):
    """Build factory for Windows x64 Dolphin builds.

    build_type: MSBuild configuration name (e.g. "Release" / "Debug"),
                substituted into /p:Configuration=.
    mode:       comma-separated flags; recognised values seen below are
                "normal", "debug", "wip", "pr" and "fifoci_golden".
    Returns a BuildFactory.
    """
    f = BuildFactory()
    # Split the mode string into individual flags.
    mode = mode.split(",")
    normal = "normal" in mode
    debug = "debug" in mode
    wip = "wip" in mode
    pr = "pr" in mode
    fifoci_golden = "fifoci_golden" in mode
    f.addStep(
        GitNoBranch(repourl="https://github.com/dolphin-emu/dolphin.git",
                    progress=True,
                    mode="incremental"))
    # Start from a clean output directory.
    f.addStep(RemoveDirectory(dir="build/Binary"))
    branch = WithProperties("%s", "branchname")
    env = {"DOLPHIN_BRANCH": branch, "DOLPHIN_DISTRIBUTOR": "dolphin-emu.org"}
    if normal:
        env["DOLPHIN_DEFAULT_UPDATE_TRACK"] = "beta"
    # Build the solution with MSBuild.
    f.addStep(
        Compile(command=[
            "msbuild.exe", "/v:m", "/p:Platform=x64",
            "/p:Configuration=%s" % build_type, "dolphin-emu.sln"
        ],
                env=env,
                workdir="build/Source",
                description="building",
                descriptionDone="build",
                haltOnFailure=True))
    # Same solution again with /p:RunUnitTests=true to execute the tests.
    f.addStep(
        Test(command=[
            "msbuild.exe", "/v:m", "/p:Platform=x64",
            "/p:Configuration=%s" % build_type, "/p:RunUnitTests=true",
            "dolphin-emu.sln"
        ],
             env=env,
             workdir="build/Source",
             description="testing",
             descriptionDone="test",
             haltOnFailure=True))
    # Debug builds produce DolphinD.exe instead of Dolphin.exe.
    dolphin_name = "DolphinD" if debug else "Dolphin"
    f.addStep(
        ShellCommand(command=[
            "C:\\buildbot\\signbin.bat",
            "Binary\\x64\\%s.exe" % dolphin_name
        ],
                     logEnviron=False,
                     description="signing binary",
                     descriptionDone="sign binary"))
    f.addStep(
        ShellCommand(
            command=["xcopy", "Binary\\x64", "Dolphin-x64", "/S", "/I", "/Y"],
            logEnviron=False,
            description="copying output",
            descriptionDone="output copy"))
    out_filename = WithProperties("Dolphin-%s-%s-x64.7z", "branchname",
                                  "shortrev")
    f.addStep(
        ShellCommand(command=["7z", "a", "-r", out_filename, "Dolphin-x64"],
                     logEnviron=False,
                     description="compressing",
                     descriptionDone="compression"))
    if debug:
        fn_arch = "dbg-x64"
    else:
        fn_arch = "x64"
    # Pick the upload destination by mode.  In the patterns below, %%s is a
    # literal %s for WithProperties after the %-formatting of fn_arch.
    if "normal" in mode:
        master_filename = WithProperties(
            "/srv/http/dl/builds/dolphin-%%s-%%s-%s.7z" % fn_arch,
            "branchname", "shortrev")
        url = WithProperties(
            "https://dl.dolphin-emu.org/builds/dolphin-%%s-%%s-%s.7z" %
            fn_arch, "branchname", "shortrev")
    elif wip:
        master_filename = WithProperties(
            "/srv/http/dl/wips/%%s-dolphin-%%s-%%s-%s.7z" % fn_arch, "author",
            "branchname", "shortrev")
        url = WithProperties(
            "https://dl.dolphin-emu.org/wips/%%s-dolphin-%%s-%%s-%s.7z" %
            fn_arch, "author", "branchname", "shortrev")
    elif pr:
        master_filename = WithProperties(
            "/srv/http/dl/prs/%%s-dolphin-latest-%s.7z" % fn_arch,
            "branchname")
        url = WithProperties(
            "https://dl.dolphin-emu.org/prs/%%s-dolphin-latest-%s.7z" %
            fn_arch, "branchname")
    else:
        master_filename = url = ""
    # Expose the download URL to later steps / triggered builders.
    f.addStep(SetProperty(property="build_url", value=url))
    if master_filename and url:
        f.addStep(
            FileUpload(workersrc=out_filename,
                       masterdest=master_filename,
                       url=url,
                       keepstamp=True,
                       mode=0o644))
    # Kick off FifoCI graphics-regression runs for golden builds.
    if fifoci_golden:
        if pr:
            f.addStep(
                Trigger(schedulerNames=["pr-fifoci-win"],
                        copy_properties=[
                            "pr_id", "headrev", "branchname", "shortrev",
                            "build_url"
                        ]))
        else:
            f.addStep(
                TriggerIfBranch(schedulerNames=["fifoci-win"],
                                branchList=["master"],
                                copy_properties=["shortrev", "build_url"]))
    # Website notification + update manifest only for plain "normal" builds.
    if "normal" in mode and "debug" not in mode:
        f.addStep(
            MasterShellCommand(
                command=
                "/home/buildbot/venv/bin/python /home/buildbot/bin/send_build.py",
                env={
                    "BRANCH": WithProperties("%s", "branchname"),
                    "SHORTREV": WithProperties("%s", "shortrev"),
                    "HASH": WithProperties("%s", "revision"),
                    "AUTHOR": WithProperties("%s", "author"),
                    "DESCRIPTION": WithProperties("%s", "description"),
                    "TARGET_SYSTEM": "Windows x64",
                    "USER_OS_MATCHER": "win",
                    "BUILD_URL": url,
                },
                description="notifying website",
                descriptionDone="website notice"))
        f.addStep(
            MasterShellCommand(command=[
                "/home/buildbot/venv/bin/python",
                "/home/buildbot/bin/make_manifest.py", "--input",
                master_filename, "--version_hash",
                WithProperties("%s", "revision"), "--output-manifest-store",
                "/data/nas/update/manifest", "--output-content-store",
                "/data/nas/update/content", "--signing-key",
                "/home/buildbot/update.signing.key"
            ],
                               description="writing update manifest",
                               descriptionDone="update manifest write"))
    # Clean up the worker (Windows shell commands).
    f.addStep(
        ShellCommand(command=["del", "/F", "/S", "/Q", out_filename],
                     logEnviron=False,
                     description="cleaning up files",
                     descriptionDone="cleanup files"))
    f.addStep(
        ShellCommand(command=["rmdir", "/S", "/Q", "Dolphin-x64"],
                     logEnviron=False,
                     description="cleaning up dirs",
                     descriptionDone="cleanup dirs"))
    return f
if pedantic==True: configOpts = configOpts + ['--enable-pedantic'] if buildICD==False: configOpts = configOpts + ['--disable-icd'] if cache_dir=None: configOpts = configOpts + ['--disable-kernel-cache'] f.addStep(ShellCommand( command=["./configure"] + configOpts, haltOnFailure=True, name="configure pocl", env=myenviron, description="configureing", descriptionDone="configure")) f.addStep(Compile(env=myenviron )) if tests_dir!=None and not cmake: f.addStep(ShellCommand(command=["make", "prepare-examples"], haltOnFailure=True, name="prepare examples", env=myenviron, description="preparing", descriptionDone="prepare")) if tcedir: f.addStep(ShellCommand(command=["./tools/scripts/run_tta_tests"], haltOnFailure=True, name="checks", env=myenviron,
# And the build scripts. FileDownload(mastersrc="build_scripts/build.sh", slavedest="build_scripts/build.sh", workdir="context"), FileDownload(mastersrc="build_scripts/build_utils.sh", slavedest="build_scripts/build_utils.sh", workdir="context"), # Build the Docker image. ShellCommand( name="setup", command=setup_cmd, workdir="context", haltOnFailure=True), # Invoke makepanda and makewheel. Compile(command=build_cmd, haltOnFailure=True), # Upload the wheel file. FileUpload(slavesrc=whl_filename, masterdest=whl_upload_filename, mode=0o664, haltOnFailure=True), ] manylinux_factory = BuildFactory() for step in build_steps: manylinux_factory.addStep(step) def manylinux_builder(suite, arch): platform = "-".join((suite, arch))
def get_build_step(link, type, options=[]):
    """Return a Compile step that drives 'cmake --build' for one configuration.

    link:    link kind (compared against 'static' in doStepIf).
    type:    build type; 'debug' selects --config Debug, anything else Release.
    options: list of feature flags ('frameworks', 'newSDK', 'scan-build',
             'android', 'ios', 'osx', ...).  NOTE: mutable default, but it is
             only read, never mutated, so it is safe here.
    """
    from buildbot.process.properties import Interpolate
    from buildbot.steps.shell import Compile
    suffix = ''
    target = 'install'
    # descriptionSuffix components identifying this build variant.
    if 'frameworks' in options:
        suffix = [link, type, 'frameworks']
    else:
        suffix = [link, type]
    if 'newSDK' in options:
        suffix.append('10.11')
        target = 'all'
    if 'scan-build' in options:
        target = 'all'
    build_command = 'cmake --build . --target ' + target
    if 'scan-build' in options:
        build_command = 'scan-build ' + build_command
    # NOTE(review): this condition is always False - it asks whether the
    # string 'Makefiles' equals a freshly constructed Interpolate object.
    # If it were ever True, the body would raise TypeError (str += list).
    # The intent was presumably to pass -j when the 'generator' property is
    # a Makefiles generator; that needs a renderable, not a config-time test.
    # TODO confirm intent before fixing.
    if 'Makefiles' in [Interpolate('%(prop:generator)s')]:
        build_command += [' -- -j' + Interpolate('%(prop:parallel)s')]
    else:
        # Not Makefiles, likely a multi-target generator (Xcode, VS, etc.)
        # so we must specify buid config now
        if type == 'debug':
            build_command += ' --config Debug'
        else:
            build_command += ' --config Release'
    # iOS build uses arch arm64
    if 'ios' in options:
        build_command += ' -- -arch arm64'
    # Run when scan-build/android/ios is requested; otherwise only on 'osx'
    # builders (and never static links on osx builders).
    return Compile(description=['building'],
                   descriptionSuffix=suffix,
                   descriptionDone=['build'],
                   doStepIf=lambda step: ('scan-build' in options) or (
                       'android' in options) or ('ios' in options) or (
                           ((not options) or
                            ('osx' in step.build.getProperty('buildername')))
                           and (link != 'static' or not
                                ('osx' in step.build.
                                 getProperty('buildername')))),
                   hideStepIf=skipped,
                   workdir=Interpolate('%(prop:workdir)s/build/build'),
                   # Throttle concurrent compiles on the worker.
                   locks=[slave_cpu_lock.access('counting')],
                   command=build_command,
                   env={
                       'PATH': Interpolate('%(prop:toolchain_path)s%(prop:PATH)s'),
                       'INCLUDE': Interpolate('%(prop:vc_include)s'),
                       'LIB': Interpolate('%(prop:vc_lib)s'),
                       'LIBPATH': Interpolate('%(prop:vc_libpath)s')
                   },
                   want_stdout=True,
                   want_stderr=True,
                   logEnviron=False)
def getPerPlatformBuilders(self, platform):
    """Return the list of BuilderConfigs for this target on 'platform'.

    Returns [] when the platform cannot build this target, otherwise a
    single-element list with a configure/compile/package factory.
    """
    if not platform.canBuild(self):
        return []

    # Worker-side layout: sources, build tree and snapshot packages all live
    # under platform.workerdatapath.
    src_path = "{0}/src/{1}".format(platform.workerdatapath, self.name)
    configure_path = src_path + "/configure"
    build_path = "{0}/builds/{1}/{2}".format(platform.workerdatapath,
                                             platform.name, self.name)
    packages_path = "{0}/packages/snapshots/{1}".format(
        platform.workerdatapath, self.name)

    env = platform.getEnv(self)

    f = factory.BuildFactory()
    f.useProgress = False
    # Optional clean, driven by the 'clean' property (defaults to False).
    f.addStep(steps.Clean(dir="", doStepIf=Property("clean", False)))
    # Sets the 'do_configure' property when configure is newer than config.mk.
    f.addStep(
        steps.SetPropertyIfOlder(name="check config.mk freshness",
                                 src=configure_path,
                                 generated="config.mk",
                                 property="do_configure"))
    if self.verbose_build:
        platform_build_verbosity = "--enable-verbose-build"
    else:
        platform_build_verbosity = ""
    f.addStep(
        Configure(command=[configure_path, platform_build_verbosity] +
                  platform.getConfigureArgs(self),
                  doStepIf=Property("do_configure",
                                    default=True,
                                    defaultWhenFalse=False),
                  env=env))
    f.addStep(Compile(command=["make", "-j5"], env=env))
    # No tests
    # Platform packaging: with a platform packaging command, use it as the
    # dist target; otherwise strip binaries first (when a strip command
    # exists) and package with disttarget=None.
    packaging_cmd = None
    if platform.getPackagingCmd(self) is not None:
        packaging_cmd = platform.getPackagingCmd(self)
    else:
        if platform.getStripCmd(self) is not None:
            f.addStep(steps.Strip(command=platform.getStripCmd()))
    if platform.canPackage(self):
        f.addStep(
            steps.Package(
                disttarget=packaging_cmd,
                srcpath=src_path,
                dstpath=packages_path,
                data_files=self.data_files,
                buildname="{0}-{1}".format(platform.name, self.name),
                platform_built_files=platform.getBuiltFiles(self),
                platform_data_files=platform.getDataFiles(self),
                archive_format=platform.archiveext,
                env=env))
    return [
        BuilderConfig(
            name="{0}-{1}".format(self.name, platform.name),
            workername=platform.workername,
            workerbuilddir=build_path,
            factory=f,
            # Serialise against global build and per-target source locks.
            locks=[
                lock_build.access('counting'),
                self.lock_src.access("counting")
            ],
            tags=[self.name],
            properties={
                "platformname": platform.name,
                "workerimage": platform.getWorkerImage(self),
            },
        )
    ]
for abi in ('cp39-cp39', 'cp38-cp38', 'cp37-cp37m', 'cp36-cp36m', 'cp27-cp27m', 'cp34-cp34m', 'cp35-cp35m'): whl_filename = get_whl_filename(abi) copy_python = (abi == 'cp37-cp37m') do_step = True if abi in ('cp27-cp27m', 'cp34-cp34m', 'cp35-cp35m'): do_step = is_branch('release/1.10.x') build_steps += [ # Run makepanda. Give it enough timeout (6h) since some steps take ages Compile(name="compile " + abi, timeout=6 * 60 * 60, command=get_build_command(abi, copy_python=copy_python), env={ "MAKEPANDA_THIRDPARTY": "C:\\thirdparty", "MAKEPANDA_SDKS": "C:\\sdks" }, haltOnFailure=True, doStepIf=do_step), # Run the test suite, but in a virtualenv. Test(name="test " + abi, command=get_test_command(abi, whl_filename), haltOnFailure=True, doStepIf=do_step), # Upload the wheel. FileUpload(name="upload whl " + abi, workersrc=whl_filename, masterdest=Interpolate("%s/%s", common.upload_dir,
def make_fifoci_linux(type, mode="normal"):
    """Build factory for a Linux FifoCI graphics-regression run.

    type: FifoCI backend type string, forwarded to runner.py --type and used
          in the result file name.
    mode: comma-separated flags; "normal" and "pr" are recognised.
    """
    # Requirements for a FifoCI linux buildworker:
    #  - ~/python pointing to the fifoci virtualenv Python.
    #  - ~/dff existing to cache DFF files
    #  - ~/fifoci pointing to FifoCI Git
    mode = mode.split(",")
    normal = "normal" in mode
    pr = "pr" in mode
    f = BuildFactory()
    f.addStep(
        GitNoBranch(repourl="https://github.com/dolphin-emu/dolphin.git",
                    progress=True,
                    mode="incremental"))
    # Update the worker's FifoCI checkout to origin/master (best effort).
    f.addStep(
        ShellCommand(
            command=
            "cd ~/fifoci && git fetch && git checkout master && git reset --hard origin/master || true",
            logEnviron=False,
            description="Updating FifoCI",
            descriptionDone="FifoCI update"))
    f.addStep(
        ShellCommand(command=["mkdir", "-p", "build"],
                     logEnviron=False,
                     description="mkbuilddir",
                     descriptionDone="mkbuilddir"))
    # Configure a headless Ninja build installed into ./prefix.
    f.addStep(
        ShellCommand(
            command=
            "cmake -DCMAKE_INSTALL_PREFIX=$(pwd)/prefix -DENABLE_QT=OFF -DENABLE_EVDEV=OFF -GNinja ..",
            workdir="build/build",
            description="configuring",
            descriptionDone="configure",
            haltOnFailure=True))
    f.addStep(
        Compile(command=["ninja"],
                workdir="build/build",
                description="building",
                descriptionDone="build",
                haltOnFailure=True))
    f.addStep(
        Compile(command=["ninja", "install"],
                workdir="build/build",
                description="installing",
                descriptionDone="install",
                haltOnFailure=True))
    url_base = "https://fifoci.dolphin-emu.org"
    # runner.py arguments; $(...) bits are expanded by the worker's shell,
    # %(...)s bits by WithProperties below.
    args = [
        "--type",
        type,
        "--dolphin",
        "$(pwd)/prefix/bin/dolphin-emu-nogui",
        "--rev_base_hash",
        "$(git rev-parse HEAD)",
        "--output",
        "result.zip",
        "--url_base",
        url_base,
        "--dff_dir",
        "~/dff",
    ]
    if normal:
        args += [
            "--rev_hash",
            "$(git rev-parse HEAD)",
            "--rev_name",
            "%(shortrev)s",
            "--rev_submitted",
            "true",
        ]
    elif pr:
        args += [
            "--rev_hash",
            "%(headrev)s",
            "--rev_name",
            "%(branchname)s-%(shortrev)s",
            "--rev_submitted",
            "false",
        ]
    command = "~/python ~/fifoci/runner/runner.py " + " ".join(args)
    f.addStep(
        ShellCommand(command=WithProperties(command),
                     workdir="build/build",
                     description="gfx testing",
                     descriptionDone="gfx test",
                     haltOnFailure=True))
    # Ship the result archive to the master and import it into the frontend.
    f.addStep(
        FileUpload(workersrc="build/result.zip",
                   masterdest="/tmp/fifoci-%s-result.zip" % type,
                   mode=0o644))
    f.addStep(
        MasterShellCommand(
            command="sudo -u fifoci /home/fifoci/python "
            "/home/fifoci/fifoci/frontend/manage.py import_results "
            "/tmp/fifoci-%s-result.zip" % type,
            description="importing result",
            descriptionDone="result import"))
    return f
def make_dolphin_osx_build(mode="normal"):
    """Build factory for macOS Dolphin builds.

    mode: "normal", "wip" or "pr"; selects the upload destination and whether
          the website is notified.
    """
    f = BuildFactory()
    f.addStep(
        GitNoBranch(repourl="https://github.com/dolphin-emu/dolphin.git",
                    progress=True,
                    mode="incremental"))
    f.addStep(
        ShellCommand(command=["mkdir", "-p", "build"],
                     logEnviron=False,
                     description="mkbuilddir",
                     descriptionDone="mkbuilddir"))
    f.addStep(
        ShellCommand(command=[
            "cmake", "-GNinja", "-DDISTRIBUTOR=dolphin-emu.org", ".."
        ],
                     workdir="build/build",
                     description="configuring",
                     descriptionDone="configure",
                     haltOnFailure=True))
    f.addStep(
        Compile(command=["ninja"],
                workdir="build/build",
                description="building",
                descriptionDone="build",
                haltOnFailure=True))
    f.addStep(
        Test(command=["ninja", "unittests"],
             workdir="build/build",
             description="testing",
             descriptionDone="test",
             haltOnFailure=True))
    f.addStep(
        ShellCommand(command="/build/codesign.sh --deep Binaries/Dolphin.app",
                     workdir="build/build",
                     description="signing",
                     descriptionDone="sign",
                     haltOnFailure=True))
    # Package the app bundle into a compressed DMG.
    # NOTE(review): "Binaries/dolphin.app" differs in case from the
    # "Binaries/Dolphin.app" signed above; this only works on a
    # case-insensitive filesystem - confirm before relying on it elsewhere.
    f.addStep(
        ShellCommand(command=[
            "hdiutil", "create", "dolphin.dmg", "-format", "UDBZ",
            "-srcfolder", "Binaries/dolphin.app", "-ov", "-volname",
            WithProperties("Dolphin %s-%s", "branchname", "shortrev")
        ],
                     workdir="build/build",
                     logEnviron=False,
                     description="packaging",
                     descriptionDone="package"))
    f.addStep(
        ShellCommand(command="/build/codesign.sh --deep dolphin.dmg",
                     workdir="build/build",
                     description="signing dmg",
                     descriptionDone="sign dmg",
                     haltOnFailure=True))
    # Pick the upload destination by mode.
    if mode == "normal":
        master_filename = WithProperties(
            "/srv/http/dl/builds/dolphin-%s-%s.dmg", "branchname", "shortrev")
        url = WithProperties(
            "https://dl.dolphin-emu.org/builds/dolphin-%s-%s.dmg",
            "branchname", "shortrev")
    elif mode == "wip":
        master_filename = WithProperties(
            "/srv/http/dl/wips/%s-dolphin-%s-%s.dmg", "author", "branchname",
            "shortrev")
        url = WithProperties(
            "https://dl.dolphin-emu.org/wips/%s-dolphin-%s-%s.dmg", "author",
            "branchname", "shortrev")
    elif mode == "pr":
        master_filename = WithProperties(
            "/srv/http/dl/prs/%s-dolphin-latest.dmg", "branchname")
        url = WithProperties(
            "https://dl.dolphin-emu.org/prs/%s-dolphin-latest.dmg",
            "branchname")
    else:
        master_filename = url = ""
    if master_filename and url:
        f.addStep(
            FileUpload(workersrc="build/dolphin.dmg",
                       masterdest=master_filename,
                       url=url,
                       keepstamp=True,
                       mode=0o644))
    # Only fully published builds notify the website.
    if mode == "normal":
        f.addStep(
            MasterShellCommand(command="/home/buildbot/bin/send_build.py",
                               env={
                                   "BRANCH": WithProperties("%s",
                                                            "branchname"),
                                   "SHORTREV": WithProperties("%s",
                                                              "shortrev"),
                                   "HASH": WithProperties("%s", "revision"),
                                   "AUTHOR": WithProperties("%s", "author"),
                                   "DESCRIPTION": WithProperties(
                                       "%s", "description"),
                                   "TARGET_SYSTEM": "macOS",
                                   "USER_OS_MATCHER": "osx",
                                   "BUILD_URL": url,
                               },
                               description="notifying website",
                               descriptionDone="website notice"))
    return f
def createPoclFactory(environ={},
                      repository='https://github.com/pocl/pocl.git',
                      branch='master',
                      buildICD=True,
                      llvm_dir='/usr/',
                      icd_dir='/usr/',
                      tests_dir=None,
                      config_opts='',
                      pedantic=True,
                      tcedir='',
                      f=None,
                      cmake=False,
                      cache_dir=None):
    """
    Create a buildbot factory object that builds pocl.

    environ     Dictionary:   The environment variables to append to the
                              build. PATH and LD_LIBRARY_PATH will be added
                              from llvm_dir (if given). The dictionary passed
                              in is not modified.
    repository  String: the repo to build from. defaults to pocl on github
    branch      String: the branch in 'repository' to build from. default to
                        master
    buildICD    Bool:   if false, the ICD extension is not built.
    llvm_dir    String: LLVM installation dir. I.e. without the 'bin/' or
                        'lib/'.
    icd_dir     String: ICD loader installation dir. We expect here to be a
                        ICD loader that understand the OCL_ICD_VENDORS
                        parameter, i.e. ocl-icd or patched Khronos loader.
    tests_dir   String: Path where the external testsuite packages can be
                        copied from. ('cp' is used, so they need to be on the
                        same filesystem).
                        NOTE: currently only a placeholder - not tested on the
                        public buildbot
    config_opts String: extra options to pass to ./configure
    tcedir      String: TCE installation dir; when set, TTA tests are run
                        instead of 'make check'.
    f           BuildFactory: an existing factory to append steps to; a new
                        one is created when None.
    cmake       Bool:   use CMake instead of autotools to build pocl
    cache_dir   String: Set the pocl kernel cache to this dir. If not set,
                        the kcache is disabled.
    """
    # Never mutate the caller's dict (or the shared default argument).
    myenviron = environ.copy()

    if 'PATH' in myenviron:
        myenviron[
            'PATH'] = llvm_dir + "/bin/:" + myenviron['PATH'] + ":${PATH}"
    else:
        myenviron['PATH'] = llvm_dir + "/bin/:${PATH}"

    if 'LD_LIBRARY_PATH' in myenviron:
        # BUG FIX: this branch used to splice in myenviron['PATH'] instead of
        # the existing LD_LIBRARY_PATH, clobbering the library search path
        # with binary directories.
        myenviron['LD_LIBRARY_PATH'] = llvm_dir + "/lib/:" + myenviron[
            'LD_LIBRARY_PATH'] + ":${LD_LIBRARY_PATH}"
    else:
        myenviron['LD_LIBRARY_PATH'] = llvm_dir + "/lib/:${LD_LIBRARY_PATH}"

    if tcedir:
        myenviron['PATH'] = tcedir + "/bin/:" + myenviron['PATH']
        myenviron['LD_LIBRARY_PATH'] = tcedir + "/lib/:" + myenviron[
            'LD_LIBRARY_PATH']

    if cache_dir:
        myenviron['POCL_BUILD_KERNEL_CACHE'] = '1'
    else:
        myenviron['POCL_BUILD_KERNEL_CACHE'] = '0'

    if f is None:
        f = factory.BuildFactory()

    f.addStep(
        Git(repourl=repository,
            mode=Property('git_mode'),
            ignore_ignores=True,
            branch=branch))

    # clear last test round's kernel cache.
    # NB: if you run two slave builds on the same machine, this
    # will not work!
    if cache_dir:
        f.addStep(
            ShellCommand(command=['rm', '-rf', cache_dir],
                         haltOnFailure=True,
                         name='clean kcache',
                         description='cleaning kcache',
                         descriptionDone='cleaned kcache'))

    if not cmake:
        f.addStep(
            ShellCommand(command=["./autogen.sh"],
                         haltOnFailure=True,
                         name="autoconfig",
                         env=myenviron,
                         description="autoconfiging",
                         descriptionDone="autoconf"))

    # Stage the external testsuite tarballs into the source tree.
    if tests_dir is not None:
        f.addStep(
            ShellCommand(
                haltOnFailure=True,
                command=[
                    "cp", "-u", tests_dir + AMD_test_pkg,
                    "examples/AMD/" + AMD_test_pkg
                ],
                name="copy AMD",
                description="copying",
                descriptionDone="copied AMD",
                # kludge around 'cp' always complaining if source is missing
                decodeRC={
                    0: SUCCESS,
                    1: SUCCESS
                }))
        f.addStep(
            ShellCommand(haltOnFailure=False,
                         command=[
                             "cp", "-u", tests_dir + ViennaCL_test_pkg,
                             "examples/ViennaCL/" + ViennaCL_test_pkg
                         ],
                         name="copy ViennaCL",
                         description="copying",
                         descriptionDone="copied ViennaCL",
                         decodeRC={
                             0: SUCCESS,
                             1: SUCCESS
                         }))

    if cmake:
        f.addStep(
            ShellCommand(command=["cmake", "."],
                         env=myenviron,
                         haltOnFailure=True,
                         name="CMake",
                         description="cmaking",
                         descriptionDone="cmade"))
    else:
        configOpts = config_opts.split(' ')
        if pedantic:
            configOpts = configOpts + ['--enable-pedantic']
        if not buildICD:
            configOpts = configOpts + ['--disable-icd']
        f.addStep(
            ShellCommand(command=["./configure"] + configOpts,
                         haltOnFailure=True,
                         name="configure pocl",
                         env=myenviron,
                         description="configureing",
                         descriptionDone="configure"))

    f.addStep(Compile(env=myenviron))

    if tests_dir is not None and not cmake:
        f.addStep(
            ShellCommand(command=["make", "prepare-examples"],
                         haltOnFailure=True,
                         name="prepare examples",
                         env=myenviron,
                         description="preparing",
                         descriptionDone="prepare"))

    if tcedir:
        f.addStep(
            ShellCommand(command=["./tools/scripts/run_tta_tests"],
                         haltOnFailure=True,
                         name="checks",
                         env=myenviron,
                         description="testing",
                         descriptionDone="tests",
                         logfiles={"test.log": "tests/testsuite.log"},
                         timeout=60 * 60))
    else:
        f.addStep(
            ShellCommand(
                command=["make", "check"],
                haltOnFailure=True,
                name="checks",
                env=myenviron,
                description="testing",
                descriptionDone="tests",
                logfiles={"test.log": "tests/testsuite.log"},
                # blas3 alone takes 15-20 min.
                timeout=60 * 60))

    # run the test once more, now from the kernel cache dir, if used
    if cache_dir:
        f.addStep(
            ShellCommand(command=["make", "check"],
                         haltOnFailure=True,
                         name="kcache checks",
                         env=myenviron,
                         description="testing kcache",
                         descriptionDone="tested kcache",
                         logfiles={"test.log": "tests/testsuite.log"},
                         # NOTE(review): 5 seconds is suspiciously short
                         # compared with the 60*60 used above - presumably a
                         # cached run is expected to be near-instant; confirm.
                         timeout=5))
    return f
# only take place on one slave. from buildbot.steps.shell import ShellCommand, Configure, Compile, Test from buildbot.steps.source import SVN from buildbot.steps.vstudio import VC10 from buildbot.steps.trigger import Trigger from buildbot.process.factory import BuildFactory from cmake import CMakeFactory step_svn_copy = SVN(mode='copy', baseURL=(ea_svnurl + '%%BRANCH%%'), defaultBranch='trunk', retry=(30, 2), logEnviron=True) step_autoconf = Compile(command=["autoconf"], description=["running autoconf"], descriptionDone=["autoconf"], logEnviron=False) step_configure = Configure(command=["./configure"], logEnviron=False) step_configure_64 = Configure(command=["./configure", "--enable-64bit"], logEnviron=False) step_compile_all = Compile(command=["make", "clean", "all"], logEnviron=False) step_compile_txt = Compile(command=["make", "clean", "txt"], description="compiling txt", descriptionDone="compile txt", logEnviron=False) step_compile_sql = Compile(command=["make", "clean", "sql"], description="compiling sql", descriptionDone="compile sql", logEnviron=False) step_compile_VS10 = Compile( command=["devenv.com", "eAthena-10.sln", "/REBUILD"], logEnviron=False)
FileDownload( mastersrc=Interpolate("dockerfiles/%(prop:suite)s-%(prop:arch)s"), slavedest="Dockerfile", workdir="context"), # Make sure the base distribution is up-to-date. ShellCommand(command=['wget', '-N', cloudimg_url], workdir="context"), # Build the Docker image. ShellCommand( name="setup", command=setup_cmd, workdir="context", haltOnFailure=True), # Invoke makepanda. Compile(command=build_cmd, haltOnFailure=True, env={'PYTHONPATH': python_path}), ] # Define a global lock, since reprepro won't allow simultaneous access to the repo. repo_lock = MasterLock('reprepro') # Steps to publish the runtime and SDK. publish_deb_steps = [ # Upload the deb package. FileUpload(slavesrc=deb_filename, masterdest=deb_upload_filename, mode=0o664, haltOnFailure=True), # Create a torrent file and start seeding it.
def loadConfig(config):
    """Register the XDD project with the buildmaster configuration.

    Mutates ``config`` (the buildmaster configuration dict) in place by
    appending to its 'change_source', 'builders', 'schedulers' and 'status'
    lists: a git poller on the XDD repository, the build/test factory and
    its builders, nightly and force schedulers, and a summary mail notifier.
    """
    ####### CHANGESOURCES

    # the 'change_source' setting tells the buildmaster how it should find out
    # about source code changes.  Here we point to the buildbot clone of pyflakes.

    from buildbot.changes.gitpoller import GitPoller
    from buildbot.changes.filter import ChangeFilter

    # Poll the XDD master branch every two minutes.
    config['change_source'].append(
        GitPoller(
            repourl='[email protected]:ORNL/xdd.git',
            workdir='gitpoller-workdir-xdd-master',
            pollinterval=120,
            branch='master',
            project='xdd'))

    # NOTE(review): this filter is currently unused by the schedulers below
    # (the nightly scheduler builds 'master'); kept for future schedulers.
    xdd_filter = ChangeFilter(
        project='xdd',
        branch='testing')

    ####### BUILDERS

    # The 'builders' list defines the Builders, which tell Buildbot how to
    # perform a build: what steps, and which slaves can execute them.  Note
    # that any particular build will only take place on one slave.

    from buildbot.process.factory import BuildFactory, GNUAutoconf
    from buildbot.steps.source import Git
    from buildbot.steps.shell import ShellCommand, Configure, Compile, Test

    xdd_factory = BuildFactory()

    # Check out the source
    xdd_factory.addStep(Git(repourl='[email protected]:ORNL/xdd.git',
                            mode='copy',
                            branch='master'))

    # Generate the test configuration
    xdd_factory.addStep(
        ShellCommand(command=['./contrib/buildbot_gen_test_config.sh'],
                     name="configuring"))

    # Compile the code
    xdd_factory.addStep(Compile(description=["compiling"]))

    # Install the code
    xdd_factory.addStep(ShellCommand(command=['make', 'install'],
                                     name="make install"))

    # Perform make check
    xdd_factory.addStep(ShellCommand(command=['make', 'check'],
                                     name="make check",
                                     maxTime=600))

    # Perform make test
    xdd_factory.addStep(Test(description=["make test"], maxTime=600))

    # Perform cleanup.  The command is a single shell string (not an argv
    # list) so that '||' is interpreted by the shell: with a list, buildbot
    # execs the program directly and 'pkill' would receive '||' and
    # 'echo ""' as extra pattern arguments, instead of falling back to the
    # no-op branch when no xdd process exists.
    xdd_factory.addStep(ShellCommand(command='pkill -f xdd || echo ""',
                                     name='process cleanup',
                                     maxTime=60))

    # Add the XDD Build factory to each of the available builders described
    # in the master.cfg
    from buildbot.config import BuilderConfig

    # config['builders'].append(BuilderConfig(name="xdd-rhel5-x86_64", slavenames=["pod7"], factory=xdd_factory, env={"XDDTEST_TIMEOUT": "900"}, category='xdd'))
    # config['builders'].append(BuilderConfig(name="xdd-rhel6-x86_64", slavenames=["pod9"], factory=xdd_factory, env={"XDDTEST_TIMEOUT": "900"},category='xdd'))
    # config['builders'].append(BuilderConfig(name="xdd-sles10-x86_64", slavenames=["pod10"], factory=xdd_factory, env={"XDDTEST_TIMEOUT": "900"}, category='xdd'))
    config['builders'].append(
        BuilderConfig(name="xdd-sles11-x86_64",
                      slavenames=["pod11"],
                      factory=xdd_factory,
                      env={"XDDTEST_TIMEOUT": "900"},
                      category='xdd'))
    config['builders'].append(
        BuilderConfig(name="xdd-osx-10-8",
                      slavenames=["natureboy"],
                      factory=xdd_factory,
                      env={"XDDTEST_TIMEOUT": "900"},
                      category='xdd'))
    # config['builders'].append(BuilderConfig(name="xdd-rhel6-ppc64", slavenames=["spry02"], factory=xdd_factory, env={"XDDTEST_TIMEOUT": "900"}, category='xdd'))

    ####### SCHEDULERS

    # Configure the Schedulers, which decide how to react to incoming
    # changes.  In this case, just kick off a 'runtests' build

    # Configure the nightly testing so that every test lives in the same
    # buildset
    from buildbot.schedulers.basic import SingleBranchScheduler
    from buildbot.schedulers.timed import Periodic,Nightly
    build_nightly_xdd=Nightly(name="xdd-nightly1",
                              branch = "master",
                              properties={'owner' : ['*****@*****.**']},
                              builderNames=["xdd-sles11-x86_64", "xdd-osx-10-8"],
                              hour = 2,
                              minute = 3)
    config['schedulers'].append(build_nightly_xdd)

    # Configure each force build seperately so that they live in differing
    # buildsets
    from buildbot.schedulers.forcesched import ForceScheduler
    # config['schedulers'].append(ForceScheduler(name="xdd-force1", builderNames=["xdd-rhel5-x86_64"]))
    # config['schedulers'].append(ForceScheduler(name="xdd-force2", builderNames=["xdd-rhel6-x86_64"]))
    # config['schedulers'].append(ForceScheduler(name="xdd-force3", builderNames=["xdd-sles10-x86_64"]))
    config['schedulers'].append(ForceScheduler(name="xdd-force4",
                                               builderNames=["xdd-sles11-x86_64"]))
    config['schedulers'].append(ForceScheduler(name="xdd-force6",
                                               builderNames=["xdd-osx-10-8"]))
    # config['schedulers'].append(ForceScheduler(name="xdd-force7", builderNames=["xdd-rhel6-ppc64"]))

    ####### STATUS TARGETS

    # 'status' is a list of Status Targets.  The results of each build will
    # be pushed to these targets.  buildbot/status/*.py has a variety to
    # choose from, including web pages, email senders, and IRC bots.
    from buildbot.status.mail import MailNotifier

    # One summary mail per buildset; xddSummaryMail is defined elsewhere
    # in this file.
    xddMN = MailNotifier(fromaddr="*****@*****.**",
                         extraRecipients=['*****@*****.**'],
                         categories='xdd',
                         buildSetSummary=True,
                         messageFormatter=xddSummaryMail)
    config['status'].append(xddMN)
def __init__(self, source, python="python", test=None):
    """Factory for a distutils project: check out, ``setup.py build``,
    then optionally run *test* as the test command."""
    BuildFactory.__init__(self, [source])
    build_cmd = [python, "./setup.py", "build"]
    self.addStep(Compile(command=build_cmd))
    if test is None:
        return
    self.addStep(Test(command=test))
def get_build_step(link, type, options=None):
    """Build a buildbot Compile step that drives ``cmake --build``.

    link     -- link mode label, e.g. 'static' or 'shared'; used in the step
                name and in the doStepIf gating.
    type     -- build configuration, 'debug' or anything else for Release.
                (Name shadows the builtin but is kept for caller
                compatibility.)
    options  -- optional list of feature flags ('frameworks', 'newSDK',
                'scan-build', 'coverity', 'android', 'ios', 'clang',
                'macos', ...).  Defaults to no options; previously this was
                a mutable default argument ([]), replaced by the
                None-sentinel idiom (behaviour unchanged -- the list was
                never mutated across calls, but the anti-pattern is gone).

    Returns a Compile step whose command is wrapped in scan-build/cov-build
    when static analysis is requested.
    """
    from buildbot.process.properties import Interpolate
    from buildbot.steps.shell import Compile

    if options is None:
        options = []

    target = 'install'
    if 'frameworks' in options:
        suffix = [link, type, 'frameworks']
    else:
        suffix = [link, type]
    if 'newSDK' in options:
        suffix.append('10.15')
        target = 'all'
    # Analysis builds compile everything but install nothing.
    if ('scan-build' in options) or ('coverity' in options):
        target = 'all'

    build_command = 'cmake --build . --target ' + target

    # For multi-target generators (Xcode, VS, etc.) we must specify the build configuration
    if type == 'debug':
        build_command += ' --config Debug'
    else:
        build_command += ' --config Release'

    # iOS build uses arch arm64
    # if 'ios' in options:
    #     build_command += ' -- -arch arm64'

    # Wrap the build in the requested static-analysis driver.
    if 'scan-build' in options:
        build_command = 'scan-build ' + build_command
    if 'coverity' in options:
        build_command = 'cov-build --dir cov-int ' + build_command

    return Compile(
        name='build (' + link + ' ' + type + ')',
        description=['building'],
        descriptionSuffix=suffix,
        descriptionDone=['build'],
        # Always run analysis/cross builds; otherwise only on macOS
        # builders, and skip static-link builds there.
        doStepIf=lambda step: ('scan-build' in options) or
        ('coverity' in options) or ('android' in options) or
        ('ios' in options) or ('clang' in options) or (
            ((not options) or
             ('macos' in step.build.getProperty('buildername'))) and
            (link != 'static' or not ('macos' in step.build.getProperty(
                'buildername')))),
        hideStepIf=skipped,
        workdir=Interpolate('%(prop:builddir)s/build/build'),
        # Append make-style flags only when the 'makefile' property is set.
        command=Interpolate(
            '%(kw:command)s %(prop:makefile:#?| -- -k -j %(prop:parallel)s|)s',
            command=build_command),
        env={
            'PATH': Interpolate('%(prop:toolchain_path)s%(prop:PATH)s'),
            'INCLUDE': Interpolate('%(prop:vc_include)s'),
            'LIB': Interpolate('%(prop:vc_lib)s'),
            'LIBPATH': Interpolate('%(prop:vc_libpath)s')
        },
        want_stdout=True,
        want_stderr=True,
        logEnviron=False)
def __init__(self, source, perl="perl"):
    """Factory for a Perl/MakeMaker module: Makefile.PL, make, make test."""
    BuildFactory.__init__(self, [source])
    for build_step in (
            Configure(command=[perl, "Makefile.PL"]),
            Compile(command=["make"]),
            PerlModuleTest(command=["make", "test"]),
    ):
        self.addStep(build_step)
def __init__(self, **kwargs):
    """Source-tarball factory for the mingw-w64 toolchain.

    Builds the nightly/release source archive: pulls binutils, gcc,
    gmp/mpfr/mpc and the mingw-w64 crt+headers, applies local patches,
    stamps the revision, packages everything into one tarball, uploads it
    to the master and triggers the per-platform build schedulers.

    Fix in this revision: ``FileUpload(..., mode=0600)`` used the legacy
    octal literal, which is a syntax error on Python 3; replaced with the
    equivalent ``0o600`` (same value on Python 2.6+).
    """
    factory.BuildFactory.__init__(self, **kwargs)

    # set properties about this builder
    self.addStep(
        SetProperty(property="masterdir", command=["echo", os.getcwd()]))
    self.addStep(
        SetProperty(property="basedir",
                    command=["bash", "-c", "builtin pwd"]))
    # Library versions come from the master config unless the scheduler
    # already supplied them as properties.
    self.addStep(
        SetProperty(property="gmp_version",
                    command=["echo", gConfig.get("libraries", "gmp")],
                    doStepIf=lambda step:
                    (not step.build.hasProperty("gmp_version"))))
    self.addStep(
        SetProperty(property="mpfr_version",
                    command=["echo", gConfig.get("libraries", "mpfr")],
                    doStepIf=lambda step:
                    (not step.build.hasProperty("mpfr_version"))))
    self.addStep(
        SetProperty(property="mpc_version",
                    command=["echo", gConfig.get("libraries", "mpc")],
                    doStepIf=lambda step:
                    (not step.build.hasProperty("mpc_version"))))
    # Branches default to trunk unless overridden by the scheduler.
    self.addStep(
        SetProperty(property="binutils_branch",
                    command=["echo", "trunk"],
                    doStepIf=lambda step:
                    (not step.build.hasProperty("binutils_branch"))))
    self.addStep(
        SetProperty(property="gcc_branch",
                    command=["echo", "trunk"],
                    doStepIf=lambda step:
                    (not step.build.hasProperty("gcc_branch"))))
    self.addStep(
        SetProperty(property="mingw_branch",
                    command=["echo", "trunk"],
                    doStepIf=lambda step:
                    (not step.build.hasProperty("mingw_branch"))))
    # Working archive name and the format used for the published name.
    self.addStep(
        SetProperty(property="filename",
                    command=[
                        "echo",
                        Property("src_archive",
                                 default="mingw-w64-src.tar.bz2")
                    ]))
    self.addStep(
        SetProperty(
            property="srcname_format",
            command=["echo", "mingw-w64-src%(datestamp:-)s.tar.bz2"],
            doStepIf=lambda step:
            (not step.build.hasProperty("srcname_format"))))
    #self.addStep(M64NightlyRev)
    if self.clobber:
        self.addStep(
            ShellCommand(name="clobber",
                         command=[
                             "rm", "-rfv", "build", "src",
                             Property("filename")
                         ],
                         haltOnFailure=False,
                         description=["clobber all"],
                         descriptionDone=["clobbered"]))
    # Fetch the driving makefile for all subsequent make invocations.
    self.addStep(
        ShellCommand(
            name="makefile-checkout",
            description=["makefile", "checkout"],
            descriptionDone=["checked out", "makefile"],
            command=[
                "curl", "-o", "mingw-makefile",
                "https://svn.code.sf.net/p/mingw-w64/code/experimental/buildsystem/makebuildroot.mk"
            ],
            haltOnFailure=True))
    # self.addStep(SVN(mode="export",
    #                  svnurl="https://mingw-w64.svn.sourceforge.net/svnroot/mingw-w64/experimental/buildsystem",
    #                  extra_args=["--trust-server-cert"]))
    # Keep a copy of the makefile on the master (owner read/write only).
    self.addStep(
        FileUpload(masterdest="mingw-makefile",
                   slavesrc="mingw-makefile",
                   maxsize=102400,
                   mode=0o600))
    self.addStep(
        ShellCommand(
            name="patch-pull",
            command=["make", "-f", "mingw-makefile", "patch-pull"],
            description=["patches", "pull"],
            descriptionDone=["pulled", "patches"]))
    # download binutils
    self.addStep(
        Compile(name="binutils-pull",
                description=["binutils", "pull"],
                descriptionDone=["pulled", "binutils"],
                command=["make", "-f", "mingw-makefile", "binutils-pull"],
                env={
                    "BINUTILS_REVISION":
                    Property("binutils_revision", default="head"),
                    "BINUTILS_BRANCH": Property("binutils_branch")
                }))
    # Local patches are skipped for try builds.
    self.addStep(
        ShellCommand(
            name="binutils-patch",
            description=["patch", "binutils"],
            descriptionDone=["binutils", "patched"],
            doStepIf=lambda step: (step.getProperty("scheduler") != "try"),
            workdir="build/src/binutils",
            command=[
                "bash", "-c",
                """if [ -n "$( ls ../patches/binutils/*.patch )" ] ; then for
 i in ../patches/binutils/*.patch ; do patch -p1 -f -i "$i" ; done ; fi""".replace("\n", " ")
            ]))
    # download gcc
    self.addStep(
        Compile(name="gcc-pull",
                description=["gcc", "pull"],
                descriptionDone=["pulled", "gcc"],
                command=["make", "-f", "mingw-makefile", "gcc-pull"],
                env={
                    "GCC_REVISION": Property("gcc_revision",
                                             default="head"),
                    "GCC_BRANCH": Property("gcc_branch")
                }))
    self.addStep(
        ShellCommand(
            name="gcc-patch",
            description=["patch", "gcc"],
            descriptionDone=["gcc", "patched"],
            doStepIf=lambda step: (step.getProperty("scheduler") != "try"),
            workdir="build/src/gcc/src",
            command=[
                "bash", "-c",
                """if [ -n "$( ls ../../patches/gcc/*.patch )" ] ; then for
 i in ../../patches/gcc/*.patch ; do patch -p1 -f -i "$i" ; done ; fi""".replace("\n", " ")
            ]))
    # download gmp
    self.addStep(
        Compile(name="gmp-download",
                description=["gmp", "download"],
                descriptionDone=["downloaded", "gmp"],
                command=["make", "-f", "mingw-makefile", "gmp-download"],
                env={"GMP_VERSION": Property("gmp_version")}))
    self.addStep(
        Compile(name="gmp-extract",
                description=["gmp", "extract"],
                descriptionDone=["extracted", "gmp"],
                command=["make", "-f", "mingw-makefile", "gmp-extract"],
                env={"GMP_VERSION": Property("gmp_version")}))
    # Fix gmp (fails to find m4 for flex)
    self.addStep(
        ShellCommand(
            name="gmp-patch",
            workdir="build/src/gcc/src/gmp",
            description=["patch", "gmp"],
            command=[
                "bash", "-c",
                """if [ -n "$( ls ../../../patches/gmp/*.patch )" ] ; then for
 i in ../../../patches/gmp/*.patch ; do patch -p1 -f -i "$i" ; done ; fi""".replace("\n", " ")
            ]))
    # download mpfr
    self.addStep(
        Compile(name="mpfr-download",
                description=["mpfr", "download"],
                descriptionDone=["downloaded", "mpfr"],
                command=["make", "-f", "mingw-makefile", "mpfr-download"],
                env={"MPFR_VERSION": Property("mpfr_version")}))
    self.addStep(
        Compile(name="mpfr-extract",
                description=["mpfr", "extract"],
                descriptionDone=["extracted", "mpfr"],
                command=["make", "-f", "mingw-makefile", "mpfr-extract"],
                env={"MPFR_VERSION": Property("mpfr_version")}))
    self.addStep(
        ShellCommand(
            name="mpfr-patch",
            description=["patch", "mpfr"],
            descriptionDone=["patched", "mpfr"],
            doStepIf=lambda step: (step.getProperty("scheduler") != "try"),
            workdir="build/src/gcc/src/mpfr",
            command=[
                "bash", "-c",
                """if [ -n "$( ls ../../../patches/mpfr/*.patch )" ] ; then for
 i in ../../../patches/mpfr/*.patch ; do patch -p1 -f -i "$i" ; done ; fi""".replace("\n", " ")
            ]))
    # download mpc
    self.addStep(
        Compile(name="mpc-download",
                description=["mpc", "download"],
                descriptionDone=["downloaded", "mpc"],
                command=["make", "-f", "mingw-makefile", "mpc-download"],
                env={"MPC_VERSION": Property("mpc_version")}))
    self.addStep(
        Compile(name="mpc-extract",
                description=["mpc", "extract"],
                descriptionDone=["extracted", "mpc"],
                command=["make", "-f", "mingw-makefile", "mpc-extract"],
                env={"MPC_VERSION": Property("mpc_version")}))
    self.addStep(
        ShellCommand(
            name="mpc-patch",
            description=["patch", "mpc"],
            descriptionDone=["patched", "mpc"],
            doStepIf=lambda step: (step.getProperty("scheduler") != "try"),
            workdir="build/src/gcc/src/mpc",
            command=[
                "bash", "-c",
                """if [ -n "$( ls ../../../patches/mpc/*.patch )" ] ; then for
 i in ../../../patches/mpc/*.patch ; do patch -p1 -f -i "$i" ; done ; fi""".replace("\n", " ")
            ]))
    # download mingw-w64 crt and headers
    self.addStep(
        Compile(name="mingw-pull",
                description=["mingw", "pull"],
                descriptionDone=["pulled", "mingw"],
                command=["make", "-f", "mingw-makefile", "mingw-pull"],
                env={
                    "MINGW_REVISION":
                    Property("mingw_revision", default="head"),
                    "MINGW_BRANCH": Property("mingw_branch")
                }))
    self.addStep(
        ShellCommand(
            name="mingw-patch",
            description=["patch", "mingw"],
            descriptionDone=["patched", "mingw"],
            workdir="build/src/mingw",
            doStepIf=lambda step: (step.getProperty("scheduler") != "try"),
            command=[
                "bash", "-c",
                """if [ -n "$( ls ../patches/mingw/*.patch )" ] ; then for
 i in ../patches/mingw/*.patch ; do patch -p1 -f -i "$i" ; done ; fi""".replace("\n", " ")
            ]))
    # update the build stamp
    self.addStep(
        SubversionRevProperty(name="gcc-svnrev",
                              workdir="build/src/gcc/src",
                              prop_prefix="gcc_",
                              config_dir=WithProperties("%(basedir:-.)s")))
    self.addStep(
        SubversionRevProperty(name="mingw-svnrev",
                              workdir="build/src/mingw",
                              prop_prefix="mingw_",
                              config_dir=WithProperties("%(basedir:-.)s")))
    # NOTE(review): this condition runs the step when 'datestamp' is
    # already non-empty; presumably it was meant to fill an *unset*
    # datestamp -- confirm intent before changing.
    self.addStep(
        SetProperty(property="datestamp",
                    command=["date", "-u", "+_%Y%m%d"],
                    doStepIf=lambda step:
                    (not step.getProperty("datestamp") == "")))
    # Write revstamp.h with the mingw revision/datestamp macros.
    self.addStep(
        ShellCommand(
            name="mingw-datestamp",
            workdir="build/src/mingw/mingw-w64-crt",
            description=["writing", "buildstamp"],
            descriptionDone=["buildstamp", "written"],
            command=[
                "bash", "-c",
                WithProperties(
                    """echo -e '/* generated by buildbot */\n"""
                    """#define __MINGW_W64_REV "%(mingw_revision)s"\n"""
                    """#define __MINGW_W64_REV_STAMP 
"%(mingw_datestamp)s"\n'"""
                    """ > revstamp.h """)
            ]))
    # Set the gcc version strings if this is a formal release
    self.addStep(
        ShellCommand(
            name="release-information",
            workdir="build/src/gcc/src/gcc",
            description=["writing", "version", "string"],
            descriptionDone=["version", "string", "written"],
            doStepIf=lambda step: step.getProperty("release_build"),
            command=[
                "bash", "-c",
                WithProperties(
                    """echo '%(release_gcc_ver:-)s' > BASE-VER && echo > DEV-PHASE """
                )
            ]))
    # make the tarball
    self.addStep(
        SetProperty(property="destname",
                    command=[
                        "echo",
                        WithPropertiesRecursive(
                            WithProperties("%(srcname_format)s"))
                    ]))
    self.addStep(
        Compile(name="src-package",
                description=["tarball", "package"],
                descriptionDone=["packaged", "tarball"],
                command=["make", "-f", "mingw-makefile", "src-archive"],
                env={"SRC_ARCHIVE": Property("filename")}))
    # upload the tarball to the master
    self.addStep(
        FileUpload(name="src-upload",
                   slavesrc=Property("filename"),
                   masterdest=Property("filename")))
    # trigger upload
    self.addStep(
        Trigger(
            name="src-publish",
            doStepIf=lambda step: step.build.getProperty("is_nightly"),
            schedulerNames=["sourceforge-upload"],
            waitForFinish=True,
            # needed for the builders
            set_properties={
                "masterdir": WithProperties("%(masterdir)s"),
                "filename": WithProperties("%(filename)s"),
                "destname": WithProperties("%(destname)s"),
                "datestamp": WithProperties("%(datestamp:-)s"),
                "target-os": "src",
                "path": WithProperties("%(path:-)s"),
                "is_nightly": WithProperties("%(is_nightly:-)s")
            }))
    # set the path that the build will be uploaded to (so the other slaves can
    # go ahead and download the source tarballs from sourceforge rather than
    # over the buildbot connection).  Note that if the "path" property is set,
    # we use that as an override instead.
    self.addStep(
        SetProperty(
            property="src_url",
            doStepIf=lambda step: step.build.getProperty("is_nightly"),
            command=[
                "echo",
                WithProperties(
                    "http://downloads.sourceforge.net/project/%s/%%(path:-%s)s/%%(destname)s"
                    % (gConfig.get("sourceforge", "group_id"),
                       gConfig.get("sourceforge", "path-src")))
            ]))
    # trigger building
    self.addStep(
        Trigger(name="start-build",
                schedulerNames=[
                    "trigger-linux-x86_64-x86_64",
                    "trigger-linux-x86_64-x86", "trigger-linux-x86-x86_64",
                    "trigger-linux-x86-x86", "trigger-cygwin-x86-x86_64",
                    "trigger-cygwin-x86-x86", "trigger-mingw-x86-x86_64",
                    "trigger-mingw-x86-x86", "trigger-darwin-x86-x86_64",
                    "trigger-darwin-x86-x86"
                ],
                waitForFinish=False,
                updateSourceStamp=True,
                set_properties={
                    "is_nightly": Property("is_nightly"),
                    "datestamp": Property("datestamp"),
                    "binutils_branch": Property("binutils_branch"),
                    "gcc_branch": Property("gcc_branch"),
                    "mingw_branch": Property("mingw_branch"),
                    "binutils_config_args":
                    Property("binutils_config_args", default=""),
                    "gcc_config_args": Property("gcc_config_args",
                                                default=""),
                    "mingw_config_args":
                    Property("mingw_config_args", default=""),
                    "gmp_config_args": Property("gmp_config_args",
                                                default=""),
                    "mpfr_config_args":
                    Property("mpfr_config_args", default=""),
                    "mpc_config_args": Property("mpc_config_args",
                                                default=""),
                    "gcc_revision": Property("gcc_revision"),
                    "mingw_revision": Property("mingw_revision"),
                    "masterdir": Property("masterdir"),
                    "path": Property("path"),
                    "src_archive": Property("filename"),
                    "src_url": Property("src_url", default="")
                }))