def __init__(self, repository, vcs_type=None, branch=None, username=None, password=None):
    """Build factory that checks out `repository` via SVN or Git.

    When not given explicitly, the VCS type is inferred from the
    repository URL prefix and the branch defaults to that VCS's
    conventional mainline.
    """
    factory.BuildFactory.__init__(self, [])
    # Normalize the URL so branch paths can be appended safely.
    if not repository.endswith("/"):
        repository += "/"
    # Guess the VCS from well-known URL prefixes when not specified.
    if not vcs_type:
        if repository.startswith("https://svn."):
            vcs_type = "svn"
        elif repository.startswith("git://github.com/"):
            vcs_type = "git"
    # Fall back to the conventional default branch for the VCS.
    if not branch:
        branch = {"svn": "trunk", "git": "master"}[vcs_type]
    if vcs_type == "svn":
        checkout = SVN(
            baseURL=repository,
            defaultBranch=branch,
            username=username,
            password=password,
        )
        self.addStep(checkout)
    elif vcs_type == "git":
        self.addStep(Git(repourl=repository, branch=branch))
def addGetSourcecodeForProject(self, project, name=None, src_dir=None, **kwargs):
    """Add a Git checkout step for one standalone project repository."""
    # 'project' names a repo which is not a part of the monorepo.
    # We do not enforce that here, though.
    repourl = kwargs.pop('repourl', None)
    if not repourl:
        repourl = self.repourl_prefix + "llvm-%s.git" % project
    step_name = name if name else 'Checkout %s' % project
    # Check out to the given directory if any; otherwise this becomes a
    # part of the unified source tree under a per-project directory.
    checkout_dir = src_dir if src_dir is not None else 'llvm-%s' % project
    # A caller-supplied workdir is ignored; we always use checkout_dir.
    kwargs.pop('workdir', None)
    self.addStep(
        Git(name=step_name,
            repourl=repourl,
            progress=True,
            workdir=WithProperties(checkout_dir),
            **kwargs))
def addGetSourcecodeSteps(self, **kwargs):
    """Check out the whole llvm-project monorepo."""
    monorepo_url = self.repourl_prefix + "llvm-project.git"
    self.addStep(
        Git(name='Checkout the source code',
            repourl=monorepo_url,
            progress=True,
            workdir=WithProperties(self.monorepo_dir),
            **kwargs))
def sphinx_factory(repo, target='publish', branch='master'):
    """Return a build factory that checks out `repo` and runs `make <target>`.

    Args:
        repo: Git repository URL to build from.
        target: make target to run (default 'publish').
        branch: branch to check out (default 'master').

    Returns:
        The configured BuildFactory.
    """
    f = factory.BuildFactory()
    # Check out the source.
    f.addStep(Git(repourl=repo, branch=branch, retry=GIT_RETRY))
    # Run the build process.
    f.addStep(ShellCommand(command=["make", target]))
    # BUG FIX: the factory was constructed but never returned, so
    # callers always received None.
    return f
def addGetSourcecodeForProject(self, project, name=None, src_dir=None, **kwargs):
    """Add a source-checkout step for `project`.

    Uses a per-project SVN checkout in legacy mode, otherwise a Git
    clone of the project's standalone repository.
    """
    # Drop 'is_legacy_mode' if it leaked in through kwargs.
    kwargs.pop('is_legacy_mode', None)
    if self.is_legacy_mode:
        # Legacy mode: SVN checkout of the individual project repo.
        workdir, baseURL = svn_repos[project]
        step_name = name or ('svn-%s' % project)
        # Check out to the given directory if any; otherwise into the
        # project's default location in the source tree.
        checkout_dir = src_dir
        if checkout_dir is None:
            checkout_dir = workdir % {'llvm_srcdir': self.llvm_srcdir}
        if not kwargs.get('mode'):
            kwargs['mode'] = 'update'
        if not kwargs.get('defaultBranch'):
            kwargs['defaultBranch'] = 'trunk'
        self.addStep(
            SVN(name=step_name,
                workdir=checkout_dir,
                baseURL=WithProperties(baseURL),
                **kwargs))
        return
    # 'project' names a repo which is not a part of the monorepo.
    # We do not enforce that here, though.
    repourl = kwargs.pop('repourl', None)
    if not repourl:
        repourl = self.repourl_prefix + "llvm-%s.git" % project
    step_name = name or ('Checkout %s' % project)
    # Check out to the given directory if any; otherwise this becomes a
    # part of the unified source tree.
    checkout_dir = src_dir if src_dir is not None else 'llvm-%s' % project
    # Ignore any caller-supplied workdir; we always use checkout_dir.
    kwargs.pop('workdir', None)
    self.addStep(
        Git(name=step_name,
            repourl=repourl,
            progress=True,
            workdir=WithProperties(checkout_dir),
            **kwargs))
def __init__(self, repo, *testnames):
    """Build procedure that runs Emacs Lisp tests from a GitHub repo."""
    BuildProcedure.__init__(self, 'elisp')
    # Clone the repo, pick up $EMACS from the environment, and create a
    # throwaway temporary HOME directory for the test runs.
    self.addSteps(
        Git(repourl='git://github.com/%s.git' % repo),
        SetPropertiesFromEnv(variables=['EMACS']),
        SetProperty(
            command=[
                Emacs(), '--batch', '--eval',
                '(princ (make-temp-file "home" t ".bbot"))'
            ],
            extract_fn=lambda rc, stdout, stderr: dict(FakeHome=stdout)))
    # Default to the conventional test entry point when none are given.
    for testname in testnames or ['test/test']:
        self.addStep(EmacsTest(load=testname + '.el'))
def addGetSourcecodeSteps(self, **kwargs):
    """Check out the sources: SVN in legacy mode, monorepo Git otherwise."""
    # Drop 'is_legacy_mode' if it leaked in through kwargs.
    kwargs.pop('is_legacy_mode', None)
    if self.is_legacy_mode:
        # Legacy mode requires the SVN checkout steps.
        self.addSVNSteps(**kwargs)
        return
    # Check out the monorepo.
    self.addStep(
        Git(name='Checkout the source code',
            repourl=self.repourl_prefix + "llvm-project.git",
            progress=True,
            workdir=WithProperties(self.monorepo_dir),
            **kwargs))
def __init__(self, source, python="python"):
    """Factory that runs twistedchecker over Twisted's source tree."""
    # Prepend the twistedchecker Git step so got_revision reflects
    # Twisted's own checkout, not twistedchecker's.
    checker_step = Git(
        repourl="https://github.com/twisted/twistedchecker",
        branch="master",
        alwaysUseLatest=True,
        mode="update",
        workdir="twistedchecker",
    )
    TwistedBaseFactory.__init__(self, python, [checker_step] + source, False)
    # Run the checker out of its own checkout, next to the source tree.
    self.addStep(
        CheckCodesByTwistedChecker,
        want_stderr=False,
        env={
            "PATH": ["../twistedchecker/bin", "${PATH}"],
            "PYTHONPATH": ["../twistedchecker", "${PYTHONPATH}"],
        })
def addRepository(self,
                  project=None,
                  repository=None,
                  vcs_type=None,
                  branch=None,
                  username=None,
                  password=None,
                  **kwargs):
    """Add an SVN or Git checkout step for `repository`.

    The VCS type and default branch are inferred from the repository
    URL when not given; extra kwargs are forwarded to the source step.
    """
    step_kwargs = dict(kwargs)
    # Normalize the URL so branch paths can be appended safely.
    if not repository.endswith("/"):
        repository += "/"
    # Guess the VCS from well-known URL prefixes when not specified.
    if not vcs_type:
        if repository.startswith("https://svn."):
            vcs_type = "svn"
        elif repository.startswith("git://github.com/"):
            vcs_type = "git"
    # Conventional default branch per VCS.
    if not branch:
        branch = {"svn": "trunk", "git": "master"}[vcs_type]
    if vcs_type == "svn":
        step_kwargs.update(
            baseURL=repository,
            defaultBranch=branch,
            username=username,
            password=password,
            codebase=project,
        )
        self.addStep(SVN(**step_kwargs))
    elif vcs_type == "git":
        step_kwargs.update(
            repourl=repository,
            branch=branch,
            codebase=project,
        )
        self.addStep(Git(**step_kwargs))
def commandComplete(self, cmd):
    """Finish the step, recording got_revision only for our repository."""
    # The buildbot Source step sets the got_revision property here; only
    # capture it when the triggering change is for this repository.
    if self.repo_changed():
        Git.commandComplete(self, cmd)
def initFactory(self, arch):
    """Return a GNU autoconf factory that builds polylib from Git."""
    source_step = Git("http://repo.or.cz/r/polylib.git")
    return GNUAutoconf(source_step,
                       test=None,
                       distcheck=None,
                       configureFlags=self.configure_flags)
def createPoclFactory(environ=None,
                      repository='https://github.com/pocl/pocl.git',
                      branch='master',
                      buildICD=True,
                      llvm_dir='/usr/',
                      icd_dir='/usr/',
                      tests_dir=None,
                      config_opts='',
                      pedantic=True,
                      tcedir='',
                      f=None,
                      cmake=False,
                      cache_dir=None):
    """
    Create a buildbot factory object that builds pocl.

    environ     Dictionary: The environment variables to append to the build.
                PATH and LD_LIBRARY_PATH will be added from llvm_dir (if given).
    repository  String: the repo to build from. defaults to pocl on github
    branch      String: the branch in 'repository' to build from. default to
                master
    buildICD    Bool: if false, the ICD extension is not built.
    llvm_dir    String: LLVM installation dir. I.e. without the 'bin/' or
                'lib/'.
    icd_dir     String: ICD loader installation dir. We expect here to be a ICD
                loader that understand the OCL_ICD_VENDORS parameter, i.e.
                ocl-icd or patched Khronos loader.
    tests_dir   String: Path where the extenral testsuite packages can be
                copied from. ('cp' is used, so they need to be on the same
                filesystem).
                NOTE: currently only a placeholder - not tested on the public
                buildbot
    config_opts String: extra options to pass to ./configure
    cmake       Bool: use CMake instead of autotools to build pocl
    cache_dir   String: Set the pocl kernel cache to this dir. If not set, the
                kcache is disabled.
    """
    # Avoid the shared-mutable-default-argument pitfall; behavior is
    # unchanged (environ was only ever copied, never mutated).
    if environ is None:
        environ = {}
    myenviron = environ.copy()

    # Put the LLVM binaries and libraries first on the search paths.
    if 'PATH' in myenviron:
        myenviron['PATH'] = llvm_dir + "/bin/:" + myenviron['PATH'] + ":${PATH}"
    else:
        myenviron['PATH'] = llvm_dir + "/bin/:${PATH}"
    if 'LD_LIBRARY_PATH' in myenviron:
        # BUG FIX: this branch previously read myenviron['PATH'] here,
        # clobbering LD_LIBRARY_PATH with the PATH contents.
        myenviron['LD_LIBRARY_PATH'] = (llvm_dir + "/lib/:" +
                                        myenviron['LD_LIBRARY_PATH'] +
                                        ":${LD_LIBRARY_PATH}")
    else:
        myenviron['LD_LIBRARY_PATH'] = llvm_dir + "/lib/:${LD_LIBRARY_PATH}"

    # TCE toolchain, if used, goes in front of everything.
    if tcedir:
        myenviron['PATH'] = tcedir + "/bin/:" + myenviron['PATH']
        myenviron['LD_LIBRARY_PATH'] = (tcedir + "/lib/:" +
                                        myenviron['LD_LIBRARY_PATH'])

    # Enable the pocl kernel cache only when a cache dir was requested.
    myenviron['POCL_BUILD_KERNEL_CACHE'] = '1' if cache_dir else '0'

    if f is None:
        f = factory.BuildFactory()

    f.addStep(
        Git(repourl=repository,
            mode=Property('git_mode'),
            ignore_ignores=True,
            branch=branch))

    # Clear last test round's kernel cache.
    # NB: if you run two slave builds on the same machine, this
    # will not work!
    if cache_dir:
        f.addStep(
            ShellCommand(command=['rm', '-rf', cache_dir],
                         haltOnFailure=True,
                         name='clean kcache',
                         description='cleaning kcache',
                         descriptionDone='cleaned kcache'))

    if not cmake:
        f.addStep(
            ShellCommand(command=["./autogen.sh"],
                         haltOnFailure=True,
                         name="autoconfig",
                         env=myenviron,
                         description="autoconfiging",
                         descriptionDone="autoconf"))

    # Stage the external testsuite packages next to the examples.
    if tests_dir is not None:
        f.addStep(
            ShellCommand(
                haltOnFailure=True,
                command=[
                    "cp", "-u", tests_dir + AMD_test_pkg,
                    "examples/AMD/" + AMD_test_pkg
                ],
                name="copy AMD",
                description="copying",
                descriptionDone="copied AMD",
                # kludge around 'cp' always complaining if source is missing
                decodeRC={0: SUCCESS, 1: SUCCESS}))
        f.addStep(
            ShellCommand(haltOnFailure=False,
                         command=[
                             "cp", "-u", tests_dir + ViennaCL_test_pkg,
                             "examples/ViennaCL/" + ViennaCL_test_pkg
                         ],
                         name="copy ViennaCL",
                         description="copying",
                         descriptionDone="copied ViennaCL",
                         decodeRC={0: SUCCESS, 1: SUCCESS}))

    if cmake:
        f.addStep(
            ShellCommand(command=["cmake", "."],
                         env=myenviron,
                         haltOnFailure=True,
                         name="CMake",
                         description="cmaking",
                         descriptionDone="cmade"))
    else:
        configOpts = config_opts.split(' ')
        if pedantic:
            configOpts = configOpts + ['--enable-pedantic']
        if not buildICD:
            configOpts = configOpts + ['--disable-icd']
        f.addStep(
            ShellCommand(command=["./configure"] + configOpts,
                         haltOnFailure=True,
                         name="configure pocl",
                         env=myenviron,
                         description="configureing",
                         descriptionDone="configure"))

    f.addStep(Compile(env=myenviron))

    if tests_dir is not None and not cmake:
        f.addStep(
            ShellCommand(command=["make", "prepare-examples"],
                         haltOnFailure=True,
                         name="prepare examples",
                         env=myenviron,
                         description="preparing",
                         descriptionDone="prepare"))

    if tcedir:
        f.addStep(
            ShellCommand(command=["./tools/scripts/run_tta_tests"],
                         haltOnFailure=True,
                         name="checks",
                         env=myenviron,
                         description="testing",
                         descriptionDone="tests",
                         logfiles={"test.log": "tests/testsuite.log"},
                         timeout=60 * 60))
    else:
        f.addStep(
            ShellCommand(
                command=["make", "check"],
                haltOnFailure=True,
                name="checks",
                env=myenviron,
                description="testing",
                descriptionDone="tests",
                logfiles={"test.log": "tests/testsuite.log"},
                # blas3 alone takes 15-20 min.
                timeout=60 * 60))

    # Run the tests once more, now from the kernel cache dir, if used.
    if cache_dir:
        f.addStep(
            ShellCommand(command=["make", "check"],
                         haltOnFailure=True,
                         name="kcache checks",
                         env=myenviron,
                         description="testing kcache",
                         descriptionDone="tested kcache",
                         logfiles={"test.log": "tests/testsuite.log"},
                         # NOTE(review): 5 seconds looks far too low next to
                         # the 60*60 timeouts above — confirm intent.
                         timeout=5))
    return f
def startVC(self, branch, revision, patch):
    """Start checkout; pin to our configured branch for foreign changes."""
    if self.repo_changed():
        return Git.startVC(self, branch, revision, patch)
    # The triggering change came from a different repository: build our
    # own configured branch at its current HEAD instead.
    return Git.startVC(self, self.branch, None, None)
def loadConfig(config):
    """Wire the XDD change source, builders, schedulers and status
    targets into the buildmaster `config` dict."""

    ####### CHANGESOURCES

    # The 'change_source' setting tells the buildmaster how it should find
    # out about source code changes.
    from buildbot.changes.gitpoller import GitPoller
    from buildbot.changes.filter import ChangeFilter
    config['change_source'].append(
        GitPoller(repourl='[email protected]:ORNL/xdd.git',
                  workdir='gitpoller-workdir-xdd-master',
                  pollinterval=120,
                  branch='master',
                  project='xdd'))
    # NOTE(review): this filter is constructed but never referenced below.
    xdd_filter = ChangeFilter(project='xdd', branch='testing')

    ####### BUILDERS

    # The 'builders' list defines the Builders, which tell Buildbot how to
    # perform a build: what steps, and which slaves can execute them. Note
    # that any particular build will only take place on one slave.
    from buildbot.process.factory import BuildFactory, GNUAutoconf
    from buildbot.steps.source import Git
    from buildbot.steps.shell import ShellCommand, Configure, Compile, Test

    xdd_factory = BuildFactory()
    # Check out the source.
    xdd_factory.addStep(Git(repourl='[email protected]:ORNL/xdd.git',
                            mode='copy',
                            branch='master'))
    # Generate the test configuration.
    xdd_factory.addStep(ShellCommand(
        command=['./contrib/buildbot_gen_test_config.sh'],
        name="configuring"))
    # Compile the code.
    xdd_factory.addStep(Compile(description=["compiling"]))
    # Install the code.
    xdd_factory.addStep(ShellCommand(command=['make', 'install'],
                                     name="make install"))
    # Perform make check.
    xdd_factory.addStep(ShellCommand(command=['make', 'check'],
                                     name="make check",
                                     maxTime=600))
    # Perform make test.
    xdd_factory.addStep(Test(description=["make test"], maxTime=600))
    # Perform cleanup of leftover xdd processes.
    xdd_factory.addStep(ShellCommand(
        command=['pkill', '-f', 'xdd', '||', 'echo ""'],
        name='process cleanup',
        maxTime=60))

    # Add the XDD build factory to each of the available builders.
    from buildbot.config import BuilderConfig
    # config['builders'].append(BuilderConfig(name="xdd-rhel5-x86_64", slavenames=["pod7"], factory=xdd_factory, env={"XDDTEST_TIMEOUT": "900"}, category='xdd'))
    # config['builders'].append(BuilderConfig(name="xdd-rhel6-x86_64", slavenames=["pod9"], factory=xdd_factory, env={"XDDTEST_TIMEOUT": "900"},category='xdd'))
    # config['builders'].append(BuilderConfig(name="xdd-sles10-x86_64", slavenames=["pod10"], factory=xdd_factory, env={"XDDTEST_TIMEOUT": "900"}, category='xdd'))
    config['builders'].append(BuilderConfig(name="xdd-sles11-x86_64",
                                            slavenames=["pod11"],
                                            factory=xdd_factory,
                                            env={"XDDTEST_TIMEOUT": "900"},
                                            category='xdd'))
    config['builders'].append(BuilderConfig(name="xdd-osx-10-8",
                                            slavenames=["natureboy"],
                                            factory=xdd_factory,
                                            env={"XDDTEST_TIMEOUT": "900"},
                                            category='xdd'))
    # config['builders'].append(BuilderConfig(name="xdd-rhel6-ppc64", slavenames=["spry02"], factory=xdd_factory, env={"XDDTEST_TIMEOUT": "900"}, category='xdd'))

    ####### SCHEDULERS

    # Configure the Schedulers, which decide how to react to incoming
    # changes. Nightly testing is configured so that every test lives in
    # the same buildset.
    from buildbot.schedulers.basic import SingleBranchScheduler
    from buildbot.schedulers.timed import Periodic, Nightly
    build_nightly_xdd = Nightly(name="xdd-nightly1",
                                branch="master",
                                properties={'owner': ['*****@*****.**']},
                                builderNames=["xdd-sles11-x86_64",
                                              "xdd-osx-10-8"],
                                hour=2,
                                minute=3)
    config['schedulers'].append(build_nightly_xdd)

    # Configure each force build separately so that they live in differing
    # buildsets.
    from buildbot.schedulers.forcesched import ForceScheduler
    # config['schedulers'].append(ForceScheduler(name="xdd-force1", builderNames=["xdd-rhel5-x86_64"]))
    # config['schedulers'].append(ForceScheduler(name="xdd-force2", builderNames=["xdd-rhel6-x86_64"]))
    # config['schedulers'].append(ForceScheduler(name="xdd-force3", builderNames=["xdd-sles10-x86_64"]))
    config['schedulers'].append(
        ForceScheduler(name="xdd-force4", builderNames=["xdd-sles11-x86_64"]))
    config['schedulers'].append(
        ForceScheduler(name="xdd-force6", builderNames=["xdd-osx-10-8"]))
    # config['schedulers'].append(ForceScheduler(name="xdd-force7", builderNames=["xdd-rhel6-ppc64"]))

    ####### STATUS TARGETS

    # 'status' is a list of Status Targets. The results of each build will
    # be pushed to these targets (web pages, email senders, IRC bots, ...).
    from buildbot.status.mail import MailNotifier
    xddMN = MailNotifier(fromaddr="*****@*****.**",
                         extraRecipients=['*****@*****.**'],
                         categories='xdd',
                         buildSetSummary=True,
                         messageFormatter=xddSummaryMail)
    config['status'].append(xddMN)
def startVC(self, branch, revision, patch):
    """Always check out self.branch at HEAD.

    The incoming branch, revision and patch are deliberately ignored.
    """
    # Pin the checkout to our configured branch at its current HEAD.
    pinned_branch = self.branch
    return Git.startVC(self, pinned_branch, None, None)
def startVC(self, branch, revision, patch):
    """Start checkout, pinning to our branch when the change is foreign."""
    # For changes from another repository, substitute our configured
    # branch at HEAD; otherwise honor the requested checkout.
    use_own_branch = not self.repo_changed()
    args = (self.branch, None, None) if use_own_branch else (branch, revision, patch)
    return Git.startVC(self, *args)
def CreateLinuxChromeFactory():
    """Run chrome tests with the latest dynamorio.

    TODO(rnk): Run drmemory, not dynamorio.

    We use a build of chrome produced weekly from a known good revision on
    the same slave.
    """
    cr_src = '../../linux-cr-builder/build/src'
    ret = factory.BuildFactory()
    ret.addStep(
        Git(repourl=dr_giturl,
            workdir='dynamorio',
            mode='update',
            name='Checkout DynamoRIO'))

    # If we need to execute 32-bit children, we'll need a full exports
    # package.
    ret.addStep(Configure(command=['cmake', '..', '-DDEBUG=OFF'],
                          workdir='dynamorio/build',
                          name='Configure release DynamoRIO'))
    ret.addStep(Compile(command=['make', '-j5'],
                        workdir='dynamorio/build',
                        name='Compile release DynamoRIO'))

    # Don't follow python children. This should speed up net_unittests,
    # which spawns a bunch of simple http servers to talk to.
    ret.addStep(ShellCommand(
        command=['bin64/drconfig', '-reg', 'python', '-norun', '-v'],
        workdir='dynamorio/build',
        name='don\'t follow python',
        description='don\'t follow python',
        descriptionDone='don\'t follow python'))

    # Per-test extra arguments (mostly gtest filters for known-bad tests).
    extra_args = {
        'browser_tests': ['--gtest_filter=AutofillTest.BasicFormFill'],
        'net_unittests':
            ['--gtest_filter=-CertDatabaseNSSTest.ImportCACertHierarchy*'],
        'remoting_unittests':
            ['--gtest_filter=-VideoFrameCapturerTest.Capture:'
             'DesktopProcessTest.DeathTest'],
        # crbug.com/308273: this test is flaky
        'base_unittests':
            ['--gtest_filter=-TraceEventTestFixture.TraceContinuousSampling'],
        'content_shell':
            ['--run-layout-test', 'file:///home/chrome-bot/bb.html'],
    }

    # Chromium tests under DynamoRIO.
    for test in LINUX_CHROME_TESTS:
        cmd = [
            'xvfb-run', '-a',
            '../dynamorio/build/bin64/drrun',
            '-stderr_mask', '12',  # Show DR crashes
            '--',
            cr_src + '/out/Release/' + test
        ] + extra_args.get(test, [])
        # We used to md5 the output, but that's too brittle. Just dump it
        # to stdout so humans can verify it. The return code will tell us
        # if we crash.
        # TODO(rnk): We should run some selection of layout tests if we
        # want to verify output.
        ret.addStep(Test(command=cmd,
                         env={'CHROME_DEVEL_SANDBOX':
                              '/opt/chromium/chrome_sandbox'},
                         name=test,
                         descriptionDone=test,
                         description=test))
    return ret