def MakeReleaseBuilder():
    """Build factory for the release channel: upload the artifacts, publish
    the 'stable' update channel and refresh the web site."""
    factory = BuildFactory()
    upload_cmd = [
        'bash', 'upload.sh',
        SCHAT_VERSION,
        Property('revision'),
    ]
    factory.addStep(MasterShellCommand(name='Upload', command=upload_cmd))
    channel_cmd = [
        'php', 'update.php',
        '--channel', Property('channel', default='stable'),
        '--version', SCHAT_VERSION,
        '--revision', Property('revision'),
        '--os', Property('os', default='win32,osx,ubuntu'),
    ]
    factory.addStep(
        MasterShellCommand(name='Create Update Channel', command=channel_cmd))
    site_cmd = ['php', 'site.php', '--version', SCHAT_VERSION]
    factory.addStep(MasterShellCommand(name='Update Site', command=site_cmd))
    return factory
def createRepository(distribution, repository_path):
    """Return master-side steps that (re)build the package repository
    metadata for *distribution* inside *repository_path*.

    :param distribution: string like ``"fedora-21"`` or ``"ubuntu-trusty"``;
        only the flavour part (before the first ``-``) selects the tooling.
    :param repository_path: master-side directory holding the repository.
    :return: list of buildbot steps (empty when the flavour is unknown).

    Fix: corrected the typo "distritubtion" in the error message.
    """
    steps = []
    # The version part of the split is currently unused; only the flavour
    # selects the repository tooling.
    flavour, version = distribution.split('-', 1)
    if flavour in ("fedora", "centos"):
        # RPM-based distributions: createrepo_c rebuilds repodata in place.
        steps.append(
            MasterShellCommand(name='build-repo-metadata',
                               description=["building", "repo", "metadata"],
                               descriptionDone=["build", "repo", "metadata"],
                               command=["createrepo_c", "."],
                               path=repository_path,
                               haltOnFailure=True))
    elif flavour in ("ubuntu", "debian"):
        steps.append(
            MasterShellCommand(
                name='build-repo-metadata',
                description=["building", "repo", "metadata"],
                descriptionDone=["build", "repo", "metadata"],
                # FIXME: Don't use shell here.
                command=
                "dpkg-scanpackages --multiversion . | gzip > Packages.gz",
                path=repository_path,
                haltOnFailure=True))
    else:
        error("Unknown distribution %s in createRepository." %
              (distribution, ))
    return steps
def cleanup_master_workdir(self):
    """Add always-run steps that wipe the master-side scratch directories
    (the master workdir and the temporary nuget cache)."""
    workdir_step = MasterShellCommand(
        name="cleanup_master_workdir",
        command=['bash', '-x', '-c',
                 Interpolate('rm -rf %s' % MASTERWORKDIR)],
        alwaysRun=True)
    nuget_step = MasterShellCommand(
        name="cleanup_nuget_tmp",
        command=['rm', '-rf', '/tmp/nuget'],
        alwaysRun=True)
    self.addStep(workdir_step)
    self.addStep(nuget_step)
def upload_benchmarker(self):
    """Ship the benchmarker tarball from the master to the slave, verify
    and unpack it there, then wipe the master-side scratch directory."""
    tarball = 'benchmarker.tar.gz'
    self.addStep(
        FileDownload(Interpolate('%s/benchmarker.tar.gz' % MASTERWORKDIR),
                     tarball,
                     workdir='.'))
    self.addStep(
        ShellCommand(name='md5', command=['md5sum', tarball], workdir='.'))
    self.addStep(
        ShellCommand(name='unpack_benchmarker',
                     command=['tar', 'xf', tarball],
                     workdir='.'))
    self.addStep(
        ShellCommand(name='debug2',
                     command=['ls', '-lha', 'benchmarker'],
                     workdir='.'))
    self.addStep(
        MasterShellCommand(name="cleanup",
                           command=['rm', '-rf', Interpolate(MASTERWORKDIR)]))
def makeCleanOldBuildsFactory():
    """
    Remove build results older than 14 days.
    """
    # FIXME we shouldn't hard code this (DRY)
    basedir = r'/srv/buildmaster/data'
    results_path = os.path.join(basedir, "private_html")
    factory = BuildFactory()
    find_cmd = [
        'find', results_path,
        '-type', 'f',
        '-mtime', '+14',
        '-exec', 'unlink', '{}', ';',
        '-print',
    ]
    factory.addStep(
        MasterShellCommand(find_cmd,
                           description=['Removing', 'old', 'results'],
                           descriptionDone=['Remove', 'old', 'results'],
                           name='remove-old-results'))
    # Vagrant tutorial boxes are created on the Vagrant slave and uploaded
    # to S3, but must stay on the slave for the subsequent tests
    # (flocker/acceptance/vagrant/centos-7/zfs and
    # flocker/installed-package/vagrant/centos-7), so there is no obvious
    # place to delete them.  Periodically prune boxes older than the number
    # of days passed to the helper script instead.
    factory.addStep(
        ShellCommand(
            command=['python', '/home/buildslave/remove-old-boxes.py', '14'],
            description=['Removing', 'old', 'boxes'],
            descriptionDone=['Remove', 'old', 'boxes'],
            name='remove-old-boxes'))
    return factory
def get_artifact_step():
    """Return the three master-side steps that publish a build artifact:
    upload the install tree, pack and index it by revision and by branch,
    then clean up the temporary directory on the master."""
    from buildbot.process.properties import Interpolate
    from buildbot.steps.transfer import DirectoryUpload
    from buildbot.steps.master import MasterShellCommand

    # Only run when this build asked for an artifact and was not triggered
    # externally (same predicate guards upload and packing).
    def wants_artifact(step):
        return (step.build.getProperty('artifact')
                and ('external' not in step.build.getProperty('trigger')))

    upload = DirectoryUpload(
        description=['uploading'],
        descriptionSuffix=['artifact'],
        descriptionDone=['upload'],
        slavesrc=Interpolate('%(prop:workdir)s/install'),
        masterdest=Interpolate(
            '%(prop:buildername)s/tmp/%(prop:got_revision)s'),
        compress='gz',
        locks=[master_upload_lock.access('exclusive')],
        doStepIf=wants_artifact,
        hideStepIf=skipped_or_success)
    pack = MasterShellCommand(
        name='artifact',
        description=['creating artifact'],
        descriptionDone=['create artifact'],
        doStepIf=wants_artifact,
        hideStepIf=skipped_or_success,
        command=Interpolate(
            'mkdir -p artifacts/by-revision/%(prop:got_revision)s && '
            'mkdir -p artifacts/by-branch/%(src::branch:-master)s && '
            'cd %(prop:buildername)s/tmp && '
            '%(prop:archive_command)s %(prop:buildername)s%(prop:artifact_extension)s %(prop:got_revision)s/ && '
            'mv %(prop:buildername)s%(prop:artifact_extension)s ../../artifacts/by-revision/%(prop:got_revision)s && '
            'ln -f ../../artifacts/by-revision/%(prop:got_revision)s/%(prop:buildername)s%(prop:artifact_extension)s ../../artifacts/by-branch/%(src::branch:-master)s/%(prop:buildername)s%(prop:artifact_extension)s && '
            'chmod -R a+rX ../../artifacts/by-revision/%(prop:got_revision)s && '
            'chmod -R a+rX ../../artifacts/by-branch/%(src::branch:-master)s'
        ))
    cleanup = MasterShellCommand(
        name='clean master',
        description=['cleaning master'],
        descriptionDone=['clean master'],
        alwaysRun=True,
        hideStepIf=skipped_or_success,
        command=Interpolate('rm -rf "%(prop:buildername)s/tmp"'))
    return [upload, pack, cleanup]
def update_machine_file(self):
    """Copy the machine *.conf files from the master workdir checkout into
    the shared ../machines/ directory on the master."""
    copy_cmd = Interpolate('cp -v %s/benchmarker/machines/' % MASTERWORKDIR +
                           '*.conf ../machines/')
    self.addStep(
        MasterShellCommand(
            name='cp_machine',
            command=['bash', '-o', 'pipefail', '-c', copy_cmd]))
def update_config_file(self):
    """Copy the config *.conf files from the master workdir checkout into
    the shared ../configs/ directory on the master."""
    copy_cmd = Interpolate('cp -v %s/benchmarker/configs/' % MASTERWORKDIR +
                           '*.conf ../configs/')
    self.addStep(
        MasterShellCommand(name='cp_config',
                           command=['bash', '-c', copy_cmd]))
def export_benchmark_list(self):
    """Dump the benchmark names known to compare.exe into benchmarks.list
    on the master (also echoed to the step log via tee)."""
    list_cmd = Interpolate(
        'mono %s/benchmarker/tools/compare.exe --list-benchmarks | '
        % MASTERWORKDIR + 'tee benchmarks.list')
    self.addStep(
        MasterShellCommand(name="list_benchmarks",
                           command=['bash', '-c', list_cmd]))
def push_to_github(__opts__):
    """Return the step that rebases the svn2git sandbox onto the latest SVN
    trunk and pushes the result to GitHub.

    Fix: removed the unused local ``cwd`` — the sandbox path is already
    embedded in the shell script itself.
    """
    return [
        MasterShellCommand(command="""
            cd sandboxes/{0}/svn2git
            svn2git --rebase --verbose
            git push origin master""".format(project),
                           description='Pushing commit to GitHub',
                           descriptionDone='Push commit to GitHub (trunk)'),
    ]
def push_to_github(__opts__):
    """Return the step that pulls the private master branch into the public
    sandbox and pushes the result to GitHub.

    Fix: removed the unused local ``cwd`` — the sandbox path is already
    embedded in the shell script itself.
    """
    return [
        MasterShellCommand(command="""
            cd sandboxes/{0}/public
            git pull --rebase private master
            git push origin master""".format(project),
                           description='Pushing commit to GitHub',
                           descriptionDone='Push commit to GitHub (trunk)'),
    ]
def SeedTorrent(filename, **kwargs):
    """Pseudo-class. This build step adds a torrent on the master.

    Requires a .netrc file to be present on the master containing the
    transmission-remote authentication credentials.
    """
    seed_cmd = [
        "transmission-remote",
        "-a", Interpolate("%s.torrent", filename),
        "--find", filename,
    ]
    return MasterShellCommand(command=seed_cmd, **kwargs)
def build_sgen_grep_binprot_on_master(self):
    """Build the sgen-grep-binprot helper on the master, at the pinned git
    revision, using the script from the master workdir checkout."""
    build_script = Interpolate(
        'bash %s/benchmarker/performancebot/utils/build-sgen-grep-binprot.sh %s `pwd`'
        % (MASTERWORKDIR, MONO_SGEN_GREP_BINPROT_GITREV))
    self.addStep(
        MasterShellCommand(name="build_sgen_grep_binprot",
                           command=['bash', '-c', build_script]))
def MakeTorrent(filename, **kwargs):
    "Pseudo-class. This build step creates a torrent on the master."
    trackers = [
        "udp://tracker.publicbt.com:80",
        "udp://tracker.opentrackr.org:1337/announce",
        "http://tracker.bittorrent.am/announce",
        "udp://tracker.sktorrent.net:6969",
    ]
    create_cmd = ["transmission-create"]
    for tracker in trackers:
        create_cmd += ["-t", tracker]
    create_cmd += ["-o", Interpolate("%s.torrent", filename), filename]
    return MasterShellCommand(command=create_cmd, **kwargs)
def export_benchmark_list(self, slave_config_hooks):
    """For every (machine, config) pair, dump the benchmark list reported
    by compare.exe into benchmarks-<machine>-<config>.list on the master.

    :param slave_config_hooks: iterable of (machine, config_name) pairs.

    Fix: pass bash options in the conventional order
    ``bash -o pipefail -c <script>`` (all options before the command
    string), matching the other MasterShellCommand invocations in this
    configuration.
    """
    for machine, config_name in sorted(slave_config_hooks):
        list_cmd = Interpolate(
            'mono --debug %s/benchmarker/tools/compare.exe --list-benchmarks --machine %s --config-file %s/benchmarker/configs/%s.conf | '
            % (MASTERWORKDIR, machine, MASTERWORKDIR, config_name) +
            'tee benchmarks-%s-%s.list' % (machine, config_name))
        self.addStep(
            MasterShellCommand(
                name="list_benchmarks_%s-%s" % (machine, config_name),
                command=['bash', '-o', 'pipefail', '-c', list_cmd]))
def _add_step_master_command(self, step):
    """
    Add a step for master command.

    ``step`` is a dict with a mandatory 'command' key and optional
    'name' and 'always-run' keys.
    """
    self.addStep(
        MasterShellCommand(
            name=step.get('name', 'Master command'),
            command=step['command'],
            haltOnFailure=True,
            alwaysRun=step.get('always-run', False),
        ))
def MakeBetaBuilder():
    """Build factory for the beta channel: publish a 'beta' update channel
    (win32 only by default)."""
    factory = BuildFactory()
    channel_cmd = [
        'php', 'update.php',
        '--channel', Property('channel', default='beta'),
        '--version', SCHAT_VERSION,
        '--revision', Property('revision'),
        '--os', Property('os', default='win32'),
    ]
    factory.addStep(
        MasterShellCommand(name='Create Update Channel', command=channel_cmd))
    return factory
def benchmarker_on_master(self):
    """Clone and build the benchmarker tools on the master, then pack the
    resulting binaries into benchmarker.tar.gz."""
    script_parts = [
        'pwd',
        'mkdir -p %s' % MASTERWORKDIR,
        'cd %s' % MASTERWORKDIR,
        'git clone --depth 1 -b master https://github.com/xamarin/benchmarker',
        'cd benchmarker/tools',
        # nuget crashes sometimes :-( so retry once
        '(nuget restore tools.sln || nuget restore tools.sln)',
        'xbuild',
        'cd ../..',
        'tar cvfz benchmarker.tar.gz benchmarker/tools/{*.dll,*.exe}',
        # `md5` on OS X, `md5sum` on Linux
        '(md5 benchmarker.tar.gz || md5sum benchmarker.tar.gz)',
    ]
    self.addStep(
        MasterShellCommand(
            name="build_benchmarker",
            command=['bash', '-x', '-c',
                     Interpolate(' && '.join(script_parts))]))
def benchmarker_on_master(self, benchmarker_branch=BENCHMARKER_BRANCH):
    """Clone the requested branch of the benchmarker repository on the
    master, build the compare tool and pack it into benchmarker.tar.gz."""
    script_parts = [
        'pwd',
        'mkdir -p %s' % MASTERWORKDIR,
        'cd %s' % MASTERWORKDIR,
        'git clone --depth 1 -b ' + benchmarker_branch +
        ' https://github.com/xamarin/benchmarker',
        'cd benchmarker/tools',
        '(/usr/bin/cli --version || true)',
        'bash ../performancebot/utils/nugethack.sh',
        'xbuild /t:compare',
        'cd ../..',
        'tar cvfz benchmarker.tar.gz benchmarker/tools/{*.dll,*.exe,*.pdb,Microsoft.BenchView.JSONFormat/}',
        # `md5` on OS X, `md5sum` on Linux
        '(md5 benchmarker.tar.gz || md5sum benchmarker.tar.gz)',
    ]
    self.addStep(
        MasterShellCommand(
            name="build_benchmarker",
            command=['bash', '-x', '-c',
                     Interpolate(' && '.join(script_parts))]))
def upload_release(platform):
    """Return the steps that upload the built nim executable for *platform*
    into the master's public_html test-data area."""
    upload_url = "test-data/{buildername[0]}/{got_revision[0][nim]}/"
    test_directory = 'public_html/' + upload_url
    exe_source = str(platform.nim_dir / "bin" / platform.nim_exe)
    exe_dest = gen_dest_filename(platform.nim_exe)
    mkdir_step = MasterShellCommand(
        command=['mkdir', '-p', FormatInterpolate(test_directory)],
        path="public_html",
        hideStepIf=True)
    upload_step = FileUpload(
        slavesrc=exe_source,
        workdir=str(platform.nim_dir),
        url=FormatInterpolate(upload_url + exe_dest),
        masterdest=FormatInterpolate(test_directory + exe_dest),
    )
    return [mkdir_step, upload_step]
def add_broot_steps(factory, arch, branch, env=None):
    """Add the broot clean/create/distribute steps to *factory*, upload the
    resulting tarball to the master and run the release script there.

    :param factory: BuildFactory to extend.
    :param arch: target architecture for ``osbuild broot create``.
    :param branch: branch name forwarded to the release-broot script.
    :param env: optional environment dict for the slave-side commands.

    Fix: replaced the mutable default argument ``env={}`` with ``None``
    (shared-dict pitfall); behaviour is unchanged.
    """
    if env is None:
        env = {}
    factory.addStep(
        ShellCommand(command=["./osbuild", "broot", "clean"],
                     description="cleaning",
                     descriptionDone="clean",
                     haltOnFailure=True,
                     env=env))
    command = ["./osbuild", "broot", "create", "--arch=%s" % arch]
    factory.addStep(
        ShellCommand(command=command,
                     description="creating",
                     descriptionDone="create",
                     haltOnFailure=True,
                     env=env))
    factory.addStep(
        ShellCommand(command=["./osbuild", "broot", "distribute"],
                     description="distributing",
                     descriptionDone="distribute",
                     haltOnFailure=True,
                     env=env))
    broot_dir = "~/public_html/broot/"
    broot_filename = "%(prop:buildername)s-%(prop:buildnumber)s.tar.xz"
    masterdest = Interpolate(os.path.join(broot_dir, broot_filename))
    factory.addStep(
        FileUpload(slavesrc="build/sugar-build-broot.tar.xz",
                   masterdest=masterdest))
    command = Interpolate("%s %s %s %s %s" %
                          (get_command_path("release-broot"), broot_dir,
                           broot_filename, arch, branch))
    factory.addStep(
        MasterShellCommand(command=command,
                           description="releasing",
                           descriptionDone="release"))
def run_testament(platform):
    """Run the testament suite on the slave and publish its HTML and DB
    result files under the master's public_html test-data area."""
    test_url = "test-data/{buildername[0]}/{got_revision[0][nim]}/"
    test_directory = 'public_html/' + test_url
    steps = [
        ShellCommand(command=['koch', 'test'],
                     workdir=str(platform.nim_dir),
                     env=platform.base_env,
                     haltOnFailure=True,
                     timeout=None,
                     **gen_description('Run', 'Running', 'Run', 'Testament')),
        MasterShellCommand(
            command=['mkdir', '-p', FormatInterpolate(test_directory)],
            path="public_html",
            hideStepIf=True),
    ]
    # Upload both result files (HTML report and the raw testament DB).
    for result_file in ('testresults.html', 'testament.db'):
        dest_name = gen_dest_filename(result_file)
        steps.append(
            FileUpload(
                slavesrc=result_file,
                workdir=str(platform.nim_dir),
                url=FormatInterpolate(test_url + dest_name),
                masterdest=FormatInterpolate(test_directory + dest_name),
            ))
    return steps
def makeInternalDocsFactory():
    """Factory that builds the Flocker documentation (spelling, linkcheck
    and HTML), uploads the result, and publishes the dev/release links."""
    factory = getFlockerFactory(python="python2.7")
    factory.addSteps(installDependencies())
    factory.addSteps(check_version())
    # Spelling and link checking are advisory: they report problems but do
    # not abort the build.
    factory.addStep(
        sphinxBuild("spelling", "build/docs",
                    logfiles={'errors': '_build/spelling/output.txt'},
                    haltOnFailure=False))
    factory.addStep(
        sphinxBuild(
            "linkcheck", "build/docs",
            logfiles={'errors': '_build/linkcheck/output.txt'},
            haltOnFailure=False,
            flunkOnWarnings=False,
            flunkOnFailure=False,
            warnOnFailure=True,
        ))
    factory.addStep(sphinxBuild("html", "build/docs"))
    factory.addStep(
        DirectoryUpload(
            b"docs/_build/html",
            resultPath('docs'),
            url=resultURL('docs'),
            name="upload-html",
        ))
    # On the master branch, point the stable 'doc-dev' symlink at the
    # freshly uploaded docs.
    link_step = MasterShellCommand(
        name='link-release-documentation',
        description=["linking", "release", "documentation"],
        descriptionDone=["link", "release", "documentation"],
        command=[
            "ln", '-nsf',
            resultPath('docs'),
            'doc-dev',
        ],
        doStepIf=isMasterBranch('flocker'),
    )
    factory.addStep(link_step)
    # On release branches, mirror the docs to the dev-docs S3 bucket.
    sync_step = MasterShellCommand(
        name='upload-release-documentation',
        description=["uploading", "release", "documentation"],
        descriptionDone=["upload", "release", "documentation"],
        command=[
            "s3cmd", "sync",
            '--verbose',
            '--delete-removed',
            '--no-preserve',
            # s3cmd needs a trailing slash.
            Interpolate("%(kw:path)s/", path=resultPath('docs')),
            Interpolate(
                "s3://%(kw:bucket)s/%(prop:version)s/",
                bucket='clusterhq-dev-docs',
            ),
        ],
        doStepIf=isReleaseBranch('flocker'),
    )
    factory.addStep(sync_step)
    return factory
], haltOnFailure=True, doStepIf=is_branched), ] # Steps to publish the rtdist. publish_rtdist_steps = [ # Upload the stage directory. DirectoryUpload(workersrc="built/stage", masterdest=rtdist_staging_dir, haltOnFailure=True), # Run pmerge. MasterShellCommand(name="pmerge", command=[ config.pmerge_bin, "-i", config.runtime_dir, rtdist_staging_dir ]) ] def MakeTorrent(filename, **kwargs): "Pseudo-class. This build step creates a torrent on the master." return MasterShellCommand(command=[ "transmission-create", "-t", "udp://tracker.publicbt.com:80", "-t", "udp://tracker.opentrackr.org:1337/announce", "-t", "http://tracker.bittorrent.am/announce", "-t", "udp://tracker.sktorrent.net:6969", "-o", Interpolate("%s.torrent", filename), filename ],
'--hookdir %(prop:workdir)s/hooks --override-config'), 'OTHERMIRROR': othermirror }, descriptionDone=['binarydeb', package])) # Upload binarydeb to master f.addStep( FileUpload(name=package + '-uploadbinary', slavesrc=Interpolate('%(prop:workdir)s/' + final_name), masterdest=Interpolate('binarydebs/' + final_name), hideStepIf=success)) # Add the binarydeb using reprepro updater script on master f.addStep( MasterShellCommand(name=package + '-includedeb', command=[ 'reprepro-include.bash', debian_pkg, Interpolate(final_name), distro, arch ], descriptionDone=['updated in apt', package])) f.addStep( ShellCommand(name=package + '-clean', command=['rm', '-rf', 'debian/' + debian_pkg], hideStepIf=success)) if spec_list["sync_s3"]: f.addStep( ShellCommand(name=package + '-s3-syncing', command=[ 's3cmd', '--acl-public', '--delete-removed', '--verbose', 'sync', spec_list["local_repo_path"], 's3://{s3_bucket}'.format( s3_bucket=spec_list["s3_bucket"])
repo_lock = MasterLock('reprepro')

# Steps to publish the runtime and SDK.
publish_deb_steps = [
    # Upload the deb package (skipped for optimized builds).
    FileUpload(workersrc=deb_filename,
               masterdest=deb_upload_filename,
               mode=0o664,
               haltOnFailure=True,
               doStepIf=lambda step: not step.getProperty("optimize", False)),

    # Create a torrent file and start seeding it.
    #MakeTorrent(deb_upload_filename),
    #SeedTorrent(deb_upload_filename),

    # Upload it to an apt repository; the master lock serialises reprepro
    # runs across builders.
    MasterShellCommand(name="reprepro",
                       command=["reprepro", "-b", deb_archive_dir,
                                "includedeb", deb_archive_suite,
                                deb_upload_filename],
                       locks=[repo_lock.access('exclusive')],
                       doStepIf=lambda step: not step.getProperty(
                           "optimize", False)),
]

# Now make the factories.
deb_factory = BuildFactory()
for publish_step in build_steps + publish_deb_steps:
    deb_factory.addStep(publish_step)


def docker_builder(buildtype, distro, suite, arch):
    """Return the BuilderConfig for one (buildtype, suite, arch) Docker
    builder, running on the Linux workers."""
    return BuilderConfig(name='-'.join((buildtype, suite, arch)),
                         workernames=config.linux_workers,
                         factory=deb_factory,
                         properties={"buildtype": buildtype,
                                     "distro": distro,
                                     "suite": suite,
                                     "arch": arch,
                                     "optimize": False})
def launchpad_debbuild(c, package, version, binaries, url, distro, arch,
                       machines, othermirror, keys, trigger_names=None):
    """Create a cowbuilder-based debbuild builder for *package* and append
    it to ``c['builders']``.

    :param c: buildmaster config dict; its ``builders`` list is extended.
    :param package: source package name.
    :param version: package version string.
    :param binaries: dict mapping deb architecture -> list of binary names.
    :param url: .dsc URL fetched with ``dget``.
    :param distro: target distribution codename.
    :param arch: build architecture.
    :param machines: slave names allowed to run this builder.
    :param othermirror: extra apt mirror line passed to cowbuilder.
    :param keys: extra apt keys passed to cowbuilder-update.py.
    :param trigger_names: optional scheduler names to trigger afterwards.
    :return: the name of the builder that was created.

    Fixes: Python 2-only octal literal ``0777`` -> ``0o777`` (valid on
    Python 3 and consistent with the ``0o`` literals used elsewhere in this
    configuration), and ``!= None`` -> ``is not None``.
    """
    f = BuildFactory()
    # Grab the source package
    f.addStep(
        ShellCommand(haltOnFailure=True,
                     name=package + '-getsourcedeb',
                     command=['dget', '--allow-unauthenticated', url]))
    # download hooks
    f.addStep(
        FileDownload(
            name=package + '-grab-hooks',
            mastersrc='hooks/D05deps',
            slavedest=Interpolate('%(prop:workdir)s/hooks/D05deps'),
            hideStepIf=success,
            mode=0o777  # make this executable for the cowbuilder
        ))
    # Update the cowbuilder
    f.addStep(
        ShellCommand(command=['cowbuilder-update.py', distro, arch] + keys,
                     hideStepIf=success))
    # Build it
    f.addStep(
        ShellCommand(
            haltOnFailure=True,
            name=package + '-build',
            command=[
                'cowbuilder', '--build', package + '_' + version + '.dsc',
                '--distribution', distro, '--architecture', arch,
                '--basepath',
                '/var/cache/pbuilder/base-' + distro + '-' + arch + '.cow',
                '--buildresult', Interpolate('%(prop:workdir)s'),
                '--hookdir', Interpolate('%(prop:workdir)s/hooks'),
                '--othermirror', othermirror, '--override-config'
            ],
            env={'DIST': distro},
            descriptionDone=['built binary debs', ]))
    # Upload debs
    for deb_arch in binaries.keys():
        for deb_name in binaries[deb_arch]:
            debian_pkg = deb_name + '_' + version + '_' + deb_arch + '.deb'
            f.addStep(
                FileUpload(
                    name=deb_name + '-upload',
                    slavesrc=Interpolate('%(prop:workdir)s/' + debian_pkg),
                    masterdest=Interpolate('binarydebs/' + debian_pkg),
                    hideStepIf=success))
            # Add the binarydeb using reprepro updater script on master
            f.addStep(
                MasterShellCommand(
                    name=deb_name + '-include',
                    command=['reprepro-include.bash', deb_name,
                             Interpolate(debian_pkg), distro, deb_arch],
                    descriptionDone=['updated in apt', debian_pkg]))
    # Trigger if needed
    if trigger_names is not None:
        f.addStep(Trigger(schedulerNames=trigger_names, waitForFinish=False))
    # Add to builders
    c['builders'].append(
        BuilderConfig(name=package + '_' + distro + '_' + arch + '_debbuild',
                      slavenames=machines,
                      factory=f))
    # return name of builder created
    return package + '_' + distro + '_' + arch + '_debbuild'
# Steps to publish the runtime and SDK. publish_deb_steps = [ # Upload the deb package. FileUpload(slavesrc=deb_filename, masterdest=deb_upload_filename, mode=0o664, haltOnFailure=True), # Create a torrent file and start seeding it. MakeTorrent(deb_upload_filename), SeedTorrent(deb_upload_filename), # Upload it to an apt repository. MasterShellCommand(name="reprepro", command=[ "reprepro", "-b", deb_archive_dir, "includedeb", deb_archive_suite, deb_upload_filename ], locks=[repo_lock.access('exclusive')]), ] # Now make the factories. deb_factory = BuildFactory() for step in build_steps + publish_deb_steps: deb_factory.addStep(step) def docker_builder(buildtype, distro, suite, arch): return BuilderConfig(name='-'.join((buildtype, suite, arch)), slavenames=config.linux_slaves, factory=deb_factory, properties={
def add_steps(factory, env=None, clean=False, upload_docs=False,
              upload_dist=False):
    """Add the standard osbuild pull/build/check/docs/dist pipeline to
    *factory*.

    :param factory: BuildFactory to extend.
    :param env: optional environment dict for the slave-side commands.
    :param clean: if true, also run ``osbuild clean --broot`` first.
    :param upload_docs: upload the generated docs to the master.
    :param upload_dist: upload the dist tarballs and release them.
    :return: the same factory, for chaining.

    Fix: replaced the mutable default argument ``env={}`` with ``None``
    (shared-dict pitfall); behaviour is unchanged.
    """
    if env is None:
        env = {}
    log_path = "build/logs/osbuild.log"
    if clean:
        step = ShellCommand(command=["./osbuild", "clean", "--broot"],
                            description="cleaning",
                            descriptionDone="clean",
                            haltOnFailure=True,
                            logfiles={"log": log_path},
                            env=env)
        factory.addStep(step)
    factory.addStep(
        PullCommand(description="pulling",
                    descriptionDone="pull",
                    haltOnFailure=True,
                    logfiles={"log": log_path},
                    env=env))
    factory.addStep(
        ShellCommand(command=["./osbuild", "build"],
                     description="building",
                     descriptionDone="build",
                     haltOnFailure=True,
                     logfiles={"log": log_path},
                     env=env))
    logfiles = {
        "log": log_path,
        "smoketest": "build/logs/check-smoketest.log",
        "modules": "build/logs/check-modules.log"
    }
    factory.addStep(
        ShellCommand(command=["./osbuild", "check"],
                     description="checking",
                     descriptionDone="check",
                     haltOnFailure=True,
                     logfiles=logfiles,
                     env=env))
    factory.addStep(
        ShellCommand(command=["./osbuild", "docs"],
                     description="docs",
                     descriptionDone="docs",
                     haltOnFailure=True,
                     logfiles={"log": log_path},
                     env=env))
    if upload_docs:
        docs_url = "http://developer.sugarlabs.org/"
        factory.addStep(
            DirectoryUpload(slavesrc="build/out/docs",
                            masterdest="~/public_html/docs",
                            url=docs_url))
    factory.addStep(
        ShellCommand(command=["./osbuild", "dist"],
                     description="distribution",
                     descriptionDone="distribution",
                     haltOnFailure=True,
                     logfiles={"log": log_path},
                     env=env))
    if upload_dist:
        dist_dir = "~/dist"
        downloads_dir = "/srv/www-sugarlabs/download/sources/sucrose/glucose"
        factory.addStep(
            DirectoryUpload(slavesrc="build/out/dist", masterdest=dist_dir))
        command = "%s %s %s" % (get_command_path("release-dist"), dist_dir,
                                downloads_dir)
        factory.addStep(
            MasterShellCommand(command=command,
                               description="releasing",
                               descriptionDone="release"))
    return factory
def sphinx_doc(configurator, options, buildout_slave_path, environ=()):
    """Adds sphinx doc to the build.

    For more information, especially about api/autodoc with OpenERP, see
    http://anybox.fr/blog/sphinx-autodoc-et-modules-openerp (in French,
    sorry).

    Available manifest file options:

    :doc.upload-dir: subdirectory of buildmaster's main doc directory (see
        ``doc.upload_root`` below) to put that documentation in. If missing,
        no upload shall be done
    :doc.upload-root: base directory on buildmaster's host relative to which
        ``doc.upload_dir`` is evaluated. It is typically set globally by a
        ``[DEFAULT]`` section (hence the separation, and the fact that its
        presence alone does not trigger the upload)
    :doc.version: defaults to a property-based string that uses the
        ``buildout-tag`` property and defaults itself to ``'current'`` if
        that property is missing. The resulting string gets used as a sub
        directory of ``upload-dir``, and can use properties in the same way
        as :class:`WithProperties` does, albeit with ``$`` instead of ``%``
        (in order not to confuse :mod:`ConfigParser`, that's used to parse
        the manifest file)
    :doc.base-url: doc base URL (example: http://docs.anybox.eu)
    :doc.sphinx-sourcedir: if specified, then the build will use the
        standard buildbot Sphinx step with the value as ``sourcedir``.
        Otherwise, it will issue a simple ``bin/sphinx``, which is what
        collective.recipe.sphinxbuilder provides (encapsulation with no need
        of specifying source/build dirs)
    :doc.sphinx-builddir: *only if* doc.sourcedir is specified: Sphinx build
        directory, defaults to ``${doc.sourcedir}/_build``
    :doc.sphinx-bin: *only if* doc.sourcedir is specified: Sphinx
        executable, relative to buildout directory; defaults to
        ``bin/sphinx-build``.
    :doc.sphinx-mode: (optional) String, one of ``'full'`` or
        ``'incremental'`` (the default). If set to ``'full'``, indicates to
        Sphinx to rebuild everything without re-using the previous build
        results.
    """
    steps = []
    sphinx_sourcedir = options.get('doc.sphinx-sourcedir')
    if sphinx_sourcedir is None:
        # collective.recipe.sphinxbuilder wrapper: no source/build dirs to
        # pass, a plain ``bin/sphinx`` run does it all.
        steps.append(ShellCommand(command=['sh', 'bin/sphinx'],
                                  description=['build', 'doc'],
                                  env=environ))
        html_builddir = 'doc/_build/html'
    else:
        sphinx_builddir = options.get(
            'doc.sphinx-builddir', os.path.join(sphinx_sourcedir, '_build'))
        # TODO GR, might want to change that for non-html builds
        html_builddir = sphinx_builddir
        steps.append(Sphinx(
            sphinx_builddir=sphinx_builddir,
            sphinx_sourcedir=sphinx_sourcedir,
            sphinx=options.get('doc.sphinx-bin', 'bin/sphinx-build'),
            mode=options.get('doc.sphinx-mode', 'incremental'),
            description=['Sphinx'],
            name='sphinx',
            env=environ,
            haltOnFailure=False))
    base_dir = options.get('doc.upload-root', '')
    upload_dir = options.get('doc.upload-dir', '')
    base_url = options.get('doc.base-url')
    # '$' is used in the manifest instead of '%' so as not to confuse
    # ConfigParser; translate it back before property interpolation.
    version = options.get('doc.version',
                          '$(buildout-tag:-current)s').replace('$', '%')
    if upload_dir:
        sub_path = '/'.join((upload_dir.rstrip('/'), version))
        waterfall_url = '/'.join((base_url, sub_path)) if base_url else None
        upload_dir = upload_dir.rstrip('/')
        master_doc_path = '/'.join((base_dir, sub_path))
        steps.append(
            DirectoryUpload(
                slavesrc=html_builddir,
                haltOnFailure=True,
                compress='gz',
                masterdest=WithProperties(master_doc_path),
                url=WithProperties(waterfall_url) if waterfall_url else None))
        # Fixing perms on uploaded files.  Yes, we could have umask = 022 in
        # all slaves, see note at the end of
        # <http://buildbot.net/buildbot/docs/0.8.7/full.html
        # #buildbot.steps.source.buildbot.steps.transfer.DirectoryUpload>
        # but it's less work to fix the perms from here than to check all of
        # them.
        steps.append(
            MasterShellCommand(
                description=["doc", "read", "permissions"],
                command=['chmod', '-R', 'a+r',
                         WithProperties(master_doc_path)]))
        steps.append(
            MasterShellCommand(
                description=["doc", "dirx", "permissions"],
                command=['find', WithProperties(master_doc_path),
                         '-type', 'd',
                         '-exec', 'chmod', '755', '{}', ';']))
    return steps