Ejemplo n.º 1
0
 def __init__(self, **kwargs):
     """Upload the epydoc output directory to a fixed master location.

     Forces source, destination, permissions and compression, so callers
     only supply any remaining ``DirectoryUpload`` keyword arguments.
     """
     kwargs['slavesrc'] = 'epydoc.out'
     # Destination is keyed on the builder name via a build property.
     kwargs['masterdest'] = WithProperties('/var/www/TaskCoach-doc/%s',
                                           'buildername')
     # BUG FIX: the original literal 0755 is Python-2-only octal syntax
     # (a SyntaxError on Python 3); 0o755 is the same value and is valid
     # on Python 2.6+ and Python 3.
     kwargs['mode'] = 0o755
     kwargs['compress'] = None
     DirectoryUpload.__init__(self, **kwargs)
    def _add_step_directory_upload(self, step):
        """Append a master-bound directory-upload step described by *step*.

        *step* is a mapping with required ``source``/``destination`` keys
        and optional ``name``, ``optional`` and ``always-run`` entries.
        Optional steps only run when the build carries a truthy
        ``force_<name>`` property.
        """
        step_name = step.get('name', 'Directory upload')

        is_optional = step.get('optional', False)
        run_always = step.get('always-run', False)
        force_prop = 'force_' + step_name

        if is_optional:
            display_name = "%s (prop:force_%s)" % (step_name, step_name)
        else:
            display_name = step_name

        def _should_run(step):
            # Unconditional steps always run; optional ones only when
            # their force_<name> property is set on the build.
            if not is_optional:
                return True
            return step.build.getProperty(force_prop)

        self.addStep(
            DirectoryUpload(
                name=display_name,
                slavesrc=step['source'],
                masterdest=step['destination'],
                haltOnFailure=True,
                doStepIf=_should_run,
                alwaysRun=run_always,
            ))
Ejemplo n.º 3
0
 def finished(self, result):
     """Annotate the finished upload with the parsed coverage figure.

     After the htmlcov directory upload completes, scrape the uploaded
     index.html for the overall coverage value, show it in the step text
     and link the full report from the waterfall.
     """
     result = DirectoryUpload.finished(self, result)
     buildnumber = self.getProperty('buildnumber')
     # NOTE(review): expanduser is a no-op on this absolute path (no '~');
     # presumably a leftover from an earlier, user-relative destination.
     coverage_index = os.path.expanduser(
         '/app/master/public_html/htmlcov-%d/index.html' % (buildnumber))
     with open(coverage_index) as coverage_file:
         m = self.report_re.search(coverage_file.read())
     # Assumes report_re matched; an unexpected report format would raise
     # AttributeError on the next line — TODO confirm that is acceptable.
     self.descriptionDone = '%s test coverage' % m.group(1)
     self.step_status.setText(self.descriptionDone)
     coverage_url = '/htmlcov-%d/' % (buildnumber)
     self.addURL('full report', coverage_url)
     return result
Ejemplo n.º 4
0
def add_artifact_post_build_steps(job):
    """Attach artifact collection and pruning steps to *job*."""
    artifact_dest = Interpolate(
        join(BrainConfig['artifactsDir'], '%(prop:buildername)s',
             '%(prop:buildnumber)s'))
    artifact_url = Interpolate(
        BuildmasterConfig['buildbotURL'] +
        'artifacts/%(prop:buildername)s/%(prop:buildnumber)s/')

    job.add_step(
        DirectoryUpload(
            name='collect artifacts',
            slavesrc=Interpolate('%(prop:artifactsdir)s'),
            masterdest=artifact_dest,
            url=artifact_url))

    # Keep the master-side artifacts directory bounded.
    job.add_step(PruneOldArtifacts())
Ejemplo n.º 5
0
def get_artifact_step():
    """Return the three steps that upload, publish and clean up an artifact.

    The upload and publish steps only run when the build requests an
    artifact and was not triggered externally; cleanup always runs.
    """
    from buildbot.process.properties import Interpolate
    from buildbot.steps.transfer import DirectoryUpload
    from buildbot.steps.master import MasterShellCommand

    def _wants_artifact(step):
        # Build artifacts only on request, and never for externally
        # triggered builds.
        return (step.build.getProperty('artifact') and
                ('external' not in step.build.getProperty('trigger')))

    upload = DirectoryUpload(
        description=['uploading'],
        descriptionSuffix=['artifact'],
        descriptionDone=['upload'],
        slavesrc=Interpolate('%(prop:workdir)s/install'),
        masterdest=Interpolate(
            '%(prop:buildername)s/tmp/%(prop:got_revision)s'),
        compress='gz',
        locks=[master_upload_lock.access('exclusive')],
        doStepIf=_wants_artifact,
        hideStepIf=skipped_or_success)

    publish = MasterShellCommand(
        name='artifact',
        description=['creating artifact'],
        descriptionDone=['create artifact'],
        doStepIf=_wants_artifact,
        hideStepIf=skipped_or_success,
        command=Interpolate(
            'mkdir -p artifacts/by-revision/%(prop:got_revision)s && '
            'mkdir -p artifacts/by-branch/%(src::branch:-master)s && '
            'cd %(prop:buildername)s/tmp && '
            '%(prop:archive_command)s %(prop:buildername)s%(prop:artifact_extension)s %(prop:got_revision)s/ && '
            'mv %(prop:buildername)s%(prop:artifact_extension)s ../../artifacts/by-revision/%(prop:got_revision)s && '
            'ln -f ../../artifacts/by-revision/%(prop:got_revision)s/%(prop:buildername)s%(prop:artifact_extension)s ../../artifacts/by-branch/%(src::branch:-master)s/%(prop:buildername)s%(prop:artifact_extension)s && '
            'chmod -R a+rX ../../artifacts/by-revision/%(prop:got_revision)s && '
            'chmod -R a+rX ../../artifacts/by-branch/%(src::branch:-master)s'
        ))

    cleanup = MasterShellCommand(
        name='clean master',
        description=['cleaning master'],
        descriptionDone=['clean master'],
        alwaysRun=True,
        hideStepIf=skipped_or_success,
        command=Interpolate('rm -rf "%(prop:buildername)s/tmp"'))

    return [upload, publish, cleanup]
Ejemplo n.º 6
0
def makeOmnibusFactory(distribution):
    """Build a factory producing omnibus packages for *distribution*.

    Builds an sdist, turns it into a distribution package, uploads the
    resulting repo directory to the master and publishes it.
    """
    factory = getFlockerFactory(python="python2.7")
    factory.addSteps(installDependencies())
    factory.addSteps(check_version())

    sdist_step = ShellCommand(
        name='build-sdist',
        description=["building", "sdist"],
        descriptionDone=["build", "sdist"],
        command=[
            virtualenvBinary('python'),
            "setup.py",
            "sdist",
        ],
        haltOnFailure=True)
    factory.addStep(sdist_step)

    package_step = ShellCommand(
        name='build-package',
        description=['building', 'package'],
        descriptionDone=['build', 'package'],
        command=[
            virtualenvBinary('python'),
            'admin/build-package',
            '--destination-path',
            'repo',
            '--distribution',
            distribution,
            Interpolate('/flocker/dist/Flocker-%(prop:version)s.tar.gz'),
        ],
        haltOnFailure=True)
    factory.addStep(package_step)

    repository_path = resultPath('omnibus', discriminator=distribution)

    factory.addStep(
        DirectoryUpload(
            'repo',
            repository_path,
            url=resultURL('omnibus', discriminator=distribution),
            name="upload-repo",
        ))
    factory.addSteps(createRepository(distribution, repository_path))

    return factory
Ejemplo n.º 7
0
def generate_installer(platform):
    """Return the steps that assemble and upload the Windows NSIS installer."""
    current = platform.current_dir

    nsis_script = str(current / 'build' / "tools" / "niminst" /
                      'EnvVarUpdate.nsh')
    build_dir = str(current / 'build' / "build")

    dll_source = str(current / ".." / "dlls")
    dll_target = str(current / "build" / "bin")

    upload_src = str(current / 'build' / 'build')
    upload_dst = "installer-data/{buildername[0]}/{got_revision[0][nim]}/"
    upload_url = 'public_html/' + upload_dst

    return [
        ShellCommand(command=['copy', '/Y', nsis_script, build_dir],
                     env=platform.base_env,
                     workdir=str(current),
                     haltOnFailure=True,
                     **gen_description('Copy', 'Copying', 'Copied',
                                       'NSIS Installer Script')),
        Robocopy(source=dll_source,
                 destination=dll_target,
                 env=platform.base_env,
                 workdir=str(current),
                 haltOnFailure=True,
                 # Robocopy exit codes 0-7 are treated as success.
                 decodeRC={code: SUCCESS
                           for code in range(0, 8)},
                 **gen_description('Copy', 'Copying', 'Copied',
                                   'Installer DLL\'s')),
        ShellCommand(command=['koch', 'nsis', '-d:release'],
                     workdir=str(platform.nim_dir),
                     env=platform.base_env,
                     haltOnFailure=True,
                     **gen_description('Generate', 'Generating', 'Generated',
                                       'NSIS Installer')),
        DirectoryUpload(slavesrc=upload_src,
                        masterdest=upload_dst,
                        workdir=str(current),
                        url=FormatInterpolate(upload_url),
                        compress='bz2')
    ]
Ejemplo n.º 8
0
def _make_factory_step_generator(project_name, make_command=None, workdir="/srv/buildbot", project_git_uri=None):
    """Build the standard fetch / make / upload step list for a project.

    :param project_name: human-readable project name; also used in the
        master-side upload path.
    :param project_git_uri: repository URL handed to the Git step.
    :param make_command: make target to run as ``make <make_command>``.
        NOTE(review): with the default ``None`` the shell command becomes
        ``make None`` — confirm callers always pass a target.
    :param workdir: accepted for interface compatibility; not used here.
    """
    make_factory_steps = [
            Git(
                name = "Executing %s content fetch" % project_name,
                repourl=project_git_uri,
                mode='incremental'
                ),
            ShellCommand(
                name = "Executing %s: 'make %s'" % ( project_name, make_command ),
                command = [
                    "make",
                    make_command
                    ]
                ),
            DirectoryUpload(
                slavesrc="build",
                masterdest=Interpolate(
                    "/srv/output/%(kw:project_name)s/%(src::branch)s",
                    # BUG FIX: the %(kw:project_name)s placeholder reads
                    # Interpolate's own keyword arguments; without passing
                    # project_name the path could never be rendered.
                    project_name=project_name,
                    )
                )
            ]
    return make_factory_steps
Ejemplo n.º 9
0
    # Count the number of commits on the current branch.
    SetPropertyFromCommand(
        "divergence",
        command=[
            "git", "rev-list", "--count",
            Interpolate("%(prop:merge-base)s..%(prop:got_revision)s")
        ],
        haltOnFailure=True,
        doStepIf=is_branched),
]

# Steps to publish the rtdist.
publish_rtdist_steps = [
    # Upload the stage directory from the worker to the master.
    DirectoryUpload(workersrc="built/stage",
                    masterdest=rtdist_staging_dir,
                    haltOnFailure=True),

    # Run pmerge on the master to merge the staged packages into the
    # runtime directory.
    MasterShellCommand(name="pmerge",
                       command=[
                           config.pmerge_bin, "-i", config.runtime_dir,
                           rtdist_staging_dir
                       ])
]


def MakeTorrent(filename, **kwargs):
    "Pseudo-class.  This build step creates a torrent on the master."

    return MasterShellCommand(command=[
Ejemplo n.º 10
0
    name="compile",
    command=["ghc", "--make", "-threaded", "site.hs"],
    description=["compiling", "site", "builder"],
    descriptionDone=["compile", "site", "builder"]
))
f.addStep(Compile(
    name="build-site",
    command=["./site", "rebuild"],
    description=["building", "site"],
    descriptionDone=["build", "site"]
))

# Upload the generated _site directory to the master's web root.
f.addStep(DirectoryUpload(
    name="deploy",
    slavesrc="_site",
    masterdest="/var/www/forkk.net",
    description=["deploying", "site"],
    descriptionDone=["deploy", "site"]
))


# Single builder running this factory on the "forkknet" slave.
c['builders'].append(
    BuilderConfig(name="master", slavenames=["forkknet"], factory=f))


####### STATUS TARGETS

# 'status' is a list of Status Targets. The results of each build will be
# pushed to these targets. buildbot/status/*.py has a variety to choose from,
# including web pages, email senders, and IRC bots.
Ejemplo n.º 11
0
def ros_docbuild(c, job_name, url, branch, distro, arch, rosdistro, machines, othermirror, keys, trigger_pkgs = None):
    """Add a cowbuilder-based documentation build job to the master config.

    Checks the repository out into a bind-mounted /tmp directory, builds
    the docs inside a cowbuilder chroot, uploads them to the master, and
    registers a triggerable scheduler plus the builder itself in *c*.

    Returns the name of the builder that was created.
    """

    # Directory which will be bind-mounted
    binddir = '/tmp/'+job_name+'_'+rosdistro+'_docbuild'

    f = BuildFactory()
    # Remove any old crud in /tmp folder
    f.addStep(
        ShellCommand(
            command = ['rm', '-rf', binddir],
            hideStepIf = success
        )
    )
    # Check out repository (to /tmp)
    f.addStep(
        Git(
            repourl = url,
            branch = branch,
            alwaysUseLatest = True,
            mode = 'full',
            workdir = binddir+'/src/'+job_name+'/'
        )
    )
    # Download testbuild.py script from master
    f.addStep(
        FileDownload(
            name = job_name+'-grab-script',
            mastersrc = 'scripts/docbuild.py',
            slavedest = Interpolate('%(prop:workdir)s/docbuild.py'),
            hideStepIf = success
        )
    )
    # Update the cowbuilder
    f.addStep(
        ShellCommand(
            command = ['cowbuilder-update.py', distro, arch] + keys,
            hideStepIf = success
        )
    )
    # Build docs in a cowbuilder
    f.addStep(
        ShellCommand(
            haltOnFailure = True,
            name = job_name+'-docbuild',
            command = ['cowbuilder', '--execute', Interpolate('%(prop:workdir)s/docbuild.py'),
                       '--distribution', distro, '--architecture', arch,
                       '--bindmounts', binddir,
                       '--basepath', '/var/cache/pbuilder/base-'+distro+'-'+arch+'.cow',
                       '--override-config', '--othermirror', othermirror,
                       '--', binddir, rosdistro],
            env = {'DIST': distro},
            descriptionDone = ['built docs', ]
        )
    )
    # Upload docs to master
    f.addStep(
        DirectoryUpload(
            name = job_name+'-upload',
            slavesrc = binddir+'/docs',
            masterdest = 'docs/' + rosdistro,
            hideStepIf = success
        )
    )
    # Trigger downstream doc jobs if needed.
    # (idiom fix: "is not None" replaces the non-idiomatic "!= None")
    if trigger_pkgs is not None:
        f.addStep(
            Trigger(
                schedulerNames = [t.replace('_','-')+'-'+rosdistro+'-doctrigger' for t in trigger_pkgs],
                waitForFinish = False,
                alwaysRun=True
            )
        )
    # Create trigger
    c['schedulers'].append(
        triggerable.Triggerable(
            name = job_name.replace('_','-')+'-'+rosdistro+'-doctrigger',
            builderNames = [job_name+'_'+rosdistro+'_docbuild',]
        )
    )
    # Add builder config
    c['builders'].append(
        BuilderConfig(
            name = job_name+'_'+rosdistro+'_docbuild',
            slavenames = machines,
            factory = f
        )
    )
    # return the name of the job created
    return job_name+'_'+rosdistro+'_docbuild'
Ejemplo n.º 12
0
def createPackageBuildFactory():
    """ Generates a build factory for a lustre tarball builder.
    Returns:
        BuildFactory: Build factory with steps for a lustre tarball builder.
    """
    # NOTE(review): the commands and predicates referenced below
    # (dependencyCommand, buildzfsCommand, configureCmd, makeCmd,
    # collectProductsCmd, buildRepoCmd, do_step_*, hide_if_skipped,
    # tarballMasterDest, repoMasterDest, repoUrl) are module-level
    # definitions outside this function.
    bf = util.BuildFactory()

    # download our tarball and extract it
    bf.addStep(FileDownload(
        workdir="build/lustre",
        slavedest=util.Interpolate("%(prop:tarball)s"),
        mastersrc=tarballMasterDest))

    bf.addStep(ShellCommand(
        workdir="build/lustre",
        command=["tar", "-xvzf", util.Interpolate("%(prop:tarball)s"), "--strip-components=1"],
        haltOnFailure=True,
        logEnviron=False,
        lazylogfiles=True,
        description=["extracting tarball"],
        descriptionDone=["extract tarball"]))

    # update dependencies
    # decodeRC maps the script's exit codes onto buildbot result states.
    bf.addStep(ShellCommand(
        command=dependencyCommand,
        decodeRC={0 : SUCCESS, 1 : FAILURE, 2 : WARNINGS, 3 : SKIPPED },
        haltOnFailure=True,
        logEnviron=False,
        doStepIf=do_step_installdeps,
        hideStepIf=hide_if_skipped,
        description=["installing dependencies"],
        descriptionDone=["installed dependencies"]))

    # build spl and zfs if necessary
    bf.addStep(ShellCommand(
        command=buildzfsCommand,
        decodeRC={0 : SUCCESS, 1 : FAILURE, 2 : WARNINGS, 3 : SKIPPED },
        haltOnFailure=True,
        logEnviron=False,
        doStepIf=do_step_zfs,
        hideStepIf=hide_if_skipped,
        description=["building spl and zfs"],
        descriptionDone=["built spl and zfs"]))

    # Build Lustre: configure, then make.
    bf.addStep(ShellCommand(
        workdir="build/lustre",
        command=configureCmd,
        haltOnFailure=True,
        logEnviron=False,
        hideStepIf=hide_if_skipped,
        lazylogfiles=True,
        description=["configuring lustre"],
        descriptionDone=["configure lustre"]))

    bf.addStep(ShellCommand(
        workdir="build/lustre",
        command=makeCmd,
        haltOnFailure=True,
        logEnviron=False,
        hideStepIf=hide_if_skipped,
        lazylogfiles=True,
        description=["making lustre"],
        descriptionDone=["make lustre"]))

    # Build Products
    bf.addStep(ShellCommand(
        workdir="build/lustre",
        command=collectProductsCmd,
        haltOnFailure=True,
        logEnviron=False,
        doStepIf=do_step_collectpacks,
        hideStepIf=hide_if_skipped,
        lazylogfiles=True,
        description=["collect deliverables"],
        descriptionDone=["collected deliverables"]))

    # Build repo
    bf.addStep(ShellCommand(
        workdir="build/lustre/deliverables",
        command=buildRepoCmd,
        haltOnFailure=True,
        logEnviron=False,
        doStepIf=do_step_buildrepo,
        hideStepIf=hide_if_skipped,
        lazylogfiles=True,
        description=["building repo"],
        descriptionDone=["build repo"]))

    # Upload repo to master
    bf.addStep(DirectoryUpload(
        workdir="build/lustre",
        doStepIf=do_step_collectpacks,
        hideStepIf=hide_if_skipped,
        slavesrc="deliverables",
        masterdest=repoMasterDest,
        url=repoUrl))

    # Cleanup: always runs, even after failures.
    bf.addStep(ShellCommand(
        workdir="build",
        command=["sh", "-c", "rm -rvf ./* /tmp/rpmbuild-*"],
        haltOnFailure=True,
        logEnviron=False,
        lazylogfiles=True,
        alwaysRun=True,
        description=["cleaning up"],
        descriptionDone=["clean up"]))

    return bf
def install_modules_nose(configurator, options, buildout_slave_path,
                         environ=()):
    """Install addons, run nose tests, upload result.

    Warning: this works only for addons that use the trick in main
    __init__ that avoids executing the models definition twice.

    Available manifest file options:

      :openerp-addons: comma-separated list of addons to test
      :install-as-upgrade: use the upgrade script to install the project

        If this is False, the step will simply issue a start_<PART> -i on
        openerp-addons

      :upgrade.script: name of the upgrade script (defaults to
        ``bin/upgrade_<PART>``)
      :nose.tests: goes directly to command line; list directories to find
        tests here.
      :nose.coverage: boolean, if true, will run coverage for the listed
        addons
      :nose.cover-options: additional options for nosetests invocation
      :nose.upload-path: path on master to upload files produced by nose
      :nose.upload-url: URL to present files produced by nose in waterfall

    In upload-path and upload-url, one may use properties as in the
    steps definitions, with $ instead of %, to avoid ConfigParser interpret
    them.
    """

    environ = dict(environ)

    steps = []

    steps.append(ShellCommand(command=['rm', '-f', 'install.log'],
                              name="Log cleanup",
                              descriptionDone=['Cleaned', 'logs'],
                              ))
    addons = comma_list_sanitize(options.get('openerp-addons', ''))

    buildout_part = options.get('buildout-part', DEFAULT_BUILDOUT_PART)
    if options.get('install-as-upgrade', 'false').lower().strip() == 'true':
        install_cmd = [
            options.get('upgrade.script',
                        'bin/upgrade_' + buildout_part).strip(),
            '--init-load-demo-data',
            '--log-file', 'install.log']
    else:
        # openerp --logfile does not work with relative paths !
        install_cmd = [
            options.get('start-command', 'bin/start_' + buildout_part),
            '--stop-after-init', '-i',
            addons if addons else 'all',
            WithProperties(
                '--logfile=%(workdir)s/build/install.log')]

    steps.append(ShellCommand(
        command=install_cmd,
        name='install',
        description='install modules',
        descriptionDone='installed modules',
        logfiles=dict(log='install.log'),
        haltOnFailure=True,
        env=environ,
    ))

    steps.append(ShellCommand(
        command=["python", "analyze_oerp_tests.py", "install.log"],
        name='check',
        description="check install log",
        descriptionDone="checked install log",
    ))

    # from here on, addons is a list (was a sanitized comma string above)
    addons = addons.split(',')
    nose_output_dir = 'nose_output'
    nose_cmd = ["bin/nosetests", "-v"]
    nose_cmd.extend(options.get('nose.tests', '').split())
    # upload tracks whether any nose output (coverage/profile) was
    # requested and therefore needs uploading to the master
    upload = False

    if bool_opt(options, 'nose.coverage'):
        upload = True
        nose_cmd.append('--with-coverage')
        nose_cmd.append('--cover-html')
        nose_cmd.append('--cover-html-dir=%s' % os.path.join(
            nose_output_dir, 'coverage'))
        nose_cmd.extend(options.get(
            'nose.cover-options',
            '--cover-erase --cover-branches').split())

        for addon in addons:
            nose_cmd.extend(('--cover-package', addon))

    if bool_opt(options, 'nose.profile'):
        upload = True
        nose_cmd.extend(('--with-profile',
                         '--profile-stats-file',
                         os.path.join(nose_output_dir, 'profile.stats')))

        # sadly, restrict if always interpreted by nose as a string
        # it can't be used to limit the number of displayed lines
        # putting a default value here would make no sense.
        restrict = options.get('nose.profile-restrict')
        if restrict:
            nose_cmd.extend(('--profile-restrict', restrict))

    if bool_opt(options, 'nose.cprofile'):
        upload = True
        nose_cmd.extend(('--with-cprofile', '--cprofile-stats-erase',
                         '--cprofile-stats-file',
                         os.path.join(nose_output_dir, 'cprofile.stats')))

    if upload:
        steps.append(ShellCommand(command=['mkdir', '-p', nose_output_dir],
                                  name='mkdir',
                                  description='prepare nose output',
                                  haltOnFailure=True,
                                  env=environ))

    steps.append(ShellCommand(
        command=nose_cmd,
        name='tests',
        description="nose tests",
        haltOnFailure=True,
        env=environ,
        timeout=3600 * 4,
    ))

    if upload:
        # $-style placeholders become %-style properties (see docstring)
        upload_path = options.get('nose.upload-path', '').replace('$', '%')
        upload_url = options.get('nose.upload-url', '').replace('$', '%')
        steps.append(DirectoryUpload(slavesrc=nose_output_dir,
                                     haltOnFailure=True,
                                     compress='gz',
                                     masterdest=WithProperties(upload_path),
                                     url=WithProperties(upload_url)))

        # Fixing perms on uploaded files. Yes we could have umask = 022 in
        # all slaves, see note at the end of
        # http://buildbot.net/buildbot/docs/0.8.7/full.html#
        #     buildbot.steps.source.buildbot.steps.transfer.DirectoryUpload
        # but it's less work to fix the perms from here than to check all of
        # them
        steps.append(MasterShellCommand(
            description=["nose", "output", "read", "permissions"],
            command=['chmod', '-R', 'a+r',
                     WithProperties(upload_path)]))
        steps.append(MasterShellCommand(
            description=["nose", "output", "dirx", "permissions"],
            command=['find', WithProperties(upload_path),
                     '-type', 'd', '-exec',
                     'chmod', '755', '{}', ';']))
    return steps
Ejemplo n.º 14
0
    def start(self):
        """Start the upload, then link the published documentation."""
        DirectoryUpload.start(self)

        builder = self.getProperty('buildername')
        self.addURL('Documentation',
                    'http://www.fraca7.net/TaskCoach-doc/%s/index.html' % (builder))
Ejemplo n.º 15
0
 def __init__(self, **kwargs):
     """Force source/destination for the epydoc documentation upload."""
     # Destination is keyed on the builder name via a build property.
     dest = WithProperties('/var/www/TaskCoach-doc/%s', 'buildername')
     kwargs['slavesrc'] = 'epydoc.out'
     kwargs['masterdest'] = dest
     DirectoryUpload.__init__(self, **kwargs)
Ejemplo n.º 16
0
 def __init__(self, **kwargs):
     """Force source/destination for the coverage report upload."""
     kwargs.update(
         slavesrc='tests/coverage.out',
         # Destination is keyed on the builder name via a build property.
         masterdest=WithProperties('/var/www/TaskCoach-coverage/%s',
                                   'buildername'))
     DirectoryUpload.__init__(self, **kwargs)
Ejemplo n.º 17
0
def makeInternalDocsFactory():
    """Build a factory that spell-checks, link-checks, builds and
    publishes the internal documentation, syncing releases to S3."""
    factory = getFlockerFactory(python="python2.7")
    factory.addSteps(installDependencies())
    factory.addSteps(check_version())
    # Spelling and linkcheck are advisory: neither halts the build.
    factory.addStep(
        sphinxBuild("spelling",
                    "build/docs",
                    logfiles={'errors': '_build/spelling/output.txt'},
                    haltOnFailure=False))
    factory.addStep(
        sphinxBuild(
            "linkcheck",
            "build/docs",
            logfiles={'errors': '_build/linkcheck/output.txt'},
            haltOnFailure=False,
            flunkOnWarnings=False,
            flunkOnFailure=False,
            warnOnFailure=True,
        ))
    factory.addStep(sphinxBuild("html", "build/docs"))
    factory.addStep(
        DirectoryUpload(
            # NOTE(review): bytes literal among str paths looks
            # accidental — confirm it is intentional.
            b"docs/_build/html",
            resultPath('docs'),
            url=resultURL('docs'),
            name="upload-html",
        ))
    factory.addStep(
        MasterShellCommand(
            name='link-release-documentation',
            description=["linking", "release", "documentation"],
            descriptionDone=["link", "release", "documentation"],
            command=[
                "ln",
                '-nsf',
                resultPath('docs'),
                'doc-dev',
            ],
            doStepIf=isMasterBranch('flocker'),
        ))
    factory.addStep(
        MasterShellCommand(
            name='upload-release-documentation',
            description=["uploading", "release", "documentation"],
            descriptionDone=["upload", "release", "documentation"],
            command=[
                "s3cmd",
                "sync",
                '--verbose',
                '--delete-removed',
                '--no-preserve',
                # s3cmd needs a trailing slash.
                Interpolate("%(kw:path)s/", path=resultPath('docs')),
                Interpolate(
                    "s3://%(kw:bucket)s/%(prop:version)s/",
                    bucket='clusterhq-dev-docs',
                ),
            ],
            doStepIf=isReleaseBranch('flocker'),
        ))
    return factory
def sphinx_doc(configurator, options,
               buildout_slave_path, environ=()):
    """Adds sphinx doc to the build.

    For more information, especially about api/autodoc with OpenERP, see
    http://anybox.fr/blog/sphinx-autodoc-et-modules-openerp (in French, sorry).

    Available manifest file options:

       :doc.upload-dir: subdirectory of buildmaster's main doc
          directory (see ``doc.upload_root`` below) to put that
          documentation in. If missing, no upload shall be done
       :doc.upload-root: base directory on buildmaster's host relative to
           which  ``doc.upload_dir`` is evaluated. It is typically set
           globally by a ``[DEFAULT]`` section (hence the separation, and the
           fact that its presence alone does not trigger the upload)
       :doc.version: defaults to a property-based string that uses the
                     ``buildout-tag`` property and defaults itself to
                     ``'current'`` if that property is missing.
                     The resulting string gets used as a sub directory of
                     ``upload-dir``, and can use properties in the same way as
                     ``:class:`WithProperties` does,
                     albeit with ``$`` instead of ``%``
                     (in order not to confuse :mod:`ConfigParser`,
                     that's used to parse the manifest file)
       :doc.base-url: doc base URL (example: http://docs.anybox.eu)
       :doc.sphinx-sourcedir: if specified, then the build will use the
                              standard buildbot Sphinx step with the value as
                              ``sourcedir``. Otherwise,
                              it will issue a simple ``bin/sphinx``, which is
                              what collective.recipe.sphinxbuilder provides
                              (encapsulation with no need of specifying
                              source/build dirs)
       :doc.sphinx-builddir: *only if* doc.sourcedir is specified: Sphinx build
                             directory, defaults to ``${doc.sourcedir}/_build``
       :doc.sphinx-bin: *only if* doc.sourcedir is specified: Sphinx
                        executable, relative to buildout directory; defaults
                        to ``bin/sphinx-build``.
       :doc.sphinx-mode: (optional) String, one of ``'full'`` or
                         ``'incremental'`` (the default). If set to
                         ``'full'``, indicates to Sphinx to rebuild
                         everything without re-using the previous build
                         results.
    """
    steps = []
    sphinx_sourcedir = options.get('doc.sphinx-sourcedir')
    if sphinx_sourcedir is None:
        # collective.recipe.sphinxbuilder style: a single wrapper script.
        steps.append(ShellCommand(command=['sh', 'bin/sphinx'],
                                  description=['build', 'doc'],
                                  env=environ))
        html_builddir = 'doc/_build/html'
    else:
        sphinx_builddir = options.get('doc.sphinx-builddir',
                                      os.path.join(sphinx_sourcedir, '_build'))
        # TODO GR, might want to change that for non-html builds
        html_builddir = sphinx_builddir
        sphinx_mode = options.get('doc.sphinx-mode', 'incremental')
        sphinx_bin = options.get('doc.sphinx-bin', 'bin/sphinx-build')
        steps.append(Sphinx(sphinx_builddir=sphinx_builddir,
                            sphinx_sourcedir=sphinx_sourcedir,
                            sphinx=sphinx_bin,
                            mode=sphinx_mode,
                            description=['Sphinx'],
                            name='sphinx',
                            env=environ,
                            haltOnFailure=False))

    base_dir = options.get('doc.upload-root', '')
    upload_dir = options.get('doc.upload-dir', '')
    base_url = options.get('doc.base-url')
    # $-style placeholders become %-style properties (see docstring).
    version = options.get(
        'doc.version', '$(buildout-tag:-current)s').replace('$', '%')
    if upload_dir:
        sub_path = '/'.join((upload_dir.rstrip('/'), version))
        waterfall_url = '/'.join((base_url, sub_path)) if base_url else None
        upload_dir = upload_dir.rstrip('/')
        master_doc_path = '/'.join((base_dir, sub_path))
        steps.append(
            DirectoryUpload(
                slavesrc=html_builddir,
                haltOnFailure=True,
                compress='gz',
                masterdest=WithProperties(master_doc_path),
                url=WithProperties(waterfall_url) if waterfall_url else None))

        # Fixing perms on uploaded files. Yes we could have umask = 022 in
        # all slaves,
        # see note at the end of
        #  <http://buildbot.net/buildbot/docs/0.8.7/full.html
        #   #buildbot.steps.source.buildbot.steps.transfer.DirectoryUpload>
        # but it's less work to fix the perms from here than to check all of
        # them
        steps.append(
            MasterShellCommand(
                description=["doc", "read", "permissions"],
                command=['chmod', '-R', 'a+r',
                         WithProperties(master_doc_path)]))
        steps.append(
            MasterShellCommand(
                description=["doc", "dirx", "permissions"],
                command=['find', WithProperties(master_doc_path),
                         '-type', 'd', '-exec',
                         'chmod', '755', '{}', ';']))

    return steps
Ejemplo n.º 19
0
def add_steps(factory,
              env=None,
              clean=False,
              upload_docs=False,
              upload_dist=False):
    """Populate *factory* with the standard osbuild pipeline.

    Pull, build, check, and docs steps are always added; the optional
    clean, docs-upload and dist-upload/release steps are controlled by
    the keyword flags.  Returns the factory for chaining.
    """
    # BUG FIX: the original signature used the mutable default ``env={}``,
    # which is shared across calls; normalize a None sentinel instead.
    if env is None:
        env = {}
    log_path = "build/logs/osbuild.log"

    if clean:
        step = ShellCommand(command=["./osbuild", "clean", "--broot"],
                            description="cleaning",
                            descriptionDone="clean",
                            haltOnFailure=True,
                            logfiles={"log": log_path},
                            env=env)
        factory.addStep(step)

    factory.addStep(
        PullCommand(description="pulling",
                    descriptionDone="pull",
                    haltOnFailure=True,
                    logfiles={"log": log_path},
                    env=env))

    factory.addStep(
        ShellCommand(command=["./osbuild", "build"],
                     description="building",
                     descriptionDone="build",
                     haltOnFailure=True,
                     logfiles={"log": log_path},
                     env=env))

    logfiles = {
        "log": log_path,
        "smoketest": "build/logs/check-smoketest.log",
        "modules": "build/logs/check-modules.log"
    }

    factory.addStep(
        ShellCommand(command=["./osbuild", "check"],
                     description="checking",
                     descriptionDone="check",
                     haltOnFailure=True,
                     logfiles=logfiles,
                     env=env))

    factory.addStep(
        ShellCommand(command=["./osbuild", "docs"],
                     description="docs",
                     descriptionDone="docs",
                     haltOnFailure=True,
                     logfiles={"log": log_path},
                     env=env))

    if upload_docs:
        docs_url = "http://developer.sugarlabs.org/"
        factory.addStep(
            DirectoryUpload(slavesrc="build/out/docs",
                            masterdest="~/public_html/docs",
                            url=docs_url))

    factory.addStep(
        ShellCommand(command=["./osbuild", "dist"],
                     description="distribution",
                     descriptionDone="distribution",
                     haltOnFailure=True,
                     logfiles={"log": log_path},
                     env=env))

    if upload_dist:
        dist_dir = "~/dist"
        downloads_dir = "/srv/www-sugarlabs/download/sources/sucrose/glucose"

        factory.addStep(
            DirectoryUpload(slavesrc="build/out/dist", masterdest=dist_dir))

        # Release the uploaded dist tarballs into the public download area.
        command = "%s %s %s" % (get_command_path("release-dist"), dist_dir,
                                downloads_dir)

        factory.addStep(
            MasterShellCommand(command=command,
                               description="releasing",
                               descriptionDone="release"))

    return factory