Example #1
    def __init__(self,
                 isOptional=False,
                 patchDir=".",
                 workdir="build",
                 **kwargs):
        """arguments:
        @type  patchDir:    string
        @param patchDir:    The directory on the master that holds the patches
                            This directory is relative to the base buildmaster
                            directory.
                            ie. /home/buildmaster/project
                            Defaults to '.'
        'workdir' is assumed to be 'build' and should be passed if it is
        anything else.
        'isOptional' is assumed to be False; if the patch is optional, pass True.
        """
        FileDownload.__init__(self,
                              mastersrc=".",
                              slavedest=".",
                              workdir=workdir,
                              **kwargs)
        self.addFactoryArguments(isOptional=isOptional, patchDir=patchDir)

        self.isOptional = isOptional
        self.patchDir = patchDir
        self.super_class = FileDownload
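The constructor above only records its arguments; the real mastersrc and slavedest are filled in later by start(). The base step it wraps is Buildbot's plain FileDownload, which copies a master-side file into the slave's workdir. A minimal sketch of that base step, assuming Buildbot 0.8-era import paths and hypothetical file names:

from buildbot.process.factory import BuildFactory
from buildbot.steps.transfer import FileDownload

f = BuildFactory()
# Copy a file that lives on the buildmaster (path relative to the master's
# base directory) into the slave's 'build' workdir under the given name.
f.addStep(FileDownload(mastersrc="patches/project/fix-123.diff",
                       slavedest="fix-123.diff",
                       workdir="build"))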
Example #2
 def __init__(self, build_dir, basename, extension, prop_name, **kwargs):
     FileDownload.__init__(self, **kwargs)
     self.addFactoryArguments(build_dir=build_dir,
                              basename=basename,
                              extension=extension,
                              prop_name=prop_name)
     self.build_dir = build_dir
     self.basename = basename
     self.extension = extension
     self.prop_name = prop_name
Example #3
    def __init__(self, mastersrc=None, patchDir=".", **kwargs):
        """arguments:
        @type  patchDir:   string
        @param patchDir:   The directory on the master that holds the mozconfig
                           This directory is relative to the base buildmaster
                           directory.
                           ie. /home/buildmaster/project
                           Defaults to '.'
        """
        self.workdir = "mozilla/"
        kwargs['workdir'] = "mozilla/"
        self.patchDir = patchDir
        # mastersrc and slavedest get overridden in start()
        FileDownload.__init__(self, mastersrc=mastersrc, slavedest=".mozconfig",
                              **kwargs)
    def start(self):
        changes = self.step_status.build.getChanges()
        args = parseSendchangeArguments(changes[0].files)

        # if we were passed in a mozconfig and also have an uploaded one
        # they need to be combined, with the uploaded one overwriting any
        # settings set by the passed in one
        try:
            uploadedFile = path.join(self.patchDir, args['mozconfig'])
            os.stat(uploadedFile)
            oldMasterSrc = self.mastersrc
            self.mastersrc = uploadedFile
            try:
                os.stat(oldMasterSrc)
                # we have both a passed in and uploaded mozconfig
                self.mastersrc = "%s-%s" % (uploadedFile,
                                            self.getProperty("slavename"))

                # read in both configs
                initialConfig = open(oldMasterSrc)
                newConfig = initialConfig.read()
                initialConfig.close()
                uploadedConfig = open(uploadedFile)
                newConfig += "\n"
                newConfig += uploadedConfig.read()
                uploadedConfig.close()

                # now write out the whole new thing
                mozconfig = open(self.mastersrc, "w")
                mozconfig.write(newConfig)
                mozconfig.close()
            except (OSError, TypeError, KeyError):
                # no passed in mozconfig, mastersrc set above
                try:
                    os.stat(self.mastersrc)
                except (OSError, TypeError, KeyError):
                    return SKIPPED
        except (OSError, TypeError, KeyError):
            # no uploaded mozconfig
            try:
                os.stat(self.mastersrc)
                # if this succeeds, the passed in mastersrc is valid
            except (OSError, TypeError, KeyError):
                # nothing to transfer, skip
                return SKIPPED

        # everything is set up, download the file
        FileDownload.start(self)
Example #6
    def start(self):
        changes = self.step_status.build.getChanges()

        if len(changes) < 1:
            return SKIPPED

        args = parseSendchangeArguments(changes[0].files)

        if 'infoFile' not in args and self.isOptional:
            return SKIPPED

        self.mastersrc = "%s/%s" % (self.patchDir, args['infoFile'])
        self.slavedest = "%s" % (args['infoFile'])

        # now that everything is set-up, download the file
        FileDownload.start(self)
Example #7
    def upload_benchmarker(self):
        self.addStep(FileDownload(Interpolate('%s/benchmarker.tar.gz' % MASTERWORKDIR), 'benchmarker.tar.gz', workdir='.'))

        self.addStep(ShellCommand(name='md5', command=['md5sum', 'benchmarker.tar.gz'], workdir='.'))
        self.addStep(ShellCommand(name='unpack_benchmarker', command=['tar', 'xf', 'benchmarker.tar.gz'], workdir='.'))
        self.addStep(ShellCommand(name='debug2', command=['ls', '-lha', 'benchmarker'], workdir='.'))
        self.addStep(MasterShellCommand(name="cleanup", command=['rm', '-rf', Interpolate(MASTERWORKDIR)]))
Example #8
    def start(self):
        recent_build = None
        recent_mtime = 0

        for entry in os.listdir(self.build_dir):
            full_path = os.path.abspath(os.path.join(self.build_dir, entry))

            if (os.path.isfile(full_path) and
                entry.startswith(self.basename) and
                entry.endswith("." + self.extension)):

                time = os.path.getmtime(full_path)

                if time > recent_mtime:
                    recent_mtime = time
                    recent_build = full_path

        if recent_build:
            self.setProperty(self.prop_name, recent_build,
                             "DownloadLatestBuild")
            self.mastersrc = recent_build
            self.slavedest = os.path.join(os.path.abspath(self.slavedest),
                                          os.path.basename(recent_build))
            return FileDownload.start(self)

        self.step_status.setColor("red")
        self.step_status.setText("build not found")
        self.finished(FAILURE)
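Examples #2 and #8 appear to belong to the same step class; going by the name used in the setProperty call, a hedged usage sketch might look like the following (the module path and every argument value are hypothetical):

from buildbot.process.factory import BuildFactory
from custom_steps import DownloadLatestBuild  # hypothetical module holding the class above

f = BuildFactory()
# Scan a master-side directory for the newest 'myapp-*.tar.gz', record its
# path in the 'latest_build_path' property, then download it to the slave.
f.addStep(DownloadLatestBuild(build_dir="/data/builds/nightly",
                              basename="myapp-",
                              extension="tar.gz",
                              prop_name="latest_build_path",
                              mastersrc=".",   # placeholder, overridden in start()
                              slavedest=".",   # placeholder, overridden in start()
                              workdir="build"))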
Example #9
def hg_tag_buildout(self, options, cfg_tokens, manifest_dir):
    """Steps to retrieve the buildout dir as a Mercurial tag.

    Useful for release/packaging oriented builds.
    The tag name is read from build properties.
    The clone is made outside of the main build/ directory, that must
    stay pristine to test the produced packages.

    See module docstring for signature and return values.
    """

    if len(cfg_tokens) != 2:
        raise ValueError("Wrong hgtag buildout specification: %r" % cfg_tokens)

    url, conf_path = cfg_tokens
    tag = Property('buildout-tag')
    return conf_path, (FileDownload(mastersrc=os.path.join(
        BUILD_UTILS_PATH, 'buildout_hg_dl.py'),
                                    slavedest='buildout_hg_dl.py',
                                    workdir='src',
                                    haltOnFailure=True),
                       ShellCommand(
                           command=[
                               'python', 'buildout_hg_dl.py', '-t', 'tag', url,
                               tag
                           ],
                           workdir='./src',
                           description=(
                               "Retrieve buildout",
                               "tag",
                               tag,
                               "from hg",
                           ),
                           haltOnFailure=True,
                       ))
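hg_tag_buildout expects exactly two tokens, a repository URL and a conf file name; the tag itself is not part of the tokens but is read from the 'buildout-tag' build property. A small illustration with hypothetical values:

# Hypothetical manifest tokens for an hg-tag buildout entry.
cfg_tokens = ['https://hg.example.org/myproject-buildout', 'release.cfg']
# -> conf_path is 'release.cfg'; the returned steps download buildout_hg_dl.py
#    into the 'src' workdir and clone the tag named by the 'buildout-tag'
#    property there.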
Example #10
 def __init__(self, mastersrc=None, patchDir=".", workdir="mozilla", slavedest=".mozconfig",
              **kwargs):
     """arguments:
     @type  patchDir:   string
     @param patchDir:   The directory on the master that holds the mozconfig
                         This directory is relative to the base buildmaster
                         directory.
                         ie. /home/buildmaster/project
                         Defaults to '.'
     """
     # mastersrc and slavedest get overridden in start()
     FileDownload.__init__(self, mastersrc=mastersrc,
                           workdir=workdir, slavedest=slavedest, **kwargs)
     self.addFactoryArguments(patchDir=patchDir)
     self.patchDir = patchDir
     self.super_class = FileDownload
Example #11
def hg_buildout(self, options, cfg_tokens, manifest_dir):
    """Steps to retrieve the buildout using Mercurial.

    See module docstring for signature and return values.
    manifest_dir is not used in this downloader.
    """
    if len(cfg_tokens) != 3:
        raise ValueError("Wrong standalong buildout specification: %r" %
                         cfg_tokens)

    url, branch, conf_path = cfg_tokens
    return conf_path, (FileDownload(mastersrc=os.path.join(
        BUILD_UTILS_PATH, 'buildout_hg_dl.py'),
                                    slavedest='buildout_hg_dl.py',
                                    haltOnFailure=True),
                       ShellCommand(
                           command=[
                               'python', 'buildout_hg_dl.py', url, branch
                           ],
                           description=(
                               "Retrieve buildout",
                               "from hg",
                           ),
                           haltOnFailure=True,
                       ))
Example #12
def mkdocsfactory():
    f = factory.BuildFactory()
    f.addSteps([
        gitStep,
        FileDownload(mastersrc="virtualenv.py",
                     slavedest="virtualenv.py",
                     flunkOnFailure=True),

        # run docs tools in their own virtualenv, otherwise we end up documenting
        # the version of Buildbot running the metabuildbot!
        VirtualenvSetup(name='virtualenv setup',
                        virtualenv_packages=[
                            'sphinx==1.2.2', 'Pygments==2.0.1',
                            '--editable=master[docs]', '--editable=worker'
                        ],
                        virtualenv_dir='sandbox',
                        haltOnFailure=True),

        # manual
        ShellCommand(command=Interpolate(
            textwrap.dedent("""\
        export VERSION=latest &&
        . sandbox/bin/activate &&
        gmake docs
        """)),
                     name="create docs"),
    ])
    return f
def steps_odoo_port_reservation(configurator, options, environ=()):
    """Return steps for port reservation.

    The chosen port is stored in ``openerp_port`` property.

    Available manifest file options:

      :odoo.http-port-min: minimal value for the HTTP port (defaults to 6069)
      :odoo.http-port-max: maximal value for the HTTP port (defaults to 7069)
      :odoo.http-port-step: increment value for the HTTP port (defaults to 5)
    """

    return (
        FileDownload(
            mastersrc=os.path.join(BUILD_UTILS_PATH, 'port_reserve.py'),
            slavedest='port_reserve.py'),

        SetPropertyFromCommand(
            property='openerp_port',
            description=['Port', 'reservation'],
            locks=[port_lock.access('exclusive')],
            command=[
                'python', 'port_reserve.py',
                '--port-min=' + options.get('odoo.http-port-min', '6069'),
                '--port-max=' + options.get('odoo.http-port-max', '7069'),
                '--step=' + options.get('odoo.http-port-step', '5'),
            ])
    )
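The three odoo.http-port-* options feed straight into the port_reserve.py command line; a short illustration with hypothetical manifest values:

# Hypothetical manifest options and the invocation they produce.
options = {'odoo.http-port-min': '8000',
           'odoo.http-port-max': '8100',
           'odoo.http-port-step': '10'}
# -> command: python port_reserve.py --port-min=8000 --port-max=8100 --step=10
# The reserved port number is stored in the 'openerp_port' build property.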
Example #14
def bzr_buildout(self, options, cfg_tokens, manifest_dir, subdir=None):
    """Steps to retrieve the buildout using Bazaar.

    See module docstring for signature and return values.
    manifest_dir is not used in this downloader.

    :param subdir: if not ``None``, then branch will be set aside and
                   the default workdir, 'build' will be set as a link
                   to the specified subdir in branch.
    """
    def conf_error(cfg_tokens):
        raise ValueError("Wrong bzr buildout specification: %r" % cfg_tokens)

    subdir = None
    if len(cfg_tokens) > 2:
        options = cfg_tokens[2:]
        for opt in options:
            split = opt.split('=')
            if split[0].strip() == 'subdir':
                subdir = split[1].strip()
            else:
                conf_error(cfg_tokens)

    if len(cfg_tokens) < 2:
        conf_error(cfg_tokens)

    url, conf_path = cfg_tokens[:2]
    steps = [
        FileDownload(mastersrc=os.path.join(BUILD_UTILS_PATH,
                                            'buildout_bzr_dl.py'),
                     slavedest='buildout_bzr_dl.py',
                     haltOnFailure=True),
    ]
    if subdir is None:
        steps.append(
            ShellCommand(
                command=['python', 'buildout_bzr_dl.py', url],
                description=(
                    "Retrieve buildout",
                    "from bzr",
                ),
                haltOnFailure=True,
            ))
    else:
        steps.append(
            ShellCommand(
                command=[
                    'python', 'build/buildout_bzr_dl.py', url, '--subdir',
                    subdir, '--subdir-target', 'build', '--force-remove-subdir'
                ],
                description=(
                    "Retrieve buildout",
                    "from bzr",
                ),
                haltOnFailure=True,
                workdir='.',
            ))

    return conf_path, steps
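The two accepted token forms differ only in the optional subdir option; an illustration with hypothetical branch locations:

# Hypothetical manifest tokens for the two supported forms.
cfg_tokens_plain = ['lp:myproject-buildout', 'buildout.cfg']
cfg_tokens_subdir = ['lp:myproject-buildout', 'buildout.cfg', 'subdir=server']
# With 'subdir=server', the branch is checked out aside and the default
# 'build' workdir ends up pointing at the branch's 'server' subdirectory.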
Example #15
 def __init__(self, mastersrc=None, patchDir=".", **kwargs):
     """arguments:
     @type  patchDir:   string
     @param patchDir:   The directory on the master that holds the mozconfig
                         This directory is relative to the base buildmaster
                         directory.
                         ie. /home/buildmaster/project
                         Defaults to '.'
     """
     self.workdir = "mozilla/"
     kwargs['workdir'] = "mozilla/"
     self.patchDir = patchDir
     # mastersrc and slavedest get overridden in start()
     FileDownload.__init__(self,
                           mastersrc=mastersrc,
                           slavedest=".mozconfig",
                           **kwargs)
    def steps_unibootstrap(self,
                           buildout_slave_path,
                           options,
                           eggs_cache,
                           dump_options_to=None,
                           **step_kw):
        """return a list of steps for buildout bootstrap, using uniform script.

        The uniform script is ``unibootstrap.py``. For now it ships with
        build_utils and is downloaded from the buildmaster.

        options prefixed with 'bootstrap-' are applied

        :param dump_options_to: kept for backwards compatibility,
                                (unibootstrap will dump them in all cases).
        :param step_kw: will be passed to the step constructor. Known use-case:
                        change workdir in packaging step.
        """
        boot_opts = {}
        if options.get('virtualenv', 'true').strip().lower() == 'true':
            boot_opts['--python'] = Interpolate(
                '%(prop:cap_python_venv:-~/openerp-env)s'
                '/bin/python')

        bv = options.get('bootstrap-version')
        if bv is not None:
            boot_opts['--buildout-version'] = bv.strip()

        command = [
            Property('cap_python_bin',
                     default='python'), 'unibootstrap.py', '--dists-directory',
            WithProperties(eggs_cache), '--buildout-config',
            buildout_slave_path
        ]
        if dump_options_to is None:
            command.append('--no-output-bootstrap-config')
        else:
            boot_opts['--output-bootstrap-config'] = dump_options_to

        for o, v in boot_opts.items():
            command.extend((o, v))
        command.append('.')

        return [
            FileDownload(mastersrc=os.path.join(BUILD_UTILS_PATH,
                                                'unibootstrap.py'),
                         slavedest='unibootstrap.py',
                         name="download",
                         description=['download', 'unibootstrap'],
                         **step_kw),
            ShellCommand(command=command,
                         name='bootstrap',
                         description="bootstrapping",
                         descriptionDone="bootstrapped",
                         locks=[buildout_caches_lock.access('exclusive')],
                         haltOnFailure=True,
                         **step_kw)
        ]
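The bootstrap-related buildout options map directly onto unibootstrap.py flags; a sketch of how hypothetical option values would translate (the exact flag order depends on dict iteration):

# Hypothetical buildout options.
options = {'virtualenv': 'true',           # capability venv python via --python
           'bootstrap-version': '2.13.3'}  # forwarded as --buildout-version
# Resulting command, roughly:
#   <cap_python_bin> unibootstrap.py --dists-directory <eggs_cache>
#       --buildout-config <buildout_slave_path> --no-output-bootstrap-config
#       --python <cap_python_venv>/bin/python --buildout-version 2.13.3 .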
Example #17
 def upload_sgen_grep_binprot(self):
     self.addStep(
         FileDownload(MONO_SGEN_GREP_BINPROT_FILENAME,
                      'sgen-grep-binprot',
                      workdir='benchmarker'))
     self.addStep(
         ShellCommand(name='chmod',
                      command=['chmod', 'a+x', 'sgen-grep-binprot'],
                      workdir='benchmarker'))
Example #18
    def __init__(self, isOptional=False, patchDir=".", **kwargs):
        """arguments:
        @type  patchDir:    string
        @param patchDir:    The directory on the master that holds the patches
                            This directory is relative to the base buildmaster
                            directory.
                            ie. /home/buildmaster/project
                            Defaults to '.'
        'workdir' is assumed to be 'build' and should be passed if it is
        anything else.
        'isOptional' is assumed to be False; if the patch is optional, pass True.
        """

        self.patchDir = patchDir
        self.isOptional = isOptional
        # mastersrc and slavedest get overridden in start()
        if 'workdir' not in kwargs:
            kwargs['workdir'] = "build"
        FileDownload.__init__(self, mastersrc=".", slavedest=".", **kwargs)
Example #19
    def __init__(self, isOptional=False, patchDir=".", workdir="build",
                 **kwargs):
        """arguments:
        @type  patchDir:    string
        @param patchDir:    The directory on the master that holds the patches
                            This directory is relative to the base buildmaster
                            directory.
                            ie. /home/buildmaster/project
                            Defaults to '.'
        'workdir' is assumed to be 'build' and should be passed if it is
        anything else.
        'isOptional' is assumed to be False; if the patch is optional, pass True.
        """
        FileDownload.__init__(self, mastersrc=".", slavedest=".", 
                              workdir=workdir, **kwargs)
        self.addFactoryArguments(isOptional=isOptional, patchDir=patchDir)

        self.isOptional = isOptional
        self.patchDir = patchDir
        self.super_class = FileDownload
Example #20
def ros_debbuild(c, job_name, packages, url, distro, arch, rosdistro, version, machines, othermirror, keys, trigger_pkgs = None):
    gbp_args = ['-uc', '-us', '--git-ignore-branch', '--git-ignore-new',
                '--git-verbose', '--git-dist='+distro, '--git-arch='+arch]
    f = BuildFactory()
    # Remove the build directory.
    f.addStep(
        RemoveDirectory(
            name = job_name+'-clean',
            dir = Interpolate('%(prop:workdir)s'),
            hideStepIf = success,
        )
    )
    # Check out the repository master branch, since releases are tagged and not branched
    f.addStep(
        Git(
            repourl = url,
            branch = 'master',
            alwaysUseLatest = True, # this avoids broken builds when schedulers send wrong tag/rev
            mode = 'full' # clean out old versions
        )
    )
    # Update the cowbuilder
    f.addStep(
        ShellCommand(
            command = ['cowbuilder-update.py', distro, arch] + keys,
            hideStepIf = success
        )
    )
    # Need to build each package in order
    for package in packages:
        debian_pkg = 'ros-'+rosdistro+'-'+package.replace('_','-')  # debian package name (ros-groovy-foo)
        branch_name = 'debian/'+debian_pkg+'_%(prop:release_version)s_'+distro  # release branch from bloom
        deb_name = debian_pkg+'_%(prop:release_version)s'+distro
        final_name = debian_pkg+'_%(prop:release_version)s-%(prop:datestamp)s'+distro+'_'+arch+'.deb'
        # Check out the proper tag. Use --force to delete changes from previous deb stamping
        f.addStep(
            ShellCommand(
                haltOnFailure = True,
                name = package+'-checkout',
                command = ['git', 'checkout', Interpolate(branch_name), '--force'],
                hideStepIf = success
            )
        )
        # Download script for building the source deb
        f.addStep(
            FileDownload(
                name = job_name+'-grab-build-source-deb-script',
                mastersrc = 'scripts/build_source_deb.py',
                slavedest = Interpolate('%(prop:workdir)s/build_source_deb.py'),
                mode = 0755,
                hideStepIf = success
            )
        )
Example #21
    def upload_benchmarker(self):
        self.addStep(
            FileDownload(Interpolate('%s/benchmarker.tar.gz' % MASTERWORKDIR),
                         'benchmarker.tar.gz',
                         workdir='.'))

        self.addStep(
            ShellCommand(name='md5',
                         command=['md5sum', 'benchmarker.tar.gz'],
                         workdir='.'))
        self.addStep(
            ShellCommand(name='unpack_benchmarker',
                         command=['tar', 'xf', 'benchmarker.tar.gz'],
                         workdir='.'))
Example #22
    def __init__(self, **kwargs):
        if 'command' in kwargs:
            if 'mastersrc' in kwargs:
                raise ValueError,"Unexpected 'mastersrc' argument."
            if 'slavedest' in kwargs:
                raise ValueError,"Unexpected 'slavedest' argument."

            # This is the initial construction, create a temporary
            # batch file to run the command.
            import os
            import tempfile

            command = kwargs.pop('command')
            tf = tempfile.NamedTemporaryFile(delete=False)
            print >>tf, '@echo on'
            print >>tf, ' '.join('"%s"' % a for a in command)
            tf.close()

            remotename = kwargs.get('name', 'batched-command')
            kwargs['mastersrc'] = os.path.abspath(tf.name)
            kwargs['slavedest'] = '%s.bat' % remotename

        FileDownload.__init__(self, **kwargs)
Example #23
    def __init__(self, **kwargs):
        if "command" in kwargs:
            if "mastersrc" in kwargs:
                raise ValueError, "Unexpected 'mastersrc' argument."
            if "slavedest" in kwargs:
                raise ValueError, "Unexpected 'slavedest' argument."

            # This is the initial construction, create a temporary
            # batch file to run the command.
            import os
            import tempfile

            command = kwargs.pop("command")
            tf = tempfile.NamedTemporaryFile(delete=False)
            print >> tf, "@echo on"
            print >> tf, " ".join('"%s"' % a for a in command)
            tf.close()

            remotename = kwargs.get("name", "batched-command")
            kwargs["mastersrc"] = os.path.abspath(tf.name)
            kwargs["slavedest"] = "%s.bat" % remotename

        FileDownload.__init__(self, **kwargs)
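The constructor above (shown twice, in Examples #22 and #23, with different quoting styles) turns a command list into a temporary .bat file on the master and downloads it under the step name. A hedged usage sketch, with a hypothetical name for the subclass since it is not shown in the snippet; executing the downloaded batch file is a separate step:

step = BatchedFileDownload(name='configure',
                           command=['cmake', '-G', 'Visual Studio 10', '..'])
# -> downloads 'configure.bat' to the slave, containing:
#    @echo on
#    "cmake" "-G" "Visual Studio 10" ".."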
Example #24
 def __init__(self,
              mastersrc=None,
              patchDir=".",
              workdir="mozilla",
              slavedest=".mozconfig",
              **kwargs):
     """arguments:
     @type  patchDir:   string
     @param patchDir:   The directory on the master that holds the mozconfig
                         This directory is relative to the base buildmaster
                         directory.
                         ie. /home/buildmaster/project
                         Defaults to '.'
     """
     # mastersrc and slavedest get overridden in start()
     FileDownload.__init__(self,
                           mastersrc=mastersrc,
                           workdir=workdir,
                           slavedest=slavedest,
                           **kwargs)
     self.addFactoryArguments(patchDir=patchDir)
     self.patchDir = patchDir
     self.super_class = FileDownload
Example #25
def standalone_buildout(configurator, options, cfg_tokens, manifest_dir):
    """Simple download from master of a self-contained buildout conf file.

    See module docstring for signature and return values.
    """
    if len(cfg_tokens) != 1:
        raise ValueError("Wrong standalong buildout specification: %r" %
                         cfg_tokens)

    if 'bootstrap-script' in options:
        warnings.warn(
            "The option 'boostrap-script' is now ignored, all "
            "bootstraps are done with unibootstrap.py", DeprecationWarning)
    conf_path = cfg_tokens[0]
    conf_name = os.path.split(conf_path)[-1]
    conf_path = os.path.join(manifest_dir, conf_path)
    return conf_name, (FileDownload(
        mastersrc=conf_path,
        slavedest=conf_name,
        name="download",
        description=["Download", "buildout", "conf"],
        haltOnFailure=True), )
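standalone_buildout takes a single token: a conf file path relative to the manifest directory, which is downloaded to the slave under its bare file name. With a hypothetical token:

# Hypothetical single-token specification.
cfg_tokens = ['buildouts/release.cfg']
# -> conf_name 'release.cfg';
#    mastersrc '<manifest_dir>/buildouts/release.cfg', slavedest 'release.cfg'.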
Example #26
	def initFactory(self,arch):
		if arch != "linux-64": return None
		f = factory.BuildFactory()
		f.addStep(FileDownload(mastersrc="/var/www/pips/get-pips4u.sh",
                            slavedest="get-pips4u.sh"))
		f.addStep(Cleaning(command=["rm","-rf","/tmp/pipsget"]))
		f.addStep(GetPipsTarball(
			command=["sh","get-pips4u.sh","--devel",
				"--srcdir", "/tmp/pipsget/src",
				"--prefix", "/tmp/pipsget/root",
				"--force",
				"--debug"])
			)
		f.addStep(Cleaning(command=["rm","-rf","/tmp/pipsget"]))
		f.addStep(GetPipsSvn(
			command=["sh","get-pips4u.sh",
				"--srcdir", "/tmp/pipsget/src",
				"--prefix", "/tmp/pipsget/root",
				"--force",
				"--debug"])
			)
		return f
Example #27
def disable_intel_turbo_steps():
    steps = []
    steps.append(
        ShellCommand(
            name="disableintelturbo",
            command=[
                'bash', '-c',
                '(echo 0 | sudo /usr/bin/tee /sys/devices/system/cpu/cpufreq/boost) || (echo "only supported on Intel CPUs" && exit 1)'
            ],
            haltOnFailure=True))

    class AlwaysSuccessShellCommand(ShellCommand):
        def __init__(self, *args, **kwargs):
            ShellCommand.__init__(self, *args, **kwargs)

        def finished(self, _):
            ShellCommand.finished(self, SUCCESS)

    # cf. http://pm-blog.yarda.eu/2011/10/deeper-c-states-and-increased-latency.html
    # by keeping the file descriptor alive, we make sure that this setting is used.
    # after closing the file descriptor, the old setting will be restored by the
    # kernel module.
    steps.append(FileDownload('forcec0state.sh', 'forcec0state.sh'))

    # `setsid' is used in to escape the process group, otherwise it will be
    # killed by the timeout logic of AlwaysSuccessShellCommand. since the
    # parent process gets killed by it, we always force it to be
    # successful. (I wish there would be a nicer way to do it).
    steps.append(
        AlwaysSuccessShellCommand(name="forceC0state",
                                  command=[
                                      'sudo', '-b', '/bin/bash', '-c',
                                      'setsid bash -x ./forcec0state.sh'
                                  ],
                                  haltOnFailure=False,
                                  flunkOnFailure=False,
                                  timeout=5))

    return steps
Example #28
         command=[
             'gbp', 'dch', '-a', '--ignore-branch', '--verbose', '-N',
             Interpolate('%(prop:release_version)s-%(prop:datestamp)s' +
                         distro)
         ],
         descriptionDone=[
             'stamped changelog',
             Interpolate('%(prop:release_version)s'),
             Interpolate('%(prop:datestamp)s')
         ]))
 # download hooks
 f.addStep(
     FileDownload(
         name=package + '-grab-hooks',
         mastersrc='hooks/D05deps',
         slavedest=Interpolate('%(prop:workdir)s/hooks/D05deps'),
         hideStepIf=success,
         mode=0777  # make this executable for the cowbuilder
     ))
 # Download script for building the binary deb
 f.addStep(
     FileDownload(
         name=job_name + '-grab-build-binary-deb-script',
         mastersrc='scripts/build_binary_deb.py',
         slavedest=Interpolate('%(prop:workdir)s/build_binary_deb.py'),
         mode=0755,
         hideStepIf=success))
 # build the binary from the git working copy
 f.addStep(
     ShellCommand(
         haltOnFailure=True,
Example #29
    ShellCommand(command=["sed", "-i", "s/\"version\": SDK\\[\"PYTHONVERSION\"\\]\\[6:\\],/\"version\": SDK[\"PYTHONVERSION\"][6:].rstrip('dmu'),/", "makepanda/makepandacore.py"]),

    # Decode the version number from the dtool/PandaVersion.pp file.
    SetPropertyFromCommand("version", command=[
        "python3", "makepanda/getversion.py", buildtype_flag],
        haltOnFailure=True),

    # Delete the built dir, if requested.
    ShellCommand(name="clean", command=get_clean_command(),
                 haltOnFailure=False, doStepIf=lambda step:step.getProperty("clean", False)),

    # These steps fill in properties used to determine upstream_version.
    ] + whl_version_steps + [

    # Download the Dockerfile for this distribution.
    FileDownload(mastersrc=Interpolate("dockerfiles/%(prop:suite)s-%(prop:arch)s"), workerdest="Dockerfile", workdir="context"),

    # Make sure the base distribution is up-to-date.
    ShellCommand(command=cloudimg_cmd, workdir="context"),

    # Build the Docker image.
    ShellCommand(name="setup", command=setup_cmd, workdir="context", haltOnFailure=True),

    # Invoke makepanda.
    Compile(name="compile py2",
            command=get_build_command(2),
            haltOnFailure=True,
            doStepIf=is_branch("release/1.10.x")),
    Compile(name="compile py3",
            command=get_build_command(3),
            haltOnFailure=True),
Example #30
def ros_testbuild(c,
                  job_name,
                  url,
                  branch,
                  distro,
                  arch,
                  rosdistro,
                  machines,
                  othermirror,
                  keys,
                  token=None):

    # Change source is either GitPoller or GitPRPoller
    # TODO: make this configurable for svn/etc
    project_name = ''
    if token:
        project_name = '_'.join([job_name, rosdistro, 'prtestbuild'])
        c['change_source'].append(
            GitPRPoller(
                name=rosdistro + "_pr_poller",
                repourl=url,  # this may pose some problems
                project=project_name,
                token=token,
                pollInterval=15))
        # parse repo_url git@github:author/repo.git to repoOwner, repoName
        r_owner, r_name = (url.split(':')[1])[:-4].split('/')
        c['status'].append(
            status.GitHubStatus(token=token,
                                repoOwner=r_owner,
                                repoName=r_name))
    else:
        project_name = '_'.join([job_name, rosdistro, 'testbuild'])
        c['change_source'].append(
            NamedGitPoller(repourl=url,
                           name=rosdistro,
                           branch=branch,
                           project=project_name))

    c['schedulers'].append(
        basic.SingleBranchScheduler(
            name=project_name,
            builderNames=[
                project_name,
            ],
            change_filter=ChangeFilter(project=project_name)))

    # Directory which will be bind-mounted
    binddir = '/tmp/' + project_name

    f = BuildFactory()
    # Remove any old crud in /tmp folder
    f.addStep(ShellCommand(command=['rm', '-rf', binddir], hideStepIf=success))
    # Check out repository (to /tmp)
    f.addStep(
        Git(repourl=util.Property('repository', default=url),
            branch=util.Property('branch', default=branch),
            alwaysUseLatest=True,
            mode='full',
            workdir=binddir + '/src/' + job_name))
    # Download testbuild.py script from master
    f.addStep(
        FileDownload(name=job_name + '-grab-script',
                     mastersrc='scripts/testbuild.py',
                     slavedest=Interpolate('%(prop:workdir)s/testbuild.py'),
                     hideStepIf=success))
    # Update the cowbuilder
    f.addStep(
        ShellCommand(command=['cowbuilder-update.py', distro, arch] + keys,
                     hideStepIf=success))
    # Make and run tests in a cowbuilder
    f.addStep(
        TestBuild(name=job_name + '-build',
                  command=[
                      'sudo', 'cowbuilder', '--execute',
                      Interpolate('%(prop:workdir)s/testbuild.py'),
                      '--distribution', distro, '--architecture', arch,
                      '--bindmounts', binddir, '--basepath',
                      '/var/cache/pbuilder/base-' + distro + '-' + arch +
                      '.cow', '--override-config', '--othermirror',
                      othermirror, '--', binddir, rosdistro
                  ],
                  logfiles={'tests': binddir + '/testresults'},
                  descriptionDone=['make and test', job_name]))
    c['builders'].append(
        BuilderConfig(name=project_name, slavenames=machines, factory=f))
    # return the name of the job created
    return project_name
Example #31
def ros_docbuild(c,
                 job_name,
                 url,
                 branch,
                 rosdistro,
                 machines,
                 trigger_pkgs=None):

    # Directory which will be bind-mounted
    binddir = job_name + '_' + rosdistro + '_docbuild'

    f = BuildFactory()
    # Remove any old crud in /tmp folder
    f.addStep(ShellCommand(command=['rm', '-rf', binddir], hideStepIf=success))
    # Check out repository (to /tmp)
    f.addStep(
        Git(repourl=url,
            branch=branch,
            alwaysUseLatest=True,
            mode='full'
            #workdir = binddir+'/src/'+job_name+'/'
            ))
    # Download  script from master
    f.addStep(
        FileDownload(name=job_name + '-grab-script',
                     mastersrc='scripts/docbuild.py',
                     workerdest=Interpolate('%(prop:builddir)s/docbuild.py'),
                     hideStepIf=success))

    f.addStep(
        FileDownload(
            name=job_name + '-grab-script',
            mastersrc='scripts/unique_docker_doc.py',
            workerdest=Interpolate('%(prop:builddir)s/unique_docker_doc.py'),
            hideStepIf=success))

    f.addStep(
        FileDownload(
            name=job_name + '-grab-script',
            mastersrc='docker_components/Dockerfile_doc',
            workerdest=Interpolate('%(prop:builddir)s/Dockerfile_doc'),
            hideStepIf=success))

    f.addStep(
        FileDownload(name=job_name + '-grab-script',
                     mastersrc='docker_components/docker-compose-doc.yaml',
                     workerdest=Interpolate(
                         '%(prop:builddir)s/docker-compose-doc.yaml'),
                     hideStepIf=success))
    # reedit docker-compose-doc.yaml
    f.addStep(
        ShellCommand(
            haltOnFailure=True,
            name=job_name + '-reedit-docker-compose',
            command=[
                'python', 'unique_docker_doc.py',
                Interpolate('%(prop:builddir)s/docker-compose-doc.yaml'),
                Interpolate(job_name)
            ],
            workdir=Interpolate('%(prop:builddir)s'),
            descriptionDone=['reedit docker-compose', job_name]))
    # Build docker image for creating doc
    f.addStep(
        ShellCommand(
            # haltOnFailure = True,
            name=job_name + '-create_docker',
            command=[
                'docker-compose', '-f',
                Interpolate('%(prop:builddir)s/docker-compose-doc.yaml'),
                'build'
            ],
            workdir=Interpolate('%(prop:builddir)s'),
            descriptionDone=['create_doc', job_name]))

    # creating doc in docker
    f.addStep(
        ShellCommand(
            # haltOnFailure=True,
            name=job_name + '-create_doc',
            command=[
                'docker',
                'run',
                # '-v', 'ros-repository-docker_deb_repository:/home/package',
                '--name',
                Interpolate('doc_' + job_name),
                Interpolate('scalable-doc:' + job_name),
                'python',
                '/root/docbuild.py',
                '/tmp/',
                rosdistro
            ],
            descriptionDone=['create doc', job_name]))

    f.addStep(
        ShellCommand(name=job_name + '-copydocs',
                     command=[
                         'docker', 'cp',
                         Interpolate('doc_' + job_name + ':' + '/tmp/docs'),
                         '/docs'
                     ],
                     workdir=Interpolate('%(prop:builddir)s'),
                     descriptionDone=['copydocs', job_name]))

    # rm container
    f.addStep(
        ShellCommand(name=job_name + '-rm_container',
                     command=['docker', 'rm',
                              Interpolate('doc_' + job_name)],
                     descriptionDone=['remove docker container', job_name]))

    # rm image
    f.addStep(
        ShellCommand(name=job_name + '-rm_image',
                     command=[
                         'docker', 'image', 'rm',
                         Interpolate('scalable-doc:' + job_name)
                     ],
                     descriptionDone=['remove docker image', job_name]))

    # Trigger if needed
    if trigger_pkgs != None:
        f.addStep(
            Trigger(schedulerNames=[
                t.replace('_', '-') + '-' + rosdistro + '-doctrigger'
                for t in trigger_pkgs
            ],
                    waitForFinish=False,
                    alwaysRun=True))
    # Create trigger
    c['schedulers'].append(
        triggerable.Triggerable(name=job_name.replace('_', '-') + '-' +
                                rosdistro + '-doctrigger',
                                builderNames=[
                                    job_name + '_' + rosdistro + '_docbuild',
                                ]))
    # Add builder config
    c['builders'].append(
        BuilderConfig(name=job_name + '_' + rosdistro + '_docbuild',
                      workernames=machines,
                      factory=f))
    # return the name of the job created
    return job_name + '_' + rosdistro + '_docbuild'
Example #32
 def __init__(self, **kwargs):
     FileDownload.__init__(self, mastersrc="virtualenv.py", slavedest="virtualenv.py")
Example #33
 def __init__(self, mastersrc=None, slavedest=None, **kwargs):
     global moduledir
     FileDownload.__init__(self,
             mastersrc= "%s/catalyst.common.ctest" % moduledir,
             slavedest=Interpolate("%(prop:builddir)s/catalyst.common.ctest"),
             **kwargs)
Example #34
 def upload_credentials(self):
     self.addStep(
         FileDownload('benchmarkerCredentials',
                      'benchmarkerCredentials',
                      workdir='benchmarker'))
Example #35
build_steps = [
    Git(config.git_url, getDescription={'match': 'v*'}),

    # Decode the version number from the dtool/PandaVersion.pp file.
    SetPropertyFromCommand("version",
                           command=["python", "makepanda/getversion.py"],
                           haltOnFailure=True),

    # Steps to figure out which .whl version to use.
] + whl_version_steps + [

    # Download the Dockerfile for this distribution.
    FileDownload(mastersrc=Interpolate("dockerfiles/manylinux1-%(prop:arch)s"),
                 slavedest="Dockerfile",
                 workdir="context"),

    # And the build scripts.
    FileDownload(mastersrc="build_scripts/build.sh",
                 slavedest="build_scripts/build.sh",
                 workdir="context"),
    FileDownload(mastersrc="build_scripts/build_utils.sh",
                 slavedest="build_scripts/build_utils.sh",
                 workdir="context"),

    # Build the Docker image.
    ShellCommand(
        name="setup", command=setup_cmd, workdir="context",
        haltOnFailure=True),
Example #36
def launchpad_debbuild(c, package, version, binaries, url, distro, arch, machines, othermirror, keys, trigger_names = None):
    f = BuildFactory()
    # Grab the source package
    f.addStep(
        ShellCommand(
            haltOnFailure = True,
            name = package+'-getsourcedeb',
            command = ['dget', '--allow-unauthenticated', url]
        )
    )
    # download hooks
    f.addStep(
        FileDownload(
            name = package+'-grab-hooks',
            mastersrc = 'hooks/D05deps',
            slavedest = Interpolate('%(prop:workdir)s/hooks/D05deps'),
            hideStepIf = success,
            mode = 0777 # make this executable for the cowbuilder
        )
    )
    # Update the cowbuilder
    f.addStep(
        ShellCommand(
            command = ['cowbuilder-update.py', distro, arch] + keys,
            hideStepIf = success
        )
    )
    # Build it
    f.addStep(
        ShellCommand(
            haltOnFailure = True,
            name = package+'-build',
            command = ['cowbuilder',
                       '--build', package+'_'+version+'.dsc',
                       '--distribution', distro, '--architecture', arch,
                       '--basepath', '/var/cache/pbuilder/base-'+distro+'-'+arch+'.cow',
                       '--buildresult', Interpolate('%(prop:workdir)s'),
                       '--hookdir', Interpolate('%(prop:workdir)s/hooks'),
                       '--othermirror', othermirror,
                       '--override-config'],
            env = {'DIST': distro},
            descriptionDone = ['built binary debs', ]
        )
    )
    # Upload debs
    for deb_arch in binaries.keys():
        for deb_name in binaries[deb_arch]:
            debian_pkg = deb_name+'_'+version+'_'+deb_arch+'.deb'
            f.addStep(
                FileUpload(
                    name = deb_name+'-upload',
                    slavesrc = Interpolate('%(prop:workdir)s/'+debian_pkg),
                    masterdest = Interpolate('binarydebs/'+debian_pkg),
                    hideStepIf = success
                )
            )
            # Add the binarydeb using reprepro updater script on master
            f.addStep(
                MasterShellCommand(
                    name = deb_name+'-include',
                    command = ['reprepro-include.bash', deb_name, Interpolate(debian_pkg), distro, deb_arch],
                    descriptionDone = ['updated in apt', debian_pkg]
                )
            )
    # Trigger if needed
    if trigger_names != None:
        f.addStep( Trigger(schedulerNames = trigger_names, waitForFinish = False) )
    # Add to builders
    c['builders'].append(
        BuilderConfig(
            name = package+'_'+distro+'_'+arch+'_debbuild',
            slavenames = machines,
            factory = f
        )
    )
    # return name of builder created
    return package+'_'+distro+'_'+arch+'_debbuild'
Example #37
def ros_branch_build(c,
                     job_name,
                     packages,
                     url,
                     branch,
                     distro,
                     arch,
                     rosdistro,
                     machines,
                     othermirror,
                     keys,
                     trigger_pkgs=None):
    gbp_args = [
        '-uc', '-us', '--git-ignore-branch', '--git-ignore-new',
        '--git-verbose', '--git-dist=' + distro, '--git-arch=' + arch
    ]

    with open(os.path.dirname(os.path.realpath(__file__)) +
              "/spec.yaml") as file:
        spec_list = yaml.full_load(file)

    f = BuildFactory()

    # Remove the build directory.
    f.addStep(
        RemoveDirectory(
            name=job_name + '-clean',
            dir=Interpolate('%(prop:workdir)s'),
            hideStepIf=success,
        ))
    # Check out the repository master branch, since releases are tagged and not branched
    f.addStep(
        Git(
            repourl=url,
            branch=branch,
            alwaysUseLatest=
            True,  # this avoids broken builds when schedulers send wrong tag/rev
            mode='full',  # clean out old versions
            getDescription={'tags': True}))
    # Update the cowbuilder
    f.addStep(
        ShellCommand(command=['cowbuilder-update.py', distro, arch] + keys,
                     hideStepIf=success))
    # Generate the changelog for the package
    f.addStep(
        ShellCommand(haltOnFailure=True,
                     name='catkin_generate_changelog',
                     command=['catkin_generate_changelog', '-y'],
                     descriptionDone=['catkin_generate_changelog']))
    # Add all files including untracked ones
    f.addStep(
        ShellCommand(haltOnFailure=True,
                     name='add_changelogs',
                     command=['git', 'add', '.'],
                     descriptionDone=['add_changelogs']))
    # Commit the changelog after updating it
    f.addStep(
        ShellCommand(haltOnFailure=True,
                     name='update_changelogs',
                     command=['git', 'commit', '-m', '\"Updated changelogs\"'],
                     descriptionDone=['update_changelogs']))
    # Prepare the release without pushing it
    f.addStep(
        ShellCommand(haltOnFailure=True,
                     name='catkin_prepare_release',
                     command=[
                         'catkin_prepare_release', '--bump', 'minor',
                         '--no-push', '-y'
                     ],
                     descriptionDone=['catkin_prepare_release']))
    #
    f.addStep(
        ShellCommand(
            haltOnFailure=True,
            name='git_bloom_generate_release',
            command=['git-bloom-generate', '-y', 'rosrelease', rosdistro],
        ))
    f.addStep(
        ShellCommand(
            haltOnFailure=True,
            name='git_bloom_generate_debian',
            command=[
                'git-bloom-generate', '-y', 'rosdebian', '-a', '-p', 'release',
                rosdistro
            ],
        ))
    # Get the tag number for the lastest commit
    f.addStep(
        SetPropertyFromCommand(
            command="git describe --tags",
            property="release_version",
            name='latest_tag',
        ))
    # Need to build each package in order
    for package in packages:
        debian_pkg = 'ros-' + rosdistro + '-' + package.replace(
            '_', '-')  # debian package name (ros-groovy-foo)
        branch_name = 'debian/' + debian_pkg + '_%(prop:release_version)s-0_' + distro
        deb_name = debian_pkg + '_%(prop:release_version)s-0' + distro
        final_name = debian_pkg + '_%(prop:release_version)s-%(prop:datestamp)s' + distro + '_' + arch + '.deb'
        # Check out the proper tag. Use --force to delete changes from previous deb stamping
        f.addStep(
            ShellCommand(haltOnFailure=True,
                         name=package + '-checkout',
                         command=[
                             'git', 'checkout',
                             Interpolate(branch_name), '--force'
                         ],
                         hideStepIf=success))
        # A hack for generating the debian folder so we could build the lastest commit of the specified branch
        # f.addStep(
        #     ShellCommand(
        #         haltOnFailure = True,
        #         name = package+'-bloom_generate',
        #         command= ['bloom-generate', 'rosdebian'],
        #         descriptionDone = ['bloom_generate', package]
        #     )
        # )
        # Download script for building the source deb
        f.addStep(
            FileDownload(
                name=job_name + '-grab-build-source-deb-script',
                mastersrc='scripts/build_source_deb.py',
                slavedest=Interpolate('%(prop:workdir)s/build_source_deb.py'),
                mode=0755,
                hideStepIf=success))
Example #38
def archive_buildout(self, options, cfg_tokens, manifest_dir):
    """Steps to retrieve an archive (tarball, zip...) buildout from the master.

    Currently only .tar.bz2 is supported.

    The path of the archive to retrieve is made of:
         - a base directory from the same option as upload options for
           packaging subfactory (``packaging.upload_dir``)
         - a subdir and an archive name property, both specified as tokens in
           the buildout option. Archive name MUST NOT contain the archive type
           suffix (e.g, '.tar.bz2')

    Therefore, this is meant to work on a wide range of archives, not tied
    to a particular project

    Typically this would be for a triggered build that would do some
    further packaging or testing.

    For example, one could use this for a generic binary builder that produces
    a docker image based on debian:7.7 for any archive produced by this master.
    """
    archive_type = '.tar.bz2'
    subdir_prop, archive_prop, conf_name = cfg_tokens
    master_path = os.path.join(options['packaging.root-dir'],
                               '%%(prop:%s)s' % subdir_prop,
                               '%%(prop:%s)s' % archive_prop + archive_type)
    slave_name_unpacked = '%%(prop:%s)s' % archive_prop
    slave_fname = slave_name_unpacked + archive_type
    slave_path = '../' + slave_fname
    return conf_name, [
        ShellCommand(command=[
            'find', '.', '-maxdepth', '1', '-name', '*' + archive_type + '*',
            '-delete'
        ],
                     workdir='.',
                     name='clean_arch',
                     description=['remove', 'prev', 'downloads']),
        FileDownload(slavedest=Interpolate(slave_path),
                     mastersrc=Interpolate(master_path)),
        FileDownload(slavedest=Interpolate(slave_path + '.md5'),
                     mastersrc=Interpolate(master_path + '.md5')),
        ShellCommand(
            command=['md5sum', '-c',
                     Interpolate(slave_fname + '.md5')],
            workdir='.',
            name="checksum",
            haltOnFailure=True),
        ShellCommand(command=['tar', 'xjf',
                              Interpolate(slave_fname)],
                     workdir='.',
                     name="untar",
                     description=['unpacking', 'archive'],
                     descriptionDone=['unpacked', 'archive'],
                     haltOnFailure=True),
        ShellCommand(command=['rm', '-rf', 'build'],
                     workdir='.',
                     name='clean',
                     description=['removing', 'previous', 'build'],
                     descriptionDone=['removed', 'previous', 'build']),
        ShellCommand(command=['mv',
                              Interpolate(slave_name_unpacked), 'build'],
                     workdir='.',
                     name='mv',
                     description=['setting', 'at', 'build/'])
    ]
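The two property-name tokens and the conf name drive the path construction above; an illustration with hypothetical token values:

# Hypothetical tokens: two property NAMES plus the conf file inside the archive.
cfg_tokens = ['deployment_subdir', 'archive_name', 'buildout.cfg']
# master_path -> <packaging.root-dir>/%(prop:deployment_subdir)s/%(prop:archive_name)s.tar.bz2
# slave_path  -> ../%(prop:archive_name)s.tar.bz2 (a matching .md5 is fetched too)
# After checksum verification and unpacking, the previous 'build' directory is
# removed and the unpacked directory is renamed to 'build'.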
Example #39
def ros_docbuild(c, job_name, url, branch, distro, arch, rosdistro, machines, othermirror, keys, trigger_pkgs = None):

    # Directory which will be bind-mounted
    binddir = '/tmp/'+job_name+'_'+rosdistro+'_docbuild'

    f = BuildFactory()
    # Remove any old crud in /tmp folder
    f.addStep(
        ShellCommand(
            command = ['rm', '-rf', binddir],
            hideStepIf = success
        )
    )
    # Check out repository (to /tmp)
    f.addStep(
        Git(
            repourl = url,
            branch = branch,
            alwaysUseLatest = True,
            mode = 'full',
            workdir = binddir+'/src/'+job_name+'/'
        )
    )
    # Download testbuild.py script from master
    f.addStep(
        FileDownload(
            name = job_name+'-grab-script',
            mastersrc = 'scripts/docbuild.py',
            slavedest = Interpolate('%(prop:workdir)s/docbuild.py'),
            hideStepIf = success
        )
    )
    # Update the cowbuilder
    f.addStep(
        ShellCommand(
            command = ['cowbuilder-update.py', distro, arch] + keys,
            hideStepIf = success
        )
    )
    # Build docs in a cowbuilder
    f.addStep(
        ShellCommand(
            haltOnFailure = True,
            name = job_name+'-docbuild',
            command = ['cowbuilder', '--execute', Interpolate('%(prop:workdir)s/docbuild.py'),
                       '--distribution', distro, '--architecture', arch,
                       '--bindmounts', binddir,
                       '--basepath', '/var/cache/pbuilder/base-'+distro+'-'+arch+'.cow',
                       '--override-config', '--othermirror', othermirror,
                       '--', binddir, rosdistro],
            env = {'DIST': distro},
            descriptionDone = ['built docs', ]
        )
    )
    # Upload docs to master
    f.addStep(
        DirectoryUpload(
            name = job_name+'-upload',
            slavesrc = binddir+'/docs',
            masterdest = 'docs/' + rosdistro,
            hideStepIf = success
        )
    )
    # Trigger if needed
    if trigger_pkgs != None:
        f.addStep(
            Trigger(
                schedulerNames = [t.replace('_','-')+'-'+rosdistro+'-doctrigger' for t in trigger_pkgs],
                waitForFinish = False,
                alwaysRun=True
            )
        )
    # Create trigger
    c['schedulers'].append(
        triggerable.Triggerable(
            name = job_name.replace('_','-')+'-'+rosdistro+'-doctrigger',
            builderNames = [job_name+'_'+rosdistro+'_docbuild',]
        )
    )
    # Add builder config
    c['builders'].append(
        BuilderConfig(
            name = job_name+'_'+rosdistro+'_docbuild',
            slavenames = machines,
            factory = f
        )
    )
    # return the name of the job created
    return job_name+'_'+rosdistro+'_docbuild'