Example #1
class CrossbowReport(CrossbowBuilder):
    steps = Extend([
        SetPropertyFromCommand(
            'crossbow_job_id',
            extract_fn=lambda stdout, stderr: stdout.strip(),
            command=Crossbow(
                args=[
                    '--github-token', util.Secret('ursabot/github_token'),
                    'latest-prefix', crossbow_prefix
                ]
            ),
            workdir='arrow/dev/tasks'
        ),
        Crossbow(
            args=util.FlattenList([
                '--github-token', util.Secret('ursabot/github_token'),
                'report',
                '--send',
                '--poll',
                '--poll-max-minutes', 120,
                '--poll-interval-minutes', 15,
                '--sender-name', 'Crossbow',
                '--sender-email', '*****@*****.**',
                '--recipient-email', '*****@*****.**',
                '--smtp-user', util.Secret('crossbow/smtp_user'),
                '--smtp-password', util.Secret('crossbow/smtp_password'),
                util.Property('crossbow_job_id')
            ]),
            workdir='arrow/dev/tasks'
        )
    ])
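The `util.Secret('ursabot/github_token')` references above only resolve if the master registers a secrets provider. A minimal sketch, assuming a file-based provider; the slash-separated name suggests the original setup uses something like Vault, and the directory path here is a placeholder:

from buildbot.plugins import secrets

# master.cfg fragment (illustrative): secret names are looked up as files
# in this directory when steps render util.Secret(...) values.
c['secretsProviders'] = [
    secrets.SecretInAFile(dirname="/var/lib/buildbot/secrets")
]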
Example #2
def loadSigningKey():
    pathFrom = "s3://{{ s3_private_bucket }}/{{ groups['master'][0] }}/key/signing.key"
    pathTo = "-"
    command = 'cp'
    return shellCommand(
        command=util.Interpolate("aws --endpoint-url {{ s3_host }} s3 " + command + " " + pathFrom + " " + pathTo + " | gpg --import"),
        env={
            "AWS_ACCESS_KEY_ID": util.Secret("s3.private_access_key"),
            "AWS_SECRET_ACCESS_KEY": util.Secret("s3.private_secret_key")
        },
        name="Load signing key")
Example #3
def getLatestBuildRevision():
    pathFrom = "s3://{{ s3_public_bucket }}/builds/%(prop:branch_pretty)s/latest.txt"
    pathTo = "-"
    command = 'cp'
    return steps.SetPropertyFromCommand(
        command=['aws', '--endpoint-url', '{{ s3_host }}', 's3', command, util.Interpolate(pathFrom), util.Interpolate(pathTo)],
        env={
            "AWS_ACCESS_KEY_ID": util.Secret("s3.public_access_key"),
            "AWS_SECRET_ACCESS_KEY": util.Secret("s3.public_secret_key")
        },
        # Note: We're overwriting this value to set it to the built revision rather than whatever it defaults to
        property="got_revision",
        flunkOnFailure=True,
        haltOnFailure=True,
        name="Get latest build version")
Example #4
class CrossbowTrigger(DockerBuilder):
    tags = ['crossbow']
    steps = [
        GitHub(name='Clone Arrow',
               repourl=util.Property('repository'),
               workdir='arrow',
               mode='full'),
        GitHub(
            name='Clone Crossbow',
            # TODO(kszucs): read it from the comment and set as a property
            repourl='https://github.com/ursa-labs/crossbow',
            workdir='crossbow',
            branch='master',
            mode='full',
            # quite a misleading option, but it prevents checking out the branch
            # set in the sourcestamp by the pull request, which refers to arrow
            alwaysUseLatest=True),
        Crossbow(args=util.FlattenList([
            '--github-token',
            util.Secret('ursabot/github_token'), 'submit', '--output',
            'job.yml', '--job-prefix', 'ursabot', '--arrow-remote',
            util.Property('repository'),
            util.Property('crossbow_args', [])
        ]),
                 workdir='arrow/dev/tasks',
                 result_file='job.yml')
    ]
    images = images.filter(name='crossbow', tag='worker')
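Example #5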
def nix_build_config(
    worker_names: list[str], enable_cachix: bool
) -> util.BuilderConfig:
    """
    Builds one nix flake attribute.
    """
    factory = util.BuildFactory()
    factory.addStep(
        NixBuildCommand(
            env={},
            name="Build flake attr",
            command=[
                "nix",
                "build",
                "-L",
                "--out-link",
                util.Interpolate("result-%(prop:attr)s"),
                util.Property("drv_path"),
            ],
            haltOnFailure=True,
        )
    )
    if enable_cachix:
        factory.addStep(
            steps.ShellCommand(
                name="Upload cachix",
                env=dict(CACHIX_AUTH_TOKEN=util.Secret("cachix-token")),
                command=[
                    "cachix",
                    "push",
                    util.Secret("cachix-name"),
                    util.Interpolate("result-%(prop:attr)s"),
                ],
            )
        )
    factory.addStep(UpdateBuildOutput(name="Update build output", branches=["master"]))
    return util.BuilderConfig(
        name="nix-build",
        workernames=worker_names,
        properties=[],
        collapseRequests=False,
        env={},
        factory=factory,
    )
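A sketch of how the returned config might be registered in master.cfg; the worker name is a placeholder:

# master.cfg fragment (illustrative); c is the BuildmasterConfig dictionary.
c['builders'] = [
    nix_build_config(worker_names=["nix-worker-1"], enable_cachix=False),
]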
Example #6
    def test_constructor_secrets(self):
        fake_storage_service = FakeSecretStorage()

        secret_service = SecretManager()
        secret_service.services = [fake_storage_service]
        yield secret_service.setServiceParent(self.master)

        fake_storage_service.reconfigService(secretdict={"passkey": "1234"})

        bs = yield self.createWorker('bot', util.Secret('passkey'))
        yield bs.startService()
        self.assertEqual(bs.password, '1234')
Example #7
    def setUp(self):
        self.setUpTestReactor()

        fakeStorageService = FakeSecretStorage()
        fakeStorageService.reconfigService(
            secretdict={"secret_key": self._SECRET})

        secretService = SecretManager()
        secretService.services = [fakeStorageService]

        self.changeHook = _prepare_github_change_hook(
            self, strict=True, secret=util.Secret("secret_key"))
        self.changeHook.master.addService(secretService)
Example #8
    def setUp(self):
        self.setup_test_reactor()
        self.master = fakeMasterForHooks(self)

        fakeStorageService = FakeSecretStorage()
        fakeStorageService.reconfigService(secretdict={"secret_key": self._SECRET})

        self.secretService = SecretManager()
        self.secretService.services = [fakeStorageService]
        self.master.addService(self.secretService)

        self.changeHook = change_hook.ChangeHookResource(
            dialects={'gitlab': {'secret': util.Secret("secret_key")}},
            master=self.master)
Example #9
def AWSStep(command, name, doStepIf=True, hideStepIf=False, access=util.Secret("s3.public_access_key"), secret=util.Secret("s3.public_secret_key")):
    commandAry = ['aws', '--endpoint-url', '{{ s3_host }}']
    if isinstance(command, list):
        commandAry.extend(command)
    else:
        commandAry.append(command)
    return shellCommand(
        command=commandAry,
        env={
            "AWS_ACCESS_KEY_ID": access,
            "AWS_SECRET_ACCESS_KEY": secret
        },
        name=name,
        doStepIf=doStepIf,
        hideStepIf=hideStepIf)
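A usage sketch for the helper above; the bucket path and step name are illustrative:

# Illustrative call: list published builds using the default public-bucket credentials.
listBuilds = AWSStep(
    command=['ls', 's3://{{ s3_public_bucket }}/builds/'],
    name="List published builds")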
Example #10
def writeBuildResultsToDatabase(**kwargs):
    """Call the script to save results to the database"""
    return [
        steps.SetPropertyFromCommand(
            name="Save test results to the database",
            command=[
                util.Interpolate(
                    "%(prop:builddir)s/scripts/write_build_results.py"),
                "--run-id",
                util.Property("buildId"),
                util.Property("jsonResultsFile"), "--database-info",
                util.Secret("dataBaseInfo.json")
            ],
            extract_fn=extractDatabaseBuildid,
            **kwargs)
    ]
Example #11
class CrossbowSubmit(CrossbowBuilder):
    """Submit crossbow jobs

    This builder is driven via buildbot properties: the `crossbow_args`
    property is either set by the github hook, which parses github comments
    like `@ursabot package -g conda` (for more see commands.py), or passed
    explicitly by NightlySchedulers.
    """
    steps = Extend([
        Crossbow(args=util.FlattenList([
            '--output-file', 'result.yaml', '--github-token',
            util.Secret('kszucs/github_status_token'), 'submit',
            '--arrow-remote', arrow_repository, '--job-prefix',
            crossbow_prefix,
            util.Property('crossbow_args', [])
        ]),
                 workdir='arrow/dev/tasks',
                 result_file='result.yaml')
    ])
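A sketch of the scheduler path mentioned in the docstring: a nightly scheduler can seed `crossbow_args` directly. The builder name, schedule, and argument values below are assumptions:

from buildbot.plugins import schedulers

# Illustrative: a nightly scheduler that pre-populates the crossbow_args property.
nightly = schedulers.Nightly(
    name='crossbow-nightly',
    builderNames=['Crossbow Submit'],
    hour=5, minute=0,
    properties={'crossbow_args': ['--group', 'nightly']})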
Example #12
def masterConfig(use_interpolation):
    c = {}
    from buildbot.config import BuilderConfig
    from buildbot.process.factory import BuildFactory
    from buildbot.plugins import schedulers, steps, util

    c['services'] = [
        FakeSecretReporter('http://example.com/hook',
                           auth=('user', Interpolate('%(secret:httppasswd)s')))
    ]
    c['schedulers'] = [
        schedulers.ForceScheduler(name="force", builderNames=["testy"])
    ]

    c['secretsProviders'] = [
        FakeSecretStorage(
            secretdict={
                "foo": "secretvalue",
                "something": "more",
                'httppasswd': 'myhttppasswd'
            })
    ]
    f = BuildFactory()

    if use_interpolation:
        if os.name == "posix":
            # on posix we can also check whether the password was passed to the command
            command = Interpolate(
                'echo %(secret:foo)s | ' +
                'sed "s/secretvalue/The password was there/"')
        else:
            command = Interpolate('echo %(secret:foo)s')
    else:
        command = ['echo', util.Secret('foo')]

    f.addStep(steps.ShellCommand(command=command))

    c['builders'] = [
        BuilderConfig(name="testy", workernames=["local1"], factory=f)
    ]
    return c
Example #13
class CrossbowTrigger(DockerBuilder):
    tags = ['crossbow']
    env = {
        'GIT_COMMITTER_NAME': 'ursabot',
        'GIT_COMMITTER_EMAIL': '*****@*****.**'
    }
    steps = [
        GitHub(
            name='Clone Arrow',
            repourl=util.Property('repository'),
            workdir='arrow',
            mode='full'
        ),
        GitHub(
            name='Clone Crossbow',
            repourl=util.Property('crossbow_repo'),
            workdir='crossbow',
            branch='master',
            mode='full',
            # quite a misleading option, but it prevents checking out the branch
            # set in the sourcestamp by the pull request, which refers to arrow
            alwaysUseLatest=True
        ),
        Crossbow(
            args=util.FlattenList([
                '--github-token', util.Secret('ursabot/github_token'),
                'submit',
                '--output', 'job.yml',
                '--job-prefix', 'ursabot',
                '--arrow-remote', util.Property('repository'),
                util.Property('crossbow_args', [])
            ]),
            workdir='arrow/dev/tasks',
            result_file='job.yml'
        )
    ]
    images = images.filter(
        name='crossbow',
        tag='worker'
    )
Example #14
def createBuildfactory():
    factory = util.BuildFactory()
    factory.addSteps(common.cloneRepository())
    factory.addStep(steps.ShellCommand(
        name=util.Interpolate("Register in the Docker Registry %(prop:dockerRegistryUkrl)s"),
        command=["docker", "login", util.Property("dockerRegistryUrl"),
                 "--username", constants.DOCKER_REGISTRY_USER_NAME,
                 "--password", util.Secret("dockerRegistryPassword")
                 ],
        haltOnFailure=True
    ))
    factory.addSteps(common.downloadAndRunScript(
        name=util.Interpolate("Build docker image for %(prop:target)s"),
        scriptName="build_maxscale_docker_image.py",
        args=[
            "--product", util.Property("mdbciProductName"),
            "--product-version", util.Property("target"),
            "--name", util.Property("dockerProductName"),
            "--tag", util.Property("target"),
            "--registry", util.Property("dockerRegistryUrl")
        ],
        workdir=util.Interpolate("%(prop:builddir)s/build/maxscale/"),
    ))
    return factory
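Example #15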
def getBuildPipeline():

    debsClone = steps.Git(repourl="{{ source_deb_repo_url }}",
                          branch=util.Property('branch'),
                          alwaysUseLatest=True,
                          mode="full",
                          method="fresh",
                          flunkOnFailure=True,
                          haltOnFailure=True,
                          name="Cloning deb packaging configs")

    debsVersion = steps.SetPropertyFromCommand(
        command="git rev-parse HEAD",
        property="deb_script_rev",
        flunkOnFailure=True,
        haltOnFailure=True,
        workdir="build",
        name="Get Debian script revision")

    removeSymlinks = common.shellCommand(
        command=['rm', '-rf', 'binaries', 'outputs'],
        alwaysRun=True,
        name="Prep cloned repo for CI use")

    debsFetch = common.syncAWS(
        pathFrom="s3://{{ s3_public_bucket }}/builds/{{ builds_fragment }}",
        pathTo="binaries/%(prop:pkg_major_version)s.%(prop:pkg_minor_version)s/",
        name="Fetch build from S3")

    debsBuild = common.shellSequence(
        commands=[
            common.shellArg(
                command=[
                    'dch', '--changelog', 'opencast/debian/changelog',
                    '--newversion',
                    util.Interpolate(
                        '%(prop:pkg_major_version)s.%(prop:pkg_minor_version)s-%(prop:buildnumber)s-%(prop:short_revision)s'
                    ), '-b', '-D', 'unstable', '-u', 'low', '--empty',
                    util.Interpolate(
                        'Opencast revision %(prop:got_revision)s, packaged with Debian scripts version %(prop:deb_script_rev)s'
                    )
                ],
                logname='dch'),
            common.shellArg(
                command=[
                    'rm', '-f',
                    util.Interpolate(
                        "binaries/%(prop:pkg_major_version)s.%(prop:pkg_minor_version)s/revision.txt"
                    )
                ],
                logname='cleanup'),
            common.shellArg(
                command=util.Interpolate(
                    'echo "source library.sh\ndoOpencast %(prop:pkg_major_version)s.%(prop:pkg_minor_version)s %(prop:branch)s %(prop:got_revision)s" | tee build.sh'
                ),
                logname='write'),
            common.shellArg(
                command=util.Interpolate(
                    'ln -s opencast-%(prop:pkg_major_version)s_%(prop:pkg_major_version)s.%(prop:pkg_minor_version)s.orig.tar.xz opencast-%(prop:pkg_major_version)s_%(prop:pkg_major_version)s.%(prop:pkg_minor_version)s-%(prop:buildnumber)s.orig.tar.xz'
                ),
                logname='link'),
            common.shellArg(command=['bash', 'build.sh'], logname='build'),
            common.shellArg(
                command=util.Interpolate(
                    'echo "Opencast version %(prop:got_revision)s packaged with version %(prop:deb_script_rev)s" | tee outputs/%(prop:oc_commit)s/revision.txt'
                ),
                logname='revision')
        ],
        env={
            "NAME": "Buildbot",
            "EMAIL": "buildbot@{{ groups['master'][0] }}",
            "SIGNING_KEY": util.Interpolate("%(prop:signing_key)s")
        },
        name="Build debs")

    debRepoClone = steps.Git(repourl="{{ source_deb_packaging_repo_url }}",
                             branch="{{ deb_packaging_repo_branch }}",
                             alwaysUseLatest=True,
                             mode="full",
                             method="fresh",
                             flunkOnFailure=True,
                             haltOnFailure=True,
                             name="Cloning deb repo configs")

    debRepoLoadKeys = common.shellCommand(command=['./build-keys'],
                                          name="Loading signing keys")

    debRepoCreate = common.shellCommand(
        command=[
            './create-branch',
            util.Interpolate("%(prop:pkg_major_version)s.x")
        ],
        name=util.Interpolate(
            "Ensuring %(prop:pkg_major_version)s.x repos exist"))

    debRepoIngest = common.shellCommand(
        command=[
            './include-binaries',
            util.Interpolate("%(prop:pkg_major_version)s.x"),
            util.Interpolate("%(prop:repo_component)s"),
            util.Interpolate(
                "outputs/%(prop:revision)s/opencast-%(prop:pkg_major_version)s_%(prop:pkg_major_version)s.x-%(prop:buildnumber)s-%(prop:short_revision)s_amd64.changes"
            )
        ],
        name=util.Interpolate(
            "Adding build to %(prop:pkg_major_version)s.x-%(prop:repo_component)s"
        ))

    debRepoPrune = common.shellCommand(
        command=[
            './clean-unstable-repo',
            util.Interpolate("%(prop:pkg_major_version)s.x")
        ],
        name=util.Interpolate(
            "Pruning %(prop:pkg_major_version)s.x unstable repository"))

    debRepoPublish = common.shellCommand(
        command=[
            "./publish-branch",
            util.Interpolate("%(prop:pkg_major_version)s.x"),
            util.Interpolate("%(prop:signing_key)s")
        ],
        name=util.Interpolate("Publishing %(prop:pkg_major_version)s.x"),
        env={
            "AWS_ACCESS_KEY_ID": util.Secret("s3.public_access_key"),
            "AWS_SECRET_ACCESS_KEY": util.Secret("s3.public_secret_key")
        })

    f_package_debs = util.BuildFactory()
    f_package_debs.addStep(common.getPreflightChecks())
    f_package_debs.addStep(debsClone)
    f_package_debs.addStep(debsVersion)
    f_package_debs.addStep(common.getLatestBuildRevision())
    f_package_debs.addStep(common.getShortBuildRevision())
    f_package_debs.addStep(removeSymlinks)
    f_package_debs.addStep(debsFetch)
    f_package_debs.addStep(common.loadSigningKey())
    f_package_debs.addStep(debsBuild)
    f_package_debs.addStep(debRepoClone)
    f_package_debs.addStep(debRepoLoadKeys)
    f_package_debs.addStep(common.deployS3fsSecrets())
    f_package_debs.addStep(common.mountS3fs())
    f_package_debs.addStep(debRepoCreate)
    f_package_debs.addStep(debRepoIngest)
    f_package_debs.addStep(debRepoPrune)
    f_package_debs.addStep(debRepoPublish)
    f_package_debs.addStep(common.unloadSigningKey())
    f_package_debs.addStep(common.unmountS3fs())
    f_package_debs.addStep(common.cleanupS3Secrets())
    f_package_debs.addStep(common.getClean())

    return f_package_debs
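A sketch showing how the returned factory might be attached to a builder in master.cfg; the builder and worker names are placeholders:

from buildbot.plugins import util

# Illustrative: register the packaging pipeline as a builder.
c['builders'].append(
    util.BuilderConfig(
        name="Package Debs",
        workernames=["deb-worker"],
        factory=getBuildPipeline()))

Example #16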
def nix_eval_config(
    worker_names: list[str], github_token_secret: str
) -> util.BuilderConfig:
    """
    Uses nix-eval-jobs to evaluate hydraJobs from flake.nix in parallel.
    For each evaluated attribute a new build pipeline is started.
    If all builds succeed and the build was for a PR opened by the flake update bot,
    this PR is merged.
    """
    factory = util.BuildFactory()
    # check out the source
    factory.addStep(
        steps.GitHub(
            repourl=util.Property("repository"), method="clean", submodules=True
        )
    )

    factory.addStep(
        NixEvalCommand(
            env={},
            name="Eval flake",
            command=[
                "nix",
                "run",
                "github:nix-community/nix-eval-jobs",
                "--",
                "--workers",
                "8",
                "--gc-roots-dir",
                # FIXME: don't hardcode this
                "/var/lib/buildbot-worker/gcroot",
                "--flake",
                ".#hydraJobs",
            ],
            haltOnFailure=True,
        )
    )
    # Merge flake-update pull requests if CI succeeds
    factory.addStep(
        MergePr(
            name="Merge pull-request",
            env=dict(GITHUB_TOKEN=util.Secret(github_token_secret)),
            github_token_secret=util.Secret(github_token_secret),
            base_branches=["master"],
            owners=["doctor-cluster-bot"],
            command=[
                "gh",
                "pr",
                "merge",
                "--repo",
                util.Property("project"),
                "--rebase",
                util.Property("pullrequesturl"),
            ],
        )
    )

    return util.BuilderConfig(
        name="nix-eval",
        workernames=worker_names,
        factory=factory,
        properties=dict(virtual_builder_name="nix-eval"),
    )
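Example #17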
def nix_update_flake_config(
    worker_names: list[str], projectname: str, github_token_secret: str
) -> util.BuilderConfig:
    """
    Updates the flake and opens a PR for it.
    """
    factory = util.BuildFactory()
    url_with_secret = util.Interpolate(
        f"https://*****:*****@github.com/{projectname}"
    )
    factory.addStep(
        steps.Git(
            repourl=url_with_secret,
            method="clean",
            submodules=True,
            haltOnFailure=True,
        )
    )
    factory.addStep(
        steps.ShellCommand(
            name="Update flakes",
            env=dict(
                GIT_AUTHOR_NAME="buildbot",
                GIT_AUTHOR_EMAIL="*****@*****.**",
                GIT_COMMITTER_NAME="buildbot",
                GIT_COMMITTER_EMAIL="*****@*****.**",
            ),
            command=[
                "nix",
                "flake",
                "update",
                "--commit-lock-file",
                "--commit-lockfile-summary",
                "flake.lock: Update",
            ],
            haltOnFailure=True,
        )
    )
    factory.addStep(
        steps.ShellCommand(
            name="Force-Push to update_flake_lock branch",
            command=[
                "git",
                "push",
                "--force",
                "origin",
                "HEAD:refs/heads/update_flake_lock",
            ],
            haltOnFailure=True,
        )
    )
    factory.addStep(
        CreatePr(
            name="Create pull-request",
            env=dict(GITHUB_TOKEN=util.Secret(github_token_secret)),
            command=[
                "gh",
                "pr",
                "create",
                "--repo",
                projectname,
                "--title",
                "flake.lock: Update",
                "--body",
                "Automatic buildbot update",
                "--head",
                "refs/heads/update_flake_lock",
                "--base",
                "master",
            ],
        )
    )
    return util.BuilderConfig(
        name="nix-update-flake",
        workernames=worker_names,
        factory=factory,
        properties=dict(virtual_builder_name="nix-update-flake"),
    )
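A sketch combining the three nix builders above into a single master config; the worker names, project name, and secret name are placeholders:

# Illustrative master.cfg fragment wiring eval, build, and flake-update builders.
c['builders'] = [
    nix_eval_config(["nix-worker"], github_token_secret="github-token"),
    nix_build_config(["nix-worker"], enable_cachix=True),
    nix_update_flake_config(["nix-worker"], "example-org/example-repo",
                            github_token_secret="github-token"),
]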
Example #18
def ros_testbuild(c, job_name, url, branch, distro, arch, rosdistro, machines, 
                  othermirror, keys, source=True, locks=[]):

    # Create a Job for Source
    
    if source:
        project_name = '_'.join([job_name, rosdistro, 'source_build'])
        c['change_source'].append(
            GitPoller(
                repourl=url,
                name=url,
                branch=branch,
                category=project_name,
                pollAtLaunch=True,
            )
        )
        c['schedulers'].append(
            schedulers.SingleBranchScheduler(
                name=project_name,
                builderNames=[project_name,],
                change_filter=util.ChangeFilter(category=project_name)
            )
        )
        
        c['schedulers'].append(
            schedulers.Nightly(
                name = project_name+'-nightly-master',
                codebases = {url:{'repository':url,'branch':'master'}},
                builderNames = [project_name,],
                hour=3,
                minute=0,
            )
        )
        
        c['schedulers'].append(
            schedulers.Nightly(
                name = project_name+'-nightly-develop',
                codebases = {url:{'repository':url,'branch':'develop'}},
                builderNames = [project_name,],
                hour=5,
                minute=0,
            )
        )
        
        c['schedulers'].append(
            schedulers.ForceScheduler(
                name=project_name+'-force',
                codebases = [util.CodebaseParameter("", 
                        branch=util.ChoiceStringParameter(
                            name="branch",
                            choices=["master", "devel"],
                            default="master"),
                        repository=util.FixedParameter(name="repository", default=url),
                        )],
                builderNames=[project_name,],
            )
        )
    else:
        r_owner, r_name = (url.split(':')[1])[:-4].split('/')
        project_name = '_'.join([job_name, rosdistro, 'pr_build'])
        c['change_source'].append(
            GitPRPoller(
                owner=r_owner,
                repo=r_name,
                category=project_name,
                branches=[branch],
                pollInterval=10*60,
                pollAtLaunch=True,
                token=util.Secret("OathToken"),
                repository_type='ssh'
            )
        )

        c['schedulers'].append(
            schedulers.SingleBranchScheduler(
                name=project_name,
                builderNames=[project_name,],
                change_filter=util.ChangeFilter(category=project_name)
            )
        )
        
    # Directory which will be bind-mounted
    binddir = '/tmp/'+project_name
    dockerworkdir = '/tmp/test/'


    f = BuildFactory()
    # Remove any old crud in build/src folder
    f.addStep(
        ShellCommand(
            name='rm src',
            command=['rm', '-rf', 'build/src'],
            hideStepIf=success,
            workdir=Interpolate('%(prop:builddir)s')
        )
    )
    # Check out repository (to /build/src)
    f.addStep(
        Git(
            repourl=util.Property('repository', default=url),
            branch=util.Property('branch', default=branch),
            alwaysUseLatest=True,
            mode='full',
            workdir=Interpolate('%(prop:builddir)s/build/src')
        )
    )

    # Download testbuild_docker.py script from master
    f.addStep(
        FileDownload(
            name=job_name+'-grab-script',
            mastersrc='scripts/testbuild_docker.py',
            workerdest=('testbuild_docker.py'),
            hideStepIf=success
        )
    )
    # Download Dockerfile_test script from master
    f.addStep(
        FileDownload(
            name=job_name+'-grab-script',
            mastersrc='docker_components/Dockerfile_test',
            workerdest=('Dockerfile_test'),
            hideStepIf=success
        )
    )
    # Download docker-compose-test.yaml from master
    f.addStep(
        FileDownload(
            name=job_name+'-grab-script',
            mastersrc='docker_components/docker-compose-test.yaml',
            workerdest=('docker-compose-test.yaml'),
            hideStepIf=success
        )
    )

    f.addStep(
        FileDownload(
            name=job_name+'-grab-script',
            mastersrc='docker_components/rosdep_private.yaml',
            workerdest=('rosdep_private.yaml'),
            hideStepIf=success
        )
    )

    f.addStep(
        FileDownload(
            name=job_name+'-grab-script',
            mastersrc='scripts/docker-container.py',
            workerdest=('docker-container.py'),
            hideStepIf=success
        )
    )

    # create docker work environment
    f.addStep(
        ShellCommand(
            command=['python', 'docker-container.py', job_name],
            hideStepIf=success,
            workdir=Interpolate('%(prop:builddir)s/build/')
        )
    )

    # Make and run tests in a docker container
    f.addStep(
        ShellCommand(
            name=job_name+'-build',
            command=['docker', 'run',
                     '-v', 'ros-buildbot-docker_deb_repository:/home/package',
                     '--name=' + project_name,
                     'scalable-env:' + job_name,
                     'python', '/tmp/build/testbuild_docker.py', binddir,
                     rosdistro],
            descriptionDone=['make and test', job_name]
        )
    )

    f.addStep(
        ShellCommand(
            name=job_name+'-copytestresults',
            command=['docker', 'cp', project_name + ':' + binddir + '/testresults',
                     'testresults'],
            logfiles={'tests': 'testresults'},
            descriptionDone=['testresults', job_name]
        )
    )

    f.addStep(
        ShellCommand(
            name=job_name+'-rm_container',
            command=['docker', 'rm', project_name],
            descriptionDone=['remove docker container', job_name]
        )
    )

    f.addStep(
        ShellCommand(
            name=job_name+'-rm_image',
            command=['docker', 'image', 'rm', 'scalable-env:'+job_name],
            descriptionDone=['remove docker image', job_name]
        )
    )

    c['builders'].append(
        BuilderConfig(
            name=project_name,
            workernames=machines,
            factory=f,
            locks=locks
        )
    )
    # return the name of the job created
    return project_name
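A usage sketch for the function above; all argument values are hypothetical and c is the usual BuildmasterConfig dictionary:

# Illustrative: register a nightly/force source build for one repository.
project = ros_testbuild(
    c,
    job_name='my_pkg',
    url='git@github.com:example/my_pkg.git',
    branch='master',
    distro='focal',
    arch='amd64',
    rosdistro='noetic',
    machines=['docker-worker-1'],
    othermirror='',
    keys=[],
    source=True)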