def trigger(self, job_or_jobs):
    """Adds a build step which triggers execution of another job."""
    if type(job_or_jobs) is list:
        self.add_step(Trigger(
            schedulerNames=[scheduler_name(j, 'trigger') for j in job_or_jobs],
            waitForFinish=True))
    else:
        self.add_step(Trigger(
            schedulerNames=[scheduler_name(job_or_jobs, 'trigger')],
            waitForFinish=True))
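The helper above assumes a scheduler_name(job, 'trigger') function that maps each job to the name of a Triggerable scheduler configured on the master; a Trigger step only fires schedulers whose names match exactly. A minimal sketch of that counterpart configuration, where the naming convention and builder names are assumptions rather than part of the example:

from buildbot.plugins import schedulers, steps, util

c = BuildmasterConfig = {'schedulers': [], 'builders': []}

# Hypothetical convention behind scheduler_name(job, 'trigger'):
# every job '<job>' gets a Triggerable scheduler named '<job>-trigger'.
c['schedulers'].append(
    schedulers.Triggerable(name='unit-tests-trigger',
                           builderNames=['unit-tests']))

# trigger('unit-tests') above would then add a step equivalent to this,
# blocking until the triggered build finishes (waitForFinish=True).
f = util.BuildFactory()
f.addStep(steps.Trigger(schedulerNames=['unit-tests-trigger'],
                        waitForFinish=True))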
Example #2
def make_dolphin_debian_build(mode="normal"):
    f = BuildFactory()

    mode = mode.split(",")

    debug = "debug" in mode
    pr = "pr" in mode
    fifoci_golden = "fifoci_golden" in mode

    f.addStep(
        GitNoBranch(repourl="https://github.com/dolphin-emu/dolphin.git",
                    progress=True,
                    mode="incremental"))

    f.addStep(
        ShellCommand(command=["mkdir", "-p", "build"],
                     logEnviron=False,
                     description="mkbuilddir",
                     descriptionDone="mkbuilddir"))

    cmake_cmd = ["cmake", "..", "-GNinja"]
    if debug:
        cmake_cmd.append("-DFASTLOG=ON")
    cmake_cmd.append("-DDISTRIBUTOR=dolphin-emu.org")
    f.addStep(
        ShellCommand(command=cmake_cmd,
                     workdir="build/build",
                     description="configuring",
                     descriptionDone="configure",
                     haltOnFailure=True))

    f.addStep(
        Compile(command=["ninja"],
                workdir="build/build",
                description="building",
                descriptionDone="build",
                haltOnFailure=True))

    f.addStep(
        Test(command=["ninja", "unittests"],
             workdir="build/build",
             description="testing",
             descriptionDone="test",
             haltOnFailure=True))

    if fifoci_golden:
        if pr:
            f.addStep(
                Trigger(schedulerNames=["pr-fifoci-lin"],
                        copy_properties=[
                            "pr_id", "repo", "headrev", "branchname",
                            "shortrev"
                        ]))
        else:
            f.addStep(
                TriggerIfBranch(schedulerNames=["fifoci-lin"],
                                branchList=["master"],
                                copy_properties=["shortrev"]))
    return f
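The mode argument is a comma-separated flag string parsed at the top of the factory; a brief usage sketch grounded only in the code above (builder wiring omitted):

# 'debug' adds -DFASTLOG=ON to the CMake invocation; 'pr' together with
# 'fifoci_golden' selects the pull-request FifoCI trigger ('pr-fifoci-lin')
# instead of the master-branch-gated one ('fifoci-lin').
pr_factory = make_dolphin_debian_build(mode='pr,fifoci_golden')
debug_factory = make_dolphin_debian_build(mode='debug')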
Example #3
def test_get_last_step_build_requests_return_brids():
    step = steps.ReduceTriggerProperties(reducefn=lambda: None)
    trigger = Trigger(waitForFinish=True, schedulerNames=["NA"])
    trigger.brids = object()

    class FakeBuild:
        executedSteps = [trigger, step]

    step.build = FakeBuild()

    assert step.get_last_step_build_requests() is trigger.brids
Example #4
def build_triggerer(c, distro, arch, rosdistro, machines, ordered_repos):
    f = BuildFactory()
    for repos in ordered_repos:
        f.addStep(
            Trigger(schedulerNames=[
                t.replace('_', '-') + '-' + rosdistro + '-' + distro + '-' +
                arch + '-debtrigger' for t in repos
            ],
                    waitForFinish=True,
                    alwaysRun=True))
    # Add to builders
    c['builders'].append(
        BuilderConfig(name='build_triggerer' + '_' + rosdistro + '_' + distro +
                      '_' + arch,
                      slavenames=machines,
                      factory=f))
    return 'build_triggerer' + '_' + rosdistro + '_' + distro + '_' + arch
Example #5
    def _add_step_sequential_group(self, step):
        """
        Run all builders from group one after another.
        """
        set_properties = step.get('set_properties', {})
        copy_properties = step.get('copy_properties', [])
        self._update_github_status(step, set_properties)

        target_group = step['target']
        for target in self._project.getGroupMembersBuilderNames(target_group):
            step = Trigger(
                schedulerNames=[target],
                waitForFinish=True,
                updateSourceStamp=True,
                set_properties=set_properties,
                copy_properties=copy_properties,
            )
            self.addStep(step)
    def _add_step_parallel_group(self, step):
        """
        Run all builders from group in parallel.
        """
        set_properties = step.get('set_properties', {})
        copy_properties = step.get('copy_properties', [])
        self._update_github_status(step, set_properties)

        target_group = step['target']
        targets = self._project.getGroupMembersBuilderNames(target_group)
        self.addStep(
            Trigger(
                schedulerNames=targets,
                waitForFinish=True,
                updateSourceStamp=True,
                set_properties=set_properties,
                copy_properties=copy_properties,
                haltOnFailure=True,
                flunkOnFailure=True,
            ))
    def __init__(self, target_builder_names, steps):
        super(ParallelFactory, self).__init__()

        copy_properties = ['test']
        for step in steps:
            name = step.get('name', None)
            if not name:
                continue
            optional = step.get('optional', False)
            if optional:
                copy_properties.append('force_' + name)

        self.addStep(
            Trigger(
                schedulerNames=target_builder_names,
                waitForFinish=True,
                updateSourceStamp=True,
                set_properties={},
                copy_properties=copy_properties,
                haltOnFailure=True,
                flunkOnFailure=True,
            ))
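A usage sketch for this factory (builder and step names are illustrative): steps marked optional contribute a 'force_<name>' entry to copy_properties, so the triggered builds can see whether the optional step was explicitly requested.

factory = ParallelFactory(
    target_builder_names=['lint-builder', 'docs-builder'],
    steps=[
        {'name': 'lint'},
        {'name': 'docs', 'optional': True},  # -> copies 'force_docs'
    ])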
Example #8
builders.append(MyBuilderConfig(
    name = "System_Update",
    factory = system_update_factory
))

# JaCoCo

# TODO: IBM JDK, different Maven versions
jacoco_its_factory = BuildFactory(steps = [
    SVN(svnurl = "http://eclemma.svn.sourceforge.net/svnroot/eclemma/jacoco/trunk"),
    Maven(description = "jdk 1.5", command = "mvn -V -e --file org.jacoco.build/pom.xml clean install -Djdk.version=1.5"),
    Maven(description = "jdk 1.6", command = "mvn -V -e --file org.jacoco.build/pom.xml clean install -Djdk.version=1.6"),
    Maven(description = "jdk 1.7", command = "mvn -V -e --file org.jacoco.build/pom.xml clean install -Djdk.version=1.7"),
    TreeSize(),
    Trigger(schedulerNames = ['JaCoCo_Deploy'])
])

# TODO: site at SourceForge
jacoco_deploy_factory = BuildFactory(steps = [
    SVN(svnurl = "http://eclemma.svn.sourceforge.net/svnroot/eclemma/jacoco/trunk"),
    Maven(description = "deploy", command = "mvn -V -e --file org.jacoco.build/pom.xml clean deploy -Djdk.version=1.5"),
    ShellCommand(command = "scp org.jacoco.doc/target/jacoco-*.zip mandrikov,[email protected]:/home/frs/project/e/ec/eclemma/07_JaCoCo/trunk"),
    TreeSize()
])

builders.append(MyBuilderConfig(
    name = "JaCoCo_ITs_Linux",
    factory = jacoco_its_factory
))
Example #9
def makeHomebrewRecipeCreationFactory():
    """Create the Homebrew recipe from a source distribution.

    This is separate from the recipe testing, to allow it to be done on a
    non-Mac platform.  Once complete, this triggers the Mac testing.
    """
    factory = getFlockerFactory(python="python2.7")
    factory.addSteps(installDependencies())
    factory.addSteps(check_version())

    # Create suitable names for files hosted on Buildbot master.

    sdist_file = Interpolate('Flocker-%(prop:version)s.tar.gz')
    sdist_path = resultPath('python', discriminator=sdist_file)
    sdist_url = resultURL('python', discriminator=sdist_file, isAbsolute=True)

    recipe_file = Interpolate('Flocker%(kw:revision)s.rb',
                              revision=flockerRevision)
    recipe_path = resultPath('homebrew', discriminator=recipe_file)
    recipe_url = resultURL('homebrew', discriminator=recipe_file)

    # Build source distribution
    factory.addStep(
        ShellCommand(name='build-sdist',
                     description=["building", "sdist"],
                     descriptionDone=["build", "sdist"],
                     command=[
                         virtualenvBinary('python'),
                         "setup.py",
                         "sdist",
                     ],
                     haltOnFailure=True))

    # Upload source distribution to master
    factory.addStep(
        FileUpload(
            name='upload-sdist',
            slavesrc=Interpolate('dist/Flocker-%(prop:version)s.tar.gz'),
            masterdest=sdist_path,
            url=sdist_url,
        ))

    # Build Homebrew recipe from source distribution URL
    factory.addStep(
        ShellCommand(
            name='make-homebrew-recipe',
            description=["building", "recipe"],
            descriptionDone=["build", "recipe"],
            command=[
                virtualenvBinary('python'),
                "-m",
                "admin.homebrew",
                # We use the Git commit SHA for the version here, since
                # admin.homebrew doesn't handle the version generated by
                # arbitrary commits.
                "--flocker-version",
                flockerRevision,
                "--sdist",
                sdist_url,
                "--output-file",
                recipe_file
            ],
            haltOnFailure=True))

    # Upload new .rb file to BuildBot master
    factory.addStep(
        FileUpload(
            name='upload-homebrew-recipe',
            slavesrc=recipe_file,
            masterdest=recipe_path,
            url=recipe_url,
        ))

    # Trigger the homebrew-test build
    factory.addStep(
        Trigger(
            name='trigger/created-homebrew',
            schedulerNames=['trigger/created-homebrew'],
            set_properties={
                # lint_revision is the commit that was merged against,
                # if we merged forward, so have the triggered build
                # merge against it as well.
                'merge_target': Property('lint_revision')
            },
            updateSourceStamp=True,
            waitForFinish=False,
        ))

    return factory
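On the receiving side, a property forwarded through set_properties becomes an ordinary build property of the triggered build. A minimal sketch of a downstream factory consuming merge_target; the step and command here are assumptions, not part of the Flocker configuration:

from buildbot.plugins import steps, util

homebrew_test = util.BuildFactory()
homebrew_test.addStep(
    steps.ShellCommand(
        name='merge-against-target',
        # merge_target was set by the Trigger step above.
        command=['git', 'merge', util.Interpolate('%(prop:merge_target)s')]))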
Example #10
def ros_debbuild(c,
                 job_name,
                 packages,
                 url,
                 distro,
                 arch,
                 rosdistro,
                 version,
                 machines,
                 othermirror,
                 keys,
                 trigger_pkgs=None,
                 locks=[]):
    gbp_args = [
        '-uc', '-us', '--git-ignore-branch', '--git-ignore-new',
        '--git-verbose', '--git-dist=' + distro, '--git-arch=' + arch
    ]
    f = BuildFactory()
    # Remove the build directory.
    f.addStep(
        RemoveDirectory(
            name=job_name + '-clean',
            dir=Interpolate('%(prop:builddir)s'),
            hideStepIf=success,
        ))

    # Check out the repository master branch, since releases are tagged and not branched
    f.addStep(
        Git(
            repourl=url,
            branch='master',
            alwaysUseLatest=True,  # this avoids broken builds when schedulers send wrong tag/rev
            mode='full'  # clean out old versions
        ))

    # Need to build each package in order
    for package in packages:
        debian_pkg = 'ros-' + rosdistro + '-' + package.replace(
            '_', '-')  # debian package name (ros-groovy-foo)
        branch_name = 'debian/' + debian_pkg + '_%(prop:release_version)s_' + distro  # release branch from bloom debian/ros-groovy-foo_0.0.1_kinetic
        deb_name = debian_pkg + '_%(prop:release_version)s' + distro
        final_name = debian_pkg + '_%(prop:release_version)s' + distro + '_' + arch + '.deb'
        #        final_name = debian_pkg+'_%(prop:release_version)s-%(prop:datestamp)s'+distro+'_'+arch+'.deb'
        # Check out the proper tag. Use --force to delete changes from previous deb stamping
        f.addStep(
            ShellCommand(haltOnFailure=True,
                         name=package + '-checkout',
                         command=[
                             'git', 'checkout',
                             Interpolate(branch_name), '--force'
                         ],
                         hideStepIf=success))
        # Download script for building the source deb
        f.addStep(
            FileDownload(name=job_name + '-grab-docker-compose-debian',
                         mastersrc='docker_components/docker-compose-deb.yaml',
                         workerdest=Interpolate(
                             '%(prop:builddir)s/docker-compose-deb.yaml'),
                         mode=0o755,
                         hideStepIf=success))

        f.addStep(
            FileDownload(
                name=job_name + '-grab-dockerfile-debian',
                mastersrc='docker_components/Dockerfile_deb',
                workerdest=Interpolate('%(prop:builddir)s/Dockerfile_deb'),
                mode=0o755,
                hideStepIf=success))

        f.addStep(
            FileDownload(
                name=job_name + '-grab-build-deb-shell',
                mastersrc='shell/builddebian.sh',
                workerdest=Interpolate('%(prop:builddir)s/builddebian.sh'),
                mode=0o755,
                hideStepIf=success))

        f.addStep(
            FileDownload(name=job_name + '-grab-rosdep-private',
                         mastersrc='docker_components/rosdep_private.yaml',
                         workerdest=Interpolate(
                             '%(prop:builddir)s/rosdep_private.yaml'),
                         mode=0o755,
                         hideStepIf=success))

        f.addStep(
            FileDownload(name=job_name + '-grab-unique-docker-deb',
                         mastersrc='scripts/unique_docker_deb.py',
                         workerdest=Interpolate(
                             '%(prop:builddir)s/unique_docker_deb.py'),
                         mode=0o755,
                         hideStepIf=success))

        # reedit docker-compose-deb.yaml
        f.addStep(
            ShellCommand(
                haltOnFailure=True,
                name=package + '-reedit-docker-compose',
                command=[
                    'python', 'unique_docker_deb.py',
                    Interpolate('%(prop:builddir)s/docker-compose-deb.yaml'),
                    Interpolate(package)
                ],
                workdir=Interpolate('%(prop:builddir)s'),
                descriptionDone=['reedit docker-compose', package]))

        # Build docker image for creating debian
        f.addStep(
            ShellCommand(
                #haltOnFailure = True,
                name=package + '-buildsource',
                command=[
                    'docker-compose', '-f',
                    Interpolate('%(prop:builddir)s/docker-compose-deb.yaml'),
                    'build'
                ],
                workdir=Interpolate('%(prop:builddir)s'),
                descriptionDone=['sourcedeb', package]))

        # build debian package
        f.addStep(
            ShellCommand(
                #haltOnFailure=True,
                name=job_name + '-build',
                command=[
                    'docker', 'run', '-v',
                    'ros-buildbot-docker_deb_repository:/home/package',
                    '--name',
                    Interpolate(package),
                    Interpolate('scalable-deb:' + package), 'bash',
                    '/usr/local/sbin/builddeb.sh'
                ],
                descriptionDone=['build debian package', job_name]))

        # update to local repository
        f.addStep(
            ShellCommand(name=job_name + '-upload',
                         command=[
                             'docker', 'exec', '-e',
                             Interpolate('package=' + debian_pkg + '*'),
                             'local-repository', 'bash',
                             '/tmp/debian-upload.sh'
                         ],
                         descriptionDone=['release package', job_name]))

        # rm container
        f.addStep(
            ShellCommand(name=job_name + '-rm_container',
                         command=['docker', 'rm',
                                  Interpolate(package)],
                         descriptionDone=['remove docker container',
                                          job_name]))

        # rm image
        f.addStep(
            ShellCommand(name=job_name + '-rm_image',
                         command=[
                             'docker', 'image', 'rm',
                             Interpolate('scalable-deb:' + package)
                         ],
                         descriptionDone=['remove docker image', job_name]))

    # Trigger if needed
    if trigger_pkgs != None:
        f.addStep(
            Trigger(schedulerNames=[
                t.replace('_', '-') + '-' + rosdistro + '-' + distro + '-' +
                arch + '-debtrigger' for t in trigger_pkgs
            ],
                    waitForFinish=False,
                    alwaysRun=True))
    # Create trigger
    c['schedulers'].append(
        triggerable.Triggerable(
            name=job_name.replace('_', '-') + '-' + rosdistro + '-' + distro +
            '-' + arch + '-debtrigger',
            builderNames=[
                job_name + '_' + rosdistro + '_' + distro + '_' + arch +
                '_debbuild',
            ]))
    # Add to builders
    c['builders'].append(
        BuilderConfig(name=job_name + '_' + rosdistro + '_' + distro + '_' +
                      arch + '_debbuild',
                      properties={'release_version': version},
                      workernames=machines,
                      factory=f,
                      locks=locks))
    # return name of builder created
    return job_name + '_' + rosdistro + '_' + distro + '_' + arch + '_debbuild'
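These helpers are designed to compose: each ros_debbuild call registers a '<job>-<rosdistro>-<distro>-<arch>-debtrigger' Triggerable, and build_triggerer (Example #4) fires those schedulers group by group. A wiring sketch in which every repository, mirror, and worker name is a placeholder:

othermirror = 'deb [trusted=yes] http://localhost/ubuntu focal main'  # placeholder
keys = []  # placeholder

ros_debbuild(c, 'foo_msgs', ['foo_msgs'],
             'https://example.com/foo_msgs-release.git',
             'focal', 'amd64', 'noetic', '1.0.0',
             ['deb-worker'], othermirror, keys)

# Fires the 'foo-msgs-noetic-focal-amd64-debtrigger' scheduler registered above.
build_triggerer(c, 'focal', 'amd64', 'noetic', ['deb-worker'],
                ordered_repos=[['foo_msgs']])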
Example #11
def make_dolphin_win_build(build_type, mode="normal"):
    f = BuildFactory()

    mode = mode.split(",")
    normal = "normal" in mode
    debug = "debug" in mode
    wip = "wip" in mode
    pr = "pr" in mode
    fifoci_golden = "fifoci_golden" in mode

    f.addStep(
        GitNoBranch(repourl="https://github.com/dolphin-emu/dolphin.git",
                    progress=True,
                    mode="incremental"))
    f.addStep(RemoveDirectory(dir="build/Binary"))

    branch = WithProperties("%s", "branchname")
    env = {"DOLPHIN_BRANCH": branch, "DOLPHIN_DISTRIBUTOR": "dolphin-emu.org"}
    if normal:
        env["DOLPHIN_DEFAULT_UPDATE_TRACK"] = "beta"
    f.addStep(
        Compile(command=[
            "msbuild.exe", "/v:m", "/p:Platform=x64",
            "/p:Configuration=%s" % build_type, "dolphin-emu.sln"
        ],
                env=env,
                workdir="build/Source",
                description="building",
                descriptionDone="build",
                haltOnFailure=True))
    f.addStep(
        Test(command=[
            "msbuild.exe", "/v:m", "/p:Platform=x64",
            "/p:Configuration=%s" % build_type, "/p:RunUnitTests=true",
            "dolphin-emu.sln"
        ],
             env=env,
             workdir="build/Source",
             description="testing",
             descriptionDone="test",
             haltOnFailure=True))

    dolphin_name = "DolphinD" if debug else "Dolphin"

    f.addStep(
        ShellCommand(command=[
            "C:\\buildbot\\signbin.bat",
            "Binary\\x64\\%s.exe" % dolphin_name
        ],
                     logEnviron=False,
                     description="signing binary",
                     descriptionDone="sign binary"))

    f.addStep(
        ShellCommand(
            command=["xcopy", "Binary\\x64", "Dolphin-x64", "/S", "/I", "/Y"],
            logEnviron=False,
            description="copying output",
            descriptionDone="output copy"))

    out_filename = WithProperties("Dolphin-%s-%s-x64.7z", "branchname",
                                  "shortrev")
    f.addStep(
        ShellCommand(command=["7z", "a", "-r", out_filename, "Dolphin-x64"],
                     logEnviron=False,
                     description="compressing",
                     descriptionDone="compression"))

    if debug:
        fn_arch = "dbg-x64"
    else:
        fn_arch = "x64"

    if "normal" in mode:
        master_filename = WithProperties(
            "/srv/http/dl/builds/dolphin-%%s-%%s-%s.7z" % fn_arch,
            "branchname", "shortrev")
        url = WithProperties(
            "https://dl.dolphin-emu.org/builds/dolphin-%%s-%%s-%s.7z" %
            fn_arch, "branchname", "shortrev")
    elif wip:
        master_filename = WithProperties(
            "/srv/http/dl/wips/%%s-dolphin-%%s-%%s-%s.7z" % fn_arch, "author",
            "branchname", "shortrev")
        url = WithProperties(
            "https://dl.dolphin-emu.org/wips/%%s-dolphin-%%s-%%s-%s.7z" %
            fn_arch, "author", "branchname", "shortrev")
    elif pr:
        master_filename = WithProperties(
            "/srv/http/dl/prs/%%s-dolphin-latest-%s.7z" % fn_arch,
            "branchname")
        url = WithProperties(
            "https://dl.dolphin-emu.org/prs/%%s-dolphin-latest-%s.7z" %
            fn_arch, "branchname")
    else:
        master_filename = url = ""

    f.addStep(SetProperty(property="build_url", value=url))

    if master_filename and url:
        f.addStep(
            FileUpload(workersrc=out_filename,
                       masterdest=master_filename,
                       url=url,
                       keepstamp=True,
                       mode=0o644))

    if fifoci_golden:
        if pr:
            f.addStep(
                Trigger(schedulerNames=["pr-fifoci-win"],
                        copy_properties=[
                            "pr_id", "headrev", "branchname", "shortrev",
                            "build_url"
                        ]))
        else:
            f.addStep(
                TriggerIfBranch(schedulerNames=["fifoci-win"],
                                branchList=["master"],
                                copy_properties=["shortrev", "build_url"]))

    if "normal" in mode and "debug" not in mode:
        f.addStep(
            MasterShellCommand(
                command=
                "/home/buildbot/venv/bin/python /home/buildbot/bin/send_build.py",
                env={
                    "BRANCH": WithProperties("%s", "branchname"),
                    "SHORTREV": WithProperties("%s", "shortrev"),
                    "HASH": WithProperties("%s", "revision"),
                    "AUTHOR": WithProperties("%s", "author"),
                    "DESCRIPTION": WithProperties("%s", "description"),
                    "TARGET_SYSTEM": "Windows x64",
                    "USER_OS_MATCHER": "win",
                    "BUILD_URL": url,
                },
                description="notifying website",
                descriptionDone="website notice"))

        f.addStep(
            MasterShellCommand(command=[
                "/home/buildbot/venv/bin/python",
                "/home/buildbot/bin/make_manifest.py", "--input",
                master_filename, "--version_hash",
                WithProperties("%s", "revision"), "--output-manifest-store",
                "/data/nas/update/manifest", "--output-content-store",
                "/data/nas/update/content", "--signing-key",
                "/home/buildbot/update.signing.key"
            ],
                               description="writing update manifest",
                               descriptionDone="update manifest write"))

    f.addStep(
        ShellCommand(command=["del", "/F", "/S", "/Q", out_filename],
                     logEnviron=False,
                     description="cleaning up files",
                     descriptionDone="cleanup files"))

    f.addStep(
        ShellCommand(command=["rmdir", "/S", "/Q", "Dolphin-x64"],
                     logEnviron=False,
                     description="cleaning up dirs",
                     descriptionDone="cleanup dirs"))

    return f
Example #12
    def getGlobalBuilders(self):
        ret = list()

        f = factory.BuildFactory()
        f.useProgress = False
        f.addStep(
            Git(
                mode="incremental",
                workdir=".",
                repourl=self.giturl,
                branch=self.branch,
                locks=[self.lock_src.access("exclusive")],
            ))
        if len(self.PATCHES):
            f.addStep(
                steps.Patch(
                    patches=self.PATCHES,
                    workdir=".",
                    locks=[self.lock_src.access("exclusive")],
                ))
        if self.nightly is not None:
            # Trigger nightly scheduler to let it know the source stamp
            f.addStep(
                Trigger(name="Updating source stamp",
                        hideStepIf=(lambda r, s: r == results.SUCCESS),
                        schedulerNames=["nightly-{0}".format(self.name)]))
        f.addStep(
            Trigger(name="Building all platforms",
                    schedulerNames=[self.name],
                    copy_properties=['got_revision', 'clean', 'package'],
                    updateSourceStamp=True,
                    waitForFinish=True))

        ret.append(
            BuilderConfig(
                name="fetch-{0}".format(self.name),
                # This is specific
                workername='fetcher',
                workerbuilddir="/data/src/{0}".format(self.name),
                factory=f,
                tags=["fetch"],
            ))

        if self.nightly is not None:
            f = factory.BuildFactory()
            f.addStep(
                Trigger(schedulerNames=[self.name],
                        copy_properties=['got_revision'],
                        updateSourceStamp=True,
                        waitForFinish=True,
                        set_properties={
                            'clean': True,
                            'package': True
                        }))

            ret.append(
                BuilderConfig(
                    name="nightly-{0}".format(self.name),
                    # TODO: Fix this
                    workername='fetcher',
                    workerbuilddir="/data/triggers/nightly-{0}".format(
                        self.name),
                    factory=f,
                    tags=["nightly"],
                    locks=[self.lock_src.access("counting")]))

        return ret
Example #13
def ros_docbuild(c, job_name, url, branch, distro, arch, rosdistro, machines, othermirror, keys, trigger_pkgs = None):

    # Directory which will be bind-mounted
    binddir = '/tmp/'+job_name+'_'+rosdistro+'_docbuild'

    f = BuildFactory()
    # Remove any old crud in /tmp folder
    f.addStep(
        ShellCommand(
            command = ['rm', '-rf', binddir],
            hideStepIf = success
        )
    )
    # Check out repository (to /tmp)
    f.addStep(
        Git(
            repourl = url,
            branch = branch,
            alwaysUseLatest = True,
            mode = 'full',
            workdir = binddir+'/src/'+job_name+'/'
        )
    )
    # Download testbuild.py script from master
    f.addStep(
        FileDownload(
            name = job_name+'-grab-script',
            mastersrc = 'scripts/docbuild.py',
            slavedest = Interpolate('%(prop:workdir)s/docbuild.py'),
            hideStepIf = success
        )
    )
    # Update the cowbuilder
    f.addStep(
        ShellCommand(
            command = ['cowbuilder-update.py', distro, arch] + keys,
            hideStepIf = success
        )
    )
    # Build docs in a cowbuilder
    f.addStep(
        ShellCommand(
            haltOnFailure = True,
            name = job_name+'-docbuild',
            command = ['cowbuilder', '--execute', Interpolate('%(prop:workdir)s/docbuild.py'),
                       '--distribution', distro, '--architecture', arch,
                       '--bindmounts', binddir,
                       '--basepath', '/var/cache/pbuilder/base-'+distro+'-'+arch+'.cow',
                       '--override-config', '--othermirror', othermirror,
                       '--', binddir, rosdistro],
            env = {'DIST': distro},
            descriptionDone = ['built docs', ]
        )
    )
    # Upload docs to master
    f.addStep(
        DirectoryUpload(
            name = job_name+'-upload',
            slavesrc = binddir+'/docs',
            masterdest = 'docs/' + rosdistro,
            hideStepIf = success
        )
    )
    # Trigger if needed
    if trigger_pkgs != None:
        f.addStep(
            Trigger(
                schedulerNames = [t.replace('_','-')+'-'+rosdistro+'-doctrigger' for t in trigger_pkgs],
                waitForFinish = False,
                alwaysRun=True
            )
        )
    # Create trigger
    c['schedulers'].append(
        triggerable.Triggerable(
            name = job_name.replace('_','-')+'-'+rosdistro+'-doctrigger',
            builderNames = [job_name+'_'+rosdistro+'_docbuild',]
        )
    )
    # Add builder config
    c['builders'].append(
        BuilderConfig(
            name = job_name+'_'+rosdistro+'_docbuild',
            slavenames = machines,
            factory = f
        )
    )
    # return the name of the job created
    return job_name+'_'+rosdistro+'_docbuild'
Example #14
def trigger(**kwargs):
    waitForFinish = kwargs.pop("waitForFinish", True)
    return Trigger(waitForFinish=waitForFinish, **kwargs)
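A usage sketch for this thin wrapper: it behaves exactly like Trigger but defaults waitForFinish to True unless the caller overrides it (the scheduler name is illustrative).

# Blocks until the triggered builds finish (the wrapper's default).
step_wait = trigger(schedulerNames=['package-builders'])

# An explicit value is popped from kwargs and wins over the default.
step_nowait = trigger(schedulerNames=['package-builders'], waitForFinish=False)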
Example #15
                        logEnviron=False)
step_configure = Configure(command=["./configure"], logEnviron=False)
step_configure_64 = Configure(command=["./configure", "--enable-64bit"],
                              logEnviron=False)
step_compile_all = Compile(command=["make", "clean", "all"], logEnviron=False)
step_compile_txt = Compile(command=["make", "clean", "txt"],
                           description="compiling txt",
                           descriptionDone="compile txt",
                           logEnviron=False)
step_compile_sql = Compile(command=["make", "clean", "sql"],
                           description="compiling sql",
                           descriptionDone="compile sql",
                           logEnviron=False)
step_compile_VS10 = Compile(
    command=["devenv.com", "eAthena-10.sln", "/REBUILD"], logEnviron=False)
step_trigger_tests = Trigger(
    waitForFinish=True, schedulerNames=["test-Ubuntu-12.04-x64-scheduler"])
step_test_txt = Test(command=[
    "gdb", "map-server", "-ex=run --run-once", "-ex=bt full", "-ex=kill",
    "-ex=quit"
],
                     warningPattern=r"\[(Error|Warning)\]",
                     description="testing txt",
                     descriptionDone="test txt",
                     logEnviron=False)
step_test_sql = Test(command=[
    "gdb", "map-server_sql", "-ex=run --run-once", "-ex=bt full", "-ex=kill",
    "-ex=quit"
],
                     warningPattern=r"\[(Error|Warning)\]",
                     description="testing sql",
                     descriptionDone="test sql",
Example #16
def ros_debbuild(c,
                 job_name,
                 packages,
                 url,
                 distro,
                 arch,
                 rosdistro,
                 version,
                 machines,
                 othermirror,
                 keys,
                 trigger_pkgs=None):
    gbp_args = [
        '-uc', '-us', '--git-ignore-branch', '--git-ignore-new',
        '--git-verbose', '--git-dist=' + distro, '--git-arch=' + arch
    ]
    f = BuildFactory()
    # Remove the build directory.
    f.addStep(
        RemoveDirectory(
            name=job_name + '-clean',
            dir=Interpolate('%(prop:workdir)s'),
            hideStepIf=success,
        ))
    # Check out the repository master branch, since releases are tagged and not branched
    f.addStep(
        Git(
            repourl=url,
            branch='master',
            alwaysUseLatest=True,  # this avoids broken builds when schedulers send wrong tag/rev
            mode='full'  # clean out old versions
        ))
    # Update the cowbuilder
    f.addStep(
        ShellCommand(command=['cowbuilder-update.py', distro, arch] + keys,
                     hideStepIf=success))
    # Need to build each package in order
    for package in packages:
        debian_pkg = 'ros-' + rosdistro + '-' + package.replace(
            '_', '-')  # debian package name (ros-groovy-foo)
        branch_name = 'debian/' + debian_pkg + '_%(prop:release_version)s_' + distro  # release branch from bloom
        deb_name = debian_pkg + '_%(prop:release_version)s' + distro
        final_name = debian_pkg + '_%(prop:release_version)s-%(prop:datestamp)s' + distro + '_' + arch + '.deb'
        # Check out the proper tag. Use --force to delete changes from previous deb stamping
        f.addStep(
            ShellCommand(haltOnFailure=True,
                         name=package + '-checkout',
                         command=[
                             'git', 'checkout',
                             Interpolate(branch_name), '--force'
                         ],
                         hideStepIf=success))
        # Build the source deb
        f.addStep(
            ShellCommand(haltOnFailure=True,
                         name=package + '-buildsource',
                         command=['git-buildpackage', '-S'] + gbp_args,
                         descriptionDone=['sourcedeb', package]))
        # Upload sourcedeb to master (currently we are not actually syncing these with a public repo)
        f.addStep(
            FileUpload(
                name=package + '-uploadsource',
                slavesrc=Interpolate('%(prop:workdir)s/' + deb_name + '.dsc'),
                masterdest=Interpolate('sourcedebs/' + deb_name + '.dsc'),
                hideStepIf=success))
        # Stamp the changelog, in a similar fashion to the ROS buildfarm
        f.addStep(
            SetPropertyFromCommand(command="date +%Y%m%d-%H%M-%z",
                                   property="datestamp",
                                   name=package + '-getstamp',
                                   hideStepIf=success))
        f.addStep(
            ShellCommand(
                haltOnFailure=True,
                name=package + '-stampdeb',
                command=[
                    'git-dch', '-a', '--ignore-branch', '--verbose', '-N',
                    Interpolate('%(prop:release_version)s-%(prop:datestamp)s' +
                                distro)
                ],
                descriptionDone=[
                    'stamped changelog',
                    Interpolate('%(prop:release_version)s'),
                    Interpolate('%(prop:datestamp)s')
                ]))
        # download hooks
        f.addStep(
            FileDownload(
                name=package + '-grab-hooks',
                mastersrc='hooks/D05deps',
                slavedest=Interpolate('%(prop:workdir)s/hooks/D05deps'),
                hideStepIf=success,
                mode=0o777  # make this executable for the cowbuilder
            ))
        # build the binary from the git working copy
        f.addStep(
            ShellCommand(
                haltOnFailure=True,
                name=package + '-buildbinary',
                command=[
                    'git-buildpackage', '--git-pbuilder', '--git-export=WC',
                    Interpolate('--git-export-dir=%(prop:workdir)s')
                ] + gbp_args,
                env={
                    'DIST':
                    distro,
                    'GIT_PBUILDER_OPTIONS':
                    Interpolate(
                        '--hookdir %(prop:workdir)s/hooks --override-config'),
                    'OTHERMIRROR':
                    othermirror
                },
                descriptionDone=['binarydeb', package]))
        # Upload binarydeb to master
        f.addStep(
            FileUpload(name=package + '-uploadbinary',
                       slavesrc=Interpolate('%(prop:workdir)s/' + final_name),
                       masterdest=Interpolate('binarydebs/' + final_name),
                       hideStepIf=success))
        # Add the binarydeb using reprepro updater script on master
        f.addStep(
            MasterShellCommand(name=package + 'includedeb',
                               command=[
                                   'reprepro-include.bash', debian_pkg,
                                   Interpolate(final_name), distro, arch
                               ],
                               descriptionDone=['updated in apt', package]))
    # Trigger if needed
    if trigger_pkgs != None:
        f.addStep(
            Trigger(schedulerNames=[
                t.replace('_', '-') + '-' + rosdistro + '-' + distro + '-' +
                arch + '-debtrigger' for t in trigger_pkgs
            ],
                    waitForFinish=False,
                    alwaysRun=True))
    # Create trigger
    c['schedulers'].append(
        triggerable.Triggerable(
            name=job_name.replace('_', '-') + '-' + rosdistro + '-' + distro +
            '-' + arch + '-debtrigger',
            builderNames=[
                job_name + '_' + rosdistro + '_' + distro + '_' + arch +
                '_debbuild',
            ]))
    # Add to builders
    c['builders'].append(
        BuilderConfig(name=job_name + '_' + rosdistro + '_' + distro + '_' +
                      arch + '_debbuild',
                      properties={'release_version': version},
                      slavenames=machines,
                      factory=f))
    # return name of builder created
    return job_name + '_' + rosdistro + '_' + distro + '_' + arch + '_debbuild'
Example #17
def createTarballFactory(gerrit_repo):
    """ Generates a build factory for a tarball generating builder.
    Returns:
        BuildFactory: Build factory with steps for generating tarballs.
    """
    bf = util.BuildFactory()

    # are we building a tag or a patchset?
    bf.addStep(SetProperty(
        property='category',
        value=buildCategory, 
        hideStepIf=hide_except_error))

    # update dependencies
    bf.addStep(ShellCommand(
        command=dependencyCommand,
        decodeRC={0 : SUCCESS, 1 : FAILURE, 2 : WARNINGS, 3 : SKIPPED },
        haltOnFailure=True,
        logEnviron=False,
        doStepIf=do_step_installdeps,
        hideStepIf=hide_if_skipped,
        description=["installing dependencies"],
        descriptionDone=["installed dependencies"]))

    # Pull the patch from Gerrit
    bf.addStep(Gerrit(
        repourl=gerrit_repo,
        workdir="build/lustre",
        mode="full",
        method="fresh",
        retry=[60,60],
        timeout=3600,
        logEnviron=False,
        getDescription=True,
        haltOnFailure=True,
        description=["cloning"],
        descriptionDone=["cloned"]))

    # make tarball
    bf.addStep(ShellCommand(
        command=['sh', './autogen.sh'],
        haltOnFailure=True,
        description=["autogen"],
        descriptionDone=["autogen"],
        workdir="build/lustre"))

    bf.addStep(Configure(
        command=['./configure', '--enable-dist'],
        workdir="build/lustre"))

    bf.addStep(ShellCommand(
        command=['make', 'dist'],
        haltOnFailure=True,
        description=["making dist"],
        descriptionDone=["make dist"],
        workdir="build/lustre"))

    # upload it to the master
    bf.addStep(SetPropertyFromCommand(
        command=['sh', '-c', 'echo *.tar.gz'],
        property='tarball',
        workdir="build/lustre",
        hideStepIf=hide_except_error,
        haltOnFailure=True))

    bf.addStep(FileUpload(
        workdir="build/lustre",
        slavesrc=util.Interpolate("%(prop:tarball)s"),
        masterdest=tarballMasterDest,
        url=tarballUrl))

    # trigger our builders to generate packages
    bf.addStep(Trigger(
        schedulerNames=["package-builders"],
        copy_properties=['tarball', 'category'],
        waitForFinish=False))

    return bf
Example #18
def launchpad_debbuild(c, package, version, binaries, url, distro, arch, machines, othermirror, keys, trigger_names = None):
    f = BuildFactory()
    # Grab the source package
    f.addStep(
        ShellCommand(
            haltOnFailure = True,
            name = package+'-getsourcedeb',
            command = ['dget', '--allow-unauthenticated', url]
        )
    )
    # download hooks
    f.addStep(
        FileDownload(
            name = package+'-grab-hooks',
            mastersrc = 'hooks/D05deps',
            slavedest = Interpolate('%(prop:workdir)s/hooks/D05deps'),
            hideStepIf = success,
            mode = 0o777 # make this executable for the cowbuilder
        )
    )
    # Update the cowbuilder
    f.addStep(
        ShellCommand(
            command = ['cowbuilder-update.py', distro, arch] + keys,
            hideStepIf = success
        )
    )
    # Build it
    f.addStep(
        ShellCommand(
            haltOnFailure = True,
            name = package+'-build',
            command = ['cowbuilder',
                       '--build', package+'_'+version+'.dsc',
                       '--distribution', distro, '--architecture', arch,
                       '--basepath', '/var/cache/pbuilder/base-'+distro+'-'+arch+'.cow',
                       '--buildresult', Interpolate('%(prop:workdir)s'),
                       '--hookdir', Interpolate('%(prop:workdir)s/hooks'),
                       '--othermirror', othermirror,
                       '--override-config'],
            env = {'DIST': distro},
            descriptionDone = ['built binary debs', ]
        )
    )
    # Upload debs
    for deb_arch in binaries.keys():
        for deb_name in binaries[deb_arch]:
            debian_pkg = deb_name+'_'+version+'_'+deb_arch+'.deb'
            f.addStep(
                FileUpload(
                    name = deb_name+'-upload',
                    slavesrc = Interpolate('%(prop:workdir)s/'+debian_pkg),
                    masterdest = Interpolate('binarydebs/'+debian_pkg),
                    hideStepIf = success
                )
            )
            # Add the binarydeb using reprepro updater script on master
            f.addStep(
                MasterShellCommand(
                    name = deb_name+'-include',
                    command = ['reprepro-include.bash', deb_name, Interpolate(debian_pkg), distro, deb_arch],
                    descriptionDone = ['updated in apt', debian_pkg]
                )
            )
    # Trigger if needed
    if trigger_names != None:
        f.addStep( Trigger(schedulerNames = trigger_names, waitForFinish = False) )
    # Add to builders
    c['builders'].append(
        BuilderConfig(
            name = package+'_'+distro+'_'+arch+'_debbuild',
            slavenames = machines,
            factory = f
        )
    )
    # return name of builder created
    return package+'_'+distro+'_'+arch+'_debbuild'
Example #19
        # upload the tarball (to the build master)
        self.addStep(
            FileUpload(slavesrc=WithProperties("%(filename)s"),
                       masterdest=WithProperties("%(filename)s"),
                       mode=0o644,
                       haltOnFailure=True))

        # tell the master to upload the file to sourceforge
        self.addStep(
            Trigger(schedulerNames=["sourceforge-upload"],
                    waitForFinish=True,
                    set_properties={
                        "masterdir": WithProperties("%(masterdir)s"),
                        "target-os": WithProperties("%(target-os)s"),
                        "filename": WithProperties("%(filename)s"),
                        "destname": WithProperties("%(destname)s"),
                        "datestamp": WithProperties("%(datestamp:-)s"),
                        "path": WithProperties("%(path:-)s"),
                        "is_nightly": WithProperties("%(is_nightly:-)s")
                    }))

    def _step_AdditionalProperties(self):
        pass

    def _step_Archive(self):
        command = [
            "tar", "cjf",
            WithProperties("../../%(filename)s"), "--owner", "0", "--group",
            "0", "--checkpoint", "--exclude=.svn", "."
        ]
Example #20
def getPhaseBuilderFactory(config, phase, next_phase, stages):
    from buildbot.steps.transfer import JSONPropertiesDownload
    # Create the build factory.
    f = buildbot.process.factory.BuildFactory()
    f.addStep(buildbot.steps.shell.ShellCommand(
              command=['echo', WithProperties('%(phase_id:-)s')]))
    # construct a new phase_id if phase_id is not already set
    phaseid = WithProperties('%(get_phase_id)s',
                             get_phase_id = determine_phase_id)
    setProperty(f, 'phase_id', phaseid)
    setProperty(f, 'next_phase', next_phase)
    f.addStep(JSONPropertiesDownload(slavedest='build-properties.json'))
    f.addStep(buildbot.steps.shell.SetProperty(
                name = 'get.build.properties',
                command = ['cat', 'build-properties.json'],
                extract_fn = _extract_changelist))
    # Buildbot uses got_revision instead of revision to identify builds.
    # We set it below so that the revision shows up in the html status pages.
    setProperty(f, 'got_revision', WithProperties('%(revision)s'))
    # this generates URLs we can use to link back to the builder which
    # triggered downstream builds
    master_url = set_config_option('Master Options', 'master_url',
                                   'http://localhost')
    this_str = '/'.join([master_url, 'builders', '%(buildername)s', 'builds',
                        '%(buildnumber)s'])
    setProperty(f, 'trigger', WithProperties(this_str))
    # Properties we always copy...
    copy_properties = [ 'phase_id', 'revision', 'got_revision', 'trigger' ]
    # Add the trigger for the next phase.
    changes = WithProperties('%(forward_changes)s',
                             forward_changes = _load_changelist)
    # Add the triggers for each stage...
    for i, (normal, experimental) in enumerate(stages):
        # Add the experimental trigger, if used, but don't wait or fail for it.
        if experimental:
            scheduler = 'phase%d-stage%d-experimental' % (phase['number'], i)
            f.addStep(Trigger(name = 'trigger.%s' % scheduler,
                                   schedulerNames = [scheduler],
                                   waitForFinish = False,
                                   updateSourceStamp = False,
                                   set_properties = {
                                      'triggeredBuilders' : [b['name']
                                                             for b in normal],
                                   },
                                   copy_properties = copy_properties))
        # Add the normal build trigger, if used.
        if normal:
            scheduler = 'phase%d-stage%d' % (phase['number'], i)
            f.addStep(Trigger(name = 'trigger.%s' % scheduler,
                                   schedulerNames = [scheduler],
                                   waitForFinish = True, haltOnFailure = True,
                                   updateSourceStamp = False,
                                   set_properties = {
                                      'triggeredBuilders' : [b['name']
                                                             for b in normal],
                                   },
                                   copy_properties = copy_properties))
    f.addStep(MasterShellCommand(
        name='trigger.next_phase', haltOnFailure = True,
        command = ['./process_changelist.py', next_phase,
                   WithProperties('%(scheduler)s_changes.txt')],
        description = ['Trigger', next_phase],
        descriptionDone = ['Trigger', next_phase]))
    # We have successfully sent the changes to the next phase, so it is now
    # safe to erase the file and 'forget' the changes passed to this phase to
    # date.
    f.addStep(MasterShellCommand(
        name='clear.changelist', haltOnFailure = True,
        command = ['rm', '-fv', WithProperties('%(scheduler)s_changes.txt')],
        description = ['Clear changelist'],
        descriptionDone = ['Clear changelist']))
    return f
Example #21
                descriptionDone = ['updated in apt', package]
            )
        )
        f.addStep(
            ShellCommand(
                name = package+'-clean',
                command = ['rm', '-rf', 'debian/'+debian_pkg],
                hideStepIf = success
            )
        )
    # Trigger if needed
    if trigger_pkgs != None:
        f.addStep(
            Trigger(
                schedulerNames = [t.replace('_','-')+'-'+rosdistro+'-'+distro+'-'+arch+'-debtrigger' for t in trigger_pkgs],
                waitForFinish = False,
                alwaysRun=True
            )
        )
    # Create trigger
    c['schedulers'].append(
        triggerable.Triggerable(
            name = job_name.replace('_','-')+'-'+rosdistro+'-'+distro+'-'+arch+'-debtrigger',
            builderNames = [job_name+'_'+rosdistro+'_'+distro+'_'+arch+'_debbuild',]
        )
    )
    # Add to builders
    c['builders'].append(
        BuilderConfig(
            name = job_name+'_'+rosdistro+'_'+distro+'_'+arch+'_debbuild',
            properties = {'release_version' : version},
Example #22
    class FakeBuild:
        executedSteps = [
            Trigger(waitForFinish=False, schedulerNames=["NA"]), step
        ]
Example #23
def ros_docbuild(c,
                 job_name,
                 url,
                 branch,
                 rosdistro,
                 machines,
                 trigger_pkgs=None):

    # Directory which will be bind-mounted
    binddir = job_name + '_' + rosdistro + '_docbuild'

    f = BuildFactory()
    # Remove any old crud in /tmp folder
    f.addStep(ShellCommand(command=['rm', '-rf', binddir], hideStepIf=success))
    # Check out repository (to /tmp)
    f.addStep(
        Git(repourl=url,
            branch=branch,
            alwaysUseLatest=True,
            mode='full'
            #workdir = binddir+'/src/'+job_name+'/'
            ))
    # Download script from master
    f.addStep(
        FileDownload(name=job_name + '-grab-script',
                     mastersrc='scripts/docbuild.py',
                     workerdest=Interpolate('%(prop:builddir)s/docbuild.py'),
                     hideStepIf=success))

    f.addStep(
        FileDownload(
            name=job_name + '-grab-script',
            mastersrc='scripts/unique_docker_doc.py',
            workerdest=Interpolate('%(prop:builddir)s/unique_docker_doc.py'),
            hideStepIf=success))

    f.addStep(
        FileDownload(
            name=job_name + '-grab-script',
            mastersrc='docker_components/Dockerfile_doc',
            workerdest=Interpolate('%(prop:builddir)s/Dockerfile_doc'),
            hideStepIf=success))

    f.addStep(
        FileDownload(name=job_name + '-grab-script',
                     mastersrc='docker_components/docker-compose-doc.yaml',
                     workerdest=Interpolate(
                         '%(prop:builddir)s/docker-compose-doc.yaml'),
                     hideStepIf=success))
    # reedit docker-compose-doc.yaml
    f.addStep(
        ShellCommand(
            haltOnFailure=True,
            name=job_name + '-reedit-docker-compose',
            command=[
                'python', 'unique_docker_doc.py',
                Interpolate('%(prop:builddir)s/docker-compose-doc.yaml'),
                Interpolate(job_name)
            ],
            workdir=Interpolate('%(prop:builddir)s'),
            descriptionDone=['reedit docker-compose', job_name]))
    # Build docker image for creating doc
    f.addStep(
        ShellCommand(
            # haltOnFailure = True,
            name=job_name + '-create_docker',
            command=[
                'docker-compose', '-f',
                Interpolate('%(prop:builddir)s/docker-compose-doc.yaml'),
                'build'
            ],
            workdir=Interpolate('%(prop:builddir)s'),
            descriptionDone=['create_doc', job_name]))

    # creating doc in docker
    f.addStep(
        ShellCommand(
            # haltOnFailure=True,
            name=job_name + '-create_doc',
            command=[
                'docker',
                'run',
                # '-v', 'ros-repository-docker_deb_repository:/home/package',
                '--name',
                Interpolate('doc_' + job_name),
                Interpolate('scalable-doc:' + job_name),
                'python',
                '/root/docbuild.py',
                '/tmp/',
                rosdistro
            ],
            descriptionDone=['create doc', job_name]))

    f.addStep(
        ShellCommand(name=job_name + '-copydocs',
                     command=[
                         'docker', 'cp',
                         Interpolate('doc_' + job_name + ':' + '/tmp/docs'),
                         '/docs'
                     ],
                     workdir=Interpolate('%(prop:builddir)s'),
                     descriptionDone=['copydocs', job_name]))

    # rm container
    f.addStep(
        ShellCommand(name=job_name + '-rm_container',
                     command=['docker', 'rm',
                              Interpolate('doc_' + job_name)],
                     descriptionDone=['remove docker container', job_name]))

    # rm image
    f.addStep(
        ShellCommand(name=job_name + '-rm_image',
                     command=[
                         'docker', 'image', 'rm',
                         Interpolate('scalable-doc:' + job_name)
                     ],
                     descriptionDone=['remove docker image', job_name]))

    # Trigger if needed
    if trigger_pkgs != None:
        f.addStep(
            Trigger(schedulerNames=[
                t.replace('_', '-') + '-' + rosdistro + '-doctrigger'
                for t in trigger_pkgs
            ],
                    waitForFinish=False,
                    alwaysRun=True))
    # Create trigger
    c['schedulers'].append(
        triggerable.Triggerable(name=job_name.replace('_', '-') + '-' +
                                rosdistro + '-doctrigger',
                                builderNames=[
                                    job_name + '_' + rosdistro + '_docbuild',
                                ]))
    # Add builder config
    c['builders'].append(
        BuilderConfig(name=job_name + '_' + rosdistro + '_docbuild',
                      workernames=machines,
                      factory=f))
    # return the name of the job created
    return job_name + '_' + rosdistro + '_docbuild'
Example #24
    def __init__(self, **kwargs):

        factory.BuildFactory.__init__(self, **kwargs)

        # set properties about this builder
        self.addStep(
            SetProperty(property="masterdir", command=["echo",
                                                       os.getcwd()]))
        self.addStep(
            SetProperty(property="basedir",
                        command=["bash", "-c", "builtin pwd"]))
        self.addStep(
            SetProperty(property="gmp_version",
                        command=["echo",
                                 gConfig.get("libraries", "gmp")],
                        doStepIf=lambda step:
                        (not step.build.hasProperty("gmp_version"))))
        self.addStep(
            SetProperty(property="mpfr_version",
                        command=["echo",
                                 gConfig.get("libraries", "mpfr")],
                        doStepIf=lambda step:
                        (not step.build.hasProperty("mpfr_version"))))
        self.addStep(
            SetProperty(property="mpc_version",
                        command=["echo",
                                 gConfig.get("libraries", "mpc")],
                        doStepIf=lambda step:
                        (not step.build.hasProperty("mpc_version"))))
        self.addStep(
            SetProperty(property="binutils_branch",
                        command=["echo", "trunk"],
                        doStepIf=lambda step:
                        (not step.build.hasProperty("binutils_branch"))))
        self.addStep(
            SetProperty(property="gcc_branch",
                        command=["echo", "trunk"],
                        doStepIf=lambda step:
                        (not step.build.hasProperty("gcc_branch"))))
        self.addStep(
            SetProperty(property="mingw_branch",
                        command=["echo", "trunk"],
                        doStepIf=lambda step:
                        (not step.build.hasProperty("mingw_branch"))))
        self.addStep(
            SetProperty(property="filename",
                        command=[
                            "echo",
                            Property("src_archive",
                                     default="mingw-w64-src.tar.bz2")
                        ]))
        self.addStep(
            SetProperty(
                property="srcname_format",
                command=["echo", "mingw-w64-src%(datestamp:-)s.tar.bz2"],
                doStepIf=lambda step:
                (not step.build.hasProperty("srcname_format"))))
        #self.addStep(M64NightlyRev)

        if self.clobber:
            self.addStep(
                ShellCommand(name="clobber",
                             command=[
                                 "rm", "-rfv", "build", "src",
                                 Property("filename")
                             ],
                             haltOnFailure=False,
                             description=["clobber all"],
                             descriptionDone=["clobbered"]))

        self.addStep(
            ShellCommand(
                name="makefile-checkout",
                description=["makefile", "checkout"],
                descriptionDone=["checked out", "makefile"],
                command=[
                    "curl", "-o", "mingw-makefile",
                    "https://svn.code.sf.net/p/mingw-w64/code/experimental/buildsystem/makebuildroot.mk"
                ],
                haltOnFailure=True))

        #    self.addStep(SVN(mode="export",
        #                     svnurl="https://mingw-w64.svn.sourceforge.net/svnroot/mingw-w64/experimental/buildsystem",
        #                     extra_args=["--trust-server-cert"]))

        self.addStep(
            FileUpload(masterdest="mingw-makefile",
                       slavesrc="mingw-makefile",
                       maxsize=102400,
                       mode=0o600))

        self.addStep(
            ShellCommand(
                name="patch-pull",
                command=["make", "-f", "mingw-makefile", "patch-pull"],
                description=["patches", "pull"],
                descriptionDone=["pulled", "patches"]))

        # download binutils
        self.addStep(
            Compile(name="binutils-pull",
                    description=["binutils", "pull"],
                    descriptionDone=["pulled", "binutils"],
                    command=["make", "-f", "mingw-makefile", "binutils-pull"],
                    env={
                        "BINUTILS_REVISION":
                        Property("binutils_revision", default="head"),
                        "BINUTILS_BRANCH":
                        Property("binutils_branch")
                    }))

        self.addStep(
            ShellCommand(
                name="binutils-patch",
                description=["patch", "binutils"],
                descriptionDone=["binutils", "patched"],
                doStepIf=lambda step: (step.getProperty("scheduler") != "try"),
                workdir="build/src/binutils",
                command=[
                    "bash", "-c",
                    """if [ -n "$( ls ../patches/binutils/*.patch )" ] ; then
                                            for i in ../patches/binutils/*.patch ; do
                                              patch -p1 -f -i "$i" ;
                                            done ;
                                          fi""".replace("\n", " ")
                ]))

        # download gcc
        self.addStep(
            Compile(name="gcc-pull",
                    description=["gcc", "pull"],
                    descriptionDone=["pulled", "gcc"],
                    command=["make", "-f", "mingw-makefile", "gcc-pull"],
                    env={
                        "GCC_REVISION": Property("gcc_revision",
                                                 default="head"),
                        "GCC_BRANCH": Property("gcc_branch")
                    }))
        self.addStep(
            ShellCommand(
                name="gcc-patch",
                description=["patch", "gcc"],
                descriptionDone=["gcc", "patched"],
                doStepIf=lambda step: (step.getProperty("scheduler") != "try"),
                workdir="build/src/gcc/src",
                command=[
                    "bash", "-c",
                    """if [ -n "$( ls ../../patches/gcc/*.patch )" ] ; then
                                            for i in ../../patches/gcc/*.patch ; do
                                              patch -p1 -f -i "$i" ;
                                            done ;
                                          fi""".replace("\n", " ")
                ]))

        # download gmp
        self.addStep(
            Compile(name="gmp-download",
                    description=["gmp", "download"],
                    descriptionDone=["downloaded", "gmp"],
                    command=["make", "-f", "mingw-makefile", "gmp-download"],
                    env={"GMP_VERSION": Property("gmp_version")}))
        self.addStep(
            Compile(name="gmp-extract",
                    description=["gmp", "extract"],
                    descriptionDone=["extracted", "gmp"],
                    command=["make", "-f", "mingw-makefile", "gmp-extract"],
                    env={"GMP_VERSION": Property("gmp_version")}))

        # Fix gmp (fails to find m4 for flex)
        self.addStep(
            ShellCommand(
                name="gmp-patch",
                workdir="build/src/gcc/src/gmp",
                description=["patch", "gmp"],
                command=[
                    "bash", "-c",
                    """if [ -n "$( ls ../../../patches/gmp/*.patch )" ] ; then
                                            for i in ../../../patches/gmp/*.patch ; do
                                              patch -p1 -f -i "$i" ;
                                            done ;
                                          fi""".replace("\n", " ")
                ]))

        # download mpfr
        self.addStep(
            Compile(name="mpfr-download",
                    description=["mpfr", "download"],
                    descriptionDone=["downloaded", "mpfr"],
                    command=["make", "-f", "mingw-makefile", "mpfr-download"],
                    env={"MPFR_VERSION": Property("mpfr_version")}))

        self.addStep(
            Compile(name="mpfr-extract",
                    description=["mpfr", "extract"],
                    descriptionDone=["extracted", "mpfr"],
                    command=["make", "-f", "mingw-makefile", "mpfr-extract"],
                    env={"MPFR_VERSION": Property("mpfr_version")}))

        self.addStep(
            ShellCommand(
                name="mpfr-patch",
                description=["patch", "mpfr"],
                descriptionDone=["patched", "mpfr"],
                doStepIf=lambda step: (step.getProperty("scheduler") != "try"),
                workdir="build/src/gcc/src/mpfr",
                command=[
                    "bash", "-c",
                    """if [ -n "$( ls ../../../patches/mpfr/*.patch )" ] ; then
                                            for i in ../../../patches/mpfr/*.patch ; do
                                              patch -p1 -f -i "$i" ;
                                            done ;
                                          fi""".replace("\n", " ")
                ]))

        # download mpc
        self.addStep(
            Compile(name="mpc-download",
                    description=["mpc", "download"],
                    descriptionDone=["downloaded", "mpc"],
                    command=["make", "-f", "mingw-makefile", "mpc-download"],
                    env={"MPC_VERSION": Property("mpc_version")}))

        self.addStep(
            Compile(name="mpc-extract",
                    description=["mpc", "extract"],
                    descriptionDone=["extracted", "mpc"],
                    command=["make", "-f", "mingw-makefile", "mpc-extract"],
                    env={"MPC_VERSION": Property("mpc_version")}))

        self.addStep(
            ShellCommand(
                name="mpc-patch",
                description=["patch", "mpc"],
                descriptionDone=["patched", "mpc"],
                doStepIf=lambda step: (step.getProperty("scheduler") != "try"),
                workdir="build/src/gcc/src/mpc",
                command=[
                    "bash", "-c",
                    """if [ -n "$( ls ../../../patches/mpc/*.patch )" ] ; then
                                            for i in ../../../patches/mpc/*.patch ; do
                                              patch -p1 -f -i "$i" ;
                                            done ;
                                          fi""".replace("\n", " ")
                ]))

        # download mingw-w64 crt and headers
        self.addStep(
            Compile(name="mingw-pull",
                    description=["mingw", "pull"],
                    descriptionDone=["pulled", "mingw"],
                    command=["make", "-f", "mingw-makefile", "mingw-pull"],
                    env={
                        "MINGW_REVISION":
                        Property("mingw_revision", default="head"),
                        "MINGW_BRANCH":
                        Property("mingw_branch")
                    }))

        self.addStep(
            ShellCommand(
                name="mingw-patch",
                description=["patch", "mingw"],
                descriptionDone=["patched", "mingw"],
                workdir="build/src/mingw",
                doStepIf=lambda step: (step.getProperty("scheduler") != "try"),
                command=[
                    "bash", "-c",
                    """if [ -n "$( ls ../patches/mingw/*.patch )" ] ; then
                                            for i in ../patches/mingw/*.patch ; do
                                              patch -p1 -f -i "$i" ;
                                            done ;
                                          fi""".replace("\n", " ")
                ]))

        # update the build stamp
        self.addStep(
            SubversionRevProperty(name="gcc-svnrev",
                                  workdir="build/src/gcc/src",
                                  prop_prefix="gcc_",
                                  config_dir=WithProperties("%(basedir:-.)s")))
        self.addStep(
            SubversionRevProperty(name="mingw-svnrev",
                                  workdir="build/src/mingw",
                                  prop_prefix="mingw_",
                                  config_dir=WithProperties("%(basedir:-.)s")))
        self.addStep(
            SetProperty(property="datestamp",
                        command=["date", "-u", "+_%Y%m%d"],
                        doStepIf=lambda step:
                        (not step.getProperty("datestamp") == "")))
        self.addStep(
            ShellCommand(
                name="mingw-datestamp",
                workdir="build/src/mingw/mingw-w64-crt",
                description=["writing", "buildstamp"],
                descriptionDone=["buildstamp", "written"],
                command=[
                    "bash", "-c",
                    WithProperties(
                        """echo -e '/* generated by buildbot */\n"""
                        """#define __MINGW_W64_REV "%(mingw_revision)s"\n"""
                        """#define __MINGW_W64_REV_STAMP "%(mingw_datestamp)s"\n'"""
                        """ > revstamp.h """)
                ]))

        # Set the gcc version strings if this is a formal release
        self.addStep(
            ShellCommand(
                name="release-information",
                workdir="build/src/gcc/src/gcc",
                description=["writing", "version", "string"],
                descriptionDone=["version", "string", "written"],
                doStepIf=lambda step: step.getProperty("release_build"),
                command=[
                    "bash", "-c",
                    WithProperties(
                        """echo '%(release_gcc_ver:-)s' > BASE-VER && echo > DEV-PHASE """
                    )
                ]))
        # make the tarball
        self.addStep(
            SetProperty(property="destname",
                        command=[
                            "echo",
                            WithPropertiesRecursive(
                                WithProperties("%(srcname_format)s"))
                        ]))
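        # Note: WithPropertiesRecursive above appears to be a project-local helper
        # (stock buildbot only provides WithProperties/Interpolate); presumably it
        # re-interpolates the expanded "srcname_format" value so that the nested
        # "%(datestamp:-)s" placeholder inside it is filled in as well.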
        self.addStep(
            Compile(name="src-package",
                    description=["tarball", "package"],
                    descriptionDone=["packaged", "tarball"],
                    command=["make", "-f", "mingw-makefile", "src-archive"],
                    env={"SRC_ARCHIVE": Property("filename")}))

        # upload the tarball to the master
        self.addStep(
            FileUpload(name="src-upload",
                       slavesrc=Property("filename"),
                       masterdest=Property("filename")))

        # trigger upload
        self.addStep(
            Trigger(
                name="src-publish",
                doStepIf=lambda step: step.build.getProperty("is_nightly"),
                schedulerNames=["sourceforge-upload"],
                waitForFinish=True,  # needed for the builders
                set_properties={
                    "masterdir": WithProperties("%(masterdir)s"),
                    "filename": WithProperties("%(filename)s"),
                    "destname": WithProperties("%(destname)s"),
                    "datestamp": WithProperties("%(datestamp:-)s"),
                    "target-os": "src",
                    "path": WithProperties("%(path:-)s"),
                    "is_nightly": WithProperties("%(is_nightly:-)s")
                }))

        # Set the URL from which the source tarball can be downloaded, so the other
        # slaves can fetch it from SourceForge rather than over the buildbot
        # connection. If the "path" property is set, it overrides the default
        # SourceForge path.
        self.addStep(
            SetProperty(
                property="src_url",
                doStepIf=lambda step: step.build.getProperty("is_nightly"),
                command=[
                    "echo",
                    WithProperties(
                        "http://downloads.sourceforge.net/project/%s/%%(path:-%s)s/%%(destname)s"
                        % (gConfig.get("sourceforge", "group_id"),
                           gConfig.get("sourceforge", "path-src")))
                ]))

        # trigger building
        self.addStep(
            Trigger(name="start-build",
                    schedulerNames=[
                        "trigger-linux-x86_64-x86_64",
                        "trigger-linux-x86_64-x86", "trigger-linux-x86-x86_64",
                        "trigger-linux-x86-x86", "trigger-cygwin-x86-x86_64",
                        "trigger-cygwin-x86-x86", "trigger-mingw-x86-x86_64",
                        "trigger-mingw-x86-x86", "trigger-darwin-x86-x86_64",
                        "trigger-darwin-x86-x86"
                    ],
                    waitForFinish=False,
                    updateSourceStamp=True,
                    set_properties={
                        "is_nightly":
                        Property("is_nightly"),
                        "datestamp":
                        Property("datestamp"),
                        "binutils_branch":
                        Property("binutils_branch"),
                        "gcc_branch":
                        Property("gcc_branch"),
                        "mingw_branch":
                        Property("mingw_branch"),
                        "binutils_config_args":
                        Property("binutils_config_args", default=""),
                        "gcc_config_args":
                        Property("gcc_config_args", default=""),
                        "mingw_config_args":
                        Property("mingw_config_args", default=""),
                        "gmp_config_args":
                        Property("gmp_config_args", default=""),
                        "mpfr_config_args":
                        Property("mpfr_config_args", default=""),
                        "mpc_config_args":
                        Property("mpc_config_args", default=""),
                        "gcc_revision":
                        Property("gcc_revision"),
                        "mingw_revision":
                        Property("mingw_revision"),
                        "masterdir":
                        Property("masterdir"),
                        "path":
                        Property("path"),
                        "src_archive":
                        Property("filename"),
                        "src_url":
                        Property("src_url", default="")
                    }))
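Each Trigger step in this factory only resolves if the master configuration registers a Triggerable scheduler of the same name, as the doc-build example earlier does explicitly. A minimal sketch of that registration, assuming one builder per scheduler; only the scheduler names come from the example, the builder names are placeholders:

from buildbot.schedulers.triggerable import Triggerable

# Hedged sketch: register one Triggerable per name referenced by the Trigger
# steps above; the builderNames used here are assumptions, not from the example.
c['schedulers'].append(
    Triggerable(name="sourceforge-upload",
                builderNames=["sourceforge-upload"]))
for target in ("linux-x86_64-x86_64", "linux-x86_64-x86",
               "linux-x86-x86_64", "linux-x86-x86",
               "cygwin-x86-x86_64", "cygwin-x86-x86",
               "mingw-x86-x86_64", "mingw-x86-x86",
               "darwin-x86-x86_64", "darwin-x86-x86"):
    c['schedulers'].append(
        Triggerable(name="trigger-" + target, builderNames=[target]))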