Example #1
def masterConfig():
    c = {}
    from buildbot.plugins import schedulers, steps, util
    # Not shown in the original excerpt, but needed by SleepAndInterrupt below:
    from buildbot.util import asyncSleep
    from twisted.internet import defer

    class SleepAndInterrupt(steps.ShellSequence):
        @defer.inlineCallbacks
        def run(self):
            if self.worker.worker_system == "nt":
                sleep = "waitfor SomethingThatIsNeverHappening /t 100 >nul 2>&1"
            else:
                sleep = ["sleep", "100"]
            d = self.runShellSequence([util.ShellArg(sleep)])
            yield asyncSleep(1)
            self.interrupt("just testing")
            res = yield d
            return res

    c['schedulers'] = [
        schedulers.ForceScheduler(name="force", builderNames=["testy"])
    ]

    f = util.BuildFactory()
    f.addStep(SleepAndInterrupt())
    c['builders'] = [
        util.BuilderConfig(name="testy", workernames=["local1"], factory=f)
    ]

    return c
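Note: masterConfig() above only returns schedulers and builders; to actually load it as a master.cfg you also need at least a worker entry matching "local1" and a protocols port. The wrapper below is a minimal, hypothetical sketch (the password, ports, and URLs are placeholders, not part of the original example):

# Minimal, hypothetical master.cfg wrapper for the example above.
from buildbot.plugins import worker

c = BuildmasterConfig = masterConfig()
c['workers'] = [worker.Worker("local1", "password")]  # name must match workernames above
c['protocols'] = {'pb': {'port': 9989}}               # port the worker connects to
c['title'] = "example"
c['buildbotURL'] = "http://localhost:8010/"
c['www'] = dict(port=8010)
c['db'] = {'db_url': "sqlite:///state.sqlite"}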
Example #2
 def addBuilder(self, worker, factory):
     module = self.importWithReload(factory)
     self.masterConf['builders'].append(
         util.BuilderConfig(name=worker,
                            workernames=[worker],
                            factory=module.getFactory()))
     self.shedulers.addScheduler(module.getPropertyes(), worker)
     self.builders.append(worker)
Example #3
def add_builders(b):
    for branch in config['branch_to_namespace_mapping']:
        realm = config['branch_to_namespace_mapping'][branch]
        buildername = "wwdicebot-" + realm
        b['builders'].append(
            util.BuilderConfig(name=buildername,
                               workernames=["klaital-worker"],
                               factory=_make_factory(),
                               tags=['wwdicebot', 'home', 'discord', 'bots']))
Example #4
def getPullRequestBuilder(props, pretty_branch_name):

    builders = []

    for jdk in common.getJDKBuilds(props):
        jdk_props = dict(props)
        jdk_props['jdk'] = str(jdk)

        builders.append(
            util.BuilderConfig(name=pretty_branch_name +
                               " Pull Request Build JDK " + str(jdk),
                               workernames=workers,
                               factory=build.getPullRequestPipeline(),
                               collapseRequests=True,
                               properties=jdk_props,
                               locks=[mvn_lock.access('exclusive')]))

        builders.append(
            util.BuilderConfig(name=pretty_branch_name +
                               " Pull Request Reports JDK " + str(jdk),
                               workernames=workers,
                               factory=reports.getPullRequestPipeline(),
                               collapseRequests=True,
                               properties=jdk_props,
                               locks=[mvn_lock.access('exclusive')]))

    builders.append(
        util.BuilderConfig(name=pretty_branch_name + " Pull Request Markdown",
                           workernames=workers,
                           factory=markdown.getPullRequestPipeline(),
                           properties=props,
                           collapseRequests=True))

    #    builders.append(util.BuilderConfig(
    #        name=pretty_branch_name + " Pull Request Database Tests",
    #        workernames=workers,
    #        factory=database.getPullRequestPipeline(),
    #        properties=props,
    #        collapseRequests=True,
    #        locks=[db_lock.access('exclusive')]))

    return builders
Example #5
def add_all_builders(b):
    for s_name in SERVICES:
        factory = _make_factory(s_name, SERVICES[s_name])
        for branch in get_all_possible_branch_names():
            realm = branch[len('deploy-'):]
            b.append(
                util.BuilderConfig(
                    name=f"{s_name}_{realm}",
                    workernames=WORKERNAMES,
                    factory=factory,
                    locks=[helpers.WORKER_LOCK.access('exclusive')],
                    tags=[s_name, realm]))
Example #6
def create_builders(repos_and_packages):
    ret = []
    for repo in repos_and_packages:
        for artifact in repo['artifacts']:
            print('building builder for artifact', artifact['artifact'])
            build_factory = util.BuildFactory()
            build_factory.addStep(
                MonorepoGitStep(artifact['path'],
                                repourl=repo['repo'],
                                mode='full',
                                method='copy'))

            has_assets = artifact.get('assets') is not None

            if artifact['type'] == 'npm':
                add_npm_build_steps(build_factory, has_assets)
                ret.append(
                    util.BuilderConfig(
                        name=artifact['artifact'],
                        workernames=["build-npm"],
                        factory=build_factory,
                        properties={'owners': artifact.get('owners', [])}))
            elif artifact['type'] == 'docker-npm':
                add_docker_npm_build_steps(build_factory, artifact['artifact'],
                                           has_assets)
                ret.append(
                    util.BuilderConfig(
                        name=artifact['artifact'],
                        workernames=["build-docker-npm"],
                        factory=build_factory,
                        properties={'owners': artifact.get('owners', [])}))

        artifactsrc_yml_build_factory = util.BuildFactory()
        add_artifactsrc_yml_build_steps(artifactsrc_yml_build_factory,
                                        repo['repo'])
        ret.append(
            util.BuilderConfig(name='build-artifactsrc-yml',
                               workernames=["build-artifactsrc-yml"],
                               factory=artifactsrc_yml_build_factory))
    return ret
Example #7
def add_builders(b):
    for branch in config['branch_deployment_configs']:
        realm = config['branch_deployment_configs'][branch]['namespace']
        buildername = "vitasa-" + realm
        tags = ['vita', config['branch_deployment_configs'][branch]['namespace']]
        if config['branch_deployment_configs'][branch]['kubeconfig'] == config['home_kubeconfig']:
            tags.append('home')

        b['builders'].append(util.BuilderConfig(name=buildername,
            workernames=["vitasa-worker"],
            factory=_make_factory(config['branch_deployment_configs'][branch]),
            tags=tags
        ))
Example #8
def get_builders():
    factory = util.BuildFactory()
    # check out the source
    factory.addStep(
        steps.Git(repourl='git://github.com/buildbot/hello-world.git',
                  mode='incremental'))
    # run the tests (note that this will require that 'trial' is installed)
    factory.addStep(
        steps.ShellCommand(command=["trial", "hello"],
                           env={"PYTHONPATH": "."}))
    return [
        util.BuilderConfig(name="runtests",
                           workernames=["default"],
                           factory=factory)
    ]
Example #9
def masterConfig():
    global num_reconfig
    num_reconfig += 1
    c = {}
    from buildbot.plugins import schedulers, steps, util

    c['schedulers'] = [
        schedulers.ForceScheduler(name="force", builderNames=["testy"])
    ]

    f = util.BuildFactory()
    f.addStep(
        steps.SetPropertyFromCommand(property="test", command=["echo", "foo"]))
    c['builders'] = [
        util.BuilderConfig(name="testy", workernames=["local1"], factory=f)
    ]

    return c
Example #10
def nix_build_config(
    worker_names: list[str], enable_cachix: bool
) -> util.BuilderConfig:
    """
    Builds one nix flake attribute.
    """
    factory = util.BuildFactory()
    factory.addStep(
        NixBuildCommand(
            env={},
            name="Build flake attr",
            command=[
                "nix",
                "build",
                "-L",
                "--out-link",
                util.Interpolate("result-%(prop:attr)s"),
                util.Property("drv_path"),
            ],
            haltOnFailure=True,
        )
    )
    if enable_cachix:
        factory.addStep(
            steps.ShellCommand(
                name="Upload cachix",
                env=dict(CACHIX_AUTH_TOKEN=util.Secret("cachix-token")),
                command=[
                    "cachix",
                    "push",
                    util.Secret("cachix-name"),
                    util.Interpolate("result-%(prop:attr)s"),
                ],
            )
        )
    factory.addStep(UpdateBuildOutput(name="Update build output", branches=["master"]))
    return util.BuilderConfig(
        name="nix-build",
        workernames=worker_names,
        properties=[],
        collapseRequests=False,
        env={},
        factory=factory,
    )
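The "attr" and "drv_path" values interpolated in the build command above are expected to arrive as build properties, presumably set by a triggering evaluation step (compare the nix-eval builder in Example #26). A hypothetical wiring sketch, with the scheduler name and worker list as assumptions rather than part of the original:

from buildbot.plugins import schedulers

# Hypothetical: register the builder plus a Triggerable scheduler of the same name,
# so an upstream step can trigger "nix-build" with attr/drv_path passed as properties.
c['builders'].append(nix_build_config(worker_names=["worker1"], enable_cachix=False))
c['schedulers'].append(
    schedulers.Triggerable(name="nix-build", builderNames=["nix-build"]))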
Example #11
 def error(message):
     try:
         name = constructicon_name + '-' + builder_name
     except NameError:
         name = constructicon_name
     global errors
     name += '-uniquifier-{}'.format(errors)
     errors += 1
     all_builders.append(
         util.BuilderConfig(name=name,
                            description=git_state + ' error: ' + message,
                            slavenames=['none'],
                            factory=util.BuildFactory()))
     all_schedulers.append(
         ForceScheduler(
             name=name + '-force',
             builderNames=[name],
         ))
     log.msg('builder {}: {}'.format(name, message))
Example #12
def nix_update_flake_config(
    worker_names: list[str], projectname: str, github_token_secret: str
) -> util.BuilderConfig:
    """
    Updates the flake and opens a PR for it.
    """
    factory = util.BuildFactory()
    url_with_secret = util.Interpolate(
        f"https://*****:*****@github.com/{projectname}"
    )
    factory.addStep(
        steps.Git(
            repourl=url_with_secret,
            method="clean",
            submodules=True,
            haltOnFailure=True,
        )
    )
    factory.addStep(
        steps.ShellCommand(
            name="Update flakes",
            env=dict(
                GIT_AUTHOR_NAME="buildbot",
                GIT_AUTHOR_EMAIL="*****@*****.**",
                GIT_COMMITTER_NAME="buildbot",
                GIT_COMMITTER_EMAIL="*****@*****.**",
            ),
            command=[
                "nix",
                "flake",
                "update",
                "--commit-lock-file",
                "--commit-lockfile-summary",
                "flake.lock: Update",
            ],
            haltOnFailure=True,
        )
    )
    factory.addStep(
        steps.ShellCommand(
            name="Force-Push to update_flake_lock branch",
            command=[
                "git",
                "push",
                "--force",
                "origin",
                "HEAD:refs/heads/update_flake_lock",
            ],
            haltOnFailure=True,
        )
    )
    factory.addStep(
        CreatePr(
            name="Create pull-request",
            env=dict(GITHUB_TOKEN=util.Secret(github_token_secret)),
            command=[
                "gh",
                "pr",
                "create",
                "--repo",
                projectname,
                "--title",
                "flake.lock: Update",
                "--body",
                "Automatic buildbot update",
                "--head",
                "refs/heads/update_flake_lock",
                "--base",
                "master",
            ],
        )
    )
    return util.BuilderConfig(
        name="nix-update-flake",
        workernames=worker_names,
        factory=factory,
        properties=dict(virtual_builder_name="nix-update-flake"),
    )
Example #13
    def getPerPlatformBuilders(self, platform):
        if not platform.canBuild(self):
            return

        # Don't use os.path.join, as the builder is a Linux image
        src_path = "{0}/src".format("/data")
        build_path = "{0}/build".format("/data")

        # daily_builds_path is used in Package step on master side
        daily_builds_path = os.path.join(config.daily_builds_dir, self.name)
        # Ensure last path component doesn't get removed here and in packaging step
        daily_builds_url = urlp.urljoin(config.daily_builds_url + '/',
                                        self.name + '/')

        configure_path = src_path + "/configure"

        env = platform.getEnv(self)
        # Set up ccache as the compiler; use the already-set CXX as the real compiler, or the environment CXX from the Docker image
        env['CXX'] = 'ccache {0}'.format(env.get('CXX', '${CXX}'))

        f = util.BuildFactory()
        f.workdir = ""
        f.useProgress = False

        self.addCleanSteps(f, platform, env=env)

        self.addConfigureSteps(f,
                               platform,
                               configure_path=configure_path,
                               env=env)

        self.addBuildSteps(f, platform, env=env)

        self.addTestsSteps(f, platform, env=env)

        self.addPackagingSteps(f,
                               platform,
                               env=env,
                               src_path=src_path,
                               daily_builds_path=daily_builds_path,
                               daily_builds_url=daily_builds_url)

        locks = [
            lock_build.access('counting'),
            self.lock_src.access("counting")
        ]
        if platform.lock_access:
            locks.append(platform.lock_access(self))

        yield util.BuilderConfig(
            name=self.names['bld-platform'](platform),
            workernames=workers.workers_by_type['builder'],
            workerbuilddir=build_path,
            factory=f,
            locks=locks,
            tags=["build", self.name, platform.name],
            properties={
                "buildname": self.name,
                "platformname": platform.name,
                "workerimage": platform.getWorkerImage(self),
            },
        )
Example #14
                       warnOnFailure=False,
                       flunkOnFailure=False))

# factory.addStep(steps.ShellCommand(
#     name="remove dangling docker images",
#     logEnviron=False,
#     command=["/bin/sh", "-c", "docker images --filter dangling=true --quiet | xargs docker rmi"],
#     haltOnFailure=False,
#     warnOnFailure=False,
#     flunkOnFailure=False)
# )

c['builders'] = []
c['builders'].append(
    util.BuilderConfig(name="cite-build",
                       workernames=workernames,
                       factory=factory))

####### REPORTERS
c['services'] = [
    CiteGitHubStatusPush(baseURL=Config.get("github", "api_url"),
                         token=Config.get("github", "api_token"),
                         verbose=True)
]

# minimalistic config to activate new web UI
c['www'] = dict(port=8010,
                plugins=dict(waterfall_view={}, console_view={}),
                change_hook_dialects={'github': {}})

####### DB URL
Example #15
from buildbot.plugins import util, steps

site_vitrine_builder = util.BuilderConfig(
    name='SiteVitrine',
    workername='site_vitrine',
    factory=util.BuildFactory([
        steps.Git(repourl='[email protected]:CentraleFitness/site_vitrine.git',
                  mode='incremental'),
        steps.ShellCommand(command=["yarn"]),
        steps.ShellCommand(command=["yarn", "build"]),
        steps.ShellCommand(command=["forever", "stop", "bin/www"]),
        steps.ShellCommand(command=["forever", "start", "bin/www"]),
    ]))

site_vitrine_builder_alt = util.BuilderConfig(
    name='SiteVitrine_npm',
    workername='site_vitrine',
    factory=util.BuildFactory([
        steps.Git(repourl='[email protected]:CentraleFitness/site_vitrine.git',
                  mode='incremental'),
        steps.ShellCommand(command=["npm", "install"]),
        steps.ShellCommand(command=["npm", "run", "build"]),
        steps.ShellCommand(command=["forever", "stop", "bin/www"]),
        steps.ShellCommand(command=["forever", "start", "bin/www"]),
    ]))
Example #16
from buildbot.plugins import util, steps

from .helpers.steps import venv_step, service_step

PYTHON_EX = "/var/buildbot/workers/backoffice_api/venv/bin/python3"


backoffice_api_builder = util.BuilderConfig(
    name='BackofficeApi',
    workername='backoffice_api',
    factory=util.BuildFactory([
        steps.Git(
            repourl='[email protected]:CentraleFitness/backoffice-server.git',
            mode='incremental'),
        steps.ShellCommand(
            command=["mv", "config/config_prod.py", "config/config.py"]),
        venv_step('backoffice_api', 'BackofficeApi'),
        steps.ShellCommand(
            command=[PYTHON_EX, "manage.py", "migrate"]),
        service_step('backoffice_api', pidfile="/var/run/backoffice_api.pid")
    ])
)
Example #17
builders = []
architecture_testing_list = get_arches()

for kernel_branch in branches_list:
    for arch in architecture_testing_list:
        for toolchain in arch["toolchain"]:
            tags = []
            tags.append(toolchain["name"])
            tags.append(arch["name"])
            tags.append(kernel_branch)
            builders.append(
                util.BuilderConfig(name=kernel_branch + ':' + arch["name"] +
                                   ':' + toolchain["name"],
                                   tags=tags,
                                   workernames=get_workers_for(
                                       arch["name"], toolchain["name"]),
                                   factory=download_new_patch_and_build_kernel(
                                       kernel_branch, arch["name"])))

builders.append(
    util.BuilderConfig(name='gentoo_sources',
                       workernames=get_workers_for("gentoo_sources", None),
                       factory=test_gentoo_sources()))

builders.append(
    util.BuilderConfig(name='other_sources',
                       workernames=get_workers_for("other_sources", None),
                       factory=test_gentoo_sources()))

builders.append(
Example #18
#

# Builders that build the CLIP OS Dockerized build environment images to be
# then used by the clipos.workers.DockerLatentWorker:
docker_buildenv_image_builders = []
for flavor, props in clipos.workers.DockerLatentWorker.FLAVORS.items():
    docker_buildenv_image_builders.append(
        util.BuilderConfig(
            name='docker-worker-image env:{}'.format(
                flavor),  # keep this short
            description=line(
                """Build the Docker image to use as a Buildbot Docker latent
                worker based upon a {} userland.""").format(
                    props['description']),
            tags=['docker-worker-image', "docker-env:{}".format(flavor)],

            # Temporary: Always build on the worker that have access to the
            # Docker socket:
            workernames=[
                docker_operations_worker.name,
            ],
            factory=clipos.build_factories.BuildDockerImage(
                flavor=flavor, buildmaster_setup=setup),
        ))

# The builder that generates the repo dir and git lfs archive artifacts from
# scratch:
repo_sync_builder = util.BuilderConfig(
    name='repo-sync',
    description=line(
        """Synchronize the CLIP OS source tree and produce a tarball from the
Example #19
    def getGlobalBuilders(self, platforms):
        ret = list()

        f = util.BuildFactory()
        f.workdir = ""
        f.useProgress = False
        f.addStep(
            steps.Git(
                mode="incremental",
                repourl=self.giturl,
                branch=self.branch,
                locks=[self.lock_src.access("exclusive")],
            ))
        if len(self.PATCHES):
            f.addStep(
                scummsteps.Patch(
                    base_dir=config.configuration_dir,
                    patches=self.PATCHES,
                    locks=[self.lock_src.access("exclusive")],
                ))
        if self.nightly is not None:
            # Trigger nightly scheduler to let it know the source stamp
            f.addStep(
                steps.Trigger(
                    name="Updating source stamp",
                    schedulerNames=["nightly-scheduler-{0}".format(self.name)],
                    set_properties={
                        'got_revision':
                        util.Property('got_revision', defaultWhenFalse=False),
                    },
                    updateSourceStamp=True,
                    hideStepIf=(lambda r, s: r == util.SUCCESS),
                ))
        f.addStep(
            steps.Trigger(name="Building all platforms",
                          schedulerNames=[self.names['sch-build']],
                          set_properties={
                              'got_revision':
                              util.Property('got_revision',
                                            defaultWhenFalse=False),
                              'clean':
                              util.Property('clean', defaultWhenFalse=False),
                              'package':
                              util.Property('package', defaultWhenFalse=False)
                          },
                          updateSourceStamp=True,
                          waitForFinish=True))

        ret.append(
            util.BuilderConfig(
                name=self.names['bld-fetch'],
                workernames=workers.workers_by_type['fetcher'],
                workerbuilddir="/data/src/{0}".format(self.name),
                factory=f,
                tags=["fetch", self.name],
                locks=[lock_build.access('counting')],
            ))

        if self.nightly is not None:
            f = util.BuildFactory()
            f.addStep(
                steps.Trigger(
                    name="Building all platforms",
                    schedulerNames=[self.names['sch-build']],
                    updateSourceStamp=True,
                    waitForFinish=True,
                    set_properties={
                        'got_revision':
                        util.Property('got_revision', defaultWhenFalse=False),
                        'clean':
                        True,
                        'package':
                        True,
                        # Ensure our tag is put first and is split from the others
                        'owner':
                        '  Nightly build  ',
                    }))
            ret.append(
                util.BuilderConfig(
                    name=self.names['bld-nightly'],
                    # We use the fetcher worker here, as it prevents other builds from running, just as if a change had happened
                    workernames=workers.workers_by_type['fetcher'],
                    workerbuilddir="/data/triggers/nightly-{0}".format(
                        self.name),
                    factory=f,
                    tags=["nightly", self.name],
                    locks=[lock_build.access('counting')]))

        snapshots_path = os.path.join(config.snapshots_dir, self.name)

        # Builder to clean packages
        f = util.BuildFactory()
        f.addStep(
            scummsteps.CleanupSnapshots(
                dstpath=snapshots_path,
                buildname=self.name,
                platformnames=[
                    platform.name for platform in platforms
                    if platform.canPackage(self)
                ],
                dry_run=util.Property("dry_run", False),
                keep_builds=getattr(config, 'snapshots_keep_builds', 14),
                obsolete=timedelta(
                    days=getattr(config, 'snapshots_obsolete_days', 30)),
                cleanup_unknown=getattr(config, 'snapshots_clean_unknown',
                                        True),
            ))
        ret.append(
            util.BuilderConfig(
                name=self.names['bld-clean'],
                workernames=workers.workers_by_type['fetcher'],
                workerbuilddir="/data/triggers/cleanup-{0}".format(self.name),
                factory=f,
                tags=["cleanup", self.name],
                locks=[lock_build.access('counting')]))

        return ret
Example #20
def make_builder_config(repo_url, name, worker_name, config, lock,
                        snapshots_dir, snapshots_url, snapshots_default_max):
    builder = util.BuildFactory()

    builder.addStep(
        steps.SetProperties(name="Worker Config File",
                            properties=config,
                            hideStepIf=True))

    builder.addStep(
        steps.SetPropertiesFromEnv(
            variables=["WORKER_HOST", "WORKER_REPO_DIR"], hideStepIf=True))

    # TODO: use `reference` to a common volume instead? or make the builder
    # dependent on another fetch-only builder so only one builder tries to pull it?
    builder.addStep(
        steps.GitHub(repourl=repo_url,
                     workdir=Property("WORKER_REPO_DIR", None),
                     logEnviron=False,
                     getDescription={
                         "always": True,
                         "tags": True
                     }))

    builder.addStep(
        FileExistsSetProperty(name="config.mk Existence Check",
                              property="already_configured",
                              file="%s/config.mk" % builder.workdir,
                              hideStepIf=True))

    compilation_environment = Property("env", {})

    builder.addStep(
        steps.Configure(command=compute_configure,
                        env=compilation_environment,
                        doStepIf=ConfigChecker().needs_configuration))

    builder.addStep(
        steps.SetPropertyFromCommand(name="Python (Worker)",
                                     property="cpu_count",
                                     command=["python", "-c", GET_CPU_COUNT],
                                     flunkOnFailure=False,
                                     warnOnFailure=True,
                                     hideStepIf=True,
                                     description="getting CPU count",
                                     descriptionDone="got CPU count"))

    # In at least Buildbot 0.9.12, warningPattern and suppressionList are not
    # renderable, so just get the properties from the config file immediately
    compiler_warning_pattern = config.get(
        "compiler_warning_pattern",
        r"^([^:]+):(\d+):(?:\d+:)? [Ww]arning: (.*)$")
    compiler_warning_extractor = steps.Compile.warnExtractFromRegexpGroups
    compiler_suppression_file = Property("compiler_suppression_file", None)
    compiler_suppression_list = config.get("compiler_suppression_list", None)

    builder.addStep(
        steps.Compile(command=["make",
                               Interpolate("-j%(prop:cpu_count:~1)s")],
                      env=compilation_environment,
                      warningPattern=compiler_warning_pattern,
                      warningExtractor=compiler_warning_extractor,
                      suppressionFile=compiler_suppression_file,
                      suppressionList=compiler_suppression_list))

    builder.addStep(
        steps.Test(command=[
            "make",
            Interpolate("%(prop:can_run_tests:"
                        "#?|test|test/runner)s")
        ],
                   env=compilation_environment,
                   warningPattern=compiler_warning_pattern,
                   warningExtractor=compiler_warning_extractor,
                   suppressionFile=compiler_suppression_file,
                   suppressionList=compiler_suppression_list,
                   haltOnFailure=True,
                   flunkOnFailure=True))

    if snapshots_dir is not None and snapshots_url is not None:
        if snapshots_dir and snapshots_dir[-1] != "/":
            snapshots_dir += "/"
        if snapshots_url and snapshots_url[-1] != "/":
            snapshots_url += "/"

        snapshots_dir = "%s%%(prop:branch)s/" % snapshots_dir
        snapshots_url = "%s%%(prop:branch)s/" % snapshots_url

        builder.addStep(
            steps.SetProperty(name="Computed By %s" % path.basename(__file__),
                              property="package_name",
                              value=compute_package_name,
                              hideStepIf=True,
                              doStepIf=should_package))
        builder.addStep(
            Package(package_name=Property("package_name"),
                    package_files=Property("package_files", None),
                    package_format=Property("package_archive_format"),
                    make_target=Property("package_make_target"),
                    split_debug_package=Property("split_debug_package", True),
                    extra_files=Property("package_extra_files", None),
                    package_script=Interpolate(config.get(
                        "package_script", "")),
                    env=compilation_environment,
                    doStepIf=should_package))

        latest_link = Interpolate("%s%%(prop:buildername)s-latest."
                                  "%%(prop:package_archive_format:-tar.xz)s" %
                                  snapshots_dir)
        make_uploader_steps(builder=builder,
                            snapshots_dir=snapshots_dir,
                            snapshots_url=snapshots_url,
                            publish_name="archive",
                            property_name="package_filename",
                            latest_link=latest_link,
                            do_step_if=should_package)

        latest_link = Interpolate("%s%%(prop:buildername)s"
                                  "-latest-debug-symbols.tar.xz" %
                                  snapshots_dir)
        make_uploader_steps(builder=builder,
                            snapshots_dir=snapshots_dir,
                            snapshots_url=snapshots_url,
                            publish_name="debug archive",
                            property_name="debug_package_filename",
                            latest_link=latest_link,
                            do_step_if=should_package_debug)

        builder.addStep(
            MasterCleanSnapshots(
                name="clean old snapshots",
                workdir=Interpolate(snapshots_dir),
                file_prefix=Interpolate("%(prop:buildername)s-"),
                num_to_keep=Property("num_snapshots_to_keep",
                                     snapshots_default_max),
                secondary_file_suffix="-debug-symbols",
                file_extensions=r"\.(?:tar(?:\.[xg]z)?|[a-z]{3,4})$",
                doStepIf=should_package,
                hideStepIf=True))

    return util.BuilderConfig(name=name,
                              workername=worker_name,
                              collapseRequests=True,
                              factory=builder,
                              nextBuild=pick_next_build,
                              locks=[lock.access("counting")])
Example #21
####### BUILDERS

# The 'builders' list defines the Builders, which tell Buildbot how to perform a build:
# what steps, and which workers can execute them.  Note that any particular build will
# only take place on one worker.

build_lock = util.MasterLock("Build")


c["builders"] = []
c["builders"].extend(generate_builder(target) for target in test_targets)
c["builders"].append(
    util.BuilderConfig(
        name="build",
        workernames=["director-worker"],
        factory=build_factory,
        locks=[build_lock.access("exclusive")],
    )
)

lingo_factory = util.BuildFactory()
lingo_factory.addStep(checkout_step)
lingo_factory.addStep(download_step)

lingo_directory = "./engines/director/lingo/tests/"
lingo_factory.addStep(
    GenerateStartMovieCommands(
        directory=lingo_directory,
        game_id="directortest",
        name="Generate lingo test commands",
        command=["find", lingo_directory, "-name", "*.lingo", "-printf", "%P\n"],
Example #22
def getBuildersForBranch(props):

    pretty_branch_name = props['branch_pretty']

    deb_props = dict(props)
    deb_props['image'] = random.choice({{docker_debian_worker_images}})

    cent_props = dict(props)
    cent_props['image'] = random.choice({{docker_centos_worker_images}})

    builders = getPullRequestBuilder(props, pretty_branch_name)

    #Only one maven build per branch at a time
    branch_mvn_lock = util.MasterLock(pretty_branch_name + "mvn_lock")

    for jdk in common.getJDKBuilds(props):
        jdk_props = dict(props)
        jdk_props['jdk'] = str(jdk)

        builders.append(
            util.BuilderConfig(
                name=pretty_branch_name + " Build JDK " + str(jdk),
                workernames=workers,
                factory=build.getBuildPipeline(),
                properties=jdk_props,
                collapseRequests=True,
                #A note on these locks: We want a single maven build per branch,
                # AND a single maven build per worker
                locks=[
                    mvn_lock.access('exclusive'),
                    branch_mvn_lock.access('exclusive')
                ]))

        report_props = dict(jdk_props)
        report_props['cores'] = '1'

        builders.append(
            util.BuilderConfig(
                name=pretty_branch_name + " Reports JDK " + str(jdk),
                workernames=workers,
                factory=reports.getBuildPipeline(),
                properties=jdk_props,
                collapseRequests=True,
                #A note on these locks: We want a single maven build per branch,
                # AND a single maven build per worker
                locks=[
                    mvn_lock.access('exclusive'),
                    branch_mvn_lock.access('exclusive')
                ]))

    release_props = dict(props)
    #We use the first listed JDK since that should be the lowest, most common version
    release_props['jdk'] = str(common.getJDKBuilds(props)[0])
    builders.append(
        util.BuilderConfig(
            name=pretty_branch_name + " Release",
            workernames=workers,
            factory=release.getBuildPipeline(),
            properties=release_props,
            collapseRequests=True,
            #Note: We want a single maven build per worker, but since this is a release we don't
            # care if there are other maven builds running elsewhere
            locks=[mvn_lock.access('exclusive')]))

    builders.append(
        util.BuilderConfig(name=pretty_branch_name + " Markdown",
                           workernames=workers,
                           factory=markdown.getBuildPipeline(),
                           properties=props,
                           collapseRequests=True))

    #    builders.append(util.BuilderConfig(
    #        name=pretty_branch_name + " Database Tests",
    #        workernames=workers,
    #        factory=database.getBuildPipeline(),
    #        properties=props,
    #        collapseRequests=True,
    #        locks=[db_lock.access('exclusive')]))

    builders.append(
        util.BuilderConfig(name=pretty_branch_name + " Debian Packaging",
                           workernames=workers,
                           factory=debs.getBuildPipeline(),
                           properties=deb_props,
                           collapseRequests=True,
                           locks=[deb_lock.access('exclusive')]))

    for distro in (7, 8):
        el_props = dict(props)
        el_props['el_version'] = distro
        if 7 == distro:
            el_props['image'] = f"cent{distro}"
            lock = el7_lock
        elif 8 == distro:
            el_props['image'] = f"rocky{distro}"
            lock = el8_lock

        if "Develop" == pretty_branch_name:
            #Set the RPM branch to master
            el_props['rpmspec_override'] = "master"
            #Override/set a bunch of the build props since the RPMs don't really have a develop...

        builders.append(
            util.BuilderConfig(name=pretty_branch_name +
                               f" el{distro} RPM Packaging",
                               workernames=workers,
                               factory=rpms.getBuildPipeline(),
                               properties=el_props,
                               collapseRequests=True,
                               locks=[lock.access('exclusive')]))

    if props['deploy_env']:
        deploy_props = dict(props)
        deploy_props['deploy_suite'] = '{{ repo_deploy_suite }}'
        deploy_props['package_repo_host'] = "{{ repo_host }}"
        deploy_props['key_url'] = "{{ key_url }}"
        deploy_props['key_id'] = "{{ key_id }}"

        builders.append(
            util.BuilderConfig(
                name=pretty_branch_name + " Ansible Deploy",
                workernames=workers,
                factory=ansible.getBuildPipeline(),
                properties=deploy_props,
                collapseRequests=True,
                #Ensure that no one is changing the package databases while we're deploying!
                locks=[
                    deb_lock.access('exclusive'),
                    el7_lock.access('exclusive'),
                    el8_lock.access('exclusive')
                ]))

    return builders
Example #23
def make_builder_config(repo_url, name, worker_name, config, lock,
                        snapshots_dir, snapshots_url, snapshots_default_max):
    if snapshots_dir and snapshots_dir[-1] != "/":
        snapshots_dir += "/"
    if snapshots_url and snapshots_url[-1] != "/":
        snapshots_url += "/"

    builder = util.BuildFactory()

    builder.addStep(
        steps.SetProperties(name="Worker Config File",
                            properties=config,
                            hideStepIf=True))

    builder.addStep(
        steps.SetPropertiesFromEnv(
            variables=["WORKER_HOST", "WORKER_REPO_DIR"], hideStepIf=True))

    # TODO: use `reference` to a common volume instead? or make the builder
    # dependent on another fetch-only builder so only one builder tries to pull it?
    builder.addStep(
        steps.GitHub(repourl=repo_url,
                     workdir=Property("WORKER_REPO_DIR", None),
                     logEnviron=False,
                     getDescription={
                         "always": True,
                         "tags": True
                     }))

    builder.addStep(
        FileExistsSetProperty(name="config.mk Existence Check",
                              property="already_configured",
                              file="%s/config.mk" % builder.workdir,
                              hideStepIf=True))

    compilation_environment = Property("env", {})

    builder.addStep(
        steps.Configure(command=compute_configure,
                        env=compilation_environment,
                        doStepIf=is_not_configured))

    builder.addStep(
        steps.SetPropertyFromCommand(name="Python (Worker)",
                                     property="cpu_count",
                                     command=["python", "-c", GET_CPU_COUNT],
                                     flunkOnFailure=False,
                                     warnOnFailure=True,
                                     hideStepIf=True,
                                     description="getting CPU count",
                                     descriptionDone="got CPU count"))

    # In at least Buildbot 0.9.12, warningPattern and suppressionList are not
    # renderable, so just get the properties from the config file immediately
    compiler_warning_pattern = config.get(
        "compiler_warning_pattern",
        r"^([^:]+):(\d+):(?:\d+:)? [Ww]arning: (.*)$")
    compiler_warning_extractor = steps.Compile.warnExtractFromRegexpGroups
    compiler_suppression_file = Property("compiler_suppression_file", None)
    compiler_suppression_list = config.get("compiler_suppression_list", None)

    builder.addStep(
        steps.Compile(command=["make",
                               Interpolate("-j%(prop:cpu_count:~1)s")],
                      env=compilation_environment,
                      warningPattern=compiler_warning_pattern,
                      warningExtractor=compiler_warning_extractor,
                      suppressionFile=compiler_suppression_file,
                      suppressionList=compiler_suppression_list))

    builder.addStep(
        steps.Test(command=[
            "make",
            Interpolate("%(prop:can_run_tests:"
                        "#?|test|test/runner)s")
        ],
                   env=compilation_environment,
                   warningPattern=compiler_warning_pattern,
                   warningExtractor=compiler_warning_extractor,
                   suppressionFile=compiler_suppression_file,
                   suppressionList=compiler_suppression_list,
                   haltOnFailure=True,
                   flunkOnFailure=True))

    if snapshots_dir is not None and snapshots_url is not None:
        builder.addStep(
            steps.SetProperty(name="Computed By %s" % path.basename(__file__),
                              property="package_name",
                              value=compute_package_name,
                              hideStepIf=True,
                              doStepIf=should_package))
        builder.addStep(
            Package(package_name=Property("package_name"),
                    package_format=Property("package_archive_format"),
                    make_target=Property("package_make_target"),
                    package_directory=Property("package_directory", None),
                    strip_binaries=Property("package_strip_binaries", None),
                    env=compilation_environment,
                    doStepIf=should_package))

        source_path = Property("package_filename")
        target_path = Interpolate("%s%%(prop:package_filename)s" %
                                  snapshots_dir)
        target_url = Interpolate("%s%%(prop:package_filename)s" %
                                 snapshots_url)
        # This is not an ideal target link calculation since the archive format
        # in package_filename might be fixed up by the Package step, but here
        # only None is converted into tar.xz, which is not exactly the same
        target_link = Interpolate("%s%%(prop:buildername)s-latest."
                                  "%%(prop:package_archive_format:-tar.xz)s" %
                                  snapshots_dir)

        builder.addStep(
            CleaningFileUpload(name="publish",
                               workersrc=source_path,
                               masterdest=target_path,
                               url=target_url,
                               clean=True,
                               doStepIf=should_package))
        builder.addStep(
            steps.MasterShellCommand(
                name="update latest archive",
                command=["ln", "-sf", target_path, target_link],
                logEnviron=False,
                doStepIf=should_package))
        builder.addStep(
            MasterCleanSnapshots(
                name="clean old snapshots",
                workdir=snapshots_dir,
                file_prefix=Interpolate("%(prop:buildername)s-"),
                num_to_keep=Property("num_snapshots_to_keep",
                                     snapshots_default_max),
                doStepIf=should_package))

    return util.BuilderConfig(name=name,
                              workername=worker_name,
                              collapseRequests=True,
                              factory=builder,
                              nextBuild=pick_next_build,
                              locks=[lock.access("exclusive")])
Example #24
     resources = set(resources + get_builder_base_spec('resources'))
     #get - ignore
     builder_spec.remove('get')
     #append
     f = factory(constructicon_name, builder_name, deps,
                 precommands + commands, upload, zip, unzip, url, resources)
     unused = builder_spec.unused()
     if unused:
         error('unused configuration keys\n' + pprint.pformat(unused))
         continue
     all_builders.append(
         util.BuilderConfig(
             name=full_builder_name,
             description=global_repo_urls[constructicon_name] + ' ' +
             git_state + ' on cybertron ' + common.cybertron_git_state() +
             ' in devastator ' + common.git_state(),
             slavenames=slave_names,
             factory=f,
             locks=[slave_lock.access('exclusive')],
         ))
 #schedulers
 schedulers = get_constructicon_spec(constructicon_spec, 'schedulers')
 if not check(schedulers, 'schedulers', [
     [lambda x: isinstance(x, Config), 'is not a dict'],
     [
         lambda x: all([type(i) == str
                        for i in x.keys()]), "has a key that isn't a str"
     ],
     [
         lambda x: all([isinstance(j, Config) for i, j in x.items()]),
         "has a value that isn't a dict"
Example #25
def make_config(worker_name, worker_password, worker_port, git_repo, branch,
                poll_interval, builder_name, project_name, project_url,
                buildbot_url, buildbot_web_port, buildbot_from_email):

    return {
        'workers': [worker.Worker(worker_name, worker_password)],
        'protocols': {
            'pb': {
                'port': worker_port
            }
        },
        'change_source': [
            changes.GitPoller(
                git_repo,
                workdir='gitpoller-workdir',
                branch=branch,
                pollinterval=poll_interval,
            ),
        ],
        'schedulers': [
            schedulers.SingleBranchScheduler(
                name="all",
                change_filter=util.ChangeFilter(branch=branch),
                treeStableTimer=poll_interval,
                builderNames=[builder_name],
            ),
            schedulers.ForceScheduler(
                name="force",
                builderNames=[builder_name],
            ),
        ],
        'builders': [
            util.BuilderConfig(
                name=builder_name,
                workernames=[worker_name],
                factory=util.BuildFactory([
                    # check out the source
                    steps.Git(repourl=git_repo, mode='incremental'),
                    # run the tests
                    steps.ShellCommand(command=[
                        "direnv",
                        "allow",
                        ".",
                    ], ),
                    steps.ShellCommand(
                        command=[
                            "direnv",
                            "exec",
                            ".",
                            "make",
                            "check",
                        ],
                        env={
                            'NIX_REMOTE': 'daemon',
                        },
                        # If we have to rebuild our dependencies from scratch,
                        # we can go a long time without receiving output from
                        # the compiler. Default timeout is 20 mins, bump to
                        # 1hr.
                        timeout=60 * 60,
                    ),
                ]),
            ),
        ],
        'status': [],
        'title':
        project_name,
        'titleURL':
        project_url,
        'buildbotURL':
        buildbot_url,
        'www': {
            'port': buildbot_web_port,
            'plugins': {
                'waterfall_view': {},
            },
        },
        'db': {
            'db_url': "sqlite:///state.sqlite",
        },
        'services': [
            reporters.MailNotifier(
                fromaddr=buildbot_from_email,
                # TODO(jml): Currently sending mail for all builds. We should
                # send mail under fewer circumstances once we have a better
                # idea about what we actually want.
                #
                # http://buildbot.readthedocs.io/en/latest/manual/cfg-reporters.html?highlight=github#mailnotifier-arguments
                mode='all',
                # XXX: Temporarily hard-code until we can figure out how to
                # get these automatically from commits.
                extraRecipients=[
                    "*****@*****.**",
                    "*****@*****.**",
                ],
            )
        ],
    }
Example #26
def nix_eval_config(
    worker_names: list[str], github_token_secret: str
) -> util.BuilderConfig:
    """
    Uses nix-eval-jobs to evaluate hydraJobs from flake.nix in parallel.
    For each evaluated attribute a new build pipeline is started.
    If all builds succeed and the build was for a PR opened by the flake update bot,
    this PR is merged.
    """
    factory = util.BuildFactory()
    # check out the source
    factory.addStep(
        steps.GitHub(
            repourl=util.Property("repository"), method="clean", submodules=True
        )
    )

    factory.addStep(
        NixEvalCommand(
            env={},
            name="Eval flake",
            command=[
                "nix",
                "run",
                "github:nix-community/nix-eval-jobs",
                "--",
                "--workers",
                "8",
                "--gc-roots-dir",
                # FIXME: don't hardcode this
                "/var/lib/buildbot-worker/gcroot",
                "--flake",
                ".#hydraJobs",
            ],
            haltOnFailure=True,
        )
    )
    # Merge flake-update pull requests if CI succeeds
    factory.addStep(
        MergePr(
            name="Merge pull-request",
            env=dict(GITHUB_TOKEN=util.Secret(github_token_secret)),
            github_token_secret=util.Secret(github_token_secret),
            base_branches=["master"],
            owners=["doctor-cluster-bot"],
            command=[
                "gh",
                "pr",
                "merge",
                "--repo",
                util.Property("project"),
                "--rebase",
                util.Property("pullrequesturl"),
            ],
        )
    )

    return util.BuilderConfig(
        name="nix-eval",
        workernames=worker_names,
        factory=factory,
        properties=dict(virtual_builder_name="nix-eval"),
    )
Example #27
c["schedulers"] = [
    schedulers.SingleBranchScheduler(
        name=config.TRIGGER,
        change_filter=util.ChangeFilter(category="mediasdk"),
        treeStableTimer=config.BUILDBOT_TREE_STABLE_TIMER,
        builderNames=[config.TRIGGER])
]

for builder_name, properties in config.FLOW.get_prepared_builders().items():
    if properties.get('add_triggerable_sheduler', True):
        c["schedulers"].append(
            schedulers.Triggerable(name=builder_name,
                                   builderNames=[builder_name]))
    c["builders"].append(
        util.BuilderConfig(name=builder_name,
                           workernames=get_workers(properties.get("worker")),
                           factory=properties['factory']))

# Push status of build to the Github
c["services"] = [
    reporters.GitHubStatusPush(
        token=config.GITHUB_TOKEN,
        context=util.Interpolate("buildbot/%(prop:buildername)s"),
        startDescription="Started",
        endDescription="Done",
        verbose=True)
]
# Will be useful for implementing build notifications in the future
#    reporters.GitHubCommentPush(token=config.GITHUB_TOKEN,
#                                 startDescription="Started (comment)",
#                                 endDescription="Done (comment)",
Example #28
    def getPerPlatformBuilders(self, platform):
        if not platform.canBuild(self):
            return []

        # Don't use os.path.join, as the builder is a Linux image
        src_path = "{0}/src/{1}".format("/data", self.name)
        configure_path = src_path + "/configure"
        build_path = "{0}/builds/{1}/{2}".format("/data", platform.name,
                                                 self.name)

        # snapshots_path is used in Package step on master side
        snapshots_path = os.path.join(config.snapshots_dir, self.name)
        # Ensure last path component doesn't get removed here and in packaging step
        snapshots_url = urlp.urljoin(config.snapshots_url + '/',
                                     self.name + '/')

        env = platform.getEnv(self)

        f = util.BuildFactory()
        f.workdir = ""
        f.useProgress = False

        self.addCleanSteps(f, platform, env=env)

        self.addConfigureSteps(f,
                               platform,
                               configure_path=configure_path,
                               env=env)

        self.addBuildSteps(f, platform, env=env)

        self.addTestsSteps(f, platform, env=env)

        self.addPackagingSteps(f,
                               platform,
                               env=env,
                               src_path=src_path,
                               snapshots_path=snapshots_path,
                               snapshots_url=snapshots_url)

        locks = [
            lock_build.access('counting'),
            self.lock_src.access("counting")
        ]
        if platform.lock_access:
            locks.append(platform.lock_access(self))

        return [
            util.BuilderConfig(
                name=self.names['bld-platform'](platform),
                workernames=workers.workers_by_type['builder'],
                workerbuilddir=build_path,
                factory=f,
                locks=locks,
                tags=["build", self.name, platform.name],
                properties={
                    "platformname": platform.name,
                    "workerimage": platform.getWorkerImage(self),
                },
            )
        ]
Example #29
def get(workers):
    """
    construct a list of builders with the given list of workers
    """
    builders = []
    builders.append(
        util.BuilderConfig(
            name="pull_request_runner",
            tags=["control", "gnuradio", "pull"],
            workernames=[
                w.name for w in filterWorkers(workers, "tasks", "control")
            ],
            factory=build_PR()))
    builders.append(
        util.BuilderConfig(
            name="volk_pull_request_runner",
            tags=["control", "volk", "pull"],
            workernames=[
                w.name for w in filterWorkers(workers, "tasks", "control")
            ],
            factory=build_volk_PR()))
    builders.append(
        util.BuilderConfig(
            name="repo_push_runner",
            tags=["control", "push"],
            workernames=[
                w.name for w in filterWorkers(workers, "tasks", "control")
            ],
            factory=build_push()))

    builders.append(
        util.BuilderConfig(
            name="weekly_runner",
            tags=["control", "weekly"],
            workernames=[
                w.name for w in filterWorkers(workers, "tasks", "control")
            ],
            factory=build_weekly()))

    build_workers = filterWorkers(workers, "tasks", "build")
    distros = [
        w.properties.getProperty("distro") for w in build_workers
        if w.properties.getProperty("distro", None)
    ]
    distros = list(set(distros))
    for distro in distros:
        builders.append(
            util.BuilderConfig(
                name="build_" + distro,
                tags=["build"],
                workernames=[
                    w.name
                    for w in filterWorkers(build_workers, "distro", distro)
                ],
                factory=build_and_test()))
    coverity_workers = filterWorkers(workers, "tasks", "coverity")
    builders.append(
        util.BuilderConfig(name="test_coverity",
                           tags=["test", "coverity"],
                           workernames=[w.name for w in coverity_workers],
                           factory=build_coverity()))
    builders.append(
        util.BuilderConfig(name="test_coverage",
                           tags=["test", "coverage"],
                           workernames=[w.name for w in coverity_workers],
                           factory=build_coverage()))
    return builders
Example #30
from buildbot.plugins import util, steps

from .helpers.steps import venv_step, service_step

PYTHON_EX = "/var/buildbot/workers/email_api/venv/bin/python3"

email_api_builder = util.BuilderConfig(
    name='EmailApi',
    workername='email_api',
    factory=util.BuildFactory([
        steps.Git(repourl='[email protected]:CentraleFitness/email_api.git',
                  mode='incremental'),
        venv_step('email_api', 'EmailApi', 'requirements.txt'),
        steps.ShellCommand(
            command=["mv", "config/buildbot.py", "config/config.py"]),
        steps.ShellCommand(command=[PYTHON_EX, "manage.py", "migrate"]),
        steps.ShellCommand(command=[PYTHON_EX, "manage.py", "collectstatic"]),
        service_step('email_api', pidfile="/var/run/email_api.pid")
    ]))
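Examples #15, #16, and #30 define standalone BuilderConfig objects at import time. A hedged sketch of how such a builder is typically registered in master.cfg, together with its worker and a scheduler that refers to it by name (the worker password, branch, and timer below are placeholder assumptions, not taken from the original):

from buildbot.plugins import schedulers, util, worker

c['workers'].append(worker.Worker('email_api', 'worker-password'))  # placeholder password
c['builders'].append(email_api_builder)
c['schedulers'].append(
    schedulers.SingleBranchScheduler(
        name='email_api_on_push',
        change_filter=util.ChangeFilter(branch='master'),
        treeStableTimer=60,
        builderNames=['EmailApi']))  # must match the BuilderConfig name above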