# Steps to publish the runtime and SDK.

# Shared predicate: publication steps are skipped on optimized builds.
def _skip_on_optimize(step):
    return not step.getProperty("optimize", False)


publish_deb_steps = [
    # Upload the deb package.
    FileUpload(workersrc=deb_filename,
               masterdest=deb_upload_filename,
               mode=0o664,
               haltOnFailure=True,
               doStepIf=_skip_on_optimize),

    # Create a torrent file and start seeding it.
    #MakeTorrent(deb_upload_filename),
    #SeedTorrent(deb_upload_filename),

    # Upload it to an apt repository.
    MasterShellCommand(name="reprepro",
                       command=["reprepro", "-b", deb_archive_dir,
                                "includedeb", deb_archive_suite,
                                deb_upload_filename],
                       locks=[repo_lock.access('exclusive')],
                       doStepIf=_skip_on_optimize),
]

# Now make the factories: build first, then publish.
deb_factory = BuildFactory()
for publish_or_build_step in build_steps + publish_deb_steps:
    deb_factory.addStep(publish_or_build_step)


def docker_builder(buildtype, distro, suite, arch):
    """Return a BuilderConfig for one (buildtype, suite, arch) combination.

    The builder runs ``deb_factory`` on the Linux workers with the given
    values exposed as build properties (``optimize`` is always False here).
    """
    builder_name = '-'.join((buildtype, suite, arch))
    builder_properties = {
        "buildtype": buildtype,
        "distro": distro,
        "suite": suite,
        "arch": arch,
        "optimize": False,
    }
    return BuilderConfig(name=builder_name,
                         workernames=config.linux_workers,
                         factory=deb_factory,
                         properties=builder_properties)
# --- Example #2: a second, similar snippet follows; its opening lines
# (the `publish_deb_steps = [` header) were lost in extraction ---
    FileUpload(slavesrc=deb_filename,
               masterdest=deb_upload_filename,
               mode=0o664,
               haltOnFailure=True),

    # Create a torrent file and start seeding it.
    MakeTorrent(deb_upload_filename),
    SeedTorrent(deb_upload_filename),

    # Upload it to an apt repository.
    MasterShellCommand(name="reprepro",
                       command=[
                           "reprepro", "-b", deb_archive_dir, "includedeb",
                           deb_archive_suite, deb_upload_filename
                       ],
                       locks=[repo_lock.access('exclusive')]),
]

# Now make the factories.
deb_factory = BuildFactory()
# Append every build step, then every publish step, in order.
for current_step in build_steps + publish_deb_steps:
    deb_factory.addStep(current_step)


def docker_builder(buildtype, distro, suite, arch):
    return BuilderConfig(name='-'.join((buildtype, suite, arch)),
                         slavenames=config.linux_slaves,
                         factory=deb_factory,
                         properties={
                             "buildtype": buildtype,
                             "distro": distro,
    def slave_class(self):
        """Return the worker-class label used for this configuration."""
        return 'aws/centos-7'


# The acceptance-test matrix: one entry per supported
# (provider, distribution, dataset backend) combination.
ACCEPTANCE_CONFIGURATIONS = [
    AcceptanceConfiguration(
        provider='aws',
        distribution='rhel-7.2',
        dataset_backend='native',
    ),
]

# Too many simultaneous builds will hit AWS limits, but too few will
# make tests painfully slow. We need to find a compromise between these
# two variables. See FLOC-3263.
aws_lock = MasterLock('aws-lock', maxCount=3)

# Per-provider lock lists applied to the acceptance builders.
ACCEPTANCE_LOCKS = {'aws': [aws_lock.access("counting")]}


def getBuilders(slavenames):
    builders = []
    for configuration in ACCEPTANCE_CONFIGURATIONS:
        builders.append(
            BuilderConfig(name=configuration.builder_name,
                          builddir=configuration.builder_directory,
                          slavenames=slavenames[configuration.slave_class],
                          category='flocker',
                          factory=run_acceptance_tests(configuration),
                          locks=ACCEPTANCE_LOCKS.get(configuration.provider,
                                                     []),
                          nextSlave=idleSlave))
# --- Example #4: another, unrelated buildbot snippet follows ---
class StandardBuild(Build):
    """A project built from a single tracked git branch.

    Stores the repository location, the branch, the optional nightly
    schedule and the shared source lock, and produces the project's
    schedulers and builders.
    """

    __slots__ = [
        'baseurl', 'giturl', 'branch', 'nightly', 'enable_force', 'lock_src'
    ]

    # Patch files applied on top of the checkout when non-empty
    # (consumed in getGlobalBuilders).
    PATCHES = []

    def __init__(self,
                 name,
                 baseurl,
                 branch,
                 nightly=None,
                 enable_force=True,
                 giturl=None):
        """
        :param name: project name, forwarded to the ``Build`` base class.
        :param baseurl: base repository URL.
        :param branch: branch to track.
        :param nightly: optional ``(hour, minute)`` pair enabling the
            nightly scheduler.
        :param enable_force: whether force schedulers are created.
        :param giturl: explicit git URL; defaults to ``baseurl + ".git"``.
        """
        super().__init__(name)
        self.baseurl = baseurl
        self.giturl = giturl if giturl is not None else baseurl + ".git"
        self.branch = branch
        self.nightly = nightly
        self.enable_force = enable_force
        # Lock used to avoid writing source code while it is read by
        # another task; the huge maxCount makes counting (read) accesses
        # effectively unbounded.
        self.lock_src = MasterLock("src-{0}".format(self.name),
                                   maxCount=sys.maxsize)

    def getGlobalSchedulers(self, platforms):
        """Return the global (non per-platform) schedulers for this build.

        :param platforms: iterable of platform objects; those for which
            ``p.canBuild(self)`` is true get a compile builder targeted
            by the trigger and force schedulers.
        :return: list of schedulers — fetch, optional nightly, the
            trigger for all compile builders, and optional force
            schedulers.
        """
        schedulers = []
        change_filter = ChangeFilter(repository=self.baseurl,
                                     branch=self.branch)

        # Fetch scheduler (triggered by event source)
        schedulers.append(
            SingleBranchScheduler(
                name="fetch-{0}".format(self.name),
                change_filter=change_filter,
                treeStableTimer=5,
                builderNames=["fetch-{0}".format(self.name)]))

        # Nightly scheduler (started by time)
        # It's triggered after regular builds to take note of the last
        # fetched source. Note that build is not started by trigger.
        if self.nightly is not None:
            schedulers.append(
                NightlyTriggerable(
                    name="nightly-{0}".format(self.name),
                    branch=self.branch,
                    builderNames=["nightly-{0}".format(self.name)],
                    hour=self.nightly[0],
                    minute=self.nightly[1],
                    onlyIfChanged=True))

        # All compiling builders
        comp_builders = [
            "{0}-{1}".format(self.name, p.name) for p in platforms
            if p.canBuild(self)
        ]

        # Global build scheduler (triggered by fetch build)
        schedulers.append(
            Triggerable(name=self.name, builderNames=comp_builders))

        # Force schedulers. Both share identical reason/codebases/
        # properties, so build them through one helper instead of
        # duplicating the parameter lists; fresh parameter objects are
        # created on every call.
        if self.enable_force:
            def _force_scheduler(suffix, builder_names):
                return ForceScheduler(
                    name="force-scheduler-{0}-{1}".format(self.name, suffix),
                    reason=StringParameter(name="reason",
                                           label="Reason:",
                                           required=True,
                                           size=80),
                    builderNames=builder_names,
                    codebases=[CodebaseParameter(codebase='', hide=True)],
                    properties=[
                        BooleanParameter(name="clean",
                                         label="Clean",
                                         default=False),
                        BooleanParameter(name="package",
                                         label="Package",
                                         default=False),
                    ])

            schedulers.append(
                _force_scheduler("fetch", ["fetch-{0}".format(self.name)]))
            schedulers.append(_force_scheduler("build", comp_builders))

        return schedulers

    def getGlobalBuilders(self):
        """Return the global (non per-platform) builders for this build.

        :return: list with the fetch builder and, when ``self.nightly``
            is set, the nightly trigger builder.
        """
        ret = []

        # Fetch builder: refresh the shared checkout, then fan out to
        # the per-platform compile builders via the Triggerable.
        f = factory.BuildFactory()
        f.useProgress = False
        f.addStep(
            Git(
                mode="incremental",
                workdir=".",
                repourl=self.giturl,
                branch=self.branch,
                locks=[self.lock_src.access("exclusive")],
            ))
        # Apply local patches (if any) on top of the checkout.
        # Truthiness test instead of the original `if len(self.PATCHES):`.
        if self.PATCHES:
            f.addStep(
                steps.Patch(
                    patches=self.PATCHES,
                    workdir=".",
                    locks=[self.lock_src.access("exclusive")],
                ))
        if self.nightly is not None:
            # Trigger nightly scheduler to let it know the source stamp
            f.addStep(
                Trigger(name="Updating source stamp",
                        hideStepIf=(lambda r, s: r == results.SUCCESS),
                        schedulerNames=["nightly-{0}".format(self.name)]))
        f.addStep(
            Trigger(name="Building all platforms",
                    schedulerNames=[self.name],
                    copy_properties=['got_revision', 'clean', 'package'],
                    updateSourceStamp=True,
                    waitForFinish=True))

        ret.append(
            BuilderConfig(
                name="fetch-{0}".format(self.name),
                # This is specific
                workername='fetcher',
                workerbuilddir="/data/src/{0}".format(self.name),
                factory=f,
                tags=["fetch"],
            ))

        # Nightly builder: re-triggers the compile builders with clean
        # and package forced on.
        if self.nightly is not None:
            f = factory.BuildFactory()
            f.addStep(
                Trigger(schedulerNames=[self.name],
                        copy_properties=['got_revision'],
                        updateSourceStamp=True,
                        waitForFinish=True,
                        set_properties={
                            'clean': True,
                            'package': True
                        }))

            ret.append(
                BuilderConfig(
                    name="nightly-{0}".format(self.name),
                    # TODO: Fix this
                    workername='fetcher',
                    workerbuilddir="/data/triggers/nightly-{0}".format(
                        self.name),
                    factory=f,
                    tags=["nightly"],
                    locks=[self.lock_src.access("counting")]))

        return ret