def run(self):
    """Queue archive, upload and repo-sync steps after the current step.

    The worker tars up its flatpak-builder state and repo, both tarballs
    are uploaded to the master, and a master-side script syncs the repo.
    Always returns SUCCESS; the queued steps do the real work.
    """
    archive = steps.ShellSequence(
        name='archive',
        haltOnFailure=True,
        logEnviron=False,
        commands=[
            util.ShellArg(command=['tar', 'cf', 'state-dir.tar', '.flatpak-builder'], logfile='stdio'),
            util.ShellArg(command=['tar', 'cf', 'repo.tar', 'repo'], logfile='stdio'),
        ],
    )
    uploads = [
        steps.FileUpload(
            name='upload %s' % tarball,
            haltOnFailure=True,
            workersrc=tarball,
            masterdest='flatpak/' + tarball,
        )
        for tarball in ('state-dir.tar', 'repo.tar')
    ]
    sync = steps.MasterShellCommand(
        name='sync repo',
        haltOnFailure=True,
        logEnviron=False,
        command=['./scripts/flatpak-repo.sh'],
    )
    self.build.addStepsAfterCurrentStep([archive] + uploads + [sync])
    return buildbot.process.results.SUCCESS
Exemple #2
0
def getBuildPipeline():
    """Assemble the docs pipeline: discover guide dirs, compress, upload, clean."""
    shared = dict(
        command='ls -d */site',
        workdir="build/docs/guides",
        haltOnFailure=True,
        flunkOnFailure=True,
    )
    compress = GenerateCompressionCommands(
        name="Determining available docs for compression", **shared)
    upload = GenerateS3Commands(
        name="Determining available docs for upload", **shared)

    updateMarkdown = steps.MasterShellCommand(
        command=util.Interpolate(
            "rm -f {{ deployed_markdown_symlink }} && ln -s {{ deployed_markdown }} {{ deployed_markdown_symlink }}"
        ),
        flunkOnFailure=True,
        name="Deploy Markdown")

    f_build = __getBasePipeline()
    for step in (compress, upload, common.getClean()):
        f_build.addStep(step)
    # f_build.addStep(updateMarkdown)  -- Markdown deployment is currently disabled.

    return f_build
 def run(self):
     """Queue master-side steps that publish this channel's flatpakref files.

     Creates the destination directory on the master, clears stale
     .flatpakref files, then uploads the freshly built ones from the
     worker.  Always returns SUCCESS; the queued steps do the real work.
     """
     self.build.addStepsAfterCurrentStep([
         steps.MasterShellCommand(
             name='create flatpakref directory',
             haltOnFailure=True,
             logEnviron=False,
             command=['mkdir', '-p', '/repo/flatpak/files/' + self.channel],
         ),
         steps.MasterShellCommand(
             name='remove old flatpakref files',
             haltOnFailure=True,
             logEnviron=False,
             # A list-form command runs without a shell, so the "*" glob
             # would be passed to rm literally and never match anything.
             # A single string is run through the shell, which expands it.
             command='rm -f /repo/flatpak/files/%s/*.flatpakref' % self.channel,
         ),
         # FileUpload does not expand wildcards in workersrc;
         # MultipleFileUpload(glob=True) is needed to pick up every
         # *.flatpakref the build produced.
         steps.MultipleFileUpload(
             name='upload flatpakref files',
             haltOnFailure=True,
             workersrcs=['%s/*.flatpakref' % self.channel],
             glob=True,
             masterdest='/repo/flatpak/files/' + self.channel,
         ),
     ])
     return buildbot.process.results.SUCCESS
Exemple #4
0
    def config_for_master_command(self, **kwargs):
        """Return a minimal master config whose single builder runs one
        MasterShellCommand constructed from *kwargs*."""
        factory = BuildFactory()
        factory.addStep(steps.MasterShellCommand(**kwargs))
        return {
            'schedulers': [
                schedulers.AnyBranchScheduler(name="sched",
                                              builderNames=["testy"])
            ],
            'builders': [
                BuilderConfig(name="testy", workernames=["local1"],
                              factory=factory)
            ],
        }
Exemple #5
0
def masterConfig():
    """Return a minimal Buildbot master config with one echo builder."""
    from buildbot.config import BuilderConfig
    from buildbot.process.factory import BuildFactory
    from buildbot.plugins import steps, schedulers

    factory = BuildFactory()
    factory.addStep(steps.MasterShellCommand(command='echo hello'))
    return {
        'schedulers': [
            schedulers.AnyBranchScheduler(name="sched",
                                          builderNames=["testy"])
        ],
        'builders': [
            BuilderConfig(name="testy", workernames=["local1"],
                          factory=factory)
        ],
    }
Exemple #6
0
    def _uploadSourceTreeQuicksyncArtifacts(self):
        """Upload the source tree quick-sync artifacts to the buildmaster.

        Queues a worker-side step that pushes the ".repo" directory
        archive and the ".git/lfs" directories archive over FTP via an
        inline lftp script, then a master-side step that repoints the
        "latest" symlink at this build's artifact directory.
        """

        # Worker side: stream a fixed lftp script on stdin; the endpoint
        # and paths are injected through the environment so the heredoc
        # body itself never changes.  textwrap.dedent + .strip() keeps the
        # heredoc terminator at column 0, as required by bash's <<EOF form.
        self.addStep(steps.ShellCommand(
            name="save repo quick-sync artifacts on buildmaster",
            description=line(
                """save the ".repo" directory archive and ".git/lfs"
                directories archive as artifacts on the buildmaster"""),
            haltOnFailure=True,
            command=["/usr/bin/env", "bash", "-c", textwrap.dedent(
                r"""
                set -e -u -o pipefail
                cat <<END_OF_LFTP_SCRIPT | lftp
                connect ${ARTIFACTS_FTP_URL}
                mkdir -p ${DESTINATION_PATH_IN_FTP}
                cd ${DESTINATION_PATH_IN_FTP}
                mput ${REPO_DIR_ARCHIVE_ARTIFACT_FILENAME} \
                     ${GIT_LFS_SUBDIRECTORIES_ARCHIVE_ARTIFACT_FILENAME}
                END_OF_LFTP_SCRIPT
                """).strip()],
            env={
                "ARTIFACTS_FTP_URL": self.buildmaster_setup.artifacts_ftp_url,
                # buildnumber_shard=True: destination is sharded per build
                # number so successive builds do not overwrite each other.
                "DESTINATION_PATH_IN_FTP": compute_artifact_path(
                    "/", "quicksync-artifacts", "buildername",
                    buildnumber_shard=True,
                ),
                "REPO_DIR_ARCHIVE_ARTIFACT_FILENAME": self.REPO_DIR_ARCHIVE_ARTIFACT_FILENAME,
                "GIT_LFS_SUBDIRECTORIES_ARCHIVE_ARTIFACT_FILENAME": self.GIT_LFS_SUBDIRECTORIES_ARCHIVE_ARTIFACT_FILENAME,
            },
        ))
        # Master side: "ln -snf" (no-dereference + force) replaces the
        # "latest" symlink even when it already points at a directory.
        self.addStep(steps.MasterShellCommand(
            name="symlink latest artifacts location on buildmaster",
            haltOnFailure=True,
            command=[
                "ln", "-snf", util.Interpolate("%(prop:buildnumber)s"),
                compute_artifact_path(
                    self.buildmaster_setup.artifacts_dir,
                    "quicksync-artifacts",
                    "buildername",
                    buildnumber_shard="latest",
                ),
            ],
        ))
Exemple #7
0
    def __init__(self):
        """Run the coverage build and publish every coverage report."""
        ZcashBaseFactory.__init__(self)

        # (worker source basename, master destination basename) pairs;
        # note test_bitcoin is deliberately published as test_zcash.
        coverage_uploads = [
            steps.DirectoryUpload(
                workersrc="./%s.coverage" % src,
                masterdest=util.Interpolate(
                    "{{ buildbot_coverage_dir }}/%(prop:buildnumber)s-"
                    + dst + ".coverage"),
                url=util.Interpolate(
                    "https://{{ buildbot_host }}/code-coverage/%(prop:buildnumber)s-"
                    + dst + ".coverage"),
            )
            for src, dst in (("zcash-gtest", "zcash-gtest"),
                             ("test_bitcoin", "test_zcash"),
                             ("total", "total"))
        ]

        self.addSteps(
            [sh('make', 'cov')]
            + coverage_uploads
            + [steps.MasterShellCommand("chmod -R 755 {{ buildbot_coverage_dir }}")])
Exemple #8
0
def getBuildPipeline():
    """Compress the site and coverage reports, push both to S3, then clean."""
    compressSite = common.compressDir(dirToCompress="target/staging",
                                      outputFile="target/site.tar.bz2")
    compressCoverage = common.compressDir(dirToCompress="target/site/cobertura",
                                          outputFile="target/coverage.tar.bz2")

    uploads = [
        common.copyAWS(
            pathFrom="target/%s.tar.bz2" % kind,
            pathTo="s3://{{ s3_public_bucket }}/builds/{{ %s_fragment }}" % frag,
            name="Upload %s report to S3" % kind)
        for kind, frag in (("site", "reports"), ("coverage", "coverage"))
    ]

    # Symlink juggling that exposes the freshly deployed reports/javadocs/
    # coverage under their stable paths (currently disabled below).
    updateSite = steps.MasterShellCommand(
        command=util.Interpolate(
        "ln -s {{ deployed_reports }}/apidocs {{ deployed_javadocs }} && \
            ln -s {{ deployed_reports }}/cobertura {{ deployed_coverage }} && \
            rm -f {{ deployed_reports_symlink }} {{ deployed_javadocs_symlink }} {{ deployed_coverage_symlink }} && \
            ln -s {{ deployed_reports }} {{ deployed_reports_symlink }} && \
            ln -s {{ deployed_javadocs }} {{ deployed_javadocs_symlink }} && \
            ln -s {{ deployed_coverage }} {{ deployed_coverage_symlink }}"),
        flunkOnFailure=True,
        name="Deploy Reports")

    f_build = __getBasePipeline()
    for step in [compressSite, compressCoverage] + uploads:
        f_build.addStep(step)
    # f_build.addStep(updateSite)  -- report deployment is currently disabled.
    f_build.addStep(common.getClean())

    return f_build
Exemple #9
0
def make_builder_config(repo_url, name, worker_name, config, lock,
                        snapshots_dir, snapshots_url, snapshots_default_max):
    """Create the BuilderConfig that fetches, configures, builds, tests and
    (when snapshot publishing is enabled) packages + publishes the project.

    Args:
        repo_url: Git repository URL to build.
        name: builder name.
        worker_name: worker the builder runs on.
        config: per-worker config mapping (warning pattern, suppressions, ...).
        lock: master lock taken exclusively for the whole build.
        snapshots_dir: master-side directory for published packages; packaging
            steps are skipped when this or snapshots_url is None.
        snapshots_url: public URL prefix matching snapshots_dir.
        snapshots_default_max: fallback for the "num_snapshots_to_keep"
            property when cleaning old snapshots.

    Returns:
        util.BuilderConfig wired to the assembled factory.
    """
    # The original used `is not "/"` -- an *identity* comparison against a
    # string literal that only works by CPython interning accident (and is
    # a SyntaxWarning since 3.8).  Compare by value instead.
    if snapshots_dir and not snapshots_dir.endswith("/"):
        snapshots_dir += "/"
    if snapshots_url and not snapshots_url.endswith("/"):
        snapshots_url += "/"

    builder = util.BuildFactory()

    builder.addStep(
        steps.SetProperties(name="Worker Config File",
                            properties=config,
                            hideStepIf=True))

    builder.addStep(
        steps.SetPropertiesFromEnv(
            variables=["WORKER_HOST", "WORKER_REPO_DIR"], hideStepIf=True))

    # TODO: use `reference` to a common volume instead? or make the builder
    # dependent on another fetch-only builder so only one builder tries to pull it?
    builder.addStep(
        steps.GitHub(repourl=repo_url,
                     workdir=Property("WORKER_REPO_DIR", None),
                     logEnviron=False,
                     getDescription={
                         "always": True,
                         "tags": True
                     }))

    # Skip the configure step below when config.mk already exists.
    builder.addStep(
        FileExistsSetProperty(name="config.mk Existence Check",
                              property="already_configured",
                              file="%s/config.mk" % builder.workdir,
                              hideStepIf=True))

    compilation_environment = Property("env", {})

    builder.addStep(
        steps.Configure(command=compute_configure,
                        env=compilation_environment,
                        doStepIf=is_not_configured))

    # Ask the worker's own Python for its CPU count; used for make -j below.
    builder.addStep(
        steps.SetPropertyFromCommand(name="Python (Worker)",
                                     property="cpu_count",
                                     command=["python", "-c", GET_CPU_COUNT],
                                     flunkOnFailure=False,
                                     warnOnFailure=True,
                                     hideStepIf=True,
                                     description="getting CPU count",
                                     descriptionDone="got CPU count"))

    # In at least Buildbot 0.9.12, warningPattern and suppressionList are not
    # renderable, so just get the properties from the config file immediately
    compiler_warning_pattern = config.get(
        "compiler_warning_pattern",
        r"^([^:]+):(\d+):(?:\d+:)? [Ww]arning: (.*)$")
    compiler_warning_extractor = steps.Compile.warnExtractFromRegexpGroups
    compiler_suppression_file = Property("compiler_suppression_file", None)
    compiler_suppression_list = config.get("compiler_suppression_list", None)

    builder.addStep(
        steps.Compile(command=["make",
                               Interpolate("-j%(prop:cpu_count:~1)s")],
                      env=compilation_environment,
                      warningPattern=compiler_warning_pattern,
                      warningExtractor=compiler_warning_extractor,
                      suppressionFile=compiler_suppression_file,
                      suppressionList=compiler_suppression_list))

    # Run "make test" when the worker can run tests, otherwise only build
    # the test runner ("make test/runner").
    builder.addStep(
        steps.Test(command=[
            "make",
            Interpolate("%(prop:can_run_tests:"
                        "#?|test|test/runner)s")
        ],
                   env=compilation_environment,
                   warningPattern=compiler_warning_pattern,
                   warningExtractor=compiler_warning_extractor,
                   suppressionFile=compiler_suppression_file,
                   suppressionList=compiler_suppression_list,
                   haltOnFailure=True,
                   flunkOnFailure=True))

    if snapshots_dir is not None and snapshots_url is not None:
        builder.addStep(
            steps.SetProperty(name="Computed By %s" % path.basename(__file__),
                              property="package_name",
                              value=compute_package_name,
                              hideStepIf=True,
                              doStepIf=should_package))
        builder.addStep(
            Package(package_name=Property("package_name"),
                    package_format=Property("package_archive_format"),
                    make_target=Property("package_make_target"),
                    package_directory=Property("package_directory", None),
                    strip_binaries=Property("package_strip_binaries", None),
                    env=compilation_environment,
                    doStepIf=should_package))

        source_path = Property("package_filename")
        target_path = Interpolate("%s%%(prop:package_filename)s" %
                                  snapshots_dir)
        target_url = Interpolate("%s%%(prop:package_filename)s" %
                                 snapshots_url)
        # This is not an ideal target link calculation since the archive format
        # in package_filename might be fixed up by the Package step, but here
        # only None is converted into tar.xz, which is not exactly the same
        target_link = Interpolate("%s%%(prop:buildername)s-latest."
                                  "%%(prop:package_archive_format:-tar.xz)s" %
                                  snapshots_dir)

        builder.addStep(
            CleaningFileUpload(name="publish",
                               workersrc=source_path,
                               masterdest=target_path,
                               url=target_url,
                               clean=True,
                               doStepIf=should_package))
        builder.addStep(
            steps.MasterShellCommand(
                name="update latest archive",
                command=["ln", "-sf", target_path, target_link],
                logEnviron=False,
                doStepIf=should_package))
        builder.addStep(
            MasterCleanSnapshots(
                name="clean old snapshots",
                workdir=snapshots_dir,
                file_prefix=Interpolate("%(prop:buildername)s-"),
                num_to_keep=Property("num_snapshots_to_keep",
                                     snapshots_default_max),
                doStepIf=should_package))

    return util.BuilderConfig(name=name,
                              workername=worker_name,
                              collapseRequests=True,
                              factory=builder,
                              nextBuild=pick_next_build,
                              locks=[lock.access("exclusive")])
Exemple #10
0
def get_package_steps(buildname, platformname, srcpath, dstpath, dsturl,
        archive_format, disttarget,
        build_data_files, platform_data_files,
        platform_built_files,
        **kwargs):
    """Build the packaging steps for one port build.

    Returns a (build_package, upload_package, link) triple: a shell
    sequence that assembles and archives the package on the worker, a
    FileUpload that pushes the archive to the master, and a
    MasterShellCommand that repoints the "-latest" symlink.  All three
    are gated on the same doPackage renderer, so they only run when both
    a revision and the "package" property are set.
    """
    # Unknown formats silently fall back to tar.bz2.
    if archive_format not in PACKAGE_FORMAT_COMMANDS:
        archive_format = "tar.bz2"
    archive_base_command = PACKAGE_FORMAT_COMMANDS.get(archive_format)

    files = []

    files += platform_built_files
    # If file is absolute or begins with a $ (environment variable) don't prepend srcpath
    if platform_data_files:
        files += [ f if (os.path.isabs(f) or f[0:1] == '$') else os.path.join(srcpath, f)
                for f in platform_data_files ]
    # dont pack up the default files if the port has its own dist target
    if not disttarget:
        files += [ os.path.join(srcpath, f) for f in build_data_files ]

    def namesFromProps(props):
        # (directory name, archive filename, symlink name) derived from
        # the build's revision property.
        return create_names(buildname, platformname, archive_format, props["revision"])

    @util.renderer
    def generateCommands(props):
        """Render the worker commands: optional dist target, stage files
        into a fresh directory, then archive it."""
        name, archive, _ = namesFromProps(props)
        archive_full_command = archive_base_command + [archive, name+"/"]

        commands = []

        if disttarget:
            commands.append(util.ShellArg(["make", disttarget],
                    logname="make {0}".format(disttarget), haltOnFailure=True))

        commands.append(util.ShellArg(["mkdir", name],
            logname="archive", haltOnFailure=True))
        # Use a string for cp to allow shell globbing
        # WARNING: files aren't surrounded with quotes to let it happen
        commands.append(util.ShellArg('cp -r ' + ' '.join(files) + ' "{0}/"'.format(name),
            logname="archive", haltOnFailure=True))
        commands.append(util.ShellArg(archive_full_command,
            logname="archive", haltOnFailure=True))

        return commands

    @util.renderer
    def generateCleanup(props):
        """Render the cleanup command: remove the staging directory."""
        name, _, _ = namesFromProps(props)

        commands = []
        commands.append(util.ShellArg(["rm", "-rf", name],
            logname="cleanup", haltOnFailure=True))
        return commands

    @util.renderer
    def doPackage(props):
        # Package only when a revision is known and packaging was requested.
        return ("revision" in props and
                "package" in props and
                props["revision"] is not None and
                bool(props["package"]))

    @util.renderer
    def getWorkerSrc(props):
        _, archive, _ = namesFromProps(props)
        return archive

    @util.renderer
    def getMasterDest(props):
        _, archive, _ = namesFromProps(props)
        return os.path.join(dstpath, archive)

    @util.renderer
    def getArchiveURL(props):
        _, archive, _ = namesFromProps(props)
        return urlp.urljoin(dsturl, archive)

    @util.renderer
    def getLinkCommand(props):
        # argv for the master-side "ln" that updates the latest symlink.
        _, archive, symlink = namesFromProps(props)
        return "ln", "-sf", archive, os.path.join(dstpath, symlink)

    build_package = CleanShellSequence(
        name = "package",
        description = "packaging",
        descriptionDone = "package",
        doStepIf = doPackage,
        haltOnFailure = True,
        flunkOnFailure = True,
        commands = generateCommands,
        cleanup = generateCleanup,
        **kwargs
    )

    # dstpath will get created by FileUpload
    upload_package = steps.FileUpload(
        name = "upload package",
        description = "uploading",
        descriptionDone = "uploaded",
        doStepIf = doPackage,
        haltOnFailure = True,
        flunkOnFailure = True,
        workersrc = getWorkerSrc,
        masterdest = getMasterDest,
        mode = 0o0644,
        url = getArchiveURL if dsturl else None)
    link = steps.MasterShellCommand(
        name = "link latest snapshot",
        description = "linking",
        descriptionDone = "linked",
        doStepIf = doPackage,
        haltOnFailure = True,
        flunkOnFailure = True,
        command = getLinkCommand,
        env = {})

    return build_package, upload_package, link
Exemple #11
0
def factory(constructicon_name, builder_name, deps, commands, upload, zip,
            unzip, url, resources):
    """Build the BuildFactory for one constructicon builder.

    NOTE(review): this file appears to be a template that is rendered
    before use -- `{{{devastator_git_state}}}`, `{{{cybertron_git_state}}}`
    and `{{{devastator_host}}}` below are placeholders, not runtime
    Python; confirm against the rendering pipeline.

    The factory records git state, checks out the repo, runs a "get"
    step, then one Compile step per entry in *commands*, and finally the
    upload/zip/unzip steps described by *upload*, *zip*, *unzip* and
    *url*.  (The `zip` parameter shadows the builtin; kept for interface
    compatibility.)
    """
    deps = sorted(deps)

    def work_dir_renderer(*suffix, **kwargs):
        # Renders the working directory, using the path separator of the
        # slave's platform (from the all_slaves table).
        @util.renderer
        def work_dir(properties):
            if kwargs.get('log', False):
                log.msg('properties are: ' +
                        pprint.pformat(properties.asDict()))
                log.msg('sourcestamps are: ' + pprint.pformat(
                    [(i.repository, i.branch, i.revision)
                     for i in properties.getBuild().getAllSourceStamps()]))
            sep = '/'
            if all_slaves[properties['slavename']].get('platform',
                                                       0) == 'windows':
                sep = '\\'
            return sep.join(('..', 'constructicons', constructicon_name,
                             constructicon_name) + suffix)

        return work_dir

    result = util.BuildFactory()

    def git_step(repo_url, work_dir, env):
        return common.sane_step(
            steps.Git,
            repourl=repo_url,
            codebase=repo_url,
            workdir=work_dir,
            mode='incremental',
            env=env,
            warnOnWarnings=False,
        )

    def extract_parameters(dict):
        # Strip parameter_prefix from matching property names; values come
        # through as 1-element sequences, hence str(j[0]).
        return {
            i[len(parameter_prefix):]: str(j[0])
            for i, j in dict.items() if i.startswith(parameter_prefix)
        }

    @util.renderer
    def env(properties):
        return extract_parameters(properties.asDict())

    def format(command):
        # Shadows the builtin `format`; renders a command template against
        # the extracted build parameters.
        @util.renderer
        def f(properties):
            return command.format(**extract_parameters(properties.asDict()))

        return f

    @util.renderer
    def get_command(properties):
        # Assemble "g <builder>[ -r codebase:rev ...]" from the build's
        # source stamps (explicit revision wins over branch).
        revisions = ''
        for i in properties.getBuild().getAllSourceStamps():
            revision = None
            if i.revision: revision = i.revision
            elif i.branch: revision = i.branch
            if revision: revisions += ' {}:{}'.format(i.codebase, revision)
        if revisions: revisions = ' -r' + revisions
        return common.constructicon_slave_go('g {}{}'.format(
            builder_name,
            revisions,
        ))

    # One shared master lock per declared resource, exclusive for the build.
    for resource in resources:
        if resource not in resource_locks:
            resource_locks[resource] = util.MasterLock(resource)
    locks = [resource_locks[i].access('exclusive') for i in resources]
    # properties, checkout and "get" steps
    result.addSteps([
        common.sane_step(
            steps.SetProperty,
            name='devastator git state',
            property='devastator_git_state',
            value={{{devastator_git_state}}},
        ),
        common.sane_step(
            steps.SetProperty,
            name='cybertron git state',
            property='cybertron_git_state',
            value={{{cybertron_git_state}}},
        ),
        common.sane_step(
            steps.SetProperty,
            name='git state',
            property='git_state',
            value=global_git_states[constructicon_name],
        ),
        git_step(global_repo_urls[constructicon_name], work_dir_renderer(),
                 env),
        common.sane_step(
            steps.ShellCommand,
            name='get',
            command=get_command,
            workdir=work_dir_renderer(log=True),
            env=env,
            warnOnWarnings=False,
        ),
    ])
    # one Compile step per configured command; entries are (name, meat)
    # where meat is either a plain command string or a dict with optional
    # warnings / suppress_warnings / timeout keys
    for command_i in range(len(commands)):
        kwargs = {}
        meat = commands[command_i][1]
        timeout = 5 * 60
        if type(meat) == str:
            command = meat
        else:
            command = meat['command']
            warning_pattern = '(.*warning[: ])'
            if 'warnings' in meat:
                warning_pattern = '({})'.format('|'.join(meat['warnings']))
            if 'suppress_warnings' in meat:
                warning_pattern = warning_pattern + '(?!{})'.format('|'.join(
                    meat['suppress_warnings']))
            kwargs['warningPattern'] = warning_pattern
            timeout = meat.get('timeout', timeout)
        result.addStep(
            common.sane_step(steps.Compile,
                             name=commands[command_i][0],
                             command=format(command),
                             workdir=work_dir_renderer(),
                             env=env,
                             locks=locks,
                             timeout=timeout,
                             maxTime=2 * 60 * 60,
                             **kwargs))
    # upload section
    # NOTE(review): `upload.items(True)` -- presumably a custom mapping
    # type (plain dict.items takes no argument); confirm.
    for i, j in upload.items(True):
        zip_steps = []
        upload_steps = []
        unzip_steps = []
        slave_src = i
        master_dst_extension = ''
        # zip on the slave before uploading, when requested
        if i in zip:

            # default argument i=i binds the current loop value -- the
            # renderer would otherwise see the last iteration's i
            @util.renderer
            def command(properties, i=i):
                return 'python -m zipfile -c {0}.zip {0}'.format(i)

            zip_steps.append(
                steps.ShellCommand(
                    command=command,
                    workdir=work_dir_renderer(),
                    alwaysRun=True,
                ))
            slave_src += '.zip'
            master_dst_extension = '.zip'
        # master-side destination; defaults again bind loop values
        def master_dst_function(properties,
                                j=j,
                                extension=master_dst_extension,
                                suffix=None):
            return os.path.join(
                make_full_builder_name(constructicon_name, builder_name),
                str(properties['buildnumber']) + '-constructicon',
                suffix if suffix else j + master_dst_extension)

        @util.renderer
        def master_dst_renderer(properties, f=master_dst_function):
            return f(properties)

        url_trim = 0
        if j in unzip:

            @util.renderer
            def command(properties, master_dst_function=master_dst_function):
                master_dst = master_dst_function(properties)
                unzipped = os.path.split(master_dst)[0] or '.'
                return 'python -m zipfile -e {} {}'.format(
                    master_dst, unzipped)

            unzip_steps.append(
                steps.MasterShellCommand(command=command, alwaysRun=True))
            url_trim = 4
        devastator_file_server_port = cybertron['devastator_file_server_port']
        # upload step with a URL pointing at the master's file server
        suffix = url.get(j, None)

        @util.renderer
        def url_renderer(
                properties,
                j=j,
                suffix=suffix,
                master_dst_function=master_dst_function,
                devastator_file_server_port=devastator_file_server_port,
                url_trim=url_trim):
            return ('http://{}:{}'.format({{{devastator_host}}},
                                          devastator_file_server_port) + '/' +
                    master_dst_function(properties, suffix=suffix))

        upload_steps.append(
            steps.FileUpload(
                slavesrc=slave_src,
                masterdest=master_dst_renderer,
                url=url_renderer,
                workdir=work_dir_renderer(),
                alwaysRun=True,
            ))
        # append in zip -> upload -> unzip order
        result.addSteps(zip_steps + upload_steps + unzip_steps)
    return result
Exemple #12
0
    # Publish the artifact named by the property and repoint the master's
    # "latest" symlink at it; both steps are gated on do_step_if.
    target_path = Interpolate("%s%%(prop:%s)s" %
                              (snapshots_dir, property_name))
    target_url = Interpolate("%s%%(prop:%s)s" % (snapshots_url, property_name))
    target_link = latest_link
    builder.addStep(
        CleaningFileUpload(name="publish %s" % publish_name,
                           workersrc=source_path,
                           masterdest=target_path,
                           url=target_url,
                           clean=True,
                           # 0644 (no "o") is Python-2 octal syntax and a
                           # SyntaxError on Python 3; use the 0o prefix.
                           mode=0o644,
                           doStepIf=do_step_if))
    builder.addStep(
        steps.MasterShellCommand(
            name="update latest %s" % publish_name,
            command=["ln", "-sf", target_path, target_link],
            logEnviron=False,
            hideStepIf=True,
            doStepIf=do_step_if))


def make_builder_config(repo_url, name, worker_name, config, lock,
                        snapshots_dir, snapshots_url, snapshots_default_max):
    builder = util.BuildFactory()

    builder.addStep(
        steps.SetProperties(name="Worker Config File",
                            properties=config,
                            hideStepIf=True))

    builder.addStep(
        steps.SetPropertiesFromEnv(
Exemple #13
0
    def _identifyAndSaveProducedArtifactsOntoBuildmaster(self):
        """Detect which artifact types the build produced and upload them.

        First records (in the "artifacts_produced" property) which of the
        sdks/cache/build artifact directories are non-empty on the worker,
        then, per type, uploads that directory over FTP and repoints the
        master-side "latest" symlink -- both gated on the type having been
        produced AND requested.
        """
        # Detection failure only warns; the doStepIf checks below will then
        # simply skip the uploads.
        self.addStep(steps.SetPropertyFromCommand(
            name="assert which artifact have been produced",
            property="artifacts_produced",
            command=["/usr/bin/env", "bash", "-c", textwrap.dedent(
                r"""
                set -e -u -o pipefail
                artifacts_produced=()  # which artifact have been produced

                for type in sdks cache build; do
                    if [[ -d "artifacts/${type}" &&
                          -n "$(ls -A "artifacts/${type}")" ]]; then
                        artifacts_produced+=("${type}")
                    fi
                done

                echo "${artifacts_produced[@]}"
                """).strip()],
            haltOnFailure=False,
            warnOnFailure=True,
        ))

        def is_artifact_save_necessary(artifact_type):
            # Closure capturing artifact_type; True only when the build was
            # configured to produce this type (produce_<type>_artifacts)
            # AND the detection step actually found it.
            def checker(step: BuildStep) -> bool:
                if artifact_type not in ['sdks', 'cache', 'build']:
                    raise ValueError("is_artifact_save_necessary: Unsupported artifact type {!r}".format(artifact_type))
                artifact_produced_property = "produce_{}_artifacts".format(artifact_type)
                #print("DEBUG: {!r} property = {!r}".format(
                #    artifact_produced_property, step.getProperty(artifact_produced_property)))
                #print("DEBUG: {!r} property .split() = {!r}".format(
                #    "artifacts_produced", str(step.getProperty("artifacts_produced")).split()))
                #print("DEBUG: my boolean evaluation = {!r}".format(
                #    bool(step.getProperty(artifact_produced_property)) and
                #    (artifact_type in
                #     str(step.getProperty("artifacts_produced")).split())
                #))
                return (
                    bool(step.getProperty(artifact_produced_property)) and
                    (artifact_type in
                     str(step.getProperty("artifacts_produced")).split())
                )
            return checker

        for artifact_type in ['sdks', 'cache', 'build']:
            # Step names are truncated to 50 characters -- presumably to
            # satisfy Buildbot's step-name length limit; confirm.
            self.addStep(steps.ShellCommand(
                name="save {} artifact on buildmaster".format(artifact_type)[:50],
                description="save the {} artifact archive on the buildmaster".format(artifact_type),
                haltOnFailure=True,
                doStepIf=is_artifact_save_necessary(artifact_type),
                # Fixed lftp heredoc; endpoint and paths are injected via
                # the environment.  dedent + strip keeps the heredoc
                # terminator at column 0 as bash's <<EOF form requires.
                command=["/usr/bin/env", "bash", "-c", textwrap.dedent(
                    r"""
                    set -e -u -o pipefail
                    cat <<END_OF_LFTP_SCRIPT | lftp
                    connect ${ARTIFACTS_FTP_URL}
                    lcd ${SOURCE_PATH_ON_WORKER}
                    mkdir -p ${DESTINATION_PATH_IN_FTP}
                    cd ${DESTINATION_PATH_IN_FTP}
                    mput *
                    END_OF_LFTP_SCRIPT
                    """).strip()],
                env={
                    "ARTIFACTS_FTP_URL": self.buildmaster_setup.artifacts_ftp_url,
                    "SOURCE_PATH_ON_WORKER": 'artifacts/{}'.format(artifact_type),
                    # buildnumber_shard=True: per-build destination so
                    # successive builds do not overwrite each other.
                    "DESTINATION_PATH_IN_FTP": compute_artifact_path(
                        "/", artifact_type, "buildername",
                        buildnumber_shard=True,
                    ),
                },
            ))
            # "ln -snf" replaces the "latest" symlink even when it already
            # points at a directory.
            self.addStep(steps.MasterShellCommand(
                name="symlink latest {} artifacts".format(artifact_type)[:50],
                haltOnFailure=True,
                doStepIf=is_artifact_save_necessary(artifact_type),
                command=[
                    "ln", "-snf", util.Interpolate("%(prop:buildnumber)s"),
                    compute_artifact_path(
                        self.buildmaster_setup.artifacts_dir,
                        artifact_type,
                        "buildername",
                        buildnumber_shard="latest",
                    ),
                ],
            ))