Example #1
def build_coverage():
    remove_build = steps.RemoveDirectory("build")
    create_build = steps.MakeDirectory("build")
    cmake_step = steps.CMake(path=util.Property("src_dir"),
                             definitions=util.Property("cmake_defs", {}),
                             options=util.Property("cmake_opts", []),
                             workdir="build",
                             env=env)

    @util.renderer
    def join_make_opts(props):
        make_opts = props.getProperty("make_opts", [])
        return ["make"] + make_opts

    make_step = steps.Compile(command=join_make_opts, workdir="build", env=env)

    test_coverage = steps.ShellCommand(command=["make", "coverage"],
                                       workdir="build")

    upload_coverage_data = steps.ShellCommand(command=[
        "bash", "-c",
        util.Interpolate("bash <(curl -s https://codecov.io/bash) -t " +
                         tokens.codecovToken +
                         " -C %(prop:revision)s -f coverage.info.cleaned")
    ],
                                              workdir="build")

    factory = util.BuildFactory()
    factory.addStep(remove_build)
    factory.addStep(create_build)
    factory.addStep(cmake_step)
    factory.addStep(make_step)
    factory.addStep(test_coverage)
    factory.addStep(upload_coverage_data)
    return factory
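A factory like the one above still has to be attached to a builder in master.cfg. The following is a minimal, hypothetical wiring sketch; the builder and worker names are placeholders and not part of the example itself.

from buildbot.config import BuilderConfig
from buildbot.plugins import worker

# Hypothetical wiring; builder and worker names are placeholders, and
# schedulers/protocols are omitted for brevity.
c = BuildmasterConfig = {}
c['workers'] = [worker.Worker("coverage-worker", "password")]
c['builders'] = [
    BuilderConfig(name="coverage",
                  workernames=["coverage-worker"],
                  factory=build_coverage()),
]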
Example #2
def executeScript(name, script, args=(), haltOnFailure=True, flunkOnFailure=True, alwaysRun=False, env={}, **kwargs):
    """Download the executable script onto the worker and execute it"""
    shellScriptPath = util.Interpolate("%(prop:builddir)s/scripts/%(kw:fileName)s",
                                       fileName=createScriptFileName(name))

    kwargs["haltOnFailure"] = haltOnFailure
    kwargs["flunkOnFailure"] = flunkOnFailure
    kwargs["alwaysRun"] = alwaysRun

    downloadScript = steps.StringDownload(
        script,
        workerdest=shellScriptPath,
        name="Download script to the worker: {}".format(name),
        mode=0o755,
        **kwargs)

    runScript = steps.ShellCommand(
        name="Execute script: {}".format(name),
        command=[shellScriptPath, *args],
        env=env,
        timeout=1800,
        **kwargs)

    kwargs["alwaysRun"] = True

    removeScript = steps.ShellCommand(
        name="Remove script from worker: {}".format(name),
        command=["rm", "-f", shellScriptPath],
        **kwargs)

    return [downloadScript, runScript, removeScript]
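A hedged usage sketch for the helper above; the factory, the script body, and the environment are illustrative and assume the same steps/util imports as the example (createScriptFileName comes from the surrounding project).

factory = util.BuildFactory()
factory.addSteps(executeScript(
    "install dependencies",                            # used to derive the script file name
    "#!/bin/bash\nset -e\napt-get install -y cmake",   # script body downloaded to the worker
    env={"DEBIAN_FRONTEND": "noninteractive"}))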
Example #3
def getFactory():
    factory = base.getFactory()

    buildSteps = [
        steps.Git(
            repourl='https://github.com/QuasarApp/quasarAppCoin.git',
            branch=util.Interpolate('%(prop:Branch)s'),
            mode='incremental',
            submodules=True
        ),
        steps.ShellCommand(
            command=['qmake'],
        ),
        steps.ShellCommand(
            command=['make', 'deploy'],
        ),
        steps.CopyDirectory(
            src="build/Distro",
            dest="~/shared/quasarAppCoin/"
        )
    ]

    factory.addSteps(buildSteps)

    return factory
Example #4
    def __init__(self, channel, options, *args, **kwargs):
        channel_filename = 'channel-%s.yaml' % channel
        util.BuildFactory.__init__(self, *args, **kwargs)
        self.addSteps([
            steps.ShellCommand(
                name='install tools',
                haltOnFailure=True,
                logEnviron=False,
                command=['sudo', 'dnf', 'install', '-y', 'flatpak', 'flatpak-builder', 'python3-PyYAML'],
            ),
            FlatpakGPGStep(name='setup gpg keys'),
            steps.Git(
                name='checkout sources',
                codebase=util.Property('codebase'),
                repourl=util.Property('repository'),
                branch=util.Property('branch'),
                mode='incremental',
                submodules=True,
                shallow=True,
            ),
            FlatpakPullStep(name='pull from master'),
            steps.ShellCommand(
                name='build',
                haltOnFailure=True,
                command=['./flatpak-build', '--repo=repo', '--channel=' + channel_filename, '--jobs=1', '--export', '--gpg-homedir=flatpak-gpg', '--gpg-sign=' + options['gpg-key']],
            ),
            FlatpakRefStep(name='copy flatpakref files', channel=channel),
            FlatpakSyncStep(name='sync repo'),
        ])
Example #5
    def _add_pyside_setup_repo(self, factory):
        repo_name = "pyside-setup"
        repo_url = "https://github.com/ivanalejandro0/" + repo_name + ".git"
        git_branch = "master"

        venv_name = "virtualenv_wheels"
        sandbox_path = {'PATH': "../" + venv_name + '/bin' + ':${PATH}'}

        publish_pyside_wheel = self._publish_pyside_command(
            '`ls -t *.whl | head -1`')
        factory.addSteps([
            steps.ShellCommand(command=['rm', '-rf', repo_name],
                               workdir='.',
                               env=sandbox_path,
                               name="Remove previous pyside"),
            steps.Git(repourl=repo_url,
                      branch=git_branch,
                      workdir=repo_name,
                      mode='full',
                      method='clobber',
                      shallow=True,
                      haltOnFailure=True,
                      name="Pull " + repo_url),
            steps.ShellCommand(
                command=['python', 'setup.py', 'bdist_wheel', '--standalone'],
                workdir=repo_name,
                env=sandbox_path,
                name="Wheel for " + repo_name),
            steps.ShellCommand(command=publish_pyside_wheel,
                               workdir=repo_name + '/dist/',
                               name="Publish pyside")
        ])
Example #6
def masterConfig():
    c = {}
    from buildbot.config import BuilderConfig
    from buildbot.process.factory import BuildFactory
    from buildbot.plugins import schedulers, steps

    c['services'] = [FakeSecretReporter('http://example.com/hook',
                                        auth=('user', Interpolate('%(secret:httppasswd)s')))]
    c['schedulers'] = [
        schedulers.ForceScheduler(
            name="force",
            builderNames=["testy"])]

    c['secretsProviders'] = [FakeSecretStorage(
        secretdict={"foo": "bar", "something": "more", 'httppasswd': 'myhttppasswd'})]
    f = BuildFactory()
    if os.name == "posix":
        f.addStep(steps.ShellCommand(command=Interpolate(
            'echo %(secret:foo)s | sed "s/bar/The password was there/"')))
    else:
        f.addStep(steps.ShellCommand(command=Interpolate(
            'echo %(secret:foo)s')))
    c['builders'] = [
        BuilderConfig(name="testy",
                      workernames=["local1"],
                      factory=f)]
    return c
Example #7
def masterConfig(num_concurrent, extra_steps=None):
    if extra_steps is None:
        extra_steps = []
    c = {}

    c['schedulers'] = [
        schedulers.ForceScheduler(
            name="force",
            builderNames=["testy"])]
    triggereables = []
    for i in range(num_concurrent):
        c['schedulers'].append(
            schedulers.Triggerable(
                name="trigsched" + str(i),
                builderNames=["build"]))
        triggereables.append("trigsched" + str(i))

    f = BuildFactory()
    f.addStep(steps.ShellCommand(command='echo hello'))
    f.addStep(steps.Trigger(schedulerNames=triggereables,
                            waitForFinish=True,
                            updateSourceStamp=True))
    f.addStep(steps.ShellCommand(command='echo world'))
    f2 = BuildFactory()
    f2.addStep(steps.ShellCommand(command='echo ola'))
    for step in extra_steps:
        f2.addStep(step)
    c['builders'] = [
        BuilderConfig(name="testy",
                      workernames=["marathon0"],
                      factory=f),
        BuilderConfig(name="build",
                      workernames=["marathon" + str(i)
                                   for i in range(num_concurrent)],
                      factory=f2)]
    url = os.environ.get('BBTEST_MARATHON_URL')
    creds = os.environ.get('BBTEST_MARATHON_CREDS')
    if creds is not None:
        user, password = creds.split(":")
    else:
        user = password = None
    masterFQDN = os.environ.get('masterFQDN')
    marathon_extra_config = {
    }
    c['workers'] = [
        MarathonLatentWorker('marathon' + str(i), url, user, password, 'buildbot/buildbot-worker:master',
                             marathon_extra_config=marathon_extra_config,
                             masterFQDN=masterFQDN)
        for i in range(num_concurrent)
    ]
    # uncomment for debugging what happens if things look locked.
    # c['www'] = {'port': 8080}
    # if the masterFQDN is forced (proxy case), then we use 9989 default port
    # else, we try to find a free port
    if masterFQDN is not None:
        c['protocols'] = {"pb": {"port": "tcp:9989"}}
    else:
        c['protocols'] = {"pb": {"port": "tcp:0"}}

    return c
Example #8
def mkdocsfactory():
    f = util.BuildFactory()
    f.addSteps([
        gitStep,
        steps.FileDownload(mastersrc="virtualenv.py",
                           slavedest="virtualenv.py",
                           flunkOnFailure=True),
        # run docs tools in their own virtualenv, otherwise we end up
        # documenting the version of Buildbot running the metabuildbot!
        VirtualenvSetup(name='virtualenv setup',
                        no_site_packages=True,
                        virtualenv_packages=[
                            'sphinx==1.2.2', '--editable=master',
                            '--editable=slave'
                        ],
                        virtualenv_dir='sandbox',
                        haltOnFailure=True),
        # manual
        steps.ShellCommand(command=util.Interpolate(
            textwrap.dedent("""\
            source sandbox/bin/activate &&
            make docs
            """)),
                           name="create docs"),
        steps.ShellCommand(command=textwrap.dedent("""\
            export VERSION=latest &&
            tar -C /home/buildbot/www/buildbot.net/buildbot/docs -zvxf master/docs/docs.tgz &&
            chmod -R a+rx /home/buildbot/www/buildbot.net/buildbot/docs/latest &&
            find /home/buildbot/www/buildbot.net/buildbot/docs/latest -name '*.html' | xargs python /home/buildbot/www/buildbot.net/buildbot/add-tracking.py
            """),
                           name="docs to web",
                           flunkOnFailure=True,
                           haltOnFailure=True)
    ])
    return f
Example #9
def masterConfig():
    c = {}
    from buildbot.config import BuilderConfig
    from buildbot.process.factory import BuildFactory
    from buildbot.plugins import steps, schedulers

    c['schedulers'] = [
        schedulers.Triggerable(name="trigsched", builderNames=["build"]),
        schedulers.AnyBranchScheduler(name="sched", builderNames=["testy"])
    ]

    f = BuildFactory()
    f.addStep(steps.ShellCommand(command='echo hello'))
    f.addStep(
        steps.Trigger(schedulerNames=['trigsched'],
                      waitForFinish=True,
                      updateSourceStamp=True))
    f.addStep(steps.ShellCommand(command='echo world'))
    f2 = BuildFactory()
    f2.addStep(steps.ShellCommand(command='echo ola'))
    c['builders'] = [
        BuilderConfig(name="testy", workernames=["local1"], factory=f),
        BuilderConfig(name="build", workernames=["local1"], factory=f2)
    ]
    return c
Example #10
def createBuildSteps():
    buildSteps = []
    buildSteps.extend(common.configureMdbciVmPathProperty())
    buildSteps.append(steps.SetProperties(properties=configureBuildProperties))
    buildSteps.extend(common.cloneRepository())
    buildSteps.append(steps.ShellCommand(
        name="Build MaxScale using MDBCI",
        command=['/bin/bash', '-c', 'BUILD/mdbci/build.sh || BUILD/mdbci/build.sh'],
        timeout=3600,
        workdir=util.Interpolate("%(prop:builddir)s/build")
    ))
    buildSteps.extend(common.destroyVirtualMachine())
    buildSteps.append(common.runSshCommand(
        name="Make dir for build results on the repo server",
        host=util.Property("upload_server"),
        command=["mkdir", "-p", util.Interpolate(constants.UPLOAD_PATH + '/%(prop:target)s')],
    ))
    buildSteps.append(common.rsyncViaSsh(
        name="Rsync builds results to the repo server",
        local=util.Interpolate("%(prop:builddir)s/repository/%(prop:target)s/mariadb-maxscale/"),
        remote=util.Interpolate("%(prop:upload_server)s:" + constants.UPLOAD_PATH + "/%(prop:target)s/")
    ))
    buildSteps.append(common.generateMdbciRepositoryForTarget())
    buildSteps.extend(common.syncRepod())
    buildSteps.append(steps.ShellCommand(
        name="Upgrade test",
        command=['BUILD/mdbci/upgrade_test.sh'],
        timeout=1800,
        doStepIf=(util.Property('run_upgrade_test') == 'yes'),
        workdir=util.Interpolate("%(prop:builddir)s/build")
    ))
    buildSteps.extend(common.cleanBuildDir())
    return buildSteps
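The run_upgrade_test property consulted by the doStepIf above has to be supplied by whatever triggers the build; one hedged way to do that is a ForceScheduler parameter. The scheduler name, builder name, and default value below are placeholders.

from buildbot.plugins import schedulers, util

# Hypothetical scheduler wiring; names and defaults are placeholders.
force = schedulers.ForceScheduler(
    name="force-maxscale-build",
    builderNames=["build_all"],
    properties=[
        util.ChoiceStringParameter(name="run_upgrade_test",
                                   choices=["yes", "no"],
                                   default="no"),
    ])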
Example #11
    def init_trigger_factory(self, build_specification, props):
        trigger_factory = self.factory_with_deploying_infrastructure_step(props)
        worker_os = props['os']
        get_path = bb.utils.get_path_on_os(worker_os)

        repository_name = bb.utils.get_repository_name_by_url(props['repository'])

        trigger_factory.extend([
            steps.ShellCommand(
                name='create manifest',
                command=[self.run_command[worker_os], 'manifest_runner.py',
                         '--root-dir',
                         util.Interpolate(get_path(r'%(prop:builddir)s/repositories')),
                         '--repo', repository_name,
                         '--branch', util.Interpolate('%(prop:branch)s'),
                         '--revision', util.Interpolate('%(prop:revision)s'),
                         '--build-event', props['event_type'],
                         '--commit-time', buildbot_utils.get_event_creation_time] +
                        (['--target-branch', props['target_branch']] if props.hasProperty('target_branch') else []),
                workdir=get_path(r'infrastructure/build_scripts'))])

        # TODO: List of repos should be imported from config
        if props['event_type'] == 'pre_commit' and repository_name in ['MediaSDK', 'infrastructure',
                                                                       'product-configs']:
            trigger_factory.extend([
                steps.ShellCommand(
                    name='check author name and email',
                    command=[self.run_command[worker_os], 'check_author.py',
                             '--repo-path',
                             util.Interpolate(
                                 get_path(rf'%(prop:builddir)s/repositories/{repository_name}')),
                             '--revision', util.Interpolate('%(prop:revision)s')],
                    workdir=get_path(r'infrastructure/pre_commit_checks'))])

        return trigger_factory
Example #12
def masterConfig(use_with=False):
    c = {}
    from buildbot.config import BuilderConfig
    from buildbot.process.factory import BuildFactory
    from buildbot.plugins import schedulers, steps

    c['schedulers'] = [
        schedulers.ForceScheduler(name="force", builderNames=["testy"])
    ]

    c['secretsProviders'] = [
        FakeSecretStorage(secretdict={
            "foo": "bar",
            "something": "more"
        })
    ]
    f = BuildFactory()
    if use_with:
        secrets_list = [("pathA", Interpolate('%(secret:something)s'))]
        with f.withSecrets(secrets_list):
            f.addStep(
                steps.ShellCommand(command=Interpolate('echo %(secret:foo)s')))
    else:
        f.addSteps(
            [steps.ShellCommand(command=Interpolate('echo %(secret:foo)s'))],
            withSecrets=[("pathA", Interpolate('%(secret:something)s'))])
    c['builders'] = [
        BuilderConfig(name="testy", workernames=["local1"], factory=f)
    ]
    return c
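Outside of tests, the FakeSecretStorage provider above would be replaced with a real provider; a hedged sketch using Buildbot's directory-backed provider, where the directory path is a placeholder.

from buildbot.plugins import secrets

# Hypothetical provider; each secret ("foo", "something", ...) would live in a
# file of the same name inside this directory.
c['secretsProviders'] = [secrets.SecretInAFile(dirname="/var/lib/buildbot/secrets")]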
Example #13
def linuxSteps():

    buildSteps = [
        steps.ShellCommand(
            command = ['git', 'clean', '-xdf'],
            doStepIf = lambda step : isLinux(step),
            name = 'clean for Linux',
            description = 'clean old build data',
        ),
        steps.ShellCommand(
            command = ['git', 'submodule', 'foreach', '--recursive', 'git', 'clean', '-xdf'],
            doStepIf = lambda step : isLinux(step),
            name = 'clean submodule for Linux',
            description = 'clean submodule old build data',
        ),
        steps.ShellCommand(
            command = getLinuxConfigOptions,
            haltOnFailure = True,
            doStepIf = lambda step : isLinux(step),
            name = 'configure Linux',
            description = 'create makefiles for the project',
        ),
        steps.Compile(
            command = base.makeCommand,
            name = 'Build Qt for Linux',
            haltOnFailure = True,
            doStepIf = lambda step : isLinux(step),

            description = 'run make for project',
        ),

        steps.Compile(
            command = ['make', 'install', '-j2'],
            name = 'Install Qt for Linux',
            haltOnFailure = True,
            doStepIf = lambda step : isLinux(step),

            description = 'run make for project',
        ),

        steps.ShellCommand(
            command = cpIcuLinux,
            haltOnFailure = True,
            doStepIf = lambda step : isLinux(step),
            name = 'Copy ICU libs for Linux',
            description = 'Copy extra libs',
        ),

        steps.ShellCommand(
            command = lsLinux,
            haltOnFailure = True,
            doStepIf = lambda step : isLinux(step),
            name = 'Create ls links for Linux',
            description = 'deploy qt',
        ),

    ]

    return buildSteps
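The isLinux predicate used by the doStepIf callables above is defined elsewhere in that configuration; a minimal sketch of what such a predicate could look like, assuming the builds carry an 'os' property (the property name and value are assumptions).

def isLinux(step):
    # Hypothetical predicate: run the step only on builds whose 'os' property is 'linux'.
    return step.getProperty('os') == 'linux'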
Example #14
def LinuxSteps():

    list = [
        steps.ShellCommand(
            command=[
                'qmake-linux', "QMAKE_CXX='ccache g++'", "-r",
                "CONFIG+=qtquickcompiler", 'ONLINE="~/repo"'
            ],
            haltOnFailure=True,
            doStepIf=lambda step: isLinux(step),
            name='QMake Linux',
            description='create makefiles for the project',
        ),
        steps.ShellCommand(
            command=['make', 'clean'],
            doStepIf=lambda step: isClean(step) and isLinux(step),
            name='clean Linux',
            description='clean old build data',
        ),
        steps.Compile(
            command=base.makeCommand,
            name='Build Linux',
            doStepIf=lambda step: isLinux(step),
            haltOnFailure=True,
            description='run make for project',
        ),
        steps.ShellCommand(
            command=['make', 'deploy'],
            doStepIf=lambda step: isDeploy(step) and isLinux(step),
            name='deploy Linux',
            haltOnFailure=True,
            description='deploy project ',
        ),
        steps.Compile(
            command=['make', 'test'],
            doStepIf=lambda step: isTest(step) and isLinux(step),
            name='tests ',
            haltOnFailure=True,
            description='run autotests of project',
        ),
        steps.ShellCommand(
            command=['make', 'release'],
            doStepIf=lambda step: isRelease(step) and isLinux(step),
            name='release Linux',
            haltOnFailure=True,
            description=
            'release project, like push to store or online repository',
        ),
        steps.ShellCommand(
            command=['make', 'distclean'],
            doStepIf=lambda step: isLinux(step),
            name='clean Linux makefiles',
            description='clean old makefiles  ',
        ),
    ]

    return list
Example #15
def masterConfig(num_concurrent, extra_steps=None):
    if extra_steps is None:
        extra_steps = []
    c = {}

    c['schedulers'] = [
        schedulers.ForceScheduler(name="force", builderNames=["testy"])
    ]
    triggereables = []
    for i in range(num_concurrent):
        c['schedulers'].append(
            schedulers.Triggerable(name="trigsched" + str(i),
                                   builderNames=["build"]))
        triggereables.append("trigsched" + str(i))

    f = BuildFactory()
    f.addStep(steps.ShellCommand(command='echo hello'))
    f.addStep(
        steps.Trigger(schedulerNames=triggereables,
                      waitForFinish=True,
                      updateSourceStamp=True))
    f.addStep(steps.ShellCommand(command='echo world'))
    f2 = BuildFactory()
    f2.addStep(steps.ShellCommand(command='echo ola'))
    for step in extra_steps:
        f2.addStep(step)
    c['builders'] = [
        BuilderConfig(name="testy", workernames=["hyper0"], factory=f),
        BuilderConfig(
            name="build",
            workernames=["hyper" + str(i) for i in range(num_concurrent)],
            factory=f2)
    ]
    hyperconfig = workerhyper.Hyper.guess_config()
    if isinstance(hyperconfig, string_types):
        hyperconfig = json.load(open(hyperconfig))
    hyperhost, hyperconfig = list(hyperconfig['clouds'].items())[0]
    masterFQDN = os.environ.get('masterFQDN')
    c['workers'] = [
        HyperLatentWorker('hyper' + str(i),
                          'passwd',
                          hyperhost,
                          hyperconfig['accesskey'],
                          hyperconfig['secretkey'],
                          'buildbot/buildbot-worker:master',
                          masterFQDN=masterFQDN) for i in range(num_concurrent)
    ]
    # uncomment for debugging what happens if things look locked.
    # c['www'] = {'port': 8080}
    # if the masterFQDN is forced (proxy case), then we use 9989 default port
    # else, we try to find a free port
    if masterFQDN is not None:
        c['protocols'] = {"pb": {"port": "tcp:9989"}}
    else:
        c['protocols'] = {"pb": {"port": "tcp:0"}}

    return c
Example #16
def _make_factory():
    f = util.BuildFactory()

    # Sync git
    f.addStep(
        steps.Git(repourl="https://github.com/klaital/wwdice",
                  method='clobber',
                  mode='full',
                  shallow=True,
                  haltOnFailure=True,
                  name='git sync'))

    version_specifier = util.Interpolate(
        'VERSION=%(prop:branch)s-%(prop:buildnumber)s')

    # Build binary
    f.addStep(
        steps.ShellCommand(
            name='compile binary',
            command=['make', 'wwdicebot', version_specifier],
            env={'GOOS': 'linux'},
            haltOnFailure=True,
        ))

    # Run tests
    f.addStep(
        steps.ShellCommand(
            name='run tests',
            command=['make', 'test'],
            haltOnFailure=True,
        ))

    # TODO: Run linters

    # Build Docker image
    f.addStep(
        steps.ShellCommand(
            name='build and push docker image',
            command=['make', 'wwdicebot-push'],
            haltOnFailure=True,
        ))

    # Update k8s deployment
    f.addStep(
        steps.ShellCommand(name='push to home cluster',
                           command=[
                               'kubectl', '--kubeconfig',
                               'wwdicebot_kubeconfig', 'apply', '-f',
                               'cmd/wwdicebot/k8s.yaml'
                           ],
                           haltOnFailure=True,
                           doStepIf=_is_deploy_branch))

    # TODO: add liveness check to see if the new version is actually deployed and reachable

    return f
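The _is_deploy_branch callable passed to doStepIf above is not shown; a minimal sketch under the assumption that deployment should only happen for the default branch (the branch names are placeholders).

def _is_deploy_branch(step):
    # Hypothetical predicate: deploy only builds of the default branch.
    return step.getProperty('branch') in ('master', 'main')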
Example #17
def masterConfig():
    c = {}
    from buildbot.config import BuilderConfig
    from buildbot.process.factory import BuildFactory
    from buildbot.plugins import steps, schedulers

    c['schedulers'] = [
        schedulers.AnyBranchScheduler(name="sched", builderNames=["testy"])
    ]
    triggereables = []
    for i in range(NUM_CONCURRENT):
        c['schedulers'].append(
            schedulers.Triggerable(name="trigsched" + str(i),
                                   builderNames=["build"]))
        triggereables.append("trigsched" + str(i))

    f = BuildFactory()
    f.addStep(steps.ShellCommand(command='echo hello'))
    f.addStep(
        steps.Trigger(schedulerNames=triggereables,
                      waitForFinish=True,
                      updateSourceStamp=True))
    f.addStep(steps.ShellCommand(command='echo world'))
    f2 = BuildFactory()
    f2.addStep(steps.ShellCommand(command='echo ola'))
    c['builders'] = [
        BuilderConfig(name="testy", workernames=["hyper0"], factory=f),
        BuilderConfig(
            name="build",
            workernames=["hyper" + str(i) for i in range(NUM_CONCURRENT)],
            factory=f2)
    ]
    hyperconfig = workerhyper.Hyper.guess_config()
    if isinstance(hyperconfig, string_types):
        hyperconfig = json.load(open(hyperconfig))
    hyperhost, hyperconfig = list(hyperconfig['clouds'].items())[0]
    masterFQDN = os.environ.get('masterFQDN')
    c['workers'] = [
        HyperLatentWorker('hyper' + str(i),
                          'passwd',
                          hyperhost,
                          hyperconfig['accesskey'],
                          hyperconfig['secretkey'],
                          'buildbot/buildbot-worker:master',
                          masterFQDN=masterFQDN) for i in range(NUM_CONCURRENT)
    ]
    # if the masterFQDN is forced (proxy case), then we use 9989 default port
    # else, we try to find a free port
    if masterFQDN is not None:
        c['protocols'] = {"pb": {"port": "tcp:9989"}}
    else:
        c['protocols'] = {"pb": {"port": "tcp:0"}}

    return c
Example #18
def WinSteps():
    list = [
        steps.ShellCommand(
            command=[
                'qmake-windows', '-spec', 'win32-g++',
                "QMAKE_CXX='ccache x86_64-w64-mingw32-g++'", "-r",
                "CONFIG+=qtquickcompiler", 'ONLINE="~/repo"'
            ],
            name='QMake Windows',
            haltOnFailure=True,
            doStepIf=lambda step: isWin(step),
            description='create makefiles for the project',
        ),
        steps.ShellCommand(
            command=['make', 'clean'],
            doStepIf=lambda step: isClean(step) and isWin(step),
            name='clean Windows',
            description='clean old build data',
        ),
        steps.Compile(
            command=base.makeCommand,
            name='Build Windows',
            haltOnFailure=True,
            doStepIf=lambda step: isWin(step),
            description='run make for project',
        ),
        steps.ShellCommand(
            command=['make', 'deploy'],
            doStepIf=lambda step: isDeploy(step) and isWin(step),
            name='deploy Windows',
            haltOnFailure=True,
            description='deploy project ',
        ),
        steps.ShellCommand(
            command=['make', 'release'],
            doStepIf=lambda step: isRelease(step) and isWin(step),
            name='release Windows',
            haltOnFailure=True,
            description=
            'release project, like push to store or online repository',
        ),
        steps.ShellCommand(
            command=['make', 'distclean'],
            doStepIf=lambda step: isWin(step),
            name='clean Windows makefiles',
            description='clean old makefiles  ',
        ),
    ]
    return list
Example #19
def masterConfig(num_concurrent, extra_steps=None):
    if extra_steps is None:
        extra_steps = []
    c = {}

    c['schedulers'] = [
        schedulers.ForceScheduler(name="force", builderNames=["testy"])
    ]
    triggereables = []
    for i in range(num_concurrent):
        c['schedulers'].append(
            schedulers.Triggerable(
                name="trigsched" + str(i), builderNames=["build"]))
        triggereables.append("trigsched" + str(i))

    f = BuildFactory()
    f.addStep(steps.ShellCommand(command='echo hello'))
    f.addStep(
        steps.Trigger(
            schedulerNames=triggereables,
            waitForFinish=True,
            updateSourceStamp=True))
    f.addStep(steps.ShellCommand(command='echo world'))
    f2 = BuildFactory()
    f2.addStep(steps.ShellCommand(command='echo ola'))
    for step in extra_steps:
        f2.addStep(step)
    c['builders'] = [
        BuilderConfig(name="testy", workernames=["kubernetes0"], factory=f),
        BuilderConfig(
            name="build",
            workernames=["kubernetes" + str(i) for i in range(num_concurrent)],
            factory=f2)
    ]
    masterFQDN = os.environ.get('masterFQDN')
    c['workers'] = [
        kubernetes.KubeLatentWorker(
            'kubernetes' + str(i),
            'buildbot/buildbot-worker',
            kube_config=kubeclientservice.KubeCtlProxyConfigLoader(
                namespace=os.getenv("KUBE_NAMESPACE", "default")),
            masterFQDN=masterFQDN) for i in range(num_concurrent)
    ]
    # uncomment for debugging what happens if things look locked.
    # c['www'] = {'port': 8080}
    c['protocols'] = {"pb": {"port": "tcp:9989"}}

    return c
Example #20
def steps_boot(builder_name, target, config, run_pm_tests=False):
    st = []

    st.append(
        steps.ShellCommand(command=[
            'rm', '-fr', 'lib', 'deploy-modules-out.tar.gz',
            'initramfs-odroidxu3.img'
        ],
                           name='Remove old binaries'))
    st = st + steps_download(target)

    st.append(
        steps.ShellCommand(command=[
            '/opt/tools/buildbot/build-slave-deploy.sh', target, config,
            util.Property('revision'), 'modules-tmp'
        ],
                           haltOnFailure=True,
                           name='Deploy binaries on the server for booting'))
    st.append(
        steps.SetPropertyFromCommand(command='ls modules-tmp/lib/modules',
                                     property='kernel_version',
                                     haltOnFailure=True))

    st.append(step_serial_open(target))

    st.append(step_gracefull_shutdown(target, config, halt_on_failure=False))

    st.append(step_boot_to_prompt(target, config))
    st.append(step_test_ping(target, config))
    st.append(step_test_uname(target, config))
    st.append(step_test_dmesg_errors(target, config))
    st.append(step_test_dmesg_warnings(target, config))

    st = st + steps_test_suite_fast(target, config)
    st = st + steps_test_suite_slow(target, config)

    # After all the tests check again if ping and SSH are working:
    st.append(step_test_ping(target, config))
    st.append(step_test_uname(target, config))

    # Test reboot
    st.append(step_test_reboot(target, config))
    st.append(step_test_ping(target, config))
    st.append(step_test_uname(target, config))

    st = st + steps_shutdown(target, config)

    return st
Example #21
def build_coverity():
    remove_build = steps.RemoveDirectory("build")
    remove_src = steps.RemoveDirectory("src")
    create_build = steps.MakeDirectory("build")
    download_src_archive = steps.FileDownload(
        mastersrc=util.Property("src_archive"),
        workerdest="src.tar.xz",
        workdir="src")
    extract_src_archive = steps.ShellCommand(
        name="Extract source archive",
        command=["tar", "xJf", "src.tar.xz"],
        workdir="src")
    cmake_step = steps.CMake(path="../src/",
                             definitions=util.Property("cmake_defs", {}),
                             options=util.Property("cmake_opts", []),
                             workdir="build",
                             env=env)

    make_step = steps.Compile(command=[
        "cov-build", "--dir", "cov-int", "make", "-j", "16", "-l", "32"
    ],
                              workdir="build",
                              env=env)

    compress = steps.ShellCommand(
        command=["tar", "czvf", "gnuradio.tgz", "cov-int"], workdir="build")

    upload = steps.ShellCommand(command=[
        "curl", "--form", "token=" + tokens.coverityToken, "--form",
        "[email protected]", "--form", "[email protected]", "--form",
        util.Interpolate("version=%(prop:revision)s"), "--form",
        util.Interpolate(
            "description=\"Weekly Buildbot submission for %(prop:branch)s branch \""
        ), "https://scan.coverity.com/builds?project=GNURadio"
    ],
                                workdir="build")

    factory = util.BuildFactory()
    factory.addStep(remove_build)
    factory.addStep(remove_src)
    factory.addStep(create_build)
    factory.addStep(download_src_archive)
    factory.addStep(extract_src_archive)
    factory.addStep(cmake_step)
    factory.addStep(make_step)
    factory.addStep(compress)
    factory.addStep(upload)
    return factory
Example #22
    def __init__(self, builder_name, environment):
        self.environment = environment
        try:
            config_dir = os.path.dirname(os.path.realpath(__file__))
            yaml_path = os.path.join(config_dir, 'steps.yml')
            with open(yaml_path) as steps_file:
                builder_steps = yaml.safe_load(steps_file)
            commands = builder_steps[builder_name]
            dynamic_steps = [self.make_step(command) for command in commands]
        except Exception as e:  # Bad step configuration, fail build
            print(str(e))
            dynamic_steps = [BadConfigurationStep(e)]

        # TODO: windows compatibility (use a custom script for this?)
        pkill_step = [
            steps.ShellCommand(command=["pkill", "-x", "servo"],
                               decodeRC={
                                   0: SUCCESS,
                                   1: SUCCESS
                               })
        ]

        # util.BuildFactory is an old-style class so we cannot use super()
        # but must hardcode the superclass here
        ServoFactory.__init__(self, pkill_step + dynamic_steps)
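make_step and the layout of steps.yml come from the surrounding Servo configuration and are not shown here; a hedged sketch of one plausible shape, assuming steps.yml maps each builder name to a list of shell command strings.

    def make_step(self, command):
        # Hypothetical helper: turn one command string from steps.yml into a
        # shell step; the real implementation may differ.
        return steps.ShellCommand(command=command.split(),
                                  haltOnFailure=True)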
Example #23
    def run(self):
        try:
            print_yaml_cmd = "cat {}".format(self.yaml_path)
            cmd = yield self.makeRemoteShellCommand(command=[print_yaml_cmd],
                                                    collectStdout=True)
            yield self.runCommand(cmd)

            result = cmd.results()
            if result != util.SUCCESS:
                raise Exception("Command failed with return code: {}".format(
                    str(cmd.rc)))
            else:
                builder_steps = yaml.safe_load(cmd.stdout)
                commands = builder_steps[self.builder_name]
                dynamic_steps = [
                    self.make_step(command) for command in commands
                ]
        except Exception as e:  # Bad step configuration, fail build
            # Capture the exception and re-raise with a friendly message
            raise Exception("Bad step configuration for {}: {}".format(
                self.builder_name, str(e)))

        # TODO: windows compatibility (use a custom script for this?)
        pkill_step = steps.ShellCommand(command=["pkill", "-x", "servo"],
                                        decodeRC={
                                            0: SUCCESS,
                                            1: SUCCESS
                                        })
        static_steps = [pkill_step]

        self.build.steps += static_steps + dynamic_steps

        defer.returnValue(result)
Example #24
def step_pexpect(name, target, python_code, interpolate=False,
                 do_step_if=True, always_run=False, halt_on_failure=True,
                 verbose=False, no_special_chars=False):
    """ Return step for executing Python code with pexpect.

    Arguments:
        name - name of step
        target - which board
        python_code - Python code to execute after setting up pexpect (this can actually be any Python code)
    Optional arguments:
        interpolate - put the python_cmd within buildbot.util.Interpolate (default: False)
        do_step_if - optional callable whether step should be done (passed to doStepIf) (default: True)
        always_run - whether step should be executed always (default: False)
        halt_on_failure - whether step should halt the build on failure (default: True)
        verbose - be verbose and print everything (including serial connection logs) to stdout (default: False)
        no_special_chars - convert all special (non-printable) characters to their hex values and do not
                           write them to the log file (because that would still store the special characters);
                           when enabled you should probably also set verbose=True to still get the
                           log output (default: False)
    Returns:
        step
    """
    if interpolate:
        full_cmd = util.Interpolate(pexpect_start(target, SERIAL_LOG, verbose, no_special_chars) + "\n" + python_code + "\n" + pexpect_finish())
    else:
        full_cmd = pexpect_start(target, SERIAL_LOG, verbose, no_special_chars) + "\n" + python_code + "\n" + pexpect_finish()

    return steps.ShellCommand(command=['/usr/bin/env', 'python', '-c', full_cmd],
                              name=name,
                              logfiles={'serial0': SERIAL_LOG},
                              doStepIf=do_step_if,
                              alwaysRun=always_run,
                              haltOnFailure=halt_on_failure)
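Given the docstring above, a usage sketch; the target name and the pexpect-driving code are placeholders, and the 'child' spawn object is assumed to be set up by pexpect_start.

boot_prompt = step_pexpect(
    name='Wait for login prompt',
    target='odroidxu3',                      # placeholder board name
    python_code="child.expect('login:')",    # assumes pexpect_start() exposes a spawn named 'child'
    halt_on_failure=True,
    verbose=True)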
Example #25
    def init_test_factory(self, test_specification, props):
        product_type = test_specification['product_type']
        build_type = test_specification['build_type']
        conf_file = test_specification["product_conf_file"]
        custom_types = test_specification["custom_types"]

        test_factory = self.factory_with_deploying_infrastructure_step(props)

        worker_os = props['os']
        get_path = bb.utils.get_path_on_os(worker_os)

        repository_name = bb.utils.get_repository_name_by_url(props['repository'])
        # TODO: define component mapper in config
        component_by_repository = {'product-configs': 'mediasdk',
                                   'MediaSDK': 'mediasdk',
                                   'media-driver': 'media-driver'}

        command = [self.run_command[worker_os], "tests_runner.py",
                   '--manifest', self.get_manifest_path(props),
                   '--component', component_by_repository[repository_name],
                   '--test-config', util.Interpolate(
                              get_path(rf"%(prop:builddir)s/product-configs/{conf_file}")),
                   '--root-dir', util.Interpolate('%(prop:builddir)s/test_dir'),
                   '--product-type', product_type,
                   '--build-type', build_type,
                   '--custom-types', custom_types,
                   '--stage']

        for test_stage in TestStage:
            test_factory.append(
                steps.ShellCommand(name=test_stage.value,
                                   command=command + [test_stage.value],
                                   workdir=get_path(r"infrastructure/build_scripts")))
        return test_factory
Example #26
def masterConfig():
    c = {}
    from buildbot.config import BuilderConfig
    from buildbot.process.factory import BuildFactory
    from buildbot.plugins import steps, schedulers, reporters
    c['schedulers'] = [
        schedulers.AnyBranchScheduler(name="sched", builderNames=["testy"])
    ]
    f = BuildFactory()
    f.addStep(steps.ShellCommand(command='echo hello'))
    c['builders'] = [
        BuilderConfig(name="testy", workernames=["local1"], factory=f)
    ]
    notifier = reporters.PushoverNotifier(
        '1234',
        'abcd',
        mode="all",
        watchedWorkers=['local1'],
        messageFormatter=MessageFormatter(template='This is a message.'),
        messageFormatterMissingWorker=MessageFormatterMissingWorker(
            template='No worker.'))
    c['services'] = [
        reporters.MailNotifier("*****@*****.**", mode="all"), notifier
    ]
    return c
Example #27
def setupTriggerConfiguration(triggeredFactory, nextBuild=None):
    c = {}

    c['schedulers'] = [
        schedulers.Triggerable(name="trigsched", builderNames=["triggered"]),
        schedulers.AnyBranchScheduler(name="sched", builderNames=["main"])
    ]

    f = BuildFactory()
    f.addStep(
        steps.Trigger(schedulerNames=['trigsched'],
                      waitForFinish=True,
                      updateSourceStamp=True))
    f.addStep(steps.ShellCommand(command='echo world'))

    mainBuilder = BuilderConfig(name="main", workernames=["local1"], factory=f)

    triggeredBuilderKwargs = {
        'name': "triggered",
        'workernames': ["local1"],
        'factory': triggeredFactory
    }

    if nextBuild is not None:
        triggeredBuilderKwargs['nextBuild'] = nextBuild

    triggeredBuilder = BuilderConfig(**triggeredBuilderKwargs)

    c['builders'] = [mainBuilder, triggeredBuilder]
    return c
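A usage sketch for the helper above; the triggered factory is illustrative.

triggered = BuildFactory()
triggered.addStep(steps.ShellCommand(command='echo triggered build'))
c = setupTriggerConfiguration(triggered)
# Optionally pass nextBuild to control build ordering on the triggered builder.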
Example #28
def mkcoveragefactory():
    f = util.BuildFactory()
    f.addSteps([
        gitStep,
        VirtualenvSetup(name='virtualenv setup',
                        no_site_packages=True,
                        virtualenv_packages=[
                            'coverage', 'mock', '--editable=master',
                            '--editable=slave'
                        ],
                        virtualenv_dir='sandbox',
                        haltOnFailure=True),
        steps.ShellCommand(command=textwrap.dedent("""
            PYTHON=sandbox/bin/python;
            sandbox/bin/coverage run --rcfile=common/coveragerc \
                sandbox/bin/trial buildbot.test buildslave.test \
                || exit 1;
            sandbox/bin/coverage html -i --rcfile=.coveragerc \
                -d /home/buildbot/www/buildbot.buildbot.net/static/coverage \
                || exit 1;
            chmod -R a+rx /home/buildbot/www/buildbot.buildbot.net/static/coverage || exit 1
        """),
                           usePTY=False,
                           description='coverage',
                           descriptionDone='coverage',
                           name='coverage report')
    ])
    return f
Example #29
def getFactory():
    factory = base.getFactory()

    factory.addStep(
        steps.Git(
            repourl="https://github.com/qt/qt5.git",
            branch=util.Interpolate('%(prop:branch)s'),
            mode='full',
            method = 'fresh',
            submodules=True,
            name = 'git operations',
            description = 'git operations such as clone, fetch, and pull',
        )
    )

    factory.addStep(
        steps.ShellCommand(
            command= getHelp,
            name = 'read help',
            haltOnFailure = True,
            description = 'read help to generate the configure command',
        )
    )

    factory.addSteps(linuxSteps())
    factory.addSteps(windowsSteps())
    factory.addSteps(androidSteps())

    return factory
Example #30
def downloadAndRunScript(scriptName, extraFiles=(), args=(), **kwargs):
    """
    Downloads the script to the remote location and executes it
    :param scriptName: name of the local script to execute
    :param extraFiles: names of extra files that should be transferred to the remote host
    :param args: list of arguments to pass to the remote script
    :param kwargs: parameters of the script execution step
    """
    taskSteps = []
    allFiles = list(extraFiles)
    allFiles.append(scriptName)
    for file in allFiles:
        taskSteps.append(
            steps.FileDownload(
                name="Transferring {} to worker".format(file),
                mastersrc="maxscale/builders/support/scripts/{}".format(file),
                workerdest=util.Interpolate(
                    "%(prop:builddir)s/scripts/{}".format(file)),
                hideStepIf=True,
                alwaysRun=True,
                mode=0o755,
            ))
    remoteScriptName = util.Interpolate(
        "%(prop:builddir)s/scripts/{}".format(scriptName))
    taskSteps.append(
        steps.ShellCommand(command=[remoteScriptName, *args],
                           timeout=1800,
                           **kwargs))
    return taskSteps