def init_trigger_factory(self, build_specification, props):
    trigger_factory = self.factory_with_deploying_infrastructure_step(props)

    worker_os = props['os']
    get_path = bb.utils.get_path_on_os(worker_os)
    repository_name = bb.utils.get_repository_name_by_url(props['repository'])

    trigger_factory.extend([
        steps.ShellCommand(
            name='create manifest',
            command=[self.run_command[worker_os], 'manifest_runner.py',
                     '--root-dir',
                     util.Interpolate(get_path(r'%(prop:builddir)s/repositories')),
                     '--repo', repository_name,
                     '--branch', util.Interpolate('%(prop:branch)s'),
                     '--revision', util.Interpolate('%(prop:revision)s'),
                     '--build-event', props['event_type'],
                     '--commit-time', buildbot_utils.get_event_creation_time] +
                    (['--target-branch', props['target_branch']]
                     if props.hasProperty('target_branch') else []),
            workdir=get_path(r'infrastructure/build_scripts'))])

    # TODO: List of repos should be imported from config
    if props['event_type'] == 'pre_commit' and repository_name in [
            'MediaSDK', 'infrastructure', 'product-configs']:
        trigger_factory.extend([
            steps.ShellCommand(
                name='check author name and email',
                command=[self.run_command[worker_os], 'check_author.py',
                         '--repo-path',
                         util.Interpolate(
                             get_path(rf'%(prop:builddir)s/repositories/{repository_name}')),
                         '--revision', util.Interpolate('%(prop:revision)s')],
                workdir=get_path(r'infrastructure/pre_commit_checks'))])
    return trigger_factory
def generateDBTestStep(dbname, dbport):
    mysqlString = "mysql -u root -h 127.0.0.1 -P " + dbport
    return common.shellSequence(
        commands=[
            common.shellArg(
                command='echo "select version()" | ' + mysqlString,
                haltOnFailure=False,
                logname='version'),
            common.shellArg(
                command=util.Interpolate(
                    'echo "create database opencast%(prop:buildnumber)s;" | ' + mysqlString),
                haltOnFailure=False,
                logname='createdb'),
            common.shellArg(
                command=util.Interpolate(
                    mysqlString + ' opencast%(prop:buildnumber)s < docs/scripts/ddl/mysql5.sql'),
                haltOnFailure=False,
                logname='newdb'),
            common.shellArg(
                command=util.Interpolate(
                    'echo "drop database opencast%(prop:buildnumber)s;" | ' + mysqlString),
                haltOnFailure=False,
                logname='dropdb'),
        ],
        workdir="build/",
        name="Test database generation script against " + dbname,
        haltOnFailure=False,
        flunkOnFailure=True,
        doStepIf=lambda step: int(step.getProperty("pkg_major_version")) < 9)
def _applyRepoLocalManifest(self):
    """Apply a ``local-manifest.xml`` file in the current repo source tree
    context if instructed by the build properties."""

    def assert_local_manifest_application(step: BuildStep) -> bool:
        return bool(step.getProperty("use_local_manifest") and
                    step.getProperty("local_manifest_xml"))

    self.addStep(steps.ShellCommand(
        name="apply local-manifest",
        description=line("""apply local-manifest if specified and provided by
            the build properties"""),
        haltOnFailure=True,
        doStepIf=assert_local_manifest_application,
        command=["/usr/bin/env", "bash", "-c", textwrap.dedent(
            r"""
            set -e -u -o pipefail
            if [[ "${use_local_manifest:-}" -ne 0 ]]; then
                # -p keeps re-runs from aborting under `set -e` when the
                # directory already exists
                mkdir -p .repo/local_manifests
                echo "${local_manifest_xml:-}" \
                    > ".repo/local_manifests/local_manifest.xml"
                repo sync -j4
            fi
            """).strip()],
        env={
            "use_local_manifest": util.Interpolate("%(prop:use_local_manifest:#?|1|0)s"),
            "local_manifest_xml": util.Interpolate("%(prop:local_manifest_xml)s"),
        },
    ))
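# A minimal usage sketch (hypothetical property values): a force scheduler or
# change hook would supply the two properties this step keys on, e.g.
#   use_local_manifest = True
#   local_manifest_xml = "<manifest>...</manifest>"
# and the factory method is then called after the repo checkout steps:
#   self._applyRepoLocalManifest()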
@defer.inlineCallbacks  # assumed decorator: run() yields Deferreds, Buildbot's documented step pattern
def run(self):
    # run the command to get the list of targets
    cmd = yield self.makeRemoteShellCommand()
    yield self.runCommand(cmd)

    # if the command passes extract the list of stages
    result = cmd.results()
    if result == util.SUCCESS:
        # create a ShellCommand for each stage and add them to the build
        self.build.addStepsAfterCurrentStep([
            common.shellCommand(
                command=['ansible',
                         '-e', 'ansible_user={{ buildbot_user }}',
                         util.Interpolate('--private-key=%(prop:builddir)s/%(prop:deploy_env)s'),
                         '-i', util.Interpolate("{{ buildbot_config }}/envs/" + target),
                         'admin_node',
                         '-m', 'copy',
                         '-a', util.Interpolate('src={{ buildbot_config }}/opencast-ingest.sh dest=opencast-ingest.sh')],
                name="Copy ingest script to " + target + " env",
                haltOnFailure=False,
                flunkOnFailure=True)
            for target in self.extract_targets(self.observer.getStdout())
        ])
    return result
def __getBasePipeline():
    checkSpaces = common.shellSequence(
        commands=[
            common.shellArg(
                command=util.Interpolate(
                    "(! grep -rnP '\t' modules assemblies pom.xml etc --include=pom.xml)"),
                haltOnFailure=False,
                logname='Tab Check'),
            common.shellArg(
                command=util.Interpolate(
                    "(! grep -rn ' $' modules assemblies pom.xml etc --include=pom.xml)"),
                haltOnFailure=False,
                logname='End Of Line Space Check')
        ],
        workdir="build/docs/guides",
        name="Formatting checks")

    reports = [
        'site', 'site:stage', '-Daggregate=true', '-Dcheckstyle.skip=true',
        '-P', 'none,!frontend'
    ]
    site = common.getBuild(override=reports, name="Build site report")

    f_build = util.BuildFactory()
    f_build.addStep(common.getPreflightChecks())
    f_build.addStep(common.getClone())
    f_build.addStep(common.getWorkerPrep())
    f_build.addStep(common.setTimezone())
    f_build.addStep(common.setLocale())
    f_build.addStep(common.getBuild())
    f_build.addStep(checkSpaces)
    f_build.addStep(site)
    return f_build
def downloadAndRunScript(scriptName, extraFiles=(), args=(), **kwargs):
    """
    Downloads the script to the remote location and executes it.

    :param scriptName: name of the local script to execute
    :param extraFiles: names of extra files that should be transferred to the
        remote host
    :param args: list of arguments to pass to the remote script
    :param kwargs: parameters passed through to the final ShellCommand step
    """
    taskSteps = []
    allFiles = list(extraFiles)
    allFiles.append(scriptName)
    for fileName in allFiles:
        taskSteps.append(
            steps.FileDownload(
                name="Transferring {} to worker".format(fileName),
                mastersrc="maxscale/builders/support/scripts/{}".format(fileName),
                workerdest=util.Interpolate(
                    "%(prop:builddir)s/scripts/{}".format(fileName)),
                hideStepIf=True,
                alwaysRun=True,
                mode=0o755,
            ))
    remoteScriptName = util.Interpolate(
        "%(prop:builddir)s/scripts/{}".format(scriptName))
    taskSteps.append(
        steps.ShellCommand(command=[remoteScriptName, *args],
                           timeout=1800,
                           **kwargs))
    return taskSteps
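# A minimal usage sketch, assuming util.BuildFactory is in scope as in the
# surrounding snippets; the script name, extra file, and arguments are
# hypothetical placeholders:
example_factory = util.BuildFactory()
example_factory.addSteps(downloadAndRunScript(
    "run_test.sh",               # ends up in %(prop:builddir)s/scripts/ on the worker
    extraFiles=("helpers.sh",),  # shipped alongside the script
    args=("--verbose",),
    name="Run test script on worker"))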
def configureCommonProperties(properties):
    testId = "{}-{}".format(properties.getProperty("buildername"),
                            properties.getProperty("buildnumber"))
    logDirectory = "{}/LOGS/{}/".format(properties.getProperty("HOME"), testId)
    coreDumpsLog = "{}/coredumps_{}".format(logDirectory, testId)
    return {
        "buildLogFile": util.Interpolate("%(prop:builddir)s/build_log_%(prop:buildnumber)s"),
        "resultFile": util.Interpolate("result_%(prop:buildnumber)s"),
        "jsonResultsFile": util.Interpolate("%(prop:builddir)s/json_%(prop:buildnumber)s"),
        "mdbciConfig": util.Interpolate("%(prop:MDBCI_VM_PATH)s/%(prop:name)s"),
        "upload_server": constants.UPLOAD_SERVERS[properties.getProperty("host")],
        "testId": testId,
        "logDirectory": logDirectory,
        "coreDumpsLog": coreDumpsLog,
        "mdbciVMPath": util.Interpolate("%(prop:name)s_vms"),
    }
def initTargetProperty():
    """
    Sets the 'target' property of the build to:

    - <branch>-buildbot-<starttime> if it is not set yet or property
      'targetInitMode' is TargetInitOptions.GENERATE;
    - <branch> if property 'targetInitMode' is
      TargetInitOptions.SET_FROM_BRANCH.

    :return: list of steps
    """
    return [
        steps.SetProperty(
            name=util.Interpolate("Set 'target' property"),
            property="target",
            value=util.Interpolate(
                "%(prop:branch)s-buildbot-%(kw:startTime)s",
                startTime=getFormattedDateTime("%Y-%b-%d-%H-%M-%S")),
            doStepIf=lambda step: (step.build.getProperty('target') is None and
                                   step.build.getProperty('targetInitMode') is None) or
                step.build.getProperty('targetInitMode') == TargetInitOptions.GENERATE,
            hideStepIf=lambda results, s: results == SKIPPED),
        steps.SetProperty(
            name=util.Interpolate("Set 'target' property"),
            property="target",
            value=util.Property("branch"),
            doStepIf=lambda step: step.build.getProperty('targetInitMode') ==
                TargetInitOptions.SET_FROM_BRANCH,
            hideStepIf=lambda results, s: results == SKIPPED)
    ]
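# A minimal usage sketch: prepend the returned steps to a factory so 'target'
# is resolved before any later step interpolates %(prop:target)s.
target_factory = util.BuildFactory()
target_factory.addSteps(initTargetProperty())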
@defer.inlineCallbacks  # assumed decorator: run() yields Deferreds, Buildbot's documented step pattern
def run(self):
    # run the command to get the list of targets
    cmd = yield self.makeRemoteShellCommand()
    yield self.runCommand(cmd)

    # if the command passes extract the list of stages
    result = cmd.results()
    if result == util.SUCCESS:
        # create a ShellCommand for each stage and add them to the build;
        # 'params' is provided by the enclosing scope
        self.build.addStepsAfterCurrentStep([
            common.shellCommand(
                command=["ansible",
                         util.Interpolate('--private-key=%(prop:builddir)s/%(prop:deploy_env)s'),
                         "-i", util.Interpolate("{{ buildbot_config }}/envs/" + target),
                         "admin_node",
                         "-m", "shell",
                         "-a", "bash opencast-ingest.sh",
                         "--extra-vars", util.Interpolate(" ".join(params))],
                name="Ingest media to " + target + " env",
                haltOnFailure=False,
                flunkOnFailure=True)
            for target in self.extract_targets(self.observer.getStdout())
        ])
    return result
def buildProduct(self, product_name: str):
    if product_name != 'clipos':
        raise NotImplementedError(
            "Only \"clipos\" product is supported for the moment.")

    self.setupClipOsToolkit()
    self._getRequestedArtifactsFromBuildmaster(
        sdks=['clipos/sdk', 'clipos/sdk_debian'],
        cache=['clipos/core', 'clipos/efiboot'],
    )

    current_location = os.path.dirname(os.path.realpath(__file__))
    with open(os.path.join(current_location, "scripts/complete-build.sh"), "r") as scriptfile:
        self.addStep(clipos.steps.ToolkitEnvironmentShellCommand(
            name="complete build",
            haltOnFailure=False,
            warnOnFailure=True,
            flunkOnFailure=True,
            command=scriptfile.read(),
            env={
                "produce_sdks_artifacts": util.Interpolate("%(prop:produce_sdks_artifacts:#?|1|0)s"),
                "reuse_sdks_artifacts": util.Interpolate("%(prop:reuse_sdks_artifacts:#?|1|0)s"),
                "produce_cache_artifacts": util.Interpolate("%(prop:produce_cache_artifacts:#?|1|0)s"),
                "reuse_cache_artifacts": util.Interpolate("%(prop:reuse_cache_artifacts:#?|1|0)s"),
                "produce_build_artifacts": util.Interpolate("%(prop:produce_build_artifacts:#?|1|0)s"),
            },
        ))

    self._identifyAndSaveProducedArtifactsOntoBuildmaster()
def init_mediasdk_test_factory(self, test_specification, props):
    product_type = test_specification["product_type"]
    build_type = test_specification["build_type"]

    test_factory = self.factory_with_deploying_infrastructure_step(props)

    worker_os = props['os']
    get_path = bb.utils.get_path_on_os(worker_os)

    branch = props.getProperty('target_branch') or props.getProperty('branch')
    test_factory.append(
        steps.ShellCommand(
            command=[
                self.run_command[worker_os], "test_adapter.py",
                "--branch", branch,
                "--build-event",
                "pre_commit" if props.hasProperty('target_branch') else 'commit',
                "--product-type", product_type,
                "--commit-id", util.Interpolate(r"%(prop:revision)s"),
                "--build-type", build_type,
                "--root-dir",
                util.Interpolate(get_path(r"%(prop:builddir)s/build_dir"))
            ],
            workdir=get_path(r"infrastructure/ted_adapter")))
    return test_factory
@defer.inlineCallbacks  # assumed decorator: run() yields Deferreds, Buildbot's documented step pattern
def run(self):
    # run the command to get the list of targets
    cmd = yield self.makeRemoteShellCommand()
    yield self.runCommand(cmd)

    # if the command passes extract the list of stages
    result = cmd.results()
    if result == util.SUCCESS:
        # create a ShellCommand for each stage and add them to the build;
        # 'params' is provided by the enclosing scope
        self.build.addStepsAfterCurrentStep([
            common.shellCommand(
                command=['ansible-playbook', '-b',
                         util.Interpolate('--private-key=%(prop:builddir)s/%(prop:deploy_env)s'),
                         '-i', util.Interpolate("{{ buildbot_config }}/envs/" + target),
                         'uninstall.yml', 'opencast.yml', 'reset.yml',
                         '--extra-vars', util.Interpolate(" ".join(params))],
                name="Deploy Opencast to " + target + " env",
                haltOnFailure=False,
                flunkOnFailure=True)
            for target in self.extract_targets(self.observer.getStdout())
        ])
    return result
@defer.inlineCallbacks  # assumed decorator: send() awaits the master.data.get() calls below
def send(self, build):
    if build['complete']:
        if build['results'] == results.SUCCESS:
            self.endDescription = util.Interpolate(
                'Build %(kw:state)s. imageName:%(kw:imageName)s',
                state=results.Results[build['results']],
                imageName=getImage)
        else:
            steps = yield self.master.data.get(
                ('builds', build['buildid'], "steps"))
            logURL = ''
            # walk the steps backwards to find the first failing one and
            # link to its first log
            for step in reversed(steps):
                if step['results'] != results.SUCCESS:
                    logs = yield self.master.data.get(
                        ("steps", step['stepid'], 'logs'))
                    logURL = '{buildbotURL}api/v2/logs/{logid}/contents'.format(
                        buildbotURL=self.master.config.buildbotURL,
                        logid=logs[0]['logid'])
                    break
            self.endDescription = util.Interpolate(
                'Build %(kw:state)s. logURL:%(kw:logURL)s',
                state=results.Results[build['results']],
                logURL=logURL)
    yield GitHubStatusPush.send(self, build)
def createBuildSteps():
    buildSteps = []
    buildSteps.extend(common.configureMdbciVmPathProperty())
    buildSteps.append(steps.SetProperties(properties=configureBuildProperties))
    buildSteps.extend(common.cloneRepository())
    buildSteps.append(steps.ShellCommand(
        name="Build MaxScale using MDBCI",
        command=['/bin/bash', '-c',
                 'BUILD/mdbci/build.sh || BUILD/mdbci/build.sh'],
        timeout=3600,
        workdir=util.Interpolate("%(prop:builddir)s/build")
    ))
    buildSteps.extend(common.destroyVirtualMachine())
    buildSteps.append(common.runSshCommand(
        name="Make dir for build results on the repo server",
        host=util.Property("upload_server"),
        command=["mkdir", "-p",
                 util.Interpolate(constants.UPLOAD_PATH + '/%(prop:target)s')],
    ))
    buildSteps.append(common.rsyncViaSsh(
        name="Rsync builds results to the repo server",
        local=util.Interpolate("%(prop:builddir)s/repository/%(prop:target)s/mariadb-maxscale/"),
        remote=util.Interpolate("%(prop:upload_server)s:" + constants.UPLOAD_PATH + "/%(prop:target)s/")
    ))
    buildSteps.append(common.generateMdbciRepositoryForTarget())
    buildSteps.extend(common.syncRepod())
    buildSteps.append(steps.ShellCommand(
        name="Upgrade test",
        command=['BUILD/mdbci/upgrade_test.sh'],
        timeout=1800,
        doStepIf=(util.Property('run_upgrade_test') == 'yes'),
        workdir=util.Interpolate("%(prop:builddir)s/build")
    ))
    buildSteps.extend(common.cleanBuildDir())
    return buildSteps
def init_test_factory(self, test_specification, props):
    product_type = test_specification['product_type']
    build_type = test_specification['build_type']
    conf_file = test_specification["product_conf_file"]
    custom_types = test_specification["custom_types"]

    test_factory = self.factory_with_deploying_infrastructure_step(props)

    worker_os = props['os']
    get_path = bb.utils.get_path_on_os(worker_os)
    repository_name = bb.utils.get_repository_name_by_url(props['repository'])

    # TODO: define component mapper in config
    component_by_repository = {'product-configs': 'mediasdk',
                               'MediaSDK': 'mediasdk',
                               'media-driver': 'media-driver'}

    command = [self.run_command[worker_os], "tests_runner.py",
               '--manifest', self.get_manifest_path(props),
               '--component', component_by_repository[repository_name],
               '--test-config',
               util.Interpolate(
                   get_path(rf"%(prop:builddir)s/product-configs/{conf_file}")),
               '--root-dir', util.Interpolate('%(prop:builddir)s/test_dir'),
               '--product-type', product_type,
               '--build-type', build_type,
               '--custom-types', custom_types,
               '--stage']

    for test_stage in TestStage:
        test_factory.append(
            steps.ShellCommand(name=test_stage.value,
                               command=command + [test_stage.value],
                               workdir=get_path(r"infrastructure/build_scripts")))
    return test_factory
def steps_build_upload_artifacts(name, config, boot, out_dir, buildbot_url):
    st = []
    masterdest_dir_pub = 'deploy-pub/' + name + '/%(prop:revision)s/'
    st.append(step_prepare_upload_master('Prepare upload directory: sources',
                                         masterdest_dir_pub))

    cmd = 'echo "Source URL: %(prop:repository)s\nRevision: %(prop:revision)s" > ' + out_dir + 'sources.txt; '
    cmd += 'cp -p ' + out_dir + '.config ' + out_dir + 'config; '
    cmd += 'chmod a+r ' + out_dir + 'config; '
    cmd += 'chmod a+r ' + out_dir + 'sources.txt; '
    cmd += 'chmod a+r ' + out_dir + 'include/generated/autoconf.h'
    st.append(steps.ShellCommand(command=util.Interpolate(cmd),
                                 name='Prepare source files for uploading'))

    upload_files_pub = ['config', 'include/generated/autoconf.h', 'sources.txt']
    upload_files_pub = [(out_dir + i) for i in upload_files_pub]
    st.append(step_upload_files_to_master(
        'Upload config and autoconf.h',
        upload_files_pub,
        masterdest_dir_pub,
        errors_fatal=True,
        url=util.Interpolate(buildbot_url + 'pub/' + masterdest_dir_pub)))

    if boot and config:
        st.extend(steps_build_upload_artifacts_binaries(name, config, out_dir))
    return st
def StandardBuilderWorker(name, **kwargs):
    volumes = [
        '{0}/bshomes:/data/bshomes'.format(config.data_dir),
        util.Interpolate(
            '{0}/builds/%(prop:platformname)s/%(prop:buildname)s:/data/build'.format(
                config.data_dir)),
        '{0}/ccache:/data/ccache'.format(config.data_dir),
        util.Interpolate('{0}/src/%(prop:buildname)s:/data/src:ro'.format(
            config.data_dir)),
    ]
    password = ''.join(
        random.SystemRandom().choice(string.ascii_uppercase + string.digits)
        for _ in range(32))
    tmpfs = docker.types.Mount('/tmp', None, type='tmpfs')
    return utils.worker.DockerWorker(
        name,
        password,
        docker_host=config.docker_socket,
        image=util.Interpolate('workers/%(prop:workerimage)s'),
        masterFQDN=buildbot_ip,
        volumes=volumes,
        hostconfig={
            'network_mode': config.docker_workers_net,
            'read_only': True,
            'mounts': [tmpfs],
        },
        **kwargs)
class Test(DockerBuilder):
    properties = {'A': util.Property('builddir')}
    hostconfig = {'shm_size': util.Transform(to_gigabytes, 2 * 1024**3)}
    volumes = [
        util.Interpolate('%(prop:builddir)s:/root/.ccache:rw'),
        util.Interpolate(
            '%(prop:builddir)s/subdir:%(prop:docker_workdir)s/subdir:rw')
    ]
def build_volk_PR():
    create_src = steps.MakeDirectory(name="create src directory", dir="volk")

    clone_step = steps.GitHub(name="fetch PR source",
                              repourl=util.Property("repository"),
                              mode="full",
                              method="fresh",
                              submodules=True,
                              retryFetch=True,
                              clobberOnFailure=True,
                              workdir="volk")

    rm_src_dir = steps.RemoveDirectory(
        dir=util.Interpolate(
            os.path.join(_PULL_SRC_BASE, "%(prop:github.number)s",
                         "%(prop:github.base.ref)s")),
        hideStepIf=lambda results, s: results == SKIPPED or results == SUCCESS,
    )

    copy_src = steps.CopyDirectory(
        name="copy src to srcdir",
        src="volk",
        dest=util.Interpolate(
            os.path.join(_PULL_SRC_BASE, "%(prop:github.number)s",
                         "%(prop:github.base.ref)s"),
        ),
        hideStepIf=lambda results, s: results == SKIPPED or results == SUCCESS,
    )

    # load builders.json with definitions on how to build things
    parent_path = os.path.dirname(__file__)
    with open(os.path.join(parent_path, "volk_builders.json"),
              "r") as builders_file:
        build_config = json.loads(builders_file.read())

    trigger_builds = custom_steps.BuildTrigger(
        name="trigger the right builders",
        build_config=build_config,
        schedulerNames=["trigger"],
        runner="pull",
        set_properties={
            "pr_base": util.Property("github.base.ref"),
            "src_dir": util.Interpolate(
                os.path.join(_PULL_SRC_BASE, "%(prop:github.number)s"))
        },
        test_merge=False,
        updateSourceStamp=False,
        waitForFinish=True)

    factory = util.BuildFactory()
    factory.addStep(create_src)
    factory.addStep(clone_step)
    factory.addStep(rm_src_dir)
    factory.addStep(copy_src)
    factory.addStep(trigger_builds)
    return factory
def init_build_factory(self, build_specification, props):
    conf_file = build_specification["product_conf_file"]
    product_type = build_specification["product_type"]
    build_type = build_specification["build_type"]
    api_latest = build_specification.get("api_latest")
    fastboot = build_specification.get("fastboot")
    compiler = build_specification.get("compiler")
    compiler_version = build_specification.get("compiler_version")

    build_factory = self.factory_with_deploying_infrastructure_step(props)

    worker_os = props['os']
    get_path = bb.utils.get_path_on_os(worker_os)

    # TODO: rename to component
    dependency_name = build_specification.get('dependency_name')
    build_factory.append(
        DependencyChecker(
            name=f"check {dependency_name} on share",
            command=[self.run_command[worker_os], 'component_checker.py',
                     '--path-to-manifest', self.get_manifest_path(props),
                     '--component-name', dependency_name,
                     '--product-type', product_type,
                     '--build-type', build_type],
            workdir=get_path(r'infrastructure/common')))

    shell_commands = [self.run_command[worker_os],
                      "build_runner.py",
                      "--build-config",
                      util.Interpolate(
                          get_path(rf"%(prop:builddir)s/product-configs/{conf_file}")),
                      "--root-dir",
                      util.Interpolate(get_path(r"%(prop:builddir)s/build_dir")),
                      "--manifest", self.get_manifest_path(props),
                      "--component", dependency_name,
                      "--build-type", build_type,
                      "--product-type", product_type]
    if api_latest:
        shell_commands.append("api_latest=True")
    if fastboot:
        shell_commands.append("fastboot=True")
    if compiler and compiler_version:
        shell_commands.extend([f"compiler={compiler}",
                               f"compiler_version={compiler_version}"])

    # Build by stages: clean, extract, build, install, pack, copy
    for stage in Stage:
        build_factory.append(
            steps.ShellCommand(
                command=shell_commands + ["--stage", stage.value],
                workdir=get_path(r"infrastructure/build_scripts"),
                name=stage.value,
                doStepIf=is_build_dependency_needed,
                timeout=60 * 60))  # 1 hour for igc
    return build_factory
def makeWptRunCommand(properties):
    browser_id = None
    browser_name = properties.getProperty('browser_name')
    command = [
        './wpt', 'run',
        '--install-fonts',
        '--log-wptreport', properties.getProperty('log_wptreport'),
        '--log-raw', properties.getProperty('log_raw'),
        '--this-chunk', properties.getProperty('this_chunk'),
        '--total-chunks', properties.getProperty('total_chunks')
    ]

    if properties.getProperty('use_sauce_labs'):
        if browser_name == 'edge':
            sauce_browser_name = 'MicrosoftEdge'
        else:
            sauce_browser_name = browser_name

        browser_id = util.Interpolate(
            'sauce:%(kw:sauce_browser_name)s:%(prop:browser_version)s',
            sauce_browser_name=sauce_browser_name)
        sauce_platform_id = util.Interpolate(
            '%(prop:os_name)s %(prop:os_version)s')
        command.extend([
            '--sauce-platform', sauce_platform_id,
            '--sauce-user', util.Interpolate('%(secret:sauce_labs_user)s'),
            '--sauce-key', util.Interpolate('%(secret:sauce_labs_key)s'),
            '--sauce-tunnel-id', properties.getProperty('workername'),
            '--sauce-connect-binary', 'sc',
            '--no-restart-on-unexpected',
            '--run-by-dir', '3'
        ])
    else:
        command = ['xvfb-run', '--auto-servernum'] + command
        browser_id = browser_name

    if browser_name == 'firefox':
        # temporary fix to allow WebRTC tests to call getUserMedia
        command.extend(['--setpref', 'media.navigator.streams.fake=true'])
    elif browser_name == 'chrome':
        # This is intended as a temporary fix to allow the webrtc tests in
        # Chrome to call getUserMedia without failing out.
        command.extend([
            '--binary-arg=--use-fake-ui-for-media-stream',
            '--binary-arg=--use-fake-device-for-media-stream'
        ])

    command.append(browser_id)
    return command
def createWorkerConfigHyperWorker(self, config, name):
    return worker.HyperLatentWorker(
        name,
        str(uuid.uuid4()),
        hyper_host=config['hyper_host'],
        image=util.Interpolate(config['image']),
        hyper_accesskey=config['hyper_accesskey'],
        hyper_secretkey=config['hyper_secretkey'],
        masterFQDN=config.get('masterFQDN'),
        hyper_size=util.Interpolate(config.get('size')))
def steps_build_linux_kernel(env, build_step_name='Build kernel',
                             skip_warnings=True):
    st = []
    if skip_warnings:
        st.append(steps.ShellCommand(command=[util.Interpolate(CMD_MAKE)],
                                     haltOnFailure=True,
                                     env=env,
                                     name=build_step_name))
    else:
        st.append(steps.Compile(command=[util.Interpolate(CMD_MAKE)],
                                haltOnFailure=True,
                                warnOnWarnings=True,
                                suppressionList=BUILD_WARN_IGNORE,
                                env=env,
                                name=build_step_name))
    return st
def configureCommonProperties(properties):
    return {
        "buildLogFile": util.Interpolate("%(prop:builddir)s/build_log_%(prop:buildnumber)s"),
        "resultFile": util.Interpolate("result_%(prop:buildnumber)s"),
        "jsonResultsFile": util.Interpolate("%(prop:builddir)s/json_%(prop:buildnumber)s"),
        "networkConfigPath": '.config/performance_test/performance-test_network_config'
    }
def configureCommonProperties(properties):
    return {
        "buildLogFile": util.Interpolate("%(prop:builddir)s/build_log_%(prop:buildnumber)s"),
        "resultFile": util.Interpolate("result_%(prop:buildnumber)s"),
        "jsonResultsFile": util.Interpolate("%(prop:builddir)s/json_%(prop:buildnumber)s"),
        "mdbciConfig": util.Interpolate("%(prop:MDBCI_VM_PATH)s/%(prop:name)s")
    }
def UploadPackage(directory):
    return transfer.FileUpload(
        mode=0o644,
        workdir="source",
        slavesrc=util.Interpolate("%(prop:output-filepath)s"),
        masterdest=util.Interpolate(
            "%(kw:base)s/%(kw:directory)s/%(prop:output-filename)s",
            base=UPLOADBASE,
            directory=directory),
        url=util.Interpolate(
            "%(kw:base)s/%(kw:directory)s/%(prop:output-filename)s",
            base=UPLOADURL,
            directory=directory))
def build_coverity():
    remove_build = steps.RemoveDirectory(dir="build")
    remove_src = steps.RemoveDirectory(dir="src")
    create_build = steps.MakeDirectory(dir="build")

    download_src_archive = steps.FileDownload(
        mastersrc=util.Property("src_archive"),
        workerdest="src.tar.xz",
        workdir="src")
    extract_src_archive = steps.ShellCommand(
        name="Extract source archive",
        command=["tar", "xJf", "src.tar.xz"],
        workdir="src")

    cmake_step = steps.CMake(path="../src/",
                             definitions=util.Property("cmake_defs", {}),
                             options=util.Property("cmake_opts", []),
                             workdir="build",
                             env=env)
    make_step = steps.Compile(
        command=["cov-build", "--dir", "cov-int",
                 "make", "-j", "16", "-l", "32"],
        workdir="build",
        env=env)

    compress = steps.ShellCommand(
        command=["tar", "czvf", "gnuradio.tgz", "cov-int"],
        workdir="build")
    upload = steps.ShellCommand(
        command=["curl",
                 "--form", "token=" + tokens.coverityToken,
                 "--form", "[email protected]",
                 "--form", "[email protected]",
                 "--form", util.Interpolate("version=%(prop:revision)s"),
                 "--form", util.Interpolate(
                     "description=\"Weekly Buildbot submission for %(prop:branch)s branch \""),
                 "https://scan.coverity.com/builds?project=GNURadio"],
        workdir="build")

    factory = util.BuildFactory()
    factory.addStep(remove_build)
    factory.addStep(remove_src)
    factory.addStep(create_build)
    factory.addStep(download_src_archive)
    factory.addStep(extract_src_archive)
    factory.addStep(cmake_step)
    factory.addStep(make_step)
    factory.addStep(compress)
    factory.addStep(upload)
    return factory
def getFactory():
    factory = base.getFactory()
    build_steps = [
        steps.Git(
            repourl='https://github.com/QuasarApp/quasarAppCoin.git',
            branch=util.Interpolate('%(prop:Branch)s'),
            mode='incremental',
            submodules=True
        ),
        steps.ShellCommand(
            command=['qmake'],
        ),
        steps.ShellCommand(
            command=['make', 'deploy'],
        ),
        steps.CopyDirectory(
            src="build/Distro",
            dest="~/shared/quasarAppCoin/"
        )
    ]
    factory.addSteps(build_steps)
    return factory
def step_pexpect(name, target, python_code, interpolate=False, do_step_if=True,
                 always_run=False, halt_on_failure=True, verbose=False,
                 no_special_chars=False):
    """
    Return step for executing Python code with pexpect.

    Arguments:
        name - name of step
        target - which board
        python_code - Python code to execute after setting up pexpect
            (this can be actually any Python code)

    Optional arguments:
        interpolate - put the python_cmd within buildbot.util.Interpolate
            (default: False)
        do_step_if - optional callable whether step should be done
            (passed to doStepIf) (default: True)
        always_run - whether step should be executed always (default: False)
        halt_on_failure - whether step should halt the build on failure
            (default: True)
        verbose - be verbose and print everything (including serial
            connection logs) to stdout (default: False)
        no_special_chars - convert all special (non-printable) characters to
            hex value and do not write to log file (because this would still
            store special characters there); when enabled you probably should
            set verbose=True as well to get the output of the log
            (default: False)

    Returns:
        step
    """
    if interpolate:
        full_cmd = util.Interpolate(
            pexpect_start(target, SERIAL_LOG, verbose, no_special_chars) +
            "\n" + python_code + "\n" + pexpect_finish())
    else:
        full_cmd = (pexpect_start(target, SERIAL_LOG, verbose, no_special_chars) +
                    "\n" + python_code + "\n" + pexpect_finish())
    return steps.ShellCommand(command=['/usr/bin/env', 'python', '-c', full_cmd],
                              name=name,
                              logfiles={'serial0': SERIAL_LOG},
                              doStepIf=do_step_if,
                              alwaysRun=always_run,
                              haltOnFailure=halt_on_failure)
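# A minimal usage sketch; the board name and the expect snippet are
# hypothetical, and the spawned-child variable name depends on what
# pexpect_start() actually emits:
boot_check = step_pexpect(
    name='boot check',
    target='board0',
    python_code="child.expect('login:')",
    verbose=True)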
def auto_update_manifest_factory(self, build_specification, props):
    updater_factory = self.factory_with_deploying_infrastructure_step(props)

    worker_os = props['os']
    get_path = bb.utils.get_path_on_os(worker_os)
    repository_name = bb.utils.get_repository_name_by_url(props['repository'])

    # Additional checks for auto-updated repositories are not needed.
    updater_factory.append(steps.ShellCommand(
        name='update manifest',
        command=[self.run_command[worker_os], 'update_version.py',
                 '--branch', util.Interpolate('%(prop:branch)s'),
                 '--revision', util.Interpolate('%(prop:revision)s'),
                 '--component-name', repository_name],
        workdir=get_path(r'infrastructure/common')))
    return updater_factory