def run(self):
    """Queue archive, upload and repo-sync steps after the current step.

    Tars up the flatpak-builder state dir and the OSTree repo on the
    worker, uploads both archives to the master, then runs the repo
    sync script on the master.  Always reports SUCCESS itself; the
    queued steps carry haltOnFailure.
    """
    archive_step = steps.ShellSequence(
        name='archive',
        haltOnFailure=True,
        logEnviron=False,
        commands=[
            util.ShellArg(
                command=['tar', 'cf', 'state-dir.tar', '.flatpak-builder'],
                logfile='stdio',
            ),
            util.ShellArg(
                command=['tar', 'cf', 'repo.tar', 'repo'],
                logfile='stdio',
            ),
        ],
    )
    upload_steps = [
        steps.FileUpload(
            name='upload state-dir.tar',
            haltOnFailure=True,
            workersrc='state-dir.tar',
            masterdest='flatpak/state-dir.tar',
        ),
        steps.FileUpload(
            name='upload repo.tar',
            haltOnFailure=True,
            workersrc='repo.tar',
            masterdest='flatpak/repo.tar',
        ),
    ]
    sync_step = steps.MasterShellCommand(
        name='sync repo',
        haltOnFailure=True,
        logEnviron=False,
        command=['./scripts/flatpak-repo.sh'],
    )
    self.build.addStepsAfterCurrentStep(
        [archive_step] + upload_steps + [sync_step])
    return buildbot.process.results.SUCCESS
def masterConfig(bigfilename):
    """Build a master config whose single builder exercises file transfers.

    Creates an ~8 MB file at *bigfilename* on the master as a large
    FileDownload fixture, then chains string/file/directory transfer
    steps to exercise the master<->worker protocol.
    """
    from buildbot.config import BuilderConfig
    from buildbot.process.factory import BuildFactory
    from buildbot.plugins import steps, schedulers

    c = {}
    c['schedulers'] = [
        schedulers.ForceScheduler(name="force", builderNames=["testy"])
    ]

    factory = BuildFactory()
    # do a bunch of transfer to exercise the protocol
    factory.addStep(
        steps.StringDownload("filecontent", workerdest="dir/file1.txt"))
    factory.addStep(
        steps.StringDownload("filecontent2", workerdest="dir/file2.txt"))

    # create 8 MB file (an 8 KiB chunk written 1000 times)
    with open(bigfilename, 'w') as out:
        chunk = "xxxxxxxx" * 1024
        for _ in range(1000):
            out.write(chunk)

    factory.addStep(
        steps.FileDownload(mastersrc=bigfilename, workerdest="bigfile.txt"))
    factory.addStep(
        steps.FileUpload(workersrc="dir/file2.txt", masterdest="master.txt"))
    factory.addStep(
        steps.FileDownload(mastersrc="master.txt", workerdest="dir/file3.txt"))
    factory.addStep(steps.DirectoryUpload(workersrc="dir", masterdest="dir"))

    c['builders'] = [
        BuilderConfig(name="testy", workernames=["local1"], factory=factory)
    ]
    return c
def masterConfig():
    """Build a master config whose single builder exercises file transfers.

    Chains string download, file upload/download and directory upload
    steps to exercise the master<->worker transfer protocol.
    """
    from buildbot.config import BuilderConfig
    from buildbot.process.factory import BuildFactory
    from buildbot.plugins import steps, schedulers

    c = {}
    c['schedulers'] = [
        schedulers.ForceScheduler(name="force", builderNames=["testy"])
    ]

    factory = BuildFactory()
    # do a bunch of transfer to exercise the protocol
    factory.addStep(
        steps.StringDownload("filecontent", workerdest="dir/file1.txt"))
    factory.addStep(
        steps.StringDownload("filecontent2", workerdest="dir/file2.txt"))
    factory.addStep(
        steps.FileUpload(workersrc="dir/file2.txt", masterdest="master.txt"))
    factory.addStep(
        steps.FileDownload(mastersrc="master.txt", workerdest="dir/file3.txt"))
    factory.addStep(steps.DirectoryUpload(workersrc="dir", masterdest="dir"))

    c['builders'] = [
        BuilderConfig(name="testy", workernames=["local1"], factory=factory)
    ]
    return c
def get_config_single_step(self, step):
    """Return a master config whose single builder runs exactly *step*.

    Bug fix: the *step* argument was previously ignored and a
    hard-coded FileUpload was added instead.  Callers (see
    get_non_existing_file_upload_config) construct a step and pass it
    in, clearly expecting it to be the one that runs — so the supplied
    step is now added to the factory.
    """
    from buildbot.config import BuilderConfig
    from buildbot.process.factory import BuildFactory
    from buildbot.plugins import schedulers

    c = {}
    c['schedulers'] = [
        schedulers.ForceScheduler(name="force", builderNames=["testy"])
    ]
    f = BuildFactory()
    # Use the caller-provided step rather than a duplicated hard-coded one.
    f.addStep(step)
    c['builders'] = [
        BuilderConfig(name="testy", workernames=["local1"], factory=f)
    ]
    return c
def run(self):
    """Queue steps that publish this channel's .flatpakref files to the master.

    Creates the channel directory on the master, clears any stale
    .flatpakref files there, then uploads the worker's fresh ones.
    Always reports SUCCESS itself; the queued steps carry haltOnFailure.
    """
    files_dir = '/repo/flatpak/files/' + self.channel
    publish_steps = [
        steps.MasterShellCommand(
            name='create flatpakref directory',
            haltOnFailure=True,
            logEnviron=False,
            command=['mkdir', '-p', files_dir],
        ),
        steps.MasterShellCommand(
            name='remove old flatpakref files',
            haltOnFailure=True,
            logEnviron=False,
            command=['rm', '-f', files_dir + '/*.flatpakref'],
        ),
        # NOTE(review): workersrc contains a shell glob; confirm the
        # FileUpload step in use actually expands '*' on the worker side.
        steps.FileUpload(
            name='upload flatpakref files',
            haltOnFailure=True,
            workersrc='%s/*.flatpakref' % self.channel,
            masterdest=files_dir,
        ),
    ]
    self.build.addStepsAfterCurrentStep(publish_steps)
    return buildbot.process.results.SUCCESS
def __init__(self, pkgbuilddir: str, group: str, pkg_base: str,
             properties: dict):
    """Assemble the build-step pipeline for one Arch package.

    :param pkgbuilddir: root directory holding the PKGBUILD tree on the master
    :param group: package group (e.g. "community", "packages")
    :param pkg_base: base name of the package being built
    :param properties: srcinfo-derived properties (depends, src_names,
        install, git_tag, git_revision, pkg_names, gpg_sign, sshdir, ...)
    """
    super().__init__()
    gpg_sign = properties["gpg_sign"]
    sshdir = properties["sshdir"]
    workdir = f"{pkgbuilddir}/{group}/{pkg_base}"
    # community/packages repos keep the PKGBUILD under a trunk/ subdirectory
    if group in ("community", "packages"):
        workdir += "/trunk"
    # set initial properties
    self.addStep(
        steps.SetProperties(name="set properties from srcinfo",
                            properties=properties))
    # find dependencies: for each declared dependency, expose its name as a
    # property and run a FindDependency step against it
    depends = properties["depends"]
    if depends is not None:
        for depends_name in depends:
            self.addSteps([
                steps.SetProperty(
                    name=f"set depends_name to {depends_name}",
                    property="depends_name",
                    value=depends_name,
                    hideStepIf=True,
                ),
                FindDependency(name=f"find {depends_name}"),
            ])
    # download build files from the master into the worker's build dir
    self.addStep(
        steps.FileDownload(
            name="download PKGBUILD",
            mastersrc=f"{workdir}/PKGBUILD",
            workerdest="PKGBUILD",
        ))
    for src_name in properties["src_names"]:
        self.addStep(
            steps.FileDownload(
                name=f"download {src_name}",
                mastersrc=f"{workdir}/{src_name}",
                workerdest=src_name,
            ))
    # optional .install script referenced by the PKGBUILD
    install = properties["install"]
    if install:
        self.addStep(
            steps.FileDownload(
                name=f"download {install}",
                mastersrc=f"{workdir}/{install}",
                workerdest=install,
            ))
    # update pkgver, pkgrel
    self.addSteps([SetPkgver(), SetPkgrel(), Updpkgsums()])
    # update git tag revision
    if properties["git_tag"]:
        self.addStep(SetTagRevision())
    # update git commit revision
    if properties["git_revision"]:
        self.addStep(SetCommitRevision())
    # build
    self.addStep(ArchBuild())
    # update properties: regenerate .SRCINFO, push the (possibly modified)
    # PKGBUILD/.SRCINFO back to the master, and refresh build properties
    self.addSteps([
        Srcinfo(),
        steps.FileUpload(
            name="upload updated PKGBUILD",
            workersrc="PKGBUILD",
            masterdest=f"{workdir}/PKGBUILD",
        ),
        steps.FileUpload(
            name="upload updated .SRCINFO",
            workersrc=".SRCINFO",
            masterdest=f"{workdir}/.SRCINFO",
        ),
        steps.SetProperties(
            name="refresh properties from srcinfo",
            properties=ArchBuildUtil.srcinfo,
        ),
    ])
    # upload and optionally sign packages
    for pkg_name in properties["pkg_names"]:
        self.addSteps([
            steps.SetProperty(
                name=f"set pkg_name to {pkg_name}",
                property="pkg_name",
                value=pkg_name,
                hideStepIf=True,
            ),
            steps.FileUpload(
                name=f"upload {pkg_name}",
                workersrc=ArchBuildUtil.pkg,
                masterdest=ArchBuildUtil.pkg_masterdest,
            ),
            MovePackage(name=f"move {pkg_name}"),
        ])
        if gpg_sign:
            # sign on the master, then pull the signature back to the worker
            self.addSteps([
                GpgSign(name=f"sign {pkg_name}"),
                steps.FileDownload(
                    name=f"download {pkg_name} sig",
                    mastersrc=ArchBuildUtil.sig_mastersrc,
                    workerdest=ArchBuildUtil.sig_workerdest,
                ),
            ])
        # update repository
        self.addStep(RepoAdd(name=f"add {pkg_name}"))
    # synchronize repository over sshfs when an ssh dir is configured
    if sshdir:
        self.addStep(CreateSshfsDirectory())
        self.addStep(MountPkgbuildCom(env=ArchBuildUtil.ssh_agent))
        self.addStep(RepoSync(env=ArchBuildUtil.ssh_agent))
        self.addStep(UnmountPkgbuildCom())
    # cleanup
    self.addStep(Cleanup())
def get_package_steps(buildname, platformname, srcpath, dstpath, dsturl,
                      archive_format, disttarget, build_data_files,
                      platform_data_files, platform_built_files, **kwargs):
    """Create the (package, upload, link) step triple for one build.

    Returns three steps: a shell sequence that collects *files* into an
    archive, a FileUpload that pushes the archive to *dstpath* on the
    master, and a MasterShellCommand that refreshes the "latest" symlink.
    All three are gated by doPackage (requires a revision and a truthy
    "package" property).  Extra **kwargs are forwarded to the packaging
    shell sequence.
    """
    # fall back to a known format when the requested one is unsupported
    if archive_format not in PACKAGE_FORMAT_COMMANDS:
        archive_format = "tar.bz2"
    archive_base_command = PACKAGE_FORMAT_COMMANDS.get(archive_format)

    files = []
    files += platform_built_files
    # If file is absolute or begins with a $ (environment variable) don't prepend srcpath
    if platform_data_files:
        files += [
            f if (os.path.isabs(f) or f[0:1] == '$') else os.path.join(srcpath, f)
            for f in platform_data_files
        ]
    # dont pack up the default files if the port has its own dist target
    if not disttarget:
        files += [os.path.join(srcpath, f) for f in build_data_files]

    def namesFromProps(props):
        # (directory name, archive filename, symlink name) derived from revision
        return create_names(buildname, platformname, archive_format,
                            props["revision"])

    @util.renderer
    def generateCommands(props):
        # optional dist make target, then stage files into a dir and archive it
        name, archive, _ = namesFromProps(props)
        archive_full_command = archive_base_command + [archive, name + "/"]
        commands = []
        if disttarget:
            commands.append(util.ShellArg(["make", disttarget],
                                          logname="make {0}".format(disttarget),
                                          haltOnFailure=True))
        commands.append(util.ShellArg(["mkdir", name],
                                      logname="archive",
                                      haltOnFailure=True))
        # Use a string for cp to allow shell globbing
        # WARNING: files aren't surrounded with quotes to let it happen
        commands.append(util.ShellArg('cp -r ' + ' '.join(files) + ' "{0}/"'.format(name),
                                      logname="archive",
                                      haltOnFailure=True))
        commands.append(util.ShellArg(archive_full_command,
                                      logname="archive",
                                      haltOnFailure=True))
        return commands

    @util.renderer
    def generateCleanup(props):
        # remove the staging directory created by generateCommands
        name, _, _ = namesFromProps(props)
        commands = []
        commands.append(util.ShellArg(["rm", "-rf", name],
                                      logname="cleanup",
                                      haltOnFailure=True))
        return commands

    @util.renderer
    def doPackage(props):
        # only package builds that have a revision and requested packaging
        return ("revision" in props and "package" in props
                and props["revision"] is not None and bool(props["package"]))

    @util.renderer
    def getWorkerSrc(props):
        _, archive, _ = namesFromProps(props)
        return archive

    @util.renderer
    def getMasterDest(props):
        _, archive, _ = namesFromProps(props)
        return os.path.join(dstpath, archive)

    @util.renderer
    def getArchiveURL(props):
        _, archive, _ = namesFromProps(props)
        return urlp.urljoin(dsturl, archive)

    @util.renderer
    def getLinkCommand(props):
        # refresh the "latest" symlink to point at the new archive
        _, archive, symlink = namesFromProps(props)
        return "ln", "-sf", archive, os.path.join(dstpath, symlink)

    build_package = CleanShellSequence(
        name = "package",
        description = "packaging",
        descriptionDone = "package",
        doStepIf = doPackage,
        haltOnFailure = True,
        flunkOnFailure = True,
        commands = generateCommands,
        cleanup = generateCleanup,
        **kwargs
    )

    # dstpath will get created by FileUpload
    upload_package = steps.FileUpload(
        name = "upload package",
        description = "uploading",
        descriptionDone = "uploaded",
        doStepIf = doPackage,
        haltOnFailure = True,
        flunkOnFailure = True,
        workersrc = getWorkerSrc,
        masterdest = getMasterDest,
        mode = 0o0644,
        url = getArchiveURL if dsturl else None)

    link = steps.MasterShellCommand(
        name = "link latest snapshot",
        description = "linking",
        descriptionDone = "linked",
        doStepIf = doPackage,
        haltOnFailure = True,
        flunkOnFailure = True,
        command = getLinkCommand,
        env = {})

    return build_package, upload_package, link
def get_non_existing_file_upload_config(self):
    """Config with one FileUpload whose worker-side source path does not exist."""
    from buildbot.plugins import steps
    upload = steps.FileUpload(workersrc="dir/noexist_path",
                              masterdest="master_dest")
    return self.get_config_single_step(upload)
def factory(constructicon_name, builder_name, deps, commands, upload, zip,
            unzip, url, resources):
    """Build a BuildFactory for one constructicon builder.

    NOTE(review): this source is a template — the triple-brace tokens
    ({{{devastator_git_state}}}, {{{cybertron_git_state}}},
    {{{devastator_host}}}) are substituted before the file becomes valid
    Python.

    :param commands: sequence of (step name, command) pairs; a command may
        be a plain string or a dict with command/warnings/suppress_warnings/
        timeout keys
    :param upload: mapping of worker source path -> master destination name
    :param zip/unzip: containers controlling zip-before-upload and
        unzip-after-upload per file
    :param url: mapping of destination name -> URL suffix override
    :param resources: names of master locks to hold exclusively during
        compile steps
    """
    deps = sorted(deps)

    def work_dir_renderer(*suffix, **kwargs):
        # Renderer producing the builder work dir, using the path separator
        # appropriate for the slave's platform.
        @util.renderer
        def work_dir(properties):
            if kwargs.get('log', False):
                log.msg('properties are: ' + pprint.pformat(properties.asDict()))
                log.msg('sourcestamps are: ' + pprint.pformat(
                    [(i.repository, i.branch, i.revision)
                     for i in properties.getBuild().getAllSourceStamps()]))
            sep = '/'
            if all_slaves[properties['slavename']].get('platform', 0) == 'windows':
                sep = '\\'
            return sep.join(('..', 'constructicons', constructicon_name,
                             constructicon_name) + suffix)
        return work_dir

    result = util.BuildFactory()

    def git_step(repo_url, work_dir, env):
        return common.sane_step(
            steps.Git,
            repourl=repo_url,
            codebase=repo_url,
            workdir=work_dir,
            mode='incremental',
            env=env,
            warnOnWarnings=False,
        )

    def extract_parameters(dict):
        # NOTE(review): parameter name shadows the builtin `dict`.
        # Strip parameter_prefix from matching property names; values come in
        # as 1-element sequences, hence j[0].
        return {
            i[len(parameter_prefix):]: str(j[0])
            for i, j in dict.items()
            if i.startswith(parameter_prefix)
        }

    @util.renderer
    def env(properties):
        return extract_parameters(properties.asDict())

    def format(command):
        # NOTE(review): name shadows the builtin `format`.
        @util.renderer
        def f(properties):
            return command.format(**extract_parameters(properties.asDict()))
        return f

    @util.renderer
    def get_command(properties):
        # Collect "codebase:revision" pairs (falling back to branch) and
        # build the constructicon "go" command line.
        revisions = ''
        for i in properties.getBuild().getAllSourceStamps():
            revision = None
            if i.revision:
                revision = i.revision
            elif i.branch:
                revision = i.branch
            if revision:
                revisions += ' {}:{}'.format(i.codebase, revision)
        if revisions:
            revisions = ' -r' + revisions
        return common.constructicon_slave_go('g {}{}'.format(
            builder_name,
            revisions,
        ))

    # lazily create one master lock per named resource
    for resource in resources:
        if resource not in resource_locks:
            resource_locks[resource] = util.MasterLock(resource)
    locks = [resource_locks[i].access('exclusive') for i in resources]

    #properties, get, compile
    result.addSteps([
        common.sane_step(
            steps.SetProperty,
            name='devastator git state',
            property='devastator_git_state',
            value={{{devastator_git_state}}},
        ),
        common.sane_step(
            steps.SetProperty,
            name='cybertron git state',
            property='cybertron_git_state',
            value={{{cybertron_git_state}}},
        ),
        common.sane_step(
            steps.SetProperty,
            name='git state',
            property='git_state',
            value=global_git_states[constructicon_name],
        ),
        git_step(global_repo_urls[constructicon_name], work_dir_renderer(), env),
        common.sane_step(
            steps.ShellCommand,
            name='get',
            command=get_command,
            workdir=work_dir_renderer(log=True),
            env=env,
            warnOnWarnings=False,
        ),
    ])

    # one Compile step per configured command; dict-style commands may tune
    # warning patterns and timeout
    for command_i in range(len(commands)):
        kwargs = {}
        meat = commands[command_i][1]
        timeout = 5 * 60
        if type(meat) == str:
            command = meat
        else:
            command = meat['command']
            warning_pattern = '(.*warning[: ])'
            if 'warnings' in meat:
                warning_pattern = '({})'.format('|'.join(meat['warnings']))
            if 'suppress_warnings' in meat:
                warning_pattern = warning_pattern + '(?!{})'.format('|'.join(
                    meat['suppress_warnings']))
            kwargs['warningPattern'] = warning_pattern
            timeout = meat.get('timeout', timeout)
        result.addStep(
            common.sane_step(steps.Compile,
                             name=commands[command_i][0],
                             command=format(command),
                             workdir=work_dir_renderer(),
                             env=env,
                             locks=locks,
                             timeout=timeout,
                             maxTime=2 * 60 * 60,
                             **kwargs))

    #upload
    # NOTE(review): `upload.items(True)` is not plain-dict API — presumably a
    # buildbot/project mapping type; verify what the True argument means.
    for i, j in upload.items(True):
        zip_steps = []
        upload_steps = []
        unzip_steps = []
        slave_src = i
        master_dst_extension = ''
        #zip
        if i in zip:
            # zip on the slave before uploading; closures bind i via default arg
            @util.renderer
            def command(properties, i=i):
                return 'python -m zipfile -c {0}.zip {0}'.format(i)
            zip_steps.append(
                steps.ShellCommand(
                    command=command,
                    workdir=work_dir_renderer(),
                    alwaysRun=True,
                ))
            slave_src += '.zip'
            master_dst_extension = '.zip'
        #unzip
        def master_dst_function(properties, j=j, extension=master_dst_extension,
                                suffix=None):
            # NOTE(review): the `extension` default is captured but the body
            # uses the loop-level master_dst_extension instead — confirm intent.
            return os.path.join(
                make_full_builder_name(constructicon_name, builder_name),
                str(properties['buildnumber']) + '-constructicon',
                suffix if suffix else j + master_dst_extension)

        @util.renderer
        def master_dst_renderer(properties, f=master_dst_function):
            return f(properties)

        url_trim = 0
        if j in unzip:
            # unzip on the master next to the uploaded archive
            @util.renderer
            def command(properties, master_dst_function=master_dst_function):
                master_dst = master_dst_function(properties)
                unzipped = os.path.split(master_dst)[0] or '.'
                return 'python -m zipfile -e {} {}'.format(
                    master_dst, unzipped)
            unzip_steps.append(
                steps.MasterShellCommand(command=command, alwaysRun=True))
            url_trim = 4
        devastator_file_server_port = cybertron['devastator_file_server_port']
        #upload
        suffix = url.get(j, None)

        @util.renderer
        def url_renderer(
                properties,
                j=j,
                suffix=suffix,
                master_dst_function=master_dst_function,
                devastator_file_server_port=devastator_file_server_port,
                url_trim=url_trim):
            # URL served by the devastator file server for this artifact
            return ('http://{}:{}'.format({{{devastator_host}}},
                                          devastator_file_server_port)
                    + '/' + master_dst_function(properties, suffix=suffix))

        upload_steps.append(
            steps.FileUpload(
                slavesrc=slave_src,
                masterdest=master_dst_renderer,
                url=url_renderer,
                workdir=work_dir_renderer(),
                alwaysRun=True,
            ))
        #append
        result.addSteps(zip_steps + upload_steps + unzip_steps)
    return result
# Build pipeline: checkout -> configure (Director engine only) -> compile
# -> upload the binary to the master -> trigger the Director test scheduler.
build_factory = util.BuildFactory()

# check out the source
build_factory.addStep(steps.GitHub(
    repourl="git://github.com/scummvm/scummvm.git",
    mode="incremental",
    **default_step_kwargs,
))

# configure with every engine disabled except Director, using ccache
build_factory.addStep(steps.Configure(
    command=[
        "./configure",
        "--disable-all-engines",
        "--enable-engine=director",
    ],
    env={"CXX": "ccache g++"},
    **default_step_kwargs,
))

build_factory.addStep(steps.Compile(command=["make"], **default_step_kwargs))

# upload the built binary next to the master's config directory
master_dir = os.path.dirname(os.path.dirname(__file__))
master_file = os.path.join(master_dir, "scummvm-binary")
worker_file = "scummvm"
build_factory.addStep(
    steps.FileUpload(workersrc=worker_file, masterdest=master_file))

# kick off the Director test suite and wait for it
build_factory.addStep(
    steps.Trigger(schedulerNames=["Director Tests"], waitForFinish=True))