def MakeDebBuilder(distro, version):
    """Build factory producing Debian packages and uploading them.

    Builds with dpkg-buildpackage, then locates/uploads both the main .deb
    and its dbgsym companion, and finally cleans the workdir.
    """
    f = factory.BuildFactory()
    f.addStep(git.Git(**GitArgs("strawberry", "master")))
    f.addStep(shell.ShellCommand(
        name="run cmake",
        workdir="source/build",
        command=["cmake", ".."],
        haltOnFailure=True,
    ))
    f.addStep(shell.Compile(
        name="run dpkg-buildpackage",
        workdir="source",
        command=["dpkg-buildpackage", "-b", "-d", "-uc", "-us", "-nc", "-tc"],
        haltOnFailure=True,
    ))
    # The main package and the dbgsym package go through the same
    # locate -> derive-base-filename -> upload sequence.
    package_lookups = (
        ("get output filename for deb",
         "ls -dt ../strawberry_*.deb | head -n 1"),
        ("get output filename for deb dbgsym",
         "ls -dt ../strawberry-dbgsym_*.*deb | head -n 1"),
    )
    for step_name, list_cmd in package_lookups:
        f.addStep(steps.SetPropertyFromCommand(
            name=step_name,
            workdir="source",
            command=["sh", "-c", list_cmd],
            property="output-filepath",
            haltOnFailure=True,
        ))
        f.addStep(steps.SetProperties(properties=get_base_filename))
        f.addStep(UploadPackage("%s/%s" % (distro, version)))
    f.addStep(shell.ShellCommand(
        name="delete file",
        workdir=".",
        command="rm -f *.deb *.ddeb *.buildinfo *.changes",
        haltOnFailure=True,
    ))
    return f
def __init__(self):
    """Extend the base factory with AddressSanitizer test runs.

    ASan needs llvm-symbolizer to symbolize reports, so its path is
    located first and stored in a build property.
    """
    ZcashBaseFactory.__init__(self)
    find_symbolizer = steps.SetPropertyFromCommand(
        command=['find', '/usr', '-name', 'llvm-symbolizer*',
                 '-type', 'f', '-executable'],
        property='llvm-symbolizer',
        name='Find llvm-symbolizer',
    )
    self.addSteps([
        find_symbolizer,
        asan('btest'),
        asan('gtest'),
    ])
def MakeSourceBuilder():
    """Build factory producing the source tarball and uploading it."""
    checkout_args = GitArgs("strawberry", "master")
    # A full, fresh checkout keeps stale files out of the tarball.
    checkout_args["mode"] = "full"
    checkout_args["method"] = "fresh"

    f = factory.BuildFactory()
    f.addStep(git.Git(**checkout_args))
    f.addStep(shell.ShellCommand(
        name="run cmake",
        workdir="source/build",
        command=["cmake", ".."],
        haltOnFailure=True,
    ))
    f.addStep(shell.ShellCommand(
        name="run maketarball",
        workdir="source/dist/scripts",
        command=["./maketarball.sh"],
        haltOnFailure=True,
    ))
    f.addStep(steps.SetPropertyFromCommand(
        name="get output filename",
        workdir="source",
        command=["sh", "-c",
                 "ls -dt dist/scripts/strawberry-*.tar.xz | head -n 1"],
        property="output-filepath",
        haltOnFailure=True,
    ))
    f.addStep(steps.SetProperties(properties=get_base_filename))
    f.addStep(UploadPackage("source"))
    f.addStep(shell.ShellCommand(
        name="delete file",
        workdir="source/dist/scripts",
        command="rm -f *.bz2 *.xz",
        haltOnFailure=True,
    ))
    return f
def writeBuildResultsToDatabase(**kwargs):
    """Call the script to save results to the database.

    Returns a one-element list so the result can be concatenated onto
    other step lists.  Extra keyword arguments are forwarded to the step.
    """
    save_step = steps.SetPropertyFromCommand(
        name="Save test results to the database",
        command=[
            util.Interpolate("%(prop:builddir)s/scripts/write_build_results.py"),
            util.Property("jsonResultsFile"),
        ],
        extract_fn=extractDatabaseBuildid,
        **kwargs)
    return [save_step]
def steps_build_common(env, config=None):
    """Checkout and configuration steps shared by the kernel builders."""
    build_steps = []
    # OpenStack machines have frequent github.com name resolution failures:
    # fatal: unable to access 'https://github.com/krzk/tools.git/': Could not resolve host: github.com
    # Cache the address first.
    build_steps.append(steps.ShellCommand(
        command=util.Interpolate('%(prop:builddir:-~/)s/tools/buildbot/name-resolve-fixup.sh'),
        haltOnFailure=False,
        warnOnFailure=True,
        flunkOnFailure=False,
        name='Cache DNS addresses (workaround)'))
    build_steps.append(steps.Git(
        repourl='https://github.com/krzk/tools.git',
        name='Clone krzk tools sources',
        mode='incremental',
        alwaysUseLatest=True,
        branch='master',
        getDescription=False,
        workdir='tools',
        haltOnFailure=True,
        env=util.Property('git_env')))
    # Run full/fresh checkout to get rid of any old DTBs or binaries from
    # KBUILD_OUTPUT. For example when compiling stable kernel without
    # given DTB, old DTB from linux-next might remain.
    # Removal of them is necessary for boot tests so they do not re-use
    # wrong binaries... and anyway it is nice to test clean build.
    build_steps.append(steps.Git(
        repourl=repo_git_kernel_org,
        name='Clone the sources',
        mode='full',
        method='fresh',
        haltOnFailure=True,
        env=util.Property('git_env')))
    build_steps.append(steps.SetPropertyFromCommand(
        command='${CROSS_COMPILE}gcc --version | head -n 1',
        property='gcc_version',
        haltOnFailure=True,
        env=env,
        name='Set property: gcc version'))
    build_steps.append(steps.SetPropertyFromCommand(
        command=[util.Interpolate(CMD_MAKE), '-s', 'kernelversion'],
        property='kernel_version',
        haltOnFailure=True,
        env=env,
        name='Set property: kernel version'))
    build_steps.append(step_make_config(env, config))
    return build_steps
def getLatestBuildRevision():
    """Step that reads latest.txt from S3 into the got_revision property."""
    source = "s3://{{ s3_public_bucket }}/builds/%(prop:branch_pretty)s/latest.txt"
    # "-" tells `aws s3 cp` to write the object contents to stdout, which
    # SetPropertyFromCommand captures.
    destination = "-"
    return steps.SetPropertyFromCommand(
        command=['aws', '--endpoint-url', '{{ s3_host }}', 's3', 'cp',
                 util.Interpolate(source), util.Interpolate(destination)],
        env={
            "AWS_ACCESS_KEY_ID": util.Secret("s3.public_access_key"),
            "AWS_SECRET_ACCESS_KEY": util.Secret("s3.public_secret_key"),
        },
        # Note: We're overwriting this value to set it to the built revision
        # rather than whatever it defaults to
        property="got_revision",
        flunkOnFailure=True,
        haltOnFailure=True,
        name="Get latest build version")
def steps_boot(builder_name, target, config, run_pm_tests=False):
    """Assemble the boot-and-test step sequence for one target board."""
    boot_steps = [
        steps.ShellCommand(
            command=['rm', '-fr', 'lib', 'deploy-modules-out.tar.gz',
                     'initramfs-odroidxu3.img'],
            name='Remove old binaries'),
    ]
    boot_steps.extend(steps_download(target))
    boot_steps.append(steps.ShellCommand(
        command=['/opt/tools/buildbot/build-slave-deploy.sh',
                 target, config, util.Property('revision'), 'modules-tmp'],
        haltOnFailure=True,
        name='Deploy on server binaries for booting'))
    boot_steps.append(steps.SetPropertyFromCommand(
        command='ls modules-tmp/lib/modules',
        property='kernel_version',
        haltOnFailure=True))
    boot_steps.append(step_serial_open(target))
    boot_steps.append(step_gracefull_shutdown(target, config, halt_on_failure=False))
    boot_steps.append(step_boot_to_prompt(target, config))
    boot_steps.append(step_test_ping(target, config))
    boot_steps.append(step_test_uname(target, config))
    boot_steps.append(step_test_dmesg_errors(target, config))
    boot_steps.append(step_test_dmesg_warnings(target, config))
    boot_steps.extend(steps_test_suite_fast(target, config))
    boot_steps.extend(steps_test_suite_slow(target, config))
    # After all the tests check again if ping and SSH are working:
    boot_steps.append(step_test_ping(target, config))
    boot_steps.append(step_test_uname(target, config))
    # Test reboot
    boot_steps.append(step_test_reboot(target, config))
    boot_steps.append(step_test_ping(target, config))
    boot_steps.append(step_test_uname(target, config))
    boot_steps.extend(steps_shutdown(target, config))
    return boot_steps
def masterConfig():
    """Return a minimal master config; bumps the reconfiguration counter."""
    global num_reconfig
    num_reconfig += 1
    from buildbot.plugins import schedulers, steps, util
    f = util.BuildFactory()
    f.addStep(steps.SetPropertyFromCommand(property="test",
                                           command=["echo", "foo"]))
    return {
        'schedulers': [
            schedulers.ForceScheduler(name="force", builderNames=["testy"]),
        ],
        'builders': [
            util.BuilderConfig(name="testy",
                               workernames=["local1"],
                               factory=f),
        ],
    }
def build_and_test():
    """Factory: clean, configure with CMake, build with make, run ctest."""
    cmake_step = steps.CMake(
        path=util.Property("src_dir"),
        definitions=util.Property("cmake_defs", {}),
        options=util.Property("cmake_opts", []),
        workdir="build",
        env=env)

    @util.renderer
    def render_make_command(props):
        # Prepend "make" to any extra options supplied via properties.
        return ["make"] + props.getProperty("make_opts", [])

    make_step = steps.Compile(command=render_make_command,
                              workdir="build", env=env)

    def parse_exclude_file(rc, stdout, stderr):
        # The exclude file contains a JSON list of test names to skip.
        return {"test_excludes": json.loads(stdout)}

    load_exclude_file = steps.SetPropertyFromCommand(
        command=["cat", os.path.join("/config", "test_excludes.json")],
        extract_fn=parse_exclude_file,
        doStepIf=lambda steps: steps.getProperty("exclude_file", False))

    @util.renderer
    def render_ctest_command(props):
        command = ["ctest", "--output-on-failure", "--timeout", "10"]
        excludes = props.getProperty("test_excludes", None)
        if excludes is not None:
            # ctest -E takes a regex; join the names as alternatives.
            command += ["-E", "|".join(excludes)]
        return command

    test_step = steps.Test(command=render_ctest_command, workdir="build")

    build_factory = util.BuildFactory()
    for step in (steps.RemoveDirectory("build"),
                 steps.MakeDirectory("build"),
                 load_exclude_file,
                 cmake_step,
                 make_step,
                 test_step):
        build_factory.addStep(step)
    return build_factory
def __init__(self):
    """Check out and clean the sources, then set up the build steps."""
    initial_steps = [
        steps.Git(
            repourl=git_source,
            mode='incremental',
        ),
        sh('git', 'clean', '-dfx', name='git clean'),
    ]
    util.BuildFactory.__init__(self, initial_steps)
    self.addStep(steps.SetPropertyFromCommand(command=nproc,
                                              property="numcpus"))
    self._addPreBuildSteps()
    self._addBuildSteps()
    # Ensures the worker has the params; usually a no-op
    self.addStep(sh(
        './zcutil/fetch-params.sh', '--testnet',
        haltOnFailure=True,
        locks=[params_lock.access('exclusive')]))
def getBuildPipeline():
    """Factory that deploys Opencast via Ansible and ingests test media."""
    clone = steps.Git(
        repourl="{{ ansible_scripts_url }}",
        branch=util.Property('branch'),
        alwaysUseLatest=True,
        mode="full",
        method="fresh")
    version = steps.SetPropertyFromCommand(
        command="git rev-parse HEAD",
        property="ansible_script_rev",
        flunkOnFailure=True,
        warnOnFailure=True,
        haltOnFailure=True,
        workdir="build",
        name="Get ansible script revision")
    deps = common.shellCommand(
        command=['ansible-galaxy', 'install', '-r', 'requirements.yml'],
        name="Installing Ansible dependencies")
    secrets = common.copyAWS(
        pathFrom="s3://{{ s3_private_bucket }}/{{ groups['master'][0] }}/env/%(prop:deploy_env)s",
        pathTo="%(prop:builddir)s/%(prop:deploy_env)s",
        name="Fetching deploy key")
    permissions = common.shellCommand(
        command=['chmod', '600',
                 util.Interpolate("%(prop:builddir)s/%(prop:deploy_env)s")],
        name="Fixing deploy key permissions")
    # The three Generate*Commands steps all list the env directory the same
    # way; build the renderable once and reuse it.
    env_listing = util.Interpolate(
        "ls {{ buildbot_config }}/envs/ | grep %(prop:deploy_env)s")
    install = GenerateInstallCommands(
        command=env_listing,
        name="Determining install targets",
        haltOnFailure=True,
        flunkOnFailure=True)
    deploy = GenerateDeployCommands(
        command=env_listing,
        name="Determining deploy targets",
        haltOnFailure=True,
        flunkOnFailure=True)
    sleep = common.shellCommand(
        command=["sleep", "300"],
        name="Sleeping to let Opencast finish starting up")
    # We aren't using -u here because this is executing in the same directory
    # as the checked out ansible scripts, which contains a group_vars/all.yml
    # file specifying ansible_user
    ingest = GenerateIngestCommands(
        command=env_listing,
        name="Determining ingest targets",
        haltOnFailure=True,
        flunkOnFailure=True)
    cleanup = common.shellCommand(
        command=['rm', '-rf',
                 util.Interpolate("%(prop:builddir)s/%(prop:deploy_env)s")],
        alwaysRun=True,
        name="Cleanup")

    f_ansible = util.BuildFactory()
    for step in (clone, version, deps, secrets, permissions,
                 install, deploy, sleep, ingest, cleanup):
        f_ansible.addStep(step)
    return f_ansible
def _downloadSourceTreeQuicksyncArtifacts(self):
    """Download the source tree artifacts from the buildmaster and from the
    builder directory output into the builder workspace.

    NOTE(review): the multi-line shell snippets below were reconstructed
    from a whitespace-mangled source; confirm the line breaks against the
    original file.
    """
    # Probe which artifact archives are missing from the workspace; the
    # result ("repo-dir" and/or "git-lfs-dirs") is stored in a property.
    self.addStep(steps.SetPropertyFromCommand(
        name="assert which quicksync artifact download is required"[:50],
        property="which_repo_quicksync_artifact_to_download",
        doStepIf=lambda step: not bool(step.getProperty("force_repo_quicksync_artifacts_download")),
        command=["/usr/bin/env", "bash", "-c", textwrap.dedent(
            r"""
            set -e -u -o pipefail
            to_download=() # which artifact will be downloaded
            if [[ ! -f "${REPO_DIR_ARCHIVE_ARTIFACT_FILENAME}" ]]; then
                to_download+=("repo-dir")
            fi
            if [[ ! -f "${GIT_LFS_SUBDIRECTORIES_ARCHIVE_ARTIFACT_FILENAME}" ]]; then
                to_download+=("git-lfs-dirs")
            fi
            echo "${to_download[@]}"
            """).strip()],
        haltOnFailure=False,
        warnOnFailure=True,
        env={
            "REPO_DIR_ARCHIVE_ARTIFACT_FILENAME":
                self.REPO_DIR_ARCHIVE_ARTIFACT_FILENAME,
            "GIT_LFS_SUBDIRECTORIES_ARCHIVE_ARTIFACT_FILENAME":
                self.GIT_LFS_SUBDIRECTORIES_ARCHIVE_ARTIFACT_FILENAME,
        },
    ))

    def is_artifact_download_required(quicksync_artifact_type):
        # doStepIf predicate: download when forced, or when the probe step
        # above reported this artifact type as missing.
        def checker(step: BuildStep) -> bool:
            if bool(step.getProperty("force_repo_quicksync_artifacts_download")):
                return True
            missing = str(step.getProperty(
                "which_repo_quicksync_artifact_to_download")).split()
            return quicksync_artifact_type in missing
        return checker

    # Both artifact types are fetched from the buildmaster FTP the same way;
    # only the names/env vars differ.
    downloads = (
        ("retrieve repo directory artifact",
         'retrieve the ".repo" directory archive from the buildmaster',
         "repo-dir",
         "LATEST_REPO_DIR_ARCHIVE_ARTIFACT_FILEPATH_ON_FTP",
         self.REPO_DIR_ARCHIVE_ARTIFACT_FILENAME),
        ("retrieve git-lfs directories artifact",
         'retrieve the ".git/lfs" directories archive from the buildmaster',
         "git-lfs-dirs",
         "LATEST_GIT_LFS_SUBDIRECTORIES_ARCHIVE_ARTIFACT_FILEPATH_ON_FTP",
         self.GIT_LFS_SUBDIRECTORIES_ARCHIVE_ARTIFACT_FILENAME),
    )
    for step_name, step_description, artifact_type, env_key, artifact_filename in downloads:
        self.addStep(steps.ShellCommand(
            name=step_name,
            description=step_description,
            haltOnFailure=True,
            doStepIf=is_artifact_download_required(artifact_type),
            command=["/usr/bin/env", "bash", "-c", textwrap.dedent(
                r"""
                set -e -u -o pipefail
                cat <<END_OF_LFTP_SCRIPT | lftp
                connect ${ARTIFACTS_FTP_URL}
                set xfer:clobber yes
                mget ${%s}
                END_OF_LFTP_SCRIPT
                """ % env_key).strip()],
            env={
                "ARTIFACTS_FTP_URL": self.buildmaster_setup.artifacts_ftp_url,
                env_key: compute_artifact_path(
                    "/",
                    "quicksync-artifacts",
                    'buildername_providing_repo_quicksync_artifacts',
                    artifact_filename,
                    buildnumber_shard="latest",
                ),
            },
        ))
def MakeWindowsBuilder(is_debug, is_64, with_qt6):
    """Build factory that cross-compiles Strawberry for Windows with MXE
    and packages it into an NSIS installer.

    Args:
        is_debug: use the Debug build type (keeps the win32 console and
            skips the strip step).
        is_64: target x86_64 instead of i686.
        with_qt6: build against Qt 6 instead of Qt 5.
    """
    mingw32_name = ("x86_64-w64-mingw32.shared" if is_64
                    else "i686-w64-mingw32.shared")
    qt_dir = "qt6" if with_qt6 else "qt5"
    mxe_path = "/persistent-data/mingw/mxe/source"
    target_path = mxe_path + "/usr/" + mingw32_name
    env = {
        "PKG_CONFIG_LIBDIR": target_path + "/lib/pkgconfig",
        "PATH": ":".join([
            mxe_path + "/usr/x86_64-pc-linux-gnu/bin",
            "/usr/local/bin",
            "/usr/bin",
            "/bin",
        ]),
    }
    cmake_cmd = [
        "cmake",
        "..",
        "-DCMAKE_TOOLCHAIN_FILE=/config/dist/" + (
            "Toolchain-x86_64-w64-mingw32.cmake" if is_64
            else "Toolchain-i686-w64-mingw32.cmake"),
        "-DCMAKE_BUILD_TYPE=" + ("Debug" if is_debug else "Release"),
        "-DCMAKE_PREFIX_PATH=" + target_path + "/" + qt_dir + "/lib/cmake",
        "-DARCH=" + ("x86_64" if is_64 else "x86"),
        "-DENABLE_WIN32_CONSOLE=" + ("ON" if is_debug else "OFF"),
        "-DQT_MAJOR_VERSION=" + ("6" if with_qt6 else "5"),
        "-DENABLE_DBUS=OFF",
        "-DENABLE_LIBGPOD=OFF",
        "-DENABLE_LIBMTP=OFF",
    ]
    strip_cmd = mxe_path + "/usr/bin/" + mingw32_name + "-strip"
    extra_binary_fileslist = [
        "sqlite3.exe",
        "killproc.exe",
        "gdb.exe",
        "gst-launch-1.0.exe",
        "gst-discoverer-1.0.exe",
        "libfreetype-6.dll",
        ("libssl-1_1-x64.dll" if is_64 else "libssl-1_1.dll"),
        ("libcrypto-1_1-x64.dll" if is_64 else "libcrypto-1_1.dll"),
    ]
    extra_binary_files = [target_path + "/bin/" + i
                          for i in extra_binary_fileslist]
    nsi_files = [
        "strawberry.nsi",
        "Capabilities.nsh",
        "FileAssociation.nsh",
        "strawberry.ico",
    ]
    imageformats_filelist = [
        "qgif.dll",
        "qjpeg.dll",
        "qico.dll",
    ]
    imageformats_files = [
        target_path + "/" + qt_dir + "/plugins/imageformats/" + i
        for i in imageformats_filelist]
    gstreamer_plugins_path = target_path + "/bin/gstreamer-1.0/"
    gstreamer_plugins_filelist = [
        "libgstapp.dll",
        "libgstcoreelements.dll",
        "libgstaudioconvert.dll",
        "libgstaudiofx.dll",
        "libgstaudiomixer.dll",
        "libgstaudioparsers.dll",
        "libgstaudiorate.dll",
        "libgstaudioresample.dll",
        "libgstaudiotestsrc.dll",
        "libgstautodetect.dll",
        "libgstplayback.dll",
        "libgstvolume.dll",
        "libgstspectrum.dll",
        "libgstequalizer.dll",
        "libgstreplaygain.dll",
        "libgsttypefindfunctions.dll",
        "libgstgio.dll",
        "libgstdirectsound.dll",
        "libgstwasapi.dll",
        "libgstpbtypes.dll",
        "libgstapetag.dll",
        "libgsticydemux.dll",
        "libgstid3demux.dll",
        "libgsttaglib.dll",
        "libgsttcp.dll",
        "libgstudp.dll",
        "libgstsoup.dll",
        "libgstcdio.dll",
        "libgstrtp.dll",
        "libgstrtsp.dll",
        "libgstflac.dll",
        "libgstwavparse.dll",
        "libgstwavpack.dll",
        "libgstogg.dll",
        "libgstvorbis.dll",
        "libgstopus.dll",
        "libgstopusparse.dll",
        "libgstspeex.dll",
        "libgstlame.dll",
        "libgstaiff.dll",
        "libgstfaac.dll",
        "libgstfaad.dll",
        "libgstisomp4.dll",
        "libgstasf.dll",
        "libgstasfmux.dll",
        "libgstlibav.dll",
    ]
    gstreamer_plugins_files = [gstreamer_plugins_path + "/" + i
                               for i in gstreamer_plugins_filelist]

    f = factory.BuildFactory()
    f.addStep(git.Git(**GitArgs("strawberry", "master")))
    f.addStep(shell.ShellCommand(
        name="run cmake",
        workdir="source/build",
        command=cmake_cmd,
        env=env,
        haltOnFailure=True))
    f.addStep(shell.Compile(
        name="compile",
        command=["make", "-j", MAKE_JOBS],
        workdir="source/build",
        haltOnFailure=True))
    # BUG FIX: the original list was missing a comma after "gio-modules", so
    # implicit string concatenation produced the single argument
    # "gio-modulesplatforms" and neither directory got created.
    f.addStep(shell.ShellCommand(
        name="mkdir platforms/sqldrivers/imageformats/styles/gstreamer-plugins/nsisplugins",
        workdir="source/build",
        command=["mkdir", "-p", "gio-modules", "platforms", "sqldrivers",
                 "imageformats", "styles", "gstreamer-plugins", "nsisplugins"],
        haltOnFailure=True))
    f.addStep(shell.ShellCommand(
        name="copy libgiognutls.dll",
        workdir="source/build/gio-modules",
        command=["cp", target_path + "/lib/gio/modules/libgiognutls.dll", "."],
        haltOnFailure=True))
    f.addStep(shell.ShellCommand(
        name="copy qwindows.dll",
        workdir="source/build/platforms",
        command=["cp",
                 target_path + "/" + qt_dir + "/plugins/platforms/qwindows.dll",
                 "."],
        haltOnFailure=True))
    f.addStep(shell.ShellCommand(
        name="copy qsqlite.dll",
        workdir="source/build/sqldrivers",
        command=["cp",
                 target_path + "/" + qt_dir + "/plugins/sqldrivers/qsqlite.dll",
                 "."],
        haltOnFailure=True))
    # BUG FIX (here and below): the file lists were nested inside the command
    # list (["cp", files, "."]); argv must be a flat list of strings, so the
    # lists are now spliced in with `+`.
    f.addStep(shell.ShellCommand(
        name="copy qt imageformats",
        workdir="source/build/imageformats",
        command=["cp"] + imageformats_files + ["."],
        haltOnFailure=True))
    f.addStep(shell.ShellCommand(
        name="copy qt styles",
        workdir="source/build/styles",
        command=["cp",
                 target_path + "/" + qt_dir + "/plugins/styles/qwindowsvistastyle.dll",
                 "."],
        haltOnFailure=True))
    f.addStep(shell.ShellCommand(
        name="copy gstreamer-plugins",
        workdir="source/build/gstreamer-plugins",
        command=["cp"] + gstreamer_plugins_files + ["."],
        haltOnFailure=True))
    f.addStep(shell.ShellCommand(
        name="copy extra binaries",
        workdir="source/build",
        command=["cp"] + extra_binary_files + ["."],
        haltOnFailure=True))
    f.addStep(shell.ShellCommand(
        name="copydlldeps.sh",
        workdir="source/build",
        command=[
            mxe_path + "/tools/copydlldeps.sh",
            "-c",
            "-d", ".",
            "-F", ".",
            "-F", "./platforms",
            "-F", "./sqldrivers",
            "-F", "./imageformats",
            "-F", "./styles",
            "-F", "./gstreamer-plugins",
            "-X", target_path + "/apps",
            "-R", target_path,
        ],
        haltOnFailure=True))
    if not is_debug:
        # Release builds get stripped; debug builds keep their symbols.
        f.addStep(shell.ShellCommand(
            name="run strip",
            workdir="source/build",
            command=["/config/dist/win-strip.sh", strip_cmd],
            haltOnFailure=True))
    f.addStep(shell.ShellCommand(
        name="copy nsi files",
        workdir="source/dist/windows",
        command=["cp"] + nsi_files + ["../../build/"],
        haltOnFailure=True))
    f.addStep(shell.ShellCommand(
        name="copy COPYING file",
        workdir="source",
        command=["cp", "COPYING", "build/"],
        haltOnFailure=True))
    f.addStep(shell.ShellCommand(
        name="run makensis",
        command=["makensis", "strawberry.nsi"],
        workdir="source/build",
        haltOnFailure=True))
    f.addStep(steps.SetPropertyFromCommand(
        name="get output filename 1",
        workdir="source",
        command=["sh", "-c",
                 "ls -dt build/StrawberrySetup-*.exe | head -n 1"],
        property="output-filepath",
        haltOnFailure=True))
    f.addStep(steps.SetProperties(properties=get_base_filename))
    f.addStep(UploadPackage("windows"))
    f.addStep(shell.ShellCommand(
        name="delete files",
        workdir="source/build",
        command="rm -rf *.exe *.dll gio-modules platforms sqldrivers imageformats styles gstreamer-plugins nsisplugins",
        haltOnFailure=True))
    return f
def MakeAppImageBuilder(name):
    """Build factory producing a Strawberry AppImage.

    *name* selects the Qt flavor ("Qt6" enables the Qt 6 build) and is
    appended to the VERSION string passed to appimagetool.
    """
    checkout_args = GitArgs("strawberry", "master")
    checkout_args["mode"] = "full"
    checkout_args["method"] = "fresh"

    f = factory.BuildFactory()
    f.addStep(git.Git(**checkout_args))
    f.addStep(shell.ShellCommand(
        name="clean build",
        workdir="source",
        command="rm -rf build/AppDir",
        haltOnFailure=True))

    def run(step_name, cmd, env=None):
        # All remaining shell steps share the same workdir and failure policy.
        f.addStep(shell.ShellCommand(
            name=step_name,
            workdir="source/build",
            command=cmd,
            env=env,
            haltOnFailure=True))

    qt_flag = "-DBUILD_WITH_QT6=ON" if name == "Qt6" else "-DBUILD_WITH_QT5=ON"
    run("run cmake", ["cmake", "..", "-DCMAKE_INSTALL_PREFIX=/usr", qt_flag])
    f.addStep(steps.SetPropertyFromCommand(
        name="get version",
        workdir="source",
        command=["git", "describe", "--tags", "--always"],
        property="output-version",
        haltOnFailure=True))
    env_version = {
        "VERSION": util.Interpolate("%(prop:output-version)s-%(kw:name)s",
                                    name=name),
    }
    f.addStep(shell.Compile(
        name="compile",
        workdir="source/build",
        command=["make", "-j", MAKE_JOBS],
        haltOnFailure=True))
    run("run make install", ["make", "install", "DESTDIR=AppDir"])
    # The real tagreader binary is wrapped by a launcher script.
    run("rename strawberry-tagreader",
        ["mv", "AppDir/usr/bin/strawberry-tagreader",
         "./AppDir/usr/bin/strawberry-tagreader-bin"])
    run("copy strawberry-tagreader.sh",
        ["cp", "/config/dist/strawberry-tagreader.sh",
         "./AppDir/usr/bin/strawberry-tagreader"])
    run("cp appdata",
        ["cp", "./AppDir/usr/share/metainfo/org.strawberrymusicplayer.strawberry.appdata.xml",
         "./AppDir/"])
    run("cp icon",
        ["cp", "./AppDir/usr/share/icons/hicolor/128x128/apps/strawberry.png",
         "./AppDir/"])
    run("run appimagetool deploy",
        ["appimagetool", "-s", "deploy",
         "AppDir/usr/share/applications/org.strawberrymusicplayer.strawberry.desktop"],
        env=env_version)
    run("copy gst-plugin-scanner.sh",
        ["cp", "/config/dist/gst-plugin-scanner.sh",
         "./AppDir/usr/libexec/gstreamer-1.0/"])
    run("run appimagetool", ["appimagetool", "AppDir"], env=env_version)
    f.addStep(steps.SetPropertyFromCommand(
        name="get output filename",
        workdir="source",
        command=["sh", "-c", "ls -dt build/Strawberry*.AppImage | head -n 1"],
        property="output-filepath",
        haltOnFailure=True))
    f.addStep(steps.SetProperties(properties=get_base_filename))
    f.addStep(UploadPackage("appimage"))
    run("delete files", "rm -rf AppDir *.AppImage")
    return f
def MakePacmanBuilder(distro, version):
    """Build factory producing an Arch-style package with makepkg."""
    f = factory.BuildFactory()
    f.addStep(git.Git(**GitArgs("strawberry", "master")))
    f.addStep(shell.ShellCommand(
        name="clean build",
        workdir="source",
        command="rm -rf build",
        haltOnFailure=True))
    f.addStep(shell.ShellCommand(
        name="run cmake",
        workdir="source/build",
        command=["cmake", ".."],
        haltOnFailure=True))
    f.addStep(shell.ShellCommand(
        name="run maketarball",
        workdir="source/build",
        command=["../dist/scripts/maketarball.sh"],
        haltOnFailure=True))
    f.addStep(shell.ShellCommand(
        name="copy PKGBUILD",
        workdir="source/build",
        command=["cp", "../dist/unix/PKGBUILD", "."],
        haltOnFailure=True))
    f.addStep(shell.Compile(
        name="run makepkg",
        workdir="source/build",
        command=["makepkg", "-f"],
        haltOnFailure=True))
    f.addStep(steps.SetPropertyFromCommand(
        name="get output filename",
        workdir="source",
        command=["sh", "-c",
                 "ls -dt build/strawberry-*.pkg.tar.xz | head -n 1"],
        property="output-filepath",
        haltOnFailure=True))
    f.addStep(steps.SetProperties(properties=get_base_filename))
    # Upload is currently disabled for pacman builds.
    #f.addStep(UploadPackage(distro))
    f.addStep(shell.ShellCommand(
        name="delete file",
        workdir="source/build",
        command="rm -f *.xz",
        haltOnFailure=True))
    return f
def _identifyAndSaveProducedArtifactsOntoBuildmaster(self):
    """Detect which artifact types were produced and archive them on the
    buildmaster, then refresh the per-type "latest" symlink.

    NOTE(review): the multi-line shell snippets below were reconstructed
    from a whitespace-mangled source; confirm the line breaks against the
    original file.
    """
    # Probe which of the known artifact directories are non-empty; the
    # result is stored space-separated in the "artifacts_produced" property.
    self.addStep(steps.SetPropertyFromCommand(
        name="assert which artifact have been produced",
        property="artifacts_produced",
        command=["/usr/bin/env", "bash", "-c", textwrap.dedent(
            r"""
            set -e -u -o pipefail
            artifacts_produced=() # which artifact have been produced
            for type in sdks cache build; do
                if [[ -d "artifacts/${type}" && -n "$(ls -A "artifacts/${type}")" ]]; then
                    artifacts_produced+=("${type}")
                fi
            done
            echo "${artifacts_produced[@]}"
            """).strip()],
        haltOnFailure=False,
        warnOnFailure=True,
    ))

    def is_artifact_save_necessary(artifact_type):
        # doStepIf predicate: save only when this artifact type was both
        # requested (produce_<type>_artifacts property) and actually produced.
        def checker(step: BuildStep) -> bool:
            if artifact_type not in ['sdks', 'cache', 'build']:
                raise ValueError("is_artifact_save_necessary: Unsupported artifact type {!r}".format(artifact_type))
            artifact_produced_property = "produce_{}_artifacts".format(artifact_type)
            produced = str(step.getProperty("artifacts_produced")).split()
            return (bool(step.getProperty(artifact_produced_property))
                    and artifact_type in produced)
        return checker

    for artifact_type in ['sdks', 'cache', 'build']:
        # Upload the artifact directory to the buildmaster FTP...
        self.addStep(steps.ShellCommand(
            name="save {} artifact on buildmaster".format(artifact_type)[:50],
            description="save the {} artifact archive on the buildmaster".format(artifact_type),
            haltOnFailure=True,
            doStepIf=is_artifact_save_necessary(artifact_type),
            command=["/usr/bin/env", "bash", "-c", textwrap.dedent(
                r"""
                set -e -u -o pipefail
                cat <<END_OF_LFTP_SCRIPT | lftp
                connect ${ARTIFACTS_FTP_URL}
                lcd ${SOURCE_PATH_ON_WORKER}
                mkdir -p ${DESTINATION_PATH_IN_FTP}
                cd ${DESTINATION_PATH_IN_FTP}
                mput *
                END_OF_LFTP_SCRIPT
                """).strip()],
            env={
                "ARTIFACTS_FTP_URL": self.buildmaster_setup.artifacts_ftp_url,
                "SOURCE_PATH_ON_WORKER": 'artifacts/{}'.format(artifact_type),
                "DESTINATION_PATH_IN_FTP": compute_artifact_path(
                    "/",
                    artifact_type,
                    "buildername",
                    buildnumber_shard=True,
                ),
            },
        ))
        # ...and point the "latest" symlink at this build's shard.
        self.addStep(steps.MasterShellCommand(
            name="symlink latest {} artifacts".format(artifact_type)[:50],
            haltOnFailure=True,
            doStepIf=is_artifact_save_necessary(artifact_type),
            command=[
                "ln", "-snf",
                util.Interpolate("%(prop:buildnumber)s"),
                compute_artifact_path(
                    self.buildmaster_setup.artifacts_dir,
                    artifact_type,
                    "buildername",
                    buildnumber_shard="latest",
                ),
            ],
        ))
def MakeRPMBuilder(distro, version):
    """Build factory producing RPM packages with rpmbuild and uploading
    the main, debugsource and debuginfo packages."""
    f = factory.BuildFactory()
    f.addStep(git.Git(**GitArgs("strawberry", "master")))
    f.addStep(shell.ShellCommand(
        name="run cmake",
        workdir="source/build",
        command=["cmake", ".."],
        haltOnFailure=True))
    f.addStep(shell.ShellCommand(
        name="run maketarball",
        workdir="source/build",
        command=["../dist/scripts/maketarball.sh"],
        haltOnFailure=True))
    f.addStep(shell.ShellCommand(
        name="move tarball to SOURCES",
        workdir="source/build",
        command="mv strawberry-*.tar.xz ~/rpmbuild/SOURCES",
        haltOnFailure=True))
    f.addStep(shell.Compile(
        name="run rpmbuild",
        workdir="source/build",
        command=["rpmbuild", "-ba", "../dist/unix/strawberry.spec"],
        haltOnFailure=True))
    if version not in ['tumbleweed']:
        # Main package, debugsource and debuginfo each go through the same
        # locate -> derive-base-filename -> upload sequence.
        package_lookups = (
            ("get output rpm filename",
             "ls -dt ~/rpmbuild/RPMS/*/strawberry-*.rpm | grep -v debuginfo | grep -v debugsource | head -n 1"),
            ("get output debugsource rpm filename",
             "ls -dt ~/rpmbuild/RPMS/*/strawberry-debugsource-*.rpm | head -n 1"),
            ("get output debuginfo rpm filename",
             "ls -dt ~/rpmbuild/RPMS/*/strawberry-debuginfo-*.rpm | head -n 1"),
        )
        for step_name, list_cmd in package_lookups:
            f.addStep(steps.SetPropertyFromCommand(
                name=step_name,
                workdir="source",
                command=["sh", "-c", list_cmd],
                property="output-filepath",
                haltOnFailure=True))
            f.addStep(steps.SetProperties(properties=get_base_filename))
            f.addStep(UploadPackage(distro + "/" + version))
    # Cleanup runs regardless of whether packages were uploaded.
    # NOTE(review): source indentation was mangled — confirm these two steps
    # sit outside the version check in the original file.
    f.addStep(shell.ShellCommand(
        name="delete files",
        workdir="source",
        command="rm -f ~/rpmbuild/SOURCES/*.xz ~/rpmbuild/RPMS/*/*.rpm",
        haltOnFailure=True))
    f.addStep(shell.ShellCommand(
        name="clean rpmbuild",
        workdir="source/build",
        command="find ~/rpmbuild/ -type f -delete"))
    return f
def make_builder_config(repo_url, name, worker_name, config, lock,
                        snapshots_dir, snapshots_url, snapshots_default_max):
    """Create a BuilderConfig that fetches, configures, compiles, tests and
    (when snapshot publishing is configured) packages and uploads snapshots.

    Args:
        repo_url: GitHub repository to build.
        name: builder name.
        worker_name: worker this builder runs on.
        config: worker config mapping (warning pattern, suppressions, ...).
        lock: worker lock, acquired in counting mode.
        snapshots_dir/snapshots_url: where packaged snapshots are stored and
            served from; packaging steps are skipped when either is None.
        snapshots_default_max: fallback for the num_snapshots_to_keep property.
    """
    builder = util.BuildFactory()
    builder.addStep(steps.SetProperties(name="Worker Config File",
                                        properties=config,
                                        hideStepIf=True))
    builder.addStep(steps.SetPropertiesFromEnv(
        variables=["WORKER_HOST", "WORKER_REPO_DIR"],
        hideStepIf=True))
    # TODO: use `reference` to a common volume instead? or make the builder
    # dependent on another fetch-only builder so only one builder tries to pull it?
    builder.addStep(steps.GitHub(
        repourl=repo_url,
        workdir=Property("WORKER_REPO_DIR", None),
        logEnviron=False,
        getDescription={"always": True, "tags": True}))
    builder.addStep(FileExistsSetProperty(
        name="config.mk Existence Check",
        property="already_configured",
        file="%s/config.mk" % builder.workdir,
        hideStepIf=True))
    compilation_environment = Property("env", {})
    builder.addStep(steps.Configure(
        command=compute_configure,
        env=compilation_environment,
        doStepIf=ConfigChecker().needs_configuration))
    builder.addStep(steps.SetPropertyFromCommand(
        name="Python (Worker)",
        property="cpu_count",
        command=["python", "-c", GET_CPU_COUNT],
        flunkOnFailure=False,
        warnOnFailure=True,
        hideStepIf=True,
        description="getting CPU count",
        descriptionDone="got CPU count"))
    # In at least Buildbot 0.9.12, warningPattern and suppressionList are not
    # renderable, so just get the properties from the config file immediately
    compiler_warning_pattern = config.get(
        "compiler_warning_pattern",
        r"^([^:]+):(\d+):(?:\d+:)? [Ww]arning: (.*)$")
    compiler_warning_extractor = steps.Compile.warnExtractFromRegexpGroups
    compiler_suppression_file = Property("compiler_suppression_file", None)
    compiler_suppression_list = config.get("compiler_suppression_list", None)
    builder.addStep(steps.Compile(
        command=["make", Interpolate("-j%(prop:cpu_count:~1)s")],
        env=compilation_environment,
        warningPattern=compiler_warning_pattern,
        warningExtractor=compiler_warning_extractor,
        suppressionFile=compiler_suppression_file,
        suppressionList=compiler_suppression_list))
    builder.addStep(steps.Test(
        command=["make",
                 Interpolate("%(prop:can_run_tests:#?|test|test/runner)s")],
        env=compilation_environment,
        warningPattern=compiler_warning_pattern,
        warningExtractor=compiler_warning_extractor,
        suppressionFile=compiler_suppression_file,
        suppressionList=compiler_suppression_list,
        haltOnFailure=True,
        flunkOnFailure=True))
    if snapshots_dir is not None and snapshots_url is not None:
        # BUG FIX: the original compared with `is not "/"` — an identity
        # comparison against a string literal, which is unreliable and a
        # SyntaxWarning since Python 3.8. Use value inequality instead.
        if snapshots_dir and snapshots_dir[-1] != "/":
            snapshots_dir += "/"
        if snapshots_url and snapshots_url[-1] != "/":
            snapshots_url += "/"
        # %% survives the %-formatting so Interpolate sees %(prop:branch)s.
        snapshots_dir = "%s%%(prop:branch)s/" % snapshots_dir
        snapshots_url = "%s%%(prop:branch)s/" % snapshots_url
        builder.addStep(steps.SetProperty(
            name="Computed By %s" % path.basename(__file__),
            property="package_name",
            value=compute_package_name,
            hideStepIf=True,
            doStepIf=should_package))
        builder.addStep(Package(
            package_name=Property("package_name"),
            package_files=Property("package_files", None),
            package_format=Property("package_archive_format"),
            make_target=Property("package_make_target"),
            split_debug_package=Property("split_debug_package", True),
            extra_files=Property("package_extra_files", None),
            package_script=Interpolate(config.get("package_script", "")),
            env=compilation_environment,
            doStepIf=should_package))
        latest_link = Interpolate(
            "%s%%(prop:buildername)s-latest."
            "%%(prop:package_archive_format:-tar.xz)s" % snapshots_dir)
        make_uploader_steps(builder=builder,
                            snapshots_dir=snapshots_dir,
                            snapshots_url=snapshots_url,
                            publish_name="archive",
                            property_name="package_filename",
                            latest_link=latest_link,
                            do_step_if=should_package)
        latest_link = Interpolate(
            "%s%%(prop:buildername)s"
            "-latest-debug-symbols.tar.xz" % snapshots_dir)
        make_uploader_steps(builder=builder,
                            snapshots_dir=snapshots_dir,
                            snapshots_url=snapshots_url,
                            publish_name="debug archive",
                            property_name="debug_package_filename",
                            latest_link=latest_link,
                            do_step_if=should_package_debug)
        builder.addStep(MasterCleanSnapshots(
            name="clean old snapshots",
            workdir=Interpolate(snapshots_dir),
            file_prefix=Interpolate("%(prop:buildername)s-"),
            num_to_keep=Property("num_snapshots_to_keep",
                                 snapshots_default_max),
            secondary_file_suffix="-debug-symbols",
            file_extensions=r"\.(?:tar(?:\.[xg]z)?|[a-z]{3,4})$",
            doStepIf=should_package,
            hideStepIf=True))
    return util.BuilderConfig(name=name,
                              workername=worker_name,
                              collapseRequests=True,
                              factory=builder,
                              nextBuild=pick_next_build,
                              locks=[lock.access("counting")])
def _make_factory(name, ms):
    """Build a BuildFactory that compiles, tests, and publishes a Docker
    image for one project.

    Args:
        name: Project name; also used to derive the container-name property.
        ms: Per-project settings dict; reads the keys 'giturl',
            'fail_on_tests', and 'run_tests'.

    Returns:
        A fully configured util.BuildFactory.
    """
    f = util.BuildFactory()

    # Sync Git
    f.addStep(
        steps.Git(repourl=ms['giturl'],
                  method='clobber',
                  mode='full',
                  shallow=False,
                  haltOnFailure=True,
                  name='git sync'))

    # TODO: login to dockerhub

    f.addStep(
        steps.SetPropertyFromCommand(
            name="set property from make version",
            command=["make", "version", "--always-make"],
            property="project_version",
            haltOnFailure=True))

    # BUG FIX: the branch placeholder was written as '$(prop:branch)s'
    # (shell-style '$'); Interpolate passes that through verbatim, so the
    # literal text appeared in every version string. It must use '%' for
    # Buildbot to substitute the branch name.
    version_specific_str = (
        '%(prop:project_version)s-%(prop:branch)s-%(prop:buildnumber)s-'
        + str(DATABASE_VERSION))
    version_specifier = util.Interpolate('VERSION=' + version_specific_str)
    commit_hash_specifier = util.Interpolate('COMMIT_HASH=' +
                                             '%(prop:got_revision)s')

    # Compile
    f.addStep(
        steps.ShellCommand(
            name="compile",
            command=["make", "build", version_specifier,
                     commit_hash_specifier],
            haltOnFailure=True,
        ))

    # Run tests; a failing test run only warns unless the project opts in
    # to hard failures via 'fail_on_tests'.
    f.addStep(
        steps.ShellCommand(
            name="run tests",
            command=["make", "test", version_specifier,
                     commit_hash_specifier],
            warnOnFailure=not ms['fail_on_tests'],
            haltOnFailure=ms['fail_on_tests'],
            doStepIf=ms['run_tests'],
        ))

    # Build image and push to Docker registry (deploy branches only)
    f.addStep(
        steps.ShellCommand(
            name="push docker image to registry",
            haltOnFailure=True,
            command=["make", "push", version_specifier,
                     commit_hash_specifier],
            doStepIf=_is_deploy_branch,
        ))

    # NOTE(review): the container name embeds an escaped slash (r'\/');
    # presumably a downstream consumer treats it as a regex -- confirm
    # before changing.
    f.addStep(
        steps.SetProperties(
            name="set container properties",
            properties={
                'container_name': REGISTRY + r'\/' + name,
                'container_tag': util.Interpolate(version_specific_str),
                'project_name': name,
            }))

    # TODO: add actual k8s deployment step
    # TODO: add liveness check step
    return f
def getBuildPipeline():
    """Assemble the build factory that packages Opencast as RPMs and
    publishes them to the unstable el repository on S3.

    Returns:
        A util.BuildFactory that clones the rpm packaging configs, fetches
        the prebuilt artifacts, builds and signs the RPMs, uploads them,
        prunes old unstable builds, and regenerates the repo metadata.
    """
    rpmsClone = steps.Git(
        repourl="{{ source_rpm_repo_url }}",
        # Allow a manual rpmspec_override property; fall back to the branch.
        branch=util.Interpolate("%(prop:rpmspec_override:-%(prop:branch)s)s"),
        alwaysUseLatest=True,
        shallow=True,
        mode="full",
        method="clobber",
        flunkOnFailure=True,
        haltOnFailure=True,
        name="Cloning rpm packaging configs")

    rpmsVersion = steps.SetPropertyFromCommand(
        command="git rev-parse HEAD",
        property="rpm_script_rev",
        flunkOnFailure=True,
        warnOnFailure=True,
        haltOnFailure=True,
        workdir="build",
        name="Get rpm script revision")

    rpmsFullVersion = steps.SetProperty(
        property="rpm_version",
        value=util.Interpolate(
            "%(prop:pkg_major_version)s.git%(prop:short_revision)s-%(prop:buildnumber)s"
        ))

    rpmsSetup = common.shellSequence(
        commands=[
            common.shellArg(
                # We're using a string here rather than an arg array since we need the shell functions
                command='echo -e "%_topdir `pwd`" > ~/.rpmmacros',
                logname="rpmdev-setup"),
        ],
        workdir="build/rpmbuild",
        name="Fetch built artifacts and build prep")

    rpmsFetch = common.syncAWS(
        pathFrom="s3://{{ s3_public_bucket }}/builds/{{ builds_fragment }}",
        pathTo="rpmbuild/SOURCES",
        name="Fetch build from S3")

    # Patch the spec file in place: source version, changelog entry,
    # package version, and build number.
    rpmsPrep = common.shellSequence(
        commands=[
            common.shellArg(
                command=[
                    'sed', '-i',
                    util.Interpolate(
                        's/define srcversion .*$/define srcversion %(prop:pkg_major_version)s.%(prop:pkg_minor_version)s/g'
                    ),
                    util.Interpolate('opencast.spec')
                ],
                logname='version'),
            common.shellArg(
                command=[
                    'rpmdev-bumpspec', '-u',
                    '"Buildbot <*****@*****.**>"', '-c',
                    util.Interpolate(
                        'Opencast revision %(prop:got_revision)s, packaged with RPM scripts version %(prop:rpm_script_rev)s'
                    ),
                    util.Interpolate('opencast.spec')
                ],
                logname='rpmdev-bumpspec'),
            common.shellArg(
                command=[
                    'sed', '-i',
                    # BUG FIX: previously a non-raw string containing the
                    # invalid escape '\(' (SyntaxWarning, future error);
                    # the raw string yields identical runtime bytes.
                    util.Interpolate(
                        r"s/\(Version: *\) .*/\1 %(prop:pkg_major_version)s.git%(prop:short_revision)s/"
                    ),
                    util.Interpolate('opencast.spec')
                ],
                logname='version'),
            common.shellArg(
                command=[
                    'sed', '-i',
                    util.Interpolate(
                        's/2%%{?dist}/%(prop:buildnumber)s%%{?dist}/g'),
                    util.Interpolate('opencast.spec')
                ],
                logname='buildnumber'),
            common.shellArg(
                command=['rm', '-f', 'BUILD/opencast/build/revision.txt'],
                logname="cleanup")
        ],
        workdir="build/rpmbuild/SPECS",
        name="Prepping rpms")

    rpmsBuild = common.shellSequence(
        commands=getRPMBuilds,
        workdir="build/rpmbuild/SPECS",
        name="Build rpms")

    # Note: We're using a string here because using the array disables shell globbing!
    rpmsUpload = common.syncAWS(
        pathFrom="rpmbuild/RPMS/noarch",
        pathTo="s3://{{ s3_public_bucket }}/repo/rpms/unstable/el/%(prop:el_version)s/noarch/",
        name="Upload rpms to S3")

    # Keep only the most recent unstable builds: use the allinone package
    # list to discover versions, then delete everything older than the
    # newest five.
    rpmsPrune = common.shellCommand(
        command=util.Interpolate(
            "ls -t /builder/s3/repo/rpms/unstable/el/%(prop:el_version)s/noarch | grep allinone | tail -n +6 | cut -f 4 -d '-' | while read version; do rm -f /builder/s3/repo/rpms/unstable/el/%(prop:el_version)s/noarch/*$version; done"
        ),
        name=util.Interpolate(
            "Pruning %(prop:pkg_major_version)s unstable repository"))

    repoMetadata = common.shellCommand(
        command=['createrepo', '.'],
        workdir=util.Interpolate(
            "/builder/s3/repo/rpms/unstable/el/%(prop:el_version)s/noarch"),
        name="Building repository")

    f_package_rpms = util.BuildFactory()
    f_package_rpms.addStep(common.getPreflightChecks())
    f_package_rpms.addStep(rpmsClone)
    f_package_rpms.addStep(rpmsVersion)
    f_package_rpms.addStep(common.getLatestBuildRevision())
    f_package_rpms.addStep(common.getShortBuildRevision())
    f_package_rpms.addStep(rpmsFullVersion)
    f_package_rpms.addStep(rpmsSetup)
    f_package_rpms.addStep(rpmsFetch)
    f_package_rpms.addStep(rpmsPrep)
    f_package_rpms.addStep(common.loadSigningKey())
    f_package_rpms.addStep(rpmsBuild)
    f_package_rpms.addStep(common.unloadSigningKey())
    f_package_rpms.addStep(rpmsUpload)
    f_package_rpms.addStep(common.deployS3fsSecrets())
    f_package_rpms.addStep(common.mountS3fs())
    f_package_rpms.addStep(rpmsPrune)
    f_package_rpms.addStep(repoMetadata)
    f_package_rpms.addStep(common.unmountS3fs())
    f_package_rpms.addStep(common.cleanupS3Secrets())
    f_package_rpms.addStep(common.getClean())
    return f_package_rpms
def make_builder_config(repo_url, name, worker_name, config, lock,
                        snapshots_dir, snapshots_url, snapshots_default_max):
    """Create a BuilderConfig that fetches, configures, builds, tests, and
    (optionally) packages and publishes snapshot archives of the project.

    Args:
        repo_url: GitHub repository to build.
        name: Builder name.
        worker_name: Name of the worker this builder runs on.
        config: Worker config dict; also queried for compiler warning
            pattern/suppression settings.
        lock: Master lock, acquired exclusively by this builder.
        snapshots_dir: Master-side directory for published archives, or
            None to disable publishing.
        snapshots_url: Public URL prefix matching snapshots_dir, or None.
        snapshots_default_max: Fallback for the num_snapshots_to_keep
            property when pruning old snapshots.

    Returns:
        A util.BuilderConfig wrapping the assembled factory.
    """
    # BUG FIX: these comparisons used "is not" (object identity), which
    # only worked by accident of CPython string interning and raises a
    # SyntaxWarning; value inequality is what is meant.
    if snapshots_dir and snapshots_dir[-1] != "/":
        snapshots_dir += "/"
    if snapshots_url and snapshots_url[-1] != "/":
        snapshots_url += "/"

    builder = util.BuildFactory()
    builder.addStep(
        steps.SetProperties(name="Worker Config File",
                            properties=config,
                            hideStepIf=True))
    builder.addStep(
        steps.SetPropertiesFromEnv(
            variables=["WORKER_HOST", "WORKER_REPO_DIR"], hideStepIf=True))

    # TODO: use `reference` to a common volume instead? or make the builder
    # dependent on another fetch-only builder so only one builder tries to
    # pull it?
    builder.addStep(
        steps.GitHub(repourl=repo_url,
                     workdir=Property("WORKER_REPO_DIR", None),
                     logEnviron=False,
                     getDescription={
                         "always": True,
                         "tags": True
                     }))
    builder.addStep(
        FileExistsSetProperty(name="config.mk Existence Check",
                              property="already_configured",
                              file="%s/config.mk" % builder.workdir,
                              hideStepIf=True))

    compilation_environment = Property("env", {})
    builder.addStep(
        steps.Configure(command=compute_configure,
                        env=compilation_environment,
                        doStepIf=is_not_configured))
    builder.addStep(
        steps.SetPropertyFromCommand(name="Python (Worker)",
                                     property="cpu_count",
                                     command=["python", "-c", GET_CPU_COUNT],
                                     flunkOnFailure=False,
                                     warnOnFailure=True,
                                     hideStepIf=True,
                                     description="getting CPU count",
                                     descriptionDone="got CPU count"))

    # In at least Buildbot 0.9.12, warningPattern and suppressionList are not
    # renderable, so just get the properties from the config file immediately
    compiler_warning_pattern = config.get(
        "compiler_warning_pattern",
        r"^([^:]+):(\d+):(?:\d+:)? [Ww]arning: (.*)$")
    compiler_warning_extractor = steps.Compile.warnExtractFromRegexpGroups
    compiler_suppression_file = Property("compiler_suppression_file", None)
    compiler_suppression_list = config.get("compiler_suppression_list", None)

    builder.addStep(
        steps.Compile(command=["make",
                               Interpolate("-j%(prop:cpu_count:~1)s")],
                      env=compilation_environment,
                      warningPattern=compiler_warning_pattern,
                      warningExtractor=compiler_warning_extractor,
                      suppressionFile=compiler_suppression_file,
                      suppressionList=compiler_suppression_list))
    builder.addStep(
        steps.Test(command=[
            "make",
            Interpolate("%(prop:can_run_tests:#?|test|test/runner)s")
        ],
                   env=compilation_environment,
                   warningPattern=compiler_warning_pattern,
                   warningExtractor=compiler_warning_extractor,
                   suppressionFile=compiler_suppression_file,
                   suppressionList=compiler_suppression_list,
                   haltOnFailure=True,
                   flunkOnFailure=True))

    # Packaging and publishing are only wired up when both a destination
    # directory and a public URL are configured.
    if snapshots_dir is not None and snapshots_url is not None:
        builder.addStep(
            steps.SetProperty(name="Computed By %s" % path.basename(__file__),
                              property="package_name",
                              value=compute_package_name,
                              hideStepIf=True,
                              doStepIf=should_package))
        builder.addStep(
            Package(package_name=Property("package_name"),
                    package_format=Property("package_archive_format"),
                    make_target=Property("package_make_target"),
                    package_directory=Property("package_directory", None),
                    strip_binaries=Property("package_strip_binaries", None),
                    env=compilation_environment,
                    doStepIf=should_package))

        source_path = Property("package_filename")
        target_path = Interpolate("%s%%(prop:package_filename)s" %
                                  snapshots_dir)
        target_url = Interpolate("%s%%(prop:package_filename)s" %
                                 snapshots_url)
        # This is not an ideal target link calculation since the archive
        # format in package_filename might be fixed up by the Package step,
        # but here only None is converted into tar.xz, which is not exactly
        # the same
        target_link = Interpolate(
            "%s%%(prop:buildername)s-latest."
            "%%(prop:package_archive_format:-tar.xz)s" % snapshots_dir)

        builder.addStep(
            CleaningFileUpload(name="publish",
                               workersrc=source_path,
                               masterdest=target_path,
                               url=target_url,
                               clean=True,
                               doStepIf=should_package))
        builder.addStep(
            steps.MasterShellCommand(
                name="update latest archive",
                command=["ln", "-sf", target_path, target_link],
                logEnviron=False,
                doStepIf=should_package))
        builder.addStep(
            MasterCleanSnapshots(
                name="clean old snapshots",
                workdir=snapshots_dir,
                file_prefix=Interpolate("%(prop:buildername)s-"),
                num_to_keep=Property("num_snapshots_to_keep",
                                     snapshots_default_max),
                doStepIf=should_package))

    return util.BuilderConfig(name=name,
                              workername=worker_name,
                              collapseRequests=True,
                              factory=builder,
                              nextBuild=pick_next_build,
                              locks=[lock.access("exclusive")])
def getBuildPipeline():
    """Assemble the build factory that packages Opencast as Debian packages
    and publishes them to the apt repository.

    Returns:
        A util.BuildFactory that clones the deb packaging configs, fetches
        the prebuilt artifacts, builds and signs the debs, ingests them into
        the per-major-version repo, prunes unstable builds, and publishes.
    """
    debsClone = steps.Git(repourl="{{ source_deb_repo_url }}",
                          branch=util.Property('branch'),
                          alwaysUseLatest=True,
                          mode="full",
                          method="fresh",
                          flunkOnFailure=True,
                          haltOnFailure=True,
                          name="Cloning deb packaging configs")

    debsVersion = steps.SetPropertyFromCommand(
        command="git rev-parse HEAD",
        property="deb_script_rev",
        flunkOnFailure=True,
        haltOnFailure=True,
        workdir="build",
        name="Get Debian script revision")

    # Always runs so stale symlinks from a previous build can't poison
    # this one.
    removeSymlinks = common.shellCommand(
        command=['rm', '-rf', 'binaries', 'outputs'],
        alwaysRun=True,
        name="Prep cloned repo for CI use")

    debsFetch = common.syncAWS(
        pathFrom="s3://{{ s3_public_bucket }}/builds/{{ builds_fragment }}",
        pathTo="binaries/%(prop:pkg_major_version)s.%(prop:pkg_minor_version)s/",
        name="Fetch build from S3")

    debsBuild = common.shellSequence(
        commands=[
            common.shellArg(
                command=[
                    'dch', '--changelog', 'opencast/debian/changelog',
                    '--newversion',
                    util.Interpolate(
                        '%(prop:pkg_major_version)s.%(prop:pkg_minor_version)s-%(prop:buildnumber)s-%(prop:short_revision)s'
                    ), '-b', '-D', 'unstable', '-u', 'low', '--empty',
                    util.Interpolate(
                        'Opencast revision %(prop:got_revision)s, packaged with Debian scripts version %(prop:deb_script_rev)s'
                    )
                ],
                logname='dch'),
            common.shellArg(
                command=[
                    'rm', '-f',
                    util.Interpolate(
                        "binaries/%(prop:pkg_major_version)s.%(prop:pkg_minor_version)s/revision.txt"
                    )
                ],
                logname='cleanup'),
            common.shellArg(
                command=util.Interpolate(
                    'echo "source library.sh\ndoOpencast %(prop:pkg_major_version)s.%(prop:pkg_minor_version)s %(prop:branch)s %(prop:got_revision)s" | tee build.sh'
                ),
                logname='write'),
            common.shellArg(
                command=util.Interpolate(
                    'ln -s opencast-%(prop:pkg_major_version)s_%(prop:pkg_major_version)s.%(prop:pkg_minor_version)s.orig.tar.xz opencast-%(prop:pkg_major_version)s_%(prop:pkg_major_version)s.%(prop:pkg_minor_version)s-%(prop:buildnumber)s.orig.tar.xz'
                ),
                logname='link'),
            common.shellArg(command=['bash', 'build.sh'], logname='build'),
            common.shellArg(
                command=util.Interpolate(
                    'echo "Opencast version %(prop:got_revision)s packaged with version %(prop:deb_script_rev)s" | tee outputs/%(prop:oc_commit)s/revision.txt'
                ),
                logname='revision')
        ],
        env={
            "NAME": "Buildbot",
            "EMAIL": "buildbot@{{ groups['master'][0] }}",
            "SIGNING_KEY": util.Interpolate("%(prop:signing_key)s")
        },
        name="Build debs")

    debRepoClone = steps.Git(repourl="{{ source_deb_packaging_repo_url }}",
                             branch="{{ deb_packaging_repo_branch }}",
                             alwaysUseLatest=True,
                             mode="full",
                             method="fresh",
                             flunkOnFailure=True,
                             haltOnFailure=True,
                             name="Cloning deb repo configs")

    debRepoLoadKeys = common.shellCommand(command=['./build-keys'],
                                          name="Loading signing keys")

    debRepoCreate = common.shellCommand(
        command=[
            './create-branch',
            util.Interpolate("%(prop:pkg_major_version)s.x")
        ],
        name=util.Interpolate(
            "Ensuring %(prop:pkg_major_version)s.x repos exist"))

    # BUG FIX (here and in debRepoPrune): these name strings carried an
    # f-string prefix with no {} placeholders -- the prefix did nothing and
    # invited silent conflicts with Interpolate's own syntax; dropped it.
    debRepoIngest = common.shellCommand(
        command=[
            './include-binaries',
            util.Interpolate("%(prop:pkg_major_version)s.x"),
            util.Interpolate("%(prop:repo_component)s"),
            util.Interpolate(
                "outputs/%(prop:revision)s/opencast-%(prop:pkg_major_version)s_%(prop:pkg_major_version)s.x-%(prop:buildnumber)s-%(prop:short_revision)s_amd64.changes"
            )
        ],
        name=util.Interpolate(
            "Adding build to %(prop:pkg_major_version)s.x-%(prop:repo_component)s"
        ))

    debRepoPrune = common.shellCommand(
        command=[
            './clean-unstable-repo',
            util.Interpolate("%(prop:pkg_major_version)s.x")
        ],
        name=util.Interpolate(
            "Pruning %(prop:pkg_major_version)s.x unstable repository"))

    debRepoPublish = common.shellCommand(
        command=[
            "./publish-branch",
            util.Interpolate("%(prop:pkg_major_version)s.x"),
            util.Interpolate("%(prop:signing_key)s")
        ],
        name=util.Interpolate("Publishing %(prop:pkg_major_version)s.x"),
        env={
            "AWS_ACCESS_KEY_ID": util.Secret("s3.public_access_key"),
            "AWS_SECRET_ACCESS_KEY": util.Secret("s3.public_secret_key")
        })

    f_package_debs = util.BuildFactory()
    f_package_debs.addStep(common.getPreflightChecks())
    f_package_debs.addStep(debsClone)
    f_package_debs.addStep(debsVersion)
    f_package_debs.addStep(common.getLatestBuildRevision())
    f_package_debs.addStep(common.getShortBuildRevision())
    f_package_debs.addStep(removeSymlinks)
    f_package_debs.addStep(debsFetch)
    f_package_debs.addStep(common.loadSigningKey())
    f_package_debs.addStep(debsBuild)
    f_package_debs.addStep(debRepoClone)
    f_package_debs.addStep(debRepoLoadKeys)
    f_package_debs.addStep(common.deployS3fsSecrets())
    f_package_debs.addStep(common.mountS3fs())
    f_package_debs.addStep(debRepoCreate)
    f_package_debs.addStep(debRepoIngest)
    f_package_debs.addStep(debRepoPrune)
    f_package_debs.addStep(debRepoPublish)
    f_package_debs.addStep(common.unloadSigningKey())
    f_package_debs.addStep(common.unmountS3fs())
    f_package_debs.addStep(common.cleanupS3Secrets())
    f_package_debs.addStep(common.getClean())
    return f_package_debs