def singleStageBuild(stage):
    """Populate *stage* with the single-stage GOL notebook recipe.

    Adds metadata labels, copies the notebook example and the pngwriter
    sources into the container, builds and installs pngwriter, and
    registers a runscript that copies the notebook to a writable /tmp
    location before starting jupyter-lab.  The finished recipe is printed
    to stdout.

    :param stage: hpccm.Stage to extend (modified in place)
    """
    # NOTE(review): relies on the module-level `container_version` constant.
    stage += label(metadata={'MAINTAINER': 'Simeon Ehrig'})
    stage += label(metadata={'EMAIL': '*****@*****.**'})
    stage += label(metadata={'Version': str(container_version)})
    stage += environment(variables={'GOL_VERSION': str(container_version)})

    # copy example inside container
    stage += copy(src='notebook', dest='/')

    # copy and build the pngwriter library
    stage += copy(src='pngwriter', dest='/opt')
    stage += packages(ospackages=['libpng-dev'])
    cmake = CMakeBuild(prefix='/notebook/pngwriter')
    stage += shell(commands=[
        cmake.configure_step(directory='/opt/pngwriter',
                             opts=['-DBUILD_SHARED_LIBS=ON']),
        cmake.build_step(target='install'),
        # sources are no longer needed once the library is installed
        'rm -rf /opt/pngwriter'
    ])

    # Copy notebook examples and pngwriter lib to the host's /tmp file
    # system to obtain a writable file system.
    stage += runscript(commands=[
        'if [ ! -d /tmp/GOL-xeus-cling-cuda ]; then \n'
        ' mkdir /tmp/GOL-xeus-cling-cuda &&'
        ' cp -r /notebook/ /tmp/GOL-xeus-cling-cuda\n fi',
        'cd /tmp/GOL-xeus-cling-cuda/notebook',
        'jupyter-lab'
    ])

    # fix: print(stage) is the idiomatic form of print(stage.__str__())
    print(stage)
def build():
    """Create a Singularity recipe that compiles the heat_mpi example.

    The recipe is based on Ubuntu 18.04, installs the NVIDIA HPC SDK
    (with its bundled MPI) and compiles ``heat_mpi.f90`` into
    ``/usr/local/bin/heat_mpi``.

    :return: the populated hpccm Stage
    """
    recipe = Stage()
    config.set_container_format('singularity')

    recipe += baseimage(image='ubuntu:18.04')
    # NVIDIA HPC SDK: EULA accepted, bundled MPI enabled
    recipe += nvhpc(eula=True, mpi=True)

    # bring the Fortran source into the image and build it with mpif90
    recipe += copy(src='heat_mpi.f90', dest='/var/tmp/heat_mpi.f90')
    recipe += shell(
        commands=['mpif90 /var/tmp/heat_mpi.f90 -o /usr/local/bin/heat_mpi'])

    return recipe
def get_stage(container):
    """Build the single-stage hello_world_tool recipe.

    :param container: target container format ('singularity' or 'docker')
    :return: the populated hpccm Stage
    """
    # generate baseimage
    hpccm.config.set_container_format(container)
    # version >3.2 is necessary for multi-stage build
    if container == 'singularity':
        hpccm.config.set_singularity_version('3.3')

    stage = hpccm.Stage()
    stage += baseimage(image='ubuntu:bionic')

    # copy project from outside into the container
    if container == 'singularity':
        stage += copy(src='../hello_world_tool', dest='/opt/')
    else:
        # docker cannot copy files from outside the build context,
        # so the build context has to be moved one level up
        stage += copy(src='./hello_world_tool', dest='/opt/hello_world_tool')

    # install compiler tools
    stage += cmake(eula=True, version='3.14.5')
    stage += packages(ospackages=['g++', 'make', 'wget', 'build-essential'])

    # configure, build and install the project
    builder = CMakeBuild()
    stage += shell(commands=[
        builder.configure_step(build_directory='/opt/build_hello_world_tool',
                               directory='/opt/hello_world_tool/'),
        builder.build_step(target='install')
    ])
    stage += shell(commands=build_openssl(name='openssl-1.1.1c',
                                          build_dir='/opt/openssl_build'))

    # script that runs when
    # - singularity uses the run parameter or the image runs directly
    # - docker uses the run parameter without arguments
    stage += runscript(commands=['hello_world_tool'])
    return stage
def build(container_format='singularity', os_release='ubuntu', os_version='20.04'):
    """Generate a desktop container recipe with ChimeraX installed.

    Builds an Ubuntu-based stage with an en_AU.UTF-8 locale, a full
    desktop environment, TurboVNC/VirtualGL for remote visualisation and
    a local ChimeraX tarball unpacked into /opt.

    :param container_format: hpccm output format ('singularity' or 'docker')
    :param os_release: base image distribution name
    :param os_version: base image distribution version
    :return: the populated hpccm Stage
    """
    config.set_container_format(container_format)
    image = f'{os_release}:{os_version}'
    stage0 = Stage(name='stage0')
    stage0 += baseimage(image=image, _bootstrap='docker')
    # locale variables are set early so later package scripts see them
    stage0 += environment(variables={
        'LC_ALL': 'en_AU.UTF-8',
        'LANGUAGE': 'en_AU.UTF-8',
    })
    stage0 += label(metadata={
        'maintainer': 'Luhan Cheng',
        'email': '*****@*****.**'
    })
    # point /bin/sh and /usr/bin/sh at bash so later snippets may use
    # bashisms; the libffi symlink satisfies binaries linked against the
    # older libffi.so.6 soname
    stage0 += shell(commands=[
        'rm -f /bin/sh && ln -s /bin/bash /bin/sh',
        'rm -f /usr/bin/sh && ln -s /usr/bin/bash /usr/bin/sh',
        '/bin/bash',
        'ln -s /usr/lib/x86_64-linux-gnu/libffi.so.7 /usr/lib/x86_64-linux-gnu/libffi.so.6'
    ])
    stage0 += packages(apt=[
        'wget', 'git', 'software-properties-common', 'build-essential',
        'locales', 'zlib1g-dev'
    ])
    stage0 += shell(commands=['locale-gen en_AU.UTF-8'])
    stage0 += comment('Installing vglrun and TurboVNC')
    stage0 += packages(apt=[
        'ubuntu-desktop', 'vim', 'mesa-utils', 'python3-pip', 'python3-pyqt5',
        'pyqt5-dev', 'python3-tk'
    ])
    # TurboVNC and VirtualGL debs are fetched from a Nectar object store
    stage0 += shell(commands=[
        'wget https://swift.rc.nectar.org.au/v1/AUTH_810/CVL-Singularity-External-Files/turbovnc_2.2.5_amd64.deb && dpkg -i turbovnc_2.2.5_amd64.deb && rm turbovnc_2.2.5_amd64.deb',
        'wget https://swift.rc.nectar.org.au/v1/AUTH_810/CVL-Singularity-External-Files/virtualgl_2.6.4_amd64.deb && dpkg -i virtualgl_2.6.4_amd64.deb && rm virtualgl_2.6.4_amd64.deb',
        'apt update', 'apt -y upgrade'
    ])
    # ChimeraX ships as a local tarball next to the recipe
    stage0 += primitives.copy(src='./ChimeraX-1.2.5.tar.gz',
                              dest='/opt/ChimeraX-1.2.5.tar.gz')
    stage0 += shell(commands=[
        'tar xfv /opt/ChimeraX-1.2.5.tar.gz -C /opt',
        'rm /opt/ChimeraX-1.2.5.tar.gz'
    ])
    # NOTE(review): this prefix does not literally match the tarball name
    # above — presumably the archive unpacks into this directory; verify.
    stage0 += environment(
        variables=from_prefix('/opt/chimerax-1.2.5-rc-2021.05.24'))
    return stage0
def get_stage(container):
    """Build the two-stage (build + release) hello_world_tool recipe.

    Stage0 compiles hello_world_tool and OpenSSL; Stage1 is a slim
    runtime image that copies only the install trees from Stage0.

    :param container: target container format ('singularity' or 'docker')
    :return: list [Stage0, Stage1] — build stage and release stage
    """
    # generate baseimage
    hpccm.config.set_container_format(container)
    # version >3.2 is necessary for multi-stage build
    if container == 'singularity':
        hpccm.config.set_singularity_version('3.3')

    Stage0 = hpccm.Stage()
    # the stages need "names" so that they can reference each other
    Stage0 += baseimage(image='ubuntu:bionic', _as='Stage0')

    # copy project from outside in the container
    if container == 'singularity':
        Stage0 += copy(src='../hello_world_tool', dest='/opt/')
    else:
        # docker: cannot copy files from outside the build context
        # so, we need to move the build context one level up
        Stage0 += copy(src='./hello_world_tool', dest='/opt/hello_world_tool')

    # install compiler tools
    Stage0 += cmake(eula=True, version='3.14.5')
    Stage0 += packages(ospackages=['g++', 'make', 'wget', 'build-essential'])

    # build and install project; RPATH points at the release-stage
    # location so the binary finds its libraries after the copy below
    cmb = CMakeBuild(prefix="/opt/hello_install/")
    cm = []
    cm.append(
        cmb.configure_step(build_directory='/opt/build_hello_world_tool',
                           directory='/opt/hello_world_tool/',
                           opts=['-DCMAKE_INSTALL_RPATH=/usr/local/lib/']))
    cm.append(cmb.build_step(target='install'))
    Stage0 += shell(commands=cm)
    # build_openssl is a helper defined elsewhere in this project
    Stage0 += shell(commands=build_openssl(name='openssl-1.1.1c',
                                           build_dir='/opt/openssl_build'))

    # add release stage
    Stage1 = hpccm.Stage()
    Stage1 += baseimage(image='ubuntu:bionic', _as='Stage1')
    Stage1 += copy(_from='Stage0', src='/opt/hello_install/',
                   dest='/usr/local/')
    Stage1 += copy(_from='Stage0', src='/opt/openssl_install/',
                   dest='/usr/local/')
    # the commands merge the bin, lib etc. folders of hello_install and
    # openssl_install into the /usr/local folder
    if container == "singularity":
        Stage1 += shell(commands=[
            'cp -rl /usr/local/hello_install/* /usr/local/',
            'cp -rl /usr/local/openssl_install/* /usr/local/',
            'rm -r /usr/local/hello_install/',
            'rm -r /usr/local/openssl_install/'
        ])

    # script that runs when
    # - singularity uses the run parameter or the image runs directly
    # - docker uses the run parameter without arguments
    Stage1 += runscript(commands=['hello_world_tool'])
    return [Stage0, Stage1]
]) Stage0 += shell(commands=[ "mkdir -p /opt/buildRelion", "mkdir -p /opt/relion-3.0.7", "cd /opt/buildRelion", "git clone -b 3.0.7 https://github.com/3dem/relion.git", "cd relion", "mkdir build", "cd build", "cmake -DCMAKE_INSTALL_PREFIX=/opt/relion-3.0.7 ..", "make -j 1", "make install", ]) # Adding MotionCor2 - local copy required, licensing prevents redistribution Stage0 += copy(src="MotionCor2_1.2.6.zip", dest='/opt/MotionCor2_1.2.6/MotionCor2_1.2.6.zip') Stage0 += shell(commands=[ "cd /opt/MotionCor2_1.2.6/", "unzip MotionCor2_1.2.6.zip", "rm MotionCor2_1.2.6.zip" ]) # Adding Gctf # Stage0 += copy(src="Gctf_v1.06_and_examples.tar.gz", dest="/opt/Gctf_v1.06_and_examples.tar.gz") # Stage0 += shell(commands=["cd /opt", # "tar -zxvf Gctf_v1.06_and_examples.tar.gz"]) Stage0 += shell(commands=[ "cd /opt", "wget http://www.mrc-lmb.cam.ac.uk/kzhang/Gctf/Gctf_v1.06_and_examples.tar.gz", "tar -zxvf Gctf_v1.06_and_examples.tar.gz" ])
def main():
    """Generate and print the Singularity recipe for the GOL example.

    With ``--version`` only the container version is printed and the
    process exits.  The recipe clones and builds pngwriter inside the
    container and registers a runscript that copies the notebook to a
    writable /tmp location before starting jupyter-notebook.
    """
    parser = argparse.ArgumentParser(
        description=
        'Simple script for generating a singularity recipe for the GOL example.'
    )
    parser.add_argument(
        '--build_prefix',
        type=str,
        default='/tmp/GOL_example',
        help=
        'Define the path in which all projects will be built (default: /tmp/GOL_example).'
    )
    # fix: flag was registered as '-v ' (trailing space) and could never
    # be matched on the command line
    parser.add_argument('-v',
                        '--version',
                        action='store_true',
                        help='print version of the container')
    args = parser.parse_args()

    if args.version:
        print(container_version)
        sys.exit(0)

    hpccm.config.set_container_format('singularity')
    hpccm.config.set_singularity_version('3.3')

    stage = hpccm.Stage()
    stage += label(metadata={'GOL_MAINTAINER': 'Simeon Ehrig'})
    stage += label(metadata={'GOL_EMAIL': '*****@*****.**'})
    stage += label(metadata={'GOL_Version': str(container_version)})

    # copy example inside container
    stage += copy(src='notebook', dest='/')
    stage += copy(src='jupyter_notebook_config.py', dest='/')

    # clone and build the pngwriter library
    stage += packages(ospackages=['libpng-dev'])
    png = []
    png_git = git()
    png.append(
        png_git.clone_step(
            repository='https://github.com/pngwriter/pngwriter.git',
            branch='dev',
            path='/opt/'))
    png_cmake = CMakeBuild(prefix='/notebook/pngwriter')
    png.append(
        png_cmake.configure_step(directory='/opt/pngwriter',
                                 opts=['-DBUILD_SHARED_LIBS=ON']))
    png.append(png_cmake.build_step(target='install'))
    png.append('rm -rf /opt/pngwriter')
    stage += shell(commands=png)

    # Copy notebook examples and pngwriter lib to the host's /tmp file
    # system to obtain a writable file system.
    stage += runscript(commands=[
        'if [ ! -d /tmp/GOL-xeus-cling-cuda ]; then \n'
        ' mkdir /tmp/GOL-xeus-cling-cuda &&'
        ' cp -r /notebook/ /tmp/GOL-xeus-cling-cuda &&'
        ' ln -s /tmp/GOL-xeus-cling-cuda/notebook/pngwriter'
        ' /tmp/GOL-xeus-cling-cuda/notebook/GTC_presentations/simulation/ \n fi',
        'cd /tmp/GOL-xeus-cling-cuda/notebook',
        'jupyter-notebook --config=/jupyter_notebook_config.py'
    ])

    # Add the bootstrap manually because hpccm does not support .sregistry
    recipe = str(stage)  # idiomatic replacement for stage.__str__()
    recipe = 'Bootstrap: library\nFrom: sehrig/default/xeus-cling-cuda:2.3\n\n' + recipe
    print(recipe)
def build(container_format='singularity', os_release='ubuntu', os_version='20.04', cuda_version='11.0'):
    """Generate a CUDA desktop container recipe with CCP4 and CCP-EM.

    Based on an NVIDIA CUDA devel image, installs an en_AU.UTF-8 locale,
    a desktop environment with TurboVNC/VirtualGL, then unpacks and sets
    up local CCP4 and CCP-EM tarballs under /opt.

    :param container_format: hpccm output format ('singularity' or 'docker')
    :param os_release: base image distribution name
    :param os_version: base image distribution version
    :param cuda_version: CUDA toolkit version of the base image
    :return: the populated hpccm Stage
    """
    config.set_container_format(container_format)
    image = f'nvcr.io/nvidia/cuda:{cuda_version}-devel-{os_release}{os_version}'
    stage0 = Stage(name='stage0')
    stage0 += baseimage(image=image, _bootstrap='docker')
    # locale variables are set early so later package scripts see them
    stage0 += environment(variables={
        'LC_ALL': 'en_AU.UTF-8',
        'LANGUAGE': 'en_AU.UTF-8',
    })
    stage0 += label(metadata={
        'maintainer': 'Luhan Cheng',
        'email': '*****@*****.**'
    })
    # point sh at bash so later shell snippets may use bashisms
    stage0 += shell(commands=[
        'rm -f /bin/sh && ln -s /bin/bash /bin/sh',
        'rm -f /usr/bin/sh && ln -s /usr/bin/bash /usr/bin/sh',
        '/bin/bash',
    ])
    stage0 += environment(variables=from_prefix('/usr/local/cuda'))
    stage0 += packages(apt=[
        'wget', 'git', 'software-properties-common', 'build-essential',
        'locales', 'zlib1g-dev'
    ])
    stage0 += shell(commands=['locale-gen en_AU.UTF-8'])
    stage0 += comment('Installing vglrun and TurboVNC')
    stage0 += packages(apt=[
        'ubuntu-desktop', 'vim', 'mesa-utils', 'python3-pip', 'python3-pyqt5',
        'pyqt5-dev', 'python3-tk'
    ])
    # TurboVNC and VirtualGL debs are fetched from a Nectar object store
    stage0 += shell(commands=[
        'wget https://swift.rc.nectar.org.au/v1/AUTH_810/CVL-Singularity-External-Files/turbovnc_2.2.5_amd64.deb && dpkg -i turbovnc_2.2.5_amd64.deb && rm turbovnc_2.2.5_amd64.deb',
        'wget https://swift.rc.nectar.org.au/v1/AUTH_810/CVL-Singularity-External-Files/virtualgl_2.6.4_amd64.deb && dpkg -i virtualgl_2.6.4_amd64.deb && rm virtualgl_2.6.4_amd64.deb',
        'apt update', 'apt -y upgrade'
    ])
    stage0 += comment('Installing pre-requisites')
    # CCP4 ships as a local tarball next to the recipe (license-bound)
    stage0 += primitives.copy(
        src="./ccp4-7.1.014-shelx-arpwarp-linux64.tar.gz",
        dest="/opt/ccp4-7.1.014-shelx-arpwarp-linux64.tar.gz")
    # the touch of $HOME/.agree2ccp4v6 pre-accepts the CCP4 license so
    # BINARY.setup can run non-interactively; 'cd ccp4-7.1' is relative
    # to /opt because all commands run in the same shell script
    stage0 += shell(commands=[
        'cd /opt && tar -xf ccp4-7.1.014-shelx-arpwarp-linux64.tar.gz && rm ccp4-7.1.014-shelx-arpwarp-linux64.tar.gz',
        'touch $HOME/.agree2ccp4v6',
        'cd ccp4-7.1',
        './BINARY.setup',
    ])
    stage0 += environment(variables=add_binary('/opt/ccp4-7.1/bin'))
    stage0 += comment('Installing CCP-EM')
    stage0 += primitives.copy(src="./ccpem-1.5.0-linux-x86_64.tar.gz",
                              dest="/opt/ccpem-1.5.0-linux-x86_64.tar.gz")
    # input.txt feeds scripted answers to the modeller installer below
    stage0 += primitives.copy(src="./input.txt", dest="/opt/input.txt")
    stage0 += shell(commands=[
        'touch $HOME/.agree2ccpemv1',
        'cd /opt && tar -xf ccpem-1.5.0-linux-x86_64.tar.gz && rm ccpem-1.5.0-linux-x86_64.tar.gz',
        'cd ccpem-1.5.0',
        './install_ccpem.sh',
        'cat /opt/input.txt | bash install_modeller.sh'
    ])
    return stage0
def build(container_format='singularity', os='ubuntu20.04', cuda_version='11.0',
          tensorflow_version='2.2.0', pytorch_version='1.7.1',
          pycuda_version='v2020.1'):
    """Generate a CUDA deep-learning container recipe.

    Installs TensorFlow, PyTorch (via conda), PyCUDA (built from source),
    TensorRT and cuDNN (both from local tarballs, which licensing
    prevents downloading at build time).

    :param container_format: hpccm output format ('singularity' or 'docker')
    :param os: distribution suffix of the NVIDIA CUDA base image
    :param cuda_version: CUDA toolkit version of the base image
    :param tensorflow_version: conda tensorflow-gpu version
    :param pytorch_version: conda pytorch version
    :param pycuda_version: git branch/tag of PyCUDA to build
    :return: the populated hpccm Stage
    """
    image = f'nvcr.io/nvidia/cuda:{cuda_version}-devel-{os}'
    config.set_container_format(container_format)
    stage0 = Stage(name='stage0')
    stage0 += baseimage(image=image, _bootstrap='docker')
    stage0 += label(metadata={
        'maintainer': 'Luhan Cheng',
        'email': '*****@*****.**'
    })
    stage0 += packages(
        apt=['libxml2-dev', 'libxslt-dev', 'python3-pip', 'git'])
    stage0 += conda(eula=True,
                    packages=[
                        'python=3.7',
                        f'tensorflow-gpu={tensorflow_version}',
                        f'pytorch={pytorch_version}', 'torchvision',
                        'torchaudio'
                    ],
                    channels=['pytorch', 'anaconda', 'conda-forge'])
    stage0 += generic_build(
        repository='https://github.com/inducer/pycuda.git',
        branch=pycuda_version,
        build=[f'./configure.py --cuda-root=/usr/local/cuda-{cuda_version}'],
    )
    stage0 += environment(variables=from_prefix('/usr/local/pycuda'))

    # --- TensorRT (local tarball) ---
    # fix: renamed the locals so the `os` parameter is no longer rebound
    trt_version = "7.2.1.6"
    trt_os = "Ubuntu-18.04"
    arch = 'x86_64'
    cuda = "cuda-11.0"
    cudnn = "cudnn8.0"
    filename = f'TensorRT-{trt_version}.{trt_os}.{arch}-gnu.{cuda}.{cudnn}.tar.gz'
    TENSORRT_ROOT = '/usr/local/tensorrt'
    # fix: the copy used literal '(unknown)' placeholder paths; use the
    # computed tarball name (which was previously built but never used)
    stage0 += copy(src=filename, dest=f'/{filename}')
    stage0 += shell(commands=[
        f'mkdir -p {TENSORRT_ROOT}',
        f'tar -xf /{filename} --strip-components 1 -C {TENSORRT_ROOT}',
        f'rm -rf /{filename}'
    ])
    stage0 += environment(
        variables={
            'LD_LIBRARY_PATH': f'$LD_LIBRARY_PATH:{TENSORRT_ROOT}/lib/',
            'PATH': f'$PATH:{TENSORRT_ROOT}/bin/',
            'C_INCLUDE_PATH': f'$C_INCLUDE_PATH:{TENSORRT_ROOT}/include/',
            **from_prefix('/usr/local/cuda'),
            **from_prefix('/usr/local/anaconda')
        })
    # wheel names are version-specific; they match trt_version above
    stage0 += pip(packages=[
        f'{TENSORRT_ROOT}/python/tensorrt-7.2.1.6-cp37-none-linux_x86_64.whl',
        f'{TENSORRT_ROOT}/uff/uff-0.6.9-py2.py3-none-any.whl',
        f'{TENSORRT_ROOT}/graphsurgeon/graphsurgeon-0.4.5-py2.py3-none-any.whl',
        f'{TENSORRT_ROOT}/onnx_graphsurgeon/onnx_graphsurgeon-0.2.6-py2.py3-none-any.whl'
    ],
                  pip='pip3')

    # --- cuDNN (local tarball), copied into the CUDA toolkit tree ---
    cudnn_src = 'cudnn-11.0-linux-x64-v8.0.5.39.tgz'
    stage0 += copy(src=cudnn_src, dest=f'/{cudnn_src}')
    stage0 += shell(commands=[
        'mkdir -p /cudnn',
        f'tar -xf /{cudnn_src} -C /cudnn --strip-components 1',
        'cp /cudnn/include/cudnn*.h /usr/local/cuda/include',
        'cp /cudnn/lib64/libcudnn* /usr/local/cuda/lib64',
        'chmod a+r /usr/local/cuda/include/cudnn*.h /usr/local/cuda/lib64/libcudnn*',
        f'rm -rf /cudnn /{cudnn_src}'
    ])
    return stage0
def main():
    """Generate and print the Singularity recipe for the GOL exercise.

    Command-line options control the build prefix and the number of build
    and linker threads handed to the xeus-cling-cuda generator; with
    ``--version`` only the container version is printed.

    :raises ValueError: if -j or -l is smaller than 1
    """
    parser = argparse.ArgumentParser(
        description=
        'Simple script for generating a singularity recipe for the GOL exercise.'
    )
    parser.add_argument(
        '--build_prefix',
        type=str,
        default='/tmp/GOL_example',
        help=
        'Define the path in which all projects will be built (default: /tmp/GOL_example).'
    )
    parser.add_argument('-j',
                        type=str,
                        help='number of build threads for make (default: -j)')
    parser.add_argument(
        '-l',
        type=str,
        help='number of linker threads for the cling build (default: -j)')
    # fix: flag was registered as '-v ' (trailing space) and could never
    # be matched on the command line
    parser.add_argument('-v',
                        '--version',
                        action='store_true',
                        help='print version of the container')
    args = parser.parse_args()

    if args.version:
        print(container_version)
        sys.exit(0)

    if args.j:
        threads = int(args.j)
        if threads < 1:
            raise ValueError('-j have to be greater than 0')
    else:
        threads = None

    if args.l:
        linker_threads = int(args.l)
        if linker_threads < 1:
            raise ValueError('-l have to be greater than 0')
    else:
        linker_threads = None

    xcc_gen = gn.XCC_gen(build_prefix=args.build_prefix,
                         threads=threads,
                         linker_threads=linker_threads)
    stage = xcc_gen.gen_release_single_stage()
    stage += label(metadata={'EXAMPLE_CONTAINER_MAINTAINER': 'Simeon Ehrig'})
    stage += label(metadata={'EXAMPLE_CONTAINER_EMAIL': '*****@*****.**'})
    stage += label(
        metadata={'EXAMPLE_CONTAINER_Version': str(container_version)})

    # disable the xsrf check, which avoids some problems in Firefox
    stage += copy(src='jupyter_notebook_config.py', dest='/')

    # clone and build the pngwriter library
    stage += packages(ospackages=['libpng-dev'])
    png_git = git()
    stage += png_git.clone_step(
        repository='https://github.com/pngwriter/pngwriter.git',
        branch='dev',
        path='/opt/')
    png_cmake = CMakeBuild()
    stage += shell(commands=[
        png_cmake.configure_step(directory='/opt/pngwriter',
                                 opts=['-DBUILD_SHARED_LIBS=ON']),
        png_cmake.build_step(target='install')
    ])

    # copy and install jitify (header-only, no build step required);
    # fix: an unused second `CMakeBuild()` instantiation was removed here
    jitify_git = git()
    stage += jitify_git.clone_step(
        repository='https://github.com/NVIDIA/jitify.git', path='/opt/')
    stage += shell(commands=['cp /opt/jitify/jitify.hpp /usr/local/include'])
    stage += shell(commands=['rm -rf /opt/pngwriter', 'rm -rf /opt/jitify'])

    # check if the path to the notebook is specified, otherwise use the
    # current directory
    stage += runscript(commands=[
        'if [ $# -gt 0 ]', 'then', 'cd $1', 'fi',
        'jupyter-notebook --config=/jupyter_notebook_config.py'
    ])

    # fix: print(stage) is the idiomatic form of print(stage.__str__())
    print(stage)
def main():  # pragma: no cover
    """Entry point of the ogs-container-maker CLI.

    Parses the CLI arguments, then for every combination of
    (format, ogs version, package manager, OpenMPI version, CMake args)
    generates an hpccm recipe, writes the definition file and optionally
    builds, converts, uploads and deploys the resulting image.

    NOTE(review): this function was recovered from a whitespace-mangled
    source; the nesting of some argument-dependent sections was
    reconstructed and should be compared against upstream before
    relying on it.
    """
    cli = Cli_Args()
    args = cli.parse_args()
    if args.deploy != '':
        # deploying implies building and converting the image first
        args.build = True
        args.convert = True
    cwd = os.getcwd()
    # all requested build combinations
    c = list(
        itertools.product(args.format, args.ogs, args.pm, args.ompi,
                          args.cmake_args))
    if not args.print and not args.cleanup:
        print('Creating {} image definition(s)...'.format(len(c)))
    for build in c:
        __format = build[0]
        ogs_version = build[1]
        ogscm.config.set_package_manager(build[2])
        ompi = build[3]
        cmake_args = build[4].strip().split(' ')

        # args checking
        if len(c) > 1 and args.file != '':
            print(
                '--file can only be used when generating a single image definition'
            )
            quit(1)
        if (len(c) > 1 and args.sif_file != '') or (args.sif_file != ''
                                                    and args.convert == False):
            print('--sif_file can only be used when generating a single image '
                  'definition and --convert is given')
            quit(1)
        if (ogs_version == 'off' or ogs_version == 'clean') \
                and len(cmake_args) > 0 and cmake_args[0] != '':
            cmake_args = []
            print('--cmake_args cannot be used with --ogs off! Ignoring!')
        if __format == 'singularity':
            if args.runtime_only:
                args.runtime_only = False
                print(
                    '--runtime-only cannot be used with --format singularity! '
                    'Ignoring!')
            if args.upload:
                print('--upload cannot be used with --format singularity! '
                      'Ignoring!')
            if args.convert:
                print('--convert cannot be used with --format singularity! '
                      'Ignoring!')

        info = container_info(build, args)
        if args.cleanup:
            info.cleanup()
            exit(0)
        info.make_dirs()

        if ompi != 'off':
            if args.base_image == 'ubuntu:20.04':
                args.base_image = 'centos:8'
                print(
                    'Setting base_image to \'centos:8\'. OpenMPI is supported on CentOS only.'
                )

        # Create definition
        hpccm.config.set_container_format(__format)

        # ------------------------------ recipe -------------------------------
        Stage0 = hpccm.Stage()
        Stage0 += raw(docker='# syntax=docker/dockerfile:experimental')
        if args.runtime_only:
            Stage0.name = 'build'
        Stage0 += baseimage(image=args.base_image, _as='build')
        Stage0 += comment(f"Generated with ogs-container-maker {__version__}",
                          reformat=False)
        Stage0 += packages(ospackages=['wget', 'tar', 'curl', 'make'])

        # base compiler
        if args.compiler != 'off':
            if args.compiler_version == '':
                if args.compiler == 'clang':
                    args.compiler_version = '8'
                else:
                    if hpccm.config.g_linux_distro == linux_distro.CENTOS:
                        args.compiler_version = '10'  # required for std::filesystem
                    else:
                        args.compiler_version = None  # Use default
            if args.compiler == 'clang':
                compiler = llvm(extra_repository=True,
                                extra_tools=True,
                                version=args.compiler_version)
            else:
                compiler = gnu(fortran=False,
                               extra_repository=True,
                               version=args.compiler_version)
            toolchain = compiler.toolchain
            Stage0 += compiler
            # Upgrade stdc++ lib after installing new compiler
            # https://stackoverflow.com/a/46613656/80480
            if args.compiler == 'gcc' and args.compiler_version is not None:
                Stage0 += packages(apt=['libstdc++6'])

        # Prepare runtime stage
        Stage1 = hpccm.Stage()
        Stage1.baseimage(image=args.base_image)

        # Install scif in all stages
        Stage0 += pip(packages=['scif'], pip='pip3')
        Stage1 += pip(packages=['scif'], pip='pip3')

        if ompi != 'off':
            mpicc = object
            if False:  # disabled eve-cluster variant, kept for reference
                # eve:
                # Stage0 += ofed() OR mlnx_ofed(); is installed later on from debian archive
                # Stage0 += knem()
                Stage0 += ucx(version='1.5.1', cuda=False)  # knem='/usr/local/knem'
                Stage0 += packages(
                    ospackages=['libpmi2-0-dev'])  # req. for --with-pmi
                # req. for --with-psm2
                Stage0 += packages(ospackages=['libnuma1'])
                psm_deb_url = 'http://snapshot.debian.org/archive/debian/20181231T220010Z/pool/main'
                psm2_version = '11.2.68-4'
                Stage0 += shell(commands=[
                    'cd /tmp',
                    f'wget -nv {psm_deb_url}/libp/libpsm2/libpsm2-2_{psm2_version}_amd64.deb',
                    f'wget -nv {psm_deb_url}/libp/libpsm2/libpsm2-dev_{psm2_version}_amd64.deb',
                    'dpkg --install *.deb'
                ])
                # libibverbs
                # Available versions: http://snapshot.debian.org/binary/ibacm/
                # ibverbs_version = '21.0-1'
                # works on eve, eve has 17.2-3 installed nut this version is not available in snapshot.debian
                ib_deb_url = 'http://snapshot.debian.org/archive/debian/20180430T215634Z/pool/main'
                ibverbs_version = '17.1-2'
                ibverbs_packages = [
                    'ibacm', 'ibverbs-providers', 'ibverbs-utils',
                    'libibumad-dev', 'libibumad3', 'libibverbs-dev',
                    'libibverbs1', 'librdmacm-dev', 'librdmacm1', 'rdma-core',
                    'rdmacm-utils'
                ]
                ibverbs_cmds = ['cd /tmp']
                for package in ibverbs_packages:
                    ibverbs_cmds.extend([
                        f'wget -nv {ib_deb_url}/r/rdma-core/{package}_{ibverbs_version}_amd64.deb'
                    ])
                ibverbs_cmds.append('dpkg --install *.deb')
                Stage0 += packages(ospackages=[
                    'libnl-3-200', 'libnl-route-3-200', 'libnl-route-3-dev',
                    'udev', 'perl'
                ])
                Stage0 += shell(commands=ibverbs_cmds)
                mpicc = openmpi(
                    version=ompi,
                    cuda=False,
                    toolchain=toolchain,
                    ldconfig=True,
                    ucx='/usr/local/ucx',
                    configure_opts=[
                        '--disable-getpwuid',
                        '--sysconfdir=/mnt/0',
                        '--with-slurm',  # used on taurus
                        '--with-pmi=/usr/include/slurm-wlm',
                        'CPPFLAGS=\'-I /usr/include/slurm-wlm\'',
                        '--with-pmi-libdir=/usr/lib/x86_64-linux-gnu',
                        # '--with-pmix',
                        '--with-psm2',
                        '--disable-pty-support',
                        '--enable-mca-no-build=btl-openib,plm-slurm',
                        # eve:
                        '--with-sge',
                        '--enable-mpirun-prefix-by-default',
                        '--enable-orterun-prefix-by-default',
                    ])
            else:
                ucx_version = '1.8.1'
                Stage0 += ucx(version=ucx_version, cuda=False)
                Stage0 += slurm_pmi2(version='17.02.11')
                pmix_version = True
                if version.parse(ompi) >= version.parse('4'):
                    Stage0 += pmix(version='3.1.5')
                    pmix_version = '/usr/local/pmix'
                mpicc = openmpi(version=ompi,
                                cuda=False,
                                infiniband=False,
                                pmi='/usr/local/slurm-pmi2',
                                pmix=pmix_version,
                                ucx='/usr/local/ucx')

            toolchain = mpicc.toolchain
            Stage0 += mpicc
            # OpenMPI expects this program to exist, even if it's not used.
            # Default is "ssh : rsh", but that's not installed.
            Stage0 += shell(commands=[
                'mkdir /mnt/0',
                "echo 'plm_rsh_agent = false' >> /mnt/0/openmpi-mca-params.conf"
            ])
            Stage0 += label(
                metadata={
                    'org.opengeosys.mpi': 'openmpi',
                    'org.opengeosys.mpi.version': ompi
                })

            if args.mpi_benchmarks:
                # osu_app = scif(name='osu', file=f"{info.out_dir}/osu.scif")
                Stage0 += osu_benchmarks(toolchain=toolchain)
                Stage0 += shell(commands=[
                    'mkdir -p /usr/local/mpi-examples',
                    'cd /usr/local/mpi-examples',
                    'curl -O https://raw.githubusercontent.com/hpc/charliecloud/674b3b4e4ad243be5565f200d8f5fb92b7544480/examples/mpihello/hello.c',
                    'curl -O https://computing.llnl.gov/tutorials/mpi/samples/C/mpi_bandwidth.c',
                    'curl -O https://raw.githubusercontent.com/mpitutorial/mpitutorial/gh-pages/tutorials/mpi-send-and-receive/code/ring.c',
                    'mpicc -o /usr/local/bin/mpi-hello /usr/local/mpi-examples/hello.c',
                    'mpicc -o /usr/local/bin/mpi-ring /usr/local/mpi-examples/ring.c',
                    'mpicc -o /usr/local/bin/mpi-bandwidth /usr/local/mpi-examples/mpi_bandwidth.c',
                ])
                Stage1 += copy(_from='build',
                               src='/usr/local/bin/mpi-*',
                               dest='/usr/local/bin/')

        # Stage0 += mlnx_ofed()

        if ogs_version != 'clean':
            Stage0 += ogs_base()
        if args.gui:
            Stage0 += packages(apt=[
                'mesa-common-dev', 'libgl1-mesa-dev', 'libglu1-mesa-dev',
                'libxt-dev'
            ],
                               yum=[
                                   'mesa-libOSMesa-devel', 'mesa-libGL-devel',
                                   'mesa-libGLU-devel', 'libXt-devel'
                               ])
            Stage1 += packages(
                apt=[
                    'libosmesa6', 'libgl1-mesa-glx', 'libglu1-mesa', 'libxt6',
                    'libopengl0'
                ],
                yum=['mesa-libOSMesa', 'mesa-libGL', 'mesa-libGLU', 'libXt'])
        if ogs_version != 'clean':
            if ogscm.config.g_package_manager == package_manager.CONAN:
                Stage0 += cmake(eula=True, version='3.16.6')
                conan_user_home = '/opt/conan'
                if args.dev:
                    conan_user_home = ''
                Stage0 += pm_conan(user_home=conan_user_home)
                Stage0 += environment(variables={'CONAN_SYSREQUIRES_SUDO': 0})
            elif ogscm.config.g_package_manager == package_manager.SYSTEM:
                Stage0 += cmake(eula=True, version='3.16.6')
                Stage0 += boost(version='1.66.0', bootstrap_opts=['headers'])
                Stage0 += environment(
                    variables={'BOOST_ROOT': '/usr/local/boost'})
                vtk_cmake_args = [
                    '-DModule_vtkIOXML=ON', '-DModule_vtkIOXdmf3=ON',
                    '-DVTK_Group_Rendering=OFF', '-DVTK_Group_StandAlone=OFF'
                ]
                if args.gui:
                    Stage0 += packages(apt=[
                        'libgeotiff-dev', 'libshp-dev', 'libnetcdf-c++4-dev',
                        'libqt5x11extras5-dev', 'libqt5xmlpatterns5-dev',
                        'qt5-default'
                    ],
                                       yum=[
                                           'libgeotiff-devel', 'shapelib-devel',
                                           'netcdf-devel', 'qt5-qtbase-devel',
                                           'qt5-qtxmlpatterns-devel',
                                           'qt5-qtx11extras-devel'
                                       ])
                    Stage1 += packages(apt=[
                        'geotiff-bin', 'shapelib', 'libnetcdf-c++4',
                        'libqt5x11extras5', 'libqt5xmlpatterns5', 'qt5-default'
                    ],
                                       yum=[
                                           'libgeotiff', 'shapelib', 'netcdf',
                                           'qt5-qtbase', 'qt5-qtxmlpatterns',
                                           'qt5-qtx11extras'
                                       ])
                    vtk_cmake_args = [
                        '-DModule_vtkIOXdmf3=ON',
                        '-DVTK_BUILD_QT_DESIGNER_PLUGIN=OFF',
                        '-DVTK_Group_Qt=ON', '-DVTK_QT_VERSION=5'
                    ]
                if hpccm.config.g_linux_distro == linux_distro.CENTOS:
                    # otherwise linker error, maybe due to gcc 10?
                    vtk_cmake_args.extend([
                        '-DBUILD_SHARED_LIBS=OFF',
                        '-DCMAKE_POSITION_INDEPENDENT_CODE=ON'
                    ])
                if args.insitu:
                    if args.gui:
                        print('--gui can not be used with --insitu!')
                        exit(1)
                    Stage0 += paraview(
                        cmake_args=['-DPARAVIEW_USE_PYTHON=ON'],
                        edition='CATALYST',
                        ldconfig=True,
                        toolchain=toolchain,
                        version="v5.8.1")
                else:
                    if toolchain.CC == 'mpicc':
                        vtk_cmake_args.extend([
                            '-D Module_vtkIOParallelXML=ON',
                            '-D Module_vtkParallelMPI=ON'
                        ])
                    Stage0 += generic_cmake(
                        cmake_opts=vtk_cmake_args,
                        devel_environment={'VTK_ROOT': '/usr/local/vtk'},
                        directory='VTK-8.2.0',
                        ldconfig=True,
                        prefix='/usr/local/vtk',
                        toolchain=toolchain,
                        url=
                        'https://www.vtk.org/files/release/8.2/VTK-8.2.0.tar.gz'
                    )
                if ompi != 'off':
                    Stage0 += packages(yum=['diffutils'])
                    Stage0 += generic_autotools(
                        configure_opts=[
                            f'CC={toolchain.CC}', f'CXX={toolchain.CXX}',
                            '--CFLAGS=\'-O3\'', '--CXXFLAGS=\'-O3\'',
                            '--FFLAGS=\'-O3\'', '--with-debugging=no',
                            '--with-fc=0', '--download-f2cblaslapack=1'
                        ],
                        devel_environment={'PETSC_DIR': '/usr/local/petsc'},
                        directory='petsc-3.11.3',
                        ldconfig=True,
                        preconfigure=[
                            "sed -i -- 's/python/python3/g' configure"
                        ],
                        prefix='/usr/local/petsc',
                        toolchain=toolchain,
                        url='http://ftp.mcs.anl.gov/pub/petsc/release-snapshots/'
                        'petsc-lite-3.11.3.tar.gz')
                Stage0 += generic_cmake(
                    devel_environment={
                        'Eigen3_ROOT': '/usr/local/eigen',
                        'Eigen3_DIR': '/usr/local/eigen'
                    },
                    directory='eigen-3.3.7',
                    prefix='/usr/local/eigen',
                    url=
                    'https://gitlab.com/libeigen/eigen/-/archive/3.3.7/eigen-3.3.7.tar.gz'
                )
                if args.cvode:
                    Stage0 += generic_cmake(
                        cmake_opts=[
                            '-D EXAMPLES_INSTALL=OFF',
                            '-D BUILD_SHARED_LIBS=OFF',
                            '-D CMAKE_POSITION_INDEPENDENT_CODE=ON'
                        ],
                        devel_environment={'CVODE_ROOT': '/usr/local/cvode'},
                        directory='cvode-2.8.2',
                        prefix='/usr/local/cvode',
                        url='https://github.com/ufz/cvode/archive/2.8.2.tar.gz')
            # NOTE(review): tool sections below reconstructed at the
            # "ogs != clean" level — verify against upstream.
            if args.cppcheck:
                Stage0 += generic_cmake(
                    devel_environment={'PATH': '/usr/local/cppcheck/bin:$PATH'},
                    directory=
                    'cppcheck-809a769c690d8ab6fef293e41a29c8490512866e',
                    prefix='/usr/local/cppcheck',
                    runtime_environment={
                        'PATH': '/usr/local/cppcheck/bin:$PATH'
                    },
                    url=
                    'https://github.com/danmar/cppcheck/archive/809a769c690d8ab6fef293e41a29c8490512866e.tar.gz'
                )
            if args.iwyy and args.compiler == 'clang':
                Stage0 += packages(ospackages=[
                    'libncurses5-dev', 'zlib1g-dev',
                    f"llvm-{args.compiler_version}-dev",
                    f"libclang-{args.compiler_version}-dev"
                ])
                Stage0 += generic_cmake(
                    cmake_opts=[
                        f"-D IWYU_LLVM_ROOT_PATH=/usr/lib/llvm-{args.compiler_version}"
                    ],
                    devel_environment={'PATH': '/usr/local/iwyy/bin:$PATH'},
                    directory=
                    f"include-what-you-use-clang_{args.compiler_version}.0",
                    prefix='/usr/local/iwyy',
                    runtime_environment={'PATH': '/usr/local/iwyy/bin:$PATH'},
                    url="https://github.com/include-what-you-use/include-what-"
                    f"you-use/archive/clang_{args.compiler_version}.0.tar.gz")
            if args.docs:
                Stage0 += packages(
                    ospackages=['doxygen', 'graphviz', 'texlive-base'])
            if args.gcovr:
                Stage0 += pip(pip='pip3', packages=['gcovr'])

        if args.dev:
            Stage0 += packages(ospackages=[
                'neovim', 'gdb', 'silversearcher-ag', 'ssh-client', 'less'
            ])
        if args.pip:
            Stage0 += pip(packages=args.pip, pip='pip3')
            Stage1 += pip(packages=args.pip, pip='pip3')
        if args.packages:
            Stage0 += packages(ospackages=args.packages)
        if args.tfel:
            Stage0 += generic_cmake(
                directory='tfel-TFEL-3.3.0',
                ldconfig=True,
                url='https://github.com/thelfer/tfel/archive/TFEL-3.3.0.tar.gz',
                prefix='/usr/local/tfel')
            Stage0 += environment(variables={'TFELHOME': '/usr/local/tfel'})

        definition_file_path = os.path.join(info.out_dir, info.definition_file)
        if args.ccache:
            Stage0 += ccache(cache_size='15G')
        if ogs_version != 'off' and ogs_version != 'clean':
            mount_args = ''
            if args.ccache:
                mount_args = f'{mount_args} --mount=type=cache,target=/opt/ccache,id=ccache'
            if args.cvode:
                cmake_args.append('-DOGS_USE_CVODE=ON')
            if args.gui:
                cmake_args.append('-DOGS_BUILD_GUI=ON')
            if args.insitu:
                cmake_args.append('-DOGS_INSITU=ON')
            Stage0 += raw(docker=f"ARG OGS_COMMIT_HASH={info.commit_hash}")
            scif_file = f"{info.out_dir}/ogs.scif"
            if info.ogsdir:
                # build from a local source tree: rewrite paths relative
                # to the (now changed) build context
                context_path_size = len(ogs_version)
                print(f"chdir to {ogs_version}")
                os.chdir(ogs_version)
                mount_args = f'{mount_args} --mount=type=bind,target=/scif/apps/ogs/src,rw'
                scif_file = f"{info.out_dir[context_path_size+1:]}/ogs.scif"
                definition_file_path = f"{info.out_dir[context_path_size+1:]}/{info.definition_file}"
            ogs_app = scif(_arguments=mount_args, name='ogs', file=scif_file)
            ogs_app += ogs(repo=info.repo,
                           branch=info.branch,
                           commit=info.commit_hash,
                           git_version=info.git_version,
                           toolchain=toolchain,
                           prefix='/scif/apps/ogs',
                           cmake_args=cmake_args,
                           parallel=args.parallel,
                           remove_build=True,
                           remove_source=True)
            Stage0 += ogs_app

        stages_string = str(Stage0)
        if args.runtime_only:
            Stage1 += Stage0.runtime(exclude=['boost'])
            if args.compiler == 'gcc' and args.compiler_version != None:
                Stage1 += packages(apt=['libstdc++6'])
            stages_string += "\n\n" + str(Stage1)
        # ---------------------------- recipe end -----------------------------

        with open(definition_file_path, 'w') as f:
            print(stages_string, file=f)
        if args.print:
            print(stages_string)
        else:
            print(
                f'Created definition {os.path.abspath(definition_file_path)}')

        # Create image
        if not args.build:
            continue
        if __format == 'singularity':
            # fix: a space was missing between the .sif output path and
            # the definition file, fusing them into a single argument
            subprocess.run(
                f"sudo `which singularity` build --force {info.images_out_dir}/{info.img_file}.sif "
                f"{definition_file_path}",
                shell=True)
            subprocess.run(
                f"sudo chown $USER:$USER {info.images_out_dir}/{info.img_file}.sif",
                shell=True)
            # TODO: adapt this to else
            continue

        build_cmd = (f"DOCKER_BUILDKIT=1 docker build {args.build_args} "
                     f"-t {info.tag} -f {definition_file_path} .")
        print(f"Running: {build_cmd}")
        subprocess.run(build_cmd, shell=True)
        inspect_out = subprocess.check_output(
            f"docker inspect {info.tag} | grep Id",
            shell=True).decode(sys.stdout.encoding)
        # fix: raw string for the regex avoids the invalid '\w' escape
        image_id = re.search(r'sha256:(\w*)', inspect_out).group(1)
        image_id_short = image_id[0:12]
        if args.upload:
            subprocess.run(f"docker push {info.tag}", shell=True)
        if args.sif_file:
            image_file = f'{info.images_out_dir}/{args.sif_file}'
        else:
            image_file = f'{info.images_out_dir}/{info.img_file}-{image_id_short}.sif'
        if args.convert and not os.path.exists(image_file):
            subprocess.run(
                f"cd {cwd} && singularity build --force {image_file} docker-daemon:{info.tag}",
                shell=True)

        # Deploy image
        if not args.deploy:
            continue
        deploy_config_filename = f'{cwd}/config/deploy_hosts.yml'
        if not os.path.isfile(deploy_config_filename):
            print(
                f'ERROR: {deploy_config_filename} not found but required for deploying!'
            )
            exit(1)
        with open(deploy_config_filename, 'r') as ymlfile:
            deploy_config = yaml.load(ymlfile, Loader=yaml.FullLoader)
        if not args.deploy == 'ALL' and not args.deploy in deploy_config:
            print(f'ERROR: Deploy host "{args.deploy}" not found in config!')
            exit(1)
        deploy_hosts = {}
        if args.deploy == 'ALL':
            deploy_hosts = deploy_config
        else:
            deploy_hosts[args.deploy] = deploy_config[args.deploy]
        for deploy_host in deploy_hosts:
            deploy_info = deploy_hosts[deploy_host]
            print(f'Deploying to {deploy_info} ...')
            proxy_cmd = ''
            user_cmd = ''
            if 'user' in deploy_info:
                user_cmd = f"{deploy_info['user']}@"
            if 'proxy' in deploy_info:
                proxy_cmd = f"-e 'ssh -A -J {user_cmd}{deploy_info['proxy']}'"
                print(proxy_cmd)
            print(
                subprocess.check_output(
                    f"rsync -c -v {proxy_cmd} {image_file} {user_cmd}{deploy_info['host']}:{deploy_info['dest_dir']}",
                    shell=True).decode(sys.stdout.encoding))