def get_compiler(args, compiler_build_stage: hpccm.Stage = None) -> bb_base:
    """Select the compiler building block requested on the command line.

    Exactly one of ``args.icc``, ``args.llvm``, ``args.oneapi`` or
    ``args.gcc`` is expected to be set by the caller.

    :param args: parsed command-line options naming the toolchain/version.
    :param compiler_build_stage: optional earlier stage whose runtime is
        reused for toolchains we must build ourselves (TSAN, oneAPI).
    :returns: an hpccm building block providing the compiler.
    :raises RuntimeError: for unimplemented or missing toolchain selections.
    """
    if args.icc is not None:
        raise RuntimeError('Intel compiler toolchain recipe not implemented yet')

    if args.llvm is not None:
        if args.tsan is not None:
            # ThreadSanitizer requires a clang we built ourselves (TSAN + OMP),
            # so it must come from the dedicated compiler build stage.
            if compiler_build_stage is None:
                raise RuntimeError('No TSAN compiler build stage!')
            return compiler_build_stage.runtime(_from='tsan')
        # No special support needed: install the stock llvm packages.
        return hpccm.building_blocks.llvm(extra_repository=True, version=args.llvm)

    if args.oneapi is not None:
        if compiler_build_stage is None:
            raise RuntimeError('No oneAPI compiler build stage!')
        oneapi_compiler = compiler_build_stage.runtime(_from='oneapi')
        # The toolchain is needed only for builds done within the Dockerfile,
        # e.g. OpenMPI builds, which don't currently work for other reasons.
        oneapi_compiler.toolchain = hpccm.toolchain(
            CC='/opt/intel/oneapi/compiler/latest/linux/bin/intel64/icc',
            CXX='/opt/intel/oneapi/compiler/latest/linux/bin/intel64/icpc')
        return oneapi_compiler

    if args.gcc is not None:
        return hpccm.building_blocks.gnu(extra_repository=True,
                                         version=args.gcc,
                                         fortran=False)

    raise RuntimeError('Logic error: no compiler toolchain selected.')
def get_compiler(args, compiler_build_stage: hpccm.Stage = None) -> bb_base:
    """Select the compiler building block requested on the command line.

    Exactly one of ``args.llvm``, ``args.oneapi`` or ``args.gcc`` is
    expected to be set by the caller.

    :param args: parsed command-line options naming the toolchain/version
        (plus ``args.tsan`` / ``args.cp2k`` feature flags).
    :param compiler_build_stage: optional earlier stage whose runtime is
        reused for toolchains we must build ourselves (TSAN, oneAPI).
    :returns: an hpccm building block providing the compiler.
    :raises RuntimeError: when a required build stage is missing or no
        toolchain was selected.
    """
    # Compiler
    if args.llvm is not None:
        # Build our own version instead to get TSAN + OMP
        if args.tsan is not None:
            if compiler_build_stage is not None:
                compiler = compiler_build_stage.runtime(_from='tsan')
            else:
                raise RuntimeError('No TSAN compiler build stage!')
        # Build the default compiler if we don't need special support
        else:
            # Always use the "upstream" llvm repositories because the
            # main ubuntu repositories stop adding support for new
            # llvm versions after a few llvm releases.
            compiler = hpccm.building_blocks.llvm(version=args.llvm, upstream=True)

    elif args.oneapi is not None:
        if compiler_build_stage is not None:
            compiler = compiler_build_stage.runtime(_from='oneapi')
            # Prepare the toolchain (needed only for builds done within the Dockerfile, e.g.
            # OpenMPI builds, which don't currently work for other reasons)
            oneapi_toolchain = hpccm.toolchain(
                CC=f'/opt/intel/oneapi/compiler/{args.oneapi}/linux/bin/intel64/icx',
                CXX=f'/opt/intel/oneapi/compiler/{args.oneapi}/linux/bin/intel64/icpx')
            setattr(compiler, 'toolchain', oneapi_toolchain)
        else:
            raise RuntimeError('No oneAPI compiler build stage!')

    elif args.gcc is not None:
        # CP2K needs a Fortran compiler; otherwise skip it to keep the image small.
        compiler = hpccm.building_blocks.gnu(extra_repository=True,
                                             version=args.gcc,
                                             fortran=args.cp2k is not None)
    else:
        raise RuntimeError('Logic error: no compiler toolchain selected.')
    return compiler
def get_compiler(args, tsan_stage: hpccm.Stage = None) -> bb_base:
    """Select the compiler building block requested on the command line.

    Exactly one of ``args.icc``, ``args.llvm`` or ``args.gcc`` is expected
    to be set by the caller.

    :param args: parsed command-line options naming the toolchain/version.
    :param tsan_stage: optional earlier stage providing a clang built with
        ThreadSanitizer support; required when ``args.tsan`` is set.
    :returns: an hpccm building block providing the compiler.
    :raises RuntimeError: for unimplemented or missing toolchain selections.
    """
    if args.icc is not None:
        raise RuntimeError('Intel compiler toolchain recipe not implemented yet')

    if args.llvm is not None:
        if args.tsan is not None:
            # ThreadSanitizer requires a clang we built ourselves (TSAN + OMP),
            # so it must come from the dedicated TSAN stage.
            if tsan_stage is None:
                raise RuntimeError('No TSAN stage!')
            return tsan_stage.runtime(_from='tsan')
        # No special support needed: install the stock llvm packages.
        return hpccm.building_blocks.llvm(extra_repository=True, version=args.llvm)

    if args.gcc is not None:
        return hpccm.building_blocks.gnu(extra_repository=True,
                                         version=args.gcc,
                                         fortran=False)

    raise RuntimeError('Logic error: no compiler toolchain selected.')