def generate(namespace):
    """Run `neurodocker generate`."""
    specs = utils._namespace_to_specs(namespace)
    df = Dockerfile(specs)
    if not namespace.no_print_df:
        print(df.cmd)
    if namespace.output:
        df.save(filepath=namespace.output)
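# --- Hedged usage sketch (not part of the project source) -------------------
# The same specs -> Dockerfile flow as `generate`, driven from a plain dict
# instead of an argparse namespace. The import path is an assumption; the
# specs layout mirrors the test modules below.
from neurodocker import Dockerfile  # assumed top-level import

specs = {
    'pkg_manager': 'apt',
    'instructions': [
        ('base', 'debian:stretch'),
        ('user', 'neuro'),
    ],
}

df = Dockerfile(specs)
print(df.cmd)                   # rendered Dockerfile text, as `generate` prints it
df.save(filepath='Dockerfile')  # as `generate` does when an output path is given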
def test_build_image_miniconda_latest_shellscript_centos7(self):
    """Install latest version of Miniconda via ContinuumIO's installer
    script on CentOS 7.
    """
    specs = {'pkg_manager': 'yum',
             'check_urls': True,
             'instructions': [
                 ('base', 'centos:7'),
                 ('user', 'neuro'),
                 ('miniconda', {
                     'env_name': 'default',
                     'conda_install': ['python=3.5.1', 'traits'],
                     'pip_install': ['https://github.com/nipy/nipype/archive/master.tar.gz'],
                 }),
                 ('miniconda', {
                     'env_name': 'default',
                     'pip_install': ['pylsl'],
                 })
             ]}
    df = Dockerfile(specs).cmd
    dbx_path, image_name = utils.DROPBOX_DOCKERHUB_MAPPING['miniconda_centos7']
    image, push = utils.get_image_from_memory(df, dbx_path, image_name)

    cmd = "bash /testscripts/test_miniconda.sh"
    assert DockerContainer(image).run(cmd, volumes=utils.volumes)

    if push:
        utils.push_image(image_name)
def test_build_image_petpvc_120b_binaries_xenial(self):
    """Install PETPVC binaries on Ubuntu Xenial."""
    specs = {
        'pkg_manager': 'apt',
        'check_urls': True,
        'instructions': [
            ('base', 'ubuntu:xenial'),
            ('petpvc', {'version': '1.2.0-b', 'use_binaries': True}),
            ('user', 'neuro'),
        ]
    }
    df = Dockerfile(specs).cmd
    dbx_path, image_name = utils.DROPBOX_DOCKERHUB_MAPPING['petpvc_xenial']
    image, push = utils.get_image_from_memory(df, dbx_path, image_name)

    cmd = "bash /testscripts/test_petpvc.sh"
    assert DockerContainer(image).run(cmd, volumes=utils.volumes)

    if push:
        utils.push_image(image_name)
def test_build_image_dcm2niix_master_source_centos7(self):
    """Install dcm2niix from source on CentOS 7."""
    specs = {
        'pkg_manager': 'yum',
        'check_urls': True,
        'instructions': [
            ('base', 'centos:7'),
            ('dcm2niix', {'version': 'master'}),
            ('user', 'neuro'),
        ]
    }
    df = Dockerfile(specs).cmd
    dbx_path, image_name = utils.DROPBOX_DOCKERHUB_MAPPING[
        'dcm2niix-master_centos7']
    image, push = utils.get_image_from_memory(df, dbx_path, image_name)

    cmd = "bash /testscripts/test_dcm2niix.sh"
    assert DockerContainer(image).run(cmd, volumes=utils.volumes)

    if push:
        utils.push_image(image_name)
def test_build_image_fsl_5010_binaries_centos7(self):
    """Install FSL binaries on CentOS 7."""
    specs = {
        'pkg_manager': 'yum',
        'check_urls': True,
        'instructions': [
            ('base', 'centos:7'),
            ('fsl', {'version': '5.0.10',
                     'use_binaries': True,
                     'eddy_5011': True}),
        ]
    }
    df = Dockerfile(specs).cmd
    dbx_path, image_name = utils.DROPBOX_DOCKERHUB_MAPPING[
        'fsl-5.0.10_centos7']
    image, push = utils.get_image_from_memory(df, dbx_path, image_name)

    cmd = "bash /testscripts/test_fsl.sh"
    assert DockerContainer(image).run(cmd, volumes=utils.volumes)

    if push:
        utils.push_image(image_name)
def test_build_image_mrtrix3_binaries_centos7(self):
    """Install MRtrix3 binaries on CentOS 7."""
    specs = {
        'pkg_manager': 'yum',
        'check_urls': True,
        'instructions': [
            ('base', 'centos:7'),
            ('mrtrix3', {'use_binaries': True}),
            ('user', 'neuro'),
        ]
    }
    df = Dockerfile(specs).cmd
    dbx_path, image_name = utils.DROPBOX_DOCKERHUB_MAPPING[
        'mrtrix3_centos7']
    image, push = utils.get_image_from_memory(df, dbx_path, image_name)

    cmd = "bash /testscripts/test_mrtrix.sh"
    assert DockerContainer(image).run(cmd, volumes=utils.volumes)

    if push:
        utils.push_image(image_name)
def test_build_image_convert3d_100_binaries_zesty(self):
    """Install Convert3D binaries on Ubuntu Zesty."""
    specs = {
        'pkg_manager': 'apt',
        'check_urls': True,
        'instructions': [
            ('base', 'ubuntu:zesty'),
            ('c3d', {'version': '1.0.0'}),
            ('user', 'neuro'),
        ]
    }
    df = Dockerfile(specs).cmd
    dbx_path, image_name = utils.DROPBOX_DOCKERHUB_MAPPING[
        'convert3d_zesty']
    image, push = utils.get_image_from_memory(df, dbx_path, image_name)

    cmd = "bash /testscripts/test_convert3d.sh"
    assert DockerContainer(image).run(cmd, volumes=utils.volumes)

    if push:
        utils.push_image(image_name)
def test_build_image_afni_latest_binaries_stretch(self):
    """Install latest AFNI binaries on Debian stretch."""
    specs = {
        'pkg_manager': 'apt',
        'check_urls': False,
        'instructions': [
            ('base', 'debian:stretch'),
            ('afni', {'version': 'latest', 'use_binaries': True}),
            ('user', 'neuro'),
        ]
    }
    df = Dockerfile(specs).cmd
    dbx_path, image_name = utils.DROPBOX_DOCKERHUB_MAPPING[
        'afni-latest_stretch']
    image, push = utils.get_image_from_memory(df, dbx_path, image_name)

    cmd = "bash /testscripts/test_afni.sh"
    assert DockerContainer(image).run(cmd, volumes=utils.volumes)

    if push:
        utils.push_image(image_name)
def test_build_image_freesurfer_600_min_binaries_xenial(self):
    """Install minimized FreeSurfer binaries on Ubuntu Xenial."""
    specs = {
        'pkg_manager': 'apt',
        'check_urls': True,
        'instructions': [
            ('base', 'ubuntu:xenial'),
            ('freesurfer', {'version': '6.0.0',
                            'use_binaries': True,
                            'min': True}),
            ('user', 'neuro'),
        ]
    }
    df = Dockerfile(specs).cmd
    dbx_path, image_name = utils.DROPBOX_DOCKERHUB_MAPPING[
        'freesurfer-min_zesty']
    image, push = utils.get_image_from_memory(df, dbx_path, image_name)

    cmd = "bash /testscripts/test_freesurfer.sh"
    assert DockerContainer(image).run(cmd, volumes=utils.volumes)

    if push:
        utils.push_image(image_name)
def test___init__(self):
    with pytest.raises(TypeError):
        DockerImage(dict())

    specs = {'pkg_manager': 'apt',
             'instructions': [('base', 'debian:jessie')],
             }
    df = Dockerfile(specs=specs)

    # Test that fileobj is a file object.
    image = DockerImage(df)
    assert image.fileobj.read()
def test_build_image_spm_12_standalone_zesty(self):
    """Install standalone SPM12 and MATLAB MCR R2017a."""
    specs = {'pkg_manager': 'apt',
             'check_urls': True,
             'instructions': [
                 ('base', 'ubuntu:zesty'),
                 ('spm', {'version': '12', 'matlab_version': 'R2017a'}),
                 ('user', 'neuro'),
             ]}
    df = Dockerfile(specs).cmd
    dbx_path, image_name = utils.DROPBOX_DOCKERHUB_MAPPING['spm-12_zesty']
    image, push = utils.get_image_from_memory(df, dbx_path, image_name)

    cmd = "bash /testscripts/test_spm.sh"
    assert DockerContainer(image).run(cmd, volumes=utils.volumes)

    if push:
        utils.push_image(image_name)
def test_build_image_neurodebian_dcm2niix_xenial(self):
    """Install NeuroDebian on Ubuntu 16.04."""
    specs = {'pkg_manager': 'apt',
             'check_urls': False,
             'instructions': [
                 ('base', 'ubuntu:16.04'),
                 ('neurodebian', {'os_codename': 'stretch',
                                  'download_server': 'usa-nh',
                                  'full': True,
                                  'pkgs': ['dcm2niix']}),
                 ('user', 'neuro'),
             ]}
    df = Dockerfile(specs).cmd
    dbx_path, image_name = utils.DROPBOX_DOCKERHUB_MAPPING['neurodebian_stretch']
    image, push = utils.get_image_from_memory(df, dbx_path, image_name)

    cmd = "bash /testscripts/test_neurodebian.sh"
    assert DockerContainer(image).run(cmd, volumes=utils.volumes)

    if push:
        utils.push_image(image_name)
            # (continuation of the specs dict built inside set_specs())
            'conda_install': 'python=3.6 numpy scipy pandas jupyter'
                             ' scikit-learn matplotlib pytest scikit-image',
            'pip_install': 'nipype nibabel nilearn nitime nipy'
        }),
        ('ants', {'version': '2.2.0'}),
        ('freesurfer', {'version': '6.0.0',
                        'license_path': 'license.txt',
                        'min': True}),
        ('fsl', {'version': '5.0.10', 'use_binaries': True}),
        ('neurodebian', {'os_codename': 'zesty',
                         'download_server': 'usa-nh',
                         'pkgs': ['afni', 'dcm2niix']}),
        ('spm', {'version': '12', 'matlab_version': 'R2017a'})]
    }
    return specs


if __name__ == '__main__':
    df = Dockerfile(set_specs())
    df.save('Dockerfile')
    print(df)
        # download data and code
        ('instruction',
         'RUN mkdir /data \\\n'
         f' && curl -sSL --retry 5 {data} | tar zx -C /opt \\\n'
         f' && curl -sSL --retry 5 {code} | tar zx -C /opt'),
        ('entrypoint', '/opt/antsct.sh'),
        # create miniconda environment
        ('miniconda', {
            'miniconda_version': '4.3.31',
            'env_name': 'antsct',
            'conda_install': ' '.join([
                'python=3.6.2', 'click=6.7', 'funcsigs=1.0.2',
                'future=0.16.0', 'jinja2=2.10', 'matplotlib=2.1.1',
                'mock=2.0.0', 'nibabel=2.2.1', 'numpy=1.14.0',
                'packaging=16.8', 'pandas=0.21.0', 'prov=1.5.1',
                'pydot=1.2.3', 'pydotplus=2.0.2', 'pytest=3.3.2',
                'python-dateutil=2.6.1', 'seaborn=0.8.1',
                'scikit-learn=0.19.1', 'scipy=1.0.0',
                'simplejson=3.12.0', 'traits=4.6.0'
            ]),
            'pip_install': ['nilearn==0.3.1', 'niworkflows==0.3.1',
                            'svgutils==0.3.0']
        })
    ]
}


if __name__ == '__main__':
    df = Dockerfile(specs)
    DockerImage(df).build(tag='antsct', log_console=True)
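# --- Hedged follow-up sketch (not part of the project source) ---------------
# `DockerImage.build()` returns the built image (as used in
# `get_container_from_specs` below), so the antsct image could be exercised
# the same way the tests above exercise theirs. The helper name and the shell
# command are illustrative only; run()'s signature is assumed from the tests.
def _smoke_test_antsct_image():
    df = Dockerfile(specs)
    image = DockerImage(df).build(tag='antsct', log_console=True)
    return DockerContainer(image).run('python -c "import nibabel, nilearn"')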
def get_container_from_specs(specs, **kwargs):
    """Return started container. `kwargs` are for `container.start()`."""
    df = Dockerfile(specs)
    image = DockerImage(df).build(log_console=True)
    return DockerContainer(image).start(**kwargs)
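# --- Hedged usage sketch (not part of the project source) -------------------
# Example call to the helper above. The specs dict is illustrative, and any
# keyword arguments would be forwarded to `container.start()` unchanged.
example_specs = {
    'pkg_manager': 'apt',
    'instructions': [
        ('base', 'ubuntu:16.04'),
        ('user', 'neuro'),
    ],
}

container = get_container_from_specs(example_specs)
# Assumption: a started container exposes the same run() used in the tests.
print(container.run('echo hello from the container'))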