def _get_spm_url(self):
    """Return the download URL for the requested SPM dev build."""
    template = ("http://www.fil.ion.ucl.ac.uk/spm/download/restricted/"
                "utopia/dev/spm{spm}_latest_Linux_{matlab}.zip")
    spm_url = template.format(spm=self.version, matlab=self.matlab_version)
    # Optionally verify the URL is reachable before baking it into the image.
    if self.check_urls:
        check_url(spm_url)
    return spm_url
def install_binaries(self):
    """Return command to download and install FreeSurfer binaries."""
    from neurodocker.generate import _add_to_entrypoint

    binaries_url = self._get_binaries_url()
    if self.check_urls:
        # The 'dev' tarball is not a stable endpoint, so it cannot be
        # validated ahead of time.
        if self.version == 'dev':
            raise ValueError("check_urls=True and version='dev' cannot be used "
                             "together. Set check_urls to False.")
        check_url(binaries_url)

    # Optionally trim large atlas/subject directories from the tarball.
    excluded_dirs = _get_dirs_to_exclude(self.exclude) if self.exclude else ''

    deps_cmd = self._install_binaries_deps()
    entrypoint_cmd = _add_to_entrypoint(
        "source $FREESURFER_HOME/SetUpFreeSurfer.sh", with_run=False)

    download_cmd = ('\n&& echo "Downloading FreeSurfer ..."'
                    "\n&& curl -sSL --retry 5 {url}"
                    "\n| tar xz -C /opt\n{excluded}"
                    "\n&& {entrypoint_cmd}"
                    "".format(url=binaries_url, excluded=excluded_dirs,
                              entrypoint_cmd=entrypoint_cmd))
    run_cmd = indent("RUN", deps_cmd + download_cmd)
    return "\n".join((run_cmd, "ENV FREESURFER_HOME=/opt/freesurfer"))
def install_miniconda(self):
    """Return Dockerfile instructions to install Miniconda."""
    installer_url = ("https://repo.continuum.io/miniconda/"
                     "Miniconda3-{}-Linux-x86_64.sh"
                     "".format(self.miniconda_version))
    if self.check_urls:
        check_url(installer_url)

    env_cmd = indent("ENV", "CONDA_DIR={0}"
                            "\nPATH={0}/bin:$PATH".format(Miniconda.INSTALL_PATH))

    steps = ['echo "Downloading Miniconda installer ..."',
             "miniconda_installer=/tmp/miniconda.sh",
             "curl -sSL --retry 5 -o $miniconda_installer {url}",
             "/bin/bash $miniconda_installer -b -p $CONDA_DIR",
             "rm -f $miniconda_installer",
             "conda config --system --prepend channels conda-forge",
             "conda config --system --set auto_update_conda false",
             "conda config --system --set show_channel_urls true"]
    # For 'latest', refresh conda itself before cleaning caches.
    if self.miniconda_version == 'latest':
        steps.append('conda update -n base conda')
    steps.append("conda clean -tipsy && sync")

    run_cmd = indent("RUN", '\n&& '.join(steps).format(url=installer_url))
    # Record the install so other objects can skip re-installing Miniconda.
    Miniconda.INSTALLED = True
    return "\n".join((env_cmd, run_cmd))
def install_miniconda(self):
    """Return Dockerfile instructions to install Miniconda."""
    installer_url = ("https://repo.continuum.io/miniconda/"
                     "Miniconda3-{}-Linux-x86_64.sh"
                     "".format(self.miniconda_version))
    if self.check_urls:
        check_url(installer_url)

    env_block = ("CONDA_DIR={0}"
                 "\nPATH={0}/bin:$PATH".format(Miniconda.INSTALL_PATH))
    env_cmd = indent("ENV", env_block)

    shell_steps = ('echo "Downloading Miniconda installer ..."',
                   "miniconda_installer=/tmp/miniconda.sh",
                   "curl -sSL -o $miniconda_installer {url}",
                   "/bin/bash $miniconda_installer -b -p $CONDA_DIR",
                   "rm -f $miniconda_installer",
                   "conda config --system --prepend channels conda-forge",
                   "conda config --system --set auto_update_conda false",
                   "conda config --system --set show_channel_urls true",
                   "conda update -y -q --all && sync",
                   "conda clean -tipsy && sync")
    run_cmd = indent("RUN", "\n&& ".join(shell_steps).format(url=installer_url))
    # Record the install so other objects can skip re-installing Miniconda.
    Miniconda.INSTALLED = True
    return "\n".join((env_cmd, run_cmd))
def create_from_yaml(self):
    """Return Dockerfile instructions to create conda environment from
    a YAML file.
    """
    tmp_yml = "/tmp/environment.yml"
    create_cmd = ("conda env create -q --name {n} --file {tmp}"
                  "\n&& rm -f {tmp}")

    if is_url(self.yaml_file):
        # Fetch the spec at build time, then create the environment.
        if self.check_urls:
            check_url(self.yaml_file)
        fetch_cmd = "curl -sSL {f} > {tmp}"
        instructions = indent("RUN", fetch_cmd + "\n&& " + create_cmd)
    else:
        # Copy the local spec into the image before the RUN step.
        copy_instr = 'COPY ["{f}", "{tmp}"]'
        instructions = "\n".join((copy_instr, indent("RUN", create_cmd)))

    instructions = instructions.format(n=self.env_name, f=self.yaml_file,
                                       tmp=tmp_yml)

    if not self.add_to_path:
        return instructions
    # Put the new environment's bin directory in front of PATH.
    bin_path = posixpath.join(Miniconda.INSTALL_PATH, 'envs', self.env_name,
                              'bin')
    return "\n".join((instructions, "ENV PATH={}:$PATH".format(bin_path)))
def build_from_source(self):
    """Return Dockerfile instructions to build dcm2niix from source.

    Downloads the source tarball for ``self.version`` from GitHub,
    builds it with cmake/make in /tmp, and installs it system-wide.
    """
    # Build-time dependencies, keyed by supported package manager.
    pkgs = {
        'apt': 'cmake g++ gcc git make pigz zlib1g-dev',
        'yum': 'cmake gcc-c++ git libstdc++-static make pigz zlib-devel'
    }

    url = ("https://github.com/rordenlab/dcm2niix/tarball/{}".format(
        self.version))
    if self.check_urls:
        check_url(url)

    workdir_cmd = "WORKDIR /tmp"
    cmd = ("deps='{pkgs}'"
           "\n&& {install}"
           "\n&& {clean}"
           "\n&& mkdir dcm2niix"
           "\n&& curl -sSL {url} | tar xz -C dcm2niix --strip-components 1"
           "\n&& mkdir dcm2niix/build && cd dcm2niix/build"
           "\n&& cmake .. && make"
           "\n&& make install"
           "\n&& rm -rf /tmp/*"
           "".format(pkgs=pkgs[self.pkg_manager], url=url,
                     **manage_pkgs[self.pkg_manager]))
    # Second pass: the {install} template expanded above apparently leaves a
    # literal '{pkgs}' placeholder of its own; fill it with the shell
    # variable defined on the first line so the package list is only
    # written out once.
    cmd = cmd.format(pkgs='$deps')
    cmd = indent("RUN", cmd)
    return "\n".join((workdir_cmd, cmd))
def _get_binaries_url(self):
    """Return URL to binaries for requested version."""
    base = "https://fsl.fmrib.ox.ac.uk/fsldownloads/"
    # Releases before 5.0.9 live under 'oldversions/' and were built
    # for CentOS 5.
    if self.version >= LooseVersion('5.0.9'):
        template = urljoin(base, "fsl-{ver}-centos6_64.tar.gz")
    else:
        template = urljoin(base, "oldversions/fsl-{ver}-centos5_64.tar.gz")
    binaries_url = template.format(ver=self.version)
    if self.check_urls:
        check_url(binaries_url)
    return binaries_url
def _get_mcr_url(self):
    """Return the download URL of the MATLAB Compiler Runtime installer."""
    base = 'https://www.mathworks.com/supportfiles/'
    # Mathworks reorganized the download tree after R2013a.
    new_layout = ('downloads/{ver}/deployment_files/{ver}/installers/'
                  'glnxa64/MCR_{ver}_glnxa64_installer.zip')
    old_layout = ('MCR_Runtime/{ver}/MCR_{ver}_glnxa64_installer.zip')
    rel = (new_layout if self.matlab_version > LooseVersion("R2013a")
           else old_layout)
    mcr_url = urljoin(base, rel).format(ver=self.matlab_version)
    if self.check_urls:
        check_url(mcr_url)
    return mcr_url
def test_check_url():
    """check_url succeeds on a live URL and raises on 404 and timeout."""
    good_url = 'https://www.google.com/'
    not_found_url = 'http://httpstat.us/404'
    unroutable_url = 'http://10.255.255.255'

    assert utils.check_url(good_url), "Bad response from google.com"
    with pytest.raises(RequestException):
        utils.check_url(not_found_url)
    with pytest.raises(RequestException):
        utils.check_url(unroutable_url)
def install_binaries(self):
    """Return Dockerfile instructions to download and install PETPVC
    binaries.
    """
    petpvc_url = self._get_binaries_urls(self.version)
    if self.check_urls:
        check_url(petpvc_url)
    run_cmd = indent("RUN", self._get_install_cmd(petpvc_url))
    # Expose the PETPVC executables on PATH.
    return "\n".join((run_cmd, "ENV PATH=/opt/petpvc/bin:$PATH"))
def install_binaries(self):
    """Return command to download and install MRtrix3 binaries."""
    tarball_url = ("https://dl.dropbox.com/s/2g008aaaeht3m45/"
                   "mrtrix3-Linux-centos6.tar.gz")
    if self.check_urls:
        check_url(tarball_url)

    run_body = ('echo "Downloading MRtrix3 ..."'
                '\n&& curl -sSL --retry 5 {}'
                '\n| tar zx -C /opt'.format(tarball_url))
    run_cmd = indent("RUN", run_body)
    # Expose the MRtrix3 executables on PATH.
    path_cmd = "ENV PATH=/opt/mrtrix3/bin:$PATH"
    return "\n".join((run_cmd, path_cmd))
def __init__(self, os_codename, download_server, full=True, pkgs=None,
             pkg_manager='apt', check_urls=True):
    """Build the NeuroDebian repository URL and installation command."""
    self.pkgs = pkgs
    self.check_urls = check_urls

    # Resolve the server alias, then assemble the repository URL.
    server = self._get_server(download_server)
    flavor = "full" if full else "libre"
    self.url = self._create_url(os_codename, server, flavor)
    if self.check_urls:
        check_url(self.url)
    self.cmd = self._create_cmd()
def install_binaries(self):
    """Return command to download and install FreeSurfer binaries.

    Produces a RUN instruction that downloads the tarball (skipping the
    large atlas/subject directories excluded below) and registers the
    FreeSurfer setup script in the entrypoint, plus an ENV instruction
    setting FREESURFER_HOME.
    """
    from neurodocker.generate import _add_to_entrypoint

    url = self._get_binaries_url()
    # The 'dev' tarball is not a stable endpoint, so it cannot be
    # validated ahead of time.
    if self.check_urls and self.version == 'dev':
        raise ValueError("check_urls=True and version='dev' cannot be used "
                         "together. Set check_urls to False.")
    elif self.check_urls:
        check_url(url)

    # https://github.com/nipy/workshops/blob/master/170327-nipype/docker/Dockerfile.complete#L8-L20
    # TODO: allow users to choose which directories to exclude.
    excluded_dirs = ("--exclude='freesurfer/trctrain'"
                     "\n--exclude='freesurfer/subjects/fsaverage_sym'"
                     "\n--exclude='freesurfer/subjects/fsaverage3'"
                     "\n--exclude='freesurfer/subjects/fsaverage4'"
                     "\n--exclude='freesurfer/subjects/fsaverage5'"
                     "\n--exclude='freesurfer/subjects/fsaverage6'"
                     "\n--exclude='freesurfer/subjects/cvs_avg35'"
                     "\n--exclude='freesurfer/subjects/cvs_avg35_inMNI152'"
                     "\n--exclude='freesurfer/subjects/bert'"
                     "\n--exclude='freesurfer/subjects/V1_average'"
                     "\n--exclude='freesurfer/average/mult-comp-cor'"
                     "\n--exclude='freesurfer/lib/cuda'"
                     "\n--exclude='freesurfer/lib/qt'")

    cmd = self._install_binaries_deps()
    ent = _add_to_entrypoint("source $FREESURFER_HOME/SetUpFreeSurfer.sh",
                             with_run=False)
    cmd += ('\n&& echo "Downloading FreeSurfer ..."'
            "\n&& curl -sSL --retry 5 {url}"
            "\n| tar xz -C /opt\n{excluded}"
            "\n&& {entrypoint_cmd}"
            "".format(url=url, excluded=excluded_dirs, entrypoint_cmd=ent))
    cmd = indent("RUN", cmd)
    env_cmd = "ENV FREESURFER_HOME=/opt/freesurfer"
    return "\n".join((cmd, env_cmd))
def install_binaries(self):
    """Return command to download and install ANTs binaries."""
    # Only versions with pre-built tarballs are supported.
    if self.version not in ANTs.VERSION_TARBALLS:
        raise ValueError("Tarball not available for version {}."
                         "".format(self.version))
    tarball_url = ANTs.VERSION_TARBALLS[self.version]
    if self.check_urls:
        check_url(tarball_url)

    run_body = ('echo "Downloading ANTs ..."'
                "\n&& curl -sSL --retry 5 {}"
                "\n| tar zx -C /opt".format(tarball_url))
    run_cmd = indent("RUN", run_body)

    env_block = ("ANTSPATH=/opt/ants"
                 "\nPATH=/opt/ants:$PATH")
    env_cmd = indent("ENV", env_block)
    return "\n".join((run_cmd, env_cmd))
def install_with_pyinstaller(check_urls=False):
    """Return Dockerfile instructions to install FSL using FSL's Python
    installer. This will install the latest version and only works on
    CentOS/RHEL.

    Parameters
    ----------
    check_urls : bool
        If true, verify that the installer URL is reachable before
        generating the instructions.
    """
    workdir_cmd = "WORKDIR /opt"
    url = "https://fsl.fmrib.ox.ac.uk/fsldownloads/fslinstaller.py"
    if check_urls:
        check_url(url)

    cmd = ("curl -sSL -o fslinstaller.py {url}"
           "\n&& python fslinstaller.py --dest=/opt --quiet"
           "\n&& . /opt/fsl/etc/fslconf/fsl.sh"
           "\n&& rm -f fslinstaller.py"
           "".format(url=url))
    cmd = indent("RUN", cmd)

    # BUG FIX: ENV instructions take newline-separated KEY=VALUE pairs;
    # the shell continuation '&&' is only valid inside RUN commands and
    # produced an invalid '&& PATH=...' line in the generated Dockerfile.
    path_cmd = ("FSLDIR=/opt/fsl"
                "\nPATH=/opt/fsl/bin:$PATH")
    path_cmd = indent("ENV", path_cmd)
    return "\n".join((workdir_cmd, cmd, path_cmd))
def _install_eddy_5011(self):
    """Return Dockerfile instructions to install the FSL eddy v5.0.11
    pre-release binary.
    """
    eddy_url = self._get_eddy_5011_url()
    if self.check_urls:
        check_url(eddy_url)

    cmd = ('\n&& cd /opt/fsl/bin'
           '\n&& rm -f eddy_openmp eddy_cuda*'
           '\n&& echo "Downloading FSL eddy v5.0.11 pre-release ..."'
           '\n&& curl -sSLO --retry 5 {}'
           '\n&& chmod +x eddy_*').format(eddy_url)

    # CUDA builds are downloaded under a versioned filename; expose the
    # conventional 'eddy_cuda' name via a symlink.
    binary_name = eddy_url.split('/')[-1]
    if 'cuda' in binary_name:
        cmd += '\n&& ln -sv {} eddy_cuda'.format(binary_name)
    return cmd
def install_binaries(self):
    """Return command to download and install C3D binaries."""
    version_key = self.version.lower()
    if version_key not in Convert3D.VERSION_TARBALLS:
        raise ValueError("Unsupported version: {}".format(self.version))
    tarball_url = Convert3D.VERSION_TARBALLS[version_key]
    if self.check_urls:
        check_url(tarball_url)

    run_body = ('echo "Downloading C3D ..."'
                "\n&& mkdir /opt/c3d"
                "\n&& curl -sSL --retry 5 {}"
                "\n| tar -xzC /opt/c3d --strip-components=1".format(tarball_url))
    run_cmd = indent("RUN", run_body)

    install_dir = "/opt/c3d"
    env_block = ("C3DPATH={}"
                 "\nPATH={}:$PATH").format(install_dir,
                                           posixpath.join(install_dir, 'bin'))
    env_cmd = indent("ENV", env_block)
    return "\n".join((run_cmd, env_cmd))
def create_from_yaml(self):
    """Return Dockerfile instructions to create conda environment from
    a YAML file.
    """
    tmp_yml = "/tmp/environment.yml"
    create_cmd = ("conda env create -q --name {n} --file {tmp}"
                  "\n&& rm -f {tmp}")
    if self.activate:
        # Activate the new environment in the container entrypoint.
        create_cmd += self._get_source_activate_cmd()

    if is_url(self.yaml_file):
        # Fetch the spec at build time, then create the environment.
        if self.check_urls:
            check_url(self.yaml_file)
        fetch_cmd = "curl -sSL {f} > {tmp}"
        instructions = indent("RUN", fetch_cmd + "\n&& " + create_cmd)
    else:
        # Copy the local spec into the image before the RUN step.
        copy_instr = 'COPY ["{f}", "{tmp}"]'
        instructions = "\n".join((copy_instr, indent("RUN", create_cmd)))

    return instructions.format(n=self.env_name, f=self.yaml_file, tmp=tmp_yml)
def install_binaries(self):
    """Return Dockerfile instructions to download and install MINC
    binaries.

    Also downloads the BEaST library (v1.1) and the 09a/09c average
    models, and registers the MINC toolkit configuration script in the
    container entrypoint.
    """
    from neurodocker.dockerfile import _add_to_entrypoint

    minc_url = self._get_binaries_urls(self.version)
    beast_url = self.BEAST_URL['1.1']
    models_09a_url = self.MODELS_URL['09a']
    models_09c_url = self.MODELS_URL['09c']
    # Verify all four downloads up front so a bad URL fails fast.
    if self.check_urls:
        check_url(minc_url)
        check_url(beast_url)
        check_url(models_09a_url)
        check_url(models_09c_url)

    cmd = self._install_binaries_deps()
    ent = _add_to_entrypoint("source /opt/minc/minc-toolkit-config.sh",
                             with_run=False)
    cmd += self._get_install_cmd(minc_url, beast_url, models_09a_url,
                                 models_09c_url, ent)
    cmd = indent("RUN", cmd)
    return cmd
def install_binaries(self):
    """Return Dockerfile instructions to download and install AFNI
    binaries.

    On apt-based images this also installs legacy libraries (libxp,
    libpng12) and R, falling back to direct .deb/script downloads when
    the packages are not in the distribution's repositories.
    """
    url = self._get_binaries_urls(self.version)
    if self.check_urls:
        check_url(url)

    pkgs = self._get_binaries_dependencies()

    # Some AFNI binaries link against libgsl.so.0; alias the newer
    # soname when only libgsl.so.19 is present.
    cmd = ("{install}"
           '\n&& libs_path=/usr/lib/x86_64-linux-gnu'
           '\n&& if [ -f $libs_path/libgsl.so.19 ]; then'
           '\n ln $libs_path/libgsl.so.19 $libs_path/libgsl.so.0;'
           '\n fi'
           "".format(**manage_pkgs[self.pkg_manager]).format(pkgs=pkgs))

    if self.pkg_manager == "apt":
        # libxp was removed after ubuntu trusty.
        deb_url = ('http://mirrors.kernel.org/debian/pool/main/libx/'
                   'libxp/libxp6_1.0.2-2_amd64.deb')
        cmd += ("\n# Install libxp (not in all ubuntu/debian repositories)"
                "\n&& apt-get install -yq --no-install-recommends libxp6"
                '\n|| /bin/bash -c "'
                '\n curl --retry 5 -o /tmp/libxp6.deb -sSL {}'
                '\n && dpkg -i /tmp/libxp6.deb && rm -f /tmp/libxp6.deb"'
                ''.format(deb_url))

        deb_url = ('http://mirrors.kernel.org/debian/pool/main/libp/'
                   'libpng/libpng12-0_1.2.49-1%2Bdeb7u2_amd64.deb')
        cmd += ("\n# Install libpng12 (not in all ubuntu/debian repositories)"
                "\n&& apt-get install -yq --no-install-recommends libpng12-0"
                '\n|| /bin/bash -c "'
                '\n curl -o /tmp/libpng12.deb -sSL {}'
                '\n && dpkg -i /tmp/libpng12.deb && rm -f /tmp/libpng12.deb"'
                ''.format(deb_url))

        sh_url = ("https://gist.githubusercontent.com/kaczmarj/"
                  "8e3792ae1af70b03788163c44f453b43/raw/"
                  "0577c62e4771236adf0191c826a25249eb69a130/"
                  "R_installer_debian_ubuntu.sh")
        cmd += ("\n# Install R"
                "\n&& apt-get install -yq --no-install-recommends"
                "\n\tr-base-dev r-cran-rmpi"
                '\n || /bin/bash -c "'
                '\n curl -o /tmp/install_R.sh -sSL {}'
                '\n && /bin/bash /tmp/install_R.sh"'
                ''.format(sh_url))

    # Download AFNI itself and install all of its R packages.
    cmd += ("\n&& {clean}"
            '\n&& echo "Downloading AFNI ..."'
            "\n&& mkdir -p /opt/afni"
            "\n&& curl -sSL --retry 5 {}"
            "\n| tar zx -C /opt/afni --strip-components=1"
            "\n&& /opt/afni/rPkgsInstall -pkgs ALL"
            "\n&& rm -rf /tmp/*"
            "".format(url, **manage_pkgs[self.pkg_manager]))
    cmd = indent("RUN", cmd)

    env_cmd = "PATH=/opt/afni:$PATH"
    env_cmd = indent("ENV", env_cmd)
    return "\n".join((env_cmd, cmd))