Exemple #1
0
    def build(self):
        """Provision the uchroot image with profiling tools and repack it.

        Emerges likwid, libpfm, papi, time and boost inside the chroot,
        then tars the chroot contents into a new archive, refreshes its
        hash file via ``update_hash`` and moves the new archive over the
        original one under the configured tmp_dir.

        Fix: dropped the unused function-local imports ``FG`` and ``info``.
        """
        import sys
        # Don't do something when running non-interactive.
        if not sys.stdout.isatty():
            return

        from benchbuild.utils.downloader import update_hash

        root = CFG["tmp_dir"].value()
        src_file = self.src_file + ".new"
        with local.cwd(self.builddir):
            emerge_in_chroot = uchroot()["/usr/bin/emerge"]
            # boost is emerged as uid=501/gid=10 — presumably to avoid
            # building as root inside the chroot; TODO confirm.
            emerge_boost = uchroot(uid=501, gid=10)["/usr/bin/emerge"]
            with local.env(CC="gcc", CXX="g++", ACCEPT_KEYWORDS="~amd64"):
                # USE=-filecaps only for likwid — presumably file
                # capabilities are unavailable inside uchroot; confirm.
                with local.env(USE="-filecaps"):
                    run(emerge_in_chroot["likwid"])
                with local.env(USE="static-libs"):
                    run(emerge_in_chroot["dev-libs/libpfm"])
                run(emerge_in_chroot["dev-libs/papi"])
                run(emerge_in_chroot["sys-process/time"])
                run(emerge_boost["dev-utils/boost-build"])
                run(emerge_boost["dev-libs/boost"])

            tgt_path = path.join(root, self.src_file)
            tgt_path_new = path.join(root, src_file)
            tar("cjf", tgt_path_new, ".")
            update_hash(src_file, root)
            mv(path.join(root, src_file), tgt_path)
Exemple #2
0
def setup_bash_in_container(builddir, container, outfile, mounts, shell):
    """Open an interactive shell inside the container and pack the result.

    The user prepares the image interactively; if the shell exits with a
    non-zero code the old container is kept, otherwise the contents of
    ``container-in`` are tarred, hashed and moved to ``outfile``.
    """
    with local.cwd(builddir):
        # Switch to bash inside uchroot
        print("Entering bash inside User-Chroot. Prepare your image and "
              "type 'exit' when you are done. If bash exits with a non-zero"
              "exit code, no new container will be stored.")
        try:
            run_in_container(shell, container, mounts)
        except ProcessExecutionError:
            # Shell failed: user aborted, keep the existing container.
            return

        print("Packing new container image.")

        image_name = os.path.split(container)[-1]
        packed_image = os.path.abspath(
            os.path.join("container-out", image_name))

        # Pack the results to: container-out
        with local.cwd("container-in"):
            tar("cjf", packed_image, ".")
        update_hash(image_name, os.path.dirname(packed_image))

        target_dir = os.path.dirname(outfile)
        if not os.path.exists(target_dir):
            mkdir("-p", target_dir)
        mv(packed_image, outfile)
Exemple #3
0
    def build(self):
        """Open an interactive shell inside the uchroot image and repack it.

        After the user exits the shell, the chroot contents are tarred into
        a new stage3 archive, its hash file is refreshed, and the new
        archive replaces the original one under the configured tmpdir.

        Fix: dropped the unused function-local import ``info``.
        """
        import sys
        # Don't do something when running non-interactive.
        if not sys.stdout.isatty():
            return

        from plumbum import FG
        from pprof.utils.downloader import update_hash
        from pprof.settings import config

        root = config["tmpdir"]
        src_file = self.src_file + ".new"
        with local.cwd(self.builddir):
            bash_in_uchroot = uchroot()["/bin/bash"]
            print("Entering User-Chroot. Prepare your image and "
                  "type 'exit' when you are done.")
            # Run bash in the foreground so the user gets the terminal.
            bash_in_uchroot & FG  # pylint: disable=W0104
            tgt_path = path.join(root, self.src_file)
            tgt_path_new = path.join(root, src_file)
            print("Packing new stage3 image. "
                  "This will replace the original one at: ", tgt_path)
            tar("cjf", tgt_path_new, ".")
            update_hash(src_file, root)
            mv(path.join(root, src_file), tgt_path)
Exemple #4
0
def test_create(builddir, runner):
    """End-to-end check of BuildFile: build a tarball from two sources,
    verify the recorded dependency graph, then touch one input and make
    sure a rebuild reproduces an identical archive.

    NOTE(review): ``sys.exit`` is asserted via mock — presumably patched
    by a fixture outside this view; confirm.
    """
    # prepare needed files
    with local.cwd(builddir):
        (sh.echo["a.c"] > "a.c")()
        (sh.echo["b.c"] > "b.c")()

    builder = BuildFile(build_dir=builddir, runner=runner)
    builder.main(command_line=['-c', '-D', 'build'])

    # Values hold only the 'input-'/'output-' hash prefixes — presumably
    # assert_same_json compares by prefix; verify against the helper.
    expected_json = {
        'tar czvf foo.tar.gz a.c b.c': {
            'b.c': 'input-',
            'foo.tar.gz': 'output-',
            'a.c': 'input-'
        },
        '.deps_version': 2
    }

    # assertions
    with local.cwd(builddir):
        assert_same_json('.deps', expected_json)
        assert os.path.isfile('foo.tar.gz')
        assert sh.tar("tf", 'foo.tar.gz') == "a.c\nb.c\n"
        print(sh.ls("-al"))
        assert '"a.c": "input-' in sh.cat(".deps")
        sys.exit.assert_called_once_with(0)

        # Modify a.c to force rebuilding
        (sh.echo["newline"] > "a.c")()

    builder.main(command_line=['-D', 'build'])

    with local.cwd(builddir):
        sh.tar("df", "foo.tar.gz") # ensure tar diff return no difference
Exemple #5
0
    def build(self):
        """Install/upgrade pprof inside the uchroot image and repack it.

        Mounts the configured source directory into the chroot as
        ``/pprof-src``, upgrades the in-chroot installation via pip3, then
        tars the chroot into a new stage3 archive, refreshes its hash and
        replaces the original archive.

        Fix: dropped the unused function-local import ``info``.
        """
        import sys
        # Don't do something when running non-interactive.
        if not sys.stdout.isatty():
            return

        from plumbum import FG
        from pprof.utils.downloader import update_hash
        from pprof.settings import config

        root = config["tmpdir"]
        src_file = self.src_file + ".new"
        with local.cwd(self.builddir):
            mkdir("-p", "pprof-src")
            # Bind-mount the host source tree to pprof-src in the chroot.
            w_pprof_src = uchroot("-m", "{}:pprof-src".format(config[
                "sourcedir"]))
            pip_in_uchroot = w_pprof_src["/usr/bin/pip3"]
            pip_in_uchroot["install", "--upgrade", "/pprof-src/"] & FG

            tgt_path = path.join(root, self.src_file)
            tgt_path_new = path.join(root, src_file)
            tar("cjf", tgt_path_new, ".")
            update_hash(src_file, root)
            mv(path.join(root, src_file), tgt_path)
Exemple #6
0
    def download(self):
        """Fetch the source archive and unpack it (xz) in the build dir."""
        from plumbum.cmd import tar
        from benchbuild.utils.downloader import Wget

        with local.cwd(self.builddir):
            Wget(self.src_uri, self.src_file)
            archive = path.join(self.builddir, self.src_file)
            tar("xfJ", archive)
Exemple #7
0
    def download(self):
        """Fetch the gzip'd source tarball and extract it in the build dir."""
        from plumbum.cmd import tar
        from pprof.utils.downloader import Wget

        with local.cwd(self.builddir):
            archive = self.src_file
            Wget(self.src_uri, archive)
            tar("xfz", archive)
Exemple #8
0
def load_source(url_or_path: str) -> plumbum.Path:
    """Materialize a project source tree in the current directory.

    Supports ``git+<scheme>`` URLs (shallow clone, fragment selects the
    branch), HTTP(S) URLs serving a gzipped tarball, local directories
    (copied) and local tarballs (extracted). Returns the single new entry
    created in the working directory.
    """
    url = urlparse(url_or_path)

    with track_new_files(local.cwd) as created:
        if url.scheme.startswith("git+"):
            # Rebuild the URL without the git+ prefix and without the
            # fragment (the fragment names the branch, not a URL part).
            plain_url = urlunparse((
                url.scheme[len("git+"):],
                url.netloc,
                url.path,
                url.params,
                url.query,
                "",
            ))
            clone_args = ["clone", plain_url, "--depth=1"]
            if url.fragment:
                clone_args.append(f"--branch={url.fragment}")
            cmd.git(*clone_args)

        elif url.scheme in HTTP_SCHEMES:
            # Stream the tarball straight into tar.
            (cmd.wget["-qO-", url_or_path] | cmd.tar["xvz"])()

        else:
            candidate = local.cwd / url_or_path
            if candidate.is_dir():
                cmd.cp("-r", candidate, local.cwd)
            else:
                cmd.tar("-xvf", url_or_path)

    # Exactly one new entry is expected; anything else is an error.
    project_path, = created
    return project_path
Exemple #9
0
    def download(self):
        """Fetch the source tarball and extract it in the build dir."""
        from pprof.utils.downloader import Wget
        from plumbum.cmd import tar

        with local.cwd(self.builddir):
            Wget(self.src_uri, self.src_file)
            # Address the downloaded archive by its absolute path.
            archive_path = path.join(self.builddir, self.src_file)
            tar("xfz", archive_path)
Exemple #10
0
    def prepare(self):
        """Run the base preparation, then fetch and unpack the test archive."""
        super(BZip2, self).prepare()

        archive = self.test_archive
        with local.cwd(self.builddir):
            Wget(self.test_url + archive, archive)
            tar("fxz", archive)
Exemple #11
0
    def download(self):
        """Fetch the boost tarball plus the project git repo; unpack boost."""
        from plumbum.cmd import tar
        from benchbuild.utils.downloader import Git, Wget

        with local.cwd(self.builddir):
            boost_archive = self.boost_src_file
            Wget(self.boost_src_uri, boost_archive)
            Git(self.src_uri, self.src_dir)
            tar("xfj", boost_archive)
Exemple #12
0
    def download(self):
        """Fetch and unpack the sources, then rsync the FATE test samples."""
        from pprof.utils.downloader import Wget, Rsync
        from plumbum.cmd import tar

        with local.cwd(self.builddir):
            Wget(self.src_uri, self.src_file)
            archive = path.join(self.builddir, self.src_file)
            tar('xfj', archive)
            # The FATE samples are synced into the unpacked source tree.
            with local.cwd(self.src_dir):
                Rsync(self.fate_uri, self.fate_dir)
Exemple #13
0
def extract_tar(tarfile, outdir):
    """Extract *tarfile* into *outdir*, logging a failure instead of raising."""
    extract_args = ["-xf", tarfile]
    try:
        with local.cwd(outdir):
            log("Extracting archive...")
            logcmd("tar", extract_args)
            tar(*extract_args)
    except ProcessExecutionError as e:
        # Best-effort: report the tar exit code, do not propagate.
        logerr("Could not extract file: {}".format(e.retcode))
Exemple #14
0
    def download(self):
        """Fetch p7zip, unpack it, and activate the clang/amd64 makefile."""
        from pprof.utils.downloader import Wget
        from plumbum.cmd import tar, cp

        with local.cwd(self.builddir):
            Wget(self.src_uri, self.src_file)
            tar('xfj', path.join(self.builddir, self.src_file))
            # Select the clang/amd64 assembly makefile as makefile.machine.
            src_root = path.join(self.builddir, self.src_dir)
            cp(
                path.join(src_root, "makefile.linux_clang_amd64_asm"),
                path.join(src_root, "makefile.machine"))
Exemple #15
0
def install_tex():
    """Download the TeX Live net installer and run its default install.

    The installer is fed "I" on stdin (the 'start installation' choice)
    and runs under sudo with the install prefix pinned below /usr/local.
    """
    with local.cwd("/tmp"):
        cmd.wget("mirror.ctan.org/systems/texlive/tlnet/install-tl-unx.tar.gz")
        cmd.tar("-xvf", "install-tl-unx.tar.gz")

        # The tarball unpacks to a dated install-tl-YYYYMMDD directory.
        installer_dir = next(
            p for p in (local.cwd // "install-tl*") if p.is_dir())

        with local.cwd(installer_dir), local.env(
                TEXLIVE_INSTALL_PREFIX="/usr/local/texlive",
                TEXLIVE_INSTALL_TEXDIR="/usr/local/texlive/latest"):
            (cmd.sudo[local[local.cwd / "install-tl"]] << "I\n")()
    def handleSamsungStockImage(self, filename):
        """Pull system.img out of a Samsung stock firmware zip and process it.

        Unzips the AP tarball matching ``self.samsungPattern`` from
        *filename*, extracts system.img from that tarball, hands it to
        :meth:`handleSystemImage`, and removes the extracted image again.
        Works in the current directory.
        """
        logger.debug('Unzipping "%s" from image file "%s"..' %
                     (self.samsungPattern, filename))
        unzip('-o', filename, self.samsungPattern)
        # Locate the unzipped AP tarball among the files now in cwd.
        apFilename = self.findFileByPattern(os.listdir('.'),
                                            self.samsungPattern)
        logger.debug('done: %s', apFilename)

        logger.debug('Extracting system.img from tarball')
        tar('xf', apFilename, 'system.img')
        logger.debug('done')

        self.handleSystemImage('system.img')
        # Clean up the extracted image once it has been handled.
        rm('-rf', 'system.img')
Exemple #17
0
def test_create(builddir, runner, end_fabricate):
    """End-to-end fabricate test: build a tarball from two sources, check
    the recorded .deps graph, then modify an input and verify the rebuild
    produces an identical archive.

    NOTE(review): ``sys.exit`` is asserted as a mock — presumably patched
    by a fixture outside this view; confirm.
    """
    # prepare needed files
    with local.cwd(builddir):
        (sh.echo["a.c"] > "a.c")()
        (sh.echo["b.c"] > "b.c")()

    # build.py content >>>>>>>>>>>>>>>>>>>>>
    # Simulates a build.py module: build()/clean() are captured via
    # locals() and handed to main() as the globals dict.
    def fabricate_file():
        def build():
            run('tar', 'czvf', 'foo.tar.gz', 'a.c', 'b.c')

        def clean():
            autoclean()
        return copy(locals())

    main(globals_dict=fabricate_file(),
         build_dir=builddir,
         runner=runner,
         command_line=['-c', '-D', 'build'])
    end_fabricate()

    # Values hold only the 'input-'/'output-' hash prefixes — presumably
    # assert_same_json compares by prefix; verify against the helper.
    expected_json = {'tar czvf foo.tar.gz a.c b.c':
        {'b.c': 'input-',
         'foo.tar.gz': 'output-',
         'a.c': 'input-'},
     u'.deps_version': 2}


    # assertions
    with local.cwd(builddir):
        assert_same_json('.deps', expected_json)
        assert os.path.isfile('foo.tar.gz')
        assert sh.tar("tf", 'foo.tar.gz') == "a.c\nb.c\n"
        print(sh.ls("-al"))
        assert '"a.c": "input-' in sh.cat(".deps")
        sys.exit.assert_called_once_with(0)


        # Modify a.c to force rebuilding
        (sh.echo["newline"] > "a.c")()

    main(globals_dict=fabricate_file(),
         build_dir=builddir,
         runner=runner,
         command_line=['-D', 'build'])
    end_fabricate()

    with local.cwd(builddir):
        sh.tar("df", "foo.tar.gz") # ensure tar diff return no difference
Exemple #18
0
    def build(self):
        """Update the uchroot image's @world set and repack the archive.

        Strips CC/CXX overrides from the chroot's make.conf, performs a
        world upgrade under unstable keywords (including an ncurses slot
        pin), then tars the chroot into a new archive, refreshes its hash
        and replaces the original under the configured tmp_dir. The
        commented-out lines are kept as deliberate scaffolding.

        Fix: dropped the unused function-local imports ``FG`` and ``info``.
        """
        from benchbuild.utils.downloader import update_hash

        uchroot = uchroot_no_llvm

        root = CFG["tmp_dir"].value()
        src_file = self.src_file + ".new"
        with local.cwd(self.builddir):
            sed_in_chroot = uchroot()["/bin/sed"]
            run(sed_in_chroot["-i", '/CC=/d', "/etc/portage/make.conf"])
            run(sed_in_chroot["-i", '/CXX=/d', "/etc/portage/make.conf"])
            emerge_in_chroot = uchroot()["/usr/bin/emerge"]
            #run(emerge_in_chroot["dev-python/pip"])

            with local.env(CC="gcc", CXX="g++"):
            #    run(emerge_in_chroot["dev-db/postgresql"])
            #    run(emerge_in_chroot["net-misc/curl"])

                # We need the unstable portage version
                with local.env(ACCEPT_KEYWORDS="~*", LD_LIBRARY_PATH=""):
                    run(emerge_in_chroot["--autounmask-only=y",
                        "-uUDN", "--with-bdeps=y", "@world"])
                    run(emerge_in_chroot["-uUDN", "--with-bdeps=y", "@world"])
                    run(emerge_in_chroot["--autounmask-only=y", "=sys-libs/ncurses-6.0-r1:0/6"])
                    run(emerge_in_chroot["=sys-libs/ncurses-6.0-r1:0/6"])
            #        run(emerge_in_chroot["sys-apps/portage"])

            #benchbuild_src = CFG["src_dir"].value()
            #version = CFG["version"].value()
            #with local.cwd(benchbuild_src):
            #    setup_py = local["./setup.py"]("sdist", "-d", self.builddir)

            #pip_in_uchroot = uchroot()["/usr/bin/pip3"]
            #pip_in_uchroot("install", "--upgrade",
            #               "benchbuild-{}.tar.gz".format(version))

            tgt_path = path.join(root, self.src_file)
            tgt_path_new = path.join(root, src_file)
            tar("cjf", tgt_path_new, ".")
            update_hash(src_file, root)
            mv(path.join(root, src_file), tgt_path)
Exemple #19
0
 def main(self):
     """Generate a SCION topology and pack it into ``self.outfile``.

     Runs topogen into a temp dir, rewrites absolute paths in
     scion-dc.yml to $SCIONROOT, creates empty support directories and
     tars everything up. The temp dir is always removed.
     """
     tmpdir = local.path(tempfile.mkdtemp(prefix='topogen.'))
     try:
         scion_pki = local.path(self.scion_pki_bin)
         topogen = local[self.topogen_bin]
         # Presumably topogen shells out to scion-pki; put it first on PATH.
         local.env.path.insert(0, scion_pki.parent)
         topogen_args = ['-o', tmpdir / 'gen', '-c', self.topo]
         if self.params != '':
             topogen_args += self.params.split()
         print('Running topogen with following arguments: ' + ' '.join(topogen_args))
         print(topogen(*topogen_args))
         # Remove the explicit scion root dir, so that tests can adapt this to wherever they
         # unpack the tar.
         cmd.sed('-i', 's@%s@$SCIONROOT@g' % local.path('.'), tmpdir / 'gen' / 'scion-dc.yml')
         cmd.sed('-i', 's@%s@$SCIONROOT@g' % tmpdir, tmpdir / 'gen' / 'scion-dc.yml')
         for support_dir in ['logs', 'gen-cache', 'gen-data', 'traces']:
             os.mkdir(tmpdir / support_dir)
         cmd.tar('-C', tmpdir, '-cf', self.outfile, '.')
     finally:
         shutil.rmtree(tmpdir, ignore_errors=True)
Exemple #20
0
def install_singularity(singularity_version="3.5.3"):
    """Build Singularity from its release tarball into /opt/singularity.

    Installs the Go toolchain and native build dependencies via apt,
    downloads the requested release, compiles it and installs it with
    sudo.
    """
    install_with_apt("golang-go")
    install_with_apt(
        "build-essential",
        "libssl-dev",
        "uuid-dev",
        "libgpgme11-dev",
        "squashfs-tools",
        "libseccomp-dev",
        "pkg-config",
    )

    tarball = f"singularity-{singularity_version}.tar.gz"
    release_url = (
        "https://github.com/sylabs/singularity/releases/download/"
        f"v{singularity_version}/{tarball}"
    )

    with local.cwd(make_or_find_libraries_dir()):
        cmd.wget(release_url)
        cmd.tar("-xzf", tarball)

        with local.cwd("singularity"):
            local["./mconfig"]("--prefix=/opt/singularity")
            cmd.make("-C", "./builddir")
            # Only the install step needs elevated privileges.
            cmd.sudo[cmd.make["-C", "./builddir", "install"]]()
Exemple #21
0
 def main(self):
     """Generate a SCION topology and pack it into ``self.outfile``.

     Copies crypto_lib.sh into the temp dir (bazel only symlinks it) and
     exposes it via CRYPTOLIB while topogen runs, rewrites absolute paths
     in scion-dc.yml to $SCIONROOT, creates empty support directories and
     tars everything up. The temp dir is always removed.
     """
     tmpdir = local.path(tempfile.mkdtemp(prefix='topogen.'))
     try:
         scion_pki = local.path(self.scion_pki_bin)
         topogen = local[self.topogen_bin]
         # Presumably topogen shells out to scion-pki; put it first on PATH.
         local.env.path.insert(0, scion_pki.parent)
         # bazel only creates a symlink to the crypto_lib,
         # we copy it to tmp so that it works with docker.
         cp('-L', local.path('./scripts/cryptoplayground/crypto_lib.sh'), tmpdir)
         topogen_args = ['-o', tmpdir / 'gen', '-c', self.topo]
         if self.params != '':
             topogen_args += self.params.split()
         with local.env(CRYPTOLIB=tmpdir / 'crypto_lib.sh'):
             print('Running topogen with following arguments: ' + ' '.join(topogen_args))
             print(topogen(*topogen_args))
         # Remove the explicit scion root dir, so that tests can adapt this to wherever they
         # unpack the tar.
         sed('-i', 's@%s@$SCIONROOT@g' % local.path('.'), tmpdir / 'gen' / 'scion-dc.yml')
         sed('-i', 's@%s@$SCIONROOT@g' % tmpdir, tmpdir / 'gen' / 'scion-dc.yml')
         for support_dir in ['logs', 'gen-cache', 'gen-data', 'traces']:
             os.mkdir(tmpdir / support_dir)
         tar('-C', tmpdir, '-cf', self.outfile, '.')
     finally:
         shutil.rmtree(tmpdir, ignore_errors=True)
Exemple #22
0
    def download(self):
        """Fetch and unpack the source, libmcrypt and mhash tarballs."""
        from benchbuild.utils.downloader import Wget
        from plumbum.cmd import tar

        # Each tarball is downloaded and extracted in the same order as
        # before; the loop only removes the repetition.
        downloads = (
            (self.src_uri, self.src_file),
            (self.libmcrypt_uri, self.libmcrypt_file),
            (self.mhash_uri, self.mhash_file),
        )
        with local.cwd(self.builddir):
            for uri, archive in downloads:
                Wget(uri, archive)
                tar('xfz', archive)
Exemple #23
0
    def main(self):
        """Eddy-current correction for a DWI volume.

        Splits the DWI into single volumes, registers each to the
        extracted B0 (in parallel), merges the registered volumes, rotates
        the b-vectors by the finite-strain rotation of each affine
        transform, and writes the corrected image, bvec/bval files and a
        tarball of the transforms next to ``self.out``.
        """
        self.out = local.path(self.out)
        if self.out.exists():
            if self.overwrite:
                self.out.delete()
            else:
                logging.error("{} exists, use '--force' to overwrite it".format(self.out))
                sys.exit(1)

        outxfms = self.out.dirname / self.out.stem+'_xfms.tgz'

        with TemporaryDirectory() as tmpdir, local.cwd(tmpdir):
            tmpdir = local.path(tmpdir)

            dicePrefix = 'vol'

            logging.info('Dice the DWI')
            fslsplit[self.dwi] & FG

            logging.info('Extract the B0')
            check_call((' ').join([pjoin(FILEDIR,'bse.py'), '-i', self.dwi._path, '-o', 'b0.nii.gz']), shell= True)

            logging.info('Register each volume to the B0')
            vols = sorted(tmpdir // (dicePrefix + '*.nii.gz'))

            # use the following multi-processed loop
            pool= Pool(int(self.nproc))
            res= pool.map_async(_Register_vol, vols)
            volsRegistered= res.get()
            pool.close()
            pool.join()

            # or use the following for loop
            # volsRegistered = []
            # for vol in vols:
            #     volnii = vol.with_suffix('.nii.gz')
            #     logging.info('Run FSL flirt affine registration')
            #     flirt('-interp' ,'sinc'
            #           ,'-sincwidth' ,'7'
            #           ,'-sincwindow' ,'blackman'
            #           ,'-in', volnii
            #           ,'-ref', 'b0.nii.gz'
            #           ,'-nosearch'
            #           ,'-o', volnii
            #           ,'-omat', volnii.with_suffix('.txt', depth=2)
            #           ,'-paddingsize', '1')
            #     volsRegistered.append(volnii)


            fslmerge('-t', 'EddyCorrect-DWI.nii.gz', volsRegistered)
            transforms = tmpdir.glob(dicePrefix+'*.txt')
            transforms.sort()


            logging.info('Extract the rotations and realign the gradients')

            bvecs= read_bvecs(self.bvecFile._path)
            bvecs_new= bvecs.copy()
            for (i,t) in enumerate(transforms):

                logging.info('Apply ' + t)
                tra = np.loadtxt(t)

                # removes the translation
                aff = np.matrix(tra[0:3,0:3])

                # computes the finite strain of aff to get the rotation
                rot = aff*aff.T

                # compute the square root of rot
                [el, ev] = np.linalg.eig(rot)
                eL = np.identity(3)*np.sqrt(el)
                sq = ev*eL*ev.I

                # finally the rotation is defined as
                rot = sq.I*aff

                bvecs_new[i] = np.dot(rot,bvecs[i]).tolist()[0]



            tar('cvzf', outxfms, transforms)

            # save modified bvecs
            write_bvecs(self.out._path+'.bvec', bvecs_new)

            # save EddyCorrect-DWI
            local.path('EddyCorrect-DWI.nii.gz').copy(self.out._path+'.nii.gz')

            # copy bvals
            self.bvalFile.copy(self.out._path+'.bval')

            if self.debug:
                tmpdir.copy(pjoin(dirname(self.out),"eddy-debug-"+str(getpid())))
Exemple #24
0
 def _unpack_topo(self):
     """Extract the topology tarball and point the compose file at it."""
     artifacts = self.test_state.artifacts
     cmd.tar('-xf', self.test_state.topology_tar, '-C', artifacts)
     dc_file = artifacts / 'gen/scion-dc.yml'
     # Re-anchor $SCIONROOT to wherever the artifacts were unpacked.
     cmd.sed('-i', 's#$SCIONROOT#%s#g' % artifacts, dc_file)
     self.test_state.dc.compose_file = dc_file
Exemple #25
0
 def _unpack_topo(self):
     """Unpack the sig_ping topogen output and adapt SCIONROOT."""
     artifacts = self.test_state.artifacts
     cmd.tar("-xf", "./acceptance/sig_ping/gen.tar", "-C", artifacts)
     # Re-anchor $SCIONROOT to wherever the artifacts were unpacked.
     cmd.sed("-i", "s#$SCIONROOT#%s#g" % artifacts,
             artifacts / "gen/scion-dc.yml")
Exemple #26
0
    def main(self):
        """Eddy-current correction for an NRRD DWI volume.

        Dices the DWI along the 3rd axis, registers each volume to the
        extracted B0 (in parallel), merges the registered volumes, rotates
        each DWMRI gradient by the finite-strain rotation of its transform
        (mapped through the measurement frame and space orientation), and
        writes the corrected NRRD plus a tarball of the transforms.
        """
        self.out = local.path(self.out)
        if self.out.exists():
            if self.overwrite:
                self.out.delete()
            else:
                logging.error(
                    "{} exists, use '--force' to overwrite it".format(
                        self.out))
                sys.exit(1)
        outxfms = self.out.dirname / self.out.stem + '-xfms.tgz'
        with TemporaryDirectory() as tmpdir, local.cwd(tmpdir):
            tmpdir = local.path(tmpdir)

            # fileinput() caused trouble reading data file in python 3, so switching to nrrd
            # if the hdr has 'nan' in space origin, the following will take care of that
            img = nrrd.read(self.dwi)
            dwi = img[0]
            hdr = img[1]

            hdr_out = hdr.copy()
            hdr_out['space origin'] = hdr_out['space origin'][0:3]

            nrrd.write('dwijoined.nhdr',
                       dwi,
                       header=hdr_out,
                       compression_level=1)

            # we want to use this hdr to write a new .nhdr file with corresponding data file
            # so delete old data file from the hdr
            if 'data file' in hdr_out.keys():
                del hdr_out['data file']
            elif 'datafile' in hdr_out.keys():
                del hdr_out['datafile']

            if 'content' in hdr_out.keys():
                del hdr_out['content']

            logging.info('Dice the DWI')

            # Since fslmerge works along the 3rd axis only, dicing also has to be along that axis
            # So, use `unu permute` to reorient the volumes to be stacked along 3rd axis only
            # Include this issue in the tutorial
            (unu['convert', '-t', 'int16', '-i', 'dwijoined.nhdr']
             | unu['dice', '-a', '3', '-o', 'Diffusion-G'])()
            vols = tmpdir.glob('Diffusion-G*.nrrd')
            vols.sort()

            logging.info('Extract the B0')
            bse_py('-i', 'dwijoined.nhdr', '-o', 'b0.nrrd')
            ConvertBetweenFileFormats('b0.nrrd', 'b0.nii.gz', 'short')

            logging.info('Register each volume to the B0')

            # use the following multi-processed loop
            pool = Pool(int(self.nproc))
            res = pool.map_async(_Register_vol, vols)
            volsRegistered = res.get()
            pool.close()
            pool.join()

            # or use the following for loop
            # volsRegistered = []
            # for vol in vols:
            #     volnii = vol.with_suffix('.nii.gz')
            #     ConvertBetweenFileFormats(vol, volnii, 'short')
            #     logging.info('Run FSL flirt affine registration')
            #     flirt('-interp' ,'sinc'
            #           ,'-sincwidth' ,'7'
            #           ,'-sincwindow' ,'blackman'
            #           ,'-in', volnii
            #           ,'-ref', 'b0.nii.gz'
            #           ,'-nosearch'
            #           ,'-o', volnii
            #           ,'-omat', volnii.with_suffix('.txt', depth=2)
            #           ,'-paddingsize', '1')
            #     volsRegistered.append(volnii)

            fslmerge('-t', 'EddyCorrect-DWI', volsRegistered)
            transforms = tmpdir.glob('Diffusion-G*.txt')
            transforms.sort()

            # nibabel loading can be avoided by setting 'data file' = EddyCorrect-DWI.nii.gz
            # and 'byteskip' = -1
            # Tashrif updated Pynrrd package to properly handle that
            new_dwi = nib.load('EddyCorrect-DWI.nii.gz').get_data()

            logging.info('Extract the rotations and realign the gradients')

            space = hdr_out['space'].lower()
            if (space == 'left'):
                spctoras = np.matrix([[-1, 0, 0], [0, -1, 0], [0, 0, 1]])
            else:
                spctoras = np.matrix([[1, 0, 0], [0, 1, 0], [0, 0, 1]])
            mf = np.matrix(hdr['measurement frame'])

            # Transforms are in RAS so need to do inv(MF)*inv(SPC2RAS)*ROTATION*SPC2RAS*MF*GRADIENT
            mfras = mf.I * spctoras.I
            rasmf = spctoras * mf
            for (i, t) in enumerate(transforms):

                gDir = [
                    float(num) for num in hdr_out['DWMRI_gradient_' +
                                                  '{:04}'.format(i)].split(' ')
                    if num
                ]

                logging.info('Apply ' + t)
                tra = np.loadtxt(t)
                # removes the translation
                aff = np.matrix(tra[0:3, 0:3])
                # computes the finite strain of aff to get the rotation
                rot = aff * aff.T
                # compute the square root of rot
                [el, ev] = np.linalg.eig(rot)
                eL = np.identity(3) * np.sqrt(el)
                sq = ev * eL * ev.I
                # finally the rotation is defined as
                rot = sq.I * aff
                newdir = np.dot(mfras * rot * rasmf, gDir)

                hdr_out['DWMRI_gradient_' + '{:04}'.format(i)] = ('   ').join(
                    str(x) for x in newdir.tolist()[0])

            tar('cvzf', outxfms, transforms)

            nrrd.write(self.out, new_dwi, header=hdr_out, compression_level=1)

            if self.debug:
                tmpdir.copy(
                    join(dirname(self.out), "eddy-debug-" + str(getpid())))
Exemple #27
0
 def _unpack_topo(self):
     """Unpack the sig_ping topogen output and adapt SCIONROOT."""
     artifacts = self.test_state.artifacts
     tar('-xf', './acceptance/sig_ping/gen.tar', '-C', artifacts)
     # Re-anchor $SCIONROOT to wherever the artifacts were unpacked.
     sed('-i', 's#$SCIONROOT#%s#g' % artifacts,
         artifacts / 'gen/scion-dc.yml')
Exemple #28
0
 def _unpack_topo(self):
     """Unpack the hidden_paths topogen output and adapt SCIONROOT."""
     artifacts = self.test_state.artifacts
     cmd.tar("-xf", "./acceptance/hidden_paths/gen.tar", "-C", artifacts)
     # Re-anchor $SCIONROOT to wherever the artifacts were unpacked.
     cmd.sed("-i", "s#$SCIONROOT#%s#g" % artifacts,
             artifacts / "gen/scion-dc.yml")
Exemple #29
0
    def main(self):
        """Eddy-current correction for a DWI, driven via unu/FSL.

        Converts the input to NRRD, dices it along the 3rd axis, registers
        each volume to the B0 with flirt, merges the results, rewrites the
        .nhdr header with rotated DWMRI gradients (finite-strain rotation
        mapped through the measurement frame and space orientation) and
        saves the corrected NRRD plus a tarball of the transforms.

        NOTE(review): gDir stores ``map(float, ...)`` objects — lazy on
        Python 3, so ``np.dot`` on them may misbehave there; this looks
        like Python 2 era code. Confirm the target interpreter.
        """
        self.out = local.path(self.out)
        if self.out.exists():
            if self.overwrite:
                self.out.delete()
            else:
                logging.error(
                    "{} exists, use '--force' to overwrite it".format(
                        self.out))
                sys.exit(1)
        outxfms = self.out.dirname / self.out.stem + '-xfms.tgz'
        with TemporaryDirectory() as tmpdir, local.cwd(tmpdir):
            tmpdir = local.path(tmpdir)

            unu('save', '-f', 'nrrd', '-e', 'gzip', '-i', self.dwi, '-o',
                'dwijoined.nhdr')

            logging.info('Dice the DWI')
            (unu['convert', '-t', 'int16', '-i', 'dwijoined.nhdr']
             | unu['dice', '-a', '3', '-o', 'Diffusion-G'])()
            vols = tmpdir.glob('Diffusion-G*.nrrd')
            vols.sort()

            logging.info('Extract the B0')
            bse_py('-i', 'dwijoined.nhdr', '-o', 'b0.nrrd')
            ConvertBetweenFileFormats('b0.nrrd', 'b0.nii.gz', 'short')

            logging.info('Register each volume to the B0')
            volsRegistered = []
            for vol in vols:
                volnii = vol.with_suffix('.nii.gz')
                ConvertBetweenFileFormats(vol, volnii, 'short')
                logging.info('Run FSL flirt affine registration')
                flirt('-interp', 'sinc', '-sincwidth', '7', '-sincwindow',
                      'blackman', '-in', volnii, '-ref', 'b0.nii.gz',
                      '-nosearch', '-o', volnii, '-omat',
                      volnii.with_suffix('.txt', depth=2), '-paddingsize', '1')
                volsRegistered.append(volnii)
            fslmerge('-t', 'EddyCorrect-DWI', volsRegistered)
            transforms = tmpdir.glob('Diffusion-G*.txt')
            transforms.sort()

            logging.info('Extract the rotations and realign the gradients')
            gDir = []
            header = ''
            gNum = []
            gframe = []
            with open('dwijoined.nhdr') as f:
                for line in f:
                    if line.find('DWMRI_gradient_') != -1:
                        gNum.append(line[15:19])
                        gDir.append(map(float, line[21:-1].split()))
                    elif line.find('data file:') != -1:
                        header = header + 'data file: EddyCorrect-DWI.nii.gz\n'
                    elif line.find('encoding:') != -1:
                        header = header + line + 'byteskip: -1\n'
                    elif line.find('measurement frame:') != -1:
                        header = header + line
                        mf = np.matrix([
                            map(float,
                                line.split()[2][1:-1].split(',')),
                            map(float,
                                line.split()[3][1:-1].split(',')),
                            map(float,
                                line.split()[4][1:-1].split(','))
                        ])
                    elif line.find('space:') != -1:
                        header = header + line
                        # Here I assume either lps or ras so only need to check the first letter
                        space = line.split()[1][0]
                        if (space == 'l') | (space == 'L'):
                            spctoras = np.matrix([[-1, 0, 0], [0, -1, 0],
                                                  [0, 0, 1]])
                        else:
                            spctoras = np.matrix([[1, 0, 0], [0, 1, 0],
                                                  [0, 0, 1]])
                    else:
                        header = header + line

            with open('EddyCorrect-DWI.nhdr', 'w') as f:
                f.write(header)
                i = 0
                # Transforms are in RAS so need to do inv(MF)*inv(SPC2RAS)*ROTATION*SPC2RAS*MF*GRADIENT
                mfras = mf.I * spctoras.I
                rasmf = spctoras * mf
                for t in transforms:
                    logging.info('Apply ' + t)
                    tra = np.loadtxt(t)
                    #removes the translation
                    aff = np.matrix(tra[0:3, 0:3])
                    # computes the finite strain of aff to get the rotation
                    rot = aff * aff.T
                    # Computer the square root of rot
                    [el, ev] = np.linalg.eig(rot)
                    eL = np.identity(3) * np.sqrt(el)
                    sq = ev * eL * ev.I
                    # finally the rotation is defined as
                    rot = sq.I * aff
                    newdir = np.dot(mfras * rot * rasmf, gDir[i])
                    f.write('DWMRI_gradient_' + gNum[i] + ':= ' +
                            str(newdir[0, 0]) + ' ' + str(newdir[0, 1]) + ' ' +
                            str(newdir[0, 2]) + '\n')
                    i = i + 1

            tar('cvzf', outxfms, transforms)
            unu('save', '-f', 'nrrd', '-e', 'gzip', '-i',
                'EddyCorrect-DWI.nhdr', '-o', self.out)

            if self.debug:
                tmpdir.move("eddy-debug-" + str(getpid()))