Example No. 1
import sys
from contextlib import closing
from io import StringIO
from tqdm.contrib import tmap

def incr(x):
    return x + 1

def test_map():
    """Test contrib.tmap"""
    with closing(StringIO()) as our_file:
        a = range(9)
        b = [i + 1 for i in a]
        if sys.version_info[:1] < (3,):
            # Python 2: both sides are lists
            assert tmap(lambda x: x + 1, a, file=our_file) == map(incr, a)
        else:
            # Python 3: tmap returns a lazy generator
            gen = tmap(lambda x: x + 1, a, file=our_file)
            assert gen != b
            assert list(gen) == b
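For context, tmap(function, *sequences, **tqdm_kwargs) is simply map wrapped in a tqdm progress bar; on Python 3 it is a generator, so the bar only advances as results are consumed. A minimal sketch:

from tqdm.contrib import tmap

gen = tmap(lambda x: x * 2, range(4))  # nothing runs yet: no bar is drawn
assert list(gen) == [0, 2, 4, 6]       # consuming the generator drives the bar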
Example No. 2
def make_dataset(self, files, dset):
    """Build a torchtext-style Dataset from the raw records in `files`."""
    # Flatten the per-file record lists into one stream and convert each
    # record to an Example, redrawing the progress bar at most every 0.5s.
    dataset = data.Dataset(
        list(
            tmap(lambda d: data.Example.fromdict(d, self.example_fields),
                 chain(*map(lambda f: dset[f], files)),
                 mininterval=0.5)), self.dataset_fields)
    # Free memory: drop the raw records once converted.
    for file in files:
        del dset[file]
    return dataset
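The pattern above flattens several per-file record lists into one stream with chain and converts each record under a rate-limited progress bar. A standalone sketch of just that pattern, with a hypothetical to_example converter standing in for data.Example.fromdict:

from itertools import chain
from tqdm.contrib import tmap

dset = {"train.json": [{"x": 1}, {"x": 2}], "dev.json": [{"x": 3}]}  # hypothetical raw records
to_example = dict  # stand-in for data.Example.fromdict
examples = list(tmap(to_example, chain(*(dset[f] for f in dset)),
                     mininterval=0.5))  # redraw the bar at most every 0.5s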
Example No. 3
def run(search_dir, glob="*.nii*"):
    """Interactively scroll through all images matching `glob` under `search_dir`."""
    search_dir = Path(search_dir)
    assert search_dir.is_dir()
    fnames = list(map(str, search_dir.glob(glob)))
    log.info("found %d files", len(fnames))
    fnames = nsort(fnames)  # natural sort: "2" before "10"
    # Read every image with a progress bar, then browse the stack.
    res = imscroll(list(tmap(imread, fnames)))
    plt.show()
    return res
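The same read-with-progress idiom works with any imread-compatible reader. A minimal standalone sketch, assuming imageio as the reader and plain sorted() in place of the natural sort:

from pathlib import Path
import imageio.v2 as imageio  # assumption: any imread-compatible reader works
from tqdm.contrib import tmap

fnames = sorted(map(str, Path(".").glob("*.png")))
frames = list(tmap(imageio.imread, fnames, desc="loading"))  # eager read, one bar tick per file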
Example No. 4
def run(self, workers=None):
    # The sequence of seeds determines the number of iterations. See also
    # https://numpy.org/devdocs/reference/random/parallel.html
    seeds = SEED_SEQ.spawn(self.n_draws)
    if workers:
        # Parallel path: tqdm.contrib.concurrent.process_map shows
        # progress across a process pool.
        output = concurrent.process_map(self._run, seeds,
                                        max_workers=workers)
    else:
        # Serial fallback, still with a progress bar.
        output = list(tmap(self._run, seeds))
    self.output = np.array(output)
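The seeding scheme follows the linked NumPy guide: one parent SeedSequence is spawned into independent children, one per draw, so results are reproducible whether executed serially or in a pool. A minimal sketch (SEED_SEQ and _run are assumptions standing in for the class attributes):

import numpy as np
from tqdm.contrib import tmap

SEED_SEQ = np.random.SeedSequence(12345)  # hypothetical module-level parent seed

def _run(seed):  # stand-in worker: one draw per child seed
    return np.random.default_rng(seed).normal()

seeds = SEED_SEQ.spawn(8)                   # 8 statistically independent streams
output = np.array(list(tmap(_run, seeds)))  # serial path, with progress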
Example No. 5
def parallel_executor(func, iterable, **kwargs):
    """Serial drop-in for a thread/process executor, with a progress bar."""
    if "chunksize" in kwargs:
        # Chunk size only applies to thread/process parallel executors.
        del kwargs["chunksize"]
    return list(tmap(func, iterable, **kwargs))
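Because the remaining keyword arguments are forwarded to tqdm, this serial fallback still accepts display options. A hypothetical call:

results = parallel_executor(str.upper, ["a", "b", "c"],
                            chunksize=16,               # dropped: meaningless serially
                            desc="upper", leave=False)  # forwarded to tqdm
assert results == ["A", "B", "C"]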
Example No. 6
from tqdm.contrib import tenumerate, tzip, tmap
import numpy as np

for _ in tenumerate(range(int(1e6)), desc="builtin enumerate"):
    pass

for _ in tenumerate(np.random.random((999, 999)), desc="numpy.ndenumerate"):
    pass

for _ in tzip(np.arange(1e6), np.arange(1e6) + 1, desc="builtin zip"):
    pass

mapped = tmap(lambda x: x + 1, np.arange(1e6), desc="builtin map")
assert (np.arange(1e6) + 1 == list(mapped)).all()
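Worth noting: for NumPy arrays, tenumerate dispatches to np.ndenumerate, so the indices in the second loop above are coordinate tuples rather than flat integers:

for idx, _ in tenumerate(np.zeros((2, 2))):
    pass  # idx runs over (0, 0), (0, 1), (1, 0), (1, 1)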
Example No. 7
def run(
    dir_MRI="data/ALFA_PET",
    dir_PET="data/ALFA_PET",
    dir_RR="data/Atlas/CL_2mm",
    outfile="data/ALFA_PET/Quant_realigned.csv",
    glob_PET="*_PET.nii.gz",
    glob_MRI="*_MRI.nii.gz",
):
    """
    Args:
      dir_MRI (str or Path): MRI directory
      dir_PET (str or Path): PET directory
      dir_RR (str or Path): Reference regions ROIs directory
        (standard Centiloid RR from GAAIN Centiloid website: 2mm, nifti)
      outfile (str or Path): Output quantification file
    Returns:
      fname (list[str])
      greyCerebellum (list[float])
      wholeCerebellum (list[float])
      wholeCerebellumBrainStem (list[float])
      pons (list[float])
    """
    # Lists of PET & MR images (gunzipped so SPM can read them)
    s_PET_dir = list(tmap(gunzip, Path(dir_PET).glob(glob_PET), leave=False))
    s_MRI_dir = list(tmap(gunzip, Path(dir_MRI).glob(glob_MRI), leave=False))
    if len(s_PET_dir) != len(s_MRI_dir):
        raise IndexError("Different number of PET and MR images")

    eng = get_matlab()
    dir_spm = fspath(Path(eng.which("spm")).parent)

    for d_PET, d_MRI in tzip(s_PET_dir, s_MRI_dir):
        with tic("Step 0: Reorient PET subject"):
            eng.f_acpcReorientation(d_PET, nargout=0)

        with tic("Step 0: Reorient MRI subject"):
            eng.f_acpcReorientation(d_MRI, nargout=0)

        with tic("Step 1: CorregisterEstimate"):
            eng.f_1CorregisterEstimate(d_MRI, dir_spm, nargout=0)
        # Check Reg

        with tic("Step 2: CorregisterEstimate"):
            eng.f_2CorregisterEstimate(d_MRI, d_PET, nargout=0)
        # Check Reg

        with tic("Step 3: Segment"):
            eng.f_3Segment(d_MRI, dir_spm, nargout=0)

        with tic("Step 4: Normalise"):
            d_file_norm = fspath(
                Path(d_MRI).parent / ("y_" + Path(d_MRI).name))
            eng.f_4Normalise(d_file_norm, d_MRI, d_PET, nargout=0)

    # Collect the warped ("w"-prefixed) PET images; drop the ".gz" suffix
    # from the glob since gunzip left plain ".nii" files.
    glob_w = glob_PET[:-3] if glob_PET.lower().endswith(".gz") else glob_PET
    s_PET = list(map(fspath, Path(dir_PET).glob("w" + glob_w)))
    res = eng.f_Quant_centiloid(s_PET, fspath(dir_RR), nargout=5)
    if outfile:
        with open(outfile, "w") as fd:
            f = csv_writer(fd)
            f.writerow(("Fname", "GreyCerebellum", "WholeCerebellum",
                        "WholeCerebellumBrainStem", "Pons"))
            f.writerows(zip(*res))
    return res
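The resulting CSV has one row per subject with the five columns written above; it can be loaded back for analysis, e.g. with pandas (an assumption; any CSV reader works):

import pandas as pd

df = pd.read_csv("data/ALFA_PET/Quant_realigned.csv")
print(df.head())  # columns: Fname, GreyCerebellum, WholeCerebellum, WholeCerebellumBrainStem, Pons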