# Imports used by this method (a method of an Imaris/HDF5 wrapper class):
import h5py
from itertools import compress

from dask.diagnostics import ProgressBar


def createPyramidLevel(self, resolution=0, subdiv=(1, 2, 2), quiet=False):
    """Add a level to a multi-level pyramid.

    This function is provided because TeraStitcher does not offer
    enough control over the sampling strategy for Imaris files.
    """
    # Find all of the HDF5 datasets under the specified resolution group.
    self._subdiv = subdiv
    datasetnames = list()
    resin = 'ResolutionLevel ' + str(resolution)
    resout = 'ResolutionLevel ' + str(resolution + 1)
    prf = '/DataSet/' + resin
    self._file_object[prf].visit(datasetnames.append)
    tt = [
        isinstance(self._file_object[prf + '/' + x], h5py.Dataset)
        for x in datasetnames
    ]
    res = list(compress(datasetnames, tt))
    # Keep only the dataset paths ending in '/Data'.
    tt = [x.endswith('/Data') for x in res]
    res = list(compress(res, tt))
    outpaths = ['/DataSet/' + resout + '/' + x for x in res]
    inpaths = [prf + '/' + x for x in res]
    pbar = ProgressBar()
    for inpath, outpath in zip(inpaths, outpaths):
        if not quiet:
            print(inpath)
            pbar.register()
        self._subdivide(self._file_object, inpath, outpath)
        if not quiet:
            pbar.unregister()
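# `_subdivide` itself is not shown above. The sketch below is a minimal,
# hypothetical version of the kind of downsampling it might perform,
# assuming plain strided decimation by the `subdiv` factors; the helper
# name and signature are illustrative, not the original implementation.
import h5py

def subdivide_sketch(f: h5py.File, inpath: str, outpath: str,
                     subdiv=(1, 2, 2)):
    data = f[inpath][...]                       # load the source level
    sz, sy, sx = subdiv
    down = data[::sz, ::sy, ::sx]               # naive strided decimation
    f.require_group(outpath.rsplit('/', 1)[0])  # ensure parent groups exist
    f.create_dataset(outpath, data=down, compression='gzip')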
Example #2
from typing import Optional

def show_progress(arr, msg: Optional[str] = None, nthreads: int = 1):
    from dask.diagnostics import ProgressBar

    if msg is not None:
        logger.info(msg)
    pbar = ProgressBar()
    pbar.register()
    res = controlled_compute(arr, nthreads)
    pbar.unregister()
    return res
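# `controlled_compute` is defined elsewhere in the original codebase. A
# plausible minimal stand-in (an assumption, not the original code) simply
# computes the collection on the threaded scheduler with a bounded pool:
def controlled_compute(arr, nthreads: int = 1):
    return arr.compute(scheduler='threads', num_workers=nthreads)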
Example #3
import multiprocessing as mp
from pathlib import Path

import dask.bag as db
import pandas as pd
from dask.diagnostics import ProgressBar


def main():
    paths = list(Path(args.dir).rglob("*.txt"))  # `args` comes from the script's CLI parsing
    pbar = ProgressBar()
    pbar.register()
    a_bag = db.from_sequence(paths, npartitions=mp.cpu_count())
    a_bag = a_bag.map(parse_path)
    frame_data = a_bag.compute()
    pbar.unregister()

    frame = pd.DataFrame(frame_data)
    frame.to_pickle(args.out)
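# `parse_path` is defined elsewhere in the original script. A hypothetical
# stand-in that fits the pipeline above: parse one text file into a dict,
# so the computed bag becomes one DataFrame row per file.
def parse_path(a_path):
    text = a_path.read_text()
    return {"path": str(a_path), "n_lines": text.count("\n")}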
Example #4
def test_register(capsys):
    try:
        p = ProgressBar()
        p.register()

        assert Callback.active

        get_threaded(dsk, "e")
        check_bar_completed(capsys)

        p.unregister()

        assert not Callback.active
    finally:
        Callback.active.clear()
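# The register()/unregister() pair above is equivalent to using ProgressBar
# as a context manager, which dask.diagnostics supports directly and which
# removes the callback even if the computation raises:
from dask.diagnostics import ProgressBar

with ProgressBar():
    get_threaded(dsk, "e")  # the bar prints while this computes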
Example #5
def test_register(capsys):
    try:
        p = ProgressBar()
        p.register()

        assert _globals['callbacks']

        get(dsk, 'e')
        check_bar_completed(capsys)

        p.unregister()

        assert not _globals['callbacks']
    finally:
        _globals['callbacks'].clear()
Example #6
def test_register(capsys):
    try:
        p = ProgressBar()
        p.register()

        assert _globals['callbacks']

        get_threaded(dsk, 'e')
        check_bar_completed(capsys)

        p.unregister()

        assert not _globals['callbacks']
    finally:
        _globals['callbacks'].clear()
Example #7
def test_register(capsys):
    try:
        p = ProgressBar()
        p.register()

        assert _globals['callbacks']

        get(dsk, 'e')
        out, err = capsys.readouterr()
        bar, percent, time = [i.strip() for i in out.split('\r')[-1].split('|')]
        assert bar == "[########################################]"
        assert percent == "100% Completed"

        p.unregister()

        assert not _globals['callbacks']
    finally:
        _globals['callbacks'].clear()
Example #8
from typing import Optional

def show_progress(arr, msg: Optional[str] = None, nthreads: int = 1):
    """
    Performs a computation with Dask and shows a progress bar.

    Args:
        arr: Dask array (or other Dask collection) to compute
        msg: message to log, default None
        nthreads: number of threads to use for computation, default 1

    Returns:
        Result of the computation.
    """
    from dask.diagnostics import ProgressBar

    if msg is not None:
        logger.info(msg)
    pbar = ProgressBar()
    pbar.register()
    res = controlled_compute(arr, nthreads)
    pbar.unregister()
    return res
Example #9
import os
import time

import matplotlib as mpl
import matplotlib.pyplot as plt
import numpy as np
import xarray as xr
from dask.diagnostics import ProgressBar
# Assumption: the comment near .compute(get=get) below suggests dask's
# multiprocessing scheduler; FramePrint and cd are helpers defined
# elsewhere in the original module.
from dask.multiprocessing import get


def Movie(
        da,
        odir,
        varname=None,
        framedim="time",
        moviename="movie",
        clim=None,
        cmap=None,
        bgcolor=np.array([1, 1, 1]) * 0.3,
        framewidth=1280,
        frameheight=720,
        dpi=100,
        lon=None,
        lat=None,
        dask=True,
        delete=True,
        ffmpeg=True,
        plot_style="simple",
        norm=mpl.colors.Normalize(),
        progbar=False,
):
    # Set defaults:
    if not ffmpeg and delete:
        raise RuntimeError("raw picture deletion only makes sense "
                           "if ffmpeg conversion is enabled")

    if not isinstance(da, xr.DataArray):
        raise RuntimeError("input has to be an xarray DataArray, instead "
                           "is " + str(type(da)))

    if not os.path.exists(odir):
        os.makedirs(odir)

    # Infer defaults from data
    if clim is None:
        print("clim will be inferred from data, this can take very long...")
        clim = [da.min(), da.max()]
    if cmap is None:
        cmap = plt.cm.viridis

    if plot_style in ["map"]:
        if None in [lon, lat]:
            raise RuntimeError("map plotting requires lon and lat")
        else:
            lons = np.array(da[lon].data)
            lats = np.array(da[lat].data)

            if len(lons.shape) != 2:
                lons, lats = np.meshgrid(lons, lats)

            # use a name that does not shadow the time module (needed below)
            time_data = np.array(da["time"].data)

    else:
        lons = None
        lats = None
        time_data = None

    # Annnd here we go
    print("+++ Execute plot function +++")
    if dask:
        data = da.data
        frame_axis = da.get_axis_num(framedim)
        drop_axis = [da.get_axis_num(a) for a in da.dims if not a == framedim]
        chunks = list(data.shape)
        chunks[frame_axis] = 1
        data = data.rechunk(chunks)
        if progbar:
            pbar = ProgressBar()
            pbar.register()
        data.map_blocks(
            FramePrint,
            chunks=[1],
            drop_axis=drop_axis,
            dtype=np.float64,
            dask=dask,
            frame_axis=frame_axis,
            odir=odir,
            cmap=cmap,
            clim=clim,
            framewidth=framewidth,
            frameheight=frameheight,
            bgcolor=bgcolor,
            plot_style=plot_style,
            lons=lons,
            lats=lats,
            time=time_data,
            norm=norm,
            dpi=dpi,
        ).compute(get=get)
        if progbar:
            pbar.unregister()
    # The .compute(get=get) call is some dask 'magic': it parallelizes the
    # print function with processes rather than threads, which is a lot
    # faster for custom functions. (Recent dask versions removed the `get=`
    # keyword; .compute(scheduler="processes") is the equivalent.)
    else:
        # do it with a simple for loop...can this really be quicker?
        print("This is slow! Do it in dask!")
        for ii in range(0, len(da.time)):
            start_time = time.time()
            da_slice = da[{framedim: ii}]
            # fig,ax,h = FramePrint(da_slice,
            FramePrint(
                da_slice,
                frame=ii,
                odir=odir,
                cmap=cmap,
                clim=clim,
                framewidth=framewidth,
                frameheight=frameheight,
                bgcolor=bgcolor,
                plot_style=plot_style,
                lons=lons,
                lats=lats,
                norm=norm,
                dpi=dpi,
            )
            if ii % 100 == 0:
                remaining_time = (len(da.time) - ii) * (time.time() -
                                                        start_time) / 60
                print("FRAME---%04d---" % ii)
                print("Estimated time left : %d minutes" % remaining_time)

    # -framerate is an input option for the image sequence, so it precedes -i
    query = ('ffmpeg -y -framerate 10 -i "frame_%05d.png" '
             '-c:v libx264 -preset veryslow -crf 6 -pix_fmt yuv420p '
             '"' + moviename + '.mp4"')

    with cd(odir):
        if ffmpeg:
            print("+++ Convert frames to video +++")
            excode = os.system(query)
            if excode == 0 and delete:
                os.system("rm *.png")
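# Hypothetical usage of Movie (the file and variable names are
# illustrative): render one PNG per time step from a chunked DataArray,
# with a progress bar, then let ffmpeg stitch frames_out/movie.mp4.
da = xr.open_dataarray("sst.nc", chunks={"time": 1})
Movie(da, "frames_out", framedim="time", progbar=True)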
Example #10
        X, Y = self.make_dataset(table=self.t, var='SB', additional=['L_MAX', 'LAYER', 'SOL_Z'])
        X, Y = self.duplicate_dataset(X, Y)
        self.SB = continuous.model('SB', X, Y, x, logger=logger, load_save=True)
        del X, Y

        X, Y = self.make_dataset(table=self.t, var='CS', additional=['L_MAX', 'LAYER', 'SOL_Z', 'SOL_SAND'])
        X, Y = self.duplicate_dataset(X, Y)
        x = self.modify_dataset(x, series=self.SOL_Z.Y_mod, name='SOL_SAND')
        self.CS = continuous.model('CS', X, Y, x, logger=logger, load_save=True)
        del X, Y

        X, Y = self.make_dataset(table=self.t, var='FS', additional=['L_MAX', 'LAYER', 'SOL_Z', 'SOL_SAND'])
        X, Y = self.duplicate_dataset(X, Y)
        self.FS = continuous.model('FS', X, Y, x, logger=logger, load_save=True)
        del X, Y, x

        self.write_results()

if __name__ == '__main__':
    from dask.diagnostics import ProgressBar
    from dask import config
    from multiprocessing import freeze_support
    freeze_support()
    pbar = ProgressBar()
    pbar.register()
    config.set(scheduler='processes')
    main(r"C:\Users\putzr\Documents\GitHub\sleepy\model\training.txt",
         r"C:\Users\putzr\Documents\GitHub\sleepy\model\modelling.txt").run()
    pbar.unregister()
Example #11
def get(*args, **kwargs):
    pbar = ProgressBar()
    pbar.register()
    out = client.get(*args, **kwargs)
    pbar.unregister()
    return out
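# Note: dask.diagnostics.ProgressBar only instruments the local (threaded,
# multiprocessing, synchronous) schedulers, so wrapping a distributed
# client.get as above will not show task progress. With dask.distributed,
# the usual alternative is its own progress reporter (`arr` here is an
# illustrative dask collection):
from dask.distributed import progress

future = client.compute(arr)
progress(future)          # live progress display for the distributed run
result = future.result()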