Example #1
0
def dask_load(sources, geobox, measurements, dask_chunks,
              skip_broken_datasets=False):
    """Lazily load *measurements* as dask arrays.

    No pixel data is read here; each measurement is represented by a dask
    graph that loads on compute.

    :param sources: array of dataset groups with labelled dimensions
    :param geobox: output GeoBox (extent/resolution/CRS)
    :param measurements: measurements to load
    :param dask_chunks: chunking spec passed through to dask
    :param skip_broken_datasets: ignore unreadable datasets instead of failing
    :return: storage populated with one dask array per measurement
    """
    def _lazy_array(measurement):
        # Build the dask graph for a single measurement; I/O is deferred.
        return make_dask_array(sources, geobox, measurement,
                               skip_broken_datasets=skip_broken_datasets,
                               dask_chunks=dask_chunks)

    coords = OrderedDict([(dim, sources.coords[dim]) for dim in sources.dims])
    return Datacube.create_storage(coords, geobox, measurements, _lazy_array)
Example #2
0
def dask_load(sources,
              geobox,
              measurements,
              dask_chunks,
              skip_broken_datasets=False):
    """Lazily load *measurements* as dask arrays.

    Returns storage where each measurement is a dask array; actual reads
    happen only when the arrays are computed.

    :param sources: array of dataset groups with coordinates
    :param geobox: output GeoBox (extent/resolution/CRS)
    :param measurements: measurements to load
    :param dask_chunks: chunking spec passed through to dask
    :param skip_broken_datasets: ignore unreadable datasets instead of failing
    :return: storage populated with one dask array per measurement
    """
    def _make_array(measurement):
        # Per-measurement dask graph; no I/O at construction time.
        return make_dask_array(sources,
                               geobox,
                               measurement,
                               skip_broken_datasets=skip_broken_datasets,
                               dask_chunks=dask_chunks)

    return Datacube.create_storage(sources.coords, geobox, measurements,
                                   _make_array)
Example #3
0
def xr_load(sources, geobox, measurements,
            skip_broken_datasets=False,
            use_threads=False):
    """Eagerly load *measurements* into pre-allocated storage.

    BUG FIX: ``use_threads`` was previously accepted but silently ignored
    (the old ``# TODO: re-add use_threads``); it now fans each
    (time-slice, measurement) load out to a thread pool. The default
    ``use_threads=False`` keeps the original sequential behaviour.

    :param sources: array of dataset groups with labelled dimensions
    :param geobox: output GeoBox (extent/resolution/CRS)
    :param measurements: measurements to load
    :param skip_broken_datasets: ignore unreadable datasets instead of failing
    :param use_threads: load slices concurrently in a thread pool
    :return: storage with all measurement data loaded
    """
    mk_new = get_loader(sources)

    data = Datacube.create_storage(OrderedDict((dim, sources.coords[dim]) for dim in sources.dims),
                                   geobox, measurements)

    def _load_one(index, datasets, m):
        # Fuse all datasets for this (time, measurement) cell in place
        # into the pre-allocated output slab.
        fuse_measurement(data[m.name].values[index], datasets, geobox, m,
                         mk_new=mk_new,
                         skip_broken_datasets=skip_broken_datasets)

    if use_threads:
        # Local imports: keep the top-of-file import list untouched and
        # pay the cost only when threading is requested.
        from concurrent.futures import ThreadPoolExecutor
        from multiprocessing import cpu_count

        with ThreadPoolExecutor(cpu_count() * 2) as pool:
            futures = [pool.submit(_load_one, index, datasets, m)
                       for index, datasets in np.ndenumerate(sources.values)
                       for m in measurements]
            for f in futures:
                f.result()  # propagate worker exceptions, as the sequential path does
    else:
        for index, datasets in np.ndenumerate(sources.values):
            for m in measurements:
                _load_one(index, datasets, m)

    return data
Example #4
0
def xr_load(sources,
            geobox,
            measurements,
            skip_broken_datasets=False,
            use_threads=False):
    """Eagerly load *measurements* into pre-allocated storage.

    BUG FIXES:
    - The ``ThreadPoolExecutor`` was created without ever being shut
      down, leaking worker threads; it is now a context manager.
    - ``wait(futures)`` silently discarded exceptions raised inside
      workers, hiding load failures; ``f.result()`` now re-raises them,
      matching the sequential path (which already propagates).

    :param sources: array of dataset groups with coordinates
    :param geobox: output GeoBox (extent/resolution/CRS)
    :param measurements: measurements to load
    :param skip_broken_datasets: ignore unreadable datasets instead of failing
    :param use_threads: load slices concurrently in a thread pool
    :return: storage with all measurement data loaded
    """
    mk_new = get_loader(sources)

    data = Datacube.create_storage(sources.coords, geobox, measurements)

    def _load_one(index, datasets, m):
        # Fuse all datasets for this (time, measurement) cell in place
        # into the pre-allocated output slab.
        fuse_measurement(data[m.name].values[index],
                         datasets,
                         geobox,
                         m,
                         mk_new=mk_new,
                         skip_broken_datasets=skip_broken_datasets)

    if use_threads:
        with ThreadPoolExecutor(cpu_count() * 2) as pool:
            futures = [pool.submit(_load_one, index, datasets, m)
                       for index, datasets in np.ndenumerate(sources.values)
                       for m in measurements]
            for f in futures:
                f.result()  # re-raise the first worker failure
    else:
        for index, datasets in np.ndenumerate(sources.values):
            for m in measurements:
                _load_one(index, datasets, m)

    return data