Example #1
def test_calc_data_fast_uint8():
    data = da.random.randint(
        0,
        100,
        size=(1_000, 10, 10),
        chunks=(1, 10, 10),
        dtype=np.uint8,
    )
    # Values are all < 100, yet the range is the full uint8 [0, 255]:
    # the result is derived from the dtype, not from scanning the data.
    assert calc_data_range(data) == [0, 255]
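The examples on this page call calc_data_range without showing their imports. A minimal preamble they appear to assume, sketched here (the napari import path is a guess and has moved between releases):

import time

import dask.array as da
import numpy as np

# Assumed location of calc_data_range; verify against your napari version.
from napari.layers.utils.layer_utils import calc_data_range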
Example #2
def test_calc_data_range():
    # all zeros should return [0, 1] by default
    data = np.zeros((10, 10))
    clim = calc_data_range(data)
    assert np.all(clim == [0, 1])

    # all ones should return [0, 1] by default
    data = np.ones((10, 10))
    clim = calc_data_range(data)
    assert np.all(clim == [0, 1])

    # return min and max
    data = np.random.random((10, 15))
    data[0, 0] = 0
    data[0, 1] = 2
    clim = calc_data_range(data)
    assert np.all(clim == [0, 2])

    # return min and max for 3D data
    data = np.random.random((6, 10, 15))
    data[0, 0, 0] = 0
    data[0, 0, 1] = 2
    clim = calc_data_range(data)
    assert np.all(clim == [0, 2])

    # Try large data
    data = np.zeros((1000, 2000))
    data[0, 0] = 0
    data[0, 1] = 2
    clim = calc_data_range(data)
    assert np.all(clim == [0, 2])

    # Try large multidimensional data
    data = np.zeros((3, 1000, 1000))
    data[0, 0, 0] = 0
    data[0, 0, 1] = 2
    clim = calc_data_range(data)
    assert np.all(clim == [0, 2])
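Read together, the assertions above pin down the contract: calc_data_range returns the array's [min, max], except that constant data falls back to [0, 1]. A minimal sketch satisfying exactly these tests (not the library's real implementation, which also subsamples large arrays so the dask-backed tests below stay fast) might be:

import numpy as np

def calc_data_range_sketch(data):
    # Hypothetical reference implementation of the behavior the tests assert.
    min_val, max_val = float(np.min(data)), float(np.max(data))
    if min_val == max_val:
        # Constant data (all zeros, all ones, ...) gets the default range.
        min_val, max_val = 0.0, 1.0
    return [min_val, max_val]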
Example #3
def test_calc_data_range_fast_big_plane():
    val = calc_data_range(data_dask_plane)
    assert len(val) > 0
Example #4
def test_calc_data_range_fast_big():
    val = calc_data_range(data_dask)
    assert len(val) > 0
Example #5
def test_calc_data_range_fast(data):
    val = calc_data_range(data)
    assert len(val) > 0
Example #6
def test_calc_data_range_fast(data):
    now = time.monotonic()
    val = calc_data_range(data)
    assert len(val) > 0
    elapsed = time.monotonic() - now
    assert elapsed < 5, "test took too long, computation was likely not lazy"
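Examples #3-#6 reference fixtures (data, data_dask, data_dask_plane) that are not shown on this page. A plausible sketch, assuming they build dask arrays large enough that an eager min/max scan would miss Example #6's five-second budget:

import dask.array as da
import pytest

# Hypothetical stand-ins for the missing fixtures: lazily evaluated arrays,
# chunked so only small pieces ever need to be materialized.
data_dask = da.random.random((10_000, 1_024, 1_024), chunks=(1, 1_024, 1_024))
data_dask_plane = da.random.random((50_000, 50_000), chunks=(1_024, 1_024))

@pytest.fixture
def data():
    return data_dask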
Example #7
def s2p_reader(pipeline_params, image_path, snr_path, trace_path, cell_path,
               spikes_path):
    stat_all = np.load(pipeline_params, allow_pickle=True)
    is_cell = np.load(cell_path, allow_pickle=True)
    f_traces = np.load(trace_path, allow_pickle=True)
    spikes = np.load(spikes_path)

    # load the shifts
    ops = np.load("ops.npy", allow_pickle=True).item()
    y_offset = ops["yoff"]
    x_offset = ops["xoff"]
    offsets = np.vstack((y_offset, x_offset)).T

    def translate_slice(array, offsets, block_info=None):
        # Apply the per-frame rigid registration shift with np.roll; block_info
        # tells us which time slice this single-frame block corresponds to.
        if block_info is not None:
            array_location = block_info[None]["array-location"]
            t_slice = array_location[0][0]
            registered_array = np.roll(
                array,
                (
                    -np.int16(offsets[t_slice][0]),
                    -np.int16(offsets[t_slice][1]),
                ),
                axis=(1, 2),
            )

        else:
            registered_array = array

        return registered_array

    # The file handle is deliberately left open so dask can read planes lazily.
    f = h5py.File(image_path, "r")
    im = f["MSession_0/MUnit_0/Channel_0"]
    im_shape = im.shape
    da_im = da.from_array(im, chunks=(1, im_shape[-2], im_shape[-1]))
    data_range = calc_data_range(da_im)
    im_registered = da_im.map_blocks(translate_slice, offsets=offsets)

    # load SNR
    snr_df = pd.read_csv(snr_path)
    snr = snr_df["0"].values

    cell_mask_indices = create_cell_mask_indices(stat_all, ops)
    initial_state = is_cell[:, 0].astype(bool)  # np.bool was removed in NumPy 1.24
    contour_manager = ContourManager(
        cell_mask_indices,
        initial_state=initial_state,
        im_shape=(im_shape[-2], im_shape[-1]),
    )

    snr_mask = make_scalar_mask(cell_mask_indices,
                                im_shape=(im_shape[-2], im_shape[-1]),
                                values=snr)

    return (
        im_registered,
        data_range,
        contour_manager,
        f_traces,
        snr,
        snr_mask,
        spikes,
        is_cell,
    )
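Example #7 leaves its imports and helpers implicit: it assumes numpy, pandas, h5py, and dask.array, plus project-local names (create_cell_mask_indices, ContourManager, make_scalar_mask) defined elsewhere. A hedged guess at a downstream consumer, displaying the lazily registered stack in napari with the precomputed contrast range (file names follow suite2p conventions but are hypothetical here):

import napari

im_registered, data_range, *rest = s2p_reader(
    "stat.npy",    # pipeline_params
    "data.h5",     # image_path
    "snr.csv",     # snr_path
    "F.npy",       # trace_path
    "iscell.npy",  # cell_path
    "spks.npy",    # spikes_path
)
viewer = napari.Viewer()
viewer.add_image(im_registered, contrast_limits=data_range)
napari.run()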