Example #1
from copy import deepcopy

import numpy as np
from skimage.filters import median
from skimage.morphology import closing

import datacube_tools  # project-local persistence/running-difference tools


def processing_ndarray(data, median_disks, closing_disks,
                       histogram_clip=(0.0, 99.0), func=np.sqrt):
    """Clean a datacube to isolate wavefront-like features.

    :param data: 3d ndarray of the form (ny, nx, nt)
    :param median_disks: structuring elements used for median noise cleaning
    :param closing_disks: structuring elements used for morphological closing
    :param histogram_clip: lower and upper percentiles used to clip the data
    :param func: function used to rescale the data into a convenient range
    :return: an AWARE-processed datacube and a boolean mask marking where
        the input data were not finite
    """
    nt = data.shape[2]

    # Running difference of the persistence-transformed datacube (RDPI)
    rdpi = datacube_tools.running_difference(datacube_tools.persistence(data))

    # Zero out negative values before rescaling, since the default scaling
    # function (np.sqrt) is not defined for negative inputs.
    rdpi[rdpi < 0] = 0.0
    mc_data = func(rdpi)

    # Percentile limits used to normalize the rescaled data
    clip_limit = np.nanpercentile(mc_data, histogram_clip)

    # Rescale the data using the input function, and normalize so that the
    # lower clip limit maps to zero and the upper clip limit maps to one.
    f_data = (mc_data - clip_limit[0]) / (clip_limit[1] - clip_limit[0])

    # Replace the nans with zeros - the reason for doing this rather than
    # something more sophisticated is that nans will not contribute
    # greatly to the final answer.  The nans are put back in at the end
    # and get carried through in the maps.
    nans_here = np.logical_not(np.isfinite(f_data))
    nans_replaced = deepcopy(f_data)
    nans_replaced[nans_here] = 0.0

    # Final datacube
    processed_datacube = np.zeros_like(data)

    for t in range(0, nt):
        # Use the NaN-replaced data so the filters see only finite values.
        this = nans_replaced[:, :, t]
        for d in range(0, len(median_disks)):

            # Get rid of noise by applying the median filter.  Multiply by
            # 1.0 to guarantee a floating point array for use with the
            # morphological closing operation.
            new_d = 1.0 * median(this, median_disks[d])

            # Apply the morphological closing operation to rejoin separated
            # parts of the wave front.
            new_d = closing(new_d, closing_disks[d])

            # Sum the results over all the cleaning scales
            processed_datacube[:, :, t] += new_d

    return processed_datacube, nans_here
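
For illustration, a call to this function might look like the sketch below. The data shape, the disk radii, and the use of skimage.morphology.disk to build the structuring elements are assumptions made here for the example; they are not taken from the AWARE source.

import numpy as np
from skimage.morphology import disk

# Hypothetical inputs: a small random datacube and two cleaning scales.
ny, nx, nt = 64, 64, 10
data = np.random.rand(ny, nx, nt)
median_disks = [disk(3), disk(5)]
closing_disks = [disk(3), disk(5)]

processed, nan_mask = processing_ndarray(data, median_disks, closing_disks)
print(processed.shape, nan_mask.sum())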
Example #2
import os

import matplotlib.pyplot as plt
import numpy as np

import datacube_tools

# Simulated time axis (assumed values; the original snippet does not
# define nt, t or osc_per)
nt = 300
t = np.arange(nt, dtype=float)

# Oscillation period in seconds
osc_per = 50.0

# Linear amplitude
lin_amp = 4.0

# Noise level
noise_amp = 1.5

# Oscillatory component
osc = 1.0 * np.sin(2 * np.pi * t / osc_per)

# Linear trend
linear = lin_amp * t / np.max(t)

# Gaussian noise
noise = noise_amp * np.random.normal(size=nt)

# Simulated signal: oscillation plus trend plus noise
data = osc + linear + noise

# Put the 1d time series into the (ny, nx, nt) datacube layout expected
# by datacube_tools
npdata = np.zeros((1, 1, nt))
npdata[0, 0, :] = data[:]

# Apply the persistence transform
persistence = datacube_tools.persistence(npdata)


# Make the figure
plt.plot(t, data, label='simulated data $f(t)$')
plt.plot(t, persistence[0, 0, :], label='persistence transform $P(t)$')
plt.legend(loc='lower right', framealpha=0.5)
plt.xlabel('time (seconds)')
plt.ylabel('data (arbitrary units)')
plt.savefig(os.path.expanduser('~/projects/eitwave-paper/persistence_explanation.eps'))
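
The two datacube_tools helpers used in these examples are project-local, so their implementations are not shown here. A minimal sketch consistent with the behaviour on display (P(t) never decreases, and processing_ndarray expects the running difference to preserve the time dimension) might look as follows; this is an assumption about the transforms, not the project's actual code.

import numpy as np

def persistence_sketch(dc):
    # Running maximum along the time axis: each pixel keeps the largest
    # value it has reached so far, so the output never decreases in time.
    return np.maximum.accumulate(dc, axis=2)

def running_difference_sketch(dc):
    # Frame-to-frame difference, padded so the output has the same number
    # of frames as the input (the first frame is left at zero).
    out = np.zeros_like(dc)
    out[:, :, 1:] = dc[:, :, 1:] - dc[:, :, :-1]
    return out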