# imports as used in the other examples of this listing
import numpy as np
import pandas as pd
from pathlib import Path
from scripts.helper_functions import getData, getConfig, getVelocity, correctCenter, \
    filterCells, getStressStrain, fitStiffness


def plotPathList(paths):
        global ax
        paths = list(paths)
        print(paths)
        fit_data = []

        data_list = []
        for index, file in enumerate(paths):#[repetition:repetition+1]):
            output_file = Path(str(file).replace("_result.txt", "_evaluated.csv"))

            # load the data and the config
            data = getData(file)
            config = getConfig(file)

            """ evaluating data"""
            if not output_file.exists():
                #refetchTimestamps(data, config)

                getVelocity(data, config)

                # take the mean of all values of each cell
                data = data.groupby(['cell_id']).mean()

                correctCenter(data, config)

                data = filterCells(data, config)

                # reset the indices
                data.reset_index(drop=True, inplace=True)

                getStressStrain(data, config)

                #data = data[(data.stress < 50)]
                data.reset_index(drop=True, inplace=True)

                data["area"] = data.long_axis * data.short_axis * np.pi
                data.to_csv(output_file, index=False)

            data = pd.read_csv(output_file)

            #data = data[(data.area > 0) * (data.area < 2000) * (data.stress < 250)]
            #data.reset_index(drop=True, inplace=True)

            data_list.append(data)


        data = pd.concat(data_list)
        data.reset_index(drop=True, inplace=True)

        # fit the stiffness model to the pooled data (the config of the last processed file is reused)
        fitStiffness(data, config)

        #plotDensityScatter(data.stress, data.strain)
        #plotStressStrainFit(data, config)
        #plotBinnedData(data.stress, data.strain, [0, 10, 20, 30, 40, 50, 75, 100, 125, 150, 200, 250])
        #plt.title(f'{config["fit"]["p"][0] * config["fit"]["p"][1]:.2f}')
        fit_data.append([config["fit"]["p"][0], config["fit"]["p"][1], config["fit"]["p"][0] * config["fit"]["p"][1]])

        return fit_data
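A minimal usage sketch (the folder name and the call pattern are assumptions, not part of the original code): collect the *_result.txt files of one measurement folder and pass them to plotPathList.

from pathlib import Path
result_paths = sorted(Path("path/to/measurement").glob("**/*_result.txt"))  # hypothetical folder
fit_data_all = plotPathList(result_paths)
print(fit_data_all)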
Example 2
flatfield = output_path + r'/' + filename_base + '.npy'
configfile = output_path + r'/' + filename_base + '_config.txt'

#%% Setup model
# shallow model (faster)
#unet = UNet().create_model((720, 540, 1), 1, d=8)
unet = UNet().create_model((540, 720, 1), 1, d=8)

# change path for weights
unet.load_weights(
    str(
        Path(__file__).parent /
        "weights/Unet_0-0-5_fl_RAdam_20200610-141144.h5"))

#%%
config = getConfig(configfile)

batch_size = 100
print(video)
vidcap = imageio.get_reader(video)
vidcap2 = getRawVideo(video)
progressbar = tqdm.tqdm(vidcap)

cells = []

im = vidcap.get_data(0)
batch_images = np.zeros([batch_size, im.shape[0], im.shape[1]],
                        dtype=np.float32)
batch_image_indices = []
ips = 0
for image_index, im in enumerate(progressbar):
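    # NOTE: the loop body is not part of this listing; the lines below are only a
    # hedged sketch of a typical batched U-Net inference step. The normalization,
    # thresholding and cell-extraction details are assumptions, not the original code.
    batch_images[len(batch_image_indices)] = im.astype(np.float32)  # assuming a grayscale video
    batch_image_indices.append(image_index)
    if len(batch_image_indices) == batch_size:
        # Keras models expect a trailing channel axis: (batch, height, width, 1)
        probability_maps = unet.predict(batch_images[..., None])
        # ... threshold probability_maps, measure the detected cells, append them to `cells` ...
        batch_image_indices = []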
Example 3
# The results such as maximum flow speed, cell mechanical parameters, etc. are stored in
# the file 'all_data.txt' located in the same directory as this script.
import matplotlib.pyplot as plt
from matplotlib.backends.backend_pdf import PdfPages
from scripts.helper_functions import getInputFile, getConfig, getData
from scripts.helper_functions import refetchTimestamps, getVelocity, filterCells, correctCenter, getStressStrain, fitStiffness
from scripts.helper_functions import initPlotSettings, plotVelocityProfile, plotStressStrain, plotMessurementStatus
from scripts.helper_functions import storeEvaluationResults
""" loading data """
# get the results file (by config parameter or user input dialog)
datafile = getInputFile(filetype=[("txt file", '*_result.txt')])

# load the data and the config
data = getData(datafile)
config = getConfig(datafile)
""" evaluating data"""

#refetchTimestamps(data, config)

getVelocity(data, config)

# take the mean of all values of each cell
data = data.groupby(['cell_id']).mean()

correctCenter(data, config)

data = filterCells(data, config)

# reset the indices
data.reset_index(drop=True, inplace=True)
rows = 1
cols = 3
#row_index = 0
data_index = -1
dataset = datasets[0]
datafiles = dataset["datafiles"]
for data_index, datafile in enumerate(datafiles):
    data_index += 1
    paths = []
    pressures = []
    ax = None
    datafiles = dataset["datafiles"]
    #
    for index, file in enumerate(Path(datafile).glob("**/*_result.txt")):
        config = getConfig(file)
        paths.append(file)
        pressures.append(config['pressure_pa'] / 100_000)

    paths = np.array(paths)
    pressures = np.array(pressures)

    unique_pressures = np.unique(pressures)
    unique_pressures = [1, 2, 3]  #unique_pressures[unique_pressures > 0.5]
    print(unique_pressures)

    fit_data = []
    index = 1

    plt.subplot(1, 2, 1)
    f = plotPathList(paths[pressures == 1])
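    # Hedged sketch (not part of the original code): the remaining pressure groups
    # could be drawn into the prepared subplots in the same way, for example:
    for i, pressure in enumerate(unique_pressures):
        plt.subplot(rows, cols, i + 1)
        fit_data.append(plotPathList(paths[pressures == pressure]))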
Example 5
def plotPathList(paths):
    global ax
    paths = list(paths)
    print(paths)
    fit_data = []

    data_list = []
    for index, file in enumerate(paths):
        output_file = Path(str(file).replace("_result.txt", "_evaluated.csv"))

        # load the data and the config
        data = getData(file)
        config = getConfig(file)
        """ evaluating data"""
        if not output_file.exists():
            #refetchTimestamps(data, config)

            getVelocity(data, config)

            # take the mean of all values of each cell
            data = data.groupby(['cell_id']).mean()

            correctCenter(data, config)

            data = filterCells(data, config)

            # reset the indices
            data.reset_index(drop=True, inplace=True)

            getStressStrain(data, config)

            #data = data[(data.stress < 50)]
            data.reset_index(drop=True, inplace=True)

            data["area"] = data.long_axis * data.short_axis * np.pi
            data.to_csv(output_file, index=False)

        data = pd.read_csv(output_file)

        if 0:
            plt.plot(data.rp, data.angle, "o")
            plt.axhline(0)
            plt.axvline(0)
            plt.axhline(45)
            plt.axhline(-45)
            print(data.angle)
            plt.show()

        #data = data[(data.area > 0) * (data.area < 2000) * (data.stress < 250)]
        #data.reset_index(drop=True, inplace=True)

        data_list.append(data)

    data = pd.concat(data_list)
    data.reset_index(drop=True, inplace=True)

    getStressStrain(data, config)

    if 0:
        if 1:
            data.strain[(data.angle > 0) & (data.rp > 0)] *= -1
            data.strain[(data.angle < 0) & (data.rp < 0)] *= -1
        else:
            data.strain[(data.angle > 45)] *= -1
            data.strain[(data.angle < -45)] *= -1

    fits = []
    errors = []

    for i in np.arange(30, 250, 10):
        data2 = data[data.stress < i].reset_index(drop=True)
        print(i, len(data2))

        fitStiffness(data2, config)
        fits.append(config["fit"]["p"])
        errors.append(config["fit"]["err"])
        print("err", config["fit"]["err"], errors)

    plotDensityScatter(data.stress, data.strain)
    plotStressStrainFit(data, config)
    plotBinnedData(data.stress, data.strain,
                   [0, 10, 20, 30, 40, 50, 75, 100, 125, 150, 200, 250])
    #plt.title(f'{config["fit"]["p"][0] * config["fit"]["p"][1]:.2f}')
    fit_data.append(config["fit"]["p"][0] * config["fit"]["p"][1])

    return fits, errors  #fit_data
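A hypothetical use of the two return values (the variable result_paths and the plot labels are assumptions): show how the combined fit parameter p[0] * p[1] changes with the stress cutoff scanned in the loop above.

fits, errors = plotPathList(result_paths)   # result_paths: some list of *_result.txt files
cutoffs = np.arange(30, 250, 10)            # the same cutoffs as in the loop above
plt.figure()
plt.plot(cutoffs, [p[0] * p[1] for p in fits], "o-")
plt.xlabel("stress cutoff")
plt.ylabel("p[0] * p[1]")
plt.show()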
Example 6
    angles = 2 * np.pi * np.arange(num) / num
    if a != b:
        tot_size = sp.special.ellipeinc(2.0 * np.pi, e)
        arc_size = tot_size / num
        arcs = np.arange(num) * arc_size
        res = sp.optimize.root(
            lambda x: (sp.special.ellipeinc(x, e) - arcs), angles)
        angles = res.x
    return angles
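The fragment above appears to distribute num angles so that the corresponding points lie equally spaced by arc length on an ellipse, using scipy's incomplete elliptic integral of the second kind. A self-contained sketch with an assumed signature (the original definition line is not shown here):

import numpy as np
import scipy.special
import scipy.optimize

def equal_arc_angles(a, b, num):
    # parameter m of the elliptic integral for semi-axes a >= b
    m = 1.0 - (b / a) ** 2
    # start from equally spaced parametric angles
    angles = 2 * np.pi * np.arange(num) / num
    if a != b:
        # the arc length of (a*sin(t), b*cos(t)) from 0 to phi is a * ellipeinc(phi, m),
        # so equally spaced values of ellipeinc correspond to equal arc lengths
        tot_size = scipy.special.ellipeinc(2.0 * np.pi, m)
        arcs = np.arange(num) * tot_size / num
        res = scipy.optimize.root(lambda x: scipy.special.ellipeinc(x, m) - arcs, angles)
        angles = res.x
    return angles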

r_min = 5   #cells smaller than r_min (in um) will not be analyzed

video = getInputFile()

#%%
config = getConfig(video)

data = getData(video)
getVelocity(data, config)

# take the mean of all values of each cell
#data = data.groupby(['cell_id']).mean()

correctCenter(data, config)
#exit()


data = data[(data.solidity > 0.96) & (data.irregularity < 1.06)]
#data = data[(data.solidity > 0.98) & (data.irregularity < 1.02)]
data.reset_index(drop=True, inplace=True)
Example 7
def plotPathList(paths, cmap=None, alpha=None):
    global ax, global_im
    paths = list(paths)
    print(paths)
    fit_data = []

    data_list = []
    for index, file in enumerate(paths):
        output_file = Path(str(file).replace("_result.txt", "_evaluated.csv"))

        # load the data and the config
        data = getData(file)
        config = getConfig(file)
        """ evaluating data"""
        if not output_file.exists():
            #refetchTimestamps(data, config)

            getVelocity(data, config)

            # take the mean of all values of each cell
            data = data.groupby(['cell_id']).mean()

            correctCenter(data, config)

            data = filterCells(data, config)

            # reset the indices
            data.reset_index(drop=True, inplace=True)

            getStressStrain(data, config)

            #data = data[(data.stress < 50)]
            data.reset_index(drop=True, inplace=True)

            data["area"] = data.long_axis * data.short_axis * np.pi
            data.to_csv(output_file, index=False)

        data = pd.read_csv(output_file)

        #data = data[(data.area > 0) * (data.area < 2000) * (data.stress < 250)]
        #data.reset_index(drop=True, inplace=True)

        data_list.append(data)

    data = pd.concat(data_list)
    data.reset_index(drop=True, inplace=True)

    fitStiffness(data, config)

    #plotDensityScatter(data.stress, data.strain, cmap=cmap, alpha=0.5)
    def densityPlot(x, y, cmap, alpha=0.5):
        global global_im, global_index
        from scipy.stats import kde

        ax = plt.gca()

        # number of bins used to cut the plotting window into a regular grid
        nbins = np.max(x) / 10
        ybins = 20

        # Evaluate a Gaussian KDE on a regular grid over the data extent
        k = kde.gaussian_kde(np.vstack([x, y]))
        if 0:
            xi, yi = np.mgrid[x.min():x.max():nbins * 1j,
                              y.min():y.max():ybins * 1j]
            zi = k(np.vstack([xi.flatten(), yi.flatten()]))

            # plot a density
            ax.set_title('Calculate Gaussian KDE')
            ax.pcolormesh(xi,
                          yi,
                          zi.reshape(xi.shape),
                          shading='gouraud',
                          alpha=alpha,
                          cmap=cmap)
        else:
            xi, yi = np.meshgrid(
                np.linspace(-10, 250, 200), np.linspace(0, 1, 80)
            )  #np.mgrid[x.min():x.max():nbins * 1j, y.min():y.max():ybins * 1j]
            zi = k(np.vstack([xi.flatten(), yi.flatten()]))
            im = zi.reshape(xi.shape)
            if 0:
                if global_im is None:
                    global_im = np.zeros((im.shape[0], im.shape[1], 3),
                                         dtype="uint8")
                if 1:  #global_index == 1:
                    print("_____", im.min(), im.max())
                    im -= np.percentile(im, 10)
                    global_im[:, :, global_index] = im / im.max() * 255
                    print("_____", global_im[:, :, global_index].min(),
                          global_im[:, :, global_index].max())
                print("COLOR", global_index)
                global_index += 1
                if global_index == 3:
                    print(global_im.shape, global_im.dtype)
                    plt.imshow(global_im[::-1],
                               extent=[
                                   np.min(xi),
                                   np.max(xi),
                                   np.min(yi),
                                   np.max(yi)
                               ],
                               aspect="auto")
            else:
                if global_im is None:
                    global_im = []
                im -= im.min()
                im /= im.max()
                global_im.append(plt.get_cmap(cmap)(im**0.5))
                global_im[-1][:, :, 3] = im
                plt.imshow(
                    global_im[-1][::-1],
                    vmin=0,
                    vmax=1,
                    extent=[np.min(xi),
                            np.max(xi),
                            np.min(yi),
                            np.max(yi)],
                    aspect="auto")
                global_index += 1
                if global_index == 3:
                    print("COLOR", global_im[0].shape, global_im[0].min(),
                          global_im[0].max())
                    im = global_im[0] + global_im[1] + global_im[2] - 2
                    #im[im<0] = 0
                    #im[im>255] = 255
                    print("COLOR", im.shape, im.min(), im.max())
                    #plt.imshow(im[::-1], vmin=0, vmax=1, extent=[np.min(xi), np.max(xi), np.min(yi), np.max(yi)], aspect="auto")

    densityPlot(data.stress, data.strain, cmap=cmap, alpha=alpha)

    plotStressStrainFit(data, config)
    #plotBinnedData(data.stress, data.strain, [0, 10, 20, 30, 40, 50, 75, 100, 125, 150, 200, 250])
    #plt.title(f'{config["fit"]["p"][0] * config["fit"]["p"][1]:.2f}')
    fit_data.append(config["fit"]["p"][0] * config["fit"]["p"][1])

    return fit_data
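For comparison, a much simpler self-contained density scatter (this is not the project's plotDensityScatter, just a common pattern): color every point by a Gaussian kernel density estimate.

import numpy as np
import matplotlib.pyplot as plt
from scipy.stats import gaussian_kde

def simpleDensityScatter(x, y, cmap="viridis"):
    x, y = np.asarray(x), np.asarray(y)
    z = gaussian_kde(np.vstack([x, y]))(np.vstack([x, y]))  # density at every data point
    order = np.argsort(z)                                   # draw the densest points last
    plt.scatter(x[order], y[order], c=z[order], s=5, cmap=cmap)
    plt.xlabel("stress")
    plt.ylabel("strain")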