def extra_steps(self):
    with dataframes.DataStore(self.data_filename) as data:
        calc = statistics.PropertyCalculator(data)
        # Compute per-frame particle counts and duty-cycle values
        calc.count()
        calc.duty_cycle()
        # Downcast columns to save space, then drop particles near the boundary
        data.set_dtypes({'x': np.float32, 'y': np.float32, 'r': np.uint8})
        data.df = filter_near_edge(data.df, data.metadata['boundary'], 12)
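filter_near_edge is a project helper that is not shown here. Purely as an illustration of the idea, a minimal sketch of an edge filter, assuming the boundary is stored as (xmin, ymin, xmax, ymax) and that particles within edge_distance pixels of it should be removed (the real helper and metadata format may differ):

import numpy as np

def filter_near_edge_sketch(df, boundary, edge_distance):
    # Hypothetical: boundary assumed to be (xmin, ymin, xmax, ymax)
    xmin, ymin, xmax, ymax = boundary
    keep = ((df['x'] > xmin + edge_distance) & (df['x'] < xmax - edge_distance) &
            (df['y'] > ymin + edge_distance) & (df['y'] < ymax - edge_distance))
    return df[keep]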
Example #2
    def __init__(self, file):
        # Load the tracked data and attach a property calculator
        self.data = dataframes.DataStore(file)
        self.calc = statistics.PropertyCalculator(self.data)

        # Duty-cycle value for every frame, plus the sorted unique values
        self.duty = self.calc.duty()
        self.duty_unique = np.sort(np.unique(self.duty))

        self.setup_figure()
        plt.show()
Example #3
def calculate_corr_data(file=None, rmin=1, rmax=20, dr=0.02):
    if file is None:
        file = filedialogs.load_filename()
    # Strip the '.hdf5' extension and add a suffix for the output file
    new_file = file[:-5] + '_corr.hdf5'
    if not os.path.exists(new_file):
        data = dataframes.DataStore(file)
        calc = statistics.PropertyCalculator(data)
        # Correlations for all duty values over r in [rmin, rmax) in steps of dr
        res = calc.correlations_all_duties(rmin, rmax, dr)
        res = res.reset_index()
        res.to_hdf(new_file, 'df')
    else:
        print('Correlation file already exists:', new_file)
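Once the cached file exists, the saved correlations can be read back with pandas. A minimal sketch, assuming the file was written by the function above under the 'df' key:

import pandas as pd

def load_corr_data(corr_file):
    # Read back the correlation DataFrame saved under the 'df' key
    return pd.read_hdf(corr_file, 'df')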
Example #4
def calculate_angles(vectors):
    angles = np.angle(vectors[:, 0] + 1j * vectors[:, 1])
    return angles


def calculate_orders(angles, list_indices, filtered):
    # calculate summand for every angle
    step = np.exp(6j * angles)
    # set summand to zero if bond length > threshold
    step *= filtered
    list_indices -= 1
    # sum the angles and count neighbours for each particle
    stacked = np.cumsum((step, filtered), axis=1)[:, list_indices[1:]]
    stacked[:, 1:] = np.diff(stacked, axis=1)
    neighbors = stacked[1, :]
    indxs = neighbors != 0
    orders = np.zeros_like(neighbors)
    orders[indxs] = stacked[0, indxs] / neighbors[indxs]
    return orders, neighbors
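The vectorised code above evaluates, for each particle, the mean of exp(6j * theta) over its retained bonds, i.e. the local bond-orientational (hexatic) order parameter. A minimal per-particle sketch of the same quantity, assuming a plain list of bond angles for one particle (the function name is illustrative):

import numpy as np

def order_for_particle(bond_angles):
    # psi_6 for one particle: mean of exp(6i*theta) over its retained bonds
    bond_angles = np.asarray(bond_angles)
    if bond_angles.size == 0:
        return 0.0
    return np.mean(np.exp(6j * bond_angles))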


if __name__ == "__main__":
    from Generic import filedialogs
    from ParticleTracking import dataframes, statistics
    file = filedialogs.load_filename()
    data = dataframes.DataStore(file, load=True)
    calc = statistics.PropertyCalculator(data)
    calc.order()
    print(data.df.head())
    # print(data.df.dtypes)
Example #5

def run(direc, lattice_spacing=5):
    files = filedialogs.get_files_directory(direc + '/*.png')
    savename = direc + '/data.hdf5'

    N = len(files)

    # Load images
    ims = [images.load(f, 0) for f in tqdm(files, 'Loading images')]

    # Find Circles
    circles = [images.find_circles(im, 27, 200, 7, 16, 16)
               for im in tqdm(ims, 'Finding Circles')]

    # Save data
    data = dataframes.DataStore(savename, load=False)
    for f, info in tqdm(enumerate(circles), 'Adding Circles', total=N):
        data.add_tracking_data(f, info, ['x', 'y', 'r'])

    # Calculate order parameter
    calc = statistics.PropertyCalculator(data)
    calc.order()

    # Get the coarse-graining width
    cgw = get_cgw(data.df.loc[0]) / 2

    # Create the lattice points
    x = np.arange(0, max(data.df.x), lattice_spacing)
    y = np.arange(0, max(data.df.y), lattice_spacing)
    x, y = np.meshgrid(x, y)

    # Calculate the coarse order fields
    fields = [coarse_order_field(data.df.loc[f], cgw, x, y)
              for f in tqdm(range(N), 'Calculating Fields')]

    # Calculate the field threshold
    field_threshold = get_field_threshold(fields, lattice_spacing, ims[0])

    # Find the contours representing the boundary in each frame
    contours = [find_contours(f, field_threshold)
                for f in tqdm(fields, 'Calculating contours')]

    # Multiply the contours by the lattice spacing
    contours = [c * lattice_spacing for c in contours]

    # Find the angle of the image to rotate the boundary to the x-axis
    a, c, p1, p2 = get_angle(ims[0])

    # Rotate the selection points and the contours by the angle
    p1 = rotate_points(np.array(p1), c, a)
    p2 = rotate_points(np.array(p2), c, a)
    contours = [rotate_points(contour.squeeze(), c, a)
                for contour in contours]

    xmin = int(p1[0])
    xmax = int(p2[0])
    h = int(p1[1])

    # Get the heights of the fluctuations from the straight boundary
    hs = [get_h(contour, ims[0].shape, xmin, xmax, h)
          for contour in tqdm(contours, 'Calculating heights')]

    # Convert from pixels to mm: the region between xmin and xmax spans 195 mm
    L = xmax - xmin
    pixels_to_mms = 195 / L
    print('One pixel is {:.2f} mm'.format(pixels_to_mms))

    hs = [h * pixels_to_mms for h in hs]
    L = L * pixels_to_mms

    # Calculate the Fourier transform of the boundary heights for all frames
    k, yplot = get_fourier(hs, L)

    return k, yplot
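A minimal usage sketch, assuming run is called on a directory of PNG frames and the returned spectrum is plotted on logarithmic axes (the path and axis labels are illustrative):

import matplotlib.pyplot as plt

# Illustrative directory path; replace with a real folder of PNG frames
k, yplot = run('/path/to/frames', lattice_spacing=5)
plt.loglog(k, yplot)
plt.xlabel('k (1/mm)')
plt.ylabel('fluctuation spectrum')
plt.show()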