def binder(positions, orientations, bl, m=4, method='ball', margin=0):
    """Calculate the binder cumulant, given positions and orientations.

    bl: the binder length scale, such that B(bl) = 1 - S4 / (3 * S2^2),
        where SN is <phi_bl^N> averaged over each block/cluster of size bl
        in the frame.
    """
    if margin:
        if margin < ss:
            margin *= ss
        center = 0.5*(positions.max(0) + positions.min(0))
        d = helpy.dist(positions, center)   # distances to center
        dmask = d < d.max() - margin
        positions = positions[dmask]
        orientations = orientations[dmask]
    if 'neigh' in method or 'ball' in method:
        tree = KDTree(positions)
        balls = tree.query_ball_tree(tree, bl)
        balls, ball_mask = helpy.pad_uneven(balls, 0, True, int)
        ball_orient = orientations[balls]
        ball_orient[ball_mask] = np.nan     # mask out padded (non-neighbor) entries
        phis = np.nanmean(np.exp(m*ball_orient*1j), 1)
        phi2 = np.dot(phis, phis) / len(phis)
        phiphi = phis*phis
        phi4 = np.dot(phiphi, phiphi) / len(phiphi)
        return 1 - phi4 / (3*phi2*phi2)
    else:
        # method == 'block' is not yet implemented; a partial draft:
        # left, right = positions[:, 0].min(), positions[:, 0].max()
        # bottom, top = positions[:, 1].min(), positions[:, 1].max()
        # xbins = np.arange(left, right + bl, bl)
        # ybins = np.arange(bottom, top + bl, bl)
        # blocks = np.rollaxis(np.indices((xbins.size, ybins.size)), 0, 3)
        # block_ind = np.column_stack([np.digitize(positions[:, 0], xbins),
        #                              np.digitize(positions[:, 1], ybins)])
        raise ValueError("method {} not implemented".format(method))
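
# A hypothetical usage sketch (not part of the original script): compute the
# Binder cumulant of one synthetic frame over a few neighborhood radii bl.
# Only the call signature of binder() above is assumed.
def _binder_demo(npts=500, seed=0):
    rng = np.random.RandomState(seed)
    positions = rng.uniform(0, 50, (npts, 2))     # random points in a 50x50 box
    orientations = rng.uniform(0, tau, npts)      # random orientation angles (radians)
    # For the standard Binder cumulant, B -> 2/3 for a sharply ordered phase
    # and B -> 0 when the order parameter is Gaussian-distributed about zero.
    return [binder(positions, orientations, bl, m=4) for bl in (2, 5, 10)]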
def pair_angles(positions, neighborhood=None, ang_type='absolute',
                margin=0, dub=2*ss):
    """Find the angles each particle makes with its neighbors.

    `ang_type` can be 'relative', 'delta', or 'absolute'
    `neighborhood` may be an integer (probably 4, 6, or 8), giving that many
        nearest neighbors, or None (which gives voronoi)
    `margin` is the width of the excluded boundary margin
    `dub` is the distance upper bound (pairs farther apart are not used)
    """
    if neighborhood is None or str(neighborhood).lower() in ['voronoi', 'delaunay']:
        # method = 'voronoi'
        tess = Delaunay(positions)
        neighbors = get_neighbors(tess, xrange(tess.npoints))
        neighbors, nmask = helpy.pad_uneven(neighbors, 0, True, int)
        nmask = ~nmask  # pad_uneven marks padding; invert so True means real neighbor
    elif isinstance(neighborhood, int):
        # method = 'nearest'
        tree = cKDTree(positions)
        # tree.query(P, N) returns the query particle and N-1 neighbors
        distances, neighbors = tree.query(positions, 1 + neighborhood,
                                          distance_upper_bound=dub)
        assert np.allclose(distances[:, 0], 0), "distance to self not zero"
        distances = distances[:, 1:]
        assert np.allclose(neighbors[:, 0], np.arange(tree.n)), \
            "first neighbor not self"
        neighbors = neighbors[:, 1:]
        nmask = np.isfinite(distances)
        neighbors[~nmask] = np.where(~nmask)[0]
    dx, dy = (positions[neighbors] - positions[:, None, :]).T
    angles = np.arctan2(dy, dx).T % tau
    assert angles.shape == neighbors.shape
    if ang_type == 'relative':
        # subtract off the angle to the nearest neighbor
        angles -= angles[:, 0, None]    # None to keep dims
    elif ang_type == 'delta':
        # sort by angle then take the difference between consecutive neighbors
        angles[~nmask] = np.inf
        angles.sort(-1)
        angles -= np.roll(angles, 1, -1)
        nmask = np.all(nmask, 1)
    elif ang_type != 'absolute':
        raise ValueError("unknown ang_type {}".format(ang_type))
    angles[~nmask] = np.nan
    if margin:
        if margin < ss:
            margin *= ss
        center = 0.5*(positions.max(0) + positions.min(0))
        d = helpy.dist(positions, center)   # distances to center
        dmask = d < d.max() - margin
        assert np.allclose(len(dmask), map(len, [angles, nmask]))
        angles = angles[dmask]
        nmask = nmask[dmask]
    return (angles % tau, nmask) + ((dmask,) if margin else ())
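
# A hypothetical example (not in the original module) of calling pair_angles:
# bond angles relative to each particle's nearest neighbor, using the four
# nearest neighbors within the default distance bound.
def _pair_angles_demo(positions):
    angles, nmask = pair_angles(positions, neighborhood=4, ang_type='relative')
    return angles[nmask]    # flat array of angles to real neighbors only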

            sys.stdout.flush()
            cos = np.cos(o)
            sin = np.sin(o)
            coscorr = corr.autocorr(cos, cumulant=False, norm=False)
            sincorr = corr.autocorr(sin, cumulant=False, norm=False)
            coscorrs.append(coscorr)
            sincorrs.append(sincorr)
    else:
        coscorrs = [corr.autocorr(np.cos(trackset['o']), cumulant=False, norm=False)
                    for trackset in tracksets.values()]
        sincorrs = [corr.autocorr(np.sin(trackset['o']), cumulant=False, norm=False)
                    for trackset in tracksets.values()]

    # Gather all the track correlations and average them
    allcorr = coscorrs + sincorrs
    allcorr = helpy.pad_uneven(allcorr, np.nan)
    tcorr = np.arange(allcorr.shape[1])/fps
    meancorr = np.nanmean(allcorr, 0)
    added = np.sum(np.isfinite(allcorr), 0)
    errcorr = np.nanstd(allcorr, 0)/np.sqrt(added - 1)
    sigma = errcorr + 1e-5*np.nanstd(errcorr)   # add something small to prevent 0
    if args.verbose:
        print "Merged nn corrs"

    # Fit to exponential decay: the pooled mean of the cos and sin
    # autocorrelations is <n(t).n(0)>/2, which decays as exp(-D_R t)/2
    # for rotational diffusion
    tmax = int(50*args.zoom)
    fmax = np.searchsorted(tcorr, tmax)
    fitform = lambda s, DR: 0.5*np.exp(-DR*s)
    fitstr = r"$\frac{1}{2}e^{-D_R t}$"
    p0 = [1]
    try:
def neighborhoods(positions, voronoi=False, size=None, reach=None,
                  tess=None, tree=None):
    """Build a list of lists or padded array of neighborhoods around each point

    Select neighbors by any combination of three basic choices:
        Voronoi/Delaunay, distance/ball, count/nearest/number

    parameters
    positions : array with shape (N, 2) or fields 'x' and 'y'
    voronoi :   whether to require pairs to be voronoi or delaunay neighbors
    size :      maximum size for each neighborhood, excluding center/self
    reach :     maximum distance to search (exclusive). scalar for distance/ball.
                for other criteria, it may be an array of distances or a str
                such as '[min|max|mean]*{factor}' where the function is taken
                over the neighbor distances
    tess, tree : optionally provide spatial.Delaunay or spatial.KDTree instance

    returns
    neighbors : list of lists (or padded array) with shape (npoints, size).
                neighbors[i] gives indices in positions to neighbors of
                positions[i], i.e., the coordinates for all neighbors of
                positions[i] are given by positions[neighbors[i]], with
                shape (size, 2)
    mask :      True if not a real neighbor
    distances : distance to the neighbor, only calculated if needed
    """
    try:
        fewest, most = size
    except TypeError:
        fewest, most = None, size
    need_dist = True
    filter_reach = reach is not None
    try:
        dub = float(reach)
        filter_reach = False
    except (TypeError, ValueError):
        dub = np.inf
    if voronoi:
        tess = tess or Delaunay(positions)
        neighbors = get_neighbors(tess, 'all')
    elif most is not None:
        tree = tree or KDTree(positions)
        distances, neighbors = tree.query(positions, np.max(most) + 1,
                                          distance_upper_bound=dub)
        distances, neighbors = distances[:, 1:], neighbors[:, 1:]   # remove self
        mask = np.isinf(distances)
        neighbors[mask] = np.where(mask)[0]
        need_dist = False
    elif reach is None:
        raise ValueError("No limits on neighborhood selection applied")
    else:
        tree = tree or KDTree(positions)
        neighbors = tree.query_ball_tree(tree, dub)
        for i in xrange(len(neighbors)):
            neighbors[i].remove(i)      # remove self
    if need_dist:
        ix = np.arange(len(positions))[:, None]
        neighbors, mask = helpy.pad_uneven(neighbors, ix, True, int)
        distances = distance.cdist(positions, positions)[ix, neighbors]
        distances[mask] = np.inf
        sort = distances.argsort(1)
        distances, neighbors = distances[ix, sort], neighbors[ix, sort]
    if isinstance(reach, basestring):
        fun, fact = reach.split('*') if '*' in reach else (reach, 1)
        fun = {'mean': np.nanmean, 'min': np.nanmin,
               'max': np.nanmax, 'median': np.nanmedian}[fun]
        fact = float(fact)
        reach = fun(np.where(mask, np.nan, distances), 1, keepdims=True)*fact
    if filter_reach:
        mask[distances >= reach] = True
        distances[mask] = np.inf
    if fewest is not None:
        mask[(~mask).sum(1) < fewest] = True
    if np.iterable(most):
        extra = np.clip(mask.shape[1] - most, 0, None)
        i = np.where(extra)
        extra = extra[i]
        i = np.repeat(i[0], extra)
        j = mask.shape[1] - np.concatenate(map(range, extra)) - 1
        mask[i, j] = True
        most = most.max()
    return neighbors[:, :most], mask[:, :most], distances[:, :most]
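
# A hypothetical usage sketch (not in the original module): the selection
# criteria can be combined, e.g., keep at most 8 nearest neighbors per point,
# but only those closer than 1.5 times that point's mean neighbor distance.
def _neighborhoods_demo(positions):
    neighbors, mask, dists = neighborhoods(positions, size=8, reach='mean*1.5')
    counts = (~mask).sum(1)     # number of real neighbors kept for each point
    return neighbors, mask, dists, counts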
if __name__ == '__main__' and args.nn:
    # Calculate the <nn> correlation for all the tracks in a given dataset
    # TODO: fix this to combine multiple datasets (more than one prefix)
    data, trackids, odata, omask = helpy.load_data(prefix, True, False)
    tracksets, otracksets = helpy.load_tracksets(data, trackids, odata, omask,
                                                 min_length=args.stub)
    coscorrs = [corr.autocorr(np.cos(otrackset), cumulant=False, norm=False)
                for otrackset in otracksets.values()]
    sincorrs = [corr.autocorr(np.sin(otrackset), cumulant=False, norm=False)
                for otrackset in otracksets.values()]

    # Gather all the track correlations and average them
    allcorr = coscorrs + sincorrs
    allcorr = helpy.pad_uneven(allcorr, np.nan)
    tcorr = np.arange(allcorr.shape[1])/fps
    meancorr = np.nanmean(allcorr, 0)
    errcorr = np.nanstd(allcorr, 0)/np.sqrt(len(allcorr))
    if verbose:
        print "Merged nn corrs"

    # Fit to exponential decay: the pooled mean of the cos and sin
    # autocorrelations is <n(t).n(0)>/2 = exp(-D_R t)/2 for rotational diffusion
    tmax = 50
    fmax = np.searchsorted(tcorr, tmax)
    fitform = lambda *args, **kwargs: 0.5*corr.exp_decay(*args, **kwargs)
    popt, pcov = curve_fit(fitform, tcorr[:fmax], meancorr[:fmax], p0=[1],
                           sigma=errcorr[:fmax].mean() + errcorr[:fmax])
    D_R = 1/popt[0]
    print 'D_R: {:.4f}'.format(D_R)
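
# A minimal self-check sketch, not part of the original analysis: simulate pure
# rotational diffusion and recover D_R from the same 0.5*exp(-D_R*t) fit used
# above.  It assumes corr.autocorr(x, cumulant=False, norm=False) returns the
# un-normalized time-averaged correlation <x(t)x(t+s)>, as the fit above does.
def _check_DR_fit(D_R_true=0.1, fps=5.0, nframes=10000, seed=0):
    rng = np.random.RandomState(seed)
    dt = 1.0/fps
    # Brownian rotation: orientation steps have variance 2*D_R*dt
    theta = np.cumsum(np.sqrt(2*D_R_true*dt)*rng.randn(nframes))
    coscorr = corr.autocorr(np.cos(theta), cumulant=False, norm=False)
    sincorr = corr.autocorr(np.sin(theta), cumulant=False, norm=False)
    meancorr = 0.5*(coscorr + sincorr)      # ~ 0.5*exp(-D_R*s)
    t = np.arange(len(meancorr))/fps
    fmax = np.searchsorted(t, 50)
    popt, pcov = curve_fit(lambda s, DR: 0.5*np.exp(-DR*s),
                           t[:fmax], meancorr[:fmax], p0=[1])
    return popt[0]      # should be close to D_R_true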