Example #1
def orient_op(orientations, m=4, positions=None, margin=0,
              ret_complex=True, do_err=False, globl=False, locl=False):
    """orient_op(orientations, m=4, positions=None, margin=0,
                 ret_complex=True, do_err=False, globl=False, locl=False)

       calculate the global m-fold particle orientational order parameter

                1   N    i m theta
        Phi  = --- SUM e          j
           m    N  j=1
    """
    if not (globl or locl):
        globl = True
        locl = orientations.ndim == 2
    # reduce orientations mod tau/m in place; exp(i*m*theta) is unchanged by this
    np.mod(orientations, tau/m, orientations)
    if margin:
        if margin < ss:
            margin *= ss
        center = 0.5*(positions.max(0) + positions.min(0))
        d = helpy.dist(positions, center)   # distances to center
        orientations = orientations[d < d.max() - margin]
    phis = np.exp(m*orientations*1j)
    if locl:
        phis = np.nanmean(phis, 1)
    if do_err:
        err = np.nanstd(phis, ddof=1)/sqrt(np.count_nonzero(~np.isnan(phis)))
    if not globl:
        return (np.abs(phis), err) if do_err else np.abs(phis)
    phi = np.nanmean(phis) if ret_complex else np.abs(np.nanmean(phis))
    if locl:
        return (np.abs(phis), phi, err) if do_err else (np.abs(phis), phi)
    return (phi, err) if do_err else phi
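As a quick sanity check of the formula in the docstring above, here is a minimal, self-contained sketch (it assumes only NumPy; the module globals ss, tau, and the helpy helpers are not needed): perfectly aligned orientations give |Phi_m| near 1, while uniformly random orientations give |Phi_m| near 0.

import numpy as np

tau = 2 * np.pi   # full turn, matching the module's convention

def global_orient_op(orientations, m=4):
    """Global m-fold order parameter: Phi_m = (1/N) sum_j exp(i*m*theta_j)."""
    return np.abs(np.nanmean(np.exp(1j * m * np.asarray(orientations))))

aligned = np.full(1000, 0.3)                     # all particles share one orientation
random_angles = np.random.uniform(0, tau, 1000)  # orientations scattered uniformly
print(global_orient_op(aligned))                 # ~1.0
print(global_orient_op(random_angles))           # ~0.0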
def bulk(positions, margin=0, full_N=None, center=None, radius=None, ss=ss):
    """ Filter marginal particles from bulk particles to reduce boundary effects
            positions:  (N, 2) array of particle positions
            margin:     width of margin, in units of pixels or particle sides
            full_N:     actual number of particles, to renormalize assuming
                        uniform distribution of undetected particles.
            center:     if known, (2,) array of center position
            radius:     if known, radius of system in pixels

        returns
            bulk_N:     the number of particles in the bulk
            bulk_mask:  a boolean mask, one entry per row of `positions`
    """
    # raise Exception("not yet tested")
    if center is None:
        center = 0.5*(positions.max(0) + positions.min(0))
    if margin < ss: margin *= ss
    d = helpy.dist(positions, center) # distances to center
    if radius is None:
        if len(positions) > 1e5:
            raise ValueError("too many points to calculate radius")
        r = cdist(positions, positions)       # distances between all pairs
        radius = np.maximum(r.max()/2, d.max()) + ss/2
    elif radius < ss:
        radius *= ss
    dmax = radius - margin
    bulk_mask = d <= dmax # mask of particles in the bulk
    bulk_N = np.count_nonzero(bulk_mask)
    if full_N:
        bulk_N *= full_N/len(positions)
    return bulk_N, bulk_mask
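A hedged usage sketch of the same bulk/margin idea on synthetic data; helpy.dist is replaced by a plain Euclidean distance, and the particle side length `side` is an illustrative stand-in for the module global ss.

import numpy as np

side = 17.0                                   # stand-in for ss: particle side length in pixels
rng = np.random.default_rng(0)

# synthetic positions filling a disk of radius 20 particle sides
r = 20 * side * np.sqrt(rng.random(500))
phi = rng.uniform(0, 2 * np.pi, 500)
positions = np.column_stack([r * np.cos(phi), r * np.sin(phi)])

center = 0.5 * (positions.max(0) + positions.min(0))
d = np.hypot(*(positions - center).T)         # distances to center (the role of helpy.dist)
radius = d.max() + side / 2
margin = 2 * side                             # exclude a rim two particles wide
bulk_mask = d <= radius - margin
print(np.count_nonzero(bulk_mask), 'of', len(positions), 'particles are in the bulk')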
Example #3
def find_start_frame(data, estimate=None, bounds=None, plot=False):
    """Determine the time of the onset of motion

    parameters
    ----------
    data :      the tracked data
    estimate :  an estimate, as frame index number, for start
    bounds :    a scalar indicating lower bound for start,
                or a two-tuple of (lower, upper) bounds for the start
    plot :      whether or not to plot the motion vs time

    returns
    -------
    start :     frame index for onset of motion
    ax :        an axes object, if plot was requested
    """
    estimate = estimate or 10
    if bounds is None:
        first = estimate // 2
        last = estimate * 100
    elif np.isscalar(bounds):
        first = bounds
        last = None
    else:
        first, last = bounds
    if last is None:
        last = first + (data['f'][-1] - first) // 3

    positions = helpy.load_trackstack(data, length=last)['xy']
    displacements = helpy.dist(positions, positions[0])
    distances = helpy.dist(np.gradient(positions, axis=0))
    ds = displacements.mean(1) * distances.mean(1)
    dm = np.minimum.accumulate(ds[::-1])[::-1] == np.maximum.accumulate(ds)
    dmi = np.nonzero(dm[first:last])[0] + first
    start = dmi[0] - 1

    if plot:
        fig, ax = plt.subplots()
        f = np.arange(len(ds))/args.fps  # frame index to seconds via module-level args.fps
        ax.plot(f, ds, '-')
        ax.plot(f[dmi], ds[dmi], '*')

    return (start, ax) if plot else start
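The onset test above can be read as follows: a frame is flagged when the running maximum of the motion signal up to that frame equals the running minimum of everything after it, which can only happen once the signal has started to grow monotonically. A self-contained sketch on a made-up signal (the data here are purely illustrative):

import numpy as np

# synthetic motion signal: flat noise for 50 frames, then monotonic growth
rng = np.random.default_rng(1)
ds = np.concatenate([rng.normal(0, 0.01, 50), np.linspace(0, 5, 150)**1.5])

running_max = np.maximum.accumulate(ds)                     # max of everything so far
running_min_after = np.minimum.accumulate(ds[::-1])[::-1]   # min of everything to come
dm = running_min_after == running_max                       # True only after motion has begun

first, last = 5, len(ds)
dmi = np.nonzero(dm[first:last])[0] + first
start = dmi[0] - 1
print('estimated onset frame:', start)                      # close to frame 50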
Example #4
def find_start_frame(data, estimate=None, bounds=None, plot=False):
    """Determine the time of the onset of motion

    parameters
    ----------
    data :      the tracked data
    estimate :  an estimate, as frame index number, for start
    bounds :    a scalar indicating lower bound for start,
                or a two-tuple of (lower, upper) bounds for the start
    plot :      whether or not to plot the motion vs time

    returns
    -------
    start :     frame index for onset of motion
    ax :        an axes object, if plot was requested
    """
    estimate = estimate or 10
    if bounds is None:
        first = estimate // 2
        last = estimate * 100
    elif np.isscalar(bounds):
        first = bounds
        last = None
    else:
        first, last = bounds
    if last is None:
        last = first + (data['f'][-1] - first) // 3

    positions = helpy.load_trackstack(data, length=last)['xy']
    displacements = helpy.dist(positions, positions[0])
    distances = helpy.dist(np.gradient(positions, axis=0))
    ds = displacements.mean(1) * distances.mean(1)
    dm = np.minimum.accumulate(ds[::-1])[::-1] == np.maximum.accumulate(ds)
    dmi = np.nonzero(dm[first:last])[0] + first
    start = dmi[0] - 1

    if plot:
        fig, ax = plt.subplots()
        f = np.arange(len(ds)) / args.fps  # frame index to seconds via module-level args.fps
        ax.plot(f, ds, '-')
        ax.plot(f[dmi], ds[dmi], '*')

    return (start, ax) if plot else start
def pair_angles(positions, neighborhood=None, ang_type='absolute', margin=0, dub=2*ss):
    """ do something with the angles a given particle makes with its neighbors

        `ang_type` can be 'relative', 'delta', or 'absolute'
        `neighborhood` may be:
            an integer (probably 4, 6, or 8), giving that many nearest neighbors,
            or None (which gives voronoi)
        `margin` is the width of excluded boundary margin
        `dub` is the distance upper bound (won't use pairs farther apart)
    """
    if neighborhood is None or str(neighborhood).lower() in ['voronoi', 'delaunay']:
        #method = 'voronoi'
        tess = Delaunay(positions)
        neighbors = get_neighbors(tess, range(tess.npoints))
        neighbors, nmask = helpy.pad_uneven(neighbors, 0, True, int)
    elif isinstance(neighborhood, int):
        #method = 'nearest'
        tree = cKDTree(positions)
        # tree.query(P, N) returns query particle and N-1 neighbors
        distances, neighbors = tree.query(positions, 1 + neighborhood,
                                          distance_upper_bound=dub)
        assert np.allclose(distances[:,0], 0), "distance to self not zero"
        distances = distances[:,1:]
        assert np.allclose(neighbors[:,0], np.arange(tree.n)), "first neighbor not self"
        neighbors = neighbors[:,1:]
        nmask = np.isfinite(distances)
        neighbors[~nmask] = np.where(~nmask)[0]
    dx, dy = (positions[neighbors] - positions[:, None, :]).T
    angles = np.arctan2(dy, dx).T % tau
    assert angles.shape == neighbors.shape
    if ang_type == 'relative':
        # subtract off angle to nearest neighbor
        angles -= angles[:, 0, None] # None to keep dims
    elif ang_type == 'delta':
        # sort by angle then take diff
        angles[~nmask] = np.inf
        angles.sort(-1)
        angles -= np.roll(angles, 1, -1)
        nmask = np.all(nmask, 1)
    elif ang_type != 'absolute':
        raise ValueError("unknown ang_type {}".format(ang_type))
    angles[~nmask] = np.nan
    if margin:
        if margin < ss: margin *= ss
        center = 0.5*(positions.max(0) + positions.min(0))
        d = helpy.dist(positions, center) # distances to center
        dmask = d < d.max() - margin
        assert len(dmask) == len(angles) == len(nmask)
        angles = angles[dmask]
        nmask = nmask[dmask]
    return (angles % tau, nmask) + ((dmask,) if margin else ())
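A minimal sketch of the nearest-neighbor branch alone, assuming random positions, k = 6 neighbors, and no distance upper bound (so no masking of missing neighbors is needed):

import numpy as np
from scipy.spatial import cKDTree

tau = 2 * np.pi
rng = np.random.default_rng(2)
positions = rng.uniform(0, 100, (200, 2))

k = 6
tree = cKDTree(positions)
# query k+1 points because the nearest "neighbor" of each point is itself
distances, neighbors = tree.query(positions, k + 1)
distances, neighbors = distances[:, 1:], neighbors[:, 1:]

# absolute bond angles from each particle to each of its k neighbors
dx, dy = (positions[neighbors] - positions[:, None, :]).T
angles = np.arctan2(dy, dx).T % tau
print(angles.shape)   # (200, 6)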
def orient_corr(positions, orientations, m=4, margin=0, bins=10):
    """ orient_corr():
        the orientational correlation function g_m(r),
        the average of cos(m * dtheta) over pairs binned by separation r
    """
    center = 0.5*(positions.max(0) + positions.min(0))
    d = helpy.dist(positions, center) # distances to center
    if margin < ss: margin *= ss
    loc_mask = d < d.max() - margin
    r = pdist(positions[loc_mask])
    ind = np.column_stack(pair_indices(np.count_nonzero(loc_mask)))
    pairs = orientations[loc_mask][ind]
    diffs = np.cos(m*dtheta(pairs, m=m))
    return bin_average(r, diffs, bins)
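The helpers pair_indices, dtheta, and bin_average are defined elsewhere in the module; a self-contained approximation of the same quantity using only NumPy and SciPy might look like the sketch below (synthetic data, no margin handling):

import numpy as np
from scipy.spatial.distance import pdist

m = 4
rng = np.random.default_rng(3)
positions = rng.uniform(0, 100, (300, 2))
orientations = rng.uniform(0, 2 * np.pi / m, 300)

r = pdist(positions)                                     # pair separations
i, j = np.triu_indices(len(positions), k=1)              # pdist's pair ordering
diffs = np.cos(m * (orientations[i] - orientations[j]))  # cos(m * dtheta) per pair

bins = 10
counts, edges = np.histogram(r, bins=bins)
sums, _ = np.histogram(r, bins=bins, weights=diffs)
g_m = sums / np.maximum(counts, 1)                       # bin-averaged correlation
print(g_m)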
Example #7
def radial_distribution(positions,
                        dr=ss / 5,
                        nbins=None,
                        dmax=None,
                        rmax=None,
                        margin=0,
                        do_err=False,
                        ss=ss):
    """ radial_distribution(positions):
        the pair correlation function g(r)
        calculated using a histogram of distances between particle pairs
        excludes pairs in margin of given width
    """
    center = 0.5 * (positions.max(0) + positions.min(0))
    d = helpy.dist(positions, center)  # distances to center
    # faster than squareform(distance.pdist(positions))
    r = distance.cdist(positions, positions)
    radius = np.maximum(r.max() / 2, d.max()) + ss / 2
    if rmax is None:
        rmax = 2 * radius  # this will have terrible statistics at large r
    if nbins is None:
        nbins = int(rmax // dr)
    if dmax is None:
        if margin < ss:
            margin *= ss
        dmax = radius - margin
    ind = pair_indices(len(positions))
    # for weighting, use areas of the annulus, which is:
    #   number * arclength * dr = N alpha r dr
    #   where alpha = 2 arccos( (r2 + d2 - R2) / 2 r d )
    cosalpha = 0.5 * (r * r + d * d - radius * radius) / (r * d)
    alpha = 2 * np.arccos(np.clip(cosalpha, -1, None))
    dmask = d <= dmax
    w = np.where(dmask, np.reciprocal(alpha * r * dr), 0)
    w = 0.5 * (w + w.T)
    assert np.all(np.isfinite(w[ind]))
    # Different ways to count `n`:
    # number of 'bulk' (inner) particles
    n = np.count_nonzero(dmask)
    # effective N from number of pairs:
    # n = 0.5*(1 + sqrt(1 + 8*np.count_nonzero(w[ind])))
    # total number of particles:
    # n = len(w)
    w *= 2 / n
    assert len(r) == len(d) == len(w) == len(positions)
    ret = np.histogram(r[ind], bins=nbins, range=(0, rmax), weights=w[ind])
    if do_err:
        return ret, np.histogram(r[ind], bins=nbins, range=(0, rmax)), n
    else:
        return ret + (n, )
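For orientation, a stripped-down g(r) with the usual ideal-gas normalization (density times shell area) looks like the sketch below; it omits the per-pair annulus weighting the function above uses to correct for the circular boundary, so it is only a rough check on uncorrelated points.

import numpy as np
from scipy.spatial.distance import pdist

rng = np.random.default_rng(4)
L = 100.0
positions = rng.uniform(0, L, (500, 2))
density = len(positions) / L**2

r = pdist(positions)
dr = 1.0
edges = np.arange(0, L / 2 + dr, dr)
counts, _ = np.histogram(r, bins=edges)

# expected pair count per shell for an ideal gas: (N/2) * density * shell area
shell_area = np.pi * (edges[1:]**2 - edges[:-1]**2)
ideal = 0.5 * len(positions) * density * shell_area
g = counts / ideal
print(g[:10])   # hovers around 1 at small r for a Poisson sample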
Example #8
def bulk(positions,
         margin=0,
         full_N=None,
         center=None,
         radius=None,
         ss=ss,
         verbose=False):
    """ Filter marginal particles from bulk particles to reduce boundary effects
            positions:  (N, 2) array of particle positions
            margin:     width of margin, in units of pixels or particle sides
            full_N:     actual number of particles, to renormalize assuming
                        uniform distribution of undetected particles.
            center:     if known, (2,) array of center position
            radius:     if known, radius of system in pixels

        returns
            bulk_N:     the (possibly fractional) number of particles in the bulk
            bulk_mask:  per-particle weight (overlap with the bulk region),
                        one entry per row of `positions`
            center, radius: the system center and radius that were used
    """
    if center is None:
        center = 0.5 * (positions.max(0) + positions.min(0))
    if margin < ss:
        margin *= ss
    d = helpy.dist(positions, center)  # distances to center
    if radius is None:
        max_sep = 0
        if len(positions) < 1e4:
            max_sep = distance.cdist(positions, positions).max() / 2
        radius = max(max_sep, d.max()) + ss / 2
    elif radius < ss:
        radius *= ss
    dmax = radius - margin
    depth = d - dmax
    bulk_mask = area_overlap(depth / ss, ss=1, method='aligned_square')
    bulk_N = bulk_mask.sum()
    # bulk_mask = bulk_mask >= 0.5   # mask of centers within bulk
    if full_N:
        bulk_N *= full_N / len(positions)
    if verbose:
        print('margin:', margin / ss, 'center:', center, 'radius:', radius)
        print('max r: ', dmax / ss, 'bulk_N:', bulk_N)
    return bulk_N, bulk_mask, center, radius
Example #9
def radial_distribution(positions, dr=ss/5, nbins=None, dmax=None, rmax=None,
                        margin=0, do_err=False, ss=ss):
    """ radial_distribution(positions):
        the pair correlation function g(r)
        calculated using a histogram of distances between particle pairs
        excludes pairs in margin of given width
    """
    center = 0.5*(positions.max(0) + positions.min(0))
    d = helpy.dist(positions, center)   # distances to center
    # faster than squareform(distance.pdist(positions))
    r = distance.cdist(positions, positions)
    radius = np.maximum(r.max()/2, d.max()) + ss/2
    if rmax is None:
        rmax = 2*radius     # this will have terrible statistics at large r
    if nbins is None:
        nbins = int(rmax//dr)
    if dmax is None:
        if margin < ss:
            margin *= ss
        dmax = radius - margin
    ind = pair_indices(len(positions))
    # for weighting, use areas of the annulus, which is:
    #   number * arclength * dr = N alpha r dr
    #   where alpha = 2 arccos( (r2 + d2 - R2) / 2 r d )
    cosalpha = 0.5 * (r*r + d*d - radius*radius) / (r * d)
    alpha = 2 * np.arccos(np.clip(cosalpha, -1, None))
    dmask = d <= dmax
    w = np.where(dmask, np.reciprocal(alpha*r*dr), 0)
    w = 0.5*(w + w.T)
    assert np.all(np.isfinite(w[ind]))
    # Different ways to count `n`:
    # number of 'bulk' (inner) particles
    n = np.count_nonzero(dmask)
    # effective N from number of pairs:
    # n = 0.5*(1 + sqrt(1 + 8*np.count_nonzero(w[ind])))
    # total number of particles:
    # n = len(w)
    w *= 2/n
    assert len(r) == len(d) == len(w) == len(positions)
    ret = np.histogram(r[ind], bins=nbins, range=(0, rmax), weights=w[ind])
    if do_err:
        return ret, np.histogram(r[ind], bins=nbins, range=(0, rmax)), n
    else:
        return ret + (n,)
Example #10
def pair_angles(xy_or_orient, neighbors, nmask, ang_type='absolute', margin=0):
    """do something with the angles a given particle makes with its neighbors

    Parameters
    xy_or_orient:  either (N, 2) array of positions or (N,) array of angles
    neighbors:  (N, k) array of k neighbors
    nmask:      mask for neighbors
    ang_type:   string, choice of 'absolute' (default), 'relative', 'delta'
    margin:     width of the excluded boundary margin

    Returns
    angles:     array of angles between neighboring pairs
    nmask:      updated neighbor mask
    dmask:      margin mask, only returned if margin > 0
    """

    if xy_or_orient.ndim == 2:
        dx, dy = (xy_or_orient[neighbors] - xy_or_orient[:, None, :]).T
        angles = np.arctan2(dy, dx).T % tau
    else:
        angles = xy_or_orient[neighbors] - xy_or_orient[:, None]
    if ang_type == 'relative':
        # subtract off angle to nearest neighbor
        angles -= angles[:, :1]
    elif ang_type == 'delta':
        # sort by angle then take diff
        angles[nmask] = np.inf
        angles.sort(-1)
        angles -= np.roll(angles, 1, -1)
        # only keep if we have all k neighbors
        nmask = np.all(nmask, 1)
    elif ang_type != 'absolute':
        raise ValueError("unknown ang_type {}".format(ang_type))
    angles[nmask] = np.nan
    if margin:
        if margin < ss:
            margin *= ss
        center = 0.5*(xy_or_orient.max(0) + xy_or_orient.min(0))
        d = helpy.dist(xy_or_orient, center)
        dmask = d < d.max() - margin
        angles = angles[dmask]
        nmask = nmask[dmask]
    return (angles % tau, nmask) + ((dmask,) if margin else ())
Example #11
def pair_angles(xy_or_orient, neighbors, nmask, ang_type='absolute', margin=0):
    """do something with the angles a given particle makes with its neighbors

    Parameters
    xy_or_orient:  either (N, 2) array of positions or (N,) array of angles
    neighbors:  (N, k) array of k neighbors
    nmask:      mask for neighbors
    ang_type:   string, choice of 'absolute' (default), 'relative', 'delta'
    margin:     width of the excluded boundary margin

    Returns
    angles:     array of angles between neighboring pairs
    nmask:      updated neighbor mask
    dmask:      margin mask, only returned if margin > 0
    """

    if xy_or_orient.ndim == 2:
        dx, dy = (xy_or_orient[neighbors] - xy_or_orient[:, None, :]).T
        angles = np.arctan2(dy, dx).T % tau
    else:
        angles = xy_or_orient[neighbors] - xy_or_orient[:, None]
    if ang_type == 'relative':
        # subtract off angle to nearest neighbor
        angles -= angles[:, :1]
    elif ang_type == 'delta':
        # sort by angle then take diff
        angles[nmask] = np.inf
        angles.sort(-1)
        angles -= np.roll(angles, 1, -1)
        # only keep if we have all k neighbors
        nmask = np.all(nmask, 1)
    elif ang_type != 'absolute':
        raise ValueError("unknown ang_type {}".format(ang_type))
    angles[nmask] = np.nan
    if margin:
        if margin < ss:
            margin *= ss
        center = 0.5 * (xy_or_orient.max(0) + xy_or_orient.min(0))
        d = helpy.dist(xy_or_orient, center)
        dmask = d < d.max() - margin
        angles = angles[dmask]
        nmask = nmask[dmask]
    return (angles % tau, nmask) + ((dmask, ) if margin else ())
def orient_op(orientations, positions, m=4, margin=0, ret_complex=True, do_err=False):
    """ orient_op(orientations, m=4)
        Returns the global m-fold particle orientational order parameter

                1   N    i m theta
        Phi  = --- SUM e          j
           m    N  j=1
    """
    # reduce orientations mod tau/m in place; exp(i*m*theta) is unchanged by this
    np.mod(orientations, tau/m, orientations)
    if margin:
        if margin < ss: margin *= ss
        center = 0.5*(positions.max(0) + positions.min(0))
        d = helpy.dist(positions, center) # distances to center
        orientations = orientations[d < d.max() - margin]
    phis = np.exp(m*orientations*1j)
    phi = phis.mean()
    if do_err:
        err = phis.std(ddof=1)/sqrt(phis.size)
        return (phi, err) if ret_complex else (np.abs(phi), err)
    else:
        return phi if ret_complex else np.abs(phi)
def distribution(positions, rmax=10, bins=10, margin=0, rectang=0):
    """ 2D histogram of pair displacement vectors (the anisotropic pair
        distribution), with each pair weighted to correct for the part of
        its ring that falls outside the circular boundary
    """
    if margin < ss: margin *= ss
    center = 0.5*(positions.max(0) + positions.min(0))
    d = helpy.dist(positions, center) # distances to center
    dmask = d < d.max() - margin
    r = cdist(positions, positions[dmask])#.ravel()
    radius = np.maximum(r.max()/2, d.max()) + ss/2
    cosalpha = 0.5 * (r**2 + d[dmask]**2 - radius**2) / (r * d[dmask])
    alpha = 2 * np.arccos(np.clip(cosalpha, -1, None))
    dr = radius / bins
    w = dr**-2 * tau/alpha
    w[~np.isfinite(w)] = 0
    if rmax < ss: rmax *= ss
    rmask = r < rmax
    displacements = positions[:, None] - positions[None, dmask] #origin must be within margin
    if rectang:
        if rectang is True:
            rectang = rectify(positions, margin=margin)[0]
        rotate2d(displacements, rectang)
    return np.histogramdd(displacements[rmask], bins=bins, weights=w[rmask])[0]
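A bare-bones sketch of the displacement histogram at the heart of this function, without the annulus weighting or the rectify/rotate2d rectification (random positions, purely illustrative):

import numpy as np

rng = np.random.default_rng(5)
positions = rng.uniform(0, 100, (300, 2))

# all pairwise displacement vectors (reference particle on the second axis)
displacements = positions[:, None, :] - positions[None, :, :]
rmax = 20.0
rmask = np.hypot(displacements[..., 0], displacements[..., 1]) < rmax

# 2D histogram of the displacements shorter than rmax
hist, edges = np.histogramdd(displacements[rmask], bins=20)
print(hist.shape)   # (20, 20)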
Example #14
def bulk(positions, margin=0, full_N=None, center=None, radius=None, ss=ss,
         verbose=False):
    """ Filter marginal particles from bulk particles to reduce boundary effects
            positions:  (N, 2) array of particle positions
            margin:     width of margin, in units of pixels or particle sides
            full_N:     actual number of particles, to renormalize assuming
                        uniform distribution of undetected particles.
            center:     if known, (2,) array of center position
            radius:     if known, radius of system in pixels

        returns
            bulk_N:     the (possibly fractional) number of particles in the bulk
            bulk_mask:  per-particle weight (overlap with the bulk region),
                        one entry per row of `positions`
            center, radius: the system center and radius that were used
    """
    if center is None:
        center = 0.5*(positions.max(0) + positions.min(0))
    if margin < ss:
        margin *= ss
    d = helpy.dist(positions, center)   # distances to center
    if radius is None:
        max_sep = 0
        if len(positions) < 1e4:
            max_sep = distance.cdist(positions, positions).max()/2
        radius = max(max_sep, d.max()) + ss/2
    elif radius < ss:
        radius *= ss
    dmax = radius - margin
    depth = d - dmax
    bulk_mask = area_overlap(depth/ss, ss=1, method='aligned_square')
    bulk_N = bulk_mask.sum()
    # bulk_mask = bulk_mask >= 0.5   # mask of centers within bulk
    if full_N:
        bulk_N *= full_N/len(positions)
    if verbose:
        print('margin:', margin/ss, 'center:', center, 'radius:', radius)
        print('max r: ', dmax/ss, 'bulk_N:', bulk_N)
    return bulk_N, bulk_mask, center, radius
Example #15
def orient_op(orientations,
              m=4,
              positions=None,
              margin=0,
              ret_complex=True,
              do_err=False,
              globl=False,
              locl=False):
    """orient_op(orientations, m=4, positions=None, margin=0,
                 ret_complex=True, do_err=False, globl=False, locl=False)

       calculate the global m-fold particle orientational order parameter

                1   N    i m theta
        Phi  = --- SUM e          j
           m    N  j=1
    """
    if not (globl or locl):
        globl = True
        locl = orientations.ndim == 2
    # reduce orientations mod tau/m in place; exp(i*m*theta) is unchanged by this
    np.mod(orientations, tau / m, orientations)
    if margin:
        if margin < ss:
            margin *= ss
        center = 0.5 * (positions.max(0) + positions.min(0))
        d = helpy.dist(positions, center)  # distances to center
        orientations = orientations[d < d.max() - margin]
    phis = np.exp(m * orientations * 1j)
    if locl:
        phis = np.nanmean(phis, 1)
    if do_err:
        err = np.nanstd(phis, ddof=1) / sqrt(np.count_nonzero(~np.isnan(phis)))
    if not globl:
        return (np.abs(phis), err) if do_err else np.abs(phis)
    phi = np.nanmean(phis) if ret_complex else np.abs(np.nanmean(phis))
    if locl:
        return (np.abs(phis), phi, err) if do_err else (np.abs(phis), phi)
    return (phi, err) if do_err else phi