Example #1
def run_cluster(complPG, qfib, qsym, cl_radius=cl_radius, min_compl=min_compl):
    """
    """
    start = time.clock()  # time this

    # # use transforms module for distance
    # quatDistance = lambda x, y: xf.quat_distance(x, y, qsym)

    # use compiled module for distance
    # just to be safe, must order qsym as C-contiguous
    qsym = np.array(qsym.T, order='C').T
    quatDistance = lambda x, y: xfcapi.quat_distance(np.array(x, order='C'), \
                                                     np.array(y, order='C'), \
                                                     qsym)

    qfib_r = qfib[:, np.r_[complPG] > min_compl]

    print "Feeding %d orientations above %.1f%% to clustering" % (
        qfib_r.shape[1], 100 * min_compl)

    if haveScikit:
        print "Using scikit..."
        pdist = pairwise_distances(qfib_r.T, metric=quatDistance, n_jobs=-1)
        core_samples, labels = dbscan(pdist,
                                      eps=d2r * cl_radius,
                                      min_samples=1,
                                      metric='precomputed')
        cl = np.array(labels, dtype=int) + 1
    else:
        print "Using fclusterdata with a tolerance of %f degrees..." % (
            cl_radius)
        cl = cluster.hierarchy.fclusterdata(qfib_r.T,
                                            d2r * cl_radius,
                                            criterion='distance',
                                            metric=quatDistance)

    nblobs = len(np.unique(cl))

    qbar = np.zeros((4, nblobs))
    for i in range(nblobs):
        npts = sum(cl == i + 1)
        # qbar[:, i] = mutil.unitVector(
        #     np.sum(qfib_r[:, cl == i + 1].reshape(4, npts), axis=1).reshape(4, 1)).flatten()
        qbar[:, i] = rot.quatAverage(qfib_r[:, cl == i + 1].reshape(4, npts),
                                     qsym).flatten()
    elapsed = (time.clock() - start)

    print "clustering took %f seconds" % (elapsed)
    return qbar, cl
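
The DBSCAN branch above boils down to clustering unit quaternions on a precomputed misorientation-distance matrix. A minimal, self-contained sketch of that idea follows; it ignores the crystal-symmetry reduction through `qsym`, uses made-up data, and substitutes a plain NumPy metric for hexrd's compiled `xfcapi.quat_distance`, so it illustrates the clustering step rather than reproducing `run_cluster`.

import numpy as np
from sklearn.cluster import DBSCAN
from sklearn.metrics import pairwise_distances

def misorientation(q1, q2):
    # angle (radians) between two unit quaternions, symmetry ignored;
    # abs() folds q and -q onto the same orientation
    return 2.0 * np.arccos(np.clip(abs(np.dot(q1, q2)), -1.0, 1.0))

# hypothetical input: unit quaternions stored one per row (like qfib_r.T)
rng = np.random.default_rng(0)
q = rng.normal(size=(50, 4))
q /= np.linalg.norm(q, axis=1, keepdims=True)

dmat = pairwise_distances(q, metric=misorientation)     # n x n distance matrix
labels = DBSCAN(eps=np.radians(1.0),                    # 1 degree cluster radius
                min_samples=1,
                metric='precomputed').fit_predict(dmat)
cl = labels + 1                                         # 1-based labels, as above
print(cl)

With min_samples=1 every point is a core sample, so DBSCAN never assigns the noise label; that is why the labels can simply be shifted to start at 1.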
Example #2
def run_cluster(complPG, qfib, qsym,
                cl_radius=cl_radius, min_compl=min_compl):
    """
    """
    start = time.clock()                      # time this

    # # use transforms module for distance
    # quatDistance = lambda x, y: xf.quat_distance(x, y, qsym)

    # use compiled module for distance
    # just to be safe, must order qsym as C-contiguous
    qsym  = np.array(qsym.T, order='C').T
    quatDistance = lambda x, y: xfcapi.quat_distance(np.array(x, order='C'), \
                                                     np.array(y, order='C'), \
                                                     qsym)

    qfib_r = qfib[:, np.r_[complPG] > min_compl]

    print "Feeding %d orientations above %.1f%% to clustering" % (qfib_r.shape[1], 100*min_compl)

    if haveScikit:
        print "Using scikit..."
        pdist = pairwise_distances(qfib_r.T, metric=quatDistance, n_jobs=-1)
        core_samples, labels = dbscan(pdist, eps=d2r*cl_radius, min_samples=1, metric='precomputed')
        cl = np.array(labels, dtype=int) + 1
    else:
        print "Using fclusterdata with a tolerance of %f degrees..." % (cl_radius)
        cl = cluster.hierarchy.fclusterdata(qfib_r.T, d2r*cl_radius, criterion='distance', metric=quatDistance)

    nblobs = len(np.unique(cl))

    qbar = np.zeros((4, nblobs))
    for i in range(nblobs):
        npts = sum(cl == i + 1)
        # qbar[:, i] = mutil.unitVector(
        #     np.sum(qfib_r[:, cl == i + 1].reshape(4, npts), axis=1).reshape(4, 1)).flatten()
        qbar[:, i] = rot.quatAverage(qfib_r[:, cl == i + 1].reshape(4, npts),
                                     qsym).flatten()
    elapsed = (time.clock() - start)

    print "clustering took %f seconds" % (elapsed)
    return qbar, cl
Example #3
def quat_distance(x, y):
    # closure over `qsym` (symmetry operators) from the enclosing scope;
    # wraps the compiled distance with C-contiguous copies of the inputs
    return xfcapi.quat_distance(np.array(x, order='C'),
                                np.array(y, order='C'), qsym)
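
For orientation, here is a rough pure-NumPy counterpart of what the compiled `xfcapi.quat_distance` is assumed to compute, namely the smallest misorientation angle over the symmetry operators stored column-wise in `qsym`. The scalar-first quaternion convention and the `quat_prod` helper are assumptions made for illustration, not hexrd's actual implementation.

import numpy as np

def quat_prod(a, b):
    # Hamilton product of scalar-first quaternions [w, x, y, z]
    w1, x1, y1, z1 = a
    w2, x2, y2, z2 = b
    return np.array([
        w1*w2 - x1*x2 - y1*y2 - z1*z2,
        w1*x2 + x1*w2 + y1*z2 - z1*y2,
        w1*y2 - x1*z2 + y1*w2 + z1*x2,
        w1*z2 + x1*y2 - y1*x2 + z1*w2,
    ])

def quat_distance_np(q1, q2, qsym):
    # minimum misorientation angle (radians) between q1 and q2 over the
    # symmetry operators stored as the columns of qsym (4 x nsym)
    best = np.pi
    for k in range(qsym.shape[1]):
        q2s = quat_prod(qsym[:, k], q2)
        ang = 2.0 * np.arccos(np.clip(abs(np.dot(q1, q2s)), -1.0, 1.0))
        best = min(best, ang)
    return best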
Example #4
def run_cluster(compl, qfib, qsym, cfg):
    """
    """
    cl_radius = cfg.find_orientations.clustering.radius
    min_compl = cfg.find_orientations.clustering.completeness
    algorithm = cfg.find_orientations.clustering.algorithm

    start = time.clock() # time this

    num_above = sum(np.array(compl) > min_compl)
    if num_above == 0:
        # nothing to cluster
        qbar = cl = np.array([])
    elif num_above == 1:
        # short circuit
        qbar = qfib[:, np.array(compl) > min_compl]
        cl = [1]
    else:
        # use compiled module for distance
        # just to be safe, must order qsym as C-contiguous
        qsym  = np.array(qsym.T, order='C').T
        quat_distance = lambda x, y: xfcapi.quat_distance(
            np.array(x, order='C'),
            np.array(y, order='C'),
            qsym
            )

        qfib_r = qfib[:, np.array(compl) > min_compl]

        logger.info(
            "Feeding %d orientations above %.1f%% to clustering",
            qfib_r.shape[1], 100*min_compl
            )

        if algorithm == 'dbscan' and not have_sklearn:
            algorithm = 'fclusterdata'
            logger.warning(
                "sklearn >= 0.14 required for dbscan, using fclusterdata"
                )
        if algorithm == 'dbscan':
            pdist = pairwise_distances(
                qfib_r.T, metric=quat_distance, n_jobs=-1
                )
            core_samples, labels = dbscan(
                pdist,
                eps=np.radians(cl_radius),
                min_samples=1,
                metric='precomputed'
                )
            cl = np.array(labels, dtype=int) + 1
        elif algorithm == 'fclusterdata':
            cl = cluster.hierarchy.fclusterdata(
                qfib_r.T,
                np.radians(cl_radius),
                criterion='distance',
                metric=quat_distance
                )
        else:
            raise RuntimeError(
                "Clustering algorithm %s not recognized" % algorithm
                )

        nblobs = len(np.unique(cl))

        qbar = np.zeros((4, nblobs))
        for i in range(nblobs):
            npts = sum(cl == i + 1)
            qbar[:, i] = rot.quatAverage(
                qfib_r[:, cl == i + 1].reshape(4, npts), qsym
                ).flatten()

    logger.info("clustering took %f seconds", time.clock() - start)
    logger.info(
        "Found %d orientation clusters with >=%.1f%% completeness"
        " and %2f misorientation",
        qbar.size/4,
        100.*min_compl,
        cl_radius
        )

    return np.atleast_2d(qbar), cl
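
The `fclusterdata` fallback is SciPy's hierarchical clustering with a callable metric and a 'distance' criterion. A small sketch of that path, again with synthetic data and a symmetry-free metric rather than hexrd's compiled one:

import numpy as np
from scipy.cluster import hierarchy

def misorientation(q1, q2):
    # angle (radians) between unit quaternions, symmetry ignored
    return 2.0 * np.arccos(np.clip(abs(np.dot(q1, q2)), -1.0, 1.0))

rng = np.random.default_rng(0)
q = rng.normal(size=(50, 4))
q /= np.linalg.norm(q, axis=1, keepdims=True)   # observations as rows, like qfib_r.T

cl_radius = 1.0  # degrees
cl = hierarchy.fclusterdata(q,
                            np.radians(cl_radius),
                            criterion='distance',
                            metric=misorientation)
print(len(np.unique(cl)), "clusters")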
Example #5
def run_cluster(compl, qfib, qsym, cfg):
    """
    """
    cl_radius = cfg.find_orientations.clustering.radius
    min_compl = cfg.find_orientations.clustering.completeness
    algorithm = cfg.find_orientations.clustering.algorithm

    start = time.clock()  # time this

    num_above = sum(np.array(compl) > min_compl)
    if num_above == 0:
        # nothing to cluster
        qbar = cl = np.array([])
    elif num_above == 1:
        # short circuit
        qbar = qfib[:, np.array(compl) > min_compl]
        cl = [1]
    else:
        # use compiled module for distance
        # just to be safe, must order qsym as C-contiguous
        qsym = np.array(qsym.T, order='C').T
        quat_distance = lambda x, y: xfcapi.quat_distance(
            np.array(x, order='C'), np.array(y, order='C'), qsym)

        qfib_r = qfib[:, np.array(compl) > min_compl]

        logger.info("Feeding %d orientations above %.1f%% to clustering",
                    qfib_r.shape[1], 100 * min_compl)

        if algorithm == 'dbscan' and not have_sklearn:
            algorithm = 'fclusterdata'
            logger.warning(
                "sklearn >= 0.14 required for dbscan, using fclusterdata")
        if algorithm == 'dbscan':
            pdist = pairwise_distances(qfib_r.T,
                                       metric=quat_distance,
                                       n_jobs=-1)
            core_samples, labels = dbscan(pdist,
                                          eps=np.radians(cl_radius),
                                          min_samples=1,
                                          metric='precomputed')
            cl = np.array(labels, dtype=int) + 1
        elif algorithm == 'fclusterdata':
            cl = cluster.hierarchy.fclusterdata(qfib_r.T,
                                                np.radians(cl_radius),
                                                criterion='distance',
                                                metric=quat_distance)
        else:
            raise RuntimeError("Clustering algorithm %s not recognized" %
                               algorithm)

        nblobs = len(np.unique(cl))

        qbar = np.zeros((4, nblobs))
        for i in range(nblobs):
            npts = sum(cl == i + 1)
            qbar[:, i] = rot.quatAverage(
                qfib_r[:, cl == i + 1].reshape(4, npts), qsym).flatten()

    logger.info("clustering took %f seconds", time.clock() - start)
    logger.info(
        "Found %d orientation clusters with >=%.1f%% completeness"
        " and %2f misorientation", qbar.size / 4, 100. * min_compl, cl_radius)

    return np.atleast_2d(qbar), cl
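
`rot.quatAverage` reduces each cluster to a single representative orientation. As a stand-in (an assumption, not necessarily hexrd's algorithm), a common symmetry-free approach takes the dominant eigenvector of the accumulated quaternion outer products, which is insensitive to the q vs. -q sign ambiguity:

import numpy as np

def quat_average(q):
    # q: 4 x n array of unit quaternions stored as columns, symmetry ignored.
    # The average is the eigenvector of M = sum_i q_i q_i^T belonging to the
    # largest eigenvalue; eigh returns eigenvalues in ascending order.
    m = q @ q.T
    vals, vecs = np.linalg.eigh(m)
    return vecs[:, -1]

# usage mirroring the loop above: qbar[:, i] = quat_average(qfib_r[:, cl == i + 1])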