Example #1
def pyquest_spin_bintrees(data,row_alpha=0.5,col_alpha=0.5,beta=1.0,bal_constant=1.0,n_iters=3,n_spin=10):
    """
    runs what is momentarily the standard questionnaire algorithm:
    initial affinity = mutual cosine similarity
    initial tree based on median of successive eigenvectors
    dual affinities based on earth mover distance.
    dual trees based on eigen_cut method
    """
    #Generate initial affinity
    init_row_aff = affinity.mutual_cosine_similarity(data.T,False,0,threshold=0.1)
    
    #Compute diffusion embedding of initial affinities
    init_row_vecs,init_row_vals = markov.markov_eigs(init_row_aff, 12)
    #Generate median trees
    init_row_tree = bintree_construct.median_tree(init_row_vecs,init_row_vals,max_levels=12)

    row_trees, col_trees = [],[]

    for _ in xrange(n_spin):
        dual_col_trees = []
        dual_row_trees = [init_row_tree]
        
        for _ in xrange(n_iters):
            dual_col_trees.append(bintree_construct.eigen_tree(data,dual_row_trees[-1],alpha=col_alpha,beta=beta,bal_constant=bal_constant))
            dual_row_trees.append(bintree_construct.eigen_tree(data.T,dual_col_trees[-1],alpha=row_alpha,beta=beta,bal_constant=bal_constant))

        row_trees.append(dual_row_trees[-1])
        col_trees.append(dual_col_trees[-1])
            
    return row_trees,col_trees
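A minimal usage sketch, assuming the package's affinity, markov and bintree_construct modules are importable and the function above is defined; the data shape is purely illustrative:

import numpy as np

#Illustrative stand-in for a real questionnaire matrix (rows x columns).
data = np.random.randn(200, 150)

#One pair of final row/column trees is returned per spin cycle.
row_trees, col_trees = pyquest_spin_bintrees(data, n_iters=3, n_spin=10)
assert len(row_trees) == len(col_trees) == 10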
Example #2
def flex_tree_diffusion(affinity, penalty_constant, n_eigs=12):
    """
    affinity is an nxn affinity matrix.
    Creates a flexible tree by calculating the diffusion on the given affinity.
    Then clusters at each level by the flexible tree algorithm. For each level
    up, doubles the diffusion time.
    penalty_constant is the multiplier of the median diffusion distance.
    """
    #First, we calculate the first n eigenvectors and eigenvalues of the
    #diffusion
    cluster_list = []
    vecs, vals = markov.markov_eigs(affinity, n_eigs)
    diff_time = 1.0
    q = np.eye(affinity.shape[0])
    while 1:
        #now we calculate the diffusion distances between points at the
        #current diffusion time.
        diff_vecs = vecs.dot(np.diag(vals**diff_time))
        diff_dists = spsp.distance.squareform(spsp.distance.pdist(diff_vecs))
        #we take the distance between clusters to be the average diffusion
        #distance between their points.
        avg_dists = q.dot(diff_dists).dot(q.T)
        #now we cluster the points based on this distance
        cluster_list.append(cluster_from_distance(avg_dists, penalty_constant))
        #if there is only one node left, then we are done.
        #otherwise, add another level to the tree, double the diffusion time
        #and keep going.
        if len(cluster_list[-1]) == 1:
            break
        temp_tree = clusterlist_to_tree(cluster_list)
        cpart = ClusteringPartition(
            [x.elements for x in temp_tree.dfs_level(2)])
        q, _ = cluster_transform_matrices(cpart)
        diff_time *= 2.0
    return clusterlist_to_tree(cluster_list)
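A hedged usage sketch of flex_tree_diffusion; the Gaussian kernel below is only an illustrative way to obtain an n x n affinity (scipy.spatial is assumed imported as spsp, as in the snippet):

import numpy as np
import scipy.spatial as spsp

#Illustrative data: 100 points in 5 dimensions.
points = np.random.randn(100, 5)
dists = spsp.distance.squareform(spsp.distance.pdist(points))
aff = np.exp(-dists**2)    #toy Gaussian affinity with epsilon = 1

#penalty_constant multiplies the median diffusion distance; 1.0 is arbitrary here.
tree = flex_tree_diffusion(aff, penalty_constant=1.0, n_eigs=12)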
Example #4
def pyquest_newtree(data,tree_constant=0.25,row_alpha=0.5,col_alpha=0.5,beta=1.0,n_iters=3):
    """
    Flexible-tree variant of the questionnaire algorithm: builds an initial
    row tree from a cosine-similarity diffusion embedding, then alternately
    refines column and row trees from EMD dual affinities. Returns the final
    trees together with the diffusion eigenvectors/eigenvalues of the final
    row and column affinities.
    """

    init_row_aff = affinity.mutual_cosine_similarity(data.T,False,0,threshold=0.1)
    
    #Compute diffusion embedding of initial affinities
    init_row_vecs,init_row_vals = markov.markov_eigs(init_row_aff, 12)
    init_row_vals[np.isnan(init_row_vals)] = 0.0
    row_embedding = init_row_vecs.dot(np.diag(init_row_vals))
    row_distances = spsp.distance.squareform(spsp.distance.pdist(row_embedding))
    row_affinity = np.max(row_distances) - row_distances
    
    #Generate initial tree
    #print "call1 tree_constant:{}".format(tree_constant)
    init_row_tree = tree_building.make_tree_embedding(row_affinity,tree_constant)
    
    dual_col_trees = []
    dual_row_trees = [init_row_tree]
    
    for _ in xrange(n_iters):
        #print "Beginning iteration {}".format(i)
        col_emd = dual_affinity.calc_emd(data,dual_row_trees[-1],alpha=col_alpha,beta=beta)
        col_aff = dual_affinity.emd_dual_aff(col_emd)
        #print "call2 tree_constant:{}".format(tree_constant)
        dual_col_trees.append(tree_building.make_tree_embedding(col_aff,tree_constant))
    
        row_emd = dual_affinity.calc_emd(data.T,dual_col_trees[-1],alpha=row_alpha,beta=beta)
        row_aff = dual_affinity.emd_dual_aff(row_emd)
        #print "call3 tree_constant:{}".format(tree_constant)
        dual_row_trees.append(tree_building.make_tree_embedding(row_aff,tree_constant))
        
    col_tree = dual_col_trees[-1]
    row_tree = dual_row_trees[-1]
    
    col_emd = dual_affinity.calc_emd(data,row_tree,alpha=col_alpha,beta=beta)
    row_emd = dual_affinity.calc_emd(data.T,col_tree,alpha=row_alpha,beta=beta)
    
    row_aff = dual_affinity.emd_dual_aff(row_emd)
    col_aff = dual_affinity.emd_dual_aff(col_emd)
    
    row_vecs,row_vals = markov.markov_eigs(row_aff, 12)
    col_vecs,col_vals = markov.markov_eigs(col_aff, 12)   

    return row_tree,col_tree,row_vecs,col_vecs,row_vals,col_vals
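A hedged usage sketch of pyquest_newtree; random data stands in for a real matrix, and the last line only illustrates turning the returned eigenvectors/eigenvalues into diffusion coordinates, mirroring what the function does internally for row_embedding:

import numpy as np

data = np.random.randn(200, 150)    #illustrative data matrix
row_tree, col_tree, row_vecs, col_vecs, row_vals, col_vals = \
    pyquest_newtree(data, tree_constant=0.25, n_iters=3)

#First two nontrivial diffusion coordinates of the rows (column 0 is
#typically the trivial constant eigenvector).
row_coords = row_vecs[:, 1:3] * row_vals[1:3]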
Example #5
def zero_eigen_cut(node,affinity):
    """
    Returns the cut of the affinity matrix (cutting at zero) 
    corresponding to the elements in node, under the condition of bal_constant.
    """ 
    new_data = affinity[node.elements,:][:,node.elements]
    
    vecs,_ = markov.markov_eigs(new_data, 2)
    labels = vecs[:,1] < 0.0
    
    return labels
Example #7
def random_dyadic_cut(node,affinity,left,right):
    """
    Returns a randomized cut of the affinity matrix (cutting at zero) 
    corresponding to the elements in node, under the condition of bal_constant.
    """ 
    new_data = affinity[node.elements,:][:,node.elements]
    
    vecs,_ = markov.markov_eigs(new_data, 2)
    eig = vecs[:,1]
    eig_sorted = eig.argsort().argsort()
    cut_loc = np.random.randint(left,right+1)
    labels = eig_sorted < cut_loc
    
    return labels
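The double argsort above converts the eigenvector values into ranks, so comparing against cut_loc peels off exactly cut_loc points from the low end of the eigenvector. A small numeric illustration:

import numpy as np

eig = np.array([0.4, -0.2, 0.1, -0.5])
ranks = eig.argsort().argsort()    #array([3, 1, 2, 0]): rank of each entry
labels = ranks < 2                 #True for the two smallest entries: [False, True, False, True]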
Example #9
def break_node(train_data,
               col_tree_node,
               row_tree,
               regressors=None,
               k=5,
               alpha=0.0,
               beta=1.0,
               col_emd=None):
    """
    First calculates the EMD on the columns of train_data 
    in col_tree_node.elements using row_tree. Converts that to an affinity.
    Calculates the second eigenvector of the markov matrix based on that
    affinity.
    Then fits a linear model using the rows in regressors (all if it's None)
    and uses the LASSO path to identify the best k rows.
    Splits the node using the predicted eigenvector values.
    """
    import sklearn.linear_model as sklm

    col_indices = col_tree_node.elements
    node_data = train_data[:, col_indices].astype(np.float64)

    if col_emd is None:
        col_emd = dual_affinity.calc_emd(node_data, row_tree, alpha, beta)
        col_aff = dual_affinity.emd_dual_aff(col_emd)
    else:
        col_aff = dual_affinity.emd_dual_aff(
            col_emd[:, col_indices][col_tree_node.elements, :])

    vecs, _ = markov.markov_eigs(col_aff, 2)
    eig = vecs[:, 1]

    if regressors is None:
        regressors = range(row_tree.size)

    _, active, _ = sklm.lars_path(node_data[regressors, :].T, eig, max_iter=50)

    regr_indices = active[0:k]

    lm = sklm.LinearRegression()
    lm.fit(node_data[regr_indices, :].T, eig)
    pred_eigs = lm.predict(node_data[regr_indices, :].T)

    labels = pred_eigs > 0.0
    partition = labels * np.ones(labels.shape[0])
    col_tree_node.create_subclusters(partition)
    return np.array([regressors[x] for x in regr_indices]), lm
def eigen_cut_zero(node,emd,eps=1.0):
    """
    Cuts the node at zero on the second eigenvector of the Markov matrix
    built from the EMD dual affinity of its elements.
    """
    affinity = dual_affinity.emd_dual_aff(emd[node.elements,:][:,node.elements],
                                          eps)
    
    try:
        vecs,_ = markov.markov_eigs(affinity,2)
    except:
        print affinity
        print emd
        print node.elements
        raise
    eig = vecs[:,1]
    n = len(eig)
    labels = np.ones(n)
    labels *= (eig > 0.0)
    
    return labels
def bal_eigen_cut(node,emd,bal_constant=1.0,eps=1.0):
    """
    Cuts the node at a random location along the second eigenvector, with
    the cut index drawn uniformly from the balance range bal_cut(n, bal_constant).
    """
    affinity = dual_affinity.emd_dual_aff(emd[node.elements,:][:,node.elements],
                                          eps)
    
    try:
        vecs,_ = markov.markov_eigs(affinity,2)
    except:
        print affinity
        print emd
        print node.elements
        raise
    eig = vecs[:,1]
    eig_sorted = np.argsort(eig)
    n = len(eig)
    l,r = bal_cut(n,bal_constant)
    cut_loc = np.random.randint(l,r+1)
    labels = np.zeros(n,np.int)
    labels[eig_sorted[0:cut_loc]] = 1
    
    return labels
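The row-selection step inside break_node can be exercised in isolation; the sketch below uses synthetic stand-ins for node_data and eig, with sklearn.linear_model.lars_path and LinearRegression called exactly as in the function above:

import numpy as np
import sklearn.linear_model as sklm

n_rows, n_cols, k = 30, 80, 5
node_data = np.random.randn(n_rows, n_cols)    #stand-in for the node's data block
eig = np.random.randn(n_cols)                  #stand-in for the second eigenvector

#The LARS path orders the rows by how early they become active when
#predicting eig; the first k active rows are kept as regressors.
_, active, _ = sklm.lars_path(node_data.T, eig, max_iter=50)
regr_indices = active[0:k]

lm = sklm.LinearRegression()
lm.fit(node_data[regr_indices, :].T, eig)
pred_eigs = lm.predict(node_data[regr_indices, :].T)
labels = pred_eigs > 0.0    #the node's columns split by predicted eigenvector sign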
Example #12
def eigen_cut(node,emd,noise,eps=1.0):
    """
    Cuts the node near the median of the second eigenvector of the EMD dual
    affinity; the cut index is perturbed by up to +/- noise*n around n/2.
    """
    affinity = dual_affinity.emd_dual_aff(emd[node.elements,:][:,node.elements],
                                          eps)
    
    try:
        vecs,_ = markov.markov_eigs(affinity,2)
    except:
        print affinity
        print emd
        print node.elements
        raise
    eig = vecs[:,1]
    eig_sorted = np.sort(eig)
    n = len(eig_sorted)
    rnoise = np.random.uniform(-noise,noise)
    if noise < 1e-8:
        labels = np.zeros(n)
        labels[np.argsort(eig)[0:int(n/2)]] = 1
    else:
        cut_loc = eig_sorted[int((n/2)+(rnoise*n))]
        labels = np.ones(n)*(eig > cut_loc)
    
    return labels
Example #14
    def calc_col_embedding(self):
        self.col_vecs, self.col_vals = markov.markov_eigs(self.col_affinity, 8)
        Publisher.sendMessage("embed.col.calc")
Example #15
    def calc_row_embedding(self):
        self.row_vecs, self.row_vals = markov.markov_eigs(self.row_affinity, 8)
        Publisher.sendMessage("embed.row.calc")
Example #16
def pyquest(data,params):
    #params should be a PyQuestParams object

    Publisher.sendMessage("status.bar", "Calculating initial affinity...")
    if params.init_aff_type == INIT_AFF_COS_SIM:
        init_row_aff = affinity.mutual_cosine_similarity(
                            data.T,False,0,threshold=params.init_aff_threshold)
    elif params.init_aff_type == INIT_AFF_GAUSSIAN:
        #add KNN to the page
        init_row_aff = affinity.gaussian_euclidean(
                            data.T, 5, params.init_aff_epsilon)
    
    #Compute diffusion embedding of initial affinities
    init_row_vecs,init_row_vals = markov.markov_eigs(init_row_aff, 12)
    init_row_vals[np.isnan(init_row_vals)] = 0.0
    row_embedding = init_row_vecs.dot(np.diag(init_row_vals))
    row_distances = spsp.distance.squareform(spsp.distance.pdist(row_embedding))
    row_affinity = np.max(row_distances) - row_distances
    
    #Generate initial tree
    #print "call1 tree_constant:{}".format(tree_constant)
    Publisher.sendMessage("status.bar", "Calculating initial row tree...")

    if params.tree_type == TREE_TYPE_BINARY:
        init_row_tree = bintree_construct.median_tree(
                                init_row_vecs,init_row_vals,max_levels=12)
    elif params.tree_type == TREE_TYPE_FLEXIBLE:
#        init_row_tree = tree_building.make_tree_embedding(
#                                row_affinity,params.tree_constant)
        init_row_tree = tree_building.make_tree_embedding(
                                row_affinity,params.tree_constant)    
    dual_col_trees = []
    dual_row_trees = [init_row_tree]
    
    row_tree_descs = ["Initial tree"]
    col_tree_descs = []
    
    for i in xrange(params.n_iters):
        message = "Iteration {}: calculating column affinity...".format(i)
        Publisher.sendMessage("status.bar", message)

        #print "Beginning iteration {}".format(i)
        if params.col_affinity_type == DUAL_EMD:
            col_emd = dual_affinity.calc_emd(data,dual_row_trees[-1],
                     params.col_alpha,params.col_beta)
            col_aff = dual_affinity.emd_dual_aff(col_emd)
        elif params.col_affinity_type == DUAL_GAUSSIAN:
            print "Gaussian dual affinity not supported at the moment."
            return None
        
        message = "Iteration {}: calculating column tree...".format(i)
        Publisher.sendMessage("status.bar", message)

        if params.tree_type == TREE_TYPE_BINARY:
            col_tree = bintree_construct.eigen_tree(data,dual_row_trees[-1],
                    params.col_alpha,params.col_beta,params.tree_bal_constant)
        elif params.tree_type == TREE_TYPE_FLEXIBLE:
            col_tree = tree_building.make_tree_embedding(col_aff,
                                     params.tree_constant)
        dual_col_trees.append(col_tree)
        col_tree_descs.append("Iteration {}".format(i))

        message = "Iteration {}: calculating row affinity...".format(i)
        Publisher.sendMessage("status.bar", message)

        if params.row_affinity_type == DUAL_EMD:
            row_emd = dual_affinity.calc_emd(data.T,dual_col_trees[-1],
                     params.row_alpha,params.row_beta)
            row_aff = dual_affinity.emd_dual_aff(row_emd)
        elif params.row_affinity_type == DUAL_GAUSSIAN:
            print "Gaussian dual affinity not supported at the moment."
            return None
 
        message = "Iteration {}: calculating row tree...".format(i)
        Publisher.sendMessage("status.bar", message)
       
        if params.tree_type == TREE_TYPE_BINARY:
            row_tree = bintree_construct.eigen_tree(data.T,dual_col_trees[-1],
                    params.row_alpha,params.row_beta,params.tree_bal_constant)
        elif params.tree_type == TREE_TYPE_FLEXIBLE:
            row_tree = tree_building.make_tree_embedding(row_aff,
                                     params.tree_constant)
        dual_row_trees.append(row_tree)
        row_tree_descs.append("Iteration {}".format(i))

    quest_run_desc = "{}".format(datetime.datetime.now())

    return PyQuestRun(quest_run_desc,dual_row_trees,dual_col_trees,
                      row_tree_descs,col_tree_descs,params)
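A hedged sketch of driving pyquest without the GUI. PyQuestParams itself is not shown here, so a minimal stand-in class is used; its attribute names are exactly the ones pyquest reads, and the module constants (INIT_AFF_COS_SIM, TREE_TYPE_BINARY, DUAL_EMD) and the Publisher/pubsub setup are assumed to be importable from the same module:

import numpy as np

class _Params(object):
    #Stand-in for PyQuestParams, for illustration only.
    init_aff_type = INIT_AFF_COS_SIM
    init_aff_threshold = 0.1
    tree_type = TREE_TYPE_BINARY
    tree_bal_constant = 1.0
    tree_constant = 0.25            #only read when tree_type is TREE_TYPE_FLEXIBLE
    n_iters = 3
    col_affinity_type = DUAL_EMD
    col_alpha = 0.5
    col_beta = 1.0
    row_affinity_type = DUAL_EMD
    row_alpha = 0.5
    row_beta = 1.0

data = np.random.randn(200, 150)    #illustrative data matrix
run = pyquest(data, _Params())      #returns a PyQuestRun holding the dual trees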