import numpy as np

# Project-local helper modules, inferred from the calls below; the exact
# module names and paths are an assumption and may differ in the original
# package layout. Note that 'iter' shadows the Python builtin but is kept
# to match the source.
import trajIO
import displacement
import percentages
import dc
import iter


def main():
    # Load box dimensions L plus lipid and cholesterol center-of-mass
    # trajectories, then recenter the frames along z.
    file = "comTraj.npz"
    L, com_lipids, com_chol = trajIO.decompress(file)
    com_lipids, com_chol = trajIO.translateZ(com_lipids, com_chol)

    # Presumably removes block-wise drift from each trajectory.
    com_lipids = displacement.block_displacement(L, com_lipids)
    com_chol = displacement.block_displacement(L, com_chol)

    # Work on a single frame.
    t = 28
    lipids = com_lipids[t]
    chol = com_chol[t]

    # Split each species into leaflets; keep only the upper leaflet.
    lipids, _ = trajIO.layering(lipids)
    chol, _ = trajIO.layering(chol)

    # Build the combined system two ways so the two pipelines can be compared.
    total = np.concatenate((lipids, chol), axis=0)
    total1 = iter.combine(lipids, chol)

    # Partition each combined set into four equal composition groups.
    cluster = percentages.cluster(total, [0.25, 0.25, 0.25, 0.25])
    cluster1 = percentages.cluster(total1, [0.25, 0.25, 0.25, 0.25])

    # Sanity check (disabled): both pipelines should yield the same
    # Euclidean distance matrix for the first group.
    # edm = euclideanDist.edm(L[t], cluster[0])
    # edm1 = euclideanDist.edm(L[t], cluster1[0])
    # print(np.array_equiv(edm, edm1))

    # Distance cutoff for clustering, in the trajectory's distance units.
    cutoff = 1.15

    # Cluster the first composition group via DBSCAN and via the iterative
    # method, returning one label array per pipeline.
    labels1 = dc.dbscan_wrapper(cluster[0], L[t], cutoff)
    labels2 = iter.cluster_labels('upper', L[t], cluster1[0])

    return labels1, labels2
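
For reference, a minimal, hypothetical driver (not part of the original source) showing one way to check that the two returned labelings induce the same partition, assuming both pipelines label the same points in the same order:

if __name__ == "__main__":
    labels1, labels2 = main()
    # Cluster IDs may differ between pipelines even when the grouping agrees,
    # so test whether the label-to-label mapping is a bijection instead.
    pairs = set(zip(labels1, labels2))
    same = len(pairs) == len(set(labels1)) == len(set(labels2))
    print("partitions match:", same)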
Example #2
        #linearWeighted[size] = np.zeros(size)

    # NOTE: this snippet is truncated at the top; Nblock, nlog, times,
    # cluster_sizes, percentage, cutoff, flag, and the accumulators
    # (normSizes, weightedNormSizes, logNorm, logWeighted) are set up
    # earlier in the original source.

    # Iterate over the trajectory in blocks of nlog frames.
    for block in range(Nblock):
        start = block * nlog
        for time in times:
            t = start + time
            #print(t)  # progress tracker (disabled)
            # Split the lipid frame into upper and lower leaflets.
            upper, lower = trajIO.layering(com_lipids[t])
            original = {'upper': upper, 'lower': lower}

            for layer in ['upper', 'lower']:
                # Partition the leaflet into composition groups.
                clusters = percentages.cluster(original[layer], percentage)

                for size in cluster_sizes:
                    for i in range(size):
                        Nparticles = len(clusters[i])
                        norm, weighted = dc.mean_cluster_size(
                            clusters[i], L[t], cutoff)
                        normSizes[block][time][layer][size][i] = norm
                        weightedNormSizes[block][time][layer][size][i] = weighted

                        # Accumulate the randomized baseline once per block.
                        if time == 1:
                            alpha, beta = dc.meanRandom(
                                original[layer], L[t], cutoff, Nparticles)
                            logNorm[size][i] += alpha
                            logWeighted[size][i] += beta
            # Second pass: lipids and cholesterol combined per leaflet.
            t = start + time
            print(t)  # progress tracker
            ul, ll = trajIO.layering(com_lipids[t])
            uc, lc = trajIO.layering(com_chol[t])

            original = {'upper': iter.combine(ul, uc),
                        'lower': iter.combine(ll, lc)}

            # Per-species tuples, kept for the randomized baseline.
            random = {'upper': (ul, uc), 'lower': (ll, lc)}

            for layer in ['upper', 'lower']:
                # Partition the combined leaflet into composition groups.
                clusters = percentages.cluster(
                    original[layer], c.percentages['all']['higher'][4])

                for size in cluster_sizes:
                    for i in range(size):
                        nlipids, nchol = iter_cluster.counter(clusters[i])
                        # Assumption: the baseline below needs the group's
                        # total particle count.
                        Nparticles = nlipids + nchol
                        norm, weighted = iter.mean_cluster_size(
                            clusters[i], L[t], flag)
                        normSizes[block][time][layer][size][i] = norm
                        weightedNormSizes[block][time][layer][size][i] = weighted

                        if time == 1:
                            alpha, beta = iter.meanRandom(
                                original[layer], L[t], Nparticles, flag)
                            logNorm[size][i] += alpha
                            logWeighted[size][i] += beta
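
Since the snippet above is truncated, here is a minimal sketch of how the nested accumulators could be initialized; the concrete values of Nblock, nlog, times, and cluster_sizes are placeholders inferred from the indexing pattern, not taken from the original source:

import numpy as np

Nblock, nlog = 10, 100        # assumed block layout
times = range(nlog)           # assumed per-block frame offsets
cluster_sizes = [2, 3, 4]     # assumed numbers of composition groups


def make_accumulator():
    # One zero vector per (block, time, layer, size) cell, matching the
    # normSizes[block][time][layer][size][i] lookups in the loop above.
    return {block: {time: {layer: {size: np.zeros(size)
                                   for size in cluster_sizes}
                           for layer in ('upper', 'lower')}
                    for time in times}
            for block in range(Nblock)}


normSizes = make_accumulator()
weightedNormSizes = make_accumulator()
logNorm = {size: np.zeros(size) for size in cluster_sizes}
logWeighted = {size: np.zeros(size) for size in cluster_sizes}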