Code Example #1
# Assumed imports; io and tplot are project-local modules for trajectory
# I/O and plotting and are expected to be importable from the project.
import numpy as np
import matplotlib.pyplot as plt
from sklearn import metrics
from sklearn.mixture import GMM  # legacy interface, removed in recent scikit-learn

# Load the segmented trajectories and keep the first 1000.
traj_list = io.load("1_traj_seg.dt")
traj_list = traj_list[:1000]

# Per-trajectory Gaussian-representation features and the precomputed
# pairwise distance matrix.
X = np.fromfile("gaussian_representation.dat", dtype=float)
# Assumption: one fixed-length feature vector per trajectory.
X = X.reshape(len(traj_list), -1)
D = io.load_distance_matrix("distance1.npz")

# Fit a Gaussian mixture model and assign each trajectory to a cluster.
no_of_cluster = 12
gmm = GMM(n_components=no_of_cluster, n_iter=1000)
labels = gmm.fit_predict(X)

#  Postprocessing

# Group the trajectories by cluster label.
clusters = [[] for i in range(no_of_cluster)]
no = len(traj_list)
for i in range(no):
    label = int(labels[i])
    clusters[label].append(traj_list[i])

# Silhouette coefficient computed on the precomputed distance matrix.
silhouette_score = metrics.silhouette_score(D, labels, metric="precomputed",
                                            sample_size=1000)
print("Silhouette Coefficient : %.3f" % silhouette_score)

#  Plotting Clustered Trajectories
color_list = plt.rcParams['axes.prop_cycle'].by_key()['color']
for i in range(no_of_cluster):
    for traj in clusters[i]:
        # One colour per cluster, cycling through matplotlib's default colours.
        next_color = color_list[i % len(color_list)]
        tplot.plot_traj(traj, next_color, alpha=1)

tplot.plot_map()
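
The GMM class above is scikit-learn's legacy mixture interface, which has been removed from recent releases. A minimal sketch of the same clustering step with the current GaussianMixture API, assuming X is a 2-D (n_trajectories, n_features) array:

from sklearn.mixture import GaussianMixture

# Equivalent clustering step with the current scikit-learn API; n_iter becomes max_iter.
gmm = GaussianMixture(n_components=no_of_cluster, max_iter=1000)
labels = gmm.fit_predict(X)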
Code Example #2
    # Keep a reference to a path whose "flied" flag is not set, for closer inspection.
    if not flied:
        suspi = path

    traj_list.append(path)

print(countFlied)
print(countFlied2)

# Commented out: plot every loaded trajectory.
'''
for traj in traj_list:
    tplot.plot_traj(traj)
'''

# Print and plot only the suspicious trajectory.
print(suspi)
tplot.plot_traj(suspi)

tplot.plot_map()

# The script stops here; the code below never runs.
exit(0)


# Load two trajectory files and find the keys they share.
filename1 = "3.traj"
filename2 = "6.traj"

d1 = io.load(filename1)
d2 = io.load(filename2)

key1 = set(d1.keys())
key2 = set(d2.keys())
common = key1.intersection(key2)
    # Generate the normal clusters of synthetic trajectories.
    for i in range(no_of_cluster):
        traj_list = generate_cluster(no_of_traj_each_cluster, traj_length,
                                     noise)
        normal_traj_list += traj_list

    # Each outlier is generated as its own single-trajectory cluster.
    outlier_traj_list = []
    for i in range(no_of_outlier):
        outlier_traj_list += generate_cluster(1, traj_length, noise)

    #  Plotting Generated Trajectories

    plt.figure(1)
    plt.subplot(121)

    for traj in normal_traj_list:
        aplt.plot_traj(traj)

    # Outliers are drawn in red.
    for traj in outlier_traj_list:
        aplt.plot_traj(traj, "r")

    #  Precomputation

    # Pool normal and outlier trajectories and shuffle their order.
    traj_list = normal_traj_list + outlier_traj_list
    random.shuffle(traj_list)

    #  Calculating Distance Matrix
    s.start()
    D = calculate_distance_matrix(traj_list, eps)
    s.stop("Distance matrix calculated")

    #  Clustering
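
The calculate_distance_matrix helper used above is project-specific and not shown here. As an illustration only, a minimal sketch of one way to build a pairwise trajectory distance matrix, using SciPy's symmetric Hausdorff distance (hypothetical helper; the project's actual metric and its eps parameter are not reproduced):

import numpy as np
from scipy.spatial.distance import directed_hausdorff

def hausdorff_distance_matrix(traj_list):
    # Assumes each trajectory is an (n_points, 2) array of coordinates.
    n = len(traj_list)
    D = np.zeros((n, n))
    for i in range(n):
        for j in range(i + 1, n):
            # Symmetric Hausdorff distance between trajectories i and j.
            d = max(directed_hausdorff(traj_list[i], traj_list[j])[0],
                    directed_hausdorff(traj_list[j], traj_list[i])[0])
            D[i, j] = D[j, i] = d
    return D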