def template_clustering(number_clusters, path, links):
    sample = read_sample(path)

    clusters_centroid_link = None
    clusters_single_link = None
    clusters_complete_link = None
    clusters_average_link = None

    visualizer = cluster_visualizer(len(links))
    index_canvas = 0

    if type_link.CENTROID_LINK in links:
        agglomerative_centroid_link = agglomerative(sample, number_clusters, type_link.CENTROID_LINK)

        (ticks, result) = timedcall(agglomerative_centroid_link.process)
        clusters_centroid_link = agglomerative_centroid_link.get_clusters()

        visualizer.append_clusters(clusters_centroid_link, sample, index_canvas)
        visualizer.set_canvas_title('Link: Centroid', index_canvas)
        index_canvas += 1

        print("Sample: ", path, "Link: Centroid", "\tExecution time: ", ticks, "\n")

    if type_link.SINGLE_LINK in links:
        agglomerative_simple_link = agglomerative(sample, number_clusters, type_link.SINGLE_LINK)

        (ticks, result) = timedcall(agglomerative_simple_link.process)
        clusters_single_link = agglomerative_simple_link.get_clusters()

        visualizer.append_clusters(clusters_single_link, sample, index_canvas)
        visualizer.set_canvas_title('Link: Single', index_canvas)
        index_canvas += 1

        print("Sample: ", path, "Link: Single", "\tExecution time: ", ticks, "\n")

    if type_link.COMPLETE_LINK in links:
        agglomerative_complete_link = agglomerative(sample, number_clusters, type_link.COMPLETE_LINK)

        (ticks, result) = timedcall(agglomerative_complete_link.process)
        clusters_complete_link = agglomerative_complete_link.get_clusters()

        visualizer.append_clusters(clusters_complete_link, sample, index_canvas)
        visualizer.set_canvas_title('Link: Complete', index_canvas)
        index_canvas += 1

        print("Sample: ", path, "Link: Complete", "\tExecution time: ", ticks, "\n")

    if type_link.AVERAGE_LINK in links:
        agglomerative_average_link = agglomerative(sample, number_clusters, type_link.AVERAGE_LINK)

        (ticks, result) = timedcall(agglomerative_average_link.process)
        clusters_average_link = agglomerative_average_link.get_clusters()

        visualizer.append_clusters(clusters_average_link, sample, index_canvas)
        visualizer.set_canvas_title('Link: Average', index_canvas)
        index_canvas += 1

        print("Sample: ", path, "Link: Average", "\tExecution time: ", ticks, "\n")

    visualizer.show()
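# Minimal usage sketch (not part of the original examples): the helper name, the sample path
# and the parameter values below are illustrative assumptions; any small 2-D sample readable
# by read_sample() should work.
def run_agglomerative_example():
    template_clustering(2, "Simple1.data", [type_link.SINGLE_LINK, type_link.AVERAGE_LINK])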
def template_clustering(file, radius, order, show_dyn=False, show_conn=False, show_clusters=True,
                        ena_conn_weight=False, ccore_flag=False):
    sample = read_sample(file)

    network = syncnet(sample, radius, enable_conn_weight=ena_conn_weight, ccore=ccore_flag)

    (ticks, (dyn_time, dyn_phase)) = timedcall(network.process, order, solve_type.FAST, show_dyn)
    print("Sample: ", file, "\t\tExecution time: ", ticks, "\n")

    if show_dyn:
        draw_dynamics(dyn_time, dyn_phase, x_title="Time", y_title="Phase", y_lim=[0, 2 * 3.14])

    if show_conn:
        network.show_network()

    if show_clusters:
        clusters = network.get_clusters(0.1)
        draw_clusters(sample, clusters)
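# Illustrative usage (assumed values, not from the original example): the connectivity radius
# and synchronization order are data-dependent guesses; the path is a hypothetical 2-D sample file.
def run_syncnet_example():
    template_clustering("Simple1.data", 1.0, 0.998, show_dyn=True, show_clusters=True)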
def template_clustering(number_clusters, path, branching_factor=5, max_node_entries=5, initial_diameter=0.0,
                        type_measurement=measurement_type.CENTROID_EUCLIDIAN_DISTANCE, entry_size_limit=200, ccore=True):
    sample = read_sample(path)

    birch_instance = birch(sample, number_clusters, branching_factor, max_node_entries, initial_diameter,
                           type_measurement, entry_size_limit, ccore)

    (ticks, result) = timedcall(birch_instance.process)
    print("Sample: ", path, "\t\tExecution time: ", ticks, "\n")

    clusters = birch_instance.get_clusters()
    draw_clusters(sample, clusters)
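# Illustrative usage (assumed values): two clusters on a hypothetical 2-D sample file,
# relying on the BIRCH tree defaults declared above.
def run_birch_example():
    template_clustering(2, "Simple1.data")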
def template_clustering(number_clusters, path, ccore=True):
    sample = read_sample(path)

    hierarchical_instance = hierarchical(sample, number_clusters, ccore)

    (ticks, result) = timedcall(hierarchical_instance.process)
    print("Sample: ", path, "\t\tExecution time: ", ticks, "\n")

    clusters = hierarchical_instance.get_clusters()
    draw_clusters(sample, clusters)
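# Illustrative usage (assumed values): the path is a hypothetical sample file.
def run_hierarchical_example():
    template_clustering(2, "Simple1.data")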
def template_clustering(start_centers, path, tolerance=0.25, ccore=True):
    sample = read_sample(path)

    kmeans_instance = kmeans(sample, start_centers, tolerance, ccore)

    (ticks, result) = timedcall(kmeans_instance.process)
    clusters = kmeans_instance.get_clusters()

    print("Sample: ", path, "\t\tExecution time: ", ticks, "\n")
    draw_clusters(sample, clusters)
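# Illustrative usage (assumed values): the initial centers are guesses near the expected
# cluster positions of a hypothetical two-cluster 2-D sample.
def run_kmeans_example():
    template_clustering([[3.7, 5.5], [6.7, 7.5]], "Simple1.data")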
def template_clustering(start_medoids, path, tolerance=0.25):
    sample = read_sample(path)

    kmedoids_instance = kmedoids(sample, start_medoids, tolerance)

    (ticks, result) = timedcall(kmedoids_instance.process)
    clusters = kmedoids_instance.get_clusters()

    print("Sample: ", path, "\t\tExecution time: ", ticks, "\n")
    draw_clusters(sample, clusters)
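# Illustrative usage (assumed values): the initial medoids are given here as indexes of points
# in the sample, which is an assumption about this kmedoids version; the path is a hypothetical
# sample file.
def run_kmedoids_example():
    template_clustering([2, 12], "Simple1.data")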
def template_segmentation_image(source, map_som_size=[5, 5], average_neighbors=5, sync_order=0.998,
                                show_dyn=False, show_som_map=False):
    data = read_image(source)

    network = syncsom(data, map_som_size[0], map_som_size[1])
    (ticks, (dyn_time, dyn_phase)) = timedcall(network.process, average_neighbors, show_dyn, sync_order)

    print("Sample: ", source, "\t\tExecution time: ", ticks, "\t\tWinners: ", network.som_layer.get_winner_number(), "\n")

    if show_dyn:
        draw_dynamics(dyn_time, dyn_phase)

    clusters = network.get_clusters()
    draw_image_mask_segments(source, clusters)
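# Illustrative usage (assumed values): the image path is hypothetical and the SOM size is a
# rough guess; a larger map gives finer color segmentation at higher cost.
def run_syncsom_segmentation_example():
    template_segmentation_image("building.png", map_som_size=[8, 8], show_dyn=False)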
def template_clustering(path, radius, cluster_numbers, threshold, draw=True, ccore=True):
    sample = read_sample(path)

    rock_instance = rock(sample, radius, cluster_numbers, threshold, ccore)

    (ticks, result) = timedcall(rock_instance.process)
    clusters = rock_instance.get_clusters()

    print("Sample: ", path, "\t\tExecution time: ", ticks, "\n")

    if draw:
        draw_clusters(sample, clusters)
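# Illustrative usage (assumed values): the connectivity radius and link threshold are
# data-dependent guesses; the path is a hypothetical 2-D sample file.
def run_rock_example():
    template_clustering("Simple1.data", 1.0, 2, 0.5)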
def template_clustering(radius, neighb, path, invisible_axes=False, ccore=True):
    sample = read_sample(path)

    dbscan_instance = dbscan(sample, radius, neighb, ccore)

    (ticks, result) = timedcall(dbscan_instance.process)
    clusters = dbscan_instance.get_clusters()
    noise = dbscan_instance.get_noise()

    print("Sample: ", path, "\t\tExecution time: ", ticks, "\n")
    draw_clusters(sample, clusters, [], '.', hide_axes=invisible_axes)
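# Illustrative usage (assumed values): the eps-radius and minimum neighborhood size are
# data-dependent guesses; the path is a hypothetical 2-D sample file.
def run_dbscan_example():
    template_clustering(0.5, 3, "Simple1.data")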
def template_clustering(number_clusters, path, number_represent_points=5, compression=0.5, draw=True, ccore_flag=False):
    sample = read_sample(path)

    cure_instance = cure(sample, number_clusters, number_represent_points, compression, ccore_flag)

    (ticks, result) = timedcall(cure_instance.process)
    clusters = cure_instance.get_clusters()

    print("Sample: ", path, "\t\tExecution time: ", ticks, "\n")

    if draw:
        if ccore_flag:
            draw_clusters(sample, clusters)
        else:
            draw_clusters(None, clusters)
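# Illustrative usage (assumed values): two clusters with the default number of representative
# points and compression; the path is a hypothetical sample file.
def run_cure_example():
    template_clustering(2, "Simple1.data")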
def template_clustering(start_centers, path, tolerance=0.025, criterion=splitting_type.BAYESIAN_INFORMATION_CRITERION, ccore=False):
    sample = read_sample(path)

    xmeans_instance = xmeans(sample, start_centers, 20, tolerance, criterion, ccore)

    (ticks, result) = timedcall(xmeans_instance.process)
    clusters = xmeans_instance.get_clusters()

    criterion_string = "UNKNOWN"
    if criterion == splitting_type.BAYESIAN_INFORMATION_CRITERION:
        criterion_string = "BAYESIAN_INFORMATION_CRITERION"
    elif criterion == splitting_type.MINIMUM_NOISELESS_DESCRIPTION_LENGTH:
        criterion_string = "MINIMUM_NOISELESS_DESCRIPTION_LENGTH"

    print("Sample: ", path, "\tExecution time: ", ticks, "Number of clusters: ", len(clusters), criterion_string, "\n")
    draw_clusters(sample, clusters)
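# Illustrative usage (assumed values): the initial centers only seed the search, since X-Means
# may split them further (up to the limit of 20 hard-coded above); the path is a hypothetical
# sample file.
def run_xmeans_example():
    template_clustering([[3.7, 5.5], [6.7, 7.5]], "Simple1.data")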
def template_clustering(radius, neighb, path, invisible_axes=False, ccore=True):
    sample = read_sample(path)

    dbscan_instance = dbscan(sample, radius, neighb, ccore)

    (ticks, result) = timedcall(dbscan_instance.process)
    clusters = dbscan_instance.get_clusters()
    noise = dbscan_instance.get_noise()

    visualizer = cluster_visualizer()
    visualizer.append_clusters(clusters, sample)
    visualizer.append_cluster(noise, sample, marker='x')
    visualizer.show()

    print("Sample: ", path, "\t\tExecution time: ", ticks, "\n")
def template_clustering(start_centers, path, tolerance=0.25, ccore=True):
    sample = read_sample(path)

    kmeans_instance = kmeans(sample, start_centers, tolerance, ccore)

    (ticks, result) = timedcall(kmeans_instance.process)
    clusters = kmeans_instance.get_clusters()
    centers = kmeans_instance.get_centers()

    print("Sample: ", path, "\t\tExecution time: ", ticks, "\n")

    visualizer = cluster_visualizer()
    visualizer.append_clusters(clusters, sample)
    visualizer.append_cluster(start_centers, marker='*', markersize=20)
    visualizer.append_cluster(centers, marker='*', markersize=20)
    visualizer.show()
def template_clustering(file, radius, order, show_dyn=False, show_conn=False, show_clusters=True,
                        ena_conn_weight=False, ccore_flag=True):
    sample = read_sample(file)

    network = syncnet(sample, radius, enable_conn_weight=ena_conn_weight, ccore=ccore_flag)

    (ticks, analyser) = timedcall(network.process, order, solve_type.FAST, show_dyn)
    print("Sample: ", file, "\t\tExecution time: ", ticks, "\n")

    if show_dyn:
        sync_visualizer.show_output_dynamic(analyser)
        sync_visualizer.animate_output_dynamic(analyser)

    if show_conn and not ccore_flag:
        network.show_network()

    if show_clusters:
        clusters = analyser.allocate_clusters()
        draw_clusters(sample, clusters)
def template_clustering(file, map_size, trust_order, sync_order=0.999, show_dyn=False,
                        show_layer1=False, show_layer2=False, show_clusters=True):
    # Read sample
    sample = read_sample(file)

    # Create network
    network = syncsom(sample, map_size[0], map_size[1])

    # Run processing
    (ticks, (dyn_time, dyn_phase)) = timedcall(network.process, trust_order, show_dyn, sync_order)
    print("Sample: ", file, "\t\tExecution time: ", ticks, "\n")

    # Show dynamic of the last layer.
    if show_dyn:
        draw_dynamics(dyn_time, dyn_phase, x_title="Time", y_title="Phase", y_lim=[0, 2 * 3.14])

    if show_clusters:
        clusters = network.get_som_clusters()
        draw_clusters(network.som_layer.weights, clusters)

    # Show network stuff.
    if show_layer1:
        network.show_som_layer()

    if show_layer2:
        network.show_sync_layer()

    if show_clusters:
        clusters = network.get_clusters()
        draw_clusters(sample, clusters)
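# Illustrative usage (assumed values): the map size and trust_order are rough guesses for a
# small 2-D sample; the path is a hypothetical sample file.
def run_syncsom_example():
    template_clustering("Simple1.data", [4, 4], 5, show_dyn=False, show_clusters=True)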
def template_segmentation_image(source, color_radius, object_radius, noise_size, show_dyn):
    data = read_image(source)
    print("Pixel dimension: ", len(data[0]))

    network = syncnet(data, color_radius, ccore=True)
    print("Network has been created")

    (ticks, (t, dyn)) = timedcall(network.process, 0.9995, solve_type.FAST, show_dyn)
    # (t, dyn) = network.process(0.998, solve_type.FAST, show_dyn);

    print("Sample: ", source, "\t\tExecution time: ", ticks, "\n")

    if show_dyn:
        draw_dynamics(t, dyn)

    clusters = network.get_clusters()
    real_clusters = [cluster for cluster in clusters if len(cluster) > noise_size]
    draw_image_mask_segments(source, real_clusters)

    if object_radius is None:
        return

    # continue analysis
    pointer_image = Image.open(source)
    image_size = pointer_image.size

    object_colored_clusters = []
    object_colored_dynamics = []
    total_dyn = []

    for cluster in clusters:
        coordinates = []
        for index in cluster:
            y = floor(index / image_size[0])
            x = index - y * image_size[0]
            coordinates.append([x, y])

        print(coordinates)

        # perform clustering analysis of the colored objects
        if network is not None:
            del network
            network = None

        if len(coordinates) < noise_size:
            continue

        network = syncnet(coordinates, object_radius, ccore=True)
        (t, dyn) = network.process(0.999, solve_type.FAST, show_dyn)

        if show_dyn:
            object_colored_dynamics.append((t, dyn))

        object_clusters = network.get_clusters()

        # decode it
        real_description_clusters = []
        for object_cluster in object_clusters:
            real_description = []
            for index_object in object_cluster:
                real_description.append(cluster[index_object])

            real_description_clusters.append(real_description)

            if len(real_description) > noise_size:
                object_colored_clusters.append(real_description)

        # draw_image_mask_segments(source, [ cluster ]);
        # draw_image_mask_segments(source, real_description_clusters);

    draw_image_mask_segments(source, object_colored_clusters)

    if show_dyn:
        draw_dynamics_set(object_colored_dynamics, None, None, None, [0, 2 * 3.14], False, False)
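# Illustrative usage (assumed values): color_radius, object_radius and noise_size are
# image-dependent guesses; passing object_radius=None stops after the color segmentation
# stage. The image path is hypothetical.
def run_image_segmentation_example():
    template_segmentation_image("building.png", 128, 4, 10, False)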