Example #1
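Most of the snippets in this section are shown without their imports. Assuming the usual pyclustering module layout (exact paths have moved between releases, so treat this as an approximation), Example #1 would need roughly the following; the later examples additionally use draw_clusters, read_image and draw_image_mask_segments from pyclustering.utils.

# Assumed imports for Example #1 (module paths may differ by pyclustering release).
from pyclustering.nnet.syncsom import syncsom
from pyclustering.cluster import cluster_visualizer
from pyclustering.utils import read_sample, timedcall, draw_dynamics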
def template_clustering(file, map_size, radius, sync_order=0.999, show_dyn=False, show_layer1=False, show_layer2=False, show_clusters=True):
    # Read the sample from file.
    sample = read_sample(file)

    # Create the two-layer network (SOM layer + sync layer).
    network = syncsom(sample, map_size[0], map_size[1], radius)

    # Run processing and measure execution time.
    (ticks, (dyn_time, dyn_phase)) = timedcall(network.process, show_dyn, sync_order)
    print("Sample: ", file, "\t\tExecution time: ", ticks, "\n")

    # Show dynamics of the last (sync) layer.
    if show_dyn:
        draw_dynamics(dyn_time, dyn_phase, x_title="Time", y_title="Phase", y_lim=[0, 3.14])

    # Show clusters obtained on the SOM layer (in the space of neuron weights).
    if show_clusters:
        clusters = network.get_som_clusters()

        visualizer = cluster_visualizer()
        visualizer.append_clusters(clusters, network.som_layer.weights)
        visualizer.show()

    # Show the network layers.
    if show_layer1:
        network.show_som_layer()

    if show_layer2:
        network.show_sync_layer()

    # Show the final clusters in the space of the input data.
    if show_clusters:
        clusters = network.get_clusters()

        visualizer = cluster_visualizer()
        visualizer.append_clusters(clusters, sample)
        visualizer.show()
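A minimal call of the template above might look like the following; the sample file, map size and radius are illustrative choices, not values taken from the original script.

from pyclustering.samples.definitions import SIMPLE_SAMPLES

# Hypothetical invocation: 4x4 SOM layer, connection radius 1.0,
# show the phase dynamics of the sync layer and the resulting clusters.
template_clustering(SIMPLE_SAMPLES.SAMPLE_SIMPLE3, [4, 4], 1.0, show_dyn=True)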
Example #2
    def templateLengthProcessData(self, file, som_map_size, avg_num_conn, eps, expected_cluster_length):
        result_testing = False

        # If phases cross each other because of the random part of the network, then we should try again.
        for attempt in range(5):
            sample = read_sample(file)
            network = syncsom(sample, som_map_size[0], som_map_size[1])
            network.process(avg_num_conn, collect_dynamic=False, order=eps)

            clusters = network.get_clusters()

            obtained_cluster_sizes = [len(cluster) for cluster in clusters]
            if len(sample) != sum(obtained_cluster_sizes):
                continue

            obtained_cluster_sizes.sort()
            expected_cluster_length.sort()
            #print(obtained_cluster_sizes, expected_cluster_length)
            if obtained_cluster_sizes != expected_cluster_length:
                continue

            # Unit test passed.
            result_testing = True
            break

        assert result_testing
Example #3
    def templateLengthProcessData(self, file, som_map_size, radius, eps,
                                  expected_cluster_length):
        result_testing = False

        # If phases cross each other because of the random part of the network, then we should try again.
        for _ in range(0, 5, 1):
            sample = read_sample(file)
            network = syncsom(sample, som_map_size[0], som_map_size[1], radius)
            network.process(collect_dynamic=False, order=eps)

            clusters = network.get_clusters()

            obtained_cluster_sizes = [len(cluster) for cluster in clusters]
            if (len(sample) != sum(obtained_cluster_sizes)):
                continue

            obtained_cluster_sizes.sort()
            expected_cluster_length.sort()
            #print(obtained_cluster_sizes, expected_cluster_length);
            if (obtained_cluster_sizes != expected_cluster_length):
                continue

            # Unit test passed.
            result_testing = True
            break

        assert result_testing
Example #4
def template_clustering(file, map_size, trust_order, sync_order=0.999, show_dyn=False, show_layer1=False, show_layer2=False, show_clusters=True):
    # Read the sample from file.
    sample = read_sample(file)

    # Create the two-layer network (SOM layer + sync layer).
    network = syncsom(sample, map_size[0], map_size[1])

    # Run processing and measure execution time.
    (ticks, (dyn_time, dyn_phase)) = timedcall(network.process, trust_order, show_dyn, sync_order)
    print("Sample: ", file, "\t\tExecution time: ", ticks, "\n")

    # Show dynamics of the last (sync) layer.
    if show_dyn:
        draw_dynamics(dyn_time, dyn_phase, x_title="Time", y_title="Phase", y_lim=[0, 2 * 3.14])

    # Show clusters obtained on the SOM layer (in the space of neuron weights).
    if show_clusters:
        clusters = network.get_som_clusters()
        draw_clusters(network.som_layer.weights, clusters)

    # Show the network layers.
    if show_layer1:
        network.show_som_layer()

    if show_layer2:
        network.show_sync_layer()

    # Show the final clusters in the space of the input data.
    if show_clusters:
        clusters = network.get_clusters()
        draw_clusters(sample, clusters)
Example #5
    def templateLengthProcessData(self, file, som_map_size, avg_num_conn, eps, expected_cluster_length):
        result_testing = False

        # If phases cross each other because of the random part of the network, then we should try again.
        for attempt in range(3):
            sample = read_sample(file)
            network = syncsom(sample, som_map_size[0], som_map_size[1])
            network.process(avg_num_conn, collect_dynamic=False, order=eps)

            clusters = network.get_clusters()

            obtained_cluster_sizes = [len(cluster) for cluster in clusters]
            if len(sample) != sum(obtained_cluster_sizes):
                continue

            obtained_cluster_sizes.sort()
            expected_cluster_length.sort()
            #print(obtained_cluster_sizes, expected_cluster_length)
            if obtained_cluster_sizes != expected_cluster_length:
                continue

            # Unit test passed.
            result_testing = True
            break

        assert result_testing
Example #6
def experiment_execution_one_cluster_dependence(layer_first_size, radius,
                                                order):
    print("Experiment: map size =", layer_first_size[0] * layer_first_size[1],
          "radius =", radius, "order =", order)
    cluster_sizes = [
        10, 20, 30, 40, 50, 60, 70, 80, 90, 100, 110, 120, 130, 140, 150
    ]

    for cluster_size in cluster_sizes:
        # generate data sets
        dataset = []
        dataset += [[random(), random()] for _ in range(cluster_size)]

        general_value = 0.0
        amount_attempt = 5
        for _ in range(amount_attempt):
            network = syncsom(dataset, layer_first_size[0],
                              layer_first_size[1], radius)
            (ticks, (dyn_time, dyn_phase)) = timedcall(network.process, False,
                                                       order)
            general_value += ticks

        print("Sample: ", cluster_size, "\t\tExecution time: ",
              general_value / float(amount_attempt))

    print("\n")
Example #7
    def testShowLayersProcessing(self):
        sample = read_sample(SIMPLE_SAMPLES.SAMPLE_SIMPLE1)

        network = syncsom(sample, 4, 4, 1.0)
        network.process(collect_dynamic=False, order=0.99)

        network.show_som_layer()
        network.show_sync_layer()
Example #8
    def testShowLayersProcessing(self):
        sample = read_sample(SIMPLE_SAMPLES.SAMPLE_SIMPLE1)

        network = syncsom(sample, 4, 4, 1.0)
        network.process(collect_dynamic=False, order=0.99)

        network.show_som_layer()
        network.show_sync_layer()
Example #9
def template_segmentation_image(source, map_som_size=[5, 5], radius=128.0, sync_order=0.998, show_dyn=False, show_som_map=False):
    data = read_image(source)

    network = syncsom(data, map_som_size[0], map_som_size[1], 1.0)
    (ticks, (dyn_time, dyn_phase)) = timedcall(network.process, show_dyn, sync_order)
    print("Sample: ", source, "\t\tExecution time: ", ticks, "\t\tWinners: ", network.som_layer.get_winner_number(), "\n")

    if show_dyn:
        draw_dynamics(dyn_time, dyn_phase)

    clusters = network.get_clusters()
    draw_image_mask_segments(source, clusters)
Example #10
def template_segmentation_image(source, map_som_size=[5, 5], average_neighbors=5, sync_order=0.998, show_dyn=False, show_som_map=False):
    data = read_image(source)

    network = syncsom(data, map_som_size[0], map_som_size[1])
    (ticks, (dyn_time, dyn_phase)) = timedcall(network.process, average_neighbors, show_dyn, sync_order)
    print("Sample: ", source, "\t\tExecution time: ", ticks, "\t\tWinners: ", network.som_layer.get_winner_number(), "\n")

    if show_dyn:
        draw_dynamics(dyn_time, dyn_phase)

    clusters = network.get_clusters()
    draw_image_mask_segments(source, clusters)
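A minimal call of either segmentation template above, with a hypothetical image path:

# "building.png" is a placeholder; any image readable by read_image can be used.
template_segmentation_image("building.png", map_som_size=[5, 5], show_dyn=False)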
Example #11
    def templateLengthSomCluster(self, file, som_map_size, avg_num_conn, eps):
        sample = read_sample(file)
        network = syncsom(sample, som_map_size[0], som_map_size[1])
        network.process(avg_num_conn, collect_dynamic=False, order=eps)

        # Check unique
        som_clusters = network.get_som_clusters()
        indexes = set()

        for som_cluster in som_clusters:
            for index in som_cluster:
                assert index not in indexes
                indexes.add(index)
Example #12
    def templateLengthSomCluster(self, file, som_map_size, avg_num_conn, eps):
        sample = read_sample(file)
        network = syncsom(sample, som_map_size[0], som_map_size[1])
        network.process(avg_num_conn, collect_dynamic=False, order=eps)

        # Check unique
        som_clusters = network.get_som_clusters()
        indexes = set()

        for som_cluster in som_clusters:
            for index in som_cluster:
                assert (index in indexes) is False
                indexes.add(index)
Example #13
    def templateLengthSomCluster(self, file, som_map_size, radius, eps):
        sample = read_sample(file)

        network = syncsom(sample, som_map_size[0], som_map_size[1], radius)
        network.process(collect_dynamic=False, order=eps)

        # Check unique
        som_clusters = network.get_som_clusters()
        indexes = set()

        for som_cluster in som_clusters:
            for index in som_cluster:
                assert index not in indexes
                indexes.add(index)
Example #14
def experiment_execution_one_cluster_dependence(layer_first_size, radius, order):
    print("Experiment: map size =", layer_first_size[0] * layer_first_size[1], "radius =", radius, "order =", order)
    cluster_sizes = [10, 20, 30, 40, 50, 60, 70, 80, 90, 100, 110, 120, 130, 140, 150]

    for cluster_size in cluster_sizes:
        # generate data set
        dataset = []
        dataset += [[random(), random()] for _ in range(cluster_size)]

        general_value = 0.0
        amount_attempt = 5
        for _ in range(amount_attempt):
            network = syncsom(dataset, layer_first_size[0], layer_first_size[1], radius)
            (ticks, (dyn_time, dyn_phase)) = timedcall(network.process, False, order)
            general_value += ticks

        print("Sample: ", cluster_size, "\t\tExecution time: ", general_value / float(amount_attempt))

    print("\n")
Example #15
def template_clustering(file,
                        map_size,
                        trust_order,
                        sync_order=0.999,
                        show_dyn=False,
                        show_layer1=False,
                        show_layer2=False,
                        show_clusters=True):
    # Read sample
    sample = read_sample(file)

    # Create network
    network = syncsom(sample, map_size[0], map_size[1])

    # Run processing
    (ticks, (dyn_time, dyn_phase)) = timedcall(network.process, trust_order,
                                               show_dyn, sync_order)
    print("Sample: ", file, "\t\tExecution time: ", ticks, "\n")

    # Show dynamics of the last layer.
    if show_dyn:
        draw_dynamics(dyn_time,
                      dyn_phase,
                      x_title="Time",
                      y_title="Phase",
                      y_lim=[0, 2 * 3.14])

    if show_clusters:
        clusters = network.get_som_clusters()
        draw_clusters(network.som_layer.weights, clusters)

    # Show the network layers.
    if show_layer1:
        network.show_som_layer()

    if show_layer2:
        network.show_sync_layer()

    if show_clusters:
        clusters = network.get_clusters()
        draw_clusters(sample, clusters)
Example #16
def process_syncsom(sample):
    instance = syncsom(sample, 1, NUMBER_CLUSTERS)
    (ticks, _) = timedcall(instance.process, 0, False, 0.998)
    return ticks
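process_syncsom relies on a module-level NUMBER_CLUSTERS constant that is defined elsewhere in the benchmark. A self-contained way to exercise it might look like this; the constant value and the sample file are assumptions for illustration.

from pyclustering.nnet.syncsom import syncsom
from pyclustering.samples.definitions import SIMPLE_SAMPLES
from pyclustering.utils import read_sample, timedcall

NUMBER_CLUSTERS = 3  # assumed value; the original benchmark defines it elsewhere

sample = read_sample(SIMPLE_SAMPLES.SAMPLE_SIMPLE3)
print("syncsom execution time:", process_syncsom(sample))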
Example #17
import numpy as np

from sklearn.decomposition import TruncatedSVD
from sklearn.feature_extraction.text import TfidfVectorizer
from sklearn.pipeline import make_pipeline
from sklearn.preprocessing import Normalizer

from pyclustering.cluster import cluster_visualizer
from pyclustering.nnet.cnn import cnn_network, cnn_visualizer
from pyclustering.nnet.syncsom import syncsom

# get_train() and read_articles() are project-specific helpers defined elsewhere.
dataset = get_train(read_articles('dr_medi'))
# labels = dataset.target
# true_k = np.unique(dataset).shape[0]
true_k = np.unique(dataset[:8]).shape[0]

vectorizer = TfidfVectorizer()
# X = vectorizer.fit_transform(dataset)
X = vectorizer.fit_transform(get_train(read_articles('dr_medi')))
print(X.shape)
svd = TruncatedSVD(true_k)
lsa = make_pipeline(svd, Normalizer(copy=False))

X = lsa.fit_transform(X)

network = syncsom(X, 2, 2, 0.9)
(dyn_time, dyn_phase) = network.process(True, 0.999)
# network.show_sync_layer();
clusters = network.get_clusters()
visualizer = cluster_visualizer()
visualizer.append_clusters(clusters, X)
visualizer.show()

# kmeans_instance = kmeans(X, [[ 0.93036182, -0.20536988, -0.17834053,  0.03102577,  0.01767965, -0.06377806, 0.22167392, -0.077218]]);
#
# kmeans_instance.process();
# clusters = kmeans_instance.get_centers();
# print(clusters)
#
# network = som(5, 5, X, 100);
#
Example #18
def process_syncsom(sample):
    instance = syncsom(sample, 1, NUMBER_CLUSTERS)
    (ticks, _) = timedcall(instance.process, 0, False, 0.998)
    return ticks