Example #1
    def som(self, m, n, dim, iterations):
        # step 4
        trainData = numpy.zeros((len(self.contrastPoints), 1))

        for i in range(len(self.contrastPoints)):
            trainData[i] = [self.contrastPoints[i]]

        som = SOM(m, n, dim, iterations)
        som.train(trainData)    
        mapped = som.map_vects(trainData)
        d0 = 0
        d1 = 0
        d2 = 0

        # The sum of the BMU's row and column indices identifies one of the
        # SOM cells (0, 1 or 2); keep the first contrast value mapped to each
        # cell. `loc` avoids shadowing the grid-size parameter m.
        for d, loc in zip(trainData, mapped):
            mapv = loc[0] + loc[1]

            if d0 == 0 and mapv == 0:
                d0 = d[0] if d0 < d[0] else d0

            if d1 == 0 and mapv == 1:
                d1 = d[0] if d1 < d[0] else d1

            if d2 == 0 and mapv == 2:
                d2 = d[0] if d2 < d[0] else d2
                
        # indices of the smallest, middle and largest representative value
        values = [d0, d1, d2]
        order = sorted(range(3), key=lambda k: values[k])
        least, mean, most = order
        print(d0, d1, d2)
        print(least, most, mean)
        self.mappingDict = {'least':  least, 'mean': mean, 'most': most}  
        self.blocksMapping = mapped
Example #2
def SelfOrganizingMap(Feature_List):
    som = SOM(SOM_LINES, SOM_COLS, len(Feature_List[0]), SOM_ITERATIONS)
    som.train(Feature_List)

    image_grid = som.get_centroids()
    mapped = som.map_vects(Feature_List)

    return mapped, image_grid
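SOM_LINES, SOM_COLS and SOM_ITERATIONS are module-level constants that are not shown in this snippet. A minimal usage sketch with assumed placeholder values (not taken from the original module):

SOM_LINES = 10        # assumed grid height
SOM_COLS = 10         # assumed grid width
SOM_ITERATIONS = 400  # assumed number of training iterations

feature_list = [[0.1, 0.2, 0.3], [0.4, 0.5, 0.6], [0.7, 0.8, 0.9]]  # toy data
mapped, image_grid = SelfOrganizingMap(feature_list)
print(mapped)  # BMU grid coordinates for each feature vector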
Example #3
def call_som(m, n, dim, input_data, weight_after_insertion=None, alpha=None, sigma=None):
    # use the shape of the input data to compute the SOM iteration count
    iter_no = input_data.shape[0]*10
    som = SOM(m, n, dim, weight_after_insertion, n_iterations= iter_no)
    trained_weight = som.train(input_data)
    mapped = som.map_vects(input_data)
    result = np.array(mapped)
    return trained_weight, result
Example #4
def SelfOrganizingMap(feature_list):
    no_features = len(feature_list[0])
    print('Clustering', no_features, 'features')
    som = SOM(SOM_LINES, SOM_COLS, no_features, SOM_ITERATIONS, alpha=ALPHA, sigma=SIGMA)
    som.train(feature_list)

    image_grid = som.get_centroids()
    mapped = som.map_vects(feature_list)

    return mapped, image_grid
Example #5
def call_som(m,
             n,
             dim,
             input_data,
             weight_after_insertion=None,
             alpha=None,
             sigma=None):
    som = SOM(m, n, dim, weight_after_insertion)
    trained_weight = som.train(input_data)
    mapped = som.map_vects(input_data)
    result = np.array(mapped)
    return trained_weight, result
Example #6
def main():
    # Training inputs: credit data (the original RGB colour inputs are kept commented out below)
    credits_data = pd.read_csv("testdata/credit_train.csv", header=None)
    columns = credits_data.columns
    train_data = credits_data[columns[:-1]].copy().values
    label_data = credits_data[columns[-1]].copy().values
    train_data_scale = preprocessing.scale(train_data)
    # colors = np.array(
    #     [[0., 0., 0.],
    #      [0., 0., 1.],
    #      [0., 0., 0.5],
    #      [0.125, 0.529, 1.0],
    #      [0.33, 0.4, 0.67],
    #      [0.6, 0.5, 1.0],
    #      [0., 1., 0.],
    #      [1., 0., 0.],
    #      [0., 1., 1.],
    #      [1., 0., 1.],
    #      [1., 1., 0.],
    #      [1., 1., 1.],
    #      [.33, .33, .33],
    #      [.5, .5, .5],
    #      [.66, .66, .66]])
    # color_names = \
    #     ['black', 'blue', 'darkblue', 'skyblue',
    #      'greyblue', 'lilac', 'green', 'red',
    #      'cyan', 'violet', 'yellow', 'white',
    #      'darkgrey', 'mediumgrey', 'lightgrey']

    # Train a 1x2 SOM with 400 iterations
    som = SOM(1, 2, len(columns) - 1, 400)
    som.train(train_data_scale)

    # Get output grid
    image_grid = som.get_centroids()

    # Map each scaled training vector to its closest neuron
    mapped = som.map_vects(train_data_scale)

    # Plot
    plt.imshow(image_grid)
    plt.title('Color SOM')
    for i, m in enumerate(mapped):
        plt.text(m[1],
                 m[0],
                 label_data[i],
                 ha='center',
                 va='center',
                 bbox=dict(facecolor='white', alpha=0.5, lw=0))
    plt.show()
Example #7
def SortingSelfOrganizingMap(Feature_List):
    # Train a SS_LINES x SS_COLS self-organizing map using NUM_ITERATIONS iterations
    som = SOM(SS_LINES, SS_COLS, len(Feature_List[0]), NUM_ITERATIONS)
    som.train(Feature_List)
    mapped = som.map_vects(Feature_List)

    # Grayscale landscape
    sorting_landscape = np.zeros((SS_LINES, SS_COLS), np.float16)

    for i in range(len(mapped)):
        lin = mapped[i][0]
        col = mapped[i][1]
        sorting_landscape[lin][col] = Feature_List[i][0]

    io.imsave('sorted_landscape.png', sorting_landscape)
Example #8
    def som3(self, m, n, dim, iterations):
        # step 4
        trainData = numpy.zeros((len(self.contrastPoints), 3))

        for i in range(len(self.contrastPoints)):
            trainData[i] = [self.diagonalContrast[i], self.verticalContrast[i], self.horizontalContrast[i]]

        som = SOM(m, n, dim, iterations)
        som.train(trainData)    
        mapped = som.map_vects(trainData)
        d0 = 0
        d1 = 0
        d2 = 0

        # The sum of the BMU's row and column indices identifies one of the
        # SOM cells (0, 1 or 2); keep the first diagonal-contrast value mapped
        # to each cell. `loc` avoids shadowing the grid-size parameter m.
        for d, loc in zip(trainData, mapped):
            mapv = loc[0] + loc[1]

            if d0 == 0 and mapv == 0:
                d0 = d[0] if d0 < d[0] else d0

            if d1 == 0 and mapv == 1:
                d1 = d[0] if d1 < d[0] else d1

            if d2 == 0 and mapv == 2:
                d2 = d[0] if d2 < d[0] else d2
                
        # indices of the largest, smallest and remaining (middle) value
        arr = numpy.array([d0, d1, d2])
        most = int(numpy.argmax(arr))
        least = int(numpy.argmin(arr))
        mean = 3 - most - least if most != least else most
       
        self.mappingDict = {'least':  least, 'mean': mean, 'most': most}  
        self.blocksMapping = mapped
        self.neuronsMap = som.get_centroids()
Example #9
n_iter = 100
som = SOM(m, n, 3, n_iter)
for iter_no in range(n_iter):
    #Train with each vector one by one
    for i in range(len(data)):
        som(data[i], iter_no)

#Store a centroid grid for easy retrieval later on
centroid_grid = [[] for i in range(m)]
weights = som.get_weights()
locations = som.get_locations()
for i, loc in enumerate(locations):
    centroid_grid[loc[0]].append(weights[i].numpy())

#Get output grid
image_grid = centroid_grid

#Map colours to their closest neurons
mapped = som.map_vects(torch.Tensor(colors))

#Plot
plt.imshow(image_grid)
plt.title('Color SOM')
for i, m in enumerate(mapped):
    plt.text(m[1],
             m[0],
             color_names[i],
             ha='center',
             va='center',
             bbox=dict(facecolor='white', alpha=0.5, lw=0))
plt.show()
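The snippet above assumes that m, n, data, colors and color_names are defined earlier in the file. A minimal setup sketch with assumed values, mirroring the classic colour-SOM demo (compare the commented-out colour table in Example #6):

import torch

colors = torch.Tensor([[0., 0., 0.], [0., 0., 1.], [1., 0., 0.],
                       [0., 1., 0.], [1., 1., 0.], [1., 1., 1.]])
color_names = ['black', 'blue', 'red', 'green', 'yellow', 'white']
data = colors      # train on the same colour vectors that are mapped afterwards
m, n = 20, 30      # assumed SOM grid size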
Example #10
n_iter = 100
som = SOM(m, n, 3, n_iter)
for iter_no in range(n_iter):
    #Train with each vector one by one
    for i in range(len(data)):
        som(data[i], iter_no)

#Store a centroid grid for easy retrieval later on
centroid_grid = [[] for i in range(m)]
weights = som.get_weights()
locations = som.get_locations()
for i, loc in enumerate(locations):
    centroid_grid[loc[0]].append(weights[i].numpy())

#Get output grid
image_grid = centroid_grid

#Map colours to their closest neurons
mapped = som.map_vects(colors)

#Plot
plt.imshow(image_grid)
plt.title('Color SOM')
for i, m in enumerate(mapped):
    plt.text(m[1],
             m[0],
             color_names[i],
             ha='center',
             va='center',
             bbox=dict(facecolor='white', alpha=0.5, lw=0))
plt.show()
Example #11
def display_digit(num):
    label = y_train[num].argmax(axis=0)
    image = x_train[num].reshape([28, 28])
    plt.title('Example: %d  Label: %d' % (num, label))
    plt.imshow(image, cmap=plt.get_cmap('gray_r'))
    plt.show()


display_digit(ran.randint(0, x_train.shape[0]))

# Import the SOM class and train a 30 x 30 SOM lattice
from som import SOM

som = SOM(30, 30, x_train.shape[1], 200)
som.train(x_train)

# Fit train data into SOM lattice
mapped = som.map_vects(x_train)
mappedarr = np.array(mapped)
x1 = mappedarr[:, 0]
y1 = mappedarr[:, 1]

index = [np.where(r == 1)[0][0] for r in y_train]
index = list(map(str, index))

## Plots: 1) Train 2) Test+Train ###

plt.figure(1, figsize=(12, 6))
plt.subplot(121)
# Plot 1 for Training only
plt.scatter(x1, y1)
# Just adding text
for i, m in enumerate(mapped):
    plt.text(m[1],
             m[0],
             index[i],
             ha='center',
             va='center',
             bbox=dict(facecolor='white', alpha=0.5, lw=0))
Example #12
flags = np.concatenate((flag_sig, flag_bg), axis=0) # concatenates the flag arrays into one array, each entry corresponding to the entry of the data array
### TRAINING ###
#som = SOM(dimx, dimy, 8, 400) # Train a dimx X dimy SOM with 400 iterations, 8 is the number of variables in the data array
som = SOM(dimx, dimy, 8)
som.train(data) # trains on the dataset prepared
### THIS WILL TAKE AWHILE ###
### TO STORE THE TRAINING RESULTS INTERACTIVELY IN ipython DO: 
### weightages = som._weightages 
### %store weightages 
### THEN TO RECOVER DO: 
### %store -r 
### som = SOM(dimx, dimy, 8, 400)
### som._weightages = weightages
### som._trained = True
print(str(datetime.datetime.now())) # print the time to observe how long the training will take
mapped = np.array(som.map_vects(data)) # map each datapoint to its nearest neuron
# post training manipulations to the final dataset (one big numpy array)
data = np.append(data,mapped,axis=1) # append the mapped neurons to data
data = np.column_stack((data,flags)) # append the flags to the dataset 
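# NOTE: get_umatrix is not defined in this snippet. A minimal sketch of such a
# helper (an assumption, not the original implementation): for each neuron,
# average the distance between its weight vector and its grid neighbours' weights.
def get_umatrix(weights, dimx, dimy):
    w = np.asarray(weights).reshape(dimx, dimy, -1)  # assumes a (dimx*dimy, dim) weight array
    umatrix = np.zeros((dimx, dimy))
    for i in range(dimx):
        for j in range(dimy):
            dists = []
            for di, dj in ((-1, 0), (1, 0), (0, -1), (0, 1)):
                ni, nj = i + di, j + dj
                if 0 <= ni < dimx and 0 <= nj < dimy:
                    dists.append(np.linalg.norm(w[i, j] - w[ni, nj]))
            umatrix[i, j] = np.mean(dists)
    return umatrix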
##test U-matrix
weights = som.output_weights
umatrix = get_umatrix(weights, dimx, dimy)
fig = plt.figure()
plt.imshow(umatrix, origin='lower')
plt.savefig('Umatrix_march29_v2.png')  # save before show(); otherwise the saved figure is blank
plt.show(block=True)
##### APPLICATION #####
# get new data
### PREPARE THE SIGNAL ARRAY ###
sig2 = np.loadtxt('ksigma_mc_som_signal_feb2019.txt') # convert to np array
np.random.shuffle(sig2)
Example #13
def SelfOrganizingMap(Feature_List, where):
    if not os.path.exists(where):
        os.makedirs(where)

    som = SOM(SOM_LINES, SOM_COLS, len(Feature_List[0]), NUM_ITERATIONS)
    som.train(Feature_List)
    # Get output grid
    image_grid = som.get_centroids()
    #print 'Image grid shows weights for all the input features'
    #print image_grid
    # Get vector mappings
    mapped = som.map_vects(Feature_List)
    #print 'Mapping', mapped

    # Visualization part
    # Needs to be refactored to produce output for any number of clusters
    avg_line = [0] * NUM_CLUST
    avg_column = [0] * NUM_CLUST

    for i in range(0, len(mapped), NUM_CLUST):
        for j in range(NUM_CLUST):
            avg_line[(i + j) % NUM_CLUST] += mapped[i + j][0]
            avg_column[(i + j) % NUM_CLUST] += mapped[i + j][1]

    # average BMU position per cluster; cast to int so the averages can be
    # used as grid indices further down
    for i in range(len(avg_line)):
        avg_line[i] = int(avg_line[i] / (len(mapped) / NUM_CLUST))

    for i in range(len(avg_column)):
        avg_column[i] = int(avg_column[i] / (len(mapped) / NUM_CLUST))

    print('Average location', avg_line, avg_column)

    # Grayscale landscape
    sorting_landscape = []
    for i in range(NUM_CLUST):
        sorting_landscape.append(np.zeros((SOM_LINES, SOM_COLS), np.float16))

    for i in range(len(mapped)):
        lin = mapped[i][0]
        col = mapped[i][1]
        sorting_landscape[i % NUM_CLUST][lin][col] += 0.05
        if sorting_landscape[i % NUM_CLUST][lin][col] > 1.0:
            sorting_landscape[i % NUM_CLUST][lin][col] = 1.0

    for i in range(NUM_CLUST):
        sorting_landscape[i][avg_line[i]][avg_column[i]] = 1.0

    for i in range(NUM_CLUST):
        io.imsave(where + str(i) + '_sorting_cluster.png',
                  sorting_landscape[i])

    # Colored landscape
    colored_landscape = np.zeros((SOM_LINES, SOM_COLS, 3), np.float16)
    white = np.array([1.0, 1.0, 1.0])

    # sorting
    insertion_color = np.array([0.0, 0.0, 1.0])  # blue
    bubble_color = np.array([0.0, 1.0, 0.0])  # green
    heap_color = np.array([1.0, 0.0, 0.0])  # red
    quick_color = np.array([1.0, 0.0, 1.0])  # magenta
    random_color = np.array([1.0, 1.0, 0.0])  # yellow

    # non-sorting
    reverseSorted_color = np.array([0.0, 1.0, 1.0])  # cyan
    intervalSwapSorted_color = np.array([1.0, 0.5, 0.0])  # orange
    reverse_color = np.array([0.5, 1.0, 0.0])  # lime-green
    intervalSwap_color = np.array([1.0, 0.0, 0.5])  # fuchsia

    colored = [
        insertion_color, bubble_color, heap_color, quick_color, random_color
    ]
    #add_colored = [reverseSorted_color, intervalSwapSorted_color, reverse_color, intervalSwap_color]
    #colored.extend(add_colored)

    grad = 0.005
    for i in range(len(mapped)):
        lin = mapped[i][0]
        col = mapped[i][1]
        colored_landscape[lin][col] += grad * colored[i % NUM_CLUST]
        #if colored_landscape[lin][col].any() > 1.0:
        #	colored_landscape[lin][col] -= 0.05*colored[i%NUM_CLUST]

    for i in range(NUM_CLUST):
        colored_landscape[avg_line[i]][
            avg_column[i]] = 0.3 * white + 0.7 * colored[i]

    io.imsave(where + 'all_sorting_clusters.png', colored_landscape)