Example 1
def main():
    # read data
    train_images, train_labels, test_images, test_labels, class_list = data.import_data(
        FLAGS.use_classes)
    print('Training image size:', train_images.shape)
    print('Testing image size:', test_images.shape)

    kernel_sizes = saab.parse_list_string(FLAGS.kernel_sizes)
    stride = FLAGS.stride
    if FLAGS.num_kernels:
        num_kernels = saab.parse_list_string(FLAGS.num_kernels)
    else:
        num_kernels = None
    energy_percent = FLAGS.energy_percent
    use_num_images = FLAGS.use_num_images
    print('Parameters:')
    print('use_classes:', class_list)
    print('Kernel_sizes:', kernel_sizes)
    print('Stride:', stride)
    print('Number_kernels:', num_kernels)
    print('Energy_percent:', energy_percent)
    print('Number_use_images:', use_num_images)

    pca_params = saab.multi_Saab_transform(train_images, train_labels,
                                           kernel_sizes=kernel_sizes,
                                           stride=stride,
                                           num_kernels=num_kernels,
                                           energy_percent=energy_percent,
                                           use_num_images=use_num_images,
                                           use_classes=class_list)

    # save data
    fw = open('pca_params.pkl', 'wb')
    pickle.dump(pca_params, fw)
    fw.close()
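The helper saab.parse_list_string is called above but its implementation is not shown on this page. As an illustration only, here is a minimal sketch of the behavior it is assumed to have, namely turning a comma-separated flag string such as '4,4' into a list of integers:

# Hypothetical sketch, not the actual saab implementation: split a
# comma-separated flag string into a list of ints.
def parse_list_string(list_string):
    return [int(item.strip()) for item in list_string.split(',')]

# e.g. parse_list_string('4,4') would return [4, 4]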
Example 2
def import_data(use_classes):
    (train_images, train_labels), (test_images,
                                   test_labels) = mnist.load_data()
    train_images = train_images.reshape(-1, 28, 28, 1)
    test_images = test_images.reshape(-1, 28, 28, 1)
    train_images = train_images / 255.
    test_images = test_images / 255.

    train_images = np.float32(train_images)
    test_images = np.float32(test_images)

    print('initial dtype:', train_images.dtype)
    # print(train_images.shape) # 60000*28*28*1

    # zeropadding
    train_images = np.pad(train_images, ((0, 0), (2, 2), (2, 2), (0, 0)),
                          mode='constant')
    test_images = np.pad(test_images, ((0, 0), (2, 2), (2, 2), (0, 0)),
                         mode='constant')
    # print(train_images.shape) # 60000*32*32*1

    if use_classes != '0-9':
        class_list = saab.parse_list_string(use_classes)
        train_images, train_labels = get_data_for_class(
            train_images, train_labels, class_list)
        test_images, test_labels = get_data_for_class(test_images, test_labels,
                                                      class_list)
        # print(class_list)
    else:
        class_list = [0, 1, 2, 3, 4, 5, 6, 7, 8, 9]

    return train_images, train_labels, test_images, test_labels, class_list
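get_data_for_class is referenced above but not defined in this snippet. A minimal sketch, under the assumption that it simply keeps the samples whose label appears in class_list:

import numpy as np

# Hypothetical sketch, the real helper is not shown: select the images
# and labels that belong to the requested classes.
def get_data_for_class(images, labels, class_list):
    mask = np.isin(labels, class_list)
    return images[mask], labels[mask]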
Example 3
def main():
    # read data
    train_images, train_labels, test_images, test_labels, class_list = data.import_data(
        FLAGS.use_classes)
    print('Training image size:', train_images.shape)
    print('Testing image size:', test_images.shape)

    kernel_sizes = saab.parse_list_string(FLAGS.kernel_sizes)
    if FLAGS.num_kernels:
        num_kernels = saab.parse_list_string(FLAGS.num_kernels)
    else:
        num_kernels = None
    energy_percent = FLAGS.energy_percent
    use_num_images = FLAGS.use_num_images
    print('Parameters:')
    print('use_classes:', class_list)
    print('Kernel_sizes:', kernel_sizes)
    print('Number_kernels:', num_kernels)
    print('Energy_percent:', energy_percent)
    print('Number_use_images:', use_num_images)

    #Level_filter=np.array([1,4,6,4,1])
    #Edge_filter=np.array([-1,-2, 0 ,2, 1])
    #Spot_filter=np.array([-1, 0, 2, 0, -1])
    Wave_filter = np.array([-1, 2, 0, -2, 1])
    Ripple_filter = np.array([1, -4, 6, -4, 1])

    # np.outer builds the full 5x5 Laws kernel; a plain '*' on these 1-D
    # arrays would only give an elementwise product.
    Filter = np.outer(Wave_filter, Ripple_filter)
    '''
    Filter1=Level_filter.T*Level_filter
    Filter3=Level_filter.T*Spot_filter
    Filter4=Level_filter.T*Wave_filter
    Filter5=Level_filter.T*Ripple_filter
    
    Filter6=Edge_filter.T*Level_filter
    Filter7=Edge_filter.T*Edge_filter
    Filter8=Edge_filter.T*Spot_filter
    Filter9=Edge_filter.T*Wave_filter
    Filter10=Edge_filter.T*Ripple_filter
    
    Filter11=Spot_filter.T*Level_filter
    Filter12=Spot_filter.T*Edge_filter
    Filter13=Spot_filter.T*Spot_filter
    Filter14=Spot_filter.T*Wave_filter
    Filter15=Spot_filter.T*Ripple_filter
    
    Filter16=Wave_filter.T*Level_filter
    Filter17=Wave_filter.T*Edge_filter
    Filter18=Wave_filter.T*Spot_filter
    Filter19=Wave_filter.T*Wave_filter
    Filter20=Wave_filter.T*Ripple_filter
    
    Filter21=Ripple_filter.T*Level_filter
    Filter22=Ripple_filter.T*Edge_filter
    Filter23=Ripple_filter.T*Spot_filter
    Filter24=Ripple_filter.T*Wave_filter
    Filter25=Ripple_filter.T*Ripple_filter
    '''

    altered_train = train_images.copy()
    for i in range(train_images.shape[0]):
        altered_train[i, :, :, 0] = cv2.filter2D(train_images[i, :, :, 0], -1,
                                                 Filter)
    train_images = altered_train.copy()

    pca_params = saab.multi_Saab_transform(train_images,
                                           train_labels,
                                           kernel_sizes=kernel_sizes,
                                           num_kernels=num_kernels,
                                           energy_percent=energy_percent,
                                           use_num_images=use_num_images,
                                           use_classes=class_list)
    # save data
    fw = open('pca_params_10.pkl', 'wb')
    pickle.dump(pca_params, fw)
    fw.close()

    # load data
    fr = open('pca_params_10.pkl', 'rb')
    data1 = pickle.load(fr)
    print(data1)
    fr.close()
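Laws 2-D kernels are outer products of two 5-tap vectors, and on 1-D NumPy arrays an expression like Wave_filter.T * Ripple_filter is only an elementwise product, which is why np.outer is used above. A small standalone check of the construction (variable names here are illustrative only):

import numpy as np

wave = np.array([-1, 2, 0, -2, 1])     # W5
ripple = np.array([1, -4, 6, -4, 1])   # R5

kernel = np.outer(wave, ripple)        # 5x5 W5R5 Laws kernel
print(kernel.shape)                    # (5, 5)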
Example 4
def main():
    # read data
    train_images, train_labels, test_images, test_labels, class_list = data.import_data(FLAGS.use_classes)
    print('Training image size:', train_images.shape)
    print('Testing image size:', test_images.shape)

    train_images = train_images[:6000]
    train_labels = train_labels[:6000]
    kernel_sizes = saab.parse_list_string(FLAGS.kernel_sizes)
    if FLAGS.num_kernels:
        num_kernels = saab.parse_list_string(FLAGS.num_kernels)
    else:
        num_kernels = None
    energy_percent = FLAGS.energy_percent
    use_num_images = FLAGS.use_num_images
    print('Parameters:')
    print('use_classes:', class_list)
    print('Kernel_sizes:', kernel_sizes)
    print('Number_kernels:', num_kernels)
    print('Energy_percent:', energy_percent)
    print('Number_use_images:', use_num_images)
    
    def creating_law_filters(a,b):
        ten_product = np.tensordot(a,b,axes=0);
        return ten_product;
    
    #Function to apply Boundary Extension
    def boundary_extension(Image,n):
        Ext_image = np.zeros((Row_Size+(2*n),Column_Size+(2*n)))
        #Complete image
        #Ext_image = np.pad(Image,n,'reflect');
        for i in range(n,(Row_Size+n)):
            for j in range(n,(Column_Size+n)):
                Ext_image[i][j] = Image[i-n][j-n];
        #Upper rows    
        for i in range(0,n):
            for j in range(n, (Column_Size+n)):
                Ext_image[i][j] = Image[0][j-n];
        #Left columns
        for j in range(0,n):
            for i in range(n, (Row_Size+n)):
                Ext_image[i][j] = Image[i-n][0];
        #Bottom rows
        for i in range(Row_Size+n, (Row_Size+(2*n))):
            for j in range(n,(Column_Size+n)):
                Ext_image[i][j] = Image[Row_Size-1][j-n];
        #Right columns
        for j in range(Column_Size+n, Column_Size+(2*n)):
            for i in range(n,(Row_Size+n)):
                Ext_image[i][j] = Image[i-n][Column_Size-1];
        #Corners
        for i in range(0,n):
            for j in range(0,n):
                Ext_image[i][j] = Image[0][0];
    
        for i in range(0,n):
            for j in range(Column_Size+n,Column_Size+(2*n)):
                Ext_image[i][j] = Image[0][Column_Size-1];
    
        for j in range(0,n):
            for i in range(Row_Size+n,Row_Size+(2*n)):
                Ext_image[i][j] = Image[Row_Size-1][0];
    
        for j in range(Column_Size+n, Column_Size+(2*n)):
            for i in range(Row_Size+n, Row_Size+(2*n)):
                Ext_image[i][j] = Image[Row_Size-1][Column_Size-1];
        
        return Ext_image;
    
    
    def law_filter_application(Ext_image,Law_filter):
        Law_applied = np.zeros((Row_Size,Column_Size))
        for i in range(2, Row_Size+2):
            for j in range(2, Column_Size+2):
                m = 0;
                k = 0;
                l = 0;
                for k in range(i-2, i+3):
                    n = 0;
                    for l in range(j-2, j+3):
                        Law_applied[i-2][j-2] = Law_applied[i-2][j-2] + (Ext_image[k][l]*Law_filter[m][n]);
                        n += 1 ;
                    m += 1 ;
        return Law_applied;  
    
    Row_Size = 32;
    Column_Size = 32;
    Window_Size = 5;
    n1 = 2
    features = 25;
    samples = 32*32;
    
    
    L5 = np.array([1, 4, 6, 4, 1]);      #Level
    E5 = np.array([-1, -2, 0, 2, 1]);    #Edge
    S5 = np.array([-1, 0, 2, 0, -1]);    #Spot
    W5 = np.array([-1, 2, 0, -2, 1]);    #Wave
    R5 = np.array([1, -4, 6, -4, 1]);    #Ripple
    
    L5S5 = creating_law_filters(L5,S5);    # needed by the filtering loop below
    L5W5 = creating_law_filters(L5,W5);
    
    #Reading the image
    for i in range(0,6000):
        Input_image = train_images[i,:,:,0]
        #Boundary extension of the image
        Ext_image = boundary_extension(Input_image,n1);
        #Applying the law filters
        
        train_images[i,:,:,0] = law_filter_application(Ext_image,L5S5);
        
    '''train_img1 = color.rgb2gray(io.imread('1.png'))
    Input_image = train_img1
        #Boundary extension of the image
    Ext_image = boundary_extension(Input_image,n1);
        #Applying the law filters
        
    train_img1 = law_filter_application(Ext_image,L5S5)
    plt.matshow(train_img1,cmap='gray') '''  
    
    

    pca_params = saab.multi_Saab_transform(train_images, train_labels,
                                           kernel_sizes=kernel_sizes,
                                           num_kernels=num_kernels,
                                           energy_percent=energy_percent,
                                           use_num_images=use_num_images,
                                           use_classes=class_list)
    # save data
    fw = open('pca_params.pkl', 'wb')
    pickle.dump(pca_params, fw)
    fw.close()

    # load data
    fr = open('pca_params.pkl', 'rb')
    data1 = pickle.load(fr)
    print(data1)
    fr.close()
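The hand-written boundary_extension and law_filter_application above amount to edge-replicating padding followed by a 2-D correlation with the 5x5 kernel. Assuming SciPy is available, the same result can be sketched much more compactly; apply_laws_filter below is an illustrative name, not part of the original code:

import numpy as np
from scipy import ndimage

def apply_laws_filter(image, kernel):
    # mode='nearest' replicates the border pixels, matching boundary_extension;
    # ndimage.correlate slides the un-flipped kernel over the image, matching
    # the nested loops in law_filter_application.
    return ndimage.correlate(image, kernel, mode='nearest')

L5 = np.array([1, 4, 6, 4, 1])
W5 = np.array([-1, 2, 0, -2, 1])
filtered = apply_laws_filter(np.random.rand(32, 32), np.outer(L5, W5))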
Example 5
def main():
    # read data
    train_images, train_labels, test_images, test_labels, class_list = data.import_data(
        FLAGS.use_classes)
    print('Training image size:', train_images.shape)
    print('Testing image size:', test_images.shape)

    #Laws Filter creation
    L5 = np.array([1, 4, 6, 4, 1]).reshape(5, 1)    # Level
    E5 = np.array([-1, -2, 0, 2, 1]).reshape(5, 1)  # Edge
    S5 = np.array([-1, 0, 2, 0, -1]).reshape(5, 1)  # Spot
    W5 = np.array([-1, 2, 0, -2, 1]).reshape(5, 1)  # Wave
    R5 = np.array([1, -4, 6, -4, 1]).reshape(5, 1)  # Ripple

    laws_filters = {'L5': L5, 'E5': E5, 'S5': S5, 'R5': R5, 'W5': W5}

    _2d_laws_filters = {}
    for k1, v1 in laws_filters.items():
        for k2, v2 in laws_filters.items():
            _2d_laws_filters[k1 + k2] = np.matmul(v1, v2.T)

    #boundary extension by reflection padding
    extended_images = []
    for img in train_images[:10000, :, :, 0]:
        new_img = np.pad(img, 2, 'reflect')
        extended_images.append(new_img)

    #Laws feature extraction
    final_images = []
    for img in extended_images:
        new_img = np.empty((1, 32, 32), np.float32)  # float: Laws responses are signed
        for i in range(2, 32 + 2):
            for j in range(2, 32 + 2):
                new_img[0][i - 2][j - 2] = convolve(i, j,
                                                    _2d_laws_filters['S5R5'],
                                                    img)
        final_images.append(new_img)
    train_images = np.vstack(final_images)
    train_images = train_images.reshape(-1, 32, 32, 1)
    print(train_images.shape)

    kernel_sizes = saab.parse_list_string(FLAGS.kernel_sizes)
    if FLAGS.num_kernels:
        num_kernels = saab.parse_list_string(FLAGS.num_kernels)
    else:
        num_kernels = None
    energy_percent = FLAGS.energy_percent
    use_num_images = FLAGS.use_num_images
    print('Parameters:')
    print('use_classes:', class_list)
    print('Kernel_sizes:', kernel_sizes)
    print('Number_kernels:', num_kernels)
    print('Energy_percent:', energy_percent)
    print('Number_use_images:', use_num_images)

    pca_params = saab.multi_Saab_transform(train_images,
                                           train_labels,
                                           kernel_sizes=kernel_sizes,
                                           num_kernels=num_kernels,
                                           energy_percent=energy_percent,
                                           use_num_images=use_num_images,
                                           use_classes=class_list)

    #print(pca_params)
    # save data
    fw = open('pca_params_S5R5.pkl', 'wb')
    pickle.dump(pca_params, fw)
    fw.close()
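The convolve helper used in the feature-extraction loop above is not defined in this snippet. A minimal sketch of what it is assumed to compute, namely the 5x5 neighborhood of the padded image centered at (i, j), weighted by the Laws kernel and summed:

import numpy as np

# Hypothetical sketch, the real helper is not shown: correlate a single
# 5x5 kernel with the padded image at position (i, j).
def convolve(i, j, kernel, padded_img):
    patch = padded_img[i - 2:i + 3, j - 2:j + 3]
    return np.sum(patch * kernel)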