コード例 #1
0
ファイル: features.py プロジェクト: alexandreyy/cbir_netshoes
def extract_lbp(image, radius=1.5):
    '''
    Extract LBP (local binary pattern) features from an image.

    Parameters
    ----------
    image : Image
        Source image; only its pixel data is used (a working copy is made,
        so the caller's image is not mutated).
    radius : float
        Neighborhood radius of the LBP operator. The number of sampling
        points is derived as 8 * radius.

    Returns
    -------
    ndarray
        The raw LBP-coded image (same shape as the grayscale input).
    '''
    # Work on a copy so the caller's image is left untouched.
    image = Image(data=image.data)
    image.convert_to_gray()
    image.equalize_clahe()

    # local_binary_pattern expects an integer number of sampling points;
    # 8 * radius is a float for non-integer radii (e.g. 12.0 for 1.5).
    n_points = int(8 * radius)
    lbp = local_binary_pattern(image.data, n_points, radius)

    # Histogram over all observed LBP codes. np.histogram's `normed`
    # keyword was deprecated and removed in NumPy 1.24; `density=True`
    # is the supported equivalent. Bin count must be an int.
    n_bins = int(lbp.max() + 1)
    hist, _ = np.histogram(lbp, density=True, bins=n_bins, range=(0, n_bins))
    hist, _, _ = z_norm_by_feature(hist)

    # Debug visualization of the normalized code histogram.
    plt.plot(hist)
    plt.show()

    return lbp
コード例 #2
0
ファイル: features.py プロジェクト: alexandreyy/cbir_netshoes
def extract_gch(image):
    '''
    Extract a global color histogram (GCH) from an image.

    The image is CLAHE-equalized, converted to HSV, and a 256-bin
    histogram is computed for each channel; the three histograms are
    concatenated into one flat feature vector.

    Parameters
    ----------
    image : Image
        Source image; a working copy is made so the input is untouched.

    Returns
    -------
    ndarray
        Flattened concatenation of the three per-channel histograms
        (length 3 * 256 = 768).
    '''
    # Equalize image before extracting features.
    image = Image(data=image.data)
    image.equalize_clahe()
    image.convert_to_hsv()

    chans = cv2.split(image.data)
    # NOTE(review): labels say b/g/r but the data is HSV at this point;
    # the labels only matter for debug plotting, which has been removed.
    colors = ("b", "g", "r")
    features = []

    # Build one 256-bin histogram per channel and concatenate them.
    for (chan, color) in zip(chans, colors):
        hist = cv2.calcHist([chan], [0], None, [256], [0, 256])
        features.extend(hist)

    # Debug visualization of the normalized feature-vector histogram.
    # np.histogram's `normed` keyword was removed in NumPy 1.24;
    # `density=True` is the supported equivalent.
    # NOTE(review): 758 bins looks like a typo for 768 (3 * 256), but it
    # only affects this debug plot, so the value is left unchanged.
    hist, _ = np.histogram(np.array(features).flatten(), density=True, bins=758)
    hist, _, _ = z_norm_by_feature(hist)
    plt.plot(hist)
    plt.show()

    return np.array(features).flatten()
コード例 #3
0
    '''

    # NOTE(review): scraped fragment — the enclosing function's `def` line
    # and docstring opening are outside this view. Python 2 print syntax.

    # Load the Million Song Dataset year-prediction splits; the
    # "_samples_" variants (commented out) are smaller quick-run subsets.
    print "Loading data."
    # music_train = MusicData("resources/YearPredictionMSD_samples_train.txt")
    # music_test = MusicData("resources/YearPredictionMSD_samples_test.txt")
    music_train = MusicData("resources/YearPredictionMSD_train.txt")
    music_validation = MusicData("resources/YearPredictionMSD_validation.txt")
    music_test = MusicData("resources/YearPredictionMSD_test.txt")

    # Normalize data.
    print "Normalize data."
    # music_train.X = z_norm(music_train.X)
    # music_validation.X = z_norm(music_validation.X)
    # music_test.X = z_norm(music_test.X)
    # Oversample the training set (SMOTE), then z-normalize per feature.
    music_train.balance_data_oversampling_smote_regular()
    music_train.X, mean_X, std_X = z_norm_by_feature(music_train.X)
    #music_train.balance_data_undersampling_custom()
    # Reuse the training mean/std so validation and test share the same
    # scaling as the data the model is fit on.
    music_validation.X = z_norm_by_feature(music_validation.X, mean_X, std_X)
    music_test.X = z_norm_by_feature(music_test.X, mean_X, std_X)

    # Balacing train data.
    # print "Balacing train data."
    # music_train.balance_data_oversampling_smote_regular()

    # Set train parameters.
    # lambdav = 0.00001
    # lambdav is presumably a regularization weight (disabled here) —
    # TODO confirm against the training routine this fragment feeds.
    lambdav = 0
    # alpha = 0.0000001
    # iterations = 1000000
    alpha = 0.1
    iterations = 1200
コード例 #4
0
if __name__ == '__main__':
    '''
    Train and test neural network.
    '''

    # Load data.
    # Binary files with the train/test crime data; the "_samples_"
    # variants (commented out) are smaller subsets for quick runs.
    path_weights = "resources/nn_weights_reduced.bin"
    path_train = "resources/crimes_training_ones.bin"
    # path_train = "resources/crimes_samples_training.bin"
    path_tests = "resources/crimes_testing_ones.bin"
    # path_tests = "resources/crimes_samples_testing.bin"

    print "Normalizing train"
    crime_train = CrimeData(path_train)
    # Z-normalize two column groups separately; presumably 22:24 are
    # spatial coordinates and 1:5 are time fields — TODO confirm against
    # CrimeData's column layout.
    crime_train.data[:, 22:24], mean_x_y, std_x_y = z_norm_by_feature(
        crime_train.data[:, 22:24])
    crime_train.data[:, 1:5], mean_time, std_time = z_norm_by_feature(
        crime_train.data[:, 1:5])
    # Keep only columns 0:12 plus the normalized 22:24 block; the
    # normalized 1:5 columns fall inside 0:12, so they are retained.
    crime_train.data = np.hstack(
        (crime_train.data[:, 0:12], crime_train.data[:, 22:24]))

    print "Normalizing test"
    crime_test = CrimeData(path_tests)
    # Reuse the training mean/std so the test set shares the same scaling.
    crime_test.data[:, 22:24] = z_norm_by_feature(crime_test.data[:, 22:24],
                                                  mean_x_y, std_x_y)
    crime_test.data[:, 1:5] = z_norm_by_feature(crime_test.data[:, 1:5],
                                                mean_time, std_time)
    crime_test.data = np.hstack(
        (crime_test.data[:, 0:12], crime_test.data[:, 22:24]))
    # Number of classes = highest label across both splits + 1.
    # (Fragment ends here; `n` is presumably used further down.)
    n = np.max(np.hstack((crime_test.y, crime_train.y))) + 1
コード例 #5
0
from k_means import kmeans
from models.features import extract_hog, extract_gch, extract_bic, extract_lbp
from models.file_list import FileList
from models.image import Image
from normalize import z_norm_by_feature
import numpy as np

if __name__ == "__main__":
    '''
    Load features.
    '''
    # NOTE(review): load_lbp is not among the imports visible in this
    # chunk — confirm where it is defined.
    features = load_lbp("features/features.bin")
    print features.shape
    # features, _, _ = z_norm_by_feature(features)
    # features, _, _ = z_norm_by_feature(features)
    # Z-normalize only the first 20000 feature rows.
    features, _, _ = z_norm_by_feature(features[0:20000, :])
    print features.shape
    rets = []

    # This loop runs exactly once, with index == 500.
    for index in range(500, 501):
        if index == 2:
            n = 2
        else:
            # n = int(math.pow(10, (index - 2)))
            n = 100 * (index - 2)

        # NOTE(review): this overwrites the value computed above, making
        # the if/else dead code; n is simply the loop index (500).
        n = index

        print "K-means for " + str(n) + " centroids."

        # (Fragment ends here; the k-means call is outside this view.)
        center = None
コード例 #6
0
@author: Alexandre Yukio Yamashita
'''
from matplotlib import pyplot as plt

from extract_features import load_pca_features
from extract_features import save_clustering
from k_means import kmeans
from normalize import z_norm_by_feature
import numpy as np

if __name__ == "__main__":
    print "Loading"

    # Load PCA-reduced features and z-normalize them per feature.
    features = load_pca_features("pca_features.bin")
    features, _, _ = z_norm_by_feature(features)
    rets = []

    # This loop runs exactly once, with index == 500.
    for index in range(500, 501):
        if index == 2:
            n = 2
        else:
            # n = int(math.pow(10, (index - 2)))
            n = 100 * (index - 2)

        # NOTE(review): this overwrites the value computed above, making
        # the if/else dead code; n is simply the loop index (500).
        n = index

        print "K-means for " + str(n) + " centroids."

        # (Fragment ends here; the k-means call is outside this view.)
        center = None
        label = None
コード例 #7
0
    # NOTE(review): scraped fragment — the enclosing scope (likely an
    # `if __name__ == "__main__":` block) is outside this view.
    image_list = FileList("resources/images/", "jpg")
    c_label, c_center = load_clustering("features/lbp_cluster.bin")

#     hist, _ = np.histogram(c_label, normed = True, bins = np.max(c_label))
#     plt.plot(hist)
#     plt.show()

    # Restrict to the first 20000 images and select the ones assigned
    # to cluster 120.
    image_list.paths = np.array(image_list.paths)
    image_list.paths = image_list.paths[0:20000]
    image_paths = image_list.paths[np.where(c_label == 120)]

#     for path in image_paths:
#         image = Image(path)
#         image.plot()

    features = []
    # NOTE(review): the loop computes lbp_image but never stores it (the
    # append is commented out), so `features` stays empty — apparently a
    # debug leftover; each iteration only triggers extract_lbp's plot.
    for path in image_paths:
        image = Image(path)
        lbp_image = extract_lbp(image)
        # features.append(lbp_image)

    # features = np.array(features)
    # data = np.mean(features, axis = 0)
    # image = Image(data = data)
    # image.plot()

    # Plot the z-normalized centroid of cluster 120.
    hist, _, _ = z_norm_by_feature(c_center[120])
    plt.plot(hist)
    plt.show()