Code example #1
def main():
	#flag for debugging
	flag_subset = False
	boosting_type = 'Real' #'Real' or 'Ada'
	training_epochs = 100 if not flag_subset else 20
	act_cache_dir = 'wc_activations.npy' if not flag_subset else 'wc_activations_subset.npy'
	chosen_wc_cache_dir = 'chosen_wcs.pkl' if not flag_subset else 'chosen_wcs_subset.pkl'

	#data configurations
	pos_data_dir = 'newface16'
	neg_data_dir = 'nonface16'
	image_w = 16
	image_h = 16
	data, labels = load_data(pos_data_dir, neg_data_dir, image_w, image_h, flag_subset)
	data = integrate_images(normalize(data))

	#number of bins for boosting
	num_bins = 25

	#number of cpus for parallel computing
	num_cores = 6 if not flag_subset else 1 #always use 1 when debugging
	
	#create Haar filters
	filters = generate_Haar_filters(4, 4, 16, 16, image_w, image_h, flag_subset)

	#create visualizer to draw histograms, roc curves and best weak classifier accuracies
	drawer = Visualizer([10, 20, 50, 100], [1, 10, 20, 50, 100])
	
	#create boost classifier with a pool of weak classifier
	boost = Boosting_Classifier(filters, data, labels, training_epochs, num_bins, drawer, num_cores, boosting_type)
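The pipeline in every example is the same: load 16x16 face/non-face crops, normalize them, and convert them to integral images before any Haar filter is applied. A minimal sketch of what normalize and integrate_images plausibly do, assuming data is an (N, H, W) float array (the project's real implementations may differ):

import numpy as np

def normalize(images):
    # one plausible convention: zero mean, unit variance per image
    mean = images.mean(axis=(1, 2), keepdims=True)
    std = images.std(axis=(1, 2), keepdims=True) + 1e-8
    return (images - mean) / std

def integrate_images(images):
    # summed-area tables: ii[n, y, x] = sum over rows <= y and cols <= x,
    # so any rectangular filter response later needs only a few lookups
    return images.cumsum(axis=1).cumsum(axis=2)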
Code example #2
def main():
    # flag for debugging
    flag_subset = False
    boosting_type = 'Ada'  # 'Real' or 'Ada'
    training_epochs = 100 if not flag_subset else 20
    act_cache_dir = 'wc_activations.npy' if not flag_subset else 'wc_activations_subset.npy'
    chosen_wc_cache_dir = 'chosen_wcs.pkl' if not flag_subset else 'chosen_wcs_subset.pkl'
    #chosen_wc_cache_dir_real = 'chosen_wcs_real.pkl' if not flag_subset else 'chosen_wcs_subset_real.pkl'

    # data configurations
    pos_data_dir = 'newface16'
    neg_data_dir = 'nonface16'
    image_w = 16
    image_h = 16
    data, labels = load_data(pos_data_dir, neg_data_dir, image_w, image_h, flag_subset)
    data = integrate_images(normalize(data))

    # number of bins for boosting
    num_bins = 25

    # number of cpus for parallel computing
    num_cores = 1  # always use 1 when debugging

    # create Haar filters
    filters = generate_Haar_filters(4, 4, 16, 16, image_w, image_h, flag_subset)

    # create visualizer to draw histograms, roc curves and best weak classifier accuracies
    drawer = Visualizer([10, 20, 50, 100], [1, 10, 20, 50, 100])

    # Adaboost
    boost = Boosting_Classifier(filters, data, labels, training_epochs, num_bins, drawer, num_cores, boosting_type)

    # calculate filter values for all training images
    #start = time.perf_counter()
    #boost.calculate_training_activations(act_cache_dir, act_cache_dir)
    #end = time.perf_counter()
    #print('%f seconds for activation calculation' % (end - start))
    #boost.train(chosen_wc_cache_dir)
    #boost.load_trained_wcs('chosen_wcs_plus.pkl')
    boost.load_trained_wcs('chosen_wcs.pkl')
    for i in range(20):
        wc_id = boost.chosen_wcs[i][1].id  # avoid shadowing the builtin id()
        print("Filter ID = " + str(wc_id) + " Alpha = " + str(boost.chosen_wcs[i][0]))
        boost.draw_filter(filters[wc_id], wc_id, boost.chosen_wcs[i][1].polarity)

    # Test
    st = 10
    for i in range(1, 4):
        original_img = cv2.imread('./test/Face_%d.jpg' % i, cv2.IMREAD_GRAYSCALE)
        result_img = boost.face_detection(original_img, scale_step=st)
        cv2.imwrite('Result_img_No_%d_%s_plus.png' % (i, boosting_type), result_img)
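The Alpha printed for each chosen weak classifier above is its AdaBoost vote weight. For reference, a sketch of one selection round of discrete AdaBoost, assuming predictions is an (n_wc, n_samples) array of cached ±1 weak-classifier outputs (the names are illustrative, not the project's API):

import numpy as np

def adaboost_round(predictions, labels, weights):
    # weighted error of every weak classifier; pick the best one
    errors = (predictions != labels).astype(float) @ weights
    best = int(np.argmin(errors))
    eps = errors[best]
    alpha = 0.5 * np.log((1.0 - eps) / max(eps, 1e-12))
    # re-weight samples so the chosen classifier's mistakes gain weight
    weights = weights * np.exp(-alpha * labels * predictions[best])
    return best, alpha, weights / weights.sum()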
Code example #3
def get_data_and_labels(reload=False):
    if not reload:
        data = get_data('data.pkl') # (37194,16,16)
        labels = get_data('labels.pkl') # (37194,)
    else:
        image_w, image_h = 16, 16
        pos_data_dir, neg_data_dir = './newface16', './nonface16'
        data, labels = load_data(pos_data_dir, neg_data_dir, image_w, image_h, False)
        data = integrate_images(normalize(data))
    return data, labels
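get_data above just reads a cached pickle; it is not defined in these snippets, so the following loader is an assumption about its behavior:

import pickle

def get_data(path):
    # hypothetical cache loader: unpickle whatever load_data/integrate_images
    # previously dumped to this path
    with open(path, 'rb') as f:
        return pickle.load(f)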
Code example #4
File: main.py Project: shwetha1729/FaceDetection
def main_real():
    flag_subset = False
    boosting_type = 'Real'  # 'Real' or 'Ada'
    training_epochs = 100 if not flag_subset else 20
    act_cache_dir = 'wc_activations.npy' if not flag_subset else 'wc_activations_subset.npy'
    chosen_wc_cache_dir = 'chosen_wcs.pkl' if not flag_subset else 'chosen_wcs_subset.pkl'

    # data configurations
    pos_data_dir = 'newface16'
    neg_data_dir = 'nonface16'
    image_w = 16
    image_h = 16
    data, labels = load_data(pos_data_dir, neg_data_dir, image_w, image_h,
                             flag_subset)
    data = integrate_images(normalize(data))

    # number of bins for boosting
    num_bins = 25

    # number of cpus for parallel computing
    num_cores = 8 if not flag_subset else 1  # always use 1 when debugging

    # create Haar filters
    filters = generate_Haar_filters(4, 4, 16, 16, image_w, image_h,
                                    flag_subset)

    # create visualizer to draw histograms, roc curves and best weak classifier accuracies
    drawer = Visualizer([10, 20, 50, 100], [1, 10, 20, 50, 100])

    # create boost classifier with a pool of weak classifier
    boost = Boosting_Classifier(filters, data, labels, training_epochs,
                                num_bins, drawer, num_cores, boosting_type)

    # calculate filter values for all training images
    start = time.perf_counter()  # time.clock() was removed in Python 3.8
    boost.calculate_training_activations(act_cache_dir, act_cache_dir)
    end = time.perf_counter()
    print('%f seconds for activation calculation' % (end - start))

    boost.train(chosen_wc_cache_dir, None)

    boost.visualize()
    for i in range(1, 4):
        original_img = cv2.imread('./Testing_Images/Face_%d.jpg' % i,
                                  cv2.IMREAD_GRAYSCALE)
        result_img = boost.face_detection(original_img)
        # distinct filenames so the three results do not overwrite each other
        cv2.imwrite('Result_img_%s_%d.png' % (boosting_type, i), result_img)
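All of these mains hand integral images to the Haar filters, because a rectangle sum then costs four array lookups regardless of its size. A sketch, assuming ii is an inclusive summed-area table as produced by cumsum (corner conventions vary between implementations):

import numpy as np

def rect_sum(ii, top, left, bottom, right):
    # sum of the original pixels over rows [top, bottom) and cols [left, right)
    ii = np.pad(ii, ((1, 0), (1, 0)))  # zero row/col so borders need no special cases
    return ii[bottom, right] - ii[top, right] - ii[bottom, left] + ii[top, left]

A two-rectangle Haar feature is then just the white region's rect_sum minus the black region's.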
Code example #5
def main_real():
    f_subset = False
    boosting_type = 'Real'  # 'Real' or 'Ada'
    te = 100 if not f_subset else 20
    acd = 'wc_activations.npy' if not f_subset else 'wc_activations_subset.npy'
    chosen_wc_cache_dir = 'chosen_wcs.pkl' if not f_subset else 'chosen_wcs_subset.pkl'

    # data configurations
    pos_data_dir = 'newface16'
    neg_data_dir = 'nonface16'
    img_w = 16
    img_h = 16
    data, labels = load_data(pos_data_dir, neg_data_dir, img_w, img_h,
                             f_subset)
    data = integrate_imgs(normalize(data))

    # number of bins for boosting
    nb = 25

    # number of cpus for parallel computing
    nc = 8 if not f_subset else 1  # always use 1 when debugging

    filters = generate_Haar_filters(4, 4, 16, 16, img_w, img_h, f_subset)

    drawer = Visualizer([10, 20, 50, 100], [1, 10, 20, 50, 100])

    boost = Boosting_Classifier(filters, data, labels, te, nb, drawer, nc,
                                boosting_type)

    # calculating filter values for all training imgs
    start = time.perf_counter()  # time.clock() was removed in Python 3.8
    boost.calculate_training_activations(acd, acd)
    end = time.perf_counter()
    print('%f seconds for activation calculation' % (end - start))

    boost.train(chosen_wc_cache_dir, None)

    boost.visualize()
    org_img1 = cv2.imread('./Testing_imgs/Face_1.jpg', cv2.IMREAD_GRAYSCALE)
    result_img1 = boost.face_detection(org_img1)
    cv2.imwrite('Result_img_1_%s.png' % boosting_type, result_img1)
    org_img2 = cv2.imread('./Testing_imgs/Face_2.jpg', cv2.IMREAD_GRAYSCALE)
    result_img2 = boost.face_detection(org_img2)
    cv2.imwrite('Result_img_2_%s.png' % boosting_type, result_img2)
Code example #6
def main():
    #flag for debugging
    flag_subset = False
    boosting_type = 'Ada'  #'Real' or 'Ada'
    training_epochs = 101 if not flag_subset else 20
    act_cache_dir = 'wc_activations.npy' if not flag_subset else 'wc_activations_subset.npy'
    chosen_wc_cache_dir = 'chosen_wcs.pkl' if not flag_subset else 'chosen_wcs_subset.pkl'
    plot_haar_filter = 'haar_filters' if not flag_subset else 'haar_filters_subset'
    plot_sc_errors = 'sc_errors' if not flag_subset else 'sc_errors_subset'
    steps = [0, 10, 50, 100] if not flag_subset else [0, 10]

    #data configurations
    pos_data_dir = 'newface16'
    neg_data_dir = 'nonface16'
    image_w = 16
    image_h = 16
    data, labels = load_data(pos_data_dir, neg_data_dir, image_w, image_h,
                             flag_subset)

    #---HARD NEGATIVE MINING---
    #putting non-faces into the training data for hard-negative mining
    '''
    for i in range(3):
        negative_patches = pickle.load(open('wrong_patches_' + str(i) + '.pkl', 'rb'))
        data = np.append(data, negative_patches, axis=0)
        labels = np.append(labels, np.full(len(negative_patches), -1))
    '''
    data = integrate_images(normalize(data))

    #number of bins for boosting
    num_bins = 25

    #number of cpus for parallel computing
    num_cores = 8 if not flag_subset else 1  #always use 1 when debugging

    #create Haar filters
    filters = generate_Haar_filters(4, 4, 16, 16, image_w, image_h,
                                    flag_subset)
    print("Length of filters " + str(len(filters)))

    #create visualizer to draw histograms, roc curves and best weak classifier accuracies
    drawer = Visualizer([10, 20, 50, 100], [1, 10, 20, 50, 100])

    #create boost classifier with a pool of weak classifier
    boost = Boosting_Classifier(filters, data, labels, training_epochs,
                                num_bins, drawer, num_cores, boosting_type,
                                chosen_wc_cache_dir)

    #calculate filter values for all training images
    start = time.perf_counter()  # time.clock() was removed in Python 3.8
    boost.calculate_training_activations(act_cache_dir, act_cache_dir)
    end = time.perf_counter()
    print('%f seconds for activation calculation' % (end - start))

    print("Start of train process")
    boost.train(chosen_wc_cache_dir)
    print("End of train process")

    print("Plotting Haar Filters")
    boost.display_haar_filters(chosen_wc_cache_dir, plot_haar_filter)

    print("Plotting training error of strong classifier")
    boost.draw_sc_errors(chosen_wc_cache_dir, plot_sc_errors)

    #Histogram, ROC, weak classfier errors
    boost.visualize(steps, chosen_wc_cache_dir)

    print("------Face Detection---------")

    original_img = cv2.imread('./Testing_Images/Face_2.jpg',
                              cv2.IMREAD_GRAYSCALE)
    result_img = boost.face_detection(original_img)
    cv2.imwrite('Result_Face2_hardneg.png', result_img)

    original_img = cv2.imread('./Testing_Images/Face_3.jpg',
                              cv2.IMREAD_GRAYSCALE)
    result_img = boost.face_detection(original_img)
    cv2.imwrite('Result_Face3_hardneg.png', result_img)

    #HARD NEGATIVE MINING
Code example #7
def main():
    #flag for debugging
    f_subset = True
    hard_neg = False
    boosting_type = 'Ada'
    te = 100 if not f_subset else 20  # training epochs
    acd = 'wc_activations.npy' if not f_subset else 'wc_activations_subset.npy'
    chosen_wc_cache_dir = 'chosen_wcs.pkl' if not f_subset else 'chosen_wcs_subset.pkl'

    #data configurations
    pos_data_dir = 'newface16'
    neg_data_dir = 'nonface16'  # both face and non-face images are 16x16 for AdaBoost face detection
    img_w = 16
    img_h = 16
    data, labels = load_data(pos_data_dir, neg_data_dir, img_w, img_h,
                             f_subset)
    data = integrate_imgs(normalize(data))

    #number of bins for boosting
    nb = 25

    #number of cpus for parallel computing
    nc = 8 if not f_subset else 1  #always use 1 when debugging

    filters = generate_Haar_filters(4, 4, 16, 16, img_w, img_h, f_subset)

    drawer = Visualizer([10, 20, 50, 100], [1, 10, 20, 50, 100])

    boost = Boosting_Classifier(filters, data, labels, te, nb, drawer, nc,
                                boosting_type)

    #calculate filter values for all training imgs
    start = time.perf_counter()  # time.clock() was removed in Python 3.8
    boost.calculate_training_activations(acd, acd)
    end = time.perf_counter()
    print('%f seconds for activation calculation' % (end - start))

    boost.train(chosen_wc_cache_dir, chosen_wc_cache_dir)

    boost.visualize()

    #face detection starts from here
    org_img = cv2.imread('./Testing_imgs/Face_1.jpg', cv2.IMREAD_GRAYSCALE)
    result_img = boost.face_detection(org_img)
    cv2.imwrite('Result_img_%s.png' % boosting_type, result_img)

    if hard_neg:
        import os  # hoisted out of the loop

        start = time.perf_counter()
        for i in range(1, 3):
            sd = os.path.join(os.path.curdir, 'Testing_imgs')
            print(os.listdir(sd))
            ipath = os.path.join(sd, 'Non_Face_' + str(i) + '.jpg')
            print(ipath)
            img = cv2.imread(ipath, cv2.IMREAD_GRAYSCALE)
            print("i", i)
            new_data = boost.get_hard_negative_patches(img)
            load_dir = "hard_neg_" + str(i) + acd
            save_dir = "hard_neg_" + str(i) + acd
            print(
                'Calculate activations for %d weak classifiers, using %d images.'
                % (len(boost.weak_classifiers), new_data.shape[0]))
            if load_dir is not None and os.path.exists(load_dir):
                print('[Found cached activations, loading %s...]' % load_dir)
                wc_activations = np.load(load_dir)
            else:
                if boost.nc == 1:
                    wc_activations = [
                        wc.apply_filter(new_data)
                        for wc in boost.weak_classifiers
                    ]
                else:
                    from joblib import Parallel, delayed
                    wc_activations = Parallel(n_jobs=boost.nc)(
                        delayed(wc.apply_filter)(new_data)
                        for wc in boost.weak_classifiers)
                wc_activations = np.array(wc_activations)
                if save_dir is not None:
                    print('Writing results to disk...')
                    np.save(save_dir, wc_activations)
                    print('[Saved calculated activations to %s]' % save_dir)
            for wc in boost.weak_classifiers:
                wc.activations = np.concatenate(
                    (wc.activations, wc_activations[wc.id, :]))
            boost.data = np.concatenate((boost.data, new_data))
            newlabels = -np.ones(new_data.shape[0])
            # append inside the loop so data and labels stay aligned
            boost.labels = np.concatenate((boost.labels, newlabels))
        end = time.perf_counter()

        print('%f seconds for activation calculation' % (end - start))

        boost.train(chosen_wc_cache_dir, chosen_wc_cache_dir)

        boost.visualize()
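The hard-negative branch above depends on boost.get_hard_negative_patches, which these snippets never show. A standalone sketch of the idea, assuming strong_score is any callable that scores a 16x16 window (all names here are illustrative):

import numpy as np

def hard_negative_patches(strong_score, img, win=16, stride=4, thresh=0.0):
    # slide a window over an image known to contain no faces and keep every
    # window the current classifier still calls a face (its false positives)
    patches = []
    for y in range(0, img.shape[0] - win + 1, stride):
        for x in range(0, img.shape[1] - win + 1, stride):
            patch = img[y:y + win, x:x + win]
            if strong_score(patch) > thresh:
                patches.append(patch)
    return np.array(patches)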
Code example #8
File: Test.py Project: pisomnia/CS231_Project
gc.collect()
flag_subset = False
boosting_type = 'Ada'  #'Real' or 'Ada'
training_epochs = 100 if not flag_subset else 20
act_cache_dir = 'wc_activations.npy' if not flag_subset else 'wc_activations_subset.npy'
chosen_wc_cache_dir = 'chosen_wcs.pkl' if not flag_subset else 'chosen_wcs_subset.pkl'

# data configurations
pos_data_dir = 'newface16'
neg_data_dir = 'nonface16'
image_w = 16
image_h = 16
data, labels = load_data(pos_data_dir, neg_data_dir, image_w, image_h,
                         flag_subset)
data = integrate_images(normalize(data))

# number of bins for boosting
num_bins = 25

# number of cpus for parallel computing
#num_cores = 8 if not flag_subset else 1 #always use 1 when debugging
num_cores = 4
# create Haar filters
filters = generate_Haar_filters(4, 4, 16, 16, image_w, image_h, flag_subset)

# create visualizer to draw histograms, roc curves and best weak classifier accuracies
drawer = Visualizer([10, 20, 50, 100], [1, 10, 20, 50, 100])

# create boost classifier with a pool of weak classifier
boost = Boosting_Classifier(filters, data, labels, training_epochs, num_bins,
                            drawer, num_cores, boosting_type)
Code example #9
def main():
    #flag for debugging
    flag_subset = False
    boosting_type = 'Real'  #'Real' or 'Ada'
    training_epochs = 100 if not flag_subset else 20
    #act_cache_dir = 'wc_activations.npy' if not flag_subset else 'wc_activations_subset.npy'
    act_cache_dir = 'wc_activations_45k.npy' if not flag_subset else 'wc_activations_subset.npy'
    #chosen_wc_cache_dir = 'chosen_wcs.pkl' if not flag_subset else 'chosen_wcs_subset.pkl'
    chosen_wc_cache_dir = 'chosen_wcs_45k.pkl' if not flag_subset else 'chosen_wcs_subset.pkl'

    #data configurations
    pos_data_dir = '/Users/paul/Desktop/UCLA/2018fall/stat231/project2/newface16'
    neg_data_dir = '/Users/paul/Desktop/UCLA/2018fall/stat231/project2/nonface16'
    image_w = 16
    image_h = 16
    data, labels = load_data(pos_data_dir, neg_data_dir, image_w, image_h,
                             flag_subset)
    data = integrate_images(normalize(data))

    #number of bins for boosting
    num_bins = 25

    #number of cpus for parallel computing
    num_cores = 8 if not flag_subset else 1  #always use 1 when debugging

    #create Haar filters
    filters = generate_Haar_filters(4, 4, 16, 16, image_w, image_h,
                                    flag_subset)

    #create visualizer to draw histograms, roc curves and best weak classifier accuracies
    drawer = Visualizer([10, 20, 50, 100], [1, 10, 20, 50, 100])

    #create boost classifier with a pool of weak classifier
    boost = Boosting_Classifier(filters, data, labels, training_epochs,
                                num_bins, drawer, num_cores, boosting_type)

    #calculate filter values for all training images
    #start = time.perf_counter()
    boost.calculate_training_activations(act_cache_dir, act_cache_dir)
    #end = time.perf_counter()
    #print('%f seconds for activation calculation' % (end - start))

    boost.train(chosen_wc_cache_dir, chosen_wc_cache_dir)

    #boost.visualize()

    # Hard Negative Mining
    original_img = cv2.imread(
        '/Users/paul/Desktop/UCLA/2018fall/stat231/project2/Testing_Images/Non_Face_1.jpg',
        cv2.IMREAD_GRAYSCALE)
    name = 'Non_Face_1'
    result_img = boost.face_detection(original_img, name)
    cv2.imwrite('Result_img_%s_%s.png' % (boosting_type, name), result_img)

    hard_neg_patches = boost.get_hard_negative_patches(original_img, name)
    new_data = np.array(list(data) + list(hard_neg_patches[0, :, :, :]))
    new_labels = np.array(
        list(labels) + [-1 for i in range(hard_neg_patches.shape[1])])
    hard_neg_cache_dir = 'wc_activations_hard_neg.npy'
    boost_hard_neg = Boosting_Classifier(filters, new_data, new_labels,
                                         training_epochs, num_bins, drawer,
                                         num_cores, boosting_type)
    boost_hard_neg.calculate_training_activations(hard_neg_cache_dir,
                                                  hard_neg_cache_dir)
    hard_neg_chosen_wc_cache_dir = 'chosen_wcs_hard_neg.pkl'
    boost_hard_neg.train(hard_neg_chosen_wc_cache_dir,
                         hard_neg_chosen_wc_cache_dir)
    boost_hard_neg.visualize()

    # Test
    original_img = cv2.imread(
        '/Users/paul/Desktop/UCLA/2018fall/stat231/project2/Testing_Images/Face_1.jpg',
        cv2.IMREAD_GRAYSCALE)
    name = 'Face_1'
    result_img = boost_hard_neg.face_detection(original_img, name)
    cv2.imwrite('Result_img_%s_%s.png' % (boosting_type, name), result_img)
Code example #10
def main():
	#flag for debugging
	flag_subset = False
	boosting_type = 'Ada' #'Real' or 'Ada'
	training_epochs = 150 if not flag_subset else 20
	act_cache_dir = 'wc_activations.npy' if not flag_subset else 'wc_activations_subset.npy'
	chosen_wc_cache_dir = 'chosen_wcs.pkl' if not flag_subset else 'chosen_wcs_subset.pkl'

	#data configurations
	pos_data_dir = 'newface16'
	neg_data_dir = 'nonface16'
	image_w = 16
	image_h = 16
	data, labels = load_data(pos_data_dir, neg_data_dir, image_w, image_h, flag_subset)
	data = integrate_images(normalize(data))

	#number of bins for boosting
	num_bins = 25

	#number of cpus for parallel computing
	num_cores = 32 if not flag_subset else 1 #always use 1 when debugging
	
	#create Haar filters
	filters = generate_Haar_filters(4, 4, 16, 16, image_w, image_h, flag_subset)

	#create visualizer to draw histograms, roc curves and best weak classifier accuracies
	drawer = Visualizer([10, 20, 50, 100], [1, 10, 20, 50, 100])
	
	#create boost classifier with a pool of weak classifier
	boost = Boosting_Classifier(filters, data, labels, training_epochs, num_bins, drawer, num_cores, boosting_type)

	#calculate filter values for all training images
	start = time.perf_counter()  # time.clock() was removed in Python 3.8
	boost.calculate_training_activations(act_cache_dir, act_cache_dir)
	end = time.perf_counter()
	print('%f seconds for activation calculation' % (end - start))

	boost.train(chosen_wc_cache_dir, chosen_wc_cache_dir)
	boost.visualizer.display_haar(boost.chosen_wcs, 20)
	#boost.visualize()

	# test_img = './Test/Face_3.jpg'
	# original_img = cv2.imread(test_img, cv2.IMREAD_GRAYSCALE)
	# img_rgb = cv2.imread(test_img)
	# result_img = boost.face_detection(original_img, img_rgb)
	# cv2.imwrite('Result_img_%s.png' % boosting_type, result_img)

	#negative training
	# negative_patches, negative_patches_nms = boost.get_hard_negative_patches(cv2.imread('./Test/Non_face_1.jpg', cv2.IMREAD_GRAYSCALE))
	# print(negative_patches.shape, negative_patches_nms.shape)
	# pickle.dump(negative_patches, open("negative_patches_1.pkl", 'wb'))
	# pickle.dump(negative_patches_nms, open("negative_patches_nms_1.pkl", 'wb'))
	#
	# negative_patches, negative_patches_nms = boost.get_hard_negative_patches(cv2.imread('./Test/Non_face_2.jpg', cv2.IMREAD_GRAYSCALE))
	# print(negative_patches.shape, negative_patches_nms.shape)
	# pickle.dump(negative_patches, open("negative_patches_2.pkl", 'wb'))
	# pickle.dump(negative_patches_nms, open("negative_patches_nms_2.pkl", 'wb'))
	#
	# negative_patches, negative_patches_nms = boost.get_hard_negative_patches(cv2.imread('./Test/Non_face_3.jpg', cv2.IMREAD_GRAYSCALE))
	# print(negative_patches.shape, negative_patches_nms.shape)
	# pickle.dump(negative_patches, open("negative_patches_3.pkl", 'wb'))
	# pickle.dump(negative_patches_nms, open("negative_patches_nms_3.pkl", 'wb'))
	#
	#load negative hard mining data
	# negative1 = pickle.load(open("negative_patches_1.pkl", 'rb'))
	# negative2 = pickle.load(open("negative_patches_2.pkl", 'rb'))
	# negative3 = pickle.load(open("negative_patches_3.pkl", 'rb'))
	# data = np.r_[data, negative1, negative2, negative3]
	# print("Starting with data of shape ", data.shape)
	# labels = np.r_[labels, np.ones(negative1.shape[0])*-1, np.ones(negative2.shape[0])*-1, np.ones(negative3.shape[0])*-1]
	# print("Starting with labels of shape ", labels.shape)
	#
	# boost = Boosting_Classifier(filters, data, labels, training_epochs, num_bins, drawer, num_cores, boosting_type)
	# # calculate filter values for all training images
	# start = time.time()
	# boost.calculate_training_activations("wc_activations_nhm.npy","wc_activations_nhm.npy")
	# end = time.time()
	# print('%f seconds for activation calculation' % (end - start))
	#
	# boost.train("chosen_wcs_nhm.pkl","chosen_wcs_nhm.pkl")

	for i in range(1, 4):
		test_img = './Test/Non_face_%d.jpg' % i
		original_img = cv2.imread(test_img, cv2.IMREAD_GRAYSCALE)
		img_rgb = cv2.imread(test_img)
		result_img = boost.face_detection(original_img, img_rgb, 10)
		cv2.imwrite('Result_nms_%d.png' % i, result_img)
Code example #11
def main():
	#flag for debugging
	flag_subset = False
	boosting_type = 'Ada' #'Real' or 'Ada'
	training_epochs = 100 if not flag_subset else 20
	act_cache_dir = 'wc_activations.npy' if not flag_subset else 'wc_activations_subset.npy'
	chosen_wc_cache_dir = 'chosen_wcs.pkl' if not flag_subset else 'chosen_wcs_subset.pkl'

	#data configurations
	pos_data_dir = 'newface16/newface16'
	neg_data_dir = 'nonface16/nonface16'
    
	image_w = 16
	image_h = 16
	data, labels = load_data(pos_data_dir, neg_data_dir, image_w, image_h, flag_subset)
	
	#compute integral image
	data = integrate_images(normalize(data))

	#number of bins for boosting
	num_bins = 25

	#number of cpus for parallel computing
	num_cores = -1 if not flag_subset else 1 #always use 1 when debugging
	
	#create Haar filters
	filters = generate_Haar_filters(4, 4, 16, 16, image_w, image_h, flag_subset)
    
	#create visualizer to draw histograms, roc curves and best weak classifier accuracies
	drawer = Visualizer([10, 20, 50, 100], [1, 10, 20, 50, 100])
	
	#create boost classifier with a pool of weak classifier
	boost = Boosting_Classifier(filters, data, labels, training_epochs,num_bins, drawer, num_cores, boosting_type)
    
	
	#calculate filter values for all training images
	start = time.perf_counter()  # time.clock() was removed in Python 3.8

	#calculating activations by applying the Haar filters to the integral images
	boost.calculate_training_activations(act_cache_dir, act_cache_dir)
	end = time.perf_counter()
	print('%f seconds for activation calculation' % (end - start))

	#Calling the training function for error calculation and weight updates
	boost.train(chosen_wc_cache_dir)

	#visualisation of results: Plotting of Histograms, ROC curves, graphs
	boost.visualize()
	
	#Face Detection for 3 test images
	for i in range(3):
		original_img = cv2.imread('Face_%d.jpg' % i, cv2.IMREAD_GRAYSCALE)
		result_img = boost.face_detection(original_img)
		cv2.imwrite('Result_img_%s_%d.png' % (boosting_type, i), result_img)


    #hard negative mining
	for i in range(3):
		hard_neg_data_dir='Hard_neg_data_%d.pkl' %i
		hard_neg_label_dir='Hard_neg_labels_%d.pkl' %i
		#load hard negatives into original data and retrain
		if hard_neg_data_dir is not None and os.path.exists(hard_neg_data_dir):
			print('[Found cached hard negative data, loading %s...]' % hard_neg_data_dir)
			patches = pickle.load(open(hard_neg_data_dir, 'rb'))
			hard_neg_labels = pickle.load(open(hard_neg_label_dir, 'rb'))
			boost.data = np.concatenate((boost.data, patches), axis=0)
			boost.labels = np.concatenate((boost.labels, hard_neg_labels), axis=0)
		#compute hard negatives from non-face images and store result in pickle file
		else:
			back_img = cv2.imread('Non_face_%d.jpg' %(i+1), cv2.IMREAD_GRAYSCALE)
			patches = boost.get_hard_negative_patches(back_img, scale_step=10)
			patches = patches[0]
			#one -1 label per hard-negative patch
			hard_labels = np.full(patches.shape[0], fill_value=-1)
			pickle.dump(patches, open('Hard_neg_data_%d.pkl' % i, 'wb'))
			pickle.dump(hard_labels, open('Hard_neg_labels_%d.pkl' % i, 'wb'))
Code example #12
File: main.py Project: shwetha1729/FaceDetection
def main():
    #flag for debugging
    flag_subset = True
    hard_neg = False
    boosting_type = 'Ada'  #'Real' or 'Ada'
    training_epochs = 100 if not flag_subset else 20
    act_cache_dir = 'wc_activations.npy' if not flag_subset else 'wc_activations_subset.npy'
    chosen_wc_cache_dir = 'chosen_wcs.pkl' if not flag_subset else 'chosen_wcs_subset.pkl'

    #data configurations
    pos_data_dir = 'newface16'
    neg_data_dir = 'nonface16'
    image_w = 16
    image_h = 16
    data, labels = load_data(pos_data_dir, neg_data_dir, image_w, image_h,
                             flag_subset)
    data = integrate_images(normalize(data))

    #number of bins for boosting
    num_bins = 25

    #number of cpus for parallel computing
    num_cores = 8 if not flag_subset else 1  #always use 1 when debugging

    #create Haar filters
    filters = generate_Haar_filters(4, 4, 16, 16, image_w, image_h,
                                    flag_subset)

    #create visualizer to draw histograms, roc curves and best weak classifier accuracies
    drawer = Visualizer([10, 20, 50, 100], [1, 10, 20, 50, 100])

    #create boost classifier with a pool of weak classifier
    boost = Boosting_Classifier(filters, data, labels, training_epochs,
                                num_bins, drawer, num_cores, boosting_type)

    #calculate filter values for all training images
    start = time.perf_counter()  # time.clock() was removed in Python 3.8
    boost.calculate_training_activations(act_cache_dir, act_cache_dir)
    end = time.perf_counter()
    print('%f seconds for activation calculation' % (end - start))

    boost.train(chosen_wc_cache_dir, chosen_wc_cache_dir)

    boost.visualize()
    #face detection
    original_img = cv2.imread('./Testing_Images/Face_1.jpg',
                              cv2.IMREAD_GRAYSCALE)
    result_img = boost.face_detection(original_img)
    cv2.imwrite('Result_img_%s.png' % boosting_type, result_img)

    if hard_neg:
        import os  # hoisted out of the loop

        start = time.perf_counter()
        for i in range(1, 3):
            sd = os.path.join(os.path.curdir, 'Testing_Images')  # capitalization matches the path used above
            print(os.listdir(sd))
            ipath = os.path.join(sd, 'Non_Face_' + str(i) + '.jpg')
            print(ipath)
            img = cv2.imread(ipath, cv2.IMREAD_GRAYSCALE)
            print("i", i)
            new_data = boost.get_hard_negative_patches(img)
            load_dir = "hard_neg_" + str(i) + act_cache_dir
            save_dir = "hard_neg_" + str(i) + act_cache_dir
            print(
                'Calculate activations for %d weak classifiers, using %d images.'
                % (len(boost.weak_classifiers), new_data.shape[0]))
            if load_dir is not None and os.path.exists(load_dir):
                print('[Found cached activations, loading %s...]' % load_dir)
                wc_activations = np.load(load_dir)
            else:
                if boost.num_cores == 1:
                    wc_activations = [
                        wc.apply_filter(new_data)
                        for wc in boost.weak_classifiers
                    ]
                else:
                    from joblib import Parallel, delayed
                    wc_activations = Parallel(n_jobs=boost.num_cores)(
                        delayed(wc.apply_filter)(new_data)
                        for wc in boost.weak_classifiers)
                wc_activations = np.array(wc_activations)
                if save_dir is not None:
                    print('Writing results to disk...')
                    np.save(save_dir, wc_activations)
                    print('[Saved calculated activations to %s]' % save_dir)
            for wc in boost.weak_classifiers:
                wc.activations = np.concatenate(
                    (wc.activations, wc_activations[wc.id, :]))
            boost.data = np.concatenate((boost.data, new_data))
            newlabels = -np.ones(new_data.shape[0])
            # append inside the loop so data and labels stay aligned
            boost.labels = np.concatenate((boost.labels, newlabels))
        end = time.perf_counter()

        print('%f seconds for activation calculation' % (end - start))

        boost.train(chosen_wc_cache_dir, chosen_wc_cache_dir)

        boost.visualize()
Code example #13
def main():
    #flag for debugging
    flag_subset = False
    boosting_type = 'Ada'  #'Real' or 'Ada'
    training_epochs = 100 if not flag_subset else 20
    act_cache_dir = 'wc_activations.npy' if not flag_subset else 'wc_activations_subset.npy'
    chosen_wc_cache_dir = 'chosen_wcs.pkl' if not flag_subset else 'chosen_wcs_subset.pkl'

    #data configurations
    pos_data_dir = 'newface16'
    neg_data_dir = 'nonface16'
    image_w = 16
    image_h = 16
    data, labels = load_data(pos_data_dir, neg_data_dir, image_w, image_h,
                             flag_subset)
    data = integrate_images(normalize(data))
    #number of bins for boosting
    num_bins = 25

    #number of cpus for parallel computing
    num_cores = 6 if not flag_subset else 1  #always use 1 when debugging

    #create Haar filters
    filters = generate_Haar_filters(4, 4, 16, 16, image_w, image_h,
                                    flag_subset)

    #create visualizer to draw histograms, roc curves and best weak classifier accuracies
    drawer = Visualizer([10, 20, 50, 100], [1, 10, 20, 50, 100])

    #create boost classifier with a pool of weak classifier
    boost = Boosting_Classifier(filters, data, labels, training_epochs,
                                num_bins, drawer, num_cores, boosting_type)

    #calculate filter values for all training images
    start = time.perf_counter()  # time.clock() was removed in Python 3.8
    boost.calculate_training_activations(act_cache_dir, act_cache_dir)
    end = time.perf_counter()
    print('%f seconds for activation calculation' % (end - start))

    boost.train(chosen_wc_cache_dir)

    if boosting_type == 'Ada':
        boost.visualize()
        original_img = cv2.imread('./Testing_Images/Face_4.jpg',
                                  cv2.IMREAD_GRAYSCALE)
        original_rgb_img = cv2.imread('./Testing_Images/Face_4.jpg')
        result_img = boost.face_detection(original_img, original_rgb_img)
        cv2.imwrite('Result_img_%s.png' % boosting_type, result_img)
        save_hard_neg = 'hard_neg.pkl'
        save_hard_neg_labels = 'hard_neg_label.pkl'
        if os.path.exists(save_hard_neg):
            final_hard_negatives = pickle.load(open(save_hard_neg, 'rb'))
            final_neg_labels = pickle.load(open(save_hard_neg_labels, 'rb'))
        else:
            for i in range(3):
                hard_negative_img = cv2.imread(
                    './Testing_Images/Non_face_' + str(i + 1) + '.jpg',
                    cv2.IMREAD_GRAYSCALE)
                hard_negatives = boost.get_hard_negative_patches(
                    hard_negative_img)
                print("No. of hard negative patches: ", hard_negatives.shape)
                hard_neg_labels = np.full((hard_negatives.shape[0]), -1)
                if i == 0:
                    final_hard_negatives = hard_negatives
                    final_neg_labels = hard_neg_labels
                else:
                    final_hard_negatives = np.append(final_hard_negatives,
                                                     hard_negatives,
                                                     axis=0)
                    final_neg_labels = np.append(final_neg_labels,
                                                 hard_neg_labels,
                                                 axis=0)
            pickle.dump(final_hard_negatives, open(save_hard_neg, 'wb'))
            pickle.dump(final_neg_labels, open(save_hard_neg_labels, 'wb'))
        boost.data = np.append(boost.data, final_hard_negatives, axis=0)
        boost.labels = np.append(boost.labels, final_neg_labels, axis=0)
        new_act_cache_dir = 'new_wc_activations.npy' if not flag_subset else 'new_wc_activations_subset.npy'
        new_chosen_wc_cache_dir = 'new_chosen_wcs.pkl' if not flag_subset else 'new_chosen_wcs_subset.pkl'
        start = time.perf_counter()
        boost.calculate_training_activations(new_act_cache_dir,
                                             new_act_cache_dir)
        end = time.perf_counter()
        boost.train(new_chosen_wc_cache_dir)
        original_img = cv2.imread('./Testing_Images/Face_4.jpg',
                                  cv2.IMREAD_GRAYSCALE)
        original_rgb_img = cv2.imread('./Testing_Images/Face_4.jpg')
        result_img = boost.face_detection(original_img, original_rgb_img)
        cv2.imwrite('New_result_img_%s.png' % boosting_type, result_img)
    else:
        boost.real_visualize()
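Several examples set boosting_type = 'Real', and all of them bin activations with num_bins = 25. In the standard RealBoost formulation, each weak classifier outputs a real value per bin, h(b) = 0.5 * ln(p_b / q_b); a sketch under that assumption (the project's exact binning may differ):

import numpy as np

def realboost_bin_outputs(activations, labels, weights, num_bins=25):
    # p/q: weighted mass of face (+1) / non-face (-1) samples in each bin
    edges = np.histogram_bin_edges(activations, bins=num_bins)
    bins = np.digitize(activations, edges[1:-1])  # bin index in [0, num_bins)
    eps = 1e-8
    p = np.bincount(bins, weights * (labels == 1), num_bins) + eps
    q = np.bincount(bins, weights * (labels == -1), num_bins) + eps
    return 0.5 * np.log(p / q), edges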