def convert_to_3D_png(self, in_path, out_path):
    """Read the image collection matching in_path and save it as a single stacked .npy array."""
    print("reading")
    imgs = io.imread_collection(in_path)
    print("concatenating")
    imgs = collection.concatenate_images(imgs)
    print("writing")
    np.save(out_path, imgs)
def flat_convert(self, in_path, out_path):
    """Read the image collection matching in_path, flatten each image to a row vector,
    and save the resulting 2D array to out_path."""
    print("reading")
    imgs = io.imread_collection(in_path)
    print("concatenating")
    imgs = collection.concatenate_images(imgs)
    print("reshaping")
    imgs = imgs.reshape(imgs.shape[0], imgs.shape[1] * imgs.shape[2])
    print("writing")
    np.save(out_path, imgs)
def evaluate_trained_net_on_patches(
        state, folder="data/train_set/dataset/13/validation/faces/*.png"):
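    """Run the trained 12-net (parameters in `state`) over every validation face patch in
    `folder`, print the min/max face scores and the number of patches predicted as faces,
    and print a score threshold that keeps roughly 99% of the face patches above it."""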
    img_collection = imread_collection(folder)

    arr_imgs = concatenate_images(img_collection)
    arr_imgs = arr_imgs[:, :, :, np.newaxis]
    arr_imgs = np.rollaxis(arr_imgs, 3, 1)
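    # arr_imgs now has shape (num_patches, 1, 13, 13): the single channel axis is moved
    # in front of the spatial axes.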

    #arr_imgs = arr_imgs[0:10,:,:,:]
    borrow = True
    shared_x = theano.shared(
        np.asarray(arr_imgs, dtype=theano.config.floatX),  # @UndefinedVariable
        borrow=borrow)

    num_arr_imgs = arr_imgs.shape[0]
    iT = T.lscalar()  # index of the patch to evaluate
    x = T.tensor3("x")
    # The network expects a 4D input, so each patch is wrapped as a mini-batch of one.
    layer0_input = x.reshape((1, NUM_CHANNELS, 13, 13))

    # Build the 12-net from the saved parameters and read off the predicted class
    # (1 = face) and the class probabilities p(y | x).
    net = twelve_net(layer0_input, None, relu, state)
    prediction = net.log_regression_layer.y_pred
    py_x = net.log_regression_layer.p_y_given_x

    test_model = theano.function([iT], [prediction, py_x, layer0_input],
                                 givens={x: shared_x[iT, :, :, :]})
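    # test_model(i) evaluates patch i (pulled from the shared array via `givens`) and
    # returns its predicted class, class probabilities, and the reshaped input patch.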

    min_py_x = 99999
    max_py_x = 0
    num_faces = 0
    thresholds = []
    for i in xrange(num_arr_imgs):
        [out_predict, out_py_x, out_face] = test_model(i)
        thresholds.append(out_py_x[0, 1])
        if out_py_x[0, 1] < min_py_x:
            min_py_x = out_py_x[0, 1]

        if out_py_x[0, 1] > max_py_x:
            max_py_x = out_py_x[0, 1]

        if out_predict == 1:
            num_faces = num_faces + 1

    # 1st-percentile index: the threshold chosen below keeps ~99% of face patches above it.
    recall_index = int(num_arr_imgs * .01)

    thresholds.sort()
    t = thresholds[recall_index]

    print "Num faces: %d" % (num_faces)
    print "min_py_x(for face): %f" % (min_py_x)
    print "max_py_x(for face): %f" % (max_py_x)
    print "Num images evaluated: %d" % (num_arr_imgs)
    print "Threshold %f" % (t)
def prepare_data(faces_collection, bkgs_collection):
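    """Stack the face and background patch collections, convert them to grayscale,
    flatten each patch to a row vector, label faces 1 and backgrounds 0, shuffle the
    combined set, and return Theano shared variables (features, int32 labels)."""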

    # To get a subset:
    # arr_faces = concatenate_images(faces_collection)[:5000, ...]
    arr_faces = concatenate_images(faces_collection)
    arr_faces = convert_image_stack_to_gray(arr_faces)

    arr_faces = np.rollaxis(arr_faces, 3, 1)
    num_face_imgs = arr_faces.shape[0]
    arr_faces = arr_faces.reshape((arr_faces.shape[0], -1))
    out_faces = np.ones(arr_faces.shape[0])

    arr_bkgs = concatenate_images(bkgs_collection)
    arr_bkgs = convert_image_stack_to_gray(arr_bkgs)
    arr_bkgs = np.rollaxis(arr_bkgs, 3, 1)
    arr_bkgs = arr_bkgs.reshape((arr_bkgs.shape[0], -1))
    out_bkgs = np.zeros(arr_bkgs.shape[0])

    test_set = np.concatenate((arr_faces, arr_bkgs))
    labels = np.concatenate((out_faces, out_bkgs))

    arr_indexes = np.random.permutation(test_set.shape[0])
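    # Apply the same permutation to features and labels so they stay aligned.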

    shuffled_test_set = test_set[arr_indexes]
    shuffled_labels = labels[arr_indexes].flatten()

    borrow = True
    shared_x = theano.shared(np.asarray(shuffled_test_set,
                                        dtype=theano.config.floatX),
                             borrow=borrow)

    shared_y = theano.shared(np.asarray(shuffled_labels,
                                        dtype=theano.config.floatX),
                             borrow=borrow)

    return shared_x, T.cast(shared_y, 'int32')
def prepare_data(faces_collection, bkgs_collection):
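    """Variant of prepare_data that keeps each patch single-channel (a channel axis is
    added instead of a colour conversion) and returns plain, shuffled NumPy arrays
    (features, labels) rather than Theano shared variables."""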

    arr_faces = concatenate_images(faces_collection)
    #arr_faces = convert_image_stack_to_lbp(arr_faces)
    arr_faces = arr_faces[:, :, :, np.newaxis]
    arr_faces = np.rollaxis(arr_faces, 3, 1)
    num_face_imgs = arr_faces.shape[0]
    arr_faces = arr_faces.reshape((arr_faces.shape[0], -1))
    # NOTE: verify this flattening matches the flattening used during training.

    out_faces = np.ones(arr_faces.shape[0])

    arr_bkgs = concatenate_images(bkgs_collection)
    #arr_bkgs = convert_image_stack_to_lbp(arr_bkgs)
    arr_bkgs = arr_bkgs[:, :, :, np.newaxis]
    arr_bkgs = np.rollaxis(arr_bkgs, 3, 1)
    arr_bkgs = arr_bkgs.reshape((arr_bkgs.shape[0], -1))
    # (the background set could be subsampled here to reduce its size)
    out_bkgs = np.zeros(arr_bkgs.shape[0])

    test_set = np.concatenate((arr_faces, arr_bkgs))
    labels = np.concatenate((out_faces, out_bkgs))

    arr_indexes = np.random.permutation(test_set.shape[0])

    shuffled_test_set = test_set[arr_indexes]
    shuffled_labels = labels[arr_indexes].flatten()

    #     borrow = True
    #     shared_x = theano.shared(np.asarray(shuffled_test_set, dtype=theano.config.floatX),  # @UndefinedVariable
    #                              borrow=borrow)
    #
    #     shared_y = theano.shared(np.asarray(shuffled_labels, dtype=theano.config.floatX),  # @UndefinedVariable
    #                              borrow=borrow)

    return shuffled_test_set, shuffled_labels
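# A minimal usage sketch for prepare_data (assumption: the background glob below is
# illustrative; only the face path appears elsewhere in this code):
#
#     faces = imread_collection("data/train_set/dataset/13/validation/faces/*.png")
#     bkgs = imread_collection("data/train_set/dataset/13/validation/bkgs/*.png")
#     test_x, test_y = prepare_data(faces, bkgs)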