Esempio n. 1
0
def normalize():
    """Pre-process the face / non-face datasets and save their NPD features.

    For each of the 500 face and 500 non-face images: convert to 8-bit
    grayscale, downscale in place to at most 24x24, save the pre-processed
    image, extract its NPD feature vector, and finally write each feature
    list to its own ``.npy`` file.
    """
    positive_features = []
    negative_features = []
    for i in range(500):
        n = '%03d' % i
        filepath = './datasets/original/face/face_' + n + '.jpg'
        savepath = './datasets/pre-processing/face/face_' + n + '.jpg'
        image = Image.open(filepath).convert('L')  # 'L' = 8-bit grayscale
        image.thumbnail((24, 24))
        image.save(savepath)
        fea = feature.NPDFeature(np.array(image)).extract()
        positive_features.append(fea)
    # BUG FIX: the original saved the face features to nonface_features.npy
    # (and the non-face features to face_features.npy); the file names are
    # now matched to their contents.
    np.save("./datasets/face_features.npy", positive_features)

    for i in range(500):
        n = '%03d' % i
        filepath = './datasets/original/nonface/nonface_' + n + '.jpg'
        savepath = './datasets/pre-processing/nonface/nonface_' + n + '.jpg'
        image = Image.open(filepath).convert('L')
        image.thumbnail((24, 24))
        image.save(savepath)
        fea = feature.NPDFeature(np.array(image)).extract()
        negative_features.append(fea)
    np.save("./datasets/nonface_features.npy", negative_features)
Esempio n. 2
0
def exfuture(m, n, file):
    """Extract NPD features for all face/non-face images and pickle them.

    Parameters
    ----------
    m, n : int
        Expected number of face (``m``) and non-face (``n``) images;
        together they size the feature matrix ``X``.
    file : str
        Path of the output pickle; ``X`` (features) is dumped first,
        then ``y`` (labels: 1 for face, 0 for non-face).
    """
    X = np.zeros((m + n, 165600))  # 165600 = NPD feature length for 24x24
    row = 0
    # `with` guarantees the output file is closed even if extraction fails.
    with open(file, "wb") as output:
        # BUG FIX: the original loop variable was named `file`, shadowing
        # the output-path parameter; renamed to `name`.
        for name in os.listdir("./datasets/original/face"):
            im = np.array(
                Image.open("./datasets/original/face/" + name).convert(
                    'L').resize((24, 24), Image.BILINEAR), 'i')
            X[row] = NPD.NPDFeature(im).extract()
            row += 1
        for name in os.listdir("./datasets/original/nonface"):
            im = np.array(
                Image.open("./datasets/original/nonface/" + name).convert(
                    'L').resize((24, 24), Image.BILINEAR), 'i')
            X[row] = NPD.NPDFeature(im).extract()
            row += 1
        pickle.dump(X, output)
        # `.T` on a 1-D array is a no-op, so it is dropped here.
        y = np.hstack((np.ones((m, )), np.zeros((n, ))))
        pickle.dump(y, output)
Esempio n. 3
0
def read_files(dir, count, is_face):
    """Extract NPD features for `count` images and pickle them with labels.

    :param dir: directory containing the image files.
    :param count: number of images to read (indices 000 .. count-1).
    :param is_face: True reads ``face_XXX.jpg`` and labels rows +1;
        False reads ``nonface_XXX.jpg`` and labels rows -1.

    The feature matrix, with the label appended as a final column, is
    pickled to ``face.data`` or ``nonface.data`` respectively.
    """
    rows = []
    for idx in range(0, count):
        tag = "%03d" % idx
        if (is_face):
            filename = dir + "face_%s.jpg" % tag
        else:
            filename = dir + "nonface_%s.jpg" % tag
        img = read_image(filename)
        img_arr = trans_to_array(img, (24, 24))
        rows.append(feature.NPDFeature(img_arr).extract())

    data = np.array(rows)
    # Label column: +1 for faces, -1 for non-faces.
    sign = 1.0 if is_face else -1.0
    labels = sign * np.ones((data.shape[0], 1))
    data = np.concatenate((data, labels), axis=1)

    target = 'face.data' if is_face else 'nonface.data'
    with open(target, 'wb') as file:
        pickle.dump(data, file)
Esempio n. 4
0
def extract_feature(pic_type, feature_type):
    """Extract one image's NPD feature and append it to `feature_type`.

    :param pic_type: dataset sub-folder name, e.g. 'face' or 'nonface'.
    :param feature_type: list collecting the extracted feature vectors
        (mutated in place).
    """
    # NOTE(review): `i` and `size` are free variables — this function only
    # works if the caller defines them (a loop index and a thumbnail size)
    # at global scope; confirm before reusing this function elsewhere.
    path_name = './datasets/original/' + pic_type + \
        '/' + pic_type + '_' + '%03d' % i+'.jpg'
    obj = Image.open(path_name).convert('L')  # 'L' = 8-bit grayscale
    obj.thumbnail(size, Image.ANTIALIAS)
    npd = feature.NPDFeature(np.array(obj))
    n = npd.extract()
    feature_type.append(n.tolist())
Esempio n. 5
0
    def get_features(path):
        """Extract an NPD feature vector for every image under `path`.

        :param path: directory whose every file is read as an image.
        :return: list of per-image feature vectors.
        """
        image_paths = [os.path.join(path, f) for f in os.listdir(path)]
        features = []
        # read the images and extract the features
        for image_path in image_paths:
            img = cv2.imread(image_path)
            # convert into gray image
            gray_img = cv2.cvtColor(img, cv2.COLOR_BGR2GRAY)
            # NOTE(review): resized to 25x25 here, unlike the 24x24 used by
            # sibling examples — confirm the intended input size.
            img_reshape = cv2.resize(gray_img, (25, 25), interpolation=cv2.INTER_CUBIC)
            image = feature.NPDFeature(img_reshape)
            pre_features = feature.NPDFeature.extract(image)
            # Round-trips every feature through "save.p" (overwritten each
            # iteration); presumably save/load are pickle wrappers so
            # face_feature == pre_features — verify, as the round trip
            # otherwise serves no purpose.
            AdaBoostClassifier.save(pre_features, "save.p")
            face_feature = AdaBoostClassifier.load("save.p")
            features.append(face_feature)
        return features
Esempio n. 6
0
def preprocess_data():
    """
    Pre-process the image data: extract NPD features and cache them on disk.
    :return: None
    """
    # Skip the whole pass if the cached feature file already exists.
    if not os.path.exists('NPD_feature.pkl'):
        # Collect the paths of all face and non-face images.
        d = os.path.dirname(__file__)
        face_img_path_list = get_path(d + '/datasets/original/face')
        nonface_img_path_list = get_path(d + '/datasets/original/nonface')
        img_path_list = face_img_path_list + nonface_img_path_list
        # Build the labels: +1 for faces, -1 for non-faces.
        face_label = np.ones(len(face_img_path_list)).astype(int)
        nonface_label = np.ones(len(nonface_img_path_list)).astype(int) * -1
        label = np.insert(nonface_label, 0, values=face_label, axis=0)

        # Shuffle paths and labels in lockstep to build a random dataset.
        temp = list(zip(img_path_list, label))
        random.shuffle(temp)
        img_path_list[:], label[:] = zip(*temp)

        NPD_feature_list = []
        # Read each image, convert it to a 24x24 grayscale image,
        # extract its NPD feature, and dump() the features for later reuse.
        for img_item in img_path_list:
            img = Image.open(img_item).convert('L')  # 'L' = grayscale
            img = img.resize((24, 24), Image.ANTIALIAS)
            # Convert to np.ndarray, the format NPDFeature expects.
            img = np.array(img)
            NPDFeature_obj = feature.NPDFeature(img)
            NPD_feature = NPDFeature_obj.extract()

            NPD_feature_list.append(NPD_feature)

        # Serialize the feature list so later runs can skip extraction.
        with open('NPD_feature.pkl', 'wb') as f:
            pickle.dump(NPD_feature_list, f)

        # Persist the (shuffled) labels alongside the features.
        data = pd.DataFrame({'label': label})
        data.to_csv('label.csv')

    else:
        print("preprocessing has been performed")
Esempio n. 7
0
def getDataX(path):
    """
    Inputs:
    - path  : directory containing the images

    Outputs:
    - dataSetX : a list of NPD feature vectors, one per readable image
    """

    newSize = [24, 24]
    dataSetX = []
    for filename in os.listdir(path):
        # BUG FIX: the original concatenated "\\" by hand, which only works
        # on Windows; os.path.join is portable.
        img = cv2.imread(os.path.join(path, filename), cv2.IMREAD_GRAYSCALE)
        if img is None:
            # Not a readable image (e.g. a stray non-image file) — skip it.
            continue
        res1 = cv2.resize(img, (newSize[0], newSize[1]))  # resize to 24 * 24
        NPD = feature.NPDFeature(res1)   # NPD feature extractor (feature.py)
        feat = NPD.extract()             # extract NPD features
        dataSetX.append(feat.tolist())   # matrix -> list, append to result

    return dataSetX
Esempio n. 8
0
            list(
                map(lambda s: 'datasets/original/face/' + s,
                    os.listdir('datasets/original/face'))))
        pathes = np.append(
            pathes,
            np.array(
                list(
                    map(lambda s: 'datasets/original/nonface/' + s,
                        os.listdir('datasets/original/nonface')))))
        for index, path in enumerate(pathes):
            with Image.open(path) as image:
                print(index, path)
                image = image.convert('L')
                image = image.resize((24, 24))
                imageData = np.array(image)
                npd = feature.NPDFeature(imageData)
                features.append(npd.extract())
        AdaBoostClassifier.save(features, 'features.dump')

    features = np.array(features)
    print(features.shape)

    X_train, X_val, y_train, y_val = train_test_split(features,
                                                      y,
                                                      test_size=0.25)

    classifier = AdaBoostClassifier(DecisionTreeClassifier, 5)
    classifier.fit(X_train, y_train)

    score = classifier.predict_scores(X_val, y_val)
    predict = classifier.predict(X_val)
Esempio n. 9
0
@author: Shirelle
"""

from PIL import Image
import feature
import numpy as np
import pickle

face_dir="./datasets/original/face/face_"
nonface_dir="./datasets/original/nonface/nonface_"

# Accumulate rows in Python lists and stack once at the end.
# FIX: the original called np.vstack inside the loop (quadratic copying)
# with a special case for i == 0; this builds identical X and y in O(n).
_feature_rows = []
_label_rows = []
for i in range(500):
    path = face_dir + str("%.3d" % i) + ".jpg"
    # 'L': 8-bit grayscale, pixel values are shades of gray
    im = Image.open(path).convert("L").resize((24, 24))
    _feature_rows.append(feature.NPDFeature(np.array(im)).extract())
    _label_rows.append([1])   # +1 label: face

for i in range(500):
    path = nonface_dir + str("%.3d" % i) + ".jpg"
    im = Image.open(path).convert("L").resize((24, 24))
    _feature_rows.append(feature.NPDFeature(np.array(im)).extract())
    _label_rows.append([-1])  # -1 label: non-face

# X: (1000, feature_len) feature matrix; y: (1000, 1) label column.
X = np.vstack(_feature_rows)
y = np.array(_label_rows)

Esempio n. 10
0
path1 = '/Users/apple/Documents/ml/3/datasets/original/face/'
path2 = '/Users/apple/Documents/ml/3/datasets/original/nonface/'
size_tran = 24, 24
face = []      # pickled NPD features of face images
nface = []     # pickled NPD features of non-face images
face_l = []
nface_l = []
N = 100
for i in range(N):
    name = path1 + 'face_' + str(i).zfill(3) + '.jpg'
    obj = Image.open(name)  # open the image

    # BUG FIX: Image.convert returns a *new* image — the original discarded
    # the result, so features were extracted from the unconverted image.
    obj = obj.convert('L')  # color -> grayscale
    obj.thumbnail(size_tran, Image.ANTIALIAS)
    npd = feature.NPDFeature(np.array(obj))
    # Each feature vector is stored pickled (protocol `True` == 1).
    face.append(pickle.dumps(npd.extract().tolist(), True))

    name = path2 + 'nonface_' + str(i).zfill(3) + '.jpg'
    obj = Image.open(name)

    obj = obj.convert('L')  # same fix as above
    obj.thumbnail(size_tran, Image.ANTIALIAS)

    npd = feature.NPDFeature(np.array(obj))
    nface.append(pickle.dumps(npd.extract().tolist(), True))
p = np.ones(N)  # +1 labels for the N face images
n = -p          # -1 labels for the N non-face images
Esempio n. 11
0
from PIL import Image
import feature
import numpy as np
import pickle

# Feature-extraction script: build X (NPD features) and Y (labels),
# shuffle them in lockstep, and open the output data files.
X = np.zeros(shape=(165600, ))  # placeholder row, deleted after stacking
Y = np.zeros(shape=(1, ))
# Extract NPD features for 20 face / non-face image pairs.
for i in range(0, 20):
    f = np.array(
        Image.open("F:\机器学习\lab3\ML2019-lab-03-master\data\\face\%03d.jpg" %
                   i))
    X = np.row_stack((X, feature.NPDFeature(f).extract()))
    Y = np.row_stack((Y, np.ones((1, ))))  # +1 label: face
    nf = np.array(
        Image.open(
            "F:\机器学习\lab3\ML2019-lab-03-master\data\\nonface\\non_%03d.jpg" %
            i))
    # BUG FIX: the original extracted features from `f` (the face image)
    # again, so every non-face row carried face features; use `nf`.
    X = np.row_stack((X, feature.NPDFeature(nf).extract()))
    Y = np.row_stack((Y, np.array([-1])))  # -1 label: non-face
X = np.delete(X, 0, axis=0)  # drop the zero placeholder rows
Y = np.delete(Y, 0, axis=0)

# Shuffle X and Y with the same RNG state so rows stay aligned.
state = np.random.get_state()
np.random.shuffle(X)
np.random.set_state(state)
np.random.shuffle(Y)

dataX = open('lab3\\trainingX.data', 'wb')
dataY = open('lab3\\trainingY.data', 'wb')