Example #1
0
    def StartDemo(self, ref, model_path,
                  capture_width=640, capture_height=480,
                  arch_path="models/model99.txt", input_shape=(80, 80, 2)):
        """Initialise the webcam, rebuild and load the trained model, and open the demo window.

        Parameters
        ----------
        ref : str
            Path to the reference face image read with ``skimage.io.imread``.
        model_path : str
            Weights file name, resolved relative to ``'trained_model/'``.
        capture_width, capture_height : int, optional
            Webcam capture resolution (defaults keep the original 640x480).
        arch_path : str, optional
            Text file describing the network architecture (read by ``read_model``).
        input_shape : tuple, optional
            Input shape handed to ``ModelBuilder`` — presumably
            (height, width, channels); TODO confirm against ModelBuilder.
        """
        self.captureWidth = capture_width
        self.captureHeight = capture_height
        self.cap = cv2.VideoCapture(0)  # default system camera
        # Property ids 3 and 4 are CAP_PROP_FRAME_WIDTH / CAP_PROP_FRAME_HEIGHT;
        # use the named constants instead of the magic numbers.
        self.cap.set(cv2.CAP_PROP_FRAME_WIDTH, self.captureWidth)
        self.cap.set(cv2.CAP_PROP_FRAME_HEIGHT, self.captureHeight)

        weights_dir = 'trained_model/'

        # Rebuild the architecture from its text description, then load the weights.
        architecture = read_model(arch_path)
        modelObject = ModelBuilder.ModelBuilder(architecture, input_shape)
        self.model = modelObject.model
        self.model.load_weights(weights_dir + model_path)

        # Pre-compute the extracted/aligned reference face used by the demo.
        self.ref_img = FaceExtractionPipeline.SingletonPipeline().FaceExtractionPipelineImage(skimage.io.imread(ref))

        demo.Window(ref)
Example #2
0
# defining the folders path train and test
TRAINING_DATASET_FOLDER_NAME = '3_preprocessed_1_dataset train'

# Network input geometry and number of output classes — declared before first
# use (the original used the literal 2 for the labels before defining num_classes).
width = 80
height = 80
depth = 2
num_classes = 2

# Load the training set; limit_value=1 keeps the run small.
X_train, Y_train, _ = GetData(TRAINING_DATASET_FOLDER_NAME, limit_value=1)
Y_train = np_utils.to_categorical(Y_train, num_classes)  # one-hot encode labels
X_train = X_train.astype('float32')
X_train /= np.max(X_train)  # normalise pixel values to [0, 1]

# load the model architecture from file
a = read_model("models/model01.txt")
modelObject = ModelBuilder.ModelBuilder(a, (height, width, depth))
model = modelObject.model

model.compile(
    loss='categorical_crossentropy',  # using the cross-entropy loss function
    optimizer='adam',  # using the Adam optimiser
    metrics=['accuracy'])  # reporting the accuracy

# train the model, holding out 20% of the training data for validation
model.fit(X_train,
          Y_train,
          batch_size=128,
          epochs=1,
          verbose=1,
          validation_split=0.2)
Example #3
0
    # NOTE(review): this is the tail of a confusion-matrix plotting helper;
    # `cm`, `normalize` and the matplotlib figure are set up above this
    # visible region — confirm against the full function.
    fmt = '.2f' if normalize else 'd'  # float format when normalised, integer counts otherwise
    thresh = cm.max() / 2.  # midpoint used to pick a readable text colour per cell
    # Write the value of every cell (i=row=true label, j=col=predicted label)
    # centred in the cell, white on dark cells and black on light ones.
    for i, j in itertools.product(range(cm.shape[0]), range(cm.shape[1])):
        plt.text(j,
                 i,
                 format(cm[i, j], fmt),
                 horizontalalignment="center",
                 color="white" if cm[i, j] > thresh else "black")

    plt.tight_layout()
    plt.ylabel('True label')
    plt.xlabel('Predicted label')


# Restore the trained network: architecture from its text description,
# weights from the saved h5 checkpoint.
a = read_model("models/model08.txt")
modelObject = ModelBuilder(a, (80, 80, 2))
model = modelObject.model
model.load_weights(
    'trained_model/2018-07-04 22:29:20/model08.txt_2018-07-05 12:24:21.h5')
# (a model-graph plot via plot_model was previously done here)

# Load the held-out test set and prepare it the same way as the training data.
# NOTE(review): `num_classes` is not defined in this snippet — presumably 2,
# matching the two-class labels used elsewhere; confirm in the full script.
(X_test, y_test, _) = GetData('lfw-whofitinram_p80x80')
Y_test = np_utils.to_categorical(y_test, num_classes)  # one-hot labels
X_test = X_test.astype('float32')
X_test /= np.max(X_test)  # Normalise data to [0, 1] range

# Compile so the model can be evaluated with accuracy reporting.
model.compile(
    loss='categorical_crossentropy',  # using the cross-entropy loss function
    optimizer='adam',  # using the Adam optimiser
    metrics=['accuracy'])