Example #1
lb = LabelBinarizer()
trainY = lb.fit_transform(trainY)
testY = lb.transform(testY)
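# Illustrative sketch (not part of the original script): LabelBinarizer
# one-hot encodes string labels, with column order given by lb.classes_.
# Note the caveat mentioned below -- with only two classes it returns a
# single column, which is why binary_crossentropy is suggested for the
# 2-class case.
from sklearn.preprocessing import LabelBinarizer as _LB

lb_demo = _LB()
print(lb_demo.fit_transform(["cat", "dog", "panda", "cat"]))
# [[1 0 0]
#  [0 1 0]
#  [0 0 1]
#  [1 0 0]]
print(lb_demo.classes_)  # ['cat' 'dog' 'panda']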

# construct the image generator for data augmentation
aug = ImageDataGenerator(rotation_range=30,
                         width_shift_range=0.1,
                         height_shift_range=0.1,
                         shear_range=0.2,
                         zoom_range=0.2,
                         horizontal_flip=True,
                         fill_mode="nearest")

# initialize our VGG-like Convolutional Neural Network
model = SmallVGGNet.build(width=64,
                          height=64,
                          depth=3,
                          classes=len(lb.classes_))

# initialize our initial learning rate, # of epochs to train for,
# and batch size
INIT_LR = 0.01
EPOCHS = 75
BS = 32

# initialize the model and optimizer (you'll want to use
# binary_crossentropy for 2-class classification)
print("[INFO] training network...")
opt = SGD(lr=INIT_LR, decay=INIT_LR / EPOCHS)
model.compile(loss="categorical_crossentropy",
              optimizer=opt,
              metrics=["accuracy"])
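
# Sketch of the assumed continuation (the fit call itself is not shown in
# this excerpt): the augmentation generator is typically streamed into
# training, with the raw test split used for validation. Older standalone
# Keras versions use model.fit_generator instead of model.fit here.
H = model.fit(
    aug.flow(trainX, trainY, batch_size=BS),
    validation_data=(testX, testY),
    steps_per_epoch=len(trainX) // BS,
    epochs=EPOCHS)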
Example #2
'''
Encode labels in a one-vs-all fashion
'''
lb = LabelBinarizer()
lb.fit(np.arange(digits, dtype='uint8'))
y_train = lb.transform(y_train)
y_test = lb.transform(y_test)

# Instantiate the data generator for the augmented data set.
datagen = ImageDataGenerator(rotation_range=85,
                             width_shift_range=0.2,
                             height_shift_range=0.2,
                             zoom_range=0.2,
                             horizontal_flip=True)
'''
Instantiate the model with input shape 64x64x1
'''
model = SmallVGGNet.build(width=64, height=64, depth=1, classes=digits)
'''
Define the learning rate, # of epochs, momentum and batch_size values
'''
LR = 0.01
EPOCHS = 50
MOMENT = 0.0
BATCH = 32
'''
Instantiate optimizer and Compile model
TO DO: play with the optimizer params: lr, decay, momentum
'''
opt = SGD(lr=LR, momentum=MOMENT)
model.compile(loss="categorical_crossentropy",
              optimizer=opt,
              metrics=["accuracy"])
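
# Sketch for the TO DO above (illustrative values, not from the original):
# a common starting point is Nesterov momentum plus a linear
# learning-rate decay schedule.
opt = SGD(lr=LR, momentum=0.9, decay=LR / EPOCHS, nesterov=True)
model.compile(loss="categorical_crossentropy",
              optimizer=opt,
              metrics=["accuracy"])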
Example #3
data = np.array(data, dtype="float") / 255.0
labels = np.array(labels)

# Split the data into train (75%) and test (25%) partitions
(trainX, testX, trainY, testY) = train_test_split(data,
                                                  labels,
                                                  test_size=0.25,
                                                  random_state=42)

# Convert the labels to one-hot vectors
lb = LabelBinarizer()
trainY = lb.fit_transform(trainY)
testY = lb.transform(testY)

# Initialize the CNN
model = SmallVGGNet.build(width=64, height=64, depth=3, classes=4)

# Initialize the learning rate, # of epochs, and batch size
INIT_LR = 0.01
EPOCHS = 75
BS = 32

# Initialize the model and optimizer
opt = SGD(lr=INIT_LR, decay=INIT_LR / EPOCHS)
model.compile(loss="categorical_crossentropy",
              optimizer=opt,
              metrics=["accuracy"])

# Train the network
H = model.fit(trainX,
              trainY,
              # remaining arguments are assumed; the excerpt ends mid-call
              validation_data=(testX, testY),
              epochs=EPOCHS,
              batch_size=BS)
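
# Sketch of the assumed continuation (not in the original excerpt): evaluate
# the trained network on the held-out split with a classification report.
from sklearn.metrics import classification_report

predictions = model.predict(testX, batch_size=BS)
print(classification_report(testY.argmax(axis=1),
                            predictions.argmax(axis=1),
                            target_names=lb.classes_))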
Example #4
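    # when working with a simple fully-connected network, flatten the image
    # into a single feature vector and add the batch dimension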
    image = image.flatten()
    image = image.reshape((1, image.shape[0]))

# otherwise, we must be working with a CNN -- don't flatten the
# image, simply add the batch dimension
else:
    image = image.reshape((1, image.shape[0], image.shape[1], image.shape[2]))

# load the model and label binarizer
print("[INFO] loading network and label binarizer...")
lb = pickle.loads(open(args["label_bin"], "rb").read())
#lb = pickle.loads(open(label_bin, "rb").read())

#model = load_model(args["model"])
model = SmallVGGNet.build(width=SIZE[0],
                          height=SIZE[1],
                          depth=3,
                          classes=len(lb.classes_))
model.load_weights(args["model"], by_name=True, skip_mismatch=True)

print("labels", lb)
# make a prediction on the image
preds = model.predict(image)

# find the class label index with the largest corresponding
# probability
i = preds.argmax(axis=1)[0]
label = lb.classes_[i]

# draw the class label + probability on the output image
text = "{}: {:.2f}%".format(label, preds[0][i] * 100)
cv2.putText(output, text, (10, 30), cv2.FONT_HERSHEY_SIMPLEX, 0.7, (0, 0, 255),
            2)  # thickness value assumed; the excerpt ends mid-call
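
# Sketch of the assumed continuation: display (or save) the annotated image.
cv2.imshow("Image", output)
cv2.waitKey(0)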