Example #1
lb = LabelBinarizer()
train_y = lb.fit_transform(train_y)
test_y = lb.transform(test_y)

# Initialize the label names for the CIFAR-10 dataset
label_names = [
    "airplane", "automobile", "bird", "cat", "deer", "dog", "frog", "horse",
    "ship", "truck"
]
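
# NOTE: the step_decay function handed to LearningRateScheduler below is not
# defined in this snippet. A minimal sketch of a step-based schedule is shown
# here; the initial rate, drop factor, and drop interval are assumptions, not
# values taken from the original code.
def step_decay(epoch):
    init_lr = 0.01    # assumed initial learning rate (matches the SGD lr below)
    factor = 0.5      # assumed drop factor
    drop_every = 5    # assumed number of epochs between drops
    return float(init_lr * (factor ** ((1 + epoch) // drop_every)))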

# Define the set of callbacks to be passed to the model during training
callbacks = [LearningRateScheduler(step_decay)]

# Initialize the optimizer and model
optimizer = SGD(lr=0.01, momentum=0.9, nesterov=True)
model = MiniVGGNet.build(width=32, height=32, depth=3, classes=10)
model.compile(loss="categorical_crossentropy",
              optimizer=optimizer,
              metrics=["accuracy"])

# Train the network
H = model.fit(train_x,
              train_y,
              validation_data=(test_x, test_y),
              batch_size=64,
              epochs=40,
              callbacks=callbacks,
              verbose=1)

# Evaluate the network
print("[INFO]: Evaluating....")
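
The `MiniVGGNet` class used throughout these examples is defined elsewhere. As a rough sketch only: a "mini VGG"-style network stacks 3x3 convolutions with batch normalization, max pooling, and dropout ahead of a fully connected softmax head. The filter counts and dropout rates below are assumptions, not the original implementation.

from keras.models import Sequential
from keras.layers import Conv2D, MaxPooling2D, BatchNormalization
from keras.layers import Dropout, Flatten, Dense

class MiniVGGNet:
    @staticmethod
    def build(width, height, depth, classes):
        # Assumes channels-last image ordering
        model = Sequential()
        input_shape = (height, width, depth)

        # Block 1: two 3x3 CONV layers => POOL (filter counts assumed)
        model.add(Conv2D(32, (3, 3), padding="same", activation="relu",
                         input_shape=input_shape))
        model.add(BatchNormalization())
        model.add(Conv2D(32, (3, 3), padding="same", activation="relu"))
        model.add(BatchNormalization())
        model.add(MaxPooling2D(pool_size=(2, 2)))
        model.add(Dropout(0.25))

        # Block 2: two 3x3 CONV layers => POOL
        model.add(Conv2D(64, (3, 3), padding="same", activation="relu"))
        model.add(BatchNormalization())
        model.add(Conv2D(64, (3, 3), padding="same", activation="relu"))
        model.add(BatchNormalization())
        model.add(MaxPooling2D(pool_size=(2, 2)))
        model.add(Dropout(0.25))

        # Fully connected head and softmax classifier
        model.add(Flatten())
        model.add(Dense(512, activation="relu"))
        model.add(BatchNormalization())
        model.add(Dropout(0.5))
        model.add(Dense(classes, activation="softmax"))

        return model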
Example #2
aap = AspectAwarePreprocessor(64,64)
sdl = SimpleDatasetLoader(preprocessors=[aap])
(data, labels) = sdl.load(imagePaths, verbose=500)

# Preprocess the data
data = data.astype("float")/255.0
lb = LabelBinarizer()
labels = lb.fit_transform(labels)
trainX, testX, trainY, testY = train_test_split(data, labels, random_state=42, test_size=0.25)

# Build the network and apply data augmentation
opt = SGD(lr=0.05, nesterov=True, momentum=0.9)
aug = ImageDataGenerator(rotation_range=30, width_shift_range=0.1, zoom_range=0.2,
                         height_shift_range=0.1, shear_range=0.2, horizontal_flip=True,
                         fill_mode="nearest")
model = MiniVGGNet.build(width=64, height=64, depth=3, classes=len(flower_className))
model.compile(optimizer=opt, loss="categorical_crossentropy", metrics=["accuracy"])
H = model.fit_generator(aug.flow(trainX, trainY, batch_size=32), steps_per_epoch=len(trainX)//32,
                        validation_data=(testX, testY), epochs=100, verbose=1)

#saving the model
model.save("MiniVGGNet on flowers 17 dataset with data augmentation.hdf5")

# Plot the training progress
plt.style.use("ggplot")
plt.figure("MiniVGGNet on flowers 17 with data augmentation")
plt.plot(np.arange(0, 100), H.history["acc"], label = "Training accuracy")
plt.plot(np.arange(0, 100), H.history["val_acc"], label = "Validation accuracy")
plt.title("Training and validation accuracy")
plt.xlabel("Epochs")
plt.ylabel("Accuracy")
plt.legend()
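
The `AspectAwarePreprocessor` and `SimpleDatasetLoader` helpers used in this example are custom classes not shown here. A rough sketch of an aspect-aware preprocessor (resize along the shorter side, then center-crop to the target size; this exact behavior is an assumption) could look like:

import cv2

class AspectAwarePreprocessor:
    def __init__(self, width, height, inter=cv2.INTER_AREA):
        self.width = width
        self.height = height
        self.inter = inter

    def preprocess(self, image):
        # Resize along the shorter dimension so the aspect ratio is preserved
        (h, w) = image.shape[:2]
        if w < h:
            scale = self.width / float(w)
            image = cv2.resize(image, (self.width, int(h * scale)),
                               interpolation=self.inter)
        else:
            scale = self.height / float(h)
            image = cv2.resize(image, (int(w * scale), self.height),
                               interpolation=self.inter)

        # Center-crop to the target size, then resize away any rounding drift
        (h, w) = image.shape[:2]
        dh = (h - self.height) // 2
        dw = (w - self.width) // 2
        image = image[dh:dh + self.height, dw:dw + self.width]
        return cv2.resize(image, (self.width, self.height), interpolation=self.inter)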
Example #3
labels = np.array(labels)

# Convert the labels from integers to vectors
le = LabelEncoder().fit(labels)
labels = np_utils.to_categorical(le.transform(labels), 2)

# Account for skew in the labeled data
class_totals = labels.sum(axis=0)
class_weight = class_totals.max() / class_totals

# Partition the data into training data (80%) and testing data (20%)
(train_x, test_x, train_y, test_y) = train_test_split(data, labels, test_size=0.20, stratify=labels, random_state=42)

# Initialize the model
print("[INFO]: Compiling model....")
model = MiniVGGNet.build(width=28, height=28, depth=1, classes=2)
model.compile(loss="binary_crossentropy", optimizer="adam", metrics=["accuracy"])

# Train the network
print("[INFO]: Training....")
H = model.fit(train_x, train_y, validation_data=(test_x, test_y), class_weight=class_weight,
              batch_size=64, epochs=15, verbose=1)

# Evaluate the network
print("[INFO]: Evaluating....")
predictions = model.predict(test_x, batch_size=64)
print(classification_report(test_y.argmax(axis=1), predictions.argmax(axis=1), target_names=le.classes_))

# Save the model to disk
print("[INFO]: Serializing network....")
model.save(args["model"])
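
One portability note on the class-weight handling above: recent versions of Keras/TensorFlow expect `class_weight` to be a dictionary mapping class indices to weights rather than a NumPy array, so on newer releases the array computed above would need a small conversion before being passed to `model.fit`:

# Convert the per-class weight array into the dict form expected by newer Keras
class_weight = {i: float(w) for i, w in enumerate(class_weight)}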
Example #4
from keras.optimizers import SGD
import matplotlib
matplotlib.use("Agg")
import matplotlib.pyplot as plt

# Load the CIFAR-10 dataset and preprocess it
(trainX, trainY), (testX, testY) = cifar10.load_data()
trainX = trainX.astype("float")/255.0
testX = testX.astype("float")/255.0
lb = LabelBinarizer()
trainY = lb.fit_transform(trainY)
testY = lb.transform(testY)

# Initialize the optimizer and model (SGD with time-based learning rate decay)
sgd = SGD(lr=0.01, decay=0.01/40, momentum=0.9, nesterov=True)
model = MiniVGGNet.build(width=32, height=32, depth=3, classes=10)
model.compile(optimizer=sgd, loss="categorical_crossentropy", metrics=["accuracy"])
model.fit(trainX, trainY, validation_data=(testX, testY), epochs=40, batch_size=64)
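
This snippet stops right after training. If you also want the per-class report the other examples print, a sketch of the usual evaluation step (the CIFAR-10 label names and batch size mirror Example #1 and are otherwise assumptions) would be:

from sklearn.metrics import classification_report

label_names = ["airplane", "automobile", "bird", "cat", "deer",
               "dog", "frog", "horse", "ship", "truck"]
predictions = model.predict(testX, batch_size=64)
print(classification_report(testY.argmax(axis=1),
                            predictions.argmax(axis=1),
                            target_names=label_names))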

Example #5
sp = SimplePreprocessor(32, 32)
sdl = SimpleDatasetLoader(preprocessors=[sp])
(dataset, labels) = sdl.load(imagePaths, verbose=1000)
dataset = dataset.astype('float') / 255.0
trainX, testX, trainY, testY = train_test_split(dataset,
                                                labels,
                                                random_state=42,
                                                test_size=0.25)
lb = LabelBinarizer()
trainY = lb.fit_transform(trainY)
testY = lb.transform(testY)

# Build the model and train it with standard SGD (no learning rate decay here)
sgd = SGD(lr=0.01, momentum=0.9, nesterov=True)
#model = LeNet.build(width=32, height=32, depth=3, classes = 3)
model = MiniVGGNet.build(width=32, height=32, depth=3, classes=3)
model.compile(optimizer=sgd,
              loss="categorical_crossentropy",
              metrics=['accuracy'])
H = model.fit(trainX,
              trainY,
              validation_data=(testX, testY),
              epochs=30,
              batch_size=64)

# Plot the training progress
plt.style.use('ggplot')
plt.figure("Training MiniVGGNet on animals without Decay")
plt.plot(np.arange(0, 30), H.history['acc'], label="Training accuracy")
plt.plot(np.arange(0, 30), H.history['val_acc'], label="Validation accuracy")
plt.xlabel("Epochs")
plt.ylabel("Accuracy")
plt.legend()
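
The figure title indicates this run deliberately skips learning rate decay. To reproduce a decayed variant (using the same time-based `decay` argument that Example #4 uses; the exact value here is an assumption tied to the 30 training epochs), the optimizer line would change to:

# Time-based decay: the learning rate shrinks slightly after every update
sgd = SGD(lr=0.01, decay=0.01 / 30, momentum=0.9, nesterov=True)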
Example #6
(train_x, test_x, train_y, test_y) = train_test_split(data, labels, test_size=0.25, random_state=42)

# Convert the labels from integers to vectors
lb = LabelBinarizer()
train_y = lb.fit_transform(train_y)
test_y = lb.transform(test_y)

# construct the image generator for data augmentation
aug = ImageDataGenerator(rotation_range=30, width_shift_range=0.1,
	height_shift_range=0.1, shear_range=0.2, zoom_range=0.2,
	horizontal_flip=True, fill_mode="nearest")

# Initialize the optimizer and model
print("[INFO]: Compiling model....")
optimizer = SGD(lr=0.05)
model = MiniVGGNet.build(width=64, height=64, depth=3, classes=len(classNames))
model.compile(loss="categorical_crossentropy", optimizer=optimizer, metrics=["accuracy"])

# Train the network
print("[INFO]: Training....")
H = model.fit_generator(aug.flow(train_x, train_y, batch_size=32), 
	validation_data=(test_x, test_y), steps_per_epoch=len(train_x)//32, 
	epochs=100, verbose=1)

# Evaluate the network
print("[INFO]: Evaluating....")
predictions = model.predict(test_x, batch_size=32)
print(classification_report(test_y.argmax(axis=1), predictions.argmax(axis=1), target_names=classNames))

# Plot the training loss and accuracy
plt.style.use("ggplot")
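
The listing is cut off after setting the plot style. A sketch of the rest of the plotting block, following the same pattern as the earlier examples (the history keys assume the older Keras releases these snippets target, and the output filename is a placeholder):

plt.figure()
plt.plot(np.arange(0, 100), H.history["loss"], label="Training loss")
plt.plot(np.arange(0, 100), H.history["val_loss"], label="Validation loss")
plt.plot(np.arange(0, 100), H.history["acc"], label="Training accuracy")
plt.plot(np.arange(0, 100), H.history["val_acc"], label="Validation accuracy")
plt.title("Training loss and accuracy")
plt.xlabel("Epoch #")
plt.ylabel("Loss / Accuracy")
plt.legend()
plt.savefig("training_plot.png")  # placeholder output path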