Example #1
# Check to see if we are applying "on the fly" data augmentation and if so re-instantiate the object
if args["augment"] > 0:
    print("[INFO] Performing 'on the fly' data augmentation")
    aug = ImageDataGenerator(rotation_range=20,
                             zoom_range=0.15,
                             width_shift_range=0.2,
                             height_shift_range=0.2,
                             shear_range=0.15,
                             horizontal_flip=True,
                             fill_mode="nearest")

# Initialize the optimizer and model
print("[INFO] Compiling the model...")
opt = SGD(lr=INIT_LR, momentum=0.9, decay=INIT_LR / EPOCHS)
model = ResNet.build(64, 64, 3, 2, (2, 3, 4), (32, 64, 128, 256), reg=0.0001)
model.compile(loss="binary_crossentropy", optimizer=opt, metrics=["accuracy"])

# Train the network
print("[INFO] Training network for {} epochs...".format(EPOCHS))
H = model.fit_generator(aug.flow(trainX, trainY, batch_size=BS),
                        validation_data=(testX, testY),
                        steps_per_epoch=len(trainX) // BS,
                        epochs=EPOCHS)

# Evaluate the network
print("[INFO] Evaluating network...")
predictions = model.predict(testX, batch_size=BS)
print(classification_report(testY.argmax(axis=1),
                            predictions.argmax(axis=1)))

Example #2
#%%
## Define the model
#model_obj = NNModels()
#model = model_obj.base_model(input_shape,num_classes)
#
##sgd = SGD(lr=0.01, decay=1e-6, momentum=0.9, nesterov=True)
##model.compile(loss='categorical_crossentropy', optimizer=sgd,metrics=["accuracy"])
#model.compile(loss='categorical_crossentropy', optimizer='rmsprop',metrics=["accuracy"])
#model.summary()

# initialize our ResNet model and compile it
model = ResNet.build(64, 64, 3, num_classes,
                     (3, 4, 6), (64, 128, 256, 512),
                     reg=0.0005)
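# (In this ResNet.build signature the positional arguments are assumed to be:
# input width, height and depth, the number of classes, the number of residual
# modules per stage, and the filter counts -- the first for the initial conv
# layer and the remaining ones for each stage; reg is the L2 weight decay.)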
# opt = SGD(lr=INIT_LR, momentum=0.9)  # unused: the model below is compiled with the 'adam' optimizer
model.compile(loss="categorical_crossentropy",
              optimizer='adam',
              metrics=["accuracy"])
model.summary()
#%%
# define the data generator to load data
base_dir = 'MURA-v1.1'
train_file = 'train_data_files.csv'
val_file = 'val_data_files.csv'

train_generator = DataGenerator(data_path=os.path.join(base_dir, train_file),
                                target_size=(img_w, img_h),
                                num_classes=num_classes)
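# The DataGenerator class used above is not shown in this snippet. A minimal
# sketch of what such a CSV-driven generator might look like, assuming the CSV
# lists one image path and integer label per row (the column names "path" and
# "label" are hypothetical):
import numpy as np
import pandas as pd
from tensorflow.keras.utils import Sequence, to_categorical
from tensorflow.keras.preprocessing.image import load_img, img_to_array

class DataGeneratorSketch(Sequence):
    def __init__(self, data_path, target_size, num_classes, batch_size=32):
        self.df = pd.read_csv(data_path)
        self.target_size = target_size
        self.num_classes = num_classes
        self.batch_size = batch_size

    def __len__(self):
        # number of batches per epoch
        return int(np.ceil(len(self.df) / self.batch_size))

    def __getitem__(self, idx):
        # load and scale one batch of images, one-hot encode the labels
        rows = self.df.iloc[idx * self.batch_size:(idx + 1) * self.batch_size]
        images = np.array([
            img_to_array(load_img(p, target_size=self.target_size)) / 255.0
            for p in rows["path"]
        ])
        labels = to_categorical(rows["label"].values, num_classes=self.num_classes)
        return images, labels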
Example #3
# initialize the decay for the optimizer
decay = 0.0

# if we are using Keras' "standard" decay, then we need to set the
# decay parameter
if args["schedule"] == "standard":
    print("[INFO] using 'keras standard' learning rate decay...")
    decay = 1e-1 / epochs
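    # (with Keras' time-based decay the effective rate after each update is
    # roughly lr / (1 + decay * iterations), so the rate is annealed gradually
    # over the course of training)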

# otherwise, no learning rate schedule is being used ("schedule" is assumed to
# have been initialized earlier from the --schedule argument)
elif schedule is None:
    print("[INFO] no learning rate schedule being used")

# initialize our optimizer and model, then compile it
opt = SGD(lr=1e-1, momentum=0.9, decay=decay)
model = ResNet.build(32, 32, 3, 10, (9, 9, 9), (64, 64, 128, 256), reg=0.0005)
model.compile(loss="categorical_crossentropy",
              optimizer=opt,
              metrics=["accuracy"])

# train the network
H = model.fit(trainX,
              trainY,
              validation_data=(testX, testY),
              batch_size=128,
              epochs=epochs,
              callbacks=callbacks,
              verbose=1)

# evaluate the network
print("[INFO] evaluating network...")
Example #4
                                    class_mode="categorical",
                                    target_size=(64, 64),
                                    color_mode="rgb",
                                    shuffle=False,
                                    batch_size=BS)

# initialize the testing generator
testGen = valAug.flow_from_directory(config.TEST_PATH,
                                     class_mode="categorical",
                                     target_size=(64, 64),
                                     color_mode="rgb",
                                     shuffle=False,
                                     batch_size=BS)

# initialize our ResNet model and compile it
model = ResNet.build(64, 64, 3, 2, (3, 4, 6), (64, 128, 256, 512), reg=0.0005)
opt = SGD(lr=INIT_LR, momentum=0.9)
model.compile(loss="binary_crossentropy", optimizer=opt, metrics=["accuracy"])

# define our set of callbacks and fit the model
callbacks = [LearningRateScheduler(poly_decay)]
H = model.fit(x=trainGen,
              steps_per_epoch=totalTrain // BS,
              validation_data=valGen,
              validation_steps=totalVal // BS,
              epochs=NUM_EPOCHS,
              callbacks=callbacks)
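# The poly_decay helper passed to LearningRateScheduler above is not shown in
# this snippet. A minimal sketch of a polynomial learning-rate schedule,
# assuming a linear decay (power = 1.0) from INIT_LR down to zero over
# NUM_EPOCHS; in the full script it would be defined before the callbacks list:
def poly_decay(epoch):
    # compute the learning rate for this epoch based on polynomial decay
    maxEpochs = NUM_EPOCHS
    baseLR = INIT_LR
    power = 1.0
    alpha = baseLR * (1 - (epoch / float(maxEpochs))) ** power
    return alpha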

# reset the testing generator and then use our trained model to
# make predictions on the data
print("[INFO] evaluating network...")
Example #5
# construct the training image generator for data augmentation
aug = ImageDataGenerator(rotation_range=18,
                         zoom_range=0.15,
                         width_shift_range=0.2,
                         height_shift_range=0.2,
                         shear_range=0.15,
                         horizontal_flip=True,
                         fill_mode="nearest")

# if there is no specific model checkpoint supplied, then initialize
# the network and compile the model
if args["model_load"] is None:
    print("[INFO] compiling model...")
    model = ResNet.build(64, 64, 3, 2,
                         (3, 4, 6), (64, 128, 256, 512),
                         reg=0.0005, dataset="tiny_imagenet")
    opt = SGD(lr=1e-1, momentum=0.9)
    model.compile(loss="binary_crossentropy",
                  optimizer=opt,
                  metrics=["accuracy"])

# otherwise, load the checkpoint from disk
else:
    print("[INFO] loading {}...".format(args["model"]))
    model = load_model(args["model"])

    # update the learning rate
    print("[INFO] old learning rate: {}".format(K.get_value(
        model.optimizer.lr)))
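    # The snippet is truncated here. A minimal sketch of how the learning-rate
    # update typically continues, using a hypothetical new rate of 1e-2:
    K.set_value(model.optimizer.lr, 1e-2)
    print("[INFO] new learning rate: {}".format(K.get_value(
        model.optimizer.lr)))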
Example #6
	batch_size=BS)
 
# testing generator
testGen = valAug.flow_from_directory(
	config.TEST_PATH,
	class_mode="categorical",
	target_size=(64, 64),
	color_mode="rgb",
	shuffle=False,
	batch_size=BS)


# initialize the ResNet model (a deep convolutional neural network) and compile it
model = ResNet.build(64, 64, 3,            # image width, height and RGB channels
                     2,                    # two classes
                     (3, 4, 6),            # residual modules per stage
                     (64, 128, 256, 512),  # filters: initial conv, then one per stage
                     reg=0.0005)

# define the optimizer
opt = SGD(lr=INIT_LR, momentum=0.9)

# compile the network with the loss, optimizer and metrics
model.compile(loss="binary_crossentropy", optimizer=opt,
              metrics=["accuracy"])


# set of callbacks
callbacks = [LearningRateScheduler(poly_decay)]

# fit the model