Example #1
import _init_paths
from data_helper import readUNetData
from unet import UNet
from config import *
import numpy as np
import cv2

# Read data
(train_x, train_y), (test_x, test_y) = readUNetData()
model = UNet(img_height, img_width, save_path=model_path + unet_model_name)
# astype() returns a new array, so the result must be assigned back
train_x = train_x.astype('float32')
train_y = train_y.astype('float32')

# Train
model.compile()
model.train(train_x, train_y)
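
The `UNet` class here is a project-specific wrapper whose internals are not shown. Purely as a sketch of the shape such a wrapper might take, assuming a Keras model underneath (the placeholder layers below are not the author's architecture):

from tensorflow import keras

class UNet:
    """Hypothetical wrapper sketch; the real encoder/decoder stack is not shown here."""
    def __init__(self, img_height, img_width, save_path=None):
        self.save_path = save_path
        inputs = keras.Input(shape=(img_height, img_width, 3))
        x = keras.layers.Conv2D(16, 3, padding='same', activation='relu')(inputs)
        outputs = keras.layers.Conv2D(1, 1, activation='sigmoid')(x)
        self.model = keras.Model(inputs, outputs)

    def compile(self):
        self.model.compile(optimizer='adam', loss='binary_crossentropy')

    def train(self, x, y, epochs=10, batch_size=8):
        self.model.fit(x, y, epochs=epochs, batch_size=batch_size)
        if self.save_path:
            self.model.save(self.save_path)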
Example #2
# Imports used below; IMG_WIDTH, IMG_HEIGHT, UNet, Adam, preprocess_input,
# dice_coef and dice_coef_loss are assumed to come from the surrounding project.
import numpy as np
import cv2
from sklearn.model_selection import train_test_split

X, y = [], []
for i in range(1000):
    img = np.zeros((IMG_WIDTH, IMG_HEIGHT, 3))
    label = np.zeros((IMG_WIDTH, IMG_HEIGHT, 1))
    cx, cy = np.random.randint(0, IMG_WIDTH), np.random.randint(0, IMG_HEIGHT)
    w, h = np.random.randint(10, 30), np.random.randint(10, 30)
    # cv2 drawing calls want a plain tuple of Python ints as the color
    color = tuple(int(c) for c in (np.random.random(3) * 256).astype(int))
    cv2.rectangle(img, (cx, cy), (cx + w, cy + h), color, -1)
    cx, cy = np.random.randint(0, IMG_WIDTH), np.random.randint(0, IMG_HEIGHT)
    r = np.random.randint(10, 40)
    color = tuple(int(c) for c in (np.random.random(3) * 256).astype(int))
    cv2.circle(img, (cx, cy), r, color, -1)
    cv2.circle(label, (cx, cy), r, color, -1)
    label[label > 0] = 1
    X.append(img)
    y.append(label)
X, y = np.asarray(X), np.asarray(y)
X = preprocess_input(X)
print(X.shape, y.shape)
X_train, X_test, y_train, y_test = train_test_split(X,
                                                    y,
                                                    test_size=0.33,
                                                    random_state=42)

# Training.
model = UNet(input_shape=(IMG_WIDTH, IMG_HEIGHT, 3))
model.compile(optimizer=Adam(lr=1e-5),
              loss=dice_coef_loss,
              metrics=[dice_coef])
model.fit(X_train, y_train, validation_data=(X_test, y_test), epochs=100)
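
`dice_coef` and `dice_coef_loss` are not defined in the snippet. A common Keras-backend formulation (a sketch of the usual Dice definition, not necessarily the version this example used):

from keras import backend as K

def dice_coef(y_true, y_pred, smooth=1.0):
    # Dice = (2 * |A ∩ B| + smooth) / (|A| + |B| + smooth)
    y_true_f = K.flatten(y_true)
    y_pred_f = K.flatten(y_pred)
    intersection = K.sum(y_true_f * y_pred_f)
    return (2.0 * intersection + smooth) / (K.sum(y_true_f) + K.sum(y_pred_f) + smooth)

def dice_coef_loss(y_true, y_pred):
    # Minimizing 1 - Dice maximizes overlap between prediction and ground truth.
    return 1.0 - dice_coef(y_true, y_pred)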
Example #3
# Compile the model. Earlier optimizer experiments are kept below for
# reference; `decay` belongs on the optimizer, not on compile().
# model.compile(loss='categorical_crossentropy', optimizer='adadelta', metrics=['accuracy'])
# model.compile(loss='categorical_crossentropy',
#               optimizer=optimizers.Adadelta(lr=0.1, rho=0.95, epsilon=1e-08, decay=0.0005),
#               metrics=['accuracy'])
# model.compile(loss='categorical_crossentropy',
#               optimizer=optimizers.SGD(lr=0.0001, momentum=0.9, decay=0.0005),
#               metrics=['accuracy'])
# model.compile(loss='categorical_crossentropy',
#               optimizer=optimizers.RMSprop(lr=0.00003, rho=0.9, epsilon=1e-08, decay=0.000001),
#               metrics=['accuracy'])
# model.compile(loss='categorical_crossentropy',
#               optimizer=optimizers.SGD(lr=0.00003, momentum=0.9, nesterov=True, decay=1e-6),
#               metrics=['accuracy'])

model.compile(loss='categorical_crossentropy',
              optimizer=optimizers.Nadam(lr=0.000001),
              metrics=['accuracy'])

# model.compile(loss='mean_absolute_error',
#               optimizer=optimizers.SGD(lr=0.0001, momentum=0.9, nesterov=True, decay=0.0005),
#               metrics=['accuracy'])

print_summary(model)
plot_model(model, to_file='model.pdf', show_shapes=True)

history = model.fit_generator(train_generator,
                              steps_per_epoch=200,
                              epochs=epochs1,
                              validation_data=valid_generator,
                              validation_steps=50)
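
`fit_generator` returns a `History` object whose `history` dict maps metric names to per-epoch value lists; a short usage sketch for inspecting the curves:

import matplotlib.pyplot as plt

plt.plot(history.history['loss'], label='train loss')
plt.plot(history.history['val_loss'], label='validation loss')
plt.xlabel('epoch')
plt.legend()
plt.savefig('loss_curve.png')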
Example #4
tblogdir = './logs/' + training_name + '{}'.format(
    datetime.datetime.now().strftime("%Y%m%d-%H%M%S"))

np.random.seed(19)
########################################
#########   MODEL & SESSION   ##########
########################################

tf.reset_default_graph()
sess = tf.Session()
graph = tf.get_default_graph()
set_session(sess)

model = UNet((patchsize, patchsize, 3), filters=features, blocks=blocks)
model.compile(loss='categorical_crossentropy',
              optimizer=Adam(adam),
              metrics=['accuracy'])

########################################
#############  CALLBACKS   #############
########################################

os.makedirs(epoch_folder_name, exist_ok=True)

tensorboard_callback = tf.keras.callbacks.TensorBoard(log_dir=tblogdir,
                                                      write_graph=False)
tensorboard_callback.set_model(model)
skmetrics = SkMetrics()
skmetrics.set_file_writer(tblogdir)

Callbacks = [tensorboard_callback, skmetrics]
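
`SkMetrics` is a custom callback not shown in this snippet. A hedged sketch of one plausible implementation, assuming it computes a scikit-learn metric on held-out data at each epoch end and logs it through a TF1-style summary writer (matching the session code above); the class name and `set_file_writer` come from the snippet, everything else is an assumption:

import tensorflow as tf
from sklearn.metrics import f1_score

class SkMetrics(tf.keras.callbacks.Callback):
    """Hypothetical sketch: log scikit-learn metrics to TensorBoard each epoch."""

    def set_file_writer(self, logdir):
        self.writer = tf.summary.FileWriter(logdir)  # TF1-style writer

    def set_validation_data(self, x_val, y_val):  # assumed helper
        self.x_val, self.y_val = x_val, y_val

    def on_epoch_end(self, epoch, logs=None):
        pred = self.model.predict(self.x_val).argmax(axis=-1).ravel()
        true = self.y_val.argmax(axis=-1).ravel()
        score = f1_score(true, pred, average='macro')
        summary = tf.Summary(value=[tf.Summary.Value(tag='val_f1', simple_value=score)])
        self.writer.add_summary(summary, epoch)
        self.writer.flush()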
Example #5
train_df = '../input/kaggle/stage_1_train_images'  # train image folder
filenames = os.listdir(train_df)
random.shuffle(filenames)
n_validation = 2560
train_filenames = filenames[n_validation:]
validation_filenames = filenames[:n_validation]
print('number of train samples=', len(train_filenames))
print('number of validation samples=', len(validation_filenames))

BATCH_SIZE = 16
IMAGE_SIZE = 320
N_EPOCH = 5

# build model
model = UNet(input_shape=(IMAGE_SIZE, IMAGE_SIZE, 1))
model.compile(optimizer='adam',
              loss=keras.losses.binary_crossentropy,
              metrics=[iou_segmentation])

# visualize model
plot_model(model, 'model.png', show_shapes=True)
model.summary()

callback_lr = LearningRate(lr=0.0001)
callbacks = [callback_lr]

# training
train_gen = Generator(train_df,
                      train_filenames,
                      box_locations)
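
`iou_segmentation` is assumed to be a custom intersection-over-union metric; a hedged Keras-backend sketch of one common formulation (not necessarily the author's):

from keras import backend as K

def iou_segmentation(y_true, y_pred, smooth=1.0):
    # IoU = |A ∩ B| / |A ∪ B|, computed on thresholded predictions
    y_pred = K.cast(K.greater(y_pred, 0.5), 'float32')
    intersection = K.sum(y_true * y_pred)
    union = K.sum(y_true) + K.sum(y_pred) - intersection
    return (intersection + smooth) / (union + smooth)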
Example #6
valid_y = sorted(glob(os.path.join(valid_path, "mask", "*.jpg")))

H = 360
W = 640
batch_size = 2
lr = 1e-3
epochs = 200
model_path = "files/model.h5"
csv_path = "files/data.csv"

train_dataset = tf_dataset(train_x, train_y, batch=batch_size)
valid_dataset = tf_dataset(valid_x, valid_y, batch=batch_size)

model = UNet(H, W)
metrics = [dice_coef, iou, MeanIoU(num_classes=2), Recall(), Precision()]
model.compile(loss=dice_loss, optimizer=Adam(lr), metrics=metrics)
model.summary()

callbacks = [
    ModelCheckpoint(model_path, verbose=1, save_best_only=True),
    # ReduceLROnPlateau(monitor='val_loss', factor=0.1, patience=10, min_lr=1e-7, verbose=1),
    CSVLogger(csv_path),
    TensorBoard(),
    EarlyStopping(monitor='val_loss',
                  patience=50,
                  restore_best_weights=False),
    SGDRScheduler(min_lr=1e-6,
                  max_lr=1e-3,
                  # batches per epoch, not epochs divided by batch size
                  steps_per_epoch=np.ceil(len(train_x) / batch_size),
                  lr_decay=0.9,
                  cycle_length=5),
]
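
`tf_dataset` builds the input pipeline from the image and mask path lists; a hedged sketch of a typical `tf.data` implementation, assuming JPEG files resized to the H×W above (the helper's real signature beyond `batch=` is not shown):

import tensorflow as tf

def tf_dataset(x_paths, y_paths, batch=2, h=360, w=640):
    """Hypothetical pipeline: load, decode, resize and normalize image/mask pairs."""
    def load(img_path, mask_path):
        img = tf.image.decode_jpeg(tf.io.read_file(img_path), channels=3)
        img = tf.image.resize(img, (h, w)) / 255.0
        mask = tf.image.decode_jpeg(tf.io.read_file(mask_path), channels=1)
        mask = tf.image.resize(mask, (h, w)) / 255.0
        return img, mask

    ds = tf.data.Dataset.from_tensor_slices((x_paths, y_paths))
    ds = ds.map(load, num_parallel_calls=tf.data.experimental.AUTOTUNE)
    return ds.batch(batch).prefetch(2)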
Example #7
# Sorted so image/mask pairs line up; the images directory name is assumed.
im_list = sorted(glob.glob(TRAIN_PATH + 'images/*'))
mask_list = sorted(glob.glob(TRAIN_PATH + 'masks/*'))

X_train = []
Y_train = []

width = 1024

for path in tqdm(im_list):
    im = cv2.imread(path)
    im = cv2.resize(im, (width, width), interpolation=cv2.INTER_CUBIC)
    X_train.append(im)

for path in tqdm(mask_list):
    mask = cv2.imread(path, 0)
    # Nearest-neighbour resizing keeps the mask binary; cubic would blend values.
    mask = cv2.resize(mask, (width, width), interpolation=cv2.INTER_NEAREST)
    Y_train.append(mask)

# Scale to [0, 1]: binary_crossentropy expects targets in that range.
X_train = np.array(X_train, dtype='float32') / 255.0
Y_train = np.array(Y_train, dtype='float32') / 255.0

Y_train = Y_train.reshape(Y_train.shape + (1,))

model = UNet((width, width))
model.compile(optimizer='adam', loss='binary_crossentropy', metrics=['mse', dice_coef])
model.summary()

earlystopper = EarlyStopping(patience=5, verbose=1)
# mode='max' is needed: dice should be maximized, and the default 'auto' mode
# falls back to minimizing for metric names Keras does not recognize.
checkpointer = ModelCheckpoint('unet.{epoch:02d}-{val_loss:.2f}.h5',
                               monitor='val_dice_coef',
                               mode='max',
                               verbose=1,
                               save_best_only=True)
results = model.fit(X_train, Y_train, validation_split=0.1, batch_size=16, epochs=100, 
                    callbacks=[earlystopper, checkpointer])
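
After training, the network outputs per-pixel probabilities; a short usage sketch for turning them into binary masks (the 0.5 threshold is a conventional choice, not from the source):

preds = model.predict(X_train[:4])            # per-pixel probabilities in [0, 1]
binary_masks = (preds > 0.5).astype('uint8')  # threshold to a hard mask
print(binary_masks.shape)                     # (4, width, width, 1)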