# Training-data iterator: yields batches from the fold's training indices,
# randomly applying one of two heavy augmentation pipelines (the two
# aug_mega_hardcore ranges — presumably intensity/contrast bounds; confirm
# against its definition, which is outside this view).
# NOTE(review): this first line has lost its leading indentation relative to
# the statements below (extraction artifact) — restore before running.
data_gen = BaseMaskDatasetIterator(train_idx,
                                           random_transformers=[
                                               aug_mega_hardcore((-0.25, 0.6)),
                                               aug_mega_hardcore((-0.6, 0.25))
                                           ],
                                           batch_size=batch_size,
                                           shuffle=True,
                                           seed=1)

        # Re-seed all three RNG sources (NumPy, stdlib random, TensorFlow)
        # from the fold/iteration index `it` so each fold's run is
        # reproducible. NOTE: tf.set_random_seed is the TF1.x API
        # (tf.random.set_seed in TF2).
        np.random.seed(it + 111)
        random.seed(it + 111)
        tf.set_random_seed(it + 111)

        #        tbCallback = TensorBoard(log_dir="tb_logs/densenet_softmax_{0}".format(it), histogram_freq=0, write_graph=True, write_images=False)

        # Stepped LR schedule — presumably (lr, until_epoch) pairs: 1e-5 to
        # epoch 2, 3e-4 to 4, 1e-4 to 6. TODO confirm against schedule_steps
        # (defined elsewhere in the project).
        lrSchedule = LearningRateScheduler(lambda epoch: schedule_steps(
            epoch, [(1e-5, 2), (3e-4, 4), (1e-4, 6)]))

        # DenseNet-121 U-Net with softmax output; weights='imagenet' requests
        # a pretrained encoder. Spatial dims (None, None) allow variable-size
        # inputs. Loss is a combined softmax/dice loss; per-channel rounded
        # dice coefficients are tracked alongside categorical cross-entropy.
        model = get_densenet121_unet_softmax((None, None), weights='imagenet')
        model.compile(loss=softmax_dice_loss,
                      optimizer=Adam(lr=3e-4, amsgrad=True),
                      metrics=[
                          dice_coef_rounded_ch0, dice_coef_rounded_ch1,
                          metrics.categorical_crossentropy
                      ])
        # Initial 6-epoch training pass with the augmenting generator and a
        # streaming validation generator over the fold's validation indices.
        # NOTE(review): this fit_generator call is TRUNCATED here — its
        # remaining arguments and closing paren were lost in extraction;
        # recover them from the original source before running.
        model.fit_generator(generator=data_gen,
                            epochs=6,
                            steps_per_epoch=steps_per_epoch,
                            verbose=2,
                            validation_data=val_data_generator(
                                val_idx, val_batch, validation_steps),
                            validation_steps=validation_steps,
# --- Extraction artifact ("Exemplo n.º 2" separator): everything below is a
# --- second, unrelated training snippet whose opening statements are missing.
                                           # NOTE(review): orphaned keyword
                                           # arguments — the opening of this
                                           # call (a data-iterator
                                           # construction, judging by the
                                           # earlier snippet in this file)
                                           # was lost in extraction.
                                           batch_size=batch_size,
                                           shuffle=True,
                                           seed=1)

        # Re-seed NumPy / stdlib random / TensorFlow from the iteration index
        # `it` for a reproducible fine-tuning run. NOTE: tf.set_random_seed
        # is the TF1.x API (tf.random.set_seed in TF2).
        np.random.seed(it + 111)
        random.seed(it + 111)
        tf.set_random_seed(it + 111)

        #        tbCallback = TensorBoard(log_dir="tb_logs/inception_softmax_{0}".format(it), histogram_freq=0, write_graph=True, write_images=False)

        # Build the Inception-ResNet-v2 U-Net with no pretrained weights, then
        # resume from this fold's checkpoint saved by an earlier stage.
        model = get_inception_resnet_v2_unet_softmax((None, None),
                                                     weights=None)
        model.load_weights(
            path.join(models_folder,
                      'inception_resnet_v2_weights_{0}.h5'.format(it)))
        # Fine-tuning LR schedule — far lower rates than initial training.
        # Presumably (lr, until_epoch) pairs; TODO confirm against
        # schedule_steps (defined elsewhere in the project).
        lrSchedule = LearningRateScheduler(lambda epoch: schedule_steps(
            epoch, [(1e-6, 2), (1e-5, 10), (5e-6, 20), (3e-6, 25)]))
        model.compile(loss=softmax_dice_loss,
                      optimizer=Adam(lr=1e-6, amsgrad=True),
                      metrics=[
                          dice_coef_rounded_ch0, dice_coef_rounded_ch1,
                          metrics.categorical_crossentropy
                      ])
        # Overwrite the SAME checkpoint file that was just loaded, keeping
        # only the weights (not the full model) of the lowest-val_loss epoch.
        model_checkpoint = ModelCheckpoint(path.join(
            models_folder, 'inception_resnet_v2_weights_{0}.h5'.format(it)),
                                           monitor='val_loss',
                                           save_best_only=True,
                                           save_weights_only=True,
                                           mode='min')
        # 25-epoch fine-tuning pass.
        # NOTE(review): this fit_generator call is TRUNCATED here — its
        # remaining arguments (callbacks, validation data, closing paren)
        # were lost in extraction; recover them from the original source.
        model.fit_generator(
            generator=data_gen,
            epochs=25,