Example #1
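            # For each city index i: skip cities that are ignored or whose fold
            # weight file is missing (append None instead), otherwise build the
            # small VGG U-Net and load its per-city weights for this fold.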
            if i in ignored_cities or not path.isfile(
                    path.join(
                        models_folder,
                        'vgg_smallest_model_weights4_{0}_{1}.h5'.format(
                            cities[i], it))):
                models.append(None)
                continue
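            # Create the prediction output folder for this fold / city if needed.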
            if not path.isdir(
                    path.join(pred_folder, model_name, str(it), cities[i])):
                mkdir(
                    path.join(pred_folder, model_name, str(it), cities[i]))
            model = get_vgg_unet_small(input_shape, weights=None)
            model.load_weights(
                path.join(
                    models_folder,
                    'vgg_smallest_model_weights4_{0}_{1}.h5'.format(
                        cities[i], it)))
            models.append(model)

        print('Predicting fold', it)
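        # Walk every test folder and score each multispectral (MUL) GeoTIFF with
        # the model that matches the city encoded in its file name.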
        for d in test_folders:
            for f in tqdm(sorted(listdir(path.join(d, 'MUL')))):
                if path.isfile(path.join(d, 'MUL', f)) and '.tif' in f:
                    img_id = f.split('MUL_')[1].split('.')[0]
                    cinp = np.zeros((4, ))
                    cinp[cities.index(img_id.split('_')[2])] = 1.0
                    cid = cinp.argmax()
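The snippet breaks off after the city index cid is computed. The sketch below shows how the matching per-city model might then be applied to one image; the skimage.io reader, the intensity scaling and the output path handling are assumptions for illustration, not part of the original code.

# Illustrative only: apply the model selected by `cid` to one MUL image.
# `models`, `pred_folder`, `model_name`, `it`, `cities`, `d` and `f` come from
# the snippet above; the I/O and scaling below are assumptions.
import numpy as np
from os import path
from skimage import io

def predict_one(models, cid, d, f, pred_folder, model_name, it, cities):
    model = models[cid]
    if model is None:  # no weights were available for this city
        return
    img = io.imread(path.join(d, 'MUL', f)).astype('float32') / 2047.0  # assumed 11-bit scaling
    pred = model.predict(img[np.newaxis, ...])[0]                       # add/remove batch dim
    out = path.join(pred_folder, model_name, str(it), cities[cid], f)
    io.imsave(out, (pred[..., 0] * 255).astype('uint8'))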
         steps_per_epoch = int(len(train_idx) / batch_size)
         
         if validation_steps == 0 or steps_per_epoch == 0:
             print("No data for city", cities[city_id])
             continue
         
         print('Training city', cities[city_id], 'fold', it)
         print('steps_per_epoch', steps_per_epoch, 'validation_steps', validation_steps)
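          # Seed NumPy, Python's random module and TensorFlow so the fold is reproducible.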
         
         np.random.seed(it+1)
         random.seed(it+1)
         tf.set_random_seed(it+1)        
 
         print('Training model', it, cities[city_id])
         
         model = get_vgg_unet_small(input_shape, freeze=True)
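          # Stage 1: the VGG encoder is frozen, so only the decoder is trained,
          # using SGD with Nesterov momentum and the combined dice / log loss.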
             
         model.compile(loss=dice_logloss3,
                       optimizer=SGD(lr=5e-2, decay=1e-6, momentum=0.9, nesterov=True),
                       metrics=[dice_coef, dice_coef_rounded, metrics.binary_crossentropy])
         
         model_checkpoint = ModelCheckpoint(path.join(models_folder, 'vgg_smallest_model_weights_{0}_{1}.h5'.format(cities[city_id], it)), monitor='val_dice_coef_rounded', 
                                            save_best_only=True, save_weights_only=True, mode='max')
         model.fit_generator(generator=batch_data_generator(train_idx, batch_size),
                               epochs=15, steps_per_epoch=steps_per_epoch, verbose=2,
                               validation_data=val_data_generator(val_idx, batch_size, validation_steps),
                               validation_steps=validation_steps,
                               callbacks=[model_checkpoint])
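          # Stage 2: unfreeze every layer and recompile before fine-tuning the whole network.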
         for l in model.layers:
             l.trainable = True
         model.compile(loss=dice_logloss3,
         steps_per_epoch = int(len(train_idx) / batch_size)
         
         if validation_steps == 0 or steps_per_epoch == 0:
             print("No data for city", cities[city_id])
             continue
         
         print('Training city', cities[city_id], 'fold', it)
         print('steps_per_epoch', steps_per_epoch, 'validation_steps', validation_steps)
         
         np.random.seed(it+1)
         random.seed(it+1)
         tf.set_random_seed(it+1)        
 
         print('Training model', it, cities[city_id])
         
         model = get_vgg_unet_small(input_shape)
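          # In this variant all layers are trainable from the start (no freeze=True);
          # the best weights go to the 'vgg2_small_model_weights' checkpoint below.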
             
         model.compile(loss=dice_logloss3,
                       optimizer=SGD(lr=1e-2, decay=1e-6, momentum=0.9, nesterov=True),
                       metrics=[dice_coef, dice_coef_rounded, metrics.binary_crossentropy])
         
         model_checkpoint = ModelCheckpoint(path.join(models_folder, 'vgg2_small_model_weights_{0}_{1}.h5'.format(cities[city_id], it)), monitor='val_dice_coef_rounded', 
                                            save_best_only=True, save_weights_only=True, mode='max')
         model.fit_generator(generator=batch_data_generator(train_idx, batch_size),
                               epochs=15, steps_per_epoch=steps_per_epoch, verbose=2,
                               validation_data=val_data_generator(val_idx, batch_size, validation_steps),
                               validation_steps=validation_steps,
                               callbacks=[model_checkpoint])
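          # Switch the optimizer to Adam with a lower learning rate and keep training.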
         model.optimizer = Adam(lr=2e-4)
         
         model.fit_generator(generator=batch_data_generator(train_idx, batch_size),
Example #4
            if validation_steps == 0 or steps_per_epoch == 0:
                print("No data for city", cities[city_id])
                continue

            print('Training city', cities[city_id], 'fold', it)
            print('steps_per_epoch', steps_per_epoch, 'validation_steps',
                  validation_steps)

            np.random.seed(it + 1)
            random.seed(it + 1)
            tf.set_random_seed(it + 1)

            print('Training model', it, cities[city_id])

            model = get_vgg_unet_small(input_shape, freeze=True)

            model.compile(loss=dice_logloss3,
                          optimizer=SGD(lr=5e-2,
                                        decay=1e-6,
                                        momentum=0.9,
                                        nesterov=True),
                          metrics=[
                              dice_coef, dice_coef_rounded,
                              metrics.binary_crossentropy
                          ])

            model_checkpoint = ModelCheckpoint(path.join(
                models_folder, 'vgg_smallest_model_weights_{0}_{1}.h5'.format(
                    cities[city_id], it)),
                                               monitor='val_dice_coef_rounded',