# NOTE(review): indentation was lost in this chunk; the line below is a fragment
# of a per-city prediction loop and begins mid-loop (the leading 'continue'
# targets an enclosing 'for i in range(4)' that is outside this view, and 'it'
# is a fold index from an enclosing loop) — the code is therefore kept
# byte-identical rather than restructured.
# What it does: for city i, skip (append None) when the city is ignored or its
# trained weights file 'linknet_520_model_weights4_{city}_{fold}.h5' is absent;
# otherwise ensure the per-fold/per-city prediction directory exists, build a
# ResNet50-LinkNet, load the weights, and append the model. Then it walks every
# test folder's 'MUL' directory, takes each .tif, parses the image id after the
# 'MUL_' prefix, and one-hot encodes the city token (field 2 of the id) into a
# length-4 vector whose argmax is the city index 'cid'.
# NOTE(review): 'Predictiong' is a typo in the log message; the nested
# path.join(path.join(...)) calls are redundant single-argument wrappers.
if i in ignored_cities or not path.isfile( path.join( models_folder, 'linknet_520_model_weights4_{0}_{1}.h5'.format( cities[i], it))): models.append(None) continue if not path.isdir( path.join( path.join(pred_folder, model_name, str(it), cities[i]))): mkdir( path.join( path.join(pred_folder, model_name, str(it), cities[i]))) model = get_resnet50_linknet(input_shape, weights=None) model.load_weights( path.join( models_folder, 'linknet_520_model_weights4_{0}_{1}.h5'.format( cities[i], it))) models.append(model) print('Predictiong fold', it) for d in test_folders: for f in tqdm(sorted(listdir(path.join(d, 'MUL')))): if path.isfile(path.join(d, 'MUL', f)) and '.tif' in f: img_id = f.split('MUL_')[1].split('.')[0] cinp = np.zeros((4, )) cinp[cities.index(img_id.split('_')[2])] = 1.0 cid = cinp.argmax()
# Per-fold inference over the test MUL imagery with city-specific LinkNet models.
# For each fold `it` (0 and 1): load one ResNet50-LinkNet per city (None for
# ignored cities or missing weight files), then walk every test folder's 'MUL'
# directory and read each .tif whose parsed city is not ignored.
# Fixes vs. original: 'Predictiong' log typo corrected; redundant
# single-argument path.join(path.join(...)) nesting removed; duplicated
# weights-path / directory-path expressions hoisted into locals.
if not path.isdir(path.join(pred_folder, model_name)):
    mkdir(path.join(pred_folder, model_name))
for it in [0, 1]:
    models = []
    fold_dir = path.join(pred_folder, model_name, str(it))
    if not path.isdir(fold_dir):
        mkdir(fold_dir)
    for i in range(4):
        weights_path = path.join(
            models_folder,
            'linknet_520_model_weights4_{0}_{1}.h5'.format(cities[i], it))
        # Keep list positions aligned with city indices: append None for
        # cities we skip (ignored, or no trained weights on disk).
        if i in ignored_cities or not path.isfile(weights_path):
            models.append(None)
            continue
        city_dir = path.join(fold_dir, cities[i])
        if not path.isdir(city_dir):
            mkdir(city_dir)
        model = get_resnet50_linknet(input_shape, weights=None)
        model.load_weights(weights_path)
        models.append(model)
    print('Predicting fold', it)
    for d in test_folders:
        for f in tqdm(sorted(listdir(path.join(d, 'MUL')))):
            if path.isfile(path.join(d, 'MUL', f)) and '.tif' in f:
                # Image id is the part after 'MUL_' and before the extension,
                # e.g. 'AOI_2_Vegas_img123'; field 2 of the id is the city name.
                img_id = f.split('MUL_')[1].split('.')[0]
                # One-hot over the 4 cities; argmax recovers the city index.
                cinp = np.zeros((4, ))
                cinp[cities.index(img_id.split('_')[2])] = 1.0
                cid = cinp.argmax()
                if cid in ignored_cities:
                    continue
                fpath = path.join(d, 'MUL', f)
                img = skimage.io.imread(fpath, plugin='tifffile')
# NOTE(review): indentation was lost in this chunk; the line below is a fragment
# of a per-city / per-fold training loop. It begins mid-loop (the 'continue'
# targets an enclosing loop outside this view, and 'it'/'city_id' come from
# enclosing loops) and is CUT OFF mid-call — the ModelCheckpoint(...) argument
# list ends at a trailing comma — so the code is kept byte-identical rather than
# restructured.
# What it does: skip cities with no train/validation batches; seed numpy,
# random, and TensorFlow with fold-dependent seeds for reproducibility; build a
# ResNet50-LinkNet; compile with the dice_logloss3 loss, SGD (lr=1e-2,
# Nesterov momentum 0.9, decay 1e-6), and dice metrics; and start creating a
# ModelCheckpoint that saves 'linknet_520_model_weights_{city}_{fold}.h5'
# monitored on val_dice_coef_rounded.
if validation_steps == 0 or steps_per_epoch == 0: print("No data for city", cities[city_id]) continue print('Training city', cities[city_id], 'fold', it) print('steps_per_epoch', steps_per_epoch, 'validation_steps', validation_steps) np.random.seed(it + 1) random.seed(it + 1) tf.set_random_seed(it + 1) print('Training model', it, cities[city_id]) model = get_resnet50_linknet(input_shape) model.compile(loss=dice_logloss3, optimizer=SGD(lr=1e-2, decay=1e-6, momentum=0.9, nesterov=True), metrics=[ dice_coef, dice_coef_rounded, metrics.binary_crossentropy ]) model_checkpoint = ModelCheckpoint(path.join( models_folder, 'linknet_520_model_weights_{0}_{1}.h5'.format( cities[city_id], it)), monitor='val_dice_coef_rounded',
# NOTE(review): indentation was lost in this chunk; the line below is a fragment
# of a per-city / per-fold training loop and begins mid-loop (the 'continue'
# targets an enclosing loop outside this view; 'it', 'city_id', 'train_idx',
# 'batch_size', 'validation_steps' come from code before this fragment) — the
# code is kept byte-identical rather than restructured.
# What it does: derive steps_per_epoch from the training index count and batch
# size; skip cities with no data; seed numpy/random/TF with fold-dependent
# seeds; build a ResNet50-LinkNet and compile it with dice_logloss3 + SGD
# (lr=1e-2, Nesterov momentum 0.9, decay 1e-6); create a ModelCheckpoint saving
# best 'linknet_big_model_weights_{city}_{fold}.h5' by val_dice_coef_rounded;
# run a 15-epoch warm-up fit_generator pass, then unfreeze every layer and
# recompile with Adam (lr=2e-4) — presumably for a fine-tuning phase that
# follows this fragment (TODO confirm against the rest of the file).
steps_per_epoch = int(len(train_idx) / batch_size) if validation_steps == 0 or steps_per_epoch == 0: print("No data for city", cities[city_id]) continue print('Training city', cities[city_id], 'fold', it) print('steps_per_epoch', steps_per_epoch, 'validation_steps', validation_steps) np.random.seed(it+1) random.seed(it+1) tf.set_random_seed(it+1) print('Training model', it, cities[city_id]) model = get_resnet50_linknet(input_shape) model.compile(loss=dice_logloss3, optimizer=SGD(lr=1e-2, decay=1e-6, momentum=0.9, nesterov=True), metrics=[dice_coef, dice_coef_rounded, metrics.binary_crossentropy]) model_checkpoint = ModelCheckpoint(path.join(models_folder, 'linknet_big_model_weights_{0}_{1}.h5'.format(cities[city_id], it)), monitor='val_dice_coef_rounded', save_best_only=True, save_weights_only=True, mode='max') model.fit_generator(generator=batch_data_generator(train_idx, batch_size), epochs=15, steps_per_epoch=steps_per_epoch, verbose=2) for l in model.layers: l.trainable = True model.compile(loss=dice_logloss3, optimizer=Adam(lr=2e-4), metrics=[dice_coef, dice_coef_rounded, metrics.binary_crossentropy])