def main():
    """Promote a deployable checkpoint: record it in the deploy config and
    export the corresponding Keras model as the next numbered SavedModel
    version under the Seldon serving directory.
    """
    # Merge the deployable-checkpoint info into the on-disk deploy config.
    model_deploy_config_path = os.path.join(opt.data_path,
                                            opt.model_deploy_config_filename)
    with open(model_deploy_config_path, 'r') as f:
        model_deploy_config = json.load(f)

    deployable_checkpoint_dict = json.loads(opt.deployable_checkpoint_info)
    model_deploy_config['checkpoint_path'] = deployable_checkpoint_dict[
        'checkpoint_path']
    model_deploy_config['metrics'] = deployable_checkpoint_dict['metrics']
    model_deploy_config['update_time'] = datetime.datetime.now().strftime(
        "%Y%m%d-%H%M%S")
    with open(model_deploy_config_path, 'w') as f:
        f.write(json.dumps(model_deploy_config))

    # Serving layout: <data_path>/<seldon_model_path>/nwd/<version>/ where
    # each <version> directory name is an integer.
    model_serve_path = os.path.join(opt.data_path, opt.seldon_model_path, 'nwd')
    # exist_ok avoids the check-then-create race of the exists()/makedirs pair.
    os.makedirs(model_serve_path, exist_ok=True)

    # Scan the existing version directories once (the original globbed the
    # same pattern twice); default=0 covers the first-ever export.
    saved_model_paths = glob.glob(os.path.join(model_serve_path, '*'))
    latest_model_version = max(
        (int(os.path.relpath(g, model_serve_path)) for g in saved_model_paths),
        default=0)
    model_save_path = os.path.join(model_serve_path,
                                   str(latest_model_version + 1))
    print(f'msp: {model_save_path}')

    # Rebuild the network, load the promoted checkpoint's .h5 weights and
    # export a full SavedModel for serving.
    load_weight_path = os.path.join(
        opt.data_path, deployable_checkpoint_dict['checkpoint_path'] + '.h5')
    print(f'lwp: {load_weight_path}')
    model = UNet().create_model(img_shape=[256, 256, 3], num_class=2, rate=.0)
    model.load_weights(load_weight_path)
    model.save(model_save_path)
# `lr` is a deprecated alias in tf.keras optimizers (removed in recent
# releases); `learning_rate` is the supported keyword.
optimizer = tf.keras.optimizers.Adam(learning_rate=config.lr)

# Compute class weights for the loss: inverse-frequency balanced so that
# rare classes carry as much weight as frequent ones on average.
# note: we set to 0 the weights for the classes "no_data"(0) and "clouds"(1)
# to ignore these
class_weight = (1 / LCD.TRAIN_CLASS_COUNTS
                ) * LCD.TRAIN_CLASS_COUNTS.sum() / (LCD.N_CLASSES)
class_weight[LCD.IGNORED_CLASSES_IDX] = 0.
print(f"Will use class weights: {class_weight}")

# Weighted CE chosen over plain SparseCategoricalCrossentropy / dice / jaccard
# losses that were tried previously.
loss = WeightedSparseCategoricalCrossEntropy()

print("Compile model")
model.compile(optimizer=optimizer, loss=loss, metrics=[])
# TODO: add segmentation metrics — Precision/Recall/MeanIoU were tried but
# need predictions reduced (argmax / one-hot) to work with sparse labels.

# Launch training
model.fit(
    train_dataset,
    epochs=config.epochs,
    callbacks=callbacks,
    steps_per_epoch=trainset_size // config.batch_size,
    validation_data=val_dataset,
    validation_steps=valset_size // config.batch_size,
)

model.save('/content/experiments/saved')
parser.add_argument('--save_path', type=str, default='result.pth')
parser.add_argument('--epoch', type=int, default=2)
args = parser.parse_args()

# =========================================================================================
# 2. Training
# =========================================================================================
# Data loader over the training spectrograms (one sample per batch).
loader = Data.DataLoader(
    dataset=SpectrogramDataset(args.train_folder),
    batch_size=1,
    num_workers=0,
    shuffle=True,
)

# Restore the pre-trained separator before fine-tuning.
model = UNet()
model.load(args.load_path)

# Training loop: one backward step per (mixture, vocal) pair, with the
# running loss shown on the progress bar.
for epoch_idx in range(args.epoch):
    progress = tqdm_table(loader)
    last_step = len(progress) - 1
    for step, (mixture, vocal) in enumerate(progress):
        mixture, vocal = mixture.cuda(), vocal.cuda()
        model.backward(mixture, vocal)
        # Normalise the accumulated loss only on the epoch's final batch.
        stats = model.getLoss(normalize=(step == last_step))
        stats.update({'Epoch': epoch_idx})
        progress.set_table_info(stats)

model.save(args.save_path)
print("Finish training!")