# Build the descriptor heads for the original and the homography-warped image, then
# concatenate detector logits and descriptors into a single output tensor.
desc_img = descriptor_build(desc, x_img)
desc_warped_img = descriptor_build(desc, x_warped_img)
concat = tf.keras.layers.Concatenate()([det_img['logits'], det_warped_img['logits'],
                                         desc_img, desc_warped_img])

model = tf.keras.Model([img_in, warped_img_in], concat)
model.summary()
"""
tf.keras.utils.plot_model(model, config['model_visual'], show_shapes=True)
"""

# Trace options for profiling training steps.
run_options = tf.RunOptions(trace_level=tf.RunOptions.FULL_TRACE)
run_metadata = tf.RunMetadata()

model.compile(optimizer=tf.keras.optimizers.Adam(lr=config['learning_rate']),
              loss=total_loss,
              metrics=[precision_metric(det_img['pred']),
                       recall_metric(det_img['pred']),
                       warped_precision_metric(det_warped_img['pred']),
                       warped_recall_metric(det_warped_img['pred']),
                       threshold_precision_metric(det_img['pred']),
                       threshold_recall_metric(det_img['pred']),
                       warped_threshold_precision_metric(det_warped_img['pred']),
                       warped_threshold_recall_metric(det_warped_img['pred']),
                       repeatability_metric(det_img['pred'], det_warped_img['pred'])],
              options=run_options,
              run_metadata=run_metadata)

# Optionally warm-start from pretrained weights (by name, so partially matching
# architectures can be loaded).
if config['pretrained_model']:
    model.load_weights(basepath + '/' + config['pretrained_weights'], by_name=True)
"""
model._make_train_function()
with open(basepath + '/' + config['pretrained_optimizer'], 'rb') as opt:
    weight_values = pickle.load(opt)
model.optimizer.set_weights(weight_values)
"""
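# Sketch only (not part of the original script): one way the optimizer pickle restored
# above could have been produced after a training run, using the TF 1.x Keras pattern of
# dumping the optimizer's weight values; the file name reuses the same config key.
"""
symbolic_weights = getattr(model.optimizer, 'weights')
weight_values = tf.keras.backend.batch_get_value(symbolic_weights)
with open(basepath + '/' + config['pretrained_optimizer'], 'wb') as opt:
    pickle.dump(weight_values, opt)
"""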
# 'config' here is the tf.ConfigProto set up for the session; it is shadowed by the
# YAML configuration loaded below.
sess = tf.Session(config=config)
tf.keras.backend.set_session(sess)

basepath = '/home/ubuntu/data'
with open('configs/config_sp_hpatches_descriptors.yaml', 'r') as f:
    config = yaml.load(f)

# Restore the trained joint model; every custom loss and metric referenced at training
# time has to be re-supplied through custom_objects (the metric factories are called
# with placeholder arguments just to obtain the named functions for deserialisation).
model = tf.keras.models.load_model(basepath + '/' + config['model'],
                                   custom_objects={
                                       'total_loss': total_loss,
                                       'precision': precision_metric(0),
                                       'recall': recall_metric(0),
                                       'warped_precision': warped_precision_metric(0),
                                       'warped_recall': warped_recall_metric(0),
                                       'threshold_precision': threshold_precision_metric(0),
                                       'threshold_recall': threshold_recall_metric(0),
                                       'warped_threshold_precision': warped_threshold_precision_metric(0),
                                       'warped_threshold_recall': warped_threshold_recall_metric(0),
                                       'repeatability': repeatability_metric(
                                           np.zeros((1, 1), np.int32),
# Detector-only model: the shared encoder feeds a detector head, and the keypoint
# logits are the single output.
d = detector_head()
det = detector_build(d, encoder_layers, **config)

model = tf.keras.Model(img_in, det['logits'])
model.summary()
"""
tf.keras.utils.plot_model(model, config['model_visual'], show_shapes=True)
"""

run_options = tf.RunOptions(trace_level=tf.RunOptions.FULL_TRACE)
run_metadata = tf.RunMetadata()

model.compile(optimizer=tf.keras.optimizers.Adam(lr=config['learning_rate']),
              loss=detector_loss,
              metrics=[precision_metric(det['pred']),
                       recall_metric(det['pred']),
                       threshold_precision_metric(det['pred']),
                       threshold_recall_metric(det['pred'])],
              options=run_options,
              run_metadata=run_metadata)

# Optionally warm-start from pretrained weights.
if config['pretrained_model']:
    model.load_weights(basepath + '/' + config['pretrained_weights'])
"""
model._make_train_function()
with open(basepath + '/' + config['pretrained_optimizer'], 'rb') as opt:
    weight_values = pickle.load(opt)
model.optimizer.set_weights(weight_values)
"""
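# Sketch only, not the repository's implementation: the metric factories used above
# (precision_metric, recall_metric, ...) are assumed to follow this closure pattern.
# The factory is called with a prediction tensor (or a dummy value) and returns a named
# (y_true, y_pred) function, which is why load_model can map e.g. 'precision' to
# precision_metric(0) in custom_objects. All names and the metric body are hypothetical.
def example_precision_metric(pred):
    def precision(y_true, y_pred):
        # Precision over binarised keypoint maps (illustrative only).
        y_true_bin = tf.round(tf.clip_by_value(y_true, 0, 1))
        y_pred_bin = tf.round(tf.clip_by_value(y_pred, 0, 1))
        true_positives = tf.reduce_sum(y_true_bin * y_pred_bin)
        predicted_positives = tf.reduce_sum(y_pred_bin)
        return true_positives / (predicted_positives + tf.keras.backend.epsilon())
    return precision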
# Allow GPU memory to grow instead of pre-allocating everything.
config = tf.ConfigProto()
config.gpu_options.allow_growth = True
sess = tf.Session(config=config)
tf.keras.backend.set_session(sess)

# Convert a dict of lists into a list of dicts (one dict per sample).
d2l = lambda d: [dict(zip(d, e)) for e in zip(*d.values())]

basepath = '/home/ubuntu/data'
with open('configs/config_mp_coco_export.yaml', 'r') as f:
    config = yaml.load(f)  # note: shadows the ConfigProto above

# Restore the trained detector, re-supplying its custom loss and metric functions.
model = tf.keras.models.load_model(basepath + '/' + config['model'],
                                   custom_objects={'detector_loss': detector_loss,
                                                   'precision': precision_metric(0),
                                                   'recall': recall_metric(0),
                                                   'threshold_precision': threshold_precision_metric(0),
                                                   'threshold_recall': threshold_recall_metric(0)})
model.summary()

# File list to export detections for, plus the export directory and a copy of the config.
picklefile = Path(basepath, config['picklefile'])
with open(picklefile, 'rb') as handle:
    files = pickle.load(handle)

output_dir = Path(basepath, config['export_name'])
if not output_dir.exists():
    os.makedirs(output_dir, exist_ok=True)
with open(basepath + '/' + config['export_name'] + '/' + 'config.yml', 'w') as f:
    yaml.dump(config, f, default_flow_style=False)
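# Sketch only: d2l (defined above) converts a dict of parallel lists into a list of
# per-sample dicts; the key names below are illustrative, not the pickle's actual fields.
_example = {'image': ['a.png', 'b.png'], 'name': ['a', 'b']}
assert d2l(_example) == [{'image': 'a.png', 'name': 'a'},
                         {'image': 'b.png', 'name': 'b'}]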