def cont(name, preload, key):
    """Continue (or start) training the FCN model *name* for EPOCHS epochs.

    When *preload* is not None, weights are first restored from that
    checkpoint (selected via *key*) before training resumes.
    """
    with get_session() as session:
        network = FCN(sess=session, name=name)
        session.run(tf.global_variables_initializer())
        if preload is not None:
            network.load(name=preload, key=key)
        network.train(EPOCHS)
def dump_errors(name, ckpt, fold, output_filename, method='full', samples=0,
                pooling='median'):
    """Evaluate checkpoint *ckpt* of model *name* on one Gehler fold and
    pickle the angular errors and timings to *output_filename*.

    method:
        'full'    - full-image evaluation at scale 0.5
        'resize'  - resized-image evaluation
        'patches' - patch-based evaluation (*samples* patches, combined
                    with *pooling*)
    """
    samples = int(samples)
    with get_session() as sess:
        kwargs = {'dataset_name': 'gehler', 'subset': 0, 'fold': fold}
        fcn = FCN(sess=sess, name=name, kwargs=kwargs)
        fcn.load(ckpt)
        # Run the evaluation 4 times (presumably to stabilize the timing
        # measurement `t` — TODO confirm); only the last run's results
        # are kept.
        for _ in range(4):
            if method == 'full':
                errors, t, _, _, _ = fcn.test(scales=[0.5])
            elif method == 'resize':
                errors, t = fcn.test_resize()
            elif method == 'patches':
                errors, t = fcn.test_patch_based(
                    scale=0.5, patches=samples, pooling=pooling)
            else:
                # Fix: was `assert False`, which is stripped under
                # `python -O` and would leave `errors`/`t` undefined.
                raise ValueError('unknown method: %r' % (method,))
        utils.print_angular_errors(errors)
        # Fix: pickle needs a binary-mode file; text 'w' breaks on Python 3.
        with open(output_filename, 'wb') as f:
            pickle.dump({'e': errors, 't': t}, f)
def test(name, ckpt, image_pack_name=None, output_filename=None):
    """Evaluate model *name* at checkpoint *ckpt*.

    If *image_pack_name* contains a '.', it is treated as the path of a
    single external image file; otherwise it is a comma-separated list of
    image packs to load. With no *image_pack_name*, default data is used.
    When *output_filename* is given, predictions, errors, and confidences
    are pickled under outputs/.
    """
    # Fix: the original computed image_pack_name.index('.') before the None
    # check, which (a) raises TypeError when image_pack_name is None and
    # (b) raises ValueError when there is no '.' (str.index does not
    # return -1; str.find does).
    external_image = (image_pack_name is not None
                      and image_pack_name.find('.') != -1)
    if image_pack_name is None:
        data = None
    elif not external_image:
        data = load_data(image_pack_name.split(','))
    with get_session() as sess:
        fcn = FCN(sess=sess, name=name)
        if ckpt != "-1":
            fcn.load(ckpt)
        else:
            # ckpt == "-1" presumably means "resolve checkpoint from the
            # model name" — TODO confirm against FCN.load_absolute.
            fcn.load_absolute(name)
        if not external_image:
            errors, _, _, _, ret, conf = fcn.test(scales=[0.5],
                                                  summary=True,
                                                  summary_key=123,
                                                  data=data,
                                                  eval_speed=False,
                                                  visualize=True)
            if output_filename is not None:
                with open('outputs/%s.pkl' % output_filename, 'wb') as f:
                    pickle.dump(ret, f)
                with open('outputs/%s_err.pkl' % output_filename, 'wb') as f:
                    pickle.dump(errors, f)
                with open('outputs/%s_conf.pkl' % output_filename, 'wb') as f:
                    pickle.dump(conf, f)
                # Fix: py2 print statements -> print() calls (same output
                # for a single argument on py2, valid on py3).
                print(ret)
                print('results dumped')
        else:
            img = cv2.imread(image_pack_name)
            # reverse gamma correction for sRGB
            img = (img / 255.0) ** 2.2 * 16384
            images = [img]
            fcn.test_external(images=images, fns=[image_pack_name])
def dump_result(name, ckpt, image_pack_name=None):
    """Run model *name* at checkpoint *ckpt* and pickle the raw outputs
    and ground truths to outputs/<name>-<ckpt>-<image_pack_name>.pkl.

    *image_pack_name* is a comma-separated list of image packs, or None
    for the default test data.
    """
    if image_pack_name is None:
        data = None
    else:
        data = load_data(image_pack_name.split(','))
    with get_session() as sess:
        fcn = FCN(sess=sess, name=name)
        fcn.load(ckpt)
        # (Dead stores `outputs = []` / `gts = []` removed: both were
        # unconditionally overwritten here.)
        _, _, outputs, gts = fcn.test(scales=[0.5],
                                      summary=True,
                                      summary_key=123,
                                      data=data)
        result = {
            'outputs': np.array(outputs),
            'gts': np.array(gts),
        }
        # Fix: the original passed a bare open(...) to pickle.dump and
        # leaked the file handle; `with` guarantees it is closed.
        out_path = "outputs/%s-%s-%s.pkl" % (name, ckpt, image_pack_name)
        with open(out_path, "wb") as f:
            pickle.dump(result, f)
def test_input_gamma(name, ckpt, input_gamma, image_pack_name=None,
                     output_filename=None):
    """Evaluate model *name* at checkpoint *ckpt* with the input gamma
    overridden to *input_gamma*.

    *image_pack_name* is a comma-separated list of image packs, or None
    for the default data. When *output_filename* is given, the returned
    predictions are pickled to outputs/<output_filename>.pkl.
    """
    # Apply the gamma override globally before the network reads any data.
    config_set_input_gamma(float(input_gamma))
    if image_pack_name is None:
        data = None
    else:
        data = load_data(image_pack_name.split(','))
    with get_session() as sess:
        fcn = FCN(sess=sess, name=name)
        fcn.load(ckpt)
        _, _, _, _, ret = fcn.test(scales=[0.5],
                                   summary=True,
                                   summary_key=123,
                                   data=data)
        if output_filename is not None:
            with open('outputs/%s.pkl' % output_filename, 'wb') as f:
                pickle.dump(ret, f)
            # Fix: py2 print statements -> print() calls (same output for
            # a single argument on py2, valid on py3).
            print(ret)
            print('results dumped')
def test_network(name, ckpt):
    """Restore checkpoint *ckpt* of model *name* and run its network test."""
    with get_session() as session:
        model = FCN(sess=session, name=name)
        model.load(ckpt)
        model.test_network()
def test_multi(name, ckpt):
    """Restore checkpoint *ckpt* of model *name* and run its multi test."""
    with get_session() as session:
        model = FCN(sess=session, name=name)
        model.load(ckpt)
        model.test_multi()
from camera import CameraHandler #from camera.camera_handler_2 import CameraHandler from policy import Detector import numpy as np from fcn import FCN import cv2 '''FCN''' VGG_WEIGHTS = '/home/nvidia/RobotControl/src/fcn/vgg16.npy' MODEL_PATH = '/home/nvidia/RobotControl/src/fcn/model_save/old_2/' model = FCN((240, 320), 1, VGG_WEIGHTS) model.load(path=MODEL_PATH) img = None ct = 0 mask = np.zeros((480, 640), dtype=np.uint8) mask = cv2.fillConvexPoly( mask, np.array([[275, 285], [365, 285], [430, 166], [210, 166]]), 1, 1) def preprocessor(img): # mask=np.zeros((480, 640), dtype=np.uint8) # mask = cv2.fillConvexPoly(mask, np.array([[275, 285], [365, 285], [430, 166], [210, 166]]), 1, 1) return model.extract_hmap(img)[0] * mask # return img * np.expand_dims(mask,-1) handler = CameraHandler(2, frame_rate=20, preprocessor=lambda img: preprocessor(img)) handler.start()