Example no. 1
0
def setup_kp_network(network_str):
    """Load a pretrained keypoint network and compile its inference function.

    Looks up *network_str* in ``KP_NETWORK_OPTIONS``, downloads (with caching)
    the pickled parameters, loads them into a freshly built network, and
    compiles a deterministic Theano prediction function.

    Returns:
        dict: ``mean``/``std`` normalization constants from the parameter
        pickle, ``networkfn`` (the compiled prediction function), and
        ``input_size`` (expected input dimensions for this network).
    """
    opts = KP_NETWORK_OPTIONS[network_str]
    # Fetch (and locally cache) the pickled parameters from the model server.
    params_url = join('https://lev.cs.rpi.edu/public/models/', opts['url'])
    params_path = ut.grab_file_url(params_url, appname='ibeis')
    # The pickle is assumed to be a dict with keys 'mean', 'std', and 'params'
    # (normalization constants are bundled alongside the weights).
    saved = ut.load_cPkl(params_path)
    net = opts['exp']()
    ll.set_all_param_values(net, saved['params'])
    inp = T.tensor4()
    predict = tfn([inp], ll.get_output(net, inp, deterministic=True))
    return {
        'mean': saved['mean'],
        'std': saved['std'],
        'networkfn': predict,
        'input_size': opts['size'],
    }
Example no. 2
0
def setup_kp_network(network_str):
    """Build the named keypoint network and return it ready for inference.

    The network configuration is taken from ``KP_NETWORK_OPTIONS``; its
    pickled weights are downloaded (cached via utool) and loaded, and a
    deterministic Theano forward function is compiled.

    Returns:
        dict: normalization constants (``mean``, ``std``), the compiled
        forward function (``networkfn``), and the expected ``input_size``.
    """
    entry = KP_NETWORK_OPTIONS[network_str]
    weight_url = join('https://lev.cs.rpi.edu/public/models/', entry['url'])
    # grab_file_url caches the download under the 'ibeis' app directory.
    weight_path = ut.grab_file_url(weight_url, appname='ibeis')
    # Assumed to be a dict holding 'mean', 'std', and 'params' — the dataset
    # normalization constants travel with the weights.
    loaded = ut.load_cPkl(weight_path)
    model = entry['exp']()
    ll.set_all_param_values(model, loaded['params'])
    sym_in = T.tensor4()
    forward = tfn([sym_in], ll.get_output(model, sym_in, deterministic=True))
    result = {'mean': loaded['mean'], 'std': loaded['std']}
    result['networkfn'] = forward
    result['input_size'] = entry['size']
    return result
Example no. 3
0
def setup_te_network(network_str):
    """Load a pretrained trailing-edge network and compile its inference fn.

    Looks up *network_str* in ``TE_NETWORK_OPTIONS``, downloads (with
    caching) the pickled parameters, loads them into a freshly built network,
    and compiles a deterministic Theano prediction function over the
    network's final layer.

    Returns:
        dict: ``mean``/``std`` normalization constants and ``networkfn``
        (the compiled prediction function). For 'upsample'/'jet' networks a
        ``mod_acc`` key (value 8) is added as well.
    """
    opts = TE_NETWORK_OPTIONS[network_str]
    # Fetch (and locally cache) the pickled parameters from the model server.
    file_url = join('https://lev.cs.rpi.edu/public/models/', opts['url'])
    network_params_path = ut.grab_file_url(file_url, appname='ibeis')
    # The pickle is assumed to be a dict with keys 'mean', 'std', and
    # 'params' (normalization constants are bundled alongside the weights).
    network_params = ut.load_cPkl(network_params_path)
    network_exp = opts['exp']()
    ll.set_all_param_values(network_exp, network_params['params'])
    X = T.tensor4()
    # network_exp is a sequence of layers; predict from the last one.
    network_fn = tfn([X], ll.get_output(network_exp[-1], X,
                                        deterministic=True))
    retdict = {
        'mean': network_params['mean'],
        'std': network_params['std'],
        'networkfn': network_fn,
    }
    # Generator instead of a throwaway list inside any().
    if any(tag in network_str for tag in ('upsample', 'jet')):
        # NOTE(review): presumably the output stride of these variants —
        # callers appear to need inputs/outputs aligned to multiples of 8.
        retdict['mod_acc'] = 8
    return retdict
Example no. 4
0
def setup_te_network(network_str):
    """Build the named trailing-edge network and return it ready to run.

    The network configuration comes from ``TE_NETWORK_OPTIONS``; its pickled
    weights are downloaded (cached via utool) and loaded, and a deterministic
    Theano forward function over the final layer is compiled.

    Returns:
        dict: normalization constants (``mean``, ``std``) and the compiled
        forward function (``networkfn``); 'upsample'/'jet' variants also get
        ``mod_acc`` = 8.
    """
    entry = TE_NETWORK_OPTIONS[network_str]
    weight_url = join('https://lev.cs.rpi.edu/public/models/', entry['url'])
    # grab_file_url caches the download under the 'ibeis' app directory.
    weight_path = ut.grab_file_url(weight_url, appname='ibeis')
    # Assumed to be a dict holding 'mean', 'std', and 'params' — the dataset
    # normalization constants travel with the weights.
    loaded = ut.load_cPkl(weight_path)
    layers = entry['exp']()
    ll.set_all_param_values(layers, loaded['params'])
    sym_in = T.tensor4()
    # layers is a sequence; inference runs off its last element.
    sym_out = ll.get_output(layers[-1], sym_in, deterministic=True)
    forward = tfn([sym_in], sym_out)
    out = {'mean': loaded['mean'], 'std': loaded['std'], 'networkfn': forward}
    if any([i in network_str for i in ('upsample', 'jet')]):
        out['mod_acc'] = 8
    return out
    
def updateVersion(nextVersion):
    """Persist *nextVersion* to 'networkVersion.txt' in the current directory.

    Args:
        nextVersion: any value convertible with str(); written verbatim,
            overwriting the previous contents.
    """
    # Fixes the original's mixed space/tab indentation and uses a context
    # manager so the handle is closed even if write() raises.
    with open('networkVersion.txt', 'w') as f:
        f.write(str(nextVersion))

if __name__ == '__main__':
        nextVersion = getVersion()
	with open(join(dataset_loc, "Flukes/patches/annot_full_64_100r_zs/vgg16_c43_10ep_adam_l21e-3.pkl"), 'r') as f:
		model = pickle.load(f)
	test_dset = load_dataset(join(dataset_loc, "Flukes/patches/TESTannot_full_64_100r_zs")) 
	segmenter = build_segmenter_vgg()
	ll.set_all_param_values(segmenter, model)
	X = T.tensor4()
	segmenter_out = ll.get_output(segmenter, X, deterministic=True)
	segmenter_fn = tfn([X], segmenter_out)
	dset_for_model = {section:preproc_dataset(test_dset[section]) for section in ['train', 'valid', 'test']}
	segmentation_outputs = segmenter_fn(dset_for_model['train']['X'])
	segmentation_outputs_valid = segmenter_fn(dset_for_model['valid']['X'])
	usedGids = set()

        #open MongoDB
	c = MongoClient()
	db = c['annotationInfo']
	collection = db['networkResults']
	cursor = collection.find({'version':nextVersion})
	values = cursor[:]
	for value in values:
		usedGids.add(value['gid'])
	
	ibs = ibeis.opendb(dbdir='/home/zach/data/IBEIS/humpbacks')