def main():
    """Entry point: configure apollocaffe, build train/test roidbs, and
    run Faster R-CNN training.

    Returns 0 on success.
    """
    generators = {'ZF': ZFGenerator}
    args = parse_args(generators.keys())

    # An optional config file overrides the built-in defaults.
    if args.cfg is not None:
        cfg_from_file(args.cfg)

    # Seed both apollocaffe and numpy from the same value for reproducibility.
    apollocaffe.set_random_seed(cfg.RNG_SEED)
    np.random.seed(cfg.RNG_SEED)

    # Negative gpu_id means "leave device selection alone".
    if args.gpu_id >= 0:
        apollocaffe.set_device(args.gpu_id)
    apollocaffe.set_cpp_loglevel(3)

    # Build the training roidb only when a training imdb was requested.
    roidb_train = None
    if args.train_imdb is not None:
        imdb_train = get_imdb(args.train_imdb)
        imdb_train.set_proposal_method(cfg.TRAIN.PROPOSAL_METHOD)
        roidb_train = get_training_roidb(imdb_train)

    # Build the test roidb from ground-truth proposals, if requested.
    roidb_test = None
    if args.test_imdb is not None:
        imdb_test = get_imdb(args.test_imdb)
        imdb_test.set_proposal_method('gt')
        prepare_roidb(imdb_test)
        roidb_test = imdb_test.roidb

    detector = FasterRCNN(args, generators,
                          train_roidb=roidb_train, test_roidb=roidb_test)
    detector.train()

    return 0
Ejemplo n.º 2
0
def main():
    """Sets up all the configurations for apollocaffe, and ReInspect
    and runs the trainer.

    Parses --config (required) and the apollocaffe base options; an
    optional --weights overrides the solver weights from the config.
    Raises Exception when no weights file ends up configured.
    """
    parser = apollocaffe.base_parser()
    parser.add_argument('--config', required=True)
    args = parser.parse_args()
    # Use a context manager so the config file handle is closed promptly;
    # json.load(open(...)) leaked the handle.
    with open(args.config, 'r') as config_f:
        config = json.load(config_f)
    if args.weights is not None:
        config["solver"]["weights"] = args.weights
    apollocaffe.set_random_seed(config["solver"]["random_seed"])
    apollocaffe.set_device(args.gpu)
    apollocaffe.set_cpp_loglevel(args.loglevel)

    net = apollocaffe.ApolloNet()
    image_mean = load_image_mean_from_binproto(config['data']["idl_mean"])
    # One forward pass on a fake 1x3x227x227 blob defines the net structure
    # so that weights can be loaded afterwards.
    fake_input_en = {"image": np.zeros((1, 3, 227, 227))}
    forward(net, fake_input_en, deploy=True)

    if config["solver"]["weights"]:
        net.load(config["solver"]["weights"])
    else:
        raise Exception('weights file is not provided!')

    run_socket(net, 13502, image_mean)
Ejemplo n.º 3
0
def configure():
    """Seed all RNGs, parse CLI arguments, configure logging, and load
    the model configuration.

    Returns:
        util.Struct: the parsed YAML config, with a `name` attribute set
        from the config file's basename (e.g. "config/foo.yml" -> "foo").
    """
    # Fix every RNG seed so runs are reproducible.
    apollocaffe.set_random_seed(0)
    np.random.seed(0)
    random.seed(0)

    arg_parser = argparse.ArgumentParser()
    arg_parser.add_argument("-c",
                            "--config",
                            dest="config",
                            required=True,
                            help="model configuration file")
    arg_parser.add_argument("-l",
                            "--log-config",
                            dest="log_config",
                            default="config/log.yml",
                            help="log configuration file")

    args = arg_parser.parse_args()
    # Derive the run name from the config filename, e.g. "config/foo.yml" -> "foo".
    config_name = args.config.split("/")[-1].split(".")[0]

    with open(args.log_config) as log_config_f:
        log_filename = "logs/%s.log" % config_name
        # safe_load: yaml.load without an explicit Loader is deprecated and
        # can instantiate arbitrary objects; these files are plain data.
        log_config = yaml.safe_load(log_config_f)
        log_config["handlers"]["fileHandler"]["filename"] = log_filename
        logging.config.dictConfig(log_config)

    with open(args.config) as config_f:
        config = util.Struct(**yaml.safe_load(config_f))

    # The config file must not define "name"; it is reserved for the run name.
    assert not hasattr(config, "name")
    config.name = config_name

    return config
Ejemplo n.º 4
0
def configure():
    """Seed all RNGs, parse CLI arguments, configure logging, and load
    the model configuration.

    Returns:
        util.Struct: the parsed YAML config, with a `name` attribute set
        from the config file's basename (e.g. "config/foo.yml" -> "foo").
    """
    # Fix every RNG seed so runs are reproducible.
    apollocaffe.set_random_seed(0)
    np.random.seed(0)
    random.seed(0)

    arg_parser = argparse.ArgumentParser()
    arg_parser.add_argument("-c", "--config", dest="config", required=True, help="model configuration file")
    arg_parser.add_argument(
        "-l", "--log-config", dest="log_config", default="config/log.yml", help="log configuration file"
    )

    args = arg_parser.parse_args()
    # Derive the run name from the config filename, e.g. "config/foo.yml" -> "foo".
    config_name = args.config.split("/")[-1].split(".")[0]

    with open(args.log_config) as log_config_f:
        log_filename = "logs/%s.log" % config_name
        # safe_load: yaml.load without an explicit Loader is deprecated and
        # can instantiate arbitrary objects; these files are plain data.
        log_config = yaml.safe_load(log_config_f)
        log_config["handlers"]["fileHandler"]["filename"] = log_filename
        logging.config.dictConfig(log_config)

    with open(args.config) as config_f:
        config = util.Struct(**yaml.safe_load(config_f))

    # The config file must not define "name"; it is reserved for the run name.
    assert not hasattr(config, "name")
    config.name = config_name

    return config
Ejemplo n.º 5
0
def main():
    """Train or sample from listener/speaker models on the chosen corpus.

    Usage: <script> <job> <corpus_name>, where job is one of
    train.base, train.compiled, sample.base, sample.compiled and
    corpus_name is "abstract" or "birds". (Python 2 code.)
    """
    # Pin the GPU and all RNG seeds so runs are reproducible.
    apollocaffe.set_device(0)
    #apollocaffe.set_cpp_loglevel(0)
    apollocaffe.set_random_seed(0)
    np.random.seed(0)

    # Positional CLI arguments: which job to run and which corpus to use.
    job = sys.argv[1]
    corpus_name = sys.argv[2]

    # CONFIG is a module-level YAML string; Struct gives attribute access.
    config = util.Struct(**yaml.load(CONFIG))
    if corpus_name == "abstract":
        train_scenes, dev_scenes, test_scenes = corpus.load_abstract()
    else:
        # Only two corpora are supported.
        assert corpus_name == "birds"
        train_scenes, dev_scenes, test_scenes = corpus.load_birds()
    apollo_net = ApolloNet()
    print "loaded data"
    print "%d training examples" % len(train_scenes)

    # All four models share the same ApolloNet instance.
    listener0_model = Listener0Model(apollo_net, config.model)
    speaker0_model = Speaker0Model(apollo_net, config.model)
    sampling_speaker1_model = SamplingSpeaker1Model(apollo_net, config.model)
    compiled_speaker1_model = CompiledSpeaker1Model(apollo_net, config.model)

    # Train the base listener and speaker models, then snapshot and exit.
    if job == "train.base":
        train(train_scenes, dev_scenes, listener0_model, apollo_net, config.opt)
        train(train_scenes, dev_scenes, speaker0_model, apollo_net, config.opt)
        apollo_net.save("models/%s.base.caffemodel" % corpus_name)
        exit()

    # Train the compiled speaker starting from the base snapshot.
    if job == "train.compiled":
        apollo_net.load("models/%s.base.caffemodel" % corpus_name)
        print "loaded model"
        train(train_scenes, dev_scenes, compiled_speaker1_model, apollo_net,
                config.opt)
        apollo_net.save("models/%s.compiled.caffemodel" % corpus_name)
        exit()

    # Sampling jobs: load the matching snapshot and run the experiments
    # on the dev scenes.
    if job in ("sample.base", "sample.compiled"):
        if job == "sample.base":
            apollo_net.load("models/%s.base.caffemodel" % corpus_name)
        else:
            apollo_net.load("models/%s.compiled.caffemodel" % corpus_name)
        print "loaded model"
        if job == "sample.base":
            models = {
                "sampling_speaker1": sampling_speaker1_model,
            }
        elif job == "sample.compiled":
            models = {
                "compiled_speaker1": compiled_speaker1_model,
            }

        # "sample.base" -> "base", "sample.compiled" -> "compiled".
        name = job.split(".")[1]

        run_experiment("one_different", corpus_name, name, models, dev_scenes)
        run_experiment("by_similarity", corpus_name, name, models, dev_scenes)
        run_experiment("all_same", corpus_name, name, models, dev_scenes)
Ejemplo n.º 6
0
def main():
    """Parse CLI args, load the JSON config, configure apollocaffe,
    and run the evaluation."""
    parser = apollocaffe.base_parser()
    parser.add_argument("--config", required=True)
    args = parser.parse_args()
    # Close the config file deterministically; json.load(open(...)) leaked it.
    with open(args.config, 'r') as config_f:
        config = json.load(config_f)
    apollocaffe.set_random_seed(config["solver"]["random_seed"])
    apollocaffe.set_device(args.gpu)
    apollocaffe.set_cpp_loglevel(args.loglevel)

    evaluate(config)
Ejemplo n.º 7
0
def main():
    """Parse CLI args, load the JSON config, configure apollocaffe,
    and run training."""
    parser = apollocaffe.base_parser()
    parser.add_argument("--config", required=True)
    args = parser.parse_args()
    # Close the config file deterministically; json.load(open(...)) leaked it.
    with open(args.config, 'r') as config_f:
        config = json.load(config_f)
    apollocaffe.set_random_seed(config["solver"]["random_seed"])
    apollocaffe.set_device(args.gpu)
    apollocaffe.set_cpp_loglevel(args.loglevel)

    train(config)
Ejemplo n.º 8
0
def main():
    """Sets up all the configurations for apollocaffe, and ReInspect
    and runs the test."""
    parser = apollocaffe.base_parser()
    parser.add_argument('--config', required=True)
    args = parser.parse_args()
    # Close the config file deterministically; json.load(open(...)) leaked it.
    with open(args.config, 'r') as config_f:
        config = json.load(config_f)
    print("Test config file is " + config["data"]["test_idl"])
    apollocaffe.set_random_seed(config["solver"]["random_seed"])
    apollocaffe.set_device(0)  # gpu
    test(config)
Ejemplo n.º 9
0
def main():
    """Load per-crop features, group them by source image, attach the
    holistic scene feature per image, and evaluate. (Python 2 code;
    data_root is a module-level path prefix.)"""
    parser = apollocaffe.base_parser()
    parser.add_argument("--config", required=True)
    args = parser.parse_args()
    config = json.load(open(args.config, 'r'))
    apollocaffe.set_random_seed(config["solver"]["random_seed"])
    apollocaffe.set_device(args.gpu)
    apollocaffe.set_cpp_loglevel(args.loglevel)

    # Paths: full image list, cropped/aligned detection list, and the
    # first per-crop feature file.
    list_add=data_root+'list_all_test.txt'
    list_crop_add=data_root+'list_det_crop_align_filled.txt'
    feat_add=data_root+'feat1.txt'

    # Structured loads: image names (and labels for the crop list).
    train_gt=np.loadtxt(list_add, dtype={'names': ('name', ), 'formats': ('S200', )})
    train_crop_gt=np.loadtxt(list_crop_add, dtype={'names': ('name', 'label'), 'formats': ('S200', 'i4')})
    train_feat=np.loadtxt(feat_add)


    train_feat_list=[]
    train_label_list=[]
    # One feature row per crop entry.
    assert(len(train_crop_gt)==train_feat.shape[0])
    im_list=train_crop_gt['name'];
    # Strip the path and the last 7 characters of each crop filename to
    # recover the source-image key (NOTE: this mutates train_crop_gt['name']
    # in place, so the comparison below matches the truncated keys).
    for k in xrange(len(im_list)):
        im_list[k]=im_list[k].split('/')[-1][0:-7]
    im_list_uniq=list(set(im_list))
    # Group the feat1 rows by source image via a boolean mask.
    for s in im_list_uniq:
        idx=s==train_crop_gt['name']
        feat=train_feat[idx,:]
        train_feat_list.append(feat)
        train_label_list.append(s)


    # Second per-crop feature file, grouped the same way.
    feat_add2=data_root+'feat2.txt'

    train_feat2=np.loadtxt(feat_add2)#.reshape((-1,1))
    train_feat_list2=[]
    assert(len(train_crop_gt)==train_feat2.shape[0])
    for s in im_list_uniq:
        idx=s==im_list
        feat2=train_feat2[idx,:]
        train_feat_list2.append(feat2)

    # Holistic (whole-image) CENTRIST feature: exactly one row per image.
    holistic_feat_add=data_root+'feat_centrist_test_d1024.txt'
    holistic_feat=np.loadtxt(holistic_feat_add)
    scene_feat_list=[]
    for k,s in enumerate(im_list_uniq):
        idx=s.split('/')[-1]==train_gt['name']
        assert(idx.sum()==1)
        scene_feat_list.append(holistic_feat[idx,:])


    # Bundle everything the evaluator expects; current_idx is a cursor.
    test_data={'feats': train_feat_list, 'feats2': train_feat_list2, 'labels': train_label_list, 'scene_feats':  scene_feat_list, 'current_idx': 0}    

    evaluate(config, test_data)
Ejemplo n.º 10
0
def main():
    """Sets up all the configurations for apollocaffe, and ReInspect
    and runs the test."""
    parser = apollocaffe.base_parser()
    parser.add_argument('--config', required=True)
    args = parser.parse_args()
    # Close the config file deterministically; json.load(open(...)) leaked it.
    with open(args.config, 'r') as config_f:
        config = json.load(config_f)
    print("Test config file is " + config["data"]["test_idl"])
    apollocaffe.set_random_seed(config["solver"]["random_seed"])
    apollocaffe.set_device(0)  # gpu
    test(config)
Ejemplo n.º 11
0
def main():
    """Parse CLI args, load the JSON config, apply CLI overrides,
    configure apollocaffe, and run training."""
    parser = apollocaffe.base_parser()
    parser.add_argument('--config', required=True)
    args = parser.parse_args()
    # Close the config file deterministically; json.load(open(...)) leaked it.
    with open(args.config, 'r') as config_f:
        config = json.load(config_f)
    # CLI --weights / --start_iter override the config-file values.
    if args.weights is not None:
        config["solver"]["weights"] = args.weights
    config["solver"]["start_iter"] = args.start_iter
    apollocaffe.set_random_seed(config["solver"]["random_seed"])
    apollocaffe.set_device(args.gpu)
    apollocaffe.set_cpp_loglevel(args.loglevel)

    train(config)
Ejemplo n.º 12
0
def main():
    """Parse CLI args, load the JSON config, apply CLI overrides,
    configure apollocaffe, and run training."""
    parser = apollocaffe.base_parser()
    parser.add_argument("--config", required=True)
    args = parser.parse_args()
    # Close the config file deterministically; json.load(open(...)) leaked it.
    with open(args.config, "r") as config_f:
        config = json.load(config_f)
    # CLI --weights / --start_iter override the config-file values.
    if args.weights is not None:
        config["solver"]["weights"] = args.weights
    config["solver"]["start_iter"] = args.start_iter
    apollocaffe.set_random_seed(config["solver"]["random_seed"])
    apollocaffe.set_device(args.gpu)
    apollocaffe.set_cpp_loglevel(args.loglevel)

    train(config)
Ejemplo n.º 13
0
def main():
    """Sets up all the configurations for apollocaffe, and ReInspect
    and runs the trainer."""
    parser = apollocaffe.base_parser()
    parser.add_argument('--config', required=True)
    args = parser.parse_args()
    # Close the config file deterministically; json.load(open(...)) leaked it.
    with open(args.config, 'r') as config_f:
        config = json.load(config_f)
    # CLI --weights overrides the config-file value.
    if args.weights is not None:
        config["solver"]["weights"] = args.weights
    apollocaffe.set_random_seed(config["solver"]["random_seed"])
    apollocaffe.set_device(args.gpu)
    apollocaffe.set_cpp_loglevel(args.loglevel)

    deploy(config)
Ejemplo n.º 14
0
def main():
    """Sets up all the configurations for apollocaffe, and ReInspect
    and runs the trainer."""
    parser = apollocaffe.base_parser()
    parser.add_argument('--config', required=True)
    args = parser.parse_args()
    # Close the config file deterministically; json.load(open(...)) leaked it.
    with open(args.config, 'r') as config_f:
        config = json.load(config_f)
    # CLI --weights / --start_iter override the config-file values.
    if args.weights is not None:
        config["solver"]["weights"] = args.weights
    config["solver"]["start_iter"] = args.start_iter
    apollocaffe.set_random_seed(config["solver"]["random_seed"])
    apollocaffe.set_device(args.gpu)
    apollocaffe.set_cpp_loglevel(args.loglevel)

    train(config)
Ejemplo n.º 15
0
def main():
    """Sets up all the configurations for apollocaffe, and ReInspect
    and runs the trainer."""
    parser = apollocaffe.base_parser()
    parser.add_argument('--config', required=True)
    args = parser.parse_args()
    config = json.load(open(args.config, 'r'))
    # CLI --weights / --start_iter override the config-file values.
    if args.weights is not None:
        config["solver"]["weights"] = args.weights
    config["solver"]["start_iter"] = args.start_iter
    apollocaffe.set_random_seed(config["solver"]["random_seed"])
    apollocaffe.set_device(args.gpu)
    apollocaffe.set_cpp_loglevel(args.loglevel)

    # Echo the effective solver and MMD settings (Python 2 print statements).
    print json.dumps(config['solver'], indent=4, sort_keys=True)
    print json.dumps(config['MMD'], indent=4, sort_keys=True)

    train(config)
Ejemplo n.º 16
0
def main():
	"""Run the pretrained brainwash head detector on a few test images,
	stitch per-grid-cell boxes into final detections, optionally display
	them, and collect the results into an AnnoList. (Python 2 code.)"""
	config = json.load(open("config.json", 'r'))
	config["data"]["test_idl"] = "./data/brainwash/brainwash_test.idl"

	apollocaffe.set_random_seed(config["solver"]["random_seed"])
	apollocaffe.set_device(0)

	# Now lets load the data mean and the data.
	data_mean = load_data_mean(config["data"]["idl_mean"], 
						   config["net"]["img_width"], 
						   config["net"]["img_height"], image_scaling=1.0)

	num_test_images = 500
	display = True

	## Warning: load_idl returns an infinite generator. Calling list() before islice() will hang.
	test_list = list(itertools.islice(
			load_idl(config["data"]["test_idl"], data_mean, config["net"], False),
			0,
			num_test_images))

	# We can now load the snapshot weights.
	net = apollocaffe.ApolloNet()
	net.phase = 'test'
	import time; s = time.time()
	# A first forward pass defines the net structure before weights load.
	forward(net, test_list[0], config["net"], True) # define structure
	print time.time() - s
	net.load("./data/brainwash_800000.h5") # load pre-trained weights

	# We can now begin to run the model and visualize the results.
	annolist = al.AnnoList()
	net_config = config["net"]
	# Pixel extent of one output-grid cell in each direction.
	pix_per_w = net_config["img_width"]/net_config["grid_width"]
	pix_per_h = net_config["img_height"]/net_config["grid_height"]

	# Only the first 10 of the loaded test images are processed here.
	for i in range(10):
		inputs = test_list[i]
		timer = Timer()
		timer.tic()
		bbox_list, conf_list = forward(net, inputs, net_config, True)
		timer.toc()
		print ('Detection took {:.3f}s').format(timer.total_time)
		img = np.copy(inputs["raw"])
		png = np.copy(inputs["imname"])
		# One bucket of candidate rects per grid cell.
		all_rects = [[[] for x in range(net_config["grid_width"])] for y in range(net_config["grid_height"])]
		for n in range(len(bbox_list)):
			for k in range(net_config["grid_height"] * net_config["grid_width"]):
				# Recover (x, y) grid coordinates from the flat cell index k.
				y = int(k / net_config["grid_width"])
				x = int(k % net_config["grid_width"])
				bbox = bbox_list[n][k]
				conf = conf_list[n][k,1].flatten()[0]
				# Cell-relative box offsets -> absolute image coordinates
				# (cell center plus the predicted offset).
				abs_cx = pix_per_w/2 + pix_per_w*x + int(bbox[0,0,0])
				abs_cy = pix_per_h/2 + pix_per_h*y+int(bbox[1,0,0])
				w = bbox[2,0,0]
				h = bbox[3,0,0]
				all_rects[y][x].append(Rect(abs_cx,abs_cy,w,h,conf))

		timer.tic()
		# Merge per-cell candidates into the final detection set.
		acc_rects = stitch_rects(all_rects)
		timer.toc()
		print ('Stitching detected bboxes took {:.3f}s').format(timer.total_time)		

		if display:
			visualize_detection(img, acc_rects)    
			
		# Record this image's detections as annotation rects
		# (center/size -> corner coordinates).
		anno = al.Annotation()
		anno.imageName = inputs["imname"]
		for rect in acc_rects:
			r = al.AnnoRect()
			r.x1 = rect.cx - rect.width/2.
			r.x2 = rect.cx + rect.width/2.
			r.y1 = rect.cy - rect.height/2.
			r.y2 = rect.cy + rect.height/2.
			r.score = rect.true_confidence
			anno.rects.append(r)
		annolist.append(anno)
Ejemplo n.º 17
0
from apollocaffe.layers import (Concat, Dropout, LstmUnit, InnerProduct,
                                NumpyData, Softmax, SoftmaxWithLoss, Wordvec)

# Character-model hyperparameters.
batch_size = 32
vocab_size = 256
# The last vocabulary index is reserved as the zero/padding symbol.
zero_symbol = vocab_size - 1
dimension = 250
base_lr = 0.15
clip_gradients = 10
# Temperature parameter used elsewhere when sampling — presumably an
# inverse softmax temperature; verify against its usage.
i_temperature = 1.5

# Parse CLI args (the base parser supplies --gpu among others) and seed.
parser = apollocaffe.base_parser()
parser.add_argument('--data_source', type=str)
args = parser.parse_args()
apollocaffe.set_device(args.gpu)
apollocaffe.set_random_seed(0)


def get_data():
    if args.data_source:
        data_source = args.data_source
    else:
        data_source = '%s/reddit_ml.txt' % os.path.dirname(
            os.path.realpath(__file__))
    if not os.path.exists(data_source):
        raise IOError(
            'You must download the data with ./examples/apollocaffe/char_model/get_char.sh'
        )
    epoch = 0
    while True:
        with open(data_source, 'r') as f:
Ejemplo n.º 18
0
import util
from visualizer import visualizer

import apollocaffe
import argparse
from collections import defaultdict
import importlib
import itertools
import logging.config
import numpy as np
import yaml

# Number of candidates kept — presumably the k-best list size; TODO confirm
# against its usage.
KBEST=5

# Fix RNG seeds so runs are reproducible.
np.random.seed(0)
apollocaffe.set_random_seed(0)

arg_parser = argparse.ArgumentParser()
arg_parser.add_argument("-c", "--config", dest="config", required=True,
                        help="model configuration file")
arg_parser.add_argument("-l", "--log-config", dest="log_config", 
                        default="config/log.yml", help="log configuration file")

@profile
def main():
    args = arg_parser.parse_args()
    config_name = args.config.split("/")[-1].split(".")[0]

    with open(args.log_config) as log_config_f:
        log_filename = "logs/%s.log" % config_name
        log_config = yaml.load(log_config_f)
Ejemplo n.º 19
0
                              config["net"])
    im.set_array(new_frame)
    output_video.write(new_frame)
    return im,


if __name__ == "__main__":
    # load config
    config = json.load(open("config.json", 'r'))
    data_mean = load_data_mean(config["data"]["idl_mean"],
                               config["net"]["img_width"],
                               config["net"]["img_height"],
                               image_scaling=1.0)

    # init apollocaffe
    apollocaffe.set_random_seed(config["solver"]["random_seed"])
    apollocaffe.set_device(0)

    # model and video source
    global src_list
    # src_list = [ ("./multi_scene_data/pre_data/video_640_480/second_carteen_02.mov",
    # 		[('./data/brainwash_800000.h5',False),
    # 		 ("./tmp/saved/second_carteen_3_1800.h5", True)]),
    # 	   ("./multi_scene_data/pre_data/video_640_480/laoximen.mov",
    # 		[('./data/brainwash_800000.h5',False),
    # 		 ("./tmp/saved/laoximen_3_1100.h5", True)]),
    # 	  ("./multi_scene_data/pre_data/video_640_480/tianmulu_03.mov",
    # 		[('./data/brainwash_800000.h5',False),
    # 		 ("./tmp/saved/tianmu_3_1400.h5", True)])]
    src_list = [
        ("./multi_scene_data/pre_data/video_640_480/second_carteen_02.mov",