#!/usr/bin/env python
"""Dump WordNet glosses for every ImageNet class name to a text file.

For each of the 1000 class labels, every matching synset's definition is
written as one line: "<label_with_underscores> <definition>".  Labels with
no WordNet hit are reported on stdout.
"""
from nltk.corpus import wordnet as wn
from imagenet import ImageNet

CLASSES = '../../Classes/classes.txt'
OUTFILE = 'definitions.txt'

inet = ImageNet(CLASSES)

with open(OUTFILE, 'w') as outfh:
    for idx in range(0, 1000):
        label = inet.name(idx)
        # WordNet lemma names use underscores where the label has spaces.
        token = label.replace(' ', '_')
        senses = wn.synsets(token)
        print(label)
        if not senses:
            print("ERR: %d %s not found" % (idx, label))
        for sense in senses:
            outfh.write(token + " " + sense.definition() + "\n")
# Driver: seed a random drawing, then iterate classify -> redraw, logging the
# seed frame's target classes to the run's text file.
outfile = os.path.join(OUTDIR, "twitter1.txt")

# Look up human-readable class names for the log line.
imagen = ImageNet("./Classes/classes.txt")

# Pick NSTART random ImageNet class ids as seed targets and render frame 0.
start_targets = random.sample(range(0, 1000), NSTART)
conffile = os.path.join(OUTDIR, 'conf0.json')
neuralgia.write_config(start_targets, conffile)
subprocess.call(["./neuralgae_draw.sh", OUTDIR, 'image0', conffile, SIZE, SCALE, BLEND])
lastimage = os.path.join(OUTDIR, 'image0.jpg')

# Record the seed frame alongside its class names.
classes = ', '.join(imagen.name(c) for c in start_targets)
with open(outfile, 'w') as f:
    f.write("%s: %s\n" % (lastimage, classes))

# NOTE(review): the first iteration (i=0) rewrites conf0.json and redraws
# image0, overwriting the seed frame produced above — confirm this is intended
# (a range starting at 1 would preserve the seed).
for i in range(0, 20):
    jsonfile = os.path.join(OUTDIR, ("conf%d.json" % i))
    # Derive the next target set from the previous frame ...
    subprocess.call(["./classify.py", str(NTWEEN), str(NSAMPLE), lastimage, jsonfile])
    # ... then render it.
    subprocess.call(["./neuralgae_draw.sh", OUTDIR, "image%d" % i, jsonfile, SIZE, SCALE, BLEND])
    lastimage = os.path.join(OUTDIR, ("image%d.jpg" % i))
    t = neuralgia.read_config(jsonfile)
    if t:
        print(t)
# Classify the input image, then select the next set of drawing targets from
# its top-scoring ImageNet classes and persist them for the renderer.
net.blobs['data'].data[...] = transformer.preprocess(
    'data', caffe.io.load_image(args.image))
print("About to predict...")
out = net.forward()

# argsort is ascending, so walk the tail backwards to get the n best
# class indices (nclasses = n + 1 makes the slice yield exactly n items).
nclasses = int(args.n) + 1
top_k = net.blobs['prob'].data[0].flatten().argsort()
targets = []
for i in top_k[-1:-nclasses:-1]:
    print(classes.name(i))
    targets.append(i)

# Optionally thin the candidates down to a random sample of size s.
s = int(args.sample)
n = int(args.n)
if s < n:
    print("Sampling %d of %d" % (s, n))
    targets = random.sample(targets, s)
    print("Sample: " + ', '.join([classes.name(i) for i in targets]))

neuralgia.write_config(targets, args.output)
# Run the network on the given image and turn its strongest predictions into
# the next drawing configuration.
image_data = transformer.preprocess('data', caffe.io.load_image(args.image))
net.blobs['data'].data[...] = image_data
print("About to predict...")
out = net.forward()

n = int(args.n)
s = int(args.sample)

# Indices sorted by ascending probability; the reverse tail slice picks the
# n most probable classes (hence the +1 bound).
nclasses = n + 1
top_k = net.blobs['prob'].data[0].flatten().argsort()
targets = []
for i in top_k[-1:-nclasses:-1]:
    print(classes.name(i))
    targets.append(i)

if s < n:
    # Keep only a random subset of the top classes.
    print("Sampling %d of %d" % (s, n))
    targets = random.sample(targets, s)
    print("Sample: " + ', '.join([classes.name(i) for i in targets]))

neuralgia.write_config(targets, args.output)
# Driver variant: the seed-drawing steps are disabled — this run resumes from
# the final frame of a previous chapter instead of rendering a fresh image0.
outfile = os.path.join(OUTDIR, "neuralgia6.txt")
start_targets = random.sample(range(0, 1000), NSTART)
lastimage = "./Neuralgia/Chapter5/image199.jpg"

# Log the resume point together with the (unused-for-drawing) start classes.
imagen = ImageNet("./Classes/classes.txt")
classes = ', '.join(imagen.name(c) for c in start_targets)
with open(outfile, 'w') as f:
    f.write("%s: %s\n" % (lastimage, classes))

# Single classify -> redraw step (range(0, 1) runs the body once, for i=0).
for i in range(0, 1):
    jsonfile = "./Neuralgia/conf%d.json" % i
    # Derive new targets from the previous frame, then render them.
    subprocess.call(["./classify.py", str(NTWEEN), str(NSAMPLE), lastimage, jsonfile])
    subprocess.call(["./neuralgia.sh", OUTDIR, "image%d" % i, jsonfile, SIZE, SCALE, BLEND])
    lastimage = "./Neuralgia/image%d.jpg" % i
    t = neuralgia.read_config(jsonfile)
    if t:
        print(t)