def check_on_previous():
    """Test the current hypothesis against all previously seen examples.

    Writes the hypothesis clauses to runtime/test_file.lp, replays every
    example in ``gl.seen_examples`` through the ASP reasoner, reports each
    unsatisfiable example, and removes the temporary file afterwards.
    """
    t = gl.current_hypothesis
    new = t['new']
    specialized = t['specialized']
    # BUG FIX: was `retained = ['retained']` (a literal one-string list);
    # the intent is clearly the hypothesis' retained clauses.
    retained = t['retained']
    x = utils.merge_all_lists([new, specialized, retained])
    cwd = os.getcwd()
    testpath = os.path.join(os.path.dirname(cwd), 'runtime', 'test_file.lp')
    # `with` guarantees the file is closed even if a write fails.
    with open(testpath, 'w') as testfile:
        for y in x:
            testfile.write(y.as_string_with_var_types)
            testfile.write('\n\n')
        testfile.write('#hide.')
    import asp
    # BUG FIX: the flag logic was inverted (started False, set True on an
    # unsatisfiable example, then reported "consistent"). Start optimistic
    # and clear on any failure.
    allok = True
    for i in gl.seen_examples:
        utils.get_example(i)
        if not asp.test_hypothesis(debug=True):
            # BUG FIX: was print('...%s') % (str(i)) -- that applies `%` to
            # print()'s return value (None) and raises TypeError in Python 3.
            print('unsatisfiable at example %s' % str(i))
            allok = False
    if allok:
        print('Hypothesis consistent with all past examples')
    # BUG FIX: os.remove expects a path string, not a (closed) file object.
    os.remove(testpath)
def check_on_previous():
    """Test the current hypothesis against all previously seen examples.

    Writes the hypothesis clauses to runtime/test_file.lp, replays every
    example in ``gl.seen_examples`` through the ASP reasoner, reports each
    unsatisfiable example, and removes the temporary file afterwards.
    """
    t = gl.current_hypothesis
    new = t['new']
    specialized = t['specialized']
    # BUG FIX: was `retained = ['retained']` (a literal one-string list);
    # the intent is clearly the hypothesis' retained clauses.
    retained = t['retained']
    x = utils.merge_all_lists([new, specialized, retained])
    cwd = os.getcwd()
    testpath = os.path.join(os.path.dirname(cwd), 'runtime', 'test_file.lp')
    # `with` guarantees the file is closed even if a write fails.
    with open(testpath, 'w') as testfile:
        for y in x:
            testfile.write(y.as_string_with_var_types)
            testfile.write('\n\n')
        testfile.write('#hide.')
    import asp
    # BUG FIX: the flag logic was inverted (started False, set True on an
    # unsatisfiable example, then reported "consistent"). Start optimistic
    # and clear on any failure.
    allok = True
    for i in gl.seen_examples:
        utils.get_example(i)
        if not asp.test_hypothesis(debug=True):
            # BUG FIX: was print('...%s')%(str(i)) -- that applies `%` to
            # print()'s return value (None) and raises TypeError in Python 3.
            print('unsatisfiable at example %s' % str(i))
            allok = False
    if allok:
        print('Hypothesis consistent with all past examples')
    # BUG FIX: os.remove expects a path string, not a (closed) file object.
    os.remove(testpath)
def test_default(**kwargs):
    """Run the ASP reasoner on the current test theory and interpret its output.

    Returns True when the hypothesis covers the current example (no atoms
    reported, or the coverage score equals the example's positive count),
    False when UNSATISFIABLE or under-covering.

    Keyword args:
        last_seen: example index to restore afterwards (debugging mode);
            also enables ok / "Not ok!" progress printing.

    Raises:
        excps.HypothesisTestingException: on unrecognized reasoner output.
    """
    import utils
    result = None
    testfile = test_theory
    options = [clingo, bk, ex, testfile, exmpl_constr, '0 --asp09']
    command = ' '.join(options)
    out = os.popen(command).read().split('. ')
    if out[0].strip() == 'UNSATISFIABLE':
        result = False
    else:
        out = set(x.strip() for x in out if x.strip() != '')
        # BUG FIX: this was a lazy `filter(...)` object; the `list(out)`
        # test below exhausted it, so the subsequent `all(... for x in out)`
        # was vacuously True and utils.get_score(out) received an empty
        # iterator. Materialize the filtered atoms exactly once.
        out = [x for x in out if 'OPTIMUM FOUND' not in x]
        if out == []:
            result = True
        elif all('posCovered' in x or 'negsCovered' in x for x in out):
            (_, _, score) = utils.get_score(out)
            # BUG FIX: was `return True` / `return False` here, which
            # bypassed the 'last_seen' cleanup below and left the wrong
            # example loaded in debugging mode.
            result = score == gl.current_example_object.positive_count
        else:
            raise excps.HypothesisTestingException(
                'ASP reasoner returned %s' % (' '.join(out)), gl.logger)
    if 'last_seen' in kwargs:
        # Debugging mode: report the outcome, then restore the example the
        # caller was on before this test.
        if result:
            print(gl.current_example, 'ok')
        else:
            print(gl.current_example, 'Not ok!')
        i = kwargs['last_seen']
        utils.get_example(i)
    return result
def test_all(last_example):
    """Test the current hypothesis on every seen example.

    Args:
        last_example: example index to restore once testing is done.

    Returns:
        (all_ok, unsat) where `unsat` lists the indices of examples the
        hypothesis failed on.
    """
    import utils
    all_, unsat = True, []
    for i in gl.seen_examples:
        utils.get_example(i)
        # BUG FIX: was `test = test_hypothesis` -- that bound the function
        # object (always truthy) instead of calling it, so no failure was
        # ever recorded.
        test = test_hypothesis()
        if not test:
            all_ = False
            unsat.append(i)
    utils.get_example(last_example)  # clean up: restore the caller's example
    if all_:
        print('Hypothesis ok with all seen examples')
    else:
        # BUG FIX: was print('...%s')%(...) -- applies `%` to print()'s
        # return value (None) and raises TypeError in Python 3.
        print('Not ok with %s' % ','.join(str(x) for x in unsat))
    return (all_, unsat)
def get_questions(self):
    """Build a question dict for every data dict whose (collection, relation)
    pair has a registered question template.

    Each emitted dict carries the original data fields plus 'quid',
    'relation', a rendered 'question' string, and example fields drawn from
    self.relation_examples_dict.
    """
    questions = []
    for d in self.data_dicts:
        relation = d['relation']
        coll = d['collection']
        prop = d['property']
        scale = self.prop_info[prop][0]['scale']
        coll_rel = (coll, relation)
        if prop.startswith('made_of'):
            coll = 'parts_material'
            # BUG FIX: was prop.lstrip('made_of_'), which strips any of the
            # characters m,a,d,e,_,o,f from the left (e.g. 'made_of_metal'
            # -> 'tal'). Remove the literal prefix instead.
            if prop.startswith('made_of_'):
                prop = prop[len('made_of_'):]
            scale = self.prop_info[prop][0]['scale']
            # NOTE(review): coll was changed to 'parts_material' but
            # coll_rel is only rebuilt below when scale == 'T'; the sibling
            # implementation rebuilds it unconditionally -- looks like a
            # latent bug here, behavior kept as-is pending confirmation.
        if scale == 'T':
            coll = coll + '_scale'
            coll_rel = (coll, relation)
        if coll_rel in self.question_templates:
            q_d = dict()
            q_d.update(d)
            qu_temp = self.question_templates[coll_rel]
            cat = self.prop_info[prop][0]['category']
            q_d['quid'] = d['quid']
            q_d['relation'] = relation
            q_d['question'] = utils.create_question(prop, d['concept'],
                                                    qu_temp, cat)
            examples = self.relation_examples_dict[relation][coll]
            ex_dict = utils.get_example(examples, qu_temp,
                                        self.prop_info, relation)
            # 'collection' from the example record would clobber q_d's own.
            if 'collection' in ex_dict:
                ex_dict.pop('collection')
            q_d.update(ex_dict)
            questions.append(q_d)
    return questions
def display_example(word):
    """Look up a usage example for *word* and show it in the output pane.

    Falls back to an error dialog when no example is available.
    """
    out = window['output']
    sentence = get_example(word)
    out.print("Word: " + word)
    if not sentence:
        display_error("Meaning Not Found")
    else:
        out.print("Example: ", sentence)
def test_hypothesis(**kwargs):
    """Test a hypothesis on examples.

    Call patterns:
      - No kwargs: check the hypothesis on ``gl.current_example``
        (normal mode).
      - ``example=i`` with integer ``i``: fetch example ``i`` from the
        database, run the check, then restore ``gl.current_example``
        (debugging mode).
      - ``example='all'``: check the hypothesis on all seen examples and
        clean up afterwards (debugging mode).
    """
    import utils
    saved_example = gl.current_example  # remembered so debug modes can restore it
    if not kwargs:
        # normal mode: just test the currently loaded example
        return test_default()
    if 'example' in kwargs:
        target = kwargs['example']
        if utils.isint(target):
            utils.get_example(target)
            return test_default(last_seen=saved_example)
        # any non-integer value (i.e. 'all') means: check every seen example
        return test_all(saved_example)
def main():
    """Train an MNIST autoencoder; optionally save/load the model and dump
    per-step reconstruction images of a chosen digit.
    """
    args = utils.get_args()
    print("Prepare dataset...")
    # one_hot labels; data downloaded/cached under ./mnist/
    mnist = input_data.read_data_sets("mnist/", one_hot = True)
    with tf.Graph().as_default(), tf.Session() as session:
        # 784 = 28*28 flattened MNIST image
        autoencoder = Autoencoder(
            784,
            args.hid_shape,
            args.lat_shape,
            optimizer = tf.train.AdagradOptimizer(args.lr),
            batch_size = args.batch_size,
            dropout = args.dropout)
        session.run(tf.initialize_all_variables())
        # Saver is needed for either saving or restoring.
        if args.save_model or args.load_model:
            saver = tf.train.Saver()
        if args.load_model:
            try:
                saver.restore(session, utils.SAVER_FILE)
            except ValueError:
                print("Cant find model file")
                sys.exit(1)
        if args.make_imgs:
            index = 0  # running counter used to name the snapshot images
            print("Prepare images directory...")
            utils.prepare_image_folder()
            # A fixed test example of the requested digit, re-encoded
            # periodically to visualize training progress.
            example = utils.get_example(args.digit, mnist.test)
        print("Start training...")
        for epoch in range(args.epoches):
            for i, batch in enumerate(utils.gen_data(args.batch_size, mnist.train.images)):
                autoencoder.fit_on_batch(session, batch)
                # Every `log_after` batches: evaluate and (optionally) snapshot.
                if (i+1) % args.log_after == 0:
                    test_cost = autoencoder.evaluate(session, mnist.test.images)
                    print("Test error = {0:.4f} on {1} batch in {2} epoch".format(test_cost, i+1, epoch+1))
                    if args.make_imgs:
                        path = os.path.join(utils.IMG_FOLDER, "{0:03}.png".format(index))
                        autoencoded = autoencoder.encode_decode(session, example.reshape(1, 784))
                        utils.save_image(autoencoded.reshape((28, 28)), path)
                        index += 1
        if args.save_model:
            saver.save(session, utils.SAVER_FILE)
            print("Model saved")
def get_questions(self):
    """Build question dicts for every data dict, expanded across all
    relations of the certainty/label-derived levels.

    Each emitted dict carries the original data fields plus a fresh
    uuid4 'quid', the 'relation', a rendered 'question' string, and
    example fields drawn from self.relation_examples_dict.
    """
    questions = []
    for d in self.data_dicts:
        certainty = d['certainty']
        label = d['label']
        levels = utils.get_levels(label, certainty, self.restrict)
        for l in levels:
            relations = self.level_dict[l]
            for rel in relations:
                coll = d['collection']
                prop = d['property']
                if prop.startswith('made_of'):
                    coll = 'parts_material'
                    # BUG FIX: was prop.lstrip('made_of_'), which strips any
                    # of the characters m,a,d,e,_,o,f from the left (e.g.
                    # 'made_of_metal' -> 'tal'). Remove the literal prefix.
                    if prop.startswith('made_of_'):
                        prop = prop[len('made_of_'):]
                scale = self.prop_info[prop][0]['scale']
                if scale == 'T':
                    coll = coll + '_scale'
                coll_rel = (coll, rel)
                if coll_rel in self.question_templates:
                    q_d = dict()
                    q_d.update(d)
                    qu_temp = self.question_templates[coll_rel]
                    cat = self.prop_info[prop][0]['category']
                    q_d['quid'] = uuid.uuid4()
                    q_d['relation'] = rel
                    q_d['question'] = utils.create_question(
                        prop, d['concept'], qu_temp, cat)
                    examples = self.relation_examples_dict[rel][coll]
                    ex_dict = utils.get_example(examples, qu_temp,
                                                self.prop_info, rel)
                    # 'collection' from the example record would clobber
                    # q_d's own collection field.
                    if 'collection' in ex_dict:
                        ex_dict.pop('collection')
                    q_d.update(ex_dict)
                    questions.append(q_d)
    return questions
def learn(**kwargs):
    """Main incremental learning loop.

    Optional boolean kwargs (all default off): heuristic_search,
    set_cover_search, clause_level_search, incremental_solve.

    Streams examples from the database in order; each example is tested
    against the current hypothesis and, on failure, the hypothesis is
    revised. Whenever a revision yields brand-new clauses, the hypothesis
    is re-checked (and possibly re-revised) against every previously seen
    example, then working state is tidied up.
    """
    u.clear_prior()
    found_new_clauses = False
    # Idiom: kwargs.get(...) is truthy exactly when the key is present and
    # set -- equivalent to the original presence-and-truthiness checks.
    hs = bool(kwargs.get('heuristic_search'))
    scs = bool(kwargs.get('set_cover_search'))
    cls = bool(kwargs.get('clause_level_search'))
    incsolve = bool(kwargs.get('incremental_solve'))
    # NOTE(review): the step/window returned here is currently unused (the
    # step-interval loop that consumed it was dead, commented-out code, now
    # removed), but the call is kept in case it has side effects on gl.
    (step, up_to_exmpl) = utils.get_time_step(gl)
    (newclauses, retained, specialized) = ([], [], [])
    u.post_exmpl_constraints(heuristic_search=hs, set_cover_search=scs,
                             clause_level_search=cls)
    # Removed dead debug leftovers: a commented-out probe loop over the
    # first 600 examples, alternative ranges, and an
    # `if i == 1672: stop = 'stop'` breakpoint stub.
    for i in range(1, 1000000):
        if utils.get_example(i):
            gl.current_example = i
            if not asp.test_hypothesis():
                print(i)
                print('revising')
                # Revise the hypothesis against the failing new example.
                (newclauses, retained, specialized) = \
                    u.revise(is_new_example=True, debug=False,
                             newcl=newclauses, refcl=specialized,
                             retcl=retained, heuristic_search=hs,
                             set_cover_search=scs, clause_level_search=cls,
                             incremental_solve=incsolve)
                found_new_clauses = newclauses != []
            else:
                print(i)
                print('correct')
                # Example already covered: refresh support counts only.
                if gl.current_example_object.has_positives:
                    u.update_support([newclauses, retained, specialized],
                                     simple_update=True)
            if found_new_clauses:
                # Brand-new clauses must stay consistent with the history.
                for j in gl.seen_examples:
                    utils.get_example(j)
                    if not asp.test_hypothesis():
                        (newclauses, retained, specialized) = \
                            u.revise(is_new_example=False, debug=False,
                                     newcl=newclauses, refcl=specialized,
                                     retcl=retained, heuristic_search=hs,
                                     set_cover_search=scs,
                                     clause_level_search=cls,
                                     incremental_solve=incsolve)
            u.tideup(newclauses, retained, specialized,
                     kernel='clear', delta='clear',
                     usedict='clear', updatexmpl=i)
            found_new_clauses = False
def learn(**kwargs):
    """Main incremental learning loop.

    Optional boolean kwargs (all default off): heuristic_search,
    set_cover_search, clause_level_search, incremental_solve.

    Streams examples from the database in order; each example is tested
    against the current hypothesis and, on failure, the hypothesis is
    revised. Whenever a revision yields brand-new clauses, the hypothesis
    is re-checked against every previously seen example.
    """
    u.clear_prior()
    found_new_clauses = False
    hs, scs, cls, incsolve = False, False, False, False
    (step, up_to_exmpl) = utils.get_time_step(gl)
    # NOTE(review): time_interval is never consumed -- the loop that used it
    # is commented out below.
    time_interval = (i for i in range(0, up_to_exmpl * step + 1, step))
    (newclauses, retained, specialized) = ([], [], [])
    if 'heuristic_search' in kwargs and kwargs['heuristic_search']:
        hs = True
    if 'set_cover_search' in kwargs and kwargs['set_cover_search']:
        scs = True
    if 'clause_level_search' in kwargs and kwargs['clause_level_search']:
        cls = True
    if 'incremental_solve' in kwargs and kwargs['incremental_solve']:
        incsolve = True
    u.post_exmpl_constraints(heuristic_search=hs, set_cover_search=scs, clause_level_search=cls)
    # Dead debug probe kept verbatim (a bare string literal -- never executed).
    """
    for i in range(1,600):
        if utils.get_example(i):
            gl.current_example = i
            #if not asp.test_hypothesis():
            print(i)
            asp.show_negs()
    print('here')
    """
    #for i in time_interval:
    #for i in range(1,100000):
    for i in range(1, 1000000):
        #for i in (110,120):
        #i = 250
        #print(i)
        # NOTE(review): leftover debugging breakpoint stub -- has no effect.
        if i == 1672:
            stop = 'stop'
        if utils.get_example(i):
            gl.current_example = i
            if not asp.test_hypothesis():
                print(i)
                print('revising')
                # Revise the hypothesis against the failing new example.
                (newclauses,retained,specialized) = \
                    u.revise(is_new_example=True, debug=False, newcl=newclauses,
                             refcl=specialized, retcl=retained,heuristic_search=hs,
                             set_cover_search=scs, clause_level_search=cls,
                             incremental_solve=incsolve)
                found_new_clauses = newclauses != []
            else:
                print(i)
                print('correct')
                # Example already covered: refresh clause support counts only.
                if gl.current_example_object.has_positives:
                    u.update_support([newclauses, retained, specialized], simple_update=True)
            if found_new_clauses:
                # Brand-new clauses must stay consistent with the whole history.
                for j in gl.seen_examples:
                    utils.get_example(j)
                    if not asp.test_hypothesis():
                        (newclauses,retained,specialized) = \
                            u.revise(is_new_example=False, debug=False, newcl=newclauses,
                                     refcl=specialized, retcl=retained,heuristic_search=hs,
                                     set_cover_search=scs, clause_level_search=cls,
                                     incremental_solve=incsolve)
                    else:
                        pass
            u.tideup(newclauses, retained, specialized, kernel='clear', delta='clear',
                     usedict='clear', updatexmpl=i)
            found_new_clauses = False
def train_net(self):
    """Train the restoration net.

    Optimizes a combined loss -- pixel-wise MSE plus an alpha-weighted
    gradient-similarity term -- with Adam, saves the trained model to
    self.model_location, and dumps the loss curves under stats/.
    """
    # Builds the graph; the attributes read below (self.label, self.out,
    # self.gradients, self.convs, self.pool_dims) are presumably populated
    # here -- confirm against build_net. The return value was unused.
    self.build_net()
    print("### net created successfully ###")
    # cost function: reconstruction error ...
    cost_average_pixels = tf.reduce_mean(tf.square(self.label - self.out))
    # ... plus a penalty on the variance of the filter gradients.
    cost_similar_filters = tf.reduce_mean(
        tf.square(
            tf.stack(self.gradients) -
            tf.reduce_mean(tf.stack(self.gradients))))
    cost = cost_average_pixels + self.alpha * cost_similar_filters
    optimizer = tf.train.AdamOptimizer(
        learning_rate=self.learning_rate).minimize(cost)
    saver = tf.train.Saver()
    # Per-step loss histories, dumped to stats/ at the end.
    loss_graph = []
    loss_a = []
    loss_b = []
    loss_g = []
    with tf.Session() as sess:
        print("### initializing variables ###")
        sess.run(tf.global_variables_initializer())
        gen = utils.get_example(self.path, self.num_images)
        # BUG FIX: message typo "strating" -> "starting".
        print("### starting the training ###")
        for iter_index in range(self.num_epochs):
            for index in range(self.num_images):
                (x, y) = next(gen)
                # Feed dict: conv activations, pooling dims, and the label.
                fd = {}
                for i in range(len(x[0])):
                    fd[self.convs[i]] = x[0][i]
                for i in range(len(x[1])):
                    fd[self.pool_dims[i]] = x[1][i]
                fd[self.label] = y
                _, c, c_a, c_b, c_g = sess.run([
                    optimizer, cost, cost_average_pixels,
                    cost_similar_filters, self.gradients
                ], feed_dict=fd)
                loss_graph.append(c)
                loss_a.append(c_a)
                loss_b.append(c_b)
                loss_g.append(c_g)
            # Progress report every 50 epochs and on the final one.
            if iter_index % 50 == 0 or iter_index == self.num_epochs - 1:
                print("Finished iteration " + str(iter_index + 1) +
                      " out of " + str(self.num_epochs) +
                      " with loss: " + str(c))
                # BUG FIX: was print("pixelwise: %d", c_a) -- print() does
                # not %-format its arguments (it printed the literal "%d"
                # followed by the value), and %d would truncate the float.
                print("pixelwise: %f" % c_a)
                print("gradients: %f" % c_b)
                print(c_g)
        print("### finished training ###")
        save_path = saver.save(sess, self.model_location)
        print("Model saved in file: %s" % save_path)
    np.save("stats/loss_total.npy", loss_graph)
    np.save("stats/loss_a.npy", loss_a)
    np.save("stats/loss_b.npy", loss_b)
    np.save("stats/loss_g.npy", loss_g)