def test_min_informative_str():
    # evaluates a reference output to make sure the
    # min_informative_str function works as intended
    A = tensor.matrix(name="A")
    B = tensor.matrix(name="B")
    C = A + B
    C.name = "C"
    D = tensor.matrix(name="D")
    E = tensor.matrix(name="E")
    F = D + E
    G = C + F

    mis = min_informative_str(G).replace("\t", " ")

    reference = """A. Elemwise{add,no_inplace}
 B. C
 C. Elemwise{add,no_inplace}
  D. D
  E. E"""

    if mis != reference:
        print("--" + mis + "--")
        print("--" + reference + "--")

    assert mis == reference
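# A minimal, self-contained sketch (not part of the test above): building a small
# graph like the one in the test and printing it with min_informative_str. Only the
# theano.tensor and theano.printing APIs already used in these excerpts are assumed.
import theano.tensor as tensor
from theano.printing import min_informative_str

A = tensor.matrix(name="A")
B = tensor.matrix(name="B")
C = A + B
C.name = "C"
G = C + (tensor.matrix(name="D") + tensor.matrix(name="E"))

# Named variables print as their names; unnamed intermediate results get fresh
# tags (A., B., ...) as in the reference string above.
print(min_informative_str(G))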
def handle_line(fgraph, line, i, node, fn):
    """
    Records new node computation.

    Parameters
    ----------
    line : string
        Line to record. For example, the function name or node name.
    i : integer
        Node number in the toposort order.
    node : Apply
        The Apply node which created the entry.
    fn : Function
        Function related to Apply node.
    """
    # Note: this is a closure, not a method -- `self` (and its `record`
    # attribute) is captured from the enclosing scope.
    try:
        self.record.handle_line(line)
    except MismatchError as e:
        print("Got this MismatchError:")
        print(e)
        print(f"while processing node i={i}:")
        print(f"str(node):{node}")
        print("Symbolic inputs: ")
        for elem in node.inputs:
            print(min_informative_str(elem))
        print("str(output) of outputs: ")
        for elem in fn.outputs:
            assert isinstance(elem, list)
            (elem,) = elem
            print(str(elem))
        print(f"function name: {fgraph.name}")
        raise MismatchError("Non-determinism detected by WrapLinker")
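# Hedged sketch, not the real implementation: `self.record.handle_line` above comes
# from a recording object defined elsewhere. A hypothetical stand-in with the same
# record-then-compare behaviour could look like this.
class SimpleRecord(object):
    """Writes lines on the first run; on replay, compares each line instead."""

    def __init__(self, path, replay=False):
        self.replay = replay
        self.f = open(path, 'r' if replay else 'w')

    def handle_line(self, line):
        if self.replay:
            expected = self.f.readline()
            if expected != line:
                # Plays the role of MismatchError in the excerpt above.
                raise ValueError("recorded %r but got %r" % (expected, line))
        else:
            self.f.write(line)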
nodes = func.maker.fgraph.toposort()

count = 0
for node in nodes:
    if str(type(node.op)).lower().find('hostfrom') != -1:
        count += 1
        found = 0
        for ipt in node.inputs:
            if ipt.owner is not None and str(type(ipt.owner.op)).lower().find('hostfrom') != -1:
                found += 1
                try:
                    print ipt.ndim, 'dimensions'
                except:
                    print 'no ndm'
                print min_informative_str(ipt)
        if found > 0:
            print type(node.op), found
            try:
                print '\t', type(node.op.scalar_op)
            except:
                pass
print count
"""
# The closing triple quote above ends a string literal that comments out the
# graph-inspection block; only the code below it is live.

i = 58
for key in mf1mod.hidden_layers[0].transformer.get_params():
    func = function([Xb, yb, alpha], updates[key], on_unused_input='ignore')
nodes = func.maker.fgraph.toposort()

count = 0
for node in nodes:
    if str(type(node.op)).lower().find('hostfrom') != -1:
        count += 1
        found = 0
        for ipt in node.inputs:
            if ipt.owner is not None and str(type(
                    ipt.owner.op)).lower().find('hostfrom') != -1:
                found += 1
                try:
                    print ipt.ndim, 'dimensions'
                except:
                    print 'no ndm'
                print min_informative_str(ipt)
        if found > 0:
            print type(node.op), found
            try:
                print '\t', type(node.op.scalar_op)
            except:
                pass
print count

test = CIFAR10(which_set='test', one_hot=True, gcn=55.)

yl = T.argmax(yb, axis=1)

mf1acc = 1. - T.neq(yl, T.argmax(ymf1, axis=1)).mean()
# mfnacc = 1. - T.neq(yl, T.argmax(mfny, axis=1)).mean()
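# Hedged sketch: a more compact way to get the same kind of op census as the
# loops above, assuming only func.maker.fgraph.toposort() as used there.
from collections import Counter

def op_census(func):
    """Counts how many times each Op class appears in a compiled function's graph."""
    return Counter(type(node.op).__name__
                   for node in func.maker.fgraph.toposort())

# Example use, e.g. to spot unexpected HostFromGpu transfers:
# for name, n in op_census(func).most_common():
#     print(name, n)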
X = space.make_theano_batch()
X.tag.test_value = space.get_origin_batch(m).astype(X.dtype)
inputs = [X]

history = model.mf(X, return_history=True)
for elem in history:
    assert isinstance(elem, (list, tuple))
    assert len(elem) == len(model.hidden_layers)
outputs = [elem[-1] for elem in history]
for elem in outputs:
    for value in get_debug_values(elem):
        if value.shape[0] != m:
            print 'culprit is', id(elem)
            print min_informative_str(elem)
            quit(-1)
f = function(inputs, outputs)

n_classes = model.hidden_layers[-1].n_classes
if isinstance(n_classes, float):
    assert n_classes == int(n_classes)
    n_classes = int(n_classes)
assert isinstance(n_classes, int)

templates = np.zeros((n_classes, space.get_total_dimension()))
for i in xrange(n_classes):
    for j in xrange(-1, -dataset.X.shape[0], -1):
        if dataset.y[j, i]:
            templates[i, :] = dataset.X[j, :]
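# Hedged note, not part of the original script: for get_debug_values(elem) above to
# yield anything, Theano's test-value machinery has to be enabled (the excerpt already
# sets X.tag.test_value explicitly). A typical setup looks like this.
import theano
theano.config.compute_test_value = 'warn'   # or 'raise' to fail fast on missing values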
elif field.startswith('.'):
    obj_name += '.' + field
    orig_obj = getattr(orig_obj, field[1:])
else:
    # record the subscript in the running name, mirroring the '.' branch above
    obj_name += '[' + field + ']'
    orig_obj = orig_obj[eval(field)]

if id(orig_obj) in cycle_check:
    print("You're going in circles, " + obj_name + " is the same as " +
          cycle_check[id(orig_obj)])
    quit()
cycle_check[id(orig_obj)] = obj_name

print('type of object: ' + str(type(orig_obj)))
print('object: ' + str(orig_obj))
print('object, longer description:\n' +
      min_informative_str(orig_obj, indent_level=1))

t1 = time.time()
s = cPickle.dumps(orig_obj, hp)   # `hp` (presumably a pickle protocol) is defined earlier in the script
t2 = time.time()
prev_ts = t2 - t1
prev_bytes = len(s)
print('orig_obj bytes: \t\t\t\t' + str(prev_bytes))

t1 = time.time()
x = cPickle.loads(s)
t2 = time.time()
prev_t = t2 - t1
print('orig load time: ' + str(prev_t))
print('orig save time: ' + str(prev_ts))
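# Hedged sketch (not from the original script): a small helper in the same spirit,
# reporting the pickled size of each attribute of an object to see what dominates
# the totals printed above. Uses only the standard library.
import pickle

def pickle_size_report(obj, protocol=pickle.HIGHEST_PROTOCOL):
    """Prints pickled byte counts per attribute (for objects with a __dict__), largest first."""
    sizes = []
    for name, value in vars(obj).items():
        try:
            sizes.append((len(pickle.dumps(value, protocol)), name))
        except Exception as e:
            print(name + ': not picklable (' + str(e) + ')')
    for nbytes, name in sorted(sizes, reverse=True):
        print(name + ': ' + str(nbytes) + ' bytes')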
for i in xrange(num_rounds):
    do_one_V_update()
    do_one_optimized_H_update()

cur_V = V
cur_H = H

for i in xrange(num_rounds):
    cur_V = update_V(cur_H)
    cur_H = update_H(cur_V)

print 'Compiling unrolled theano'
unrolled_theano = function([], updates={V: cur_V, H: cur_H})
from theano.printing import min_informative_str
print min_informative_str(unrolled_theano.maker.env.outputs[0])
assert False

def unrolled_loop():
    init()
    unrolled_theano()

print 'Timing python loop'
run_timed_trial(python_loop)

print 'Timing unrolled loop'
run_timed_trial(unrolled_loop)

print 'Timing optimized python loop'
run_timed_trial(optimized_python_loop)
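# Hedged sketch: run_timed_trial is not shown in the excerpt above; a plausible
# stand-in that simply times repeated calls might look like this (an assumption,
# not the original helper).
import time

def run_timed_trial(fn, n_trials=10):
    times = []
    for _ in range(n_trials):
        t0 = time.time()
        fn()
        times.append(time.time() - t0)
    print(min(times), 'seconds (best of', n_trials, 'trials)')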