import json
import time

from libpgm.nodedata import NodeData
from libpgm.graphskeleton import GraphSkeleton
from libpgm.discretebayesiannetwork import DiscreteBayesianNetwork
from libpgm.pgmlearner import PGMLearner


def timer(inputfile, trials, datalength):
    # load node data and graph skeleton
    nd = NodeData()
    skel = GraphSkeleton()
    nd.load(inputfile)
    skel.load(inputfile)

    # topologically order the graph skeleton
    skel.toporder()

    # load the Bayesian network
    bn = DiscreteBayesianNetwork(skel, nd)

    # instantiate the PGM learner
    l = PGMLearner()

    # free unused memory
    del nd

    # time discrete MLE parameter estimation, averaged over the given number of
    # trials; each trial learns from a fresh random sample of datalength points
    totaltime = 0
    for _ in range(trials):
        data = bn.randomsample(datalength)
        start = time.clock()
        ret = l.discrete_mle_estimateparams(skel, data)
        elapsed = time.clock() - start
        totaltime += elapsed
    totaltime /= trials

    print json.dumps(ret.Vdata, indent=1)
    return totaltime
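# Note on the input file (an assumption based on the libpgm example files, not
# something this script defines): `inputfile` is expected to be a JSON network
# description containing "V" (vertex names), "E" (directed edges), and "Vdata"
# (per-node outcome counts, parents, children, and conditional probability
# tables), as read by NodeData.load() and GraphSkeleton.load().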
def timer(inputfile, trials):
    # load node data and graph skeleton
    nd = NodeData()
    skel = GraphSkeleton()
    nd.load(inputfile)
    skel.load(inputfile)

    # topologically order the graph skeleton
    skel.toporder()

    # load the Bayesian network
    bn = DiscreteBayesianNetwork(skel, nd)

    # time forward sampling of 100 points, averaged over the given number of trials
    totaltime = 0
    for _ in range(trials):
        start = time.clock()
        ret = bn.randomsample(100)
        elapsed = time.clock() - start
        totaltime += elapsed
    totaltime /= trials

    return totaltime
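# A minimal usage sketch, assuming the sampling timer above lives in its own
# benchmark script (if both timer() definitions share one file, the second
# shadows the first). The file name "unittestdict.txt" is borrowed from the
# libpgm examples and is an assumption; point it at any libpgm-format
# discrete-network JSON file.
if __name__ == "__main__":
    avg = timer("unittestdict.txt", 10)
    print "average randomsample(100) time: %f seconds" % avg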