def ExperienceAnalysis(self, cn, cnet):
    '''
    Calculate the experience for the node named in cn.

    cn   --> name (str) of the node to analyze
    cnet --> pointer to an open Netica net containing the node

    Returns an experience object holding the parent node names, the
    cartesian product of parent states, and the experience value for
    each parent-state configuration.
    '''
    cnex = experience()
    # get a handle to the node; the C API needs an encoded char pointer
    # (fix: was passing the raw Python str)
    testnode = self.GetNodeNamed(ct.c_char_p(cn.encode()), cnet)
    # start a list for the cartesian sum of node states
    allstates = list()
    cparents = self.GetNodeParents(testnode)
    numnodes = self.LengthNodeList(cparents)
    for cp in np.arange(numnodes):
        # append the name to the list of returned names
        # (fix: index wrapped in ct.c_int for the C API)
        cnode = self.NthNode(cparents, ct.c_int(cp))
        cnex.parent_names.append(
            cth.c_char_p2str(self.GetNodeName(cnode)))
        # find the number of states for each parent
        allstates.append(np.arange(self.GetNodeNumberStates(
            self.NthNode(cparents, ct.c_int(cp)))))
    # a single parent needs no cartesian expansion
    if numnodes > 1:
        cnex.parent_states = self.cartesian(allstates)
    else:
        cnex.parent_states = allstates
    for cs in cnex.parent_states:
        # fix: method name was misspelled "GetnodeExperience"
        cnex.node_experience.append(self.GetNodeExperience(
            testnode, cs.ctypes.data_as(ct.POINTER(ct.c_int))))
    cnex.node_experience = np.array(cnex.node_experience)
    # change the null pointers (meaning no experience) to zero
    cnex.node_experience[cnex.node_experience < 1] = 0.0
    return cnex
def deleteExtraNodes(self, netName, netOut, keepNodes):
    '''
    Remove every node in the net stored in netName whose name is not
    listed in keepNodes, then compile the pruned net and write it to
    the file netOut.
    '''
    # open the net stored in netName
    net = self.pyt.OpenNeticaNet(netName)
    # pull the full node list and its length
    node_list = self.pyt.GetNetNodes(net)
    n_nodes = self.pyt.LengthNodeList(node_list)
    # walk the nodes, deleting any whose name is not in the keep list
    for idx in np.arange(n_nodes):
        node = self.pyt.NthNode(node_list, ct.c_int(idx))
        node_name = cth.c_char_p2str(self.pyt.GetNodeName(node))
        # cross check the name against the keepers
        if node_name not in keepNodes:
            self.pyt.DeleteNode(node)
    # open an output streamer, compile, and write the net to disk
    streamer = self.pyt.NewFileStreamer(ct.c_char_p(netOut.encode()))
    self.pyt.CompileNet(net)
    vprint(3, self.pyt.verboselvl,
           "Writing new net with correct nodes to: {0:s}".format(netOut))
    self.pyt.WriteNet(net, streamer)
    self.pyt.DeleteStream(streamer)
    self.pyt.DeleteNet(net)
def ExperienceAnalysis(self, cn, cnet):
    '''
    Compute the experience table for the node named cn in the open
    net cnet.

    Returns an experience object with the parent node names, every
    combination of parent states, and the experience recorded for each
    combination (null-pointer results, values < 1, are zeroed).
    '''
    result = experience()
    # locate the node of interest; the C API wants an encoded char pointer
    target = self.GetNodeNamed(ct.c_char_p(cn.encode()), cnet)
    parents = self.GetNodeParents(target)
    n_parents = self.LengthNodeList(parents)
    # per-parent arrays of state indices, feeding the cartesian product
    state_ranges = list()
    for idx in np.arange(n_parents):
        parent = self.NthNode(parents, ct.c_int(idx))
        # record the parent's name
        result.parent_names.append(
            cth.c_char_p2str(self.GetNodeName(parent)))
        # record the range of state indices for this parent
        n_states = self.GetNodeNumberStates(
            self.NthNode(parents, ct.c_int(idx)))
        state_ranges.append(np.arange(n_states))
    # a single parent needs no cartesian expansion
    if n_parents > 1:
        result.parent_states = self.cartesian(state_ranges)
    else:
        result.parent_states = state_ranges
    for combo in result.parent_states:
        result.node_experience.append(
            self.GetNodeExperience(
                target, combo.ctypes.data_as(ct.POINTER(ct.c_int))))
    result.node_experience = np.array(result.node_experience)
    # null pointers come back as values < 1; treat them as no experience
    result.node_experience[result.node_experience < 1] = 0.0
    return result
def NodeParentIndexing(self, netName, casfile): ''' Find all the configurations of states in the parent nodes for each response node This is used only for ''' # open the net stored in netName cnet = self.pyt.OpenNeticaNet(netName) #get the nodes and their number allnodes = self.pyt.GetNetNodes(cnet) numnodes = self.pyt.LengthNodeList(allnodes) #parent indices dictionary for the results parent_indices = dict() # now focus in on the response nodes only respnodes = self.probpars.scenario.response for cr in respnodes: parent_indices[cr] = parent_inds() crespnode = self.pyt.GetNodeNamed(ct.c_char_p(cr.encode()), cnet) # get the parent nodes and their names cparents = self.pyt.GetNodeParents(crespnode) numparents = self.pyt.LengthNodeList(cparents) for cp in np.arange(numparents): tmpnode = self.pyt.NthNode(cparents, ct.c_int(cp)) parent_indices[cr].parent_names.append( cth.c_char_p2str(self.pyt.GetNodeName(tmpnode))) # open a streamer to the CAS file we will read over cas_streamer = self.pyt.NewFileStreamer(ct.c_char_p(casfile.encode())) # loop over the cases for ccas in np.arange(self.N): if ccas == 0: case_posn = pnC.netica_const.FIRST_CASE else: case_posn = pnC.netica_const.NEXT_CASE # first set the findings according to what's in the case file # case_posn_out = self.pyt.ReadNetFindings2(case_posn, cas_streamer, allnodes) # now, for each parent, in order, read the states for cr in respnodes: tmpinds = list() for cp in parent_indices[cr].parent_names: cnode = self.pyt.GetNodeNamed(ct.c_char_p(cp.encode()), cnet) tmpinds.append(self.pyt.GetNodeFinding(cnode)) parent_indices[cr].parent_indices.append(tmpinds) for cr in respnodes: vprint(3, self.pyt.verboselvl, 'making into an array --> %s' % (cr)) parent_indices[cr].parent_indices = np.array( parent_indices[cr].parent_indices, dtype=int) # clean up the temporary streamer and net self.pyt.DeleteNet(cnet) self.pyt.DeleteStream(cas_streamer) self.parent_inds = parent_indices
def ReadNodeInfo(self, netName):
    '''
    Read in all information on beliefs, states, and likelihoods for
    all nodes in the net called netName.

    Returns a dict mapping node name -> nodestruct.
    '''
    # open the net stored in netName
    net = self.OpenNeticaNet(netName)
    # grab the node list and its length
    node_list = self.GetNetNodes(net)
    n_nodes = self.LengthNodeList(node_list)
    vprint(3, self.verboselvl,
           'Reading Node information from net --> {0:s}'.format(netName))
    nodes_out = dict()
    # walk every node in the net
    for idx in np.arange(n_nodes):
        node = self.NthNode(node_list, ct.c_int(idx))
        name = cth.c_char_p2str(self.GetNodeName(node))
        nodes_out[name] = nodestruct()
        entry = nodes_out[name]
        entry.name = cth.c_char_p2str(
            self.GetNodeName(node))
        entry.title = cth.c_char_p2str(
            self.GetNodeTitle(node))
        vprint(3, self.verboselvl,
               '   Parsing node --> %s' % (entry.title))
        # pull belief, likelihood, and level information for the node
        entry.Nbeliefs = self.GetNodeNumberStates(node)
        entry.beliefs = cth.c_float_p2float(
            self.GetNodeBeliefs(node), entry.Nbeliefs)
        entry.likelihood = cth.c_float_p2float(
            self.GetNodeLikelihood(node), entry.Nbeliefs)
        entry.levels = cth.c_double_p2float(
            self.GetNodeLevels(node), entry.Nbeliefs + 1)
        # walk the states in this node and record their names
        for s_idx in range(entry.Nbeliefs):
            entry.state.append(statestruct())
            entry.state[-1].name = cth.c_char_p2str(
                self.GetNodeStateName(node, ct.c_int(s_idx)))
    self.DeleteNet(net)
    return nodes_out
def NodeParentIndexing(self, netName, casfile):
    '''
    Find all the configurations of states in the parent nodes for each
    response node in the net stored in netName.

    netName --> file name of the net to open
    casfile --> name of the CAS (case) file with the cases to read

    Stores the results on self.parent_inds: a dict keyed by response
    node name whose values are parent_inds objects holding parent_names
    and an integer array of parent state findings (one row per case).
    '''
    # open the net stored in netName
    cnet = self.pyt.OpenNeticaNet(netName)
    # get the nodes and their number
    allnodes = self.pyt.GetNetNodes(cnet)
    numnodes = self.pyt.LengthNodeList(allnodes)
    # parent indices dictionary for the results
    parent_indices = dict()
    # now focus in on the response nodes only
    respnodes = self.probpars.scenario.response
    for cr in respnodes:
        parent_indices[cr] = parent_inds()
        # fix: node names must reach the C API as encoded char pointers
        crespnode = self.pyt.GetNodeNamed(ct.c_char_p(cr.encode()), cnet)
        # get the parent nodes and their names
        cparents = self.pyt.GetNodeParents(crespnode)
        numparents = self.pyt.LengthNodeList(cparents)
        for cp in np.arange(numparents):
            # fix: index wrapped in ct.c_int for the C API
            tmpnode = self.pyt.NthNode(cparents, ct.c_int(cp))
            parent_indices[cr].parent_names.append(
                cth.c_char_p2str(self.pyt.GetNodeName(tmpnode)))
    # open a streamer to the CAS file we will read over
    cas_streamer = self.pyt.NewFileStreamer(ct.c_char_p(casfile.encode()))
    # loop over the cases
    for ccas in np.arange(self.N):
        # first case uses FIRST_CASE; subsequent reads advance with NEXT_CASE
        if ccas == 0:
            case_posn = pnC.netica_const.FIRST_CASE
        else:
            case_posn = pnC.netica_const.NEXT_CASE
        # first set the findings according to what's in the case file
        case_posn_out = self.pyt.ReadNetFindings2(case_posn,
                                                  cas_streamer, allnodes)
        # now, for each parent, in order, read the states
        for cr in respnodes:
            tmpinds = list()
            for cp in parent_indices[cr].parent_names:
                cnode = self.pyt.GetNodeNamed(ct.c_char_p(cp.encode()), cnet)
                tmpinds.append(self.pyt.GetNodeFinding(cnode))
            parent_indices[cr].parent_indices.append(tmpinds)
    # convert the per-case lists to integer arrays, one per response node
    for cr in respnodes:
        # fix: Python-2 print statement replaced with the module's vprint
        vprint(3, self.pyt.verboselvl,
               'making into an array --> %s' % (cr))
        parent_indices[cr].parent_indices = np.array(
            parent_indices[cr].parent_indices, dtype=int)
    # clean up the temporary streamer and net
    self.pyt.DeleteNet(cnet)
    self.pyt.DeleteStream(cas_streamer)
    self.parent_inds = parent_indices
def ReadNodeInfo(self, netName):
    '''
    Read in all information on beliefs, states, and likelihoods for all
    nodes in the net called netName.

    Returns a dict mapping node name -> nodestruct.
    '''
    # open the net stored in netName
    cnet = self.OpenNeticaNet(netName)
    # get the nodes and their number
    allnodes = self.GetNetNodes(cnet)
    numnodes = self.LengthNodeList(allnodes)
    # fix: Python-2 print statements replaced with the module's vprint
    vprint(3, self.verboselvl,
           'Reading Node information from net --> {0:s}'.format(netName))
    cNETNODES = dict()
    # loop over the nodes
    for cn in np.arange(numnodes):
        # fix: index wrapped in ct.c_int for the C API
        cnode = self.NthNode(allnodes, ct.c_int(cn))
        cnodename = cth.c_char_p2str(self.GetNodeName(cnode))
        cNETNODES[cnodename] = nodestruct()
        cNETNODES[cnodename].name = cth.c_char_p2str(
            self.GetNodeName(cnode))
        cNETNODES[cnodename].title = cth.c_char_p2str(
            self.GetNodeTitle(cnode))
        vprint(3, self.verboselvl,
               '   Parsing node --> %s' % (cNETNODES[cnodename].title))
        # pull belief, likelihood, and level information for the node
        cNETNODES[cnodename].Nbeliefs = self.GetNodeNumberStates(cnode)
        cNETNODES[cnodename].beliefs = cth.c_float_p2float(
            self.GetNodeBeliefs(cnode), cNETNODES[cnodename].Nbeliefs)
        cNETNODES[cnodename].likelihood = cth.c_float_p2float(
            self.GetNodeLikelihood(cnode), cNETNODES[cnodename].Nbeliefs)
        cNETNODES[cnodename].levels = cth.c_double_p2float(
            self.GetNodeLevels(cnode), cNETNODES[cnodename].Nbeliefs + 1)
        # loop over the states in each node
        for cs in range(cNETNODES[cnodename].Nbeliefs):
            cNETNODES[cnodename].state.append(statestruct())
            cNETNODES[cnodename].state[-1].name = cth.c_char_p2str(
                self.GetNodeStateName(cnode, ct.c_int(cs)))
    self.DeleteNet(cnet)
    return cNETNODES
def predictBayes(self, netName, N, casdata):
    '''
    Make predictions with a built net.

    netName --> name of the built net to make predictions on
    N       --> number of cases (rows) in casdata
    casdata --> per-node arrays of case values, keyed by node name

    Returns (cpred, cNETNODES): a dict of predictions objects for each
    response node, and the nodestruct dict read from the net.
    '''
    # first read in the information about a Net's nodes
    cNETNODES = self.pyt.ReadNodeInfo(netName)
    # initialize dictionary of predictions objects
    cpred = dict()
    # fix: Python-2 print statements replaced with the module's vprint
    vprint(3, self.pyt.verboselvl,
           "Making predictions for net named --> {0:s}".format(netName))
    cnet = self.pyt.OpenNeticaNet(netName)
    # retract all the findings
    self.pyt.RetractNetFindings(cnet)
    # set up a predictions object for each response node
    for CN in cNETNODES:
        CNODES = cNETNODES[CN]
        Cname = CNODES.name
        if Cname in self.probpars.scenario.response:
            cpred[Cname] = predictions()
            cpred[Cname].stats = pred_stats()
            Nbins = CNODES.Nbeliefs
            cpred[Cname].pdf = np.zeros((N, Nbins))
            cpred[Cname].ranges = np.array(CNODES.levels)
            # get plottable ranges
            if Nbins < len(CNODES.levels):
                # continuous, so plot bin centers
                CNODES.continuous = True
                cpred[Cname].continuous = True
                cpred[Cname].rangesplt = (
                    cpred[Cname].ranges[1:] -
                    0.5 * np.diff(cpred[Cname].ranges))
            else:
                # discrete so just use the bin values
                cpred[Cname].rangesplt = cpred[Cname].ranges.copy()
            cpred[Cname].priorPDF = CNODES.beliefs
    allnodes = self.pyt.GetNetNodes(cnet)
    numnodes = self.pyt.LengthNodeList(allnodes)
    #
    # Now loop over each input and get the Netica predictions
    #
    for i in np.arange(N):
        # fix: progress indicator only emitted at high verbosity
        if self.pyt.verboselvl >= 3:
            sys.stdout.write('predicting value {0} of {1}\r'.format(i, N))
            sys.stdout.flush()
        # first have to enter the values for each node
        # retract all the findings again
        self.pyt.RetractNetFindings(cnet)
        for cn in np.arange(numnodes):
            # fix: index and value wrapped in ctypes for the C API
            cnode = self.pyt.NthNode(allnodes, ct.c_int(cn))
            cnodename = cth.c_char_p2str(self.pyt.GetNodeName(cnode))
            # set the current node values
            if cnodename in self.probpars.scenario.nodesIn:
                self.pyt.EnterNodeValue(
                    cnode, ct.c_double(casdata[cnodename][i]))
        for cn in np.arange(numnodes):
            # obtain the updated beliefs from ALL nodes including
            # input and output
            cnode = self.pyt.NthNode(allnodes, ct.c_int(cn))
            cnodename = cth.c_char_p2str(self.pyt.GetNodeName(cnode))
            if cnodename in self.probpars.scenario.response:
                # get the current belief values
                cpred[cnodename].pdf[i, :] = cth.c_float_p2float(
                    self.pyt.GetNodeBeliefs(cnode),
                    self.pyt.GetNodeNumberStates(cnode))
    #
    # Do some postprocessing for just the output nodes
    #
    currstds = np.ones((N, 1)) * 1.0e-16
    for i in self.probpars.scenario.response:
        vprint(3, self.pyt.verboselvl,
               'postprocessing output node --> {0:s}'.format(i))
        # record whether the node is continuous or discrete
        if cpred[i].continuous:
            curr_continuous = 'continuous'
        else:
            curr_continuous = 'discrete'
        pdfRanges = cpred[i].ranges
        cpred[i].z = np.atleast_2d(casdata[i]).T
        pdfParam = np.hstack((cpred[i].z, currstds))
        pdfData = statfuns.makeInputPdf(pdfRanges, pdfParam, 'norm',
                                        curr_continuous)
        cpred[i].probModelUpdate = np.nansum(pdfData * cpred[i].pdf, 1)
        cpred[i].probModelPrior = np.nansum(
            pdfData * np.tile(cpred[i].priorPDF, (N, 1)), 1)
        # note --> np.spacing(1) is like eps in MATLAB
        cpred[i].logLikelihoodRatio = (
            np.log10(cpred[i].probModelUpdate + np.spacing(1)) -
            np.log10(cpred[i].probModelPrior + np.spacing(1)))
        cpred[i].dataPDF = pdfData.copy()
        # get the PDF stats here
        vprint(3, self.pyt.verboselvl, 'getting stats')
        cpred = self.PDF2Stats(i, cpred, alpha=0.1)
    self.pyt.DeleteNet(cnet)
    return cpred, cNETNODES
def predictBayes(self, netName, N, casdata):
    '''
    Make predictions with a built net.

    netName --> name of the built net to make predictions on
    N       --> number of cases (rows) in casdata
    casdata --> per-node arrays of case values, keyed by node name

    Returns (cpred, cNETNODES): a dict of predictions objects for each
    response node, and the nodestruct dict read from the net.
    '''
    # first read in the information about a Net's nodes
    cNETNODES = self.pyt.ReadNodeInfo(netName)
    '''
    Initialize output
    '''
    # initialize dictionary of predictions objects
    cpred = dict()
    vprint(3, self.pyt.verboselvl,
           'Making predictions for net named --> {0:s}'.format(netName))
    cnet = self.pyt.OpenNeticaNet(netName)
    # retract all the findings
    self.pyt.RetractNetFindings(cnet)
    # set up a predictions object for each response node
    for CN in cNETNODES:
        CNODES = cNETNODES[CN]
        Cname = CNODES.name
        if Cname in self.probpars.scenario.response:
            cpred[Cname] = predictions()
            cpred[Cname].stats = pred_stats()
            Nbins = CNODES.Nbeliefs
            cpred[Cname].pdf = np.zeros((N, Nbins))
            cpred[Cname].ranges = np.array(CNODES.levels)
            # get plottable ranges
            if Nbins < len(CNODES.levels):
                # continuous, so plot bin centers
                CNODES.continuous = True
                cpred[Cname].continuous = True
                cpred[Cname].rangesplt = (
                    cpred[Cname].ranges[1:] -
                    0.5 * np.diff(cpred[Cname].ranges))
            else:
                # discrete so just use the bin values
                cpred[Cname].rangesplt = cpred[Cname].ranges.copy()
            cpred[Cname].priorPDF = CNODES.beliefs
    allnodes = self.pyt.GetNetNodes(cnet)
    numnodes = self.pyt.LengthNodeList(allnodes)
    #
    # Now loop over each input and get the Netica predictions
    #
    for i in np.arange(N):
        # progress indicator is only shown at high verbosity
        if self.pyt.verboselvl >= 3:
            sys.stdout.write('predicting value {0} of {1}\r'.format(i, N))
            sys.stdout.flush()
        # first have to enter the values for each node
        # retract all the findings again
        self.pyt.RetractNetFindings(cnet)
        for cn in np.arange(numnodes):
            cnode = self.pyt.NthNode(allnodes, ct.c_int(cn))
            cnodename = cth.c_char_p2str(self.pyt.GetNodeName(cnode))
            # set the current node values
            if cnodename in self.probpars.scenario.nodesIn:
                self.pyt.EnterNodeValue(cnode,
                                        ct.c_double(casdata[cnodename][i]))
        for cn in np.arange(numnodes):
            # obtain the updated beliefs from ALL nodes including input and output
            cnode = self.pyt.NthNode(allnodes, ct.c_int(cn))
            cnodename = cth.c_char_p2str(self.pyt.GetNodeName(cnode))
            if cnodename in self.probpars.scenario.response:
                # get the current belief values
                cpred[cnodename].pdf[i, :] = cth.c_float_p2float(
                    self.pyt.GetNodeBeliefs(cnode),
                    self.pyt.GetNodeNumberStates(cnode))
    #
    # Do some postprocessing for just the output nodes
    #
    # tiny fixed standard deviation used for the observation PDFs
    currstds = np.ones((N, 1)) * 1.0e-16
    for i in self.probpars.scenario.response:
        vprint(3, self.pyt.verboselvl,
               'postprocessing output node --> {0:s}'.format(i))
        # record whether the node is continuous or discrete
        if cpred[i].continuous:
            curr_continuous = 'continuous'
        else:
            curr_continuous = 'discrete'
        # these ranges may throw overflow warnings as e.g., <25 is
        # implemented as -(really big number)*10^308 to 25
        pdfRanges = cpred[i].ranges
        cpred[i].z = np.atleast_2d(casdata[i]).T
        pdfParam = np.hstack((cpred[i].z, currstds))
        pdfData = statfuns.makeInputPdf(pdfRanges, pdfParam, 'norm',
                                        curr_continuous)
        cpred[i].probModelUpdate = np.nansum(pdfData * cpred[i].pdf, 1)
        cpred[i].probModelPrior = np.nansum(
            pdfData * np.tile(cpred[i].priorPDF, (N, 1)), 1)
        cpred[i].logLikelihoodRatio = (
            np.log10(cpred[i].probModelUpdate + np.spacing(1)) -
            np.log10(cpred[i].probModelPrior + np.spacing(1)))
        cpred[i].dataPDF = pdfData.copy()
        # note --> np.spacing(1) is like eps in MATLAB
        # get the PDF stats here
        vprint(3, self.pyt.verboselvl, 'getting stats')
        cpred = self.PDF2Stats(i, cpred, alpha=0.1)
    self.pyt.DeleteNet(cnet)
    return cpred, cNETNODES
def chkerr(self, err_severity=pnC.errseverity_ns_const.ERROR_ERR):
    '''
    Check the Netica error state; if an error of at least err_severity
    is present, shut down Netica and raise NeticaException with the
    reported message.
    '''
    # nothing to do when no error of this severity has been reported
    if not self.GetError(err_severity):
        return
    msg = ("\npythonNeticaUtils: \nError " +
           cth.c_char_p2str(
               self.ErrorMessage(self.GetError(err_severity))))
    # release the Netica environment before raising
    self.CloseNetica()
    raise NeticaException(msg)