Example #1
 def __eq__(a,b):
     """ True if a and b have same elements, size and names """
     if b.__class__ == na.NumArray:
     # in case b is just a numarray and not a Table instance;
     # in this case the variables must be in exactly the same order,
     # otherwise the Table and the numarray are considered different
         return (na.alltrue(a.cpt.flat == b.flat) \
                 and a.shape == b.shape)
         
     
     elif b is None:
     # in case b is None
         return False
     
     elif isinstance(b, (int, float, long)):
     # b is just a number, int, float, long
         return a.cpt == b
     
     else:
     # b should be a Table or something similar;
     # the order of the variables is not important
         # put the variables in the same order
         # first we must get the correspondence vector:
         bcpt = a.prepareOther(b)
         return (a.names == b.names and \
                 bcpt.shape == a.shape and \
                 na.allclose(bcpt, a.cpt))
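numarray is no longer maintained, so here is a rough numpy-based sketch of the same comparison logic; the Table class below is a hypothetical stand-in for the OpenBayes Table, without its variable-reordering step.

import numpy as np

class Table(object):
    def __init__(self, names, cpt):
        self.names = list(names)
        self.cpt = np.asarray(cpt, dtype=float)

    def __eq__(self, other):
        # np.all plays the role of numarray's alltrue here
        if isinstance(other, np.ndarray):
            # raw array: same shape and every element equal, order matters
            return (self.cpt.shape == other.shape and
                    bool(np.all(self.cpt == other)))
        if other is None:
            return False
        if isinstance(other, (int, float)):
            return bool(np.all(self.cpt == other))
        # another Table: same variable names and numerically close values
        return (self.names == other.names and
                np.allclose(self.cpt, other.cpt))

t1 = Table('ab', np.arange(6).reshape(2, 3))
t2 = Table('ab', np.arange(6).reshape(2, 3))
print(t1 == t2)                          # True
print(t1 == np.arange(6).reshape(2, 3))  # True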
Example #2
def _min_or_max_filter(input, size, footprint, structure, output, mode, cval,
                       origin, minimum):
    if structure is None:
        if footprint is None:
            if size is None:
                raise RuntimeError, "no footprint provided"
            separable = True
        else:
            footprint = numarray.asarray(footprint, numarray.Bool)
            if numarray.alltrue(numarray.ravel(footprint)):
                size = footprint.shape
                footprint = None
                separable = True
            else:
                separable = False
    else:
        structure = numarray.asarray(structure, type=numarray.Float64)
        separable = False
        if footprint is None:
            footprint = numarray.ones(structure.shape, numarray.Bool)
        else:
            footprint = numarray.asarray(footprint, numarray.Bool)
    input = numarray.asarray(input)
    if isinstance(input.type(), numarray.ComplexType):
        raise TypeError, 'Complex type not supported'
    output, return_value = _ni_support._get_output(output, input)
    origins = _ni_support._normalize_sequence(origin, input.rank)
    if separable:
        sizes = _ni_support._normalize_sequence(size, input.rank)
        axes = range(input.rank)
        axes = [(axes[ii], sizes[ii], origins[ii]) for ii in range(len(axes))
                if sizes[ii] > 1]
        if minimum:
            filter = minimum_filter1d
        else:
            filter = maximum_filter1d
        if len(axes) > 0:
            for axis, size, origin in axes:
                filter(input, int(size), axis, output, mode, cval, origin)
                input = output
        else:
            output[...] = input[...]
    else:
        fshape = [ii for ii in footprint.shape if ii > 0]
        if len(fshape) != input.rank:
            raise RuntimeError, 'footprint array has incorrect shape.'
        for origin, lenf in zip(origins, fshape):
            if (lenf // 2 + origin < 0) or (lenf // 2 + origin > lenf):
                raise ValueError, 'invalid origin'
        if not footprint.iscontiguous():
            footprint = footprint.copy()
        if structure is not None:
            if len(structure.shape) != input.rank:
                raise RuntimeError, 'structure array has incorrect shape'
            if not structure.iscontiguous():
                structure = structure.copy()
        mode = _ni_support._extend_mode_to_code(mode)
        _nd_image.min_or_max_filter(input, footprint, structure, output, mode,
                                    cval, origins, minimum)
    return return_value
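This looks like a numarray-era version of the scipy.ndimage min/max filter dispatcher: a plain size (or an all-True footprint) takes the separable 1-D path, any other footprint takes the general path. As a usage sketch against the current scipy.ndimage API (assuming scipy is available):

import numpy as np
from scipy import ndimage

img = np.random.rand(64, 64)

# full rectangular window: size=... is handled by the separable branch
mins_separable = ndimage.minimum_filter(img, size=3)

# non-rectangular footprint: forces the general, non-separable code path
cross = np.array([[0, 1, 0],
                  [1, 1, 1],
                  [0, 1, 0]], dtype=bool)
mins_cross = ndimage.minimum_filter(img, footprint=cross)

print(mins_separable.shape, mins_cross.shape)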
Example #3
    def testEM(self):
        # sample the network 2000 times
        cases = self.BNet.Sample(2000)
        # delete some observations
        for i in range(500):
            case = cases[3*i]
            rand = random.sample(['c','s','r','w'],1)[0]
            case[rand] = '?' 
        for i in range(50):
            case = cases[3*i]
            rand = random.sample(['c','s','r','w'],1)[0]
            case[rand] = '?'

        # create a new BNet with the same nodes as self.BNet but all parameters
        # set to 1s
        G = copy.deepcopy(self.BNet)
        
        G.InitDistributions()
        
        engine = EMLearningEngine(G)
        engine.EMLearning(cases, 10)
        
        tol = 0.08
        assert(na.alltrue([na.allclose(v.distribution.cpt, self.BNet.v[v.name].distribution.cpt, atol=tol) \
               for v in engine.BNet.all_v])), \
                " Learning does not converge to true values "
        print 'ok!!!!!!!!!!!!'
Example #4
    def testMarginalise(self):
        def factorial(n):
            if n == 1:
                return 1
            return factorial(n - 1) * n
        
        var = set('c')
        b = self.a.Marginalise(var)
        var2 = set(['c','a'])
        c = self.a.Marginalise(var2)
        d = DiscretePotential(['b','c'], [3,4], na.arange(12))

        # extended test
        a = DiscretePotential('a b c d e f'.split(), [2,3,4,5,6,7], \
                              na.arange(factorial(7)))
        aa = a.Marginalise('c f a'.split())
      

        assert(b.names == self.a.names - var and \
               b[0,1] == na.sum(self.a[0,1]) and \
               c.names == self.a.names - var2 and \
               na.alltrue(c.cpt.flat == na.sum(na.sum(self.a.cpt,axis=2), axis=0)) and
               aa.shape == (3,5,6) and \
               aa.names_list == 'b d e'.split() and \
               aa[2,4,3] == na.sum(a[:,2,:,4,3,:].flat)), \
               " Marginalisation doesn't work"
Example #5
 def hasntConverged(self, old, new, precision):
     '''
     Return True if the distribution of at least one vertex of the old
     and the new BNet differs by more than precision.
     '''
     if not old:
         return True
     else:
         return not na.alltrue([na.allclose(v.distribution, new.v[v.name].distribution, atol=precision)
                                for v in old.v.values()])
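The same convergence test with plain numpy arrays; the dict-of-arrays layout below is a simplification of BNet.v, not the actual OpenBayes structure.

import numpy as np

def has_converged(old, new, precision):
    # converged when every vertex's distribution is elementwise close
    return all(np.allclose(old[name], new[name], atol=precision)
               for name in old)

old = {'c': np.array([0.5, 0.5]), 's': np.array([0.2, 0.8])}
new = {'c': np.array([0.51, 0.49]), 's': np.array([0.2, 0.8])}
print(has_converged(old, new, precision=0.05))  # True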
Example #7
    def __init__(self, v, cpt = None, isAdjustable=False):
        Distribution.__init__(self, v, isAdjustable=isAdjustable)
        self.distribution_type = "Multinomial"
        
        assert(na.alltrue([v.discrete for v in self.family])), \
              'All nodes in family '+ str(self.names_list)+ ' must be discrete !!!'
        
        self.sizes = [v.nvalues for v in self.family]

        # initialize the cpt
        Table.__init__(self, self.names_list, self.sizes, cpt)

        #Used for Learning
        self.counts = None
Example #8
    def InitDistribution(self, *args, **kwargs):
        """ Initialise the distribution, all edges must be added"""
        # first decide which type of Distribution to use:
        # if all nodes are discrete, then Multinomial
        if na.alltrue([v.discrete for v in self.family]):
            #print self.name,'Multinomial'
            # FIX: should be able to pass 'isAdjustable=True' through and have it work
            self.distribution = distributions.MultinomialDistribution(self, *args, **kwargs) 
            return

        #gaussian distribution
        if not self.discrete:
            #print self.name,'Gaussian'
            self.distribution = distributions.Gaussian_Distribution(self, *args, **kwargs)
            return
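A minimal sketch of the dispatch rule above: Multinomial when every node in the family is discrete, Gaussian otherwise. The Node class and pick_distribution helper are hypothetical, not part of the library.

import numpy as np

class Node(object):
    def __init__(self, name, discrete):
        self.name, self.discrete = name, discrete

def pick_distribution(family):
    # np.all plays the role of na.alltrue over the list of flags
    if np.all([v.discrete for v in family]):
        return 'Multinomial'
    return 'Gaussian'

family = [Node('c', True), Node('x', False)]
print(pick_distribution(family))  # Gaussian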
Example #9
 def testML(self):
     # sample the network 2000 times
     cases = self.BNet.Sample(2000)
     
     # create a new BNet with the same nodes as self.BNet but all parameters
     # set to 1s
     G = copy.deepcopy(self.BNet)
     
     G.InitDistributions()
     
     # create an inference engine
     engine = JoinTree(G)
     
     # learn according to the test cases
     engine.LearnMLParams(cases)
     
     tol = 0.05
     assert(na.alltrue([na.allclose(v.distribution.cpt, self.BNet.v[v.name].distribution.cpt, atol=tol) \
            for v in G.all_v])), \
             " Learning does not converge to true values "
Example #10
 def Marginalise(self, vname, samples = None):
     # 1. Sample the network N times
     if not samples:
         # if no samples are given, get them
         samples = self.BNet.Sample(self.N)
     
     # 2. Create the distribution that will be returned
     v = self.BNet.v[vname]        # the variable
     vDist = v.GetSamplingDistribution()
     vDist.initializeCounts()                 # set all 0s
     
     # 3. Count the number of occurrences of vname = i
     #    for each possible value of i, that respects the evidence
     for s in samples:
         if na.alltrue([s[e] == i for e, i in self.evidence.items()]): 
             # this sample respects the evidence,
             # add one to the corresponding instance of the variable
             vDist.incrCounts(s)
     
     vDist.setCounts()    #apply the counts as the distribution
     vDist.normalize()    #normalize to obtain a probability
     
     return vDist
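The counting step above is rejection-style: only samples consistent with the evidence are kept, then the values of vname are tallied and normalized. A small sketch with plain dicts (the sample format, mapping variable name to state index, is a simplification):

import numpy as np

def marginal_counts(samples, vname, nvalues, evidence):
    counts = np.zeros(nvalues)
    for s in samples:
        # keep only samples that agree with every piece of evidence
        if all(s[e] == val for e, val in evidence.items()):
            counts[s[vname]] += 1
    total = counts.sum()
    return counts / total if total else counts

samples = [{'c': 0, 'r': 1}, {'c': 1, 'r': 1}, {'c': 0, 'r': 0}]
print(marginal_counts(samples, 'c', 2, evidence={'r': 1}))  # [0.5 0.5]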
Example #11
 def testDelegate(self):
     assert (na.alltrue(self.a.flat == self.a.cpt.flat)), \
            " Delegation does not work check __getattr__"
Example #12
def all(a, axis=None):
    '''Numpy-compatible version of all()'''
    if axis is None:
        return _all(a)
    return alltrue(a, axis)
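In current numpy, np.all covers both branches of this wrapper (alltrue was an alias for all and is deprecated in recent releases):

import numpy as np

a = np.array([[True, True], [True, False]])
print(np.all(a))          # False: reduces over every element
print(np.all(a, axis=0))  # [ True False]: reduces along axis 0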
Example #13
 def __eq__(self, other):
     return bool(alltrue(self.pos == other.pos))