Example #1
    def __init__(self,
                 eval,
                 doc,
                 name,
                 parents,
                 dtype=None,
                 trace=True,
                 cache_depth=2,
                 plot=None,
                 verbose=None,
                 jacobians={},
                 jacobian_formats={}):
        self.ParentDict = ParentDict

        # This function gets used to evaluate self's value.
        self._eval_fun = eval
        self._jacobian_functions = jacobians
        self._jacobian_formats = jacobian_formats

        Variable.__init__(self,
                          doc=doc,
                          name=name,
                          parents=parents,
                          cache_depth=cache_depth,
                          dtype=dtype,
                          trace=trace,
                          plot=plot,
                          verbose=verbose)
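
This constructor stores the user-supplied `eval` callable as `self._eval_fun` and forwards everything else to `Variable.__init__`. As a rough usage sketch (the `Deterministic` class name is an assumption; only the call pattern is taken from the signature above), the `eval` function is expected to take the parent values as keyword arguments, so its parameter names should match the keys of the `parents` dict:

# Hypothetical usage sketch -- the function's parameter names line up with
# the keys of the parents dict it will be evaluated against.
def area(width=None, height=None):
    return width * height

parents = {'width': 2.0, 'height': 3.0}

# The node would then be built through the __init__ shown above, roughly:
# node = Deterministic(eval=area, doc='width * height', name='area',
#                      parents=parents)

# and its stored _eval_fun evaluated against the parent values:
print(area(**parents))  # -> 6.0
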
Example #2
    def __init__(
        self,
        eval,
        doc,
        name,
        parents,
        dtype=None,
        trace=True,
        cache_depth=2,
        plot=None,
        verbose=None,
        jacobians={},
        jacobian_formats={},
    ):
        self.ParentDict = ParentDict

        # This function gets used to evaluate self's value.
        self._eval_fun = eval
        self._jacobian_functions = jacobians
        self._jacobian_formats = jacobian_formats

        Variable.__init__(
            self,
            doc=doc,
            name=name,
            parents=parents,
            cache_depth=cache_depth,
            dtype=dtype,
            trace=trace,
            plot=plot,
            verbose=verbose,
        )
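
Example #2 is the same constructor as Example #1, only reformatted. One detail worth noting in both: `jacobians={}` and `jacobian_formats={}` are mutable default arguments, so every instance built without explicit values shares the same two dict objects. If per-instance dicts are wanted, the usual idiom is a `None` sentinel; a minimal sketch on a toy function (this is not a change the library itself makes):

# The None-sentinel idiom for mutable defaults, shown on a toy function.
def make_node(jacobians=None, jacobian_formats=None):
    if jacobians is None:
        jacobians = {}          # a fresh dict per call
    if jacobian_formats is None:
        jacobian_formats = {}
    return jacobians, jacobian_formats

a, _ = make_node()
b, _ = make_node()
assert a is not b               # no accidental sharing between calls
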
Example #3
    def addVarNode(self, nid, i, j):
        """
        This function creates a variable node and adds it to the 'variables' list.

        :param nid: node id.
        :param i:   row index of the variable.
        :param j:   column index of the variable.

        :returns: variable node.
        """

        new_var = Variable('X', nid, i, j, self.damp)
        self.variables.append(new_var)
        self.var_count += 1

        return new_var
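
`addVarNode` builds a `Variable` node (a graph node type here, unrelated to the PyMC-style `Variable` in the other examples), appends it to `self.variables`, and bumps `self.var_count`. The self-contained sketch below shows how it is typically driven to create one node per cell of a grid; the `Graph` container, the stub `Variable`, and the grid size are all assumptions made for illustration:

# Illustrative driver (all names except addVarNode's signature are assumed).
class Variable(object):
    def __init__(self, kind, nid, i, j, damp):
        self.kind, self.nid, self.i, self.j, self.damp = kind, nid, i, j, damp

class Graph(object):
    def __init__(self, damp=0.5):
        self.damp = damp
        self.variables = []
        self.var_count = 0

    def addVarNode(self, nid, i, j):
        new_var = Variable('X', nid, i, j, self.damp)
        self.variables.append(new_var)
        self.var_count += 1
        return new_var

g = Graph()
rows, cols = 3, 4
for i in range(rows):
    for j in range(cols):
        g.addVarNode(g.var_count, i, j)   # running count doubles as node id
print(g.var_count)  # -> 12
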
Example #4
    def __init__(self,
                 logp,
                 doc,
                 name,
                 parents,
                 random=None,
                 trace=True,
                 value=None,
                 dtype=None,
                 rseed=False,
                 observed=False,
                 cache_depth=2,
                 plot=None,
                 verbose=None,
                 isdata=None):

        self.counter = Counter()
        self.ParentDict = ParentDict

        # Support legacy 'isdata' for a while
        if isdata is not None:
            print "Deprecation Warning: the 'isdata' flag has been replaced by 'observed'. Please update your model accordingly."
            self.observed = isdata

        # A flag indicating whether self's value has been observed.
        self._observed = observed
        if observed and value is None:
            raise ValueError, 'Stochastic %s must be given an initial value if observed=True.'%name

        # This function will be used to evaluate self's log probability.
        self._logp_fun = logp

        # This function will be used to draw values for self conditional on self's parents.
        self._random = random

        # A seed for self's rng. If provided, the initial value will be drawn. Otherwise it's
        # taken from the constructor.
        self.rseed = rseed

        self.errmsg = "Stochastic %s's value is outside its support,\n or it forbids its parents' current values."%name

        dtype = np.dtype(dtype)

        # Initialize value, either from value provided or from random function.
        try:
            if dtype.kind != 'O' and value is not None:
                self._value = asanyarray(value, dtype=dtype)
                self._value.flags['W']=False
            else:
                self._value = value
        except:
            cls, inst, tb = sys.exc_info()
            new_inst = cls('Stochastic %s: Failed to cast initial value to required dtype.\n\nOriginal error message:\n'%name + inst.message)
            raise cls, new_inst, tb

        # Store the shape of the stochastic value
        self._shape = np.shape(self._value)

        Variable.__init__(self,
                          doc=doc,
                          name=name,
                          parents=parents,
                          cache_depth=cache_depth,
                          trace=trace,
                          dtype=dtype,
                          plot=plot,
                          verbose=verbose)

        # self._logp.force_compute()

        # Store the shape of the stochastic value
        self._shape = np.shape(self._value)

        if isinstance(self._value, ndarray):
            self._value.flags['W'] = False

        # Check initial value
        if not isinstance(self.logp, float):
            raise ValueError, "Stochastic " + self.__name__ + "'s initial log-probability is %s, should be a float." %self.logp.__repr__()
Example #5
    def __init__(self,
                 logp,
                 doc,
                 name,
                 parents,
                 random=None,
                 trace=True,
                 value=None,
                 dtype=None,
                 rseed=False,
                 observed=False,
                 cache_depth=2,
                 plot=None,
                 verbose=None,
                 isdata=None,
                 check_logp=True,
                 logp_partial_gradients={}):

        self.counter = Counter()
        self.ParentDict = ParentDict

        # Support legacy 'isdata' for a while
        if isdata is not None:
            print "Deprecation Warning: the 'isdata' flag has been replaced by 'observed'. Please update your model accordingly."
            self.observed = isdata

        # A flag indicating whether self's value has been observed.
        self._observed = observed
        # Default value of None for mask
        self._mask = None
        if observed:

            if value is None:
                raise ValueError, 'Stochastic %s must be given an initial value if observed=True.' % name

            try:

                # If there are missing values, store mask to missing elements
                self._mask = value.mask

                # Set to value of mean of observed data
                value.fill_value = value.mean()
                value = value.filled()

                # Set observed flag to False, so that missing values will update
                self._observed = False

            except AttributeError:
                # Must not have missing values
                pass

        # This function will be used to evaluate self's log probability.
        self._logp_fun = logp

        # This function will be used to evaluate self's gradient of log probability.
        self._logp_partial_gradient_functions = logp_partial_gradients

        # This function will be used to draw values for self conditional on self's parents.
        self._random = random

        # A seed for self's rng. If provided, the initial value will be drawn. Otherwise it's
        # taken from the constructor.
        self.rseed = rseed

        self.errmsg = "Stochastic %s's value is outside its support,\n or it forbids its parents' current values." % name

        dtype = np.dtype(dtype)

        # Initialize value, either from value provided or from random function.
        try:
            if dtype.kind != 'O' and value is not None:
                self._value = asanyarray(value, dtype=dtype)
                self._value.flags['W'] = False
            else:
                self._value = value
        except:
            cls, inst, tb = sys.exc_info()
            new_inst = cls(
                'Stochastic %s: Failed to cast initial value to required dtype.\n\nOriginal error message:\n'
                % name + inst.message)
            raise cls, new_inst, tb

        # Store the shape of the stochastic value
        self._shape = np.shape(self._value)

        Variable.__init__(self,
                          doc=doc,
                          name=name,
                          parents=parents,
                          cache_depth=cache_depth,
                          trace=trace,
                          dtype=dtype,
                          plot=plot,
                          verbose=verbose)

        # self._logp.force_compute()

        self._shape = np.shape(self._value)

        if isinstance(self._value, ndarray):
            self._value.flags['W'] = False

        if check_logp:
            # Check initial value
            if not isinstance(self.logp, float):
                raise ValueError, "Stochastic " + self.__name__ + "'s initial log-probability is %s, should be a float." % self.logp.__repr__(
                )
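
Compared with Example #4, this version adds `check_logp`, `logp_partial_gradients`, and handling of observed data with missing entries: if `value` is a NumPy masked array, its mask is kept in `self._mask`, the masked slots are filled with the mean of the observed entries, and `_observed` is flipped back to `False` so those slots can still be updated; the `try/except AttributeError` falls through for plain arrays, which have no `.mask`. The stand-alone snippet below just replays that fill step on a made-up vector:

import numpy as np
import numpy.ma as ma

value = ma.masked_invalid([1.0, 2.0, np.nan, 4.0])   # one missing entry

mask = value.mask                 # what the constructor keeps as self._mask
value.fill_value = value.mean()   # mean of the observed entries only
filled = value.filled()           # missing slot replaced by that mean

print(mask)    # [False False  True False]
print(filled)  # [1. 2. 2.333... 4.]
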
Example #6
    def __init__(
        self,
        logp,
        doc,
        name,
        parents,
        random=None,
        trace=True,
        value=None,
        dtype=None,
        rseed=False,
        observed=False,
        cache_depth=2,
        plot=None,
        verbose=None,
        isdata=None,
        check_logp=True,
        logp_partial_gradients={},
    ):

        self.counter = Counter()
        self.ParentDict = ParentDict

        # Support legacy 'isdata' for a while
        if isdata is not None:
            print "Deprecation Warning: the 'isdata' flag has been replaced by 'observed'. Please update your model accordingly."
            self.observed = isdata

        # A flag indicating whether self's value has been observed.
        self._observed = observed
        # Default value of None for mask
        self._mask = None
        if observed:

            if value is None:
                raise ValueError, "Stochastic %s must be given an initial value if observed=True." % name

            try:

                # If there are missing values, store mask to missing elements
                self._mask = value.mask

                # Set to value of mean of observed data
                value.fill_value = value.mean()
                value = value.filled()

                # Set observed flag to False, so that missing values will update
                self._observed = False

            except AttributeError:
                # Must not have missing values
                pass

        # This function will be used to evaluate self's log probability.
        self._logp_fun = logp

        # This function will be used to evaluate self's gradient of log probability.
        self._logp_partial_gradient_functions = logp_partial_gradients

        # This function will be used to draw values for self conditional on self's parents.
        self._random = random

        # A seed for self's rng. If provided, the initial value will be drawn. Otherwise it's
        # taken from the constructor.
        self.rseed = rseed

        self.errmsg = (
            "Stochastic %s's value is outside its support,\n or it forbids its parents' current values." % name
        )

        dtype = np.dtype(dtype)

        # Initialize value, either from value provided or from random function.
        try:
            if dtype.kind != "O" and value is not None:
                self._value = asanyarray(value, dtype=dtype)
                self._value.flags["W"] = False
            else:
                self._value = value
        except:
            cls, inst, tb = sys.exc_info()
            new_inst = cls(
                "Stochastic %s: Failed to cast initial value to required dtype.\n\nOriginal error message:\n" % name
                + inst.message
            )
            raise cls, new_inst, tb

        # Store the shape of the stochastic value
        self._shape = np.shape(self._value)

        Variable.__init__(
            self,
            doc=doc,
            name=name,
            parents=parents,
            cache_depth=cache_depth,
            trace=trace,
            dtype=dtype,
            plot=plot,
            verbose=verbose,
        )

        # self._logp.force_compute()

        self._shape = np.shape(self._value)

        if isinstance(self._value, ndarray):
            self._value.flags["W"] = False

        if check_logp:
            # Check initial value
            if not isinstance(self.logp, float):
                raise ValueError, "Stochastic " + self.__name__ + "'s initial log-probability is %s, should be a float." % self.logp.__repr__()