Code Example #1
File: GibbsStepMethods.py  Project: GunioRobot/pymc
 def competence(stochastic):
     test_input = stochastic.extended_children | set([stochastic])
     try:
         NormalSubmodel.check_input(test_input)
         return pymc.conjugate_Gibbs_competence
     except ValueError:
         return 0
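
In pymc 2, every step-method class exposes a static competence(stochastic) function; the sampler's step-method assignment (MCMC.assign_step_methods) calls it for each stochastic without a user-assigned handler and picks the class reporting the highest score (conventionally 0 = cannot handle, 3 = ideal). The function above claims pymc.conjugate_Gibbs_competence whenever the stochastic together with its extended children passes NormalSubmodel.check_input, and declines otherwise. Below is a minimal sketch of the same protocol for a hypothetical custom step method; the class name, the score of 2, and the empty step body are illustrative assumptions, not part of the source.

    import pymc

    class DemoStepMethod(pymc.StepMethod):
        """Hypothetical step method illustrating the competence protocol."""

        @staticmethod
        def competence(stochastic):
            # Claim moderate competence for Normal stochastics and
            # decline (return 0) for everything else.
            if isinstance(stochastic, pymc.Normal):
                return 2
            return 0

        def step(self):
            # A real step method would propose and accept/reject a new
            # value for self.stochastics here; omitted in this sketch.
            pass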
Code Example #2
File: GibbsStepMethods.py  Project: takluyver/pymc
        def __init__(self, input, verbose=0):

            # if input is not a Normal submodel, find maximal Normal submodel incorporating it.
            if not isinstance(input, NormalSubmodel):
                # TODO: Uncomment when crawl_... is working
                # input = NormalSubmodel(crawl_normal_submodel(input))
                input = NormalSubmodel(input)

            # Otherwise just store the input, which was a Normal submodel.
            self.NSM = input
            self.verbose = verbose

            # Read self.stochastics from Normal submodel.
            self.stochastics = set(self.NSM.changeable_stochastic_list)

            self.children = set([])
            self.parents = set([])
            for s in self.stochastics:
                self.children |= s.extended_children
                self.parents |= s.extended_parents

            # Remove own stochastics from children and parents.
            self.children -= self.stochastics
            self.parents -= self.stochastics

            self.conjugate = True
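
This constructor accepts either a ready-made NormalSubmodel or anything one can be built from, then records the submodel's stochastics along with their extended children and parents (excluding the submodel's own stochastics). A minimal sketch of how such a step method is typically attached to a pymc 2 sampler follows; NormalGibbsStep is a hypothetical stand-in for the class this __init__ belongs to, and the two-variable chain is purely illustrative.

    import pymc

    # A tiny jointly normal chain (illustrative).
    x = pymc.Normal('x', 0.0, 1.0)
    y = pymc.Normal('y', x, 1.0, value=1.5, observed=True)

    M = pymc.MCMC([x, y])

    # NormalGibbsStep stands in for the step-method class shown above;
    # use_step_method forwards x to its __init__ as the 'input' argument.
    M.use_step_method(NormalGibbsStep, x)
    M.sample(iter=5000)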
Code Example #3
File: DLM.py  Project: huard/pymc
    def __init__(self, F, G, V, W, m_0, C_0, Y_vals=None):
        """
        D = DLM(F, G, V, W, m_0, C_0[, Y_vals])

        Returns special NormalSubmodel instance representing the dynamic
        linear model formed by F, G, V and W.

        Resulting probability model:

            theta[0] | m_0, C_0 ~ N(m_0, C_0)

            theta[t] | theta[t-1], G[t], W[t] ~ N(G[t] theta[t-1], W[t]), t = 1..T

            Y[t] | theta[t], F[t], V[t] ~ N(F[t] theta[t], V[t]), t = 0..T


        Arguments F and G should be dictionaries keyed by name of component.
            F[comp] and G[comp] should be lists.
                F[comp][t] should be the design vector of component 'comp' at time t.
                G[comp][t] should be the system matrix.

        Argument W should be either a number between 0 and 1 or a dictionary of lists
        like F and G.
            If a dictionary of lists, W[comp][t] should be the system covariance or
            variance at time t.
            If a scalar, W should be the discount factor for the DLM.

        Argument V, and Y_vals if given, should be lists.
            V[t] should be the observation covariance or variance at time t.
            Y_vals[t] should give the value of output Y at time t.

        Arguments m_0 and C_0 should be dictionaries keyed by name of component.
            m_0[comp] should be the mean of theta[comp][0].
            C_0[comp] should be the covariance or variance of theta[comp][0].

        Note: if multiple components are correlated in W or V, they should be made into
        a single component.

        D.comp is a handle to a list.
            D.comp[t] is a Stochastic representing the value of system state 'theta'
            sliced according to component 'comp' at time t.

        D.theta is a dictionary of lists analogous to F, G and W.

        D.Y is a list. D.Y[t] is a Stochastic representing the value of the output
        'Y' at time t.
        """

        self.comps = F.keys()

        self.F = dict_to_recarray(F)
        self.G = dict_to_recarray(G)
        self.V = pymc.ListContainer(V)
        if np.isscalar(W):
            self.discount = True
            self.delta = W
        else:
            self.W = dict_to_recarray(W)
            self.discount = False
            self.delta = None
        if self.discount:
            raise NotImplementedError("Have yet to code up the discount factor.")
        self.m_0 = dict_to_recarray(m_0)
        self.C_0 = dict_to_recarray(C_0)
        self.T = len(self.V)

        theta = {}
        theta_mean = {}

        Y_mean = []
        Y = []

        # ==============
        # = Make theta =
        # ==============
        for comp in self.comps:
            # Is diagonal the covariance or variance?
            if isinstance(self.W[comp][0], pymc.Variable):
                diag = isvector(self.W[comp][0].value)
            else:
                diag = isvector(self.W[comp][0])

            if diag:
                # Normal variates if diagonal.
                theta[comp] = [pymc.Normal("%s[0]" % comp, m_0[comp], C_0[comp])]
            else:
                # MV normal otherwise.
                theta[comp] = [pymc.MvNormal("%s[0]" % comp, m_0[comp], C_0[comp])]

            theta_mean[comp] = []

            for t in xrange(1, self.T):

                theta_mean[comp].append(
                    pymc.LinearCombination("%s_mean[%i]" % (comp, t), [G[comp][t - 1]], [theta[comp][t - 1]])
                )

                if diag:
                    # Normal variates if diagonal.
                    theta[comp].append(pymc.Normal("%s[%i]" % (comp, t), theta_mean[comp][t - 1], W[comp][t - 1]))
                else:
                    # MV normal otherwise.
                    theta[comp].append(pymc.MvNormal("%s[%i]" % (comp, t), theta_mean[comp][t - 1], W[comp][t - 1]))

        self.theta = dict_to_recarray(theta)
        self.theta_mean = dict_to_recarray(theta_mean)

        # ==========
        # = Make Y =
        # ==========
        Y_diag = isvector(self.V.value[0])

        for t in xrange(self.T):
            x_coef = []
            y_coef = []

            for comp in self.comps:
                x_coef.append(self.F[comp][t])
                y_coef.append(theta[comp][t])

            Y_mean.append(pymc.LinearCombination("Y_mean[%i]" % t, x_coef, y_coef))
            if Y_diag:
                # Normal variates if diagonal.
                Y.append(pymc.Normal("Y[%i]" % t, Y_mean[t], V[t]))
            else:
                # MV normal otherwise.
                Y.append(pymc.MvNormal("Y[%i]" % t, Y_mean[t], V[t]))

            # If data provided, use it.
            if Y_vals is not None:
                Y[t].value = Y_vals[t]
                Y[t].observed = True

        self.Y_mean = pymc.Container(np.array(Y_mean))
        self.Y = pymc.Container(np.array(Y))

        # No sense creating a NormalSubmodel here... just stay a ListContainer.
        NormalSubmodel.__init__(self, [F, G, W, V, m_0, C_0, Y, theta, theta_mean, Y_mean])
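
Since the docstring spells out the expected layout of each argument, a small end-to-end construction may help. The sketch below builds a single scalar 'level' component over ten time steps; the component name, dimensions, variance values, and simulated observations are illustrative assumptions rather than anything taken from the source, and DLM and numpy (as np) are assumed to be in scope as in DLM.py.

    T = 10  # number of observation times (illustrative)

    # One scalar 'level' component: length-1 design vector and 1x1 system
    # matrix at every time step.
    F = {'level': [np.ones(1) for t in range(T)]}
    G = {'level': [np.eye(1) for t in range(T - 1)]}

    # System variances (used for theta[1..T-1]) and observation variances.
    W = {'level': [0.1 * np.ones(1) for t in range(T - 1)]}
    V = [0.5 * np.ones(1) for t in range(T)]

    # Mean and variance of the initial state theta['level'][0].
    m_0 = {'level': np.zeros(1)}
    C_0 = {'level': np.ones(1)}

    # Simulated observations standing in for real data.
    Y_obs = list(np.random.normal(size=T))

    D = DLM(F, G, V, W, m_0, C_0, Y_vals=Y_obs)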
Code Example #4
File: NormalModel.py  Project: rsumner31/pymc3-23
    W = Uninformative('W',np.eye(2)*N)
    base_mu = Uninformative('base_mu', np.ones(2)*3)
    # W[0,1] = W[1,0] = .5
    x_list = [MvNormal('x_0',base_mu,W,value=np.zeros(2))]
    for i in xrange(1,N):
        # L = LinearCombination('L', x=[x_list[i-1]], y = [np.eye(2)])
        x_list.append(MvNormal('x_%i'%i,x_list[i-1],W))

    # W = N
    # x_list = [Normal('x_0',1.,W,value=0)]
    # for i in xrange(1,N):
    #     # L = LinearCombination('L', x=[x_list[i-1]], coefs = {x_list[i-1]:np.ones((2,2))}, offset=0)
    #     x_list.append(Normal('x_%i'%i,x_list[i-1],W))

    # Observe the chain two-thirds of the way along.
    data_index = 2*N/3

    x_list[data_index].value = array([4.,-4.])
    x_list[data_index].observed=True

    # Build the Normal submodel and query its analytic posterior via NormalModel.
    G = NormalSubmodel(x_list)
    x_list.pop(data_index)

    N = NormalModel(G)  # note: rebinds N, which was previously the chain length

    # Plot the posterior covariance of the first coordinates and the posterior
    # means of both coordinates (assumes pylab's plotting functions are in scope).
    close('all')
    figure()
    subplot(1,2,1)
    contourf(N.C[x_list][::2,::2].view(ndarray))
    subplot(1,2,2)
    plot(N.mu[x_list][::2])
    plot(N.mu[x_list][1::2])