Example no. 1
    def __init__(self, Counts, L, GroupLength, prior_offset, Nyes, Ntrials, ModelResponse, value=None):
        """
            Counts - nonterminal -> #h x #rules counts
            L          - group -> #h x 1 array
            GroupLength   - #groups (vector)  - contains the number of trials per group
            Nyes          - #item ( #item = sum(GroupLength))
            Ntrials       - #item
            ModelResponse - #h x #item - each hypothesis' response to the i'th item (1 or 0)
        """

        assert sum(GroupLength) == len(Nyes) == len(Ntrials)

        L = np.array(L)  # numpy assumed imported as np (matching np.ones below)

        self_update(self, locals())
        self.N_groups = len(GroupLength)
        self.nts = list(Counts.keys())  # all nonterminals, as a list so it can be indexed below
        self.nrules = {nt: Counts[nt].shape[1] for nt in self.nts}  # number of rules per nonterminal
        self.N_hyps = Counts[self.nts[0]].shape[0]

        if value is None:
            value = {
                      'rulep': { nt: DirichletDistribution(alpha=np.ones(self.nrules[nt]), proposal_scale=1000.) for nt in self.nts },
                      'alpha': BetaDistribution(1,1),
                      'beta':  BetaDistribution(1,1),
                      'likelihood_temperature':   GammaDistribution(a=1, scale=1, proposal_scale=10.),
                      'prior_temperature': GammaDistribution(a=1, scale=1, proposal_scale=10.)
            }

        Hypothesis.__init__(self, value=value) # sets the value
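
To make the expected argument shapes concrete, here is a minimal construction sketch. The class name FullGrammarHypothesis is an assumption (the snippet shows only __init__), and the DirichletDistribution/BetaDistribution/GammaDistribution classes used for the default value must be importable:

    import numpy as np

    # Toy problem: 2 hypotheses, one nonterminal with 3 rules,
    # 2 groups containing 2 and 1 items respectively (3 items total).
    Counts = {'START': np.array([[1, 0, 2],
                                 [0, 1, 1]])}        # #h x #rules
    L = [np.array([[-1.0], [-2.0]]),                 # group -> #h x 1
         np.array([[-0.5], [-1.5]])]
    GroupLength   = [2, 1]
    prior_offset  = np.zeros(2)                      # one entry per hypothesis
    Nyes          = [3, 1, 4]                        # per-item "yes" counts
    Ntrials       = [5, 5, 5]                        # per-item trial counts
    ModelResponse = np.array([[1, 0, 1],
                              [0, 1, 1]])            # #h x #item

    h = FullGrammarHypothesis(Counts, L, GroupLength, prior_offset,
                              Nyes, Ntrials, ModelResponse)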
Example no. 2
	def __init__(self, make_hypothesis, words=(), **kwargs):
		"""
			make_hypothesis - a function used to generate new hypotheses
			words           - words to initially add (each sampled from the prior)
		"""
		Hypothesis.__init__(self, value=dict(), **kwargs)
		self.__dict__.update(locals())  # stash the constructor arguments on self
		
		assert isroutine(make_hypothesis)  # check that make_hypothesis is callable (isroutine comes from the inspect module)
		
		# update with the supplied words, each generating from the grammar
		for w in words:
			self.set_word(w, v=None)
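
A minimal usage sketch. The enclosing class name SimpleLexicon is an assumption, as is the body of make_hypothesis; any plain function that returns a fresh hypothesis will pass the isroutine check:

    def make_hypothesis():
        # hypothetical factory; in practice this would construct e.g. a LOTHypothesis
        return object()

    lex = SimpleLexicon(make_hypothesis, words=('every', 'some'))
    # __init__ called set_word once per word, sampling each meaning from the prior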
Example no. 3
    def __init__(self, value=None, propose_p=0.5, **kwargs):
        """
            make_hypothesis -- a function to make each individual word meaning. None will leave it empty (for copying)
            words -- words to initially add (sampling from the prior)
            propose_p -- the probability of proposing to each word
        """

        if value is None:
            value = dict()
        else:
            assert isinstance(value, dict)  # check the argument, not self.value, which is not set yet

        Hypothesis.__init__(self, value=value, **kwargs)

        self.propose_p = propose_p
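
A short sketch of the two construction paths, assuming the enclosing class is called WordLexicon (a hypothetical name):

    # Start empty: value=None is replaced by a fresh dict.
    lex = WordLexicon(propose_p=0.25)
    assert lex.value == {} and lex.propose_p == 0.25

    # Or start from an existing word -> hypothesis mapping:
    lex2 = WordLexicon(value={'blue': blue_hypothesis})  # blue_hypothesis is hypothetical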
Example no. 4
    def __init__(self, value=None, ll_decay=0.0, **kwargs):
        Hypothesis.__init__(self, value=value, **kwargs)
        self.ll_decay = ll_decay  # store the likelihood decay rate
        self.stored_likelihood = None  # no likelihood computed yet
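
A decay rate like ll_decay is typically used to down-weight older data when summing per-datum log-likelihoods. The exponential weighting below is an illustrative assumption, not taken from the source:

    import math

    def decayed_ll(single_lls, ll_decay=0.0):
        # Weight the i-th most recent log-likelihood by exp(-ll_decay * i);
        # ll_decay = 0.0 recovers the plain sum.
        return sum(ll * math.exp(-ll_decay * i)
                   for i, ll in enumerate(reversed(single_lls)))

    print(decayed_ll([-1.0, -2.0, -0.5], ll_decay=0.1))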
Esempio n. 6
0
 def __init__(self, value=None, ll_decay=0.0, **kwargs):
     Hypothesis.__init__(self, value=value, **kwargs)
     self.ll_decay = ll_decay  # store this
     self.stored_likelihood = None