Example no. 1
0
    def __init__(self, Counts, L, GroupLength, prior_offset, Nyes, Ntrials, ModelResponse, value=None):
        """
            Counts        - nonterminal -> #h x #rules counts
            L             - group -> #h x 1 array
            GroupLength   - #groups (vector) - contains the number of items per group (sum(GroupLength) = #item)
            Nyes          - #item ( #item = sum(GroupLength) )
            Ntrials       - #item
            ModelResponse - #h x #item - each hypothesis' response to the i'th item (1 or 0)
        """

        assert sum(GroupLength) == len(Nyes) == len(Ntrials)

        L = np.array(L)

        self_update(self, locals())
        self.N_groups = len(GroupLength)
        self.nts = list(Counts.keys())  # all nonterminals (as a list so it can be indexed below)
        self.nrules = {nt: Counts[nt].shape[1] for nt in self.nts}  # number of rules for each nonterminal
        self.N_hyps = Counts[self.nts[0]].shape[0]

        if value is None:
            value = {
                      'rulep': { nt: DirichletDistribution(alpha=np.ones(self.nrules[nt]), proposal_scale=1000.) for nt in self.nts },
                      'alpha': BetaDistribution(1,1),
                      'beta':  BetaDistribution(1,1),
                      'likelihood_temperature':   GammaDistribution(a=1, scale=1, proposal_scale=10.),
                      'prior_temperature': GammaDistribution(a=1, scale=1, proposal_scale=10.)
            }

        Hypothesis.__init__(self, value=value) # sets the value
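For reference, a minimal sketch of the array shapes this constructor expects, using only numpy; the sizes and dummy contents below are invented for illustration and are not part of the original code:

import numpy as np

H = 5                                                            # hypothetical number of hypotheses
Counts = {'START': np.zeros((H, 4)), 'EXPR': np.zeros((H, 7))}   # nonterminal -> #h x #rules counts
GroupLength = [10, 10, 5]                                        # items per group
L = [np.zeros((H, 1)) for _ in GroupLength]                      # group -> #h x 1 array
Nyes = np.zeros(sum(GroupLength))                                # one entry per item
Ntrials = np.full(sum(GroupLength), 20)                          # one entry per item
ModelResponse = np.zeros((H, sum(GroupLength)))                  # #h x #item matrix of 0/1 responses

# The same invariant the constructor asserts: items are counted consistently across the inputs.
assert sum(GroupLength) == len(Nyes) == len(Ntrials)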
Example no. 2
0
	def __init__(self, make_hypothesis, words=(), **kwargs):
		"""
			make_hypothesis -- a function to generate hypotheses
			words -- words to initially add (sampling from the prior)
		"""
		Hypothesis.__init__(self, value=dict(), **kwargs)
		self.__dict__.update(locals())
		
		assert isroutine(make_hypothesis) # check that we can call
		
		# update with the supplied words, each generating from the grammar
		for w in words:
			self.set_word(w, v=None)
Example no. 3
0
    def __init__(self, make_hypothesis, words=(), **kwargs):
        """
            make_hypothesis -- a function to generate hypotheses
            words -- words to initially add (sampling from the prior)
        """
        Hypothesis.__init__(self, value=dict(), **kwargs)
        self.__dict__.update(locals())

        assert isroutine(make_hypothesis)  # check that we can call

        # update with the supplied words, each generating from the grammar
        for w in words:
            self.set_word(w, v=None)
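A rough, self-contained sketch of the make_hypothesis pattern used above; StubHypothesis and the example words are invented for illustration and are not part of the code being listed:

import random

class StubHypothesis(object):
    # Hypothetical stand-in for whatever make_hypothesis actually constructs.
    def __init__(self):
        self.value = random.random()   # pretend this is a sample from the prior

def make_hypothesis():
    return StubHypothesis()

# Each word gets its own freshly sampled hypothesis, mirroring the set_word loop above.
lexicon = {w: make_hypothesis() for w in ('one', 'two', 'three')}
print(sorted(lexicon.keys()))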
Example no. 4
0
    def __init__(self, value=None, propose_p=0.5, **kwargs):
        """
            value -- the initial value dict; None leaves it empty (for copying)
            propose_p -- the probability of proposing to each word
        """

        if value is None:
            value = dict()
        else:
            assert isinstance(value, dict)  # check the argument itself; self.value is not set yet

        Hypothesis.__init__(self, value=value, **kwargs)

        self.propose_p = propose_p
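A minimal sketch of how a per-word proposal probability like propose_p is typically used; propose_to_lexicon below is a hypothetical helper, not part of the class above:

import random

def propose_to_lexicon(value, propose_p=0.5):
    # Flip a propose_p-weighted coin for each word; the selected words would receive new proposals.
    return [w for w in value if random.random() < propose_p]

# With propose_p=0.5, roughly half of the words are selected on each call.
print(propose_to_lexicon({'a': 1, 'b': 2, 'c': 3, 'd': 4}))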
Example no. 5
0
def crossover_lot(x, y):
    t = copy(x.value)
    n1, _ = t.sample_subnode(resampleProbability=lambdaOne)
    n2, _ = y.value.sample_subnode(resampleProbability=lambda t: 1*(t.returntype==n1.returntype))

    n1.setto(n2) # assign the value!

    return Hypothesis.__copy__(x, value=t)
Example no. 6
0
def crossover_lot(x, y):
    t = copy(x.value)
    n1, _ = t.sample_subnode(resampleProbability=lambdaOne)
    n2, _ = y.value.sample_subnode(
        resampleProbability=lambda t: 1 * (t.returntype == n1.returntype))

    n1.setto(n2)  # assign the value!

    return Hypothesis.__copy__(x, value=t)
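The two crossover_lot listings above splice a return-type-compatible subnode of y into a copy of x. A self-contained toy sketch of the same idea follows; the Node class and its helpers are invented for illustration and are far simpler than the tree objects used above:

import random
from copy import deepcopy

class Node(object):
    # Hypothetical toy tree node with a return type, standing in for the real tree class used above.
    def __init__(self, returntype, name, children=()):
        self.returntype, self.name, self.children = returntype, name, list(children)

    def subnodes(self):
        yield self
        for c in self.children:
            for n in c.subnodes():
                yield n

    def setto(self, other):
        # overwrite this node in place with (a copy of) another node's content
        self.__dict__.update(deepcopy(other).__dict__)

    def __repr__(self):
        return self.name + ('(%s)' % ', '.join(map(repr, self.children)) if self.children else '')

def toy_crossover(x, y):
    t = deepcopy(x)
    n1 = random.choice(list(t.subnodes()))                                # uniform subnode of the copy
    matches = [n for n in y.subnodes() if n.returntype == n1.returntype]  # type-compatible subnodes of y
    n1.setto(random.choice(matches))                                      # splice y's subtree in
    return t

x = Node('EXPR', 'plus', [Node('NUM', 'one'), Node('NUM', 'two')])
y = Node('EXPR', 'times', [Node('NUM', 'three'), Node('NUM', 'four')])
print(toy_crossover(x, y))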
Example no. 7
    def propose(self):
        if random.random() < 0.5:
            ret = regeneration_proposal(self.grammar, self.value)

        else:
            ret = insert_delete_proposal(self.grammar, self.value)

        p = Hypothesis.__copy__(self, value=ret[0])
        ret[0] = p
        return ret
Example no. 8
    def propose(self):
        if random.random() < 0.5:
            ret = regeneration_proposal(self.grammar, self.value)

        else:
            ret = insert_delete_proposal(self.grammar, self.value)

        p = Hypothesis.__copy__(self, value=ret[0])
        ret[0] = p
        return ret
Example no. 9
0
    def propose(self, **kwargs):

        while True: # keep trying to propose
            try:
                ret = self.propose_tree(self.value, **kwargs) # don't unpack, since we may return [newt,fb] or [newt,f,b]
                break
            except ProposalFailedException:
                pass

        p = Hypothesis.__copy__(self, value=ret[0])

        ret[0] = p # really make the first a hypothesis, not a tree

        return ret
Example no. 10
0
    def propose(self, **kwargs):

        while True:  # keep trying to propose
            try:
                ret = self.propose_tree(
                    self.value, **kwargs
                )  # don't unpack, since we may return [newt,fb] or [newt,f,b]
                break
            except ProposalFailedException:
                pass

        p = Hypothesis.__copy__(self, value=ret[0])

        ret[0] = p  # really make the first a hypothesis, not a tree

        return ret
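As the comments above note, propose_tree may return either [newt, fb] or [newt, f, b]. A small hedged sketch of how a caller could normalize the two conventions into a single forward-minus-backward log term (this helper is illustrative, not taken from the code above):

def fb_from_proposal(ret):
    # Assumes fb = (log forward probability) - (log backward probability) of the proposal.
    if len(ret) == 2:        # [newt, fb]: already combined
        return ret[0], ret[1]
    if len(ret) == 3:        # [newt, f, b]: combine the separate log-probabilities
        return ret[0], ret[1] - ret[2]
    raise ValueError("unexpected proposal return value: %r" % (ret,))

# Both conventions yield the same (value, fb) pair:
print(fb_from_proposal(['tree', -1.5]))
print(fb_from_proposal(['tree', -2.0, -0.5]))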
Example no. 11
    def __init__(self, value=None, ll_decay=0.0, **kwargs):
        Hypothesis.__init__(self, value=value, **kwargs)
        self.ll_decay = ll_decay # store this
        self.stored_likelihood = None
Example no. 12
0
    def __init__(self, value=None, ll_decay=0.0, **kwargs):
        Hypothesis.__init__(self, value=value, **kwargs)
        self.ll_decay = ll_decay  # store this
        self.stored_likelihood = None
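The ll_decay stored above is only recorded in these listings; a minimal sketch of the usual idea, exponentially down-weighting older data points, follows. The exact weighting formula is an assumption for illustration, not taken from the class above:

def decayed_likelihood(pointwise_lls, ll_decay=0.0):
    # Weight the k-th most recent log-likelihood by (k+1)**(-ll_decay); ll_decay=0.0 gives a plain sum.
    n = len(pointwise_lls)
    return sum(ll * (n - i) ** (-ll_decay) for i, ll in enumerate(pointwise_lls))

print(decayed_likelihood([-1.0, -2.0, -0.5], ll_decay=0.0))   # -3.5, no decay
print(decayed_likelihood([-1.0, -2.0, -0.5], ll_decay=0.5))   # older points count for less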