def __init__(self, Counts, L, GroupLength, prior_offset, Nyes, Ntrials, ModelResponse, value=None):
    """
    Counts - nonterminal -> #h x #rules counts
    L - group -> #h x 1 array
    GroupLength - #groups (vector) - contains the number of trials per group
    Nyes - #item ( #item = sum(GroupLength))
    Ntrials - #item
    ModelResponse - #h x #item - each hypothesis' response to the i'th item (1 or 0)
    """
    assert sum(GroupLength) == len(Nyes) == len(Ntrials)

    L = numpy.array(L)
    self_update(self, locals())  # store all constructor arguments as attributes

    self.N_groups = len(GroupLength)
    # list() so that self.nts can be indexed below -- dict.keys() is a
    # non-indexable view object under Python 3.
    self.nts = list(Counts.keys())    # all nonterminals
    self.nrules = { nt: Counts[nt].shape[1] for nt in self.nts}  # number of rules for each nonterminal
    self.N_hyps = Counts[self.nts[0]].shape[0]

    if value is None:
        # Default prior/likelihood parameter distributions, one Dirichlet
        # over rule probabilities per nonterminal.
        value = {
            'rulep': { nt: DirichletDistribution(alpha=np.ones(self.nrules[nt]), proposal_scale=1000.) for nt in self.nts },
            'alpha': BetaDistribution(1,1),
            'beta': BetaDistribution(1,1),
            'likelihood_temperature': GammaDistribution(a=1, scale=1, proposal_scale=10.),
            'prior_temperature': GammaDistribution(a=1, scale=1, proposal_scale=10.)
        }

    Hypothesis.__init__(self, value=value)  # sets the value
def __init__(self, Counts, L, GroupLength, prior_offset, Nyes, Ntrials, ModelResponse, value=None):
    """
    Counts - nonterminal -> #h x #rules counts
    Hypotheses - #h
    L - group -> #h x 1 array
    GroupLength - #groups (vector) - contains the number of trials per group
    Nyes - #item ( #item = sum(GroupLength))
    Ntrials - #item
    ModelResponse - #h x #item - each hypothesis' response to the i'th item (1 or 0)
    """
    assert sum(GroupLength) == len(Nyes) == len(Ntrials)

    L = numpy.array(L)
    self_update(self, locals())  # store all constructor arguments as attributes

    self.N_groups = len(GroupLength)
    # list() so that self.nts can be indexed below -- dict.keys() is a
    # non-indexable view object under Python 3.
    self.nts = list(Counts.keys())    # all nonterminals
    self.nrules = { nt: Counts[nt].shape[1] for nt in self.nts}  # number of rules for each nonterminal
    self.N_hyps = Counts[self.nts[0]].shape[0]

    if value is None:
        # Default: one Dirichlet over rule probabilities per nonterminal
        value = { nt: GibbsDirchlet(alpha=np.ones(self.nrules[nt]), proposal_scale=1000.) for nt in self.nts }

    Hypothesis.__init__(self, value=value)  # sets the value
def __init__(self, grammar):
    """ This takes a grammar and a regex to match variable names """
    self_update(self, locals())
    LOTProposer.__init__(self, grammar)

    # check that we used "apply_" instead of "apply".
    # NOTE: compare with != -- `is not` on string literals tests object
    # identity, which is implementation-dependent (and a SyntaxWarning on
    # modern CPython), so these asserts could silently never fire.
    for r in self.grammar:
        assert r.name != "apply", "*** Need to use 'apply_' instead of 'apply' "
        assert r.name != "lambda_", "*** Need to use 'lambda' instead of 'lambda_' "
        # the asymmetry here is disturbing, but lambda is a keyword and apply is a function

    self.insertable_rules = defaultdict( list )

    # Hash each nonterminal to (a,l) where a and l are the apply and lambda rules you need
    for nt in self.grammar.rules.keys():
        for a in filter(lambda r: (r.name == "apply_") and (r.nt == nt), self.grammar):
            for l in filter( lambda r: isinstance(r, BVAddGrammarRule) and (r.nt == a.to[0]) and (r.bv_args is None) and (r.bv_type == a.to[1]), self.grammar ):
                # For each lambda whose "below" is the right type. bv_args are not implemented yet
                self.insertable_rules[nt].append((a, l))
def __init__(self, utterance, context, possible_utterances):
    """Creates a new Utterance.

    Arguments:
        utterance (doc?): the word that's spoken
        context (doc?): the environmental/linguistic context in which the word is spoken
        possible_utterances (doc?): a set of other words we could have spoken, given the context
    """
    # Project helper: copies all constructor arguments onto self
    # (presumably sets self.utterance, self.context, self.possible_utterances).
    self_update(self, locals())
def __init__(self, current_sample, data, steps=Infinity, proposer=None, skip=0,
             prior_temperature=1.0, likelihood_temperature=1.0, acceptance_temperature=1.0,
             trace=False, shortcut_likelihood=True):
    """Initialize a Metropolis-Hastings-style sampler.

    current_sample -- hypothesis to start from; may be None (then no posterior is computed here)
    data -- data the posterior is scored on
    steps -- number of samples to draw (default: unbounded)
    proposer -- callable hypothesis -> proposal; defaults to calling x.propose()
    skip, *_temperature, trace, shortcut_likelihood -- stored as attributes by
        self_update; their use is defined elsewhere in the sampler loop -- TODO confirm
    """
    self_update(self,locals())  # store all constructor arguments as attributes
    self.was_accepted = None  # acceptance flag for the most recent proposal; None until first step
    if proposer is None:
        # default proposal: delegate to the hypothesis' own propose() method
        self.proposer = lambda x: x.propose()
    self.samples_yielded = 0  # how many samples this sampler has produced so far
    # Only compute the posterior now if we actually have a starting sample
    self.set_state(current_sample, compute_posterior=(current_sample is not None))
    self.reset_counters()
def __init__(self, value=None, n=1, proposal=None, propose_scale=1.0, propose_n=1):
    """Initialize an n-dimensional vector hypothesis.

    value -- initial vector; if None, sampled from a zero-mean multivariate normal
    n -- dimensionality of the vector
    proposal -- proposal covariance matrix; defaults to identity * propose_scale
    propose_scale -- scale of the default proposal covariance
    propose_n -- stored as an attribute; used by the proposal mechanism elsewhere -- TODO confirm
    """
    self.n = n
    self.propose_n = propose_n

    # BUGFIX: the default proposal covariance must be built *before* it is
    # used to sample an initial value. Previously, value=None together with
    # proposal=None crashed, because `proposal` was passed to
    # multivariate_normal while still None.
    if proposal is None:
        proposal = np.eye(n) * propose_scale

    if value is None:
        value = np.random.multivariate_normal(np.array([0.0] * n), proposal)

    # Zero out proposal dimensions we are not allowed to move in
    propose_mask = self.get_propose_mask()
    proposal = proposal * propose_mask
    self.proposal = proposal

    Hypothesis.__init__(self, value=value)
    self_update(self, locals())
def __init__(self, grammar, fn, recurse_up=False):
    """ This manages rules that we add and subtract in the context of grammar generation. This is a class
    that is somewhat in between Grammar and GrammarRule. It manages creating, adding, and subtracting the
    bound variable rule via "with" clause in Grammar.

    NOTE: The "rule" here is the added rule, not the "bound variable" one (that adds the rule)
    NOTE: If rule is None, then nothing happens

    This actually could go in FunctionNode, *except* that it needs to know the grammar,
    which FunctionNodes do not

    grammar -- the Grammar the bound-variable rules are added to / removed from
    fn -- the node this manager is attached to -- TODO confirm semantics at call site
    recurse_up -- when True, more than one rule may be added (see self.added_rules)
    """
    self_update(self, locals())  # store grammar, fn, recurse_up as attributes
    self.added_rules = []  # all of the rules we added -- may be more than one from recurse_up=True
def __init__(self, nt, name, to, p=1.0, bv_prefix=None):
    """Create a grammar rule: nonterminal `nt` expands, under label `name`,
    to the children listed in `to`, with (unnormalized) weight `p`.
    """
    p = float(p)
    assert p>0.0, "*** p=0 in rule %s %s %s. What are you thinking?" %(nt,name,to)

    # stash every argument as an attribute on self
    self_update(self, locals())

    # children must come as a (possibly absent) list/tuple of name strings
    assert to is None or isinstance(to, (list, tuple)), "*** 'to' in a GrammarRule must be a list!"
    assert all(isinstance(child, str) for child in None2Empty(to))

    # a rule with an empty name is a pass-through, so it may only wrap one child
    if name == '':
        assert (to is None) or (len(to) == 1), \
            "*** GrammarRules with empty names must have only 1 argument"
def __init__(self, grammar=None, value=None, f=None, maxnodes=25, **kwargs):
    """Initialize a grammar-based hypothesis.

    grammar -- used to sample an initial value when value is None
    value -- the hypothesis' value; sampled via grammar.generate() if None
    f -- passed through to FunctionHypothesis -- presumably the callable form of value
    maxnodes -- stored as an attribute; its use is defined elsewhere -- TODO confirm
    """
    if 'args' in kwargs:
        assert False, "*** Use of 'args' is deprecated. Use display='...' instead."

    # Save all of our keywords
    self_update(self, locals())

    if value is None and grammar is not None:
        value = grammar.generate()

    FunctionHypothesis.__init__(self, value=value, f=f, **kwargs)

    self.likelihood = 0.0
    self.rules_vector = None  # presumably computed lazily elsewhere -- TODO confirm
def __init__(self, nt, name, to, p=1.0, bv_prefix=None):
    """A production rule: `nt` rewrites to the children in `to` via `name`,
    carrying (unnormalized) probability `p`.
    """
    p = float(p)
    assert p > 0.0, "*** p=0 in rule %s %s %s. What are you thinking?" % ( nt, name, to)

    # record all constructor arguments on self
    self_update(self, locals())

    # validate the children: None, or a list/tuple whose elements are all strings
    assert to is None or isinstance(to, (list, tuple)), "*** 'to' in a GrammarRule must be a list!"
    assert all(isinstance(child, str) for child in None2Empty(to))

    # an empty rule name means a pass-through rule, which may only take one child
    if name == '':
        assert (to is None) or (len(to) == 1), \
            "*** GrammarRules with empty names must have only 1 argument"
def __init__(self, nt, name, to, p=1.0, bv_prefix="y", bv_type=None, bv_args=None, bv_p=None):
    """A grammar rule that introduces a bound variable.

    nt, name, to, p -- as in GrammarRule
    bv_type -- required: the (string) nonterminal type of the introduced bound variable
    bv_args -- arguments of the introduced bound-variable rule, if any
    bv_p -- probability of the introduced rule; None presumably selects a default -- TODO confirm
    bv_prefix -- prefix for naming introduced variables -- presumably "y1", "y2", ...; confirm
    """
    p = float(p)
    self_update(self, locals())  # store all constructor arguments as attributes
    assert bv_type is not None, "Did you mean to use a GrammarRule instead of a BVGrammarRule?"
    assert isinstance( bv_type, str ), "bv_type must be a string! Make sure it's not a tuple or list."
def __init__(self, grammar):
    """ This takes a grammar and a regex to match variable names """
    self_update(self,locals())
    LOTProposer.__init__(self, grammar)

    # check that we used "apply_" instead of "apply".
    # NOTE: compare with != -- `is not` on string literals tests object
    # identity, which is implementation-dependent (and a SyntaxWarning on
    # modern CPython), so these asserts could silently never fire.
    for r in self.grammar:
        assert r.name != "apply", "*** Need to use 'apply_' instead of 'apply' "
        assert r.name != "lambda_", "*** Need to use 'lambda' instead of 'lambda_' "
        # the asymmetry here is disturbing, but lambda is a keyword and apply is a function

    self.insertable_rules = defaultdict(list)

    # Hash each nonterminal to (a,l) where a and l are the apply and lambda rules you need
    for nt in self.grammar.rules.keys():
        for a in filter(lambda r: (r.name=="apply_") and (r.nt == nt), self.grammar):
            for l in filter( lambda r: isinstance(r, BVAddGrammarRule) and (r.nt == a.to[0]) and (r.bv_args is None) and (r.bv_type==a.to[1]), self.grammar):
                # For each lambda whose "below" is the right type. bv_args are not implemented yet
                self.insertable_rules[nt].append( (a,l) )
def __init__(self, current_sample, data, steps=Infinity, proposer=None, skip=0,
             prior_temperature=1.0, likelihood_temperature=1.0, acceptance_temperature=1.0,
             trace=False, shortcut_likelihood=True):
    """Set up a Metropolis-Hastings-style sampler.

    current_sample -- starting hypothesis; may be None, in which case no posterior is computed yet
    data -- the data used for scoring
    steps -- number of samples to draw (default: run forever)
    proposer -- callable hypothesis -> proposal; defaults to x.propose()
    skip, *_temperature, trace, shortcut_likelihood -- stored via self_update;
        consumed elsewhere in the sampling loop -- TODO confirm
    """
    self_update(self, locals())  # store all constructor arguments as attributes
    self.was_accepted = None  # None until the first proposal has been evaluated
    if proposer is None:
        # default proposal mechanism: ask the hypothesis itself
        self.proposer = lambda x: x.propose()
    self.samples_yielded = 0  # count of samples produced so far
    # compute the posterior only when there is an actual starting sample
    self.set_state(current_sample, compute_posterior=(current_sample is not None))
    self.reset_counters()
def __init__(self, grammar, value=None, f=None, node_counts=None, maxnodes=25, recurse_bound=25,
             display="lambda recurse_, x: %s", **kwargs):
    """
    Initializer. recurse gives the name for the recursion operation internally.

    grammar -- used both to sample a value (with per-rule node counts) and for update_alphas()
    value -- the tree; sampled via grammar.generate_with_counts() when None
    node_counts -- per-rule counts matching a supplied value; only used when value is not None
    recurse_bound -- maximum recursion depth allowed (see recursive_depth_bound)
    display -- eval template; must start with 'lambda recurse_' so recursion can be wired in
    """
    # otherwise it can't eval
    assert "lambda recurse_" in display, "*** RecursiveLOTHypothesis must have 'recurse_' as first display element."

    # save recurse symbol
    self.recursive_depth_bound = recurse_bound  # how deep can we recurse?
    self.recursive_call_depth = 0  # how far down have we recursed?

    if 'args' in kwargs:
        assert False, "*** Use of 'args' is deprecated. Use display='...' instead."

    # Save all of our keywords
    self_update(self, locals())

    grammar.update_alphas()  # make sure alpha/sigma arrays are initialized

    if value is None:
        # sample a value and its per-rule node counts in one pass
        value, self.node_counts = grammar.generate_with_counts()
    else:
        # caller supplied a value; trust their node_counts (may be None -- TODO confirm)
        self.node_counts = node_counts

    self.tree_size = np.sum(self.node_counts)

    FunctionHypothesis.__init__(self, value=value, f=f, display=display, **kwargs)

    self.likelihood = 0.0
    self.compute_prior()
    self.rules_vector = None  # presumably computed lazily elsewhere -- TODO confirm
def __init__(self, objects, relations, features=None, ego=None, distance=None):
    """Build a context from (relation, x, z) triples, indexing 'parent' and
    'spouse' relations into per-person sets. Only 'parent' and 'spouse'
    relation labels are accepted.
    """
    self_update(self, locals())
    # the raw triple list is fully indexed below; don't keep it as an attribute
    self.__dict__.pop('relations')

    self.parents = defaultdict(set)
    self.spouses = defaultdict(set)
    self.children = defaultdict(set)

    for rel, src, dst in relations:
        if rel == 'parent':
            # src is a parent of dst -- record both directions of the relation
            self.parents[dst].add(src)
            self.children[src].add(dst)
        elif rel == 'spouse':
            self.spouses[src].add(dst)
        else:
            # unknown relation label: fail loudly with the offending triple
            assert False, '\t'.join([rel, src, dst])
def __init__(self, min, max, period):
    """Schedule bounded between min and max with the given period.

    NOTE(review): `min`/`max` shadow builtins, but they are part of the
    public interface, so they are kept. Exact oscillation semantics are
    presumably defined in next() -- not visible here.
    """
    assert max > min
    assert period > 0.
    self_update(self, locals())  # store min, max, period as attributes
    self.ticks = 0  # how many times have we called next?
def __init__(self, nt, name, to, p=1.0, bv_prefix="y", bv_type=None, bv_args=None, bv_p=None):
    """A grammar rule introducing a bound variable.

    nt, name, to, p -- as in GrammarRule
    bv_type -- required string: nonterminal type of the introduced variable
    bv_args -- arguments of the introduced rule, if any
    bv_p -- probability of the introduced rule; None presumably means a default -- TODO confirm
    bv_prefix -- naming prefix for introduced variables -- TODO confirm exact use
    """
    p = float(p)
    self_update(self, locals())  # store all constructor arguments as attributes
    assert bv_type is not None, "Did you mean to use a GrammarRule instead of a BVGrammarRule?"
    assert isinstance(bv_type, str), "bv_type must be a string! Make sure it's not a tuple or list."
def __init__(self, proposers=None, proposer_weights=None, **kwargs):
    """Mixture of proposers, chosen according to proposer_weights.

    proposers -- list of proposer objects (default: empty list)
    proposer_weights -- matching list of selection weights (default: empty list)

    NOTE: defaults were changed from mutable `[]` literals to None sentinels;
    callers passing nothing get the same empty lists as before, without the
    shared-mutable-default pitfall.
    """
    if proposers is None:
        proposers = []
    if proposer_weights is None:
        proposer_weights = []
    assert len(proposers) == len(proposer_weights) , "MixtureProposer.py >> __init__: different number of proposals and weights!"
    self_update(self,locals())  # store proposers/weights as attributes
    Proposer.__init__(self,**kwargs)
def __init__(self, parent, returntype, name, args):
    """Create a node with a parent link, a return type, a (possibly None) name, and arguments."""
    self_update(self,locals())  # store parent, returntype, name, args as attributes
    self.added_rule = None  # presumably set later if this node introduces a rule -- TODO confirm
    assert self.name is None or isinstance(self.name, str)
def __init__(self, max, scale):
    """Schedule parameterized by max and scale.

    NOTE(review): `max` shadows a builtin but is part of the public
    interface. Exact semantics are presumably defined in next() -- not
    visible here.
    """
    self_update(self, locals())  # store max and scale as attributes
    self.ticks = 0  # presumably incremented each call to next() -- TODO confirm
def __init__(self, grammar, make_h, data, steps=Infinity):
    """Store configuration for iterating hypotheses.

    grammar -- the grammar to draw from
    make_h -- presumably a factory turning a value into a hypothesis -- TODO confirm at call site
    data -- data used for scoring
    steps -- maximum number of iterations (default: unbounded)
    """
    self_update(self, locals())  # store all constructor arguments as attributes
def __init__(self, h0, data, steps=Infinity):
    """Initialize with starting hypothesis h0, scored on data, for at most `steps` iterations."""
    self_update(self, locals())  # store h0, data, steps as attributes
    assert isinstance(h0, LOTHypothesis)  # only implemented for LOTHypothesis
    self.samples_yielded = 0  # count of samples produced so far
def __init__(self, word, X, Y, context):
    """Store word, X, Y, and context as attributes.

    NOTE(review): the semantics of X and Y are not visible here --
    presumably the two arguments of the word's relation; confirm at call site.
    """
    self_update(self, locals())
def __init__(self, c, alpha):
    """Schedule with constant c and rate alpha (exact use presumably defined in next() -- not visible here)."""
    self_update(self, locals())  # store c and alpha as attributes
    self.ticks = 0  # presumably incremented each call to next() -- TODO confirm
def __init__(self, parent, returntype, name, args, term_type='none'):
    """Create a node with a parent link, return type, (possibly None) name, args,
    and a term_type tag (semantics defined elsewhere -- TODO confirm).
    """
    self_update(self, locals())  # store all constructor arguments as attributes
    self.added_rule = None  # presumably set later if this node introduces a rule -- TODO confirm
    assert self.name is None or isinstance(self.name, str)
def __init__(self, proposers=None, proposer_weights=None, **kwargs):
    """Mixture of proposers selected according to proposer_weights.

    proposers -- list of proposer objects (default: empty list)
    proposer_weights -- matching list of selection weights (default: empty list)

    NOTE: defaults were changed from mutable `[]` literals to None sentinels;
    omitting the arguments still yields empty lists, but without the
    shared-mutable-default pitfall.
    """
    if proposers is None:
        proposers = []
    if proposer_weights is None:
        proposer_weights = []
    assert len(proposers) == len( proposer_weights ), "MixtureProposer.py >> __init__: different number of proposals and weights!"
    self_update(self, locals())  # store proposers/weights as attributes
    Proposer.__init__(self, **kwargs)