def __init__(self, grammar=None, alphabet_size=2, **kwargs):
    SimpleLexicon.__init__(self, **kwargs)

    self.grammar = grammar  # the base grammar (with 0 included); we copy it and add in other recursions on self.deepen()
    self.N = 0  # the number of meanings we have

    # The most internal recurse_ calls we can make without raising an exception.
    # It gets increased on every deepen(). NOTE: if this is small, it bounds the length of each string.
    self.max_total_calls = MAX_SELF_RECURSION
    self.total_calls = 0

    self.alphabet_size = alphabet_size
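# A minimal sketch (not from the source) of how total_calls and max_total_calls
# might bound recursion: every internal recurse_ call bumps the counter and raises
# once the bound is exceeded. The exception class and wrapper name here are hypothetical.
class TooManyCallsException(Exception):
    pass

def bounded_recurse(lexicon, word, *args):
    """Illustrative wrapper: count recurse_ calls and abort past the bound."""
    lexicon.total_calls += 1
    if lexicon.total_calls > lexicon.max_total_calls:
        raise TooManyCallsException()
    return lexicon.value[word](*args)  # assumes word meanings are callable, as in SimpleLexicon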
def __init__(self, N=4, grammar=None, argument_type='FUNCTION', variable_weight=2.0, value=None, **kwargs):
    SimpleLexicon.__init__(self, value=value)
    self.N = N

    if grammar is not None:  # else we are in a copy initializer, and the rest will get copied

        for w in xrange(N):
            nthgrammar = deepcopy(grammar)

            # Add all the bound variables
            args = []
            for xi in xrange(w):  # word w takes w arguments, so word 0 takes none
                argi = 'x%s' % xi

                # Add a rule for the variable
                nthgrammar.add_rule(argument_type, argi, None, variable_weight)

                args.append(argi)

            # and add a rule for the n-ary recursion
            nthgrammar.add_rule('LIST', 'recurse_', ['FUNCTION'] * w, 1.)

            # we wrap the content with lambda to make it callable at the next recursion level
            nthgrammar.add_rule('FUNCTION', 'lambda', ['LIST'], 1.)
            nthgrammar.add_rule('LIST', '(%s)()', ['FUNCTION'], 1.)

            self.set_word(w, self.make_hypothesis(grammar=nthgrammar, args=args))
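# Illustrative sketch (not from the source) of what the loop above builds for the
# first few words. StubGrammar is a hypothetical stand-in for a LOTlib-style grammar
# that only records the rules being added: word w gets bound variables x0..x(w-1),
# a w-ary recurse_ rule, and the lambda-wrapping rules.
class StubGrammar(object):
    def __init__(self):
        self.rules = []
    def add_rule(self, nt, name, to, p):
        self.rules.append((nt, name, to, p))

for w in xrange(3):
    g = StubGrammar()
    args = ['x%s' % xi for xi in xrange(w)]
    for argi in args:
        g.add_rule('FUNCTION', argi, None, 2.0)
    g.add_rule('LIST', 'recurse_', ['FUNCTION'] * w, 1.)
    g.add_rule('FUNCTION', 'lambda', ['LIST'], 1.)
    g.add_rule('LIST', '(%s)()', ['FUNCTION'], 1.)
    print w, args, len(g.rules)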
def __init__(self, N=4, grammar=None, argument_type='LIST', variable_weight=2.0, value=None, **kwargs):
    SimpleLexicon.__init__(self, value=value)
    self.N = N

    if grammar is not None:  # else we are in a copy initializer, and the rest will get copied

        for w in xrange(N):
            nthgrammar = deepcopy(grammar)

            # Add all the bound variables
            args = []
            for xi in xrange(w):  # word w takes w arguments, so word 0 takes none
                argi = 'x%s' % xi

                # Add a rule for the variable
                nthgrammar.add_rule(argument_type, argi, None, variable_weight)

                args.append(argi)

            # and add a rule for the n-ary recursion
            nthgrammar.add_rule('LIST', 'recurse_', [argument_type] * w, 1.)

            self.set_word(w, self.make_hypothesis(grammar=nthgrammar, args=args))
def __init__(self, grammar=None, **kwargs):
    SimpleLexicon.__init__(self, maxnodes=50, **kwargs)

    self.grammar = grammar  # the base grammar (with 0 included); we copy it and add in other recursions on self.deepen()
    self.N = 0  # the number of meanings we have

    # self.outlier = -1000.0  # read in MultinomialLikelihood

    # The most internal recurse_ calls we can make without raising an exception.
    # It gets increased on every deepen().
    self.max_total_calls = 10
    self.total_calls = 0

    self.distance = 100.0  # penalize
def __init__(self, N=4, grammar=None, argument_type='FUNCTION', variable_weight=2.0, value=None, recurse_bound=25, **kwargs):
    SimpleLexicon.__init__(self, value=value)

    self.base_grammar = deepcopy(grammar)
    self.argument_type = argument_type
    self.variable_weight = variable_weight
    self.recurse_bound = recurse_bound
    self.recursive_call_depth = 0

    if grammar is not None:  # else we are in a copy initializer, and the rest will get copied
        self.N = 0
        for w in xrange(N):
            self.add_new_word()
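# add_new_word() itself is not shown in this snippet. A plausible sketch, based on the
# per-word construction in the earlier variants (hypothetical, not the source's actual
# implementation): word N gets N bound variables and an N-ary recurse_ rule built from
# base_grammar, and N is incremented.
def add_new_word(self):
    w = self.N
    nthgrammar = deepcopy(self.base_grammar)
    args = []
    for xi in xrange(w):
        argi = 'x%s' % xi
        nthgrammar.add_rule(self.argument_type, argi, None, self.variable_weight)
        args.append(argi)
    nthgrammar.add_rule('LIST', 'recurse_', [self.argument_type] * w, 1.)
    self.set_word(w, self.make_hypothesis(grammar=nthgrammar, args=args))
    self.N += 1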
def compute_prior(self):
    """
    Assign zero probability to hypotheses with repeated word meanings.
    BUT we can only discover repeats by lambda-reducing each value.
    """
    seen = set()
    for k, v in self.value.items():
        try:
            reduced = str(lambda_reduce(v.value))
        except EvaluationException:
            return -Infinity

        if reduced in seen:
            return -Infinity
        else:
            seen.add(reduced)

    return SimpleLexicon.compute_prior(self)
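# Why reduce before comparing: two word meanings can differ syntactically yet denote
# the same function, e.g. (lambda x: x)(y) beta-reduces to just y, so only the reduced
# forms are compared. Below, a hypothetical normalize() plays the role of lambda_reduce above.
def has_repeats(values, normalize):
    seen = set()
    for v in values:
        reduced = str(normalize(v))
        if reduced in seen:
            return True
        seen.add(reduced)
    return False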
def compute_prior(self):
    # Each additional word costs one coin flip: subtract log(2) per word, scaled by the prior temperature.
    return SimpleLexicon.compute_prior(self) - self.N * log(2.0) / self.prior_temperature
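# Worked example of the coin-flip penalty: with N = 4 words and prior_temperature = 1.0,
# the lexicon prior is lowered by 4 * log(2) ~= 2.77 nats relative to the base prior.
from math import log
print 4 * log(2.0) / 1.0  # ~2.7726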
def __init__(self, alpha=0.99, **kwargs):
    self.alpha = alpha
    SimpleLexicon.__init__(self, **kwargs)