Example #1
    def __init__(self, grammar=None, alphabet_size=2, **kwargs):
        SimpleLexicon.__init__(self, **kwargs)
        self.grammar = grammar  # the base grammar (with 0 included); we copy it and add further recursions in self.deepen()
        self.N = 0  # the number of meanings we have
        self.max_total_calls = MAX_SELF_RECURSION  # the maximum number of nested recurse_ calls we can make without raising an exception. It gets increased on every deepen(). NOTE: if this is small, it bounds the length of each string
        self.total_calls = 0
        self.alphabet_size = alphabet_size
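This constructor (and the identical Example #3 below) relies on names defined elsewhere in its module. A minimal sketch of that assumed context follows; the import path and the constant's value are guesses, not part of the snippet:

    # Assumed context -- hypothetical, for illustration only
    from LOTlib.Hypotheses.Lexicon.SimpleLexicon import SimpleLexicon  # assumed import path
    MAX_SELF_RECURSION = 10  # assumed module-level cap on nested recurse_ calls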
Example #2
    def __init__(self, N=4, grammar=None, argument_type='FUNCTION', variable_weight=2.0, value=None, **kwargs):

        SimpleLexicon.__init__(self, value=value)

        self.N = N

        if grammar is not None: # else we are in a copy initializer, and the rest will get copied
            for w in xrange(N):
                nthgrammar = deepcopy(grammar)

                # Add all the bound variables
                args = [  ]
                for xi in xrange(w):  # no first argument
                    argi = 'x%s'%xi

                    # Add a rule for the variable
                    nthgrammar.add_rule(argument_type, argi, None, variable_weight)

                    args.append(argi)

                # and add a rule for the n-ary recursion
                nthgrammar.add_rule('LIST', 'recurse_', ['FUNCTION']*(w), 1.)
                # we wrap the content with lambda to make it callable for next recursion level
                nthgrammar.add_rule('FUNCTION', 'lambda', ['LIST'], 1.)
                nthgrammar.add_rule('LIST', '(%s)()', ['FUNCTION'], 1.)

                self.set_word(w, self.make_hypothesis(grammar=nthgrammar, args=args))
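A rough usage sketch for the constructor above. The class name, the import path, and the grammar rules below are assumptions; only the keyword arguments come from the snippet:

    # Hypothetical setup -- names and import path are assumptions, not from the example
    from LOTlib.Grammar import Grammar

    base_grammar = Grammar()
    base_grammar.add_rule('LIST', 'cons_', ['FUNCTION', 'LIST'], 1.0)
    base_grammar.add_rule('LIST', 'nil_', None, 1.0)

    # Each word w would then get its own deepcopy of base_grammar, extended with
    # bound variables x0..x{w-1}, a w-ary recurse_ rule, and the lambda-wrapping rules:
    # lex = RecursiveLexicon(N=4, grammar=base_grammar, variable_weight=2.0)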
Example #3
    def __init__(self, grammar=None, alphabet_size=2, **kwargs):
        SimpleLexicon.__init__(self, **kwargs)
        self.grammar = grammar  # the base grammar (with 0 included); we copy it and add further recursions in self.deepen()
        self.N = 0  # the number of meanings we have
        self.max_total_calls = MAX_SELF_RECURSION  # the maximum number of nested recurse_ calls we can make without raising an exception. It gets increased on every deepen(). NOTE: if this is small, it bounds the length of each string
        self.total_calls = 0
        self.alphabet_size = alphabet_size
Example #4
    def __init__(self,
                 N=4,
                 grammar=None,
                 argument_type='LIST',
                 variable_weight=2.0,
                 value=None,
                 **kwargs):

        SimpleLexicon.__init__(self, value=value)

        self.N = N

        if grammar is not None:  # else we are in a copy initializer, and the rest will get copied
            for w in xrange(N):
                nthgrammar = deepcopy(grammar)

                # Add all the bound variables
                args = []
                for xi in xrange(w):  # no first argument
                    argi = 'x%s' % xi

                    # Add a rule for the variable
                    nthgrammar.add_rule(argument_type, argi, None,
                                        variable_weight)

                    args.append(argi)

                # and add a rule for the n-ary recursion
                nthgrammar.add_rule('LIST', 'recurse_', [argument_type] * (w),
                                    1.)

                self.set_word(
                    w, self.make_hypothesis(grammar=nthgrammar, args=args))
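The main difference from Example #2 is that the recursion's arguments are typed 'LIST' (argument_type='LIST') rather than 'FUNCTION', so the lambda-wrapping rules for 'FUNCTION' are not added here.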
Example #5
    def __init__(self, grammar=None, **kwargs):
        SimpleLexicon.__init__(self, maxnodes=50, **kwargs)
        self.grammar = grammar  # the base grammar (with 0 included); we copy it and add further recursions in self.deepen()
        self.N = 0  # the number of meanings we have
        # self.outlier = -1000.0  # read in MultinomialLikelihood
        self.max_total_calls = 10  # the maximum number of nested recurse_ calls we can make without raising an exception. It gets increased on every deepen()
        self.total_calls = 0
        self.distance = 100.0  # penalize
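This variant hard-codes the recursion bound (max_total_calls = 10), caps hypothesis size by passing maxnodes=50 to SimpleLexicon, and stores a fixed distance constant, presumably used as a penalty elsewhere in the class.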
Example #6
    def __init__(self, N=4, grammar=None, argument_type='FUNCTION', variable_weight=2.0, value=None, recurse_bound=25, **kwargs):

        SimpleLexicon.__init__(self, value=value)
        self.base_grammar = deepcopy(grammar)
        self.argument_type = argument_type
        self.variable_weight = variable_weight
        self.recurse_bound = recurse_bound

        self.recursive_call_depth = 0

        if grammar is not None: # else we are in a copy initializer, and the rest will get copied
            self.N = 0

            for w in xrange(N):
                self.add_new_word()
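Example #7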
    def __init__(self,
                 N=4,
                 grammar=None,
                 argument_type='FUNCTION',
                 variable_weight=2.0,
                 value=None,
                 recurse_bound=25,
                 **kwargs):

        SimpleLexicon.__init__(self, value=value)
        self.base_grammar = deepcopy(grammar)
        self.argument_type = argument_type
        self.variable_weight = variable_weight
        self.recurse_bound = recurse_bound

        self.recursive_call_depth = 0

        if grammar is not None:  # else we are in a copy initializer, and the rest will get copied
            self.N = 0

            for w in xrange(N):
                self.add_new_word()
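In Examples #6 and #7 the per-word grammar construction seen in Examples #2 and #4 has apparently been factored out into add_new_word() (not shown here): the constructor only stores the base grammar, argument type, variable weight, and recursion bound, then calls add_new_word() N times.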
Example #8
    def compute_prior(self):
        """
            Assign 0-probability to hypotheses with repeat values.
            BUT we only discover repeat values by reducing.
        """
        seen = set()
        for k, v in self.value.items():
            try:
                reduced = str(lambda_reduce(v.value))
            except EvaluationException:
                return -Infinity

            if reduced in seen:
                return -Infinity
            else:
                seen.add(reduced)

        return SimpleLexicon.compute_prior(self)
Example #9
    def compute_prior(self):
        """
            Assign 0-probability to hypotheses with repeat values.
            BUT we only discover repeat values by reducing.
        """
        seen = set()
        for k, v in self.value.items():
            try:
                reduced = str(lambda_reduce(v.value))
            except EvaluationException:
                return -Infinity

            if reduced in seen:
                return -Infinity
            else:
                seen.add(reduced)

        return SimpleLexicon.compute_prior(self)
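Examples #8 and #9 assume several names from their surrounding module. A sketch of that assumed context follows; everything below is a guess apart from the names that appear in the snippets:

    # Assumed context -- hypothetical, for illustration only
    Infinity = float('inf')  # so that -Infinity is a legal log-prior
    # lambda_reduce(expr): assumed to beta-reduce a hypothesis value to a normal form
    # EvaluationException: assumed to be raised when reduction fails or does not terminate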
Example #10
    def compute_prior(self):
        return SimpleLexicon.compute_prior(self) - self.N * log(2.0) / self.prior_temperature  # coin flip for each additional word
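Each additional word therefore subtracts log(2) from the log-prior (a fair coin flip per word), scaled by the prior temperature; for instance, with N=4 and prior_temperature=1.0 the penalty is 4 * log(2) ≈ 2.77 nats.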
Example #11
    def __init__(self, alpha=0.99, **kwargs):
        self.alpha = alpha

        SimpleLexicon.__init__(self, **kwargs)
Example #12
    def __init__(self, alpha=0.99, **kwargs):
        self.alpha = alpha

        SimpleLexicon.__init__(self, **kwargs)
Example #13
    def compute_prior(self):
        return SimpleLexicon.compute_prior(self) - self.N * log(
            2.0) / self.prior_temperature  # coin flip for each additional word