Example no. 1
0
    "Which model to run on (Number, Galileo, RationalRules, SimpleMagnetism)")
parser.add_option("--print-every",
                  dest="PRINTEVERY",
                  type="int",
                  default=1000,
                  help="Evaluation prints every this many")
options, _ = parser.parse_args()

# ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
# Define the test model
# ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~

# The three Number* models are identical except for how many data points are
# generated, so dispatch through a table instead of three copied branches.
NUMBER_MODEL_SIZES = {"Number100": 100, "Number300": 300, "Number1000": 1000}

if options.MODEL in NUMBER_MODEL_SIZES:
    # Load the data.
    # NOTE(review): synchronize_variable presumably shares a single draw of the
    # data across parallel chains -- confirm against its definition.
    from LOTlib.Examples.Number.Shared import generate_data, grammar, make_h0
    ndata = NUMBER_MODEL_SIZES[options.MODEL]
    data = synchronize_variable(lambda: generate_data(ndata))

elif options.MODEL == "Galileo":
    from LOTlib.Examples.SymbolicRegression.Galileo import data, grammar, make_h0
elif options.MODEL == "RationalRules":
    from LOTlib.Examples.RationalRules.Shared import grammar, data, make_h0
Example no. 2
0
                if not t.is_terminal():
                    t.resample_p = 0.0
                else:
                    t.resample_p = 1.0
        
        # initialize each chain
        MultipleChainMCMC.__init__(self, lambda: None, data, steps=steps, nchains=len(partitions), **kwargs)
        
        # And set each to the partition
        for c,p in zip(self.chains, partitions):
            c.set_state(make_h0(value=p))
        
        # and store these
        self.partitions = map(copy, partitions)


if __name__ == "__main__":
    
    from LOTlib.Examples.Number.Shared import grammar, make_h0, generate_data
    data = generate_data(300)
    
    #from LOTlib.Examples.RegularExpression.Shared import grammar, make_h0, data
        
    #from LOTlib.Examples.RationalRules.Shared import grammar, data, make_h0
    
    #PartitionMCMC(grammar, make_h0, data, 2, skip=0)
    for h in PartitionMCMC(grammar, make_h0, data, max_N=100, skip=0):
        print h.posterior_score, h
        break
    
 
Example no. 3
0
        MHSampler.__init__(self, h0, data, **kwargs)
        self.penalty=penalty

        self.seen = Counter()

    def internal_sample(self, h):
        """Record that one more sample of hypothesis `h` has been drawn."""
        # self.seen is a Counter, so missing keys start at zero.
        self.seen[h] = self.seen[h] + 1

    def compute_posterior(self, h, data):
        """Return (prior, likelihood) with the prior penalized by how often
        we have already sampled `h` (the taboo penalty)."""
        base_prior, base_likelihood = MHSampler.compute_posterior(self, h, data)
        # The more often h was seen, the larger the (additive) adjustment.
        taboo = self.seen[h] * self.penalty
        return base_prior + taboo, base_likelihood


if __name__ == "__main__":

    from LOTlib.Examples.Number.Shared import generate_data, NumberExpression, grammar, get_knower_pattern
    from LOTlib.Miscellaneous import q

    data = generate_data(500)
    h0 = NumberExpression(grammar)
    for h in TabooMCMC(h0, data, steps=10000):

        print q(get_knower_pattern(h)), h.posterior_score, h.prior, h.likelihood, q(h)
Example no. 4
0
parser.add_option("--out", dest="OUT", type="string", help="Output prefix", default="output/proposal")
parser.add_option("--samples", dest="SAMPLES", type="int", default=100000, help="Number of samples to run")
parser.add_option("--chains", dest="CHAINS", type="int", default=10, help="Number of chains to run in parallel")
# NOTE(review): dest "REPETITONS" is misspelled ("REPETITIONS"), but renaming it
# would break any code reading options.REPETITONS -- left unchanged deliberately.
parser.add_option("--repetitions", dest="REPETITONS", type="int", default=100, help="Number of repetitions to run")
parser.add_option("--model", dest="MODEL", type="str", default="Number", help="Which model to run on (Number, Galileo, RationalRules, SimpleMagnetism)")
parser.add_option("--print-every", dest="PRINTEVERY", type="int", default=1000, help="Evaluation prints every this many")
options, _ = parser.parse_args()

# ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
# Define the test model
# ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~

# The Number* models differ only in how much data is generated; dispatch via a
# table rather than three copied branches. (Also normalizes the tab-indented
# elif bodies below to 4 spaces, matching the rest of the file.)
NUMBER_MODEL_SIZES = {"Number100": 100, "Number300": 300, "Number1000": 1000}

if options.MODEL in NUMBER_MODEL_SIZES:
    # Load the data.
    # NOTE(review): synchronize_variable presumably shares one data draw across
    # parallel chains -- confirm against its definition.
    from LOTlib.Examples.Number.Shared import generate_data, grammar, make_h0
    ndata = NUMBER_MODEL_SIZES[options.MODEL]
    data = synchronize_variable(lambda: generate_data(ndata))

elif options.MODEL == "Galileo":
    from LOTlib.Examples.SymbolicRegression.Galileo import data, grammar, make_h0
elif options.MODEL == "RationalRules":
    from LOTlib.Examples.RationalRules.Shared import grammar, data, make_h0
Example no. 5
0
        # initialize each chain
        MultipleChainMCMC.__init__(self,
                                   lambda: None,
                                   data,
                                   steps=steps,
                                   nchains=len(partitions),
                                   **kwargs)

        # And set each to the partition
        for c, p in zip(self.chains, partitions):
            c.set_state(make_h0(value=p))

        # and store these
        self.partitions = map(copy, partitions)


if __name__ == "__main__":

    from LOTlib.Examples.Number.Shared import grammar, make_h0, generate_data
    data = generate_data(300)

    #from LOTlib.Examples.RegularExpression.Shared import grammar, make_h0, data

    #from LOTlib.Examples.RationalRules.Shared import grammar, data, make_h0

    #PartitionMCMC(grammar, make_h0, data, 2, skip=0)
    for h in PartitionMCMC(grammar, make_h0, data, max_N=100, skip=0):
        print h.posterior_score, h
        break
Example no. 6
0
class MemoizedMHSampler(MHSampler):
    """An MHSampler whose compute_posterior results are memoized in an LRU cache.

    Behaves exactly like MHSampler except that repeated posterior evaluations
    of the same hypothesis are served from `self.mem`.
    """

    def __init__(self, h0, data, memoize=Infinity, **kwargs):
        MHSampler.__init__(self, h0, data, **kwargs)

        # Maps hypothesis -> return value of compute_posterior.
        self.mem = LRUCache(maxsize=memoize)

    def compute_posterior(self, h, data):
        """Return the (possibly cached) posterior components for `h`."""
        if h not in self.mem:
            # Cache miss: do the real computation and remember the result.
            result = MHSampler.compute_posterior(self, h, data)
            self.mem[h] = result
            return result
        # Cache hit: h.posterior_score may never have been set this run.
        result = self.mem[h]
        h.posterior_score = sum(result)
        return result

if __name__ == "__main__":

    # Demo: run a memoized sampler on the Number model.
    from LOTlib.Examples.Number.Shared import generate_data, NumberExpression, grammar, get_knower_pattern

    demo_data = generate_data(100)
    initial = NumberExpression(grammar)
    sampler = MemoizedMHSampler(initial, demo_data, steps=1000)
    for h in sampler:
        pass  # samples are drawn purely for their side effects here
Example no. 7
0
        self.penalty = penalty

        self.seen = Counter()

    def internal_sample(self, h):
        """Bump the draw count for hypothesis `h`."""
        # Counter.update with a one-element iterable increments h's count by 1.
        self.seen.update([h])

    def compute_posterior(self, h, data):
        """Wrap MHSampler's posterior with a taboo penalty on the prior.

        The penalty grows with how many times `h` has already been sampled.
        """
        prior, likelihood = MHSampler.compute_posterior(self, h, data)
        penalty = self.penalty * self.seen[h]
        return prior + penalty, likelihood


if __name__ == "__main__":

    from LOTlib.Examples.Number.Shared import generate_data, NumberExpression, grammar, get_knower_pattern
    from LOTlib.Miscellaneous import q

    data = generate_data(500)
    h0 = NumberExpression(grammar)
    for h in TabooMCMC(h0, data, steps=10000):

        print q(get_knower_pattern(
            h)), h.posterior_score, h.prior, h.likelihood, q(h)