Example #1
from math import sqrt, tanh
from random import random

import inputs
import linalg

def instantiate( constraints, input, num_copies=1 ):
    A = [row for row in constraints]    # Make a copy.

    if linalg.inconsistent(A):
        raise Exception("Unsatisfiable.")

    # http://en.wikipedia.org/wiki/Gaussian_elimination
    A = linalg.gaussianElim(A)

    free_vars = linalg.determineFreeVariables(A)

    networks = [] # List to return
    for i in range(num_copies):
        assignments = {}
        for var in free_vars:
            assignments[var] = random()*20 - 10    # Random value in [-10, 10).

        solution = linalg.backsolve(A, assignments)

        # If there are N elements of the solution, that amounts to
        #  sqrt(N) neurons with N connection weights between them all.
        n = int(sqrt(len(solution)))
        network = {
                'n' : n + 2,
                'consts' : list(solution.values()),
                'eqns' :
                    inputs.input_as_lambdas(input) +
                    # Bind i as a default argument so each lambda keeps its
                    # own neuron index instead of the loop's final value of i.
                    [ lambda v, c, i=i : tanh(sum([v[j+2] + c[i*n + j]
                        for j in range(n)])) for i in range(n)]
                }
        networks.append(network)
    return networks 
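
A rough usage sketch (hedged: the constraints and input values, the simulator module, and its iterate call are assumptions drawn from the other examples, not part of this snippet):

import simulator

# Hypothetical driver: build a few candidate networks from one constraint
# system and run the first one forward for 100 steps.
networks = instantiate(constraints, input, num_copies=5)
state = [0.0] * networks[0]['n']          # one coordinate per equation
for _ in range(100):
    state = simulator.iterate(networks[0], state)
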
Example #2
from math import fabs, log, sqrt

import constants
import inputs
import simulator

def input_dist(o1, o2):
    i1, i2 = o1["input"], o2["input"]
    eqns1 = inputs.input_as_lambdas(i1)
    eqns2 = inputs.input_as_lambdas(i2)
    syst1 = {"n": 2, "consts": [], "eqns": eqns1}
    syst2 = {"n": 2, "consts": [], "eqns": eqns2}
    test_iterates = constants.num_warmup_iterations + constants.num_measur_iterations
    x1, x2 = [0, 0], [0, 0]
    traj1, traj2 = [], []
    for i in range(test_iterates):
        traj1.append(x1)
        traj2.append(x2)
        x1 = simulator.iterate(syst1, x1)
        x2 = simulator.iterate(syst2, x2)
    # Mean absolute difference of coordinate 1 (the input signal) over the run,
    # compressed through a square root and a double log so large divergences saturate.
    diff = [fabs(traj1[i][1] - traj2[i][1]) / test_iterates for i in range(len(traj1))]
    dist = log(log(sqrt(sum(diff)) + 1) + 1)
    return dist
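
simulator.iterate is not shown in these examples; a minimal sketch of the contract the network dicts appear to assume (each entry of 'eqns' maps the previous state vector and the constants to one new coordinate) could look like this:

def iterate(system, state):
    # Assumed contract: eqns[k](state, consts) yields the next value of
    # coordinate k, so the new state has one entry per equation.
    return [eqn(state, system['consts']) for eqn in system['eqns']]
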
Example #3
from math import tanh

import inputs

def instantiate(weights, input):
    # Turn the weights into one giant row of constants:
    #  row i holds neuron i's n connection weights followed by its bias.
    consts = []
    for row in weights:
        consts = consts + row
    n = len(weights)

    network = {
            'n' : n + 2,
            'consts' : consts,
            'eqns' : inputs.input_as_lambdas(input) +
            # Bind i as a default argument so each lambda keeps its own row
            # index instead of the loop's final value of i.
            [ lambda v, c, i=i : tanh( sum( [ v[j+2] + c[i*(n+1) + j]
                for j in range(n)]) + c[i*(n+1) + n] ) for i in range(n)]
            }
    # WOW WOW
    # Fixed a major bug here.  The input was *never* driving the network.
    # eqns[0] and eqns[1] come from input_as_lambdas, so eqns[2] is the first
    # neuron equation; replace it so the input value v[1] actually feeds it.
    network['eqns'][2] = lambda v, c : tanh(
            sum( [ v[j+2] + c[j] for j in range(n)] ) + v[1] + c[n])

    return network
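
A hypothetical call, assuming weights is a list of n rows with n+1 entries each (n connection weights plus a bias per neuron) and some_input is whatever inputs.input_as_lambdas accepts:

weights = [[0.5, -0.3, 0.1],     # neuron 0: two weights and a bias
           [0.2,  0.8, -0.4]]    # neuron 1: two weights and a bias
net = instantiate(weights, some_input)
# net['n'] is 4: two input equations plus two neuron equations.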