import numpy as np
# func (helper functions) and c (model constants) are project-local modules.

def s(time, position, u_last, delta_t, delta_x, j):
    """Source term at grid point j; each row of u_last holds (density, velocity)."""
    s_step = np.zeros(2)
    # Density source: inflow rate q_in(t) weighted by the spatial profile phi(x).
    s_step[0] = func.q_in(time) * func.phi(position)
    # Velocity source: relaxation towards a density-dependent speed plus a
    # central-difference diffusion term.
    v_eq = (c.V0 * (1 - u_last[j, 0] / c.RHO_MAX)) / (
        1 + c.E * (u_last[j, 0] / c.RHO_MAX) ** 4)
    s_step[1] = (v_eq - u_last[j, 1]) / c.TAU + c.MY * (
        u_last[j + 1, 1] - 2 * u_last[j, 1] +
        u_last[j - 1, 1]) / (u_last[j, 0] * delta_x ** 2)
    return s_step
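
# Minimal usage sketch (illustrative only): evaluate the source term at every interior
# grid point. The names t, x, u_last and this driver loop are assumptions, not part of
# the original module.
source = np.zeros_like(u_last)
for j in range(1, len(u_last) - 1):
    source[j] = s(t, x[j], u_last, delta_t, delta_x, j)
# An explicit time step would then combine `source` with the discretised flux terms.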
    def cLearning(self, n_train, train_data, gamma_pos, gamma_neg):
        """ Function that learns positive and negative conceptors on data with the following steps:
        :Description: Function that learns positive and negative conceptors on data with the following steps
            1. create Reservoir
            2. Feed each sample of each syllable in reservoir and collect its states
            3. Use states to compute positive conceptor
            4. Use Conceptor logic to compute negative conceptor
        
        :Parameters:
            1. n_train:     number of training samples used for each syllable
            2. train_data:  training data to be used (list with entries for each syllable) 
            3. gamma_pos:   aperture of the positive conceptors (default = 25)
            4. gamma_neg:   aperture of the negative conceptors (default = 27)
        """
        self.c_pos = []

        # loop over syllables
        for syllable in np.array(train_data):
            R_syll = np.zeros((syllable.shape[1] * (self.size + syllable.shape[2]), syllable.shape[0]))
            
            # feed each sample of syllable into reservoir and collect resulting states
            for i, sample in enumerate(syllable):
                self.res.run([sample], t_learn=len(sample), t_wash=0, load=False)
                states = np.concatenate((np.squeeze(self.res.TrainArgs.T), sample), axis=1)
                R_syll[:, i] = np.reshape(states, states.shape[0] * states.shape[1])
            
            # calculate preliminary conceptor for syllable 
            R = np.dot(R_syll, R_syll.T) / n_train
            C_tmp = np.dot(R, np.linalg.inv(R + np.eye(len(R))))
            self.c_pos.append(C_tmp)

        self.c_neg = []

        # calculate preliminary negative conceptor for each positive conceptor
        for i in range(len(self.c_pos)):
            C = np.zeros_like(self.c_pos[0])
            for j in list(range(0, i)) + list(range(i + 1, len(self.c_pos))):
                C = fct.OR(C, self.c_pos[j])
            self.c_neg.append(C)
        
        # calculate final conceptors from preliminary ones using respective apertures
        for i in range(len(self.c_pos)):
            self.c_pos[i] = fct.phi(self.c_pos[i], gamma_pos)
            self.c_neg[i] = fct.phi(self.c_neg[i], gamma_neg)
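
    # Hypothetical follow-up sketch (not from the original class): with c_pos and c_neg
    # learned, a test state vector z can be scored by quadratic conceptor evidence and
    # assigned to the best-scoring syllable. The method name and the exact evidence
    # combination below are assumptions for illustration.
    def classify(self, z):
        """Sketch: return the index of the syllable whose conceptors best explain z."""
        evidence = []
        for C_pos, C_neg in zip(self.c_pos, self.c_neg):
            e_pos = z @ C_pos @ z                           # positive evidence
            e_neg = z @ (np.eye(len(C_neg)) - C_neg) @ z    # negative evidence (NOT of c_neg)
            evidence.append(e_pos + e_neg)
        return int(np.argmax(evidence))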
Example #3
def plot_ridge(alpha):
    """Plot ridge-regression polynomial fits of degree M = 1, 2, 5, 10 for regularisation strength alpha."""
    x_plot = np.linspace(0, 1, 100)
    plt.plot(X, Y, 'o', linewidth=2)              # training points
    y_true = fun.q2(x_plot)
    plt.plot(x_plot, y_true, '--', color='purple', linewidth=2, label='true function')
    for i in [1, 2, 5, 10]:
        w = fun.ridge(X, Y, i, alpha)             # ridge solution for the order-i feature map fun.phi
        y_plot = np.dot(fun.phi(x_plot, i), w)    # evaluate the fitted model on the plotting grid
        plt.plot(x_plot, y_plot, label='M = ' + str(i), linewidth=1.5)
    plt.legend(loc=1, fontsize=10)
    plt.xlabel('x')
    plt.xticks(np.linspace(0, 1, 5))
    plt.ylabel('y')
    plt.title('lambda = ' + str(alpha))
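
# Hypothetical usage of plot_ridge, assuming X, Y, fun and matplotlib.pyplot (plt) are
# set up as in the snippet above; the alpha values are arbitrary examples.
for alpha in [0.0, 0.001, 0.1]:
    plt.figure()
    plot_ridge(alpha)
plt.show()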
Example #4
def resilience(n):
    """Resilience R(n) = phi(n) / (n - 1): the fraction of fractions k/n (1 <= k < n) already in lowest terms."""
    return phi(n) / (n - 1)
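
# Quick self-contained check of the resilience definition (illustration only; uses a
# plain-Python totient instead of the project's functions.phi): phi(12) = 4, so
# resilience(12) = 4 / 11.
from math import gcd

def _totient(n):
    return sum(1 for k in range(1, n) if gcd(k, n) == 1)

print(_totient(12) / (12 - 1))   # 0.3636...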
Example #5
from functions import phi, prime_sieve

# Target resilience ratio: find the first n with phi(n) / (n - 1) below this value.
check = 15499 / 94744
"""
Disabled brute force over all candidate denominators:
for i in range(2, 94744):
    if i % 1000 == 0:
        print(i // 1000, "%")
    if 1.0 * phi(i) / (i - 1) < check:
        print(i)
        break
"""
primes = prime_sieve(50000, output=[])

# Multiplying successive primes (primorials) drives phi(n) / (n - 1) down fastest.
val = 1
for p in primes:
    if val > 1000000:
        print("Exceeded limit")
        break
    # print(val)
    val *= p
    if 1.0 * phi(val) / (val - 1) < 0.4:
        print(val)
        break
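
# Self-contained sketch of why primorials are the natural candidates (illustration only,
# independent of the functions module): for n = 2 * 3 * 5 * ... * p the totient factors
# as the product of (p_i - 1), so phi(n) / (n - 1) shrinks with every extra prime.
from fractions import Fraction

small_primes = [2, 3, 5, 7, 11, 13, 17, 19, 23, 29]   # hard-coded for the illustration
n, phi_n = 1, 1
for p in small_primes:
    n *= p
    phi_n *= p - 1                       # phi is multiplicative and phi(p) = p - 1
    print(p, n, float(Fraction(phi_n, n - 1)))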
Example #6
    fun.graph_reg(X, Y, i, w, f=fun.q2)

# Plot 1

x_plot = np.linspace(0, 1, 100)
plt.plot(X, Y, 'o', linewidth=2)                  # training points
y_true = fun.q2(x_plot)
plt.plot(x_plot, y_true, '--', color='purple', linewidth=2, label='true function')
# Unregularised maximum-likelihood polynomial fits for several degrees M.
for i in [1, 2, 5, 10]:
    w = fun.ml_weight(X, Y, i)
    y_plot = np.dot(fun.phi(x_plot, i), w)
    plt.plot(x_plot, y_plot, label='M = ' + str(i), linewidth=1.5)
plt.legend(loc=1, fontsize=10)
plt.xlabel('x')
plt.ylabel('y')

# 2.2

# Check gradient

fun.check_grad(X, Y, M=11, t=0)

# Plot error

SSE_s = np.zeros(11)
for i in range(11):