def task31():
    training, testing, _ = generate_data_task31(lambda x: square(np.sin(x)), 0)
    #training, testing = generate_data_task31(lambda x:square(np.sin(x)), 0.1)
    rbf_nodes = get_radial_coordinates(0)
    centroids = Fixed(rbf_nodes['nodes'])
    sigma = 1.0
    RadialBasisNetwork = Network(X=training['X'],
                                 Y=training['Y'],
                                 sigma=sigma,
                                 hidden_nodes=rbf_nodes['N'],
                                 centroids=centroids,
                                 initializer=RandomNormal())

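    # Fit the RBF output weights in one batch least-squares step (epochs=1).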
    RadialBasisNetwork.train(epochs=1,
                             optimizer=LeastSquares(),
                             epoch_shuffle=True)

    prediction, residual_error = RadialBasisNetwork.predict(
        testing['X'], testing['Y'])

    print('residual_error', residual_error)
    plt.plot(testing['X'], testing['Y'], label='True')
    plt.plot(testing['X'],
             np.where(prediction >= 0, 1, -1),
             label='Prediction')
    plt.ylabel('sign(sin(2x))')
    plt.xlabel('x')
    plt.scatter(rbf_nodes['nodes'], np.zeros(rbf_nodes['nodes'].size))
    plt.legend()
    plot_centroids_1d(centroids, sigma)
    plt.show()
Example no. 2
def get_leaders_and_outermosts():
    guys = Reader.read_lines("datasets/enron/enron_guys.txt")
    guys = {int(nid): email for nid, email in guys}

    global_leaders, global_outermosts = defaultdict(list), defaultdict(list)
    for month in range(1, 13):
        n = Network('datasets/enron/timeslots/{:02d}-filtered2.edges'.format(month),
                    is_directed=False,
                    use_communities=True)
        communities = set(flatten_list(n.communities.values()))
        month_leaders, month_outermosts = set(), set()
        for c in communities:
            n.filter_community([c])
            leaders, outermosts = RoleMining(n).find_roles()
            month_leaders.update(leaders)
            month_outermosts.update(outermosts)
            n.unfilter_graph()

        for l, c in month_leaders:
            global_leaders[l].append((month, c))
        for o, c in month_outermosts:
            global_outermosts[o].append((month, c))

    print()
    for id, months in global_outermosts.items():
        if guys[id] in the_losers:
            print(guys[id], months)
Example no. 3
def task32():
    training, testing, testing_clean = generate_data_task31(
        lambda x: np.sin(x), 0.1)

    sigma = [0.1, 0.3, 0.5, 1.0, 1.3]
    tests = [1, 2, 3, 4]  # node-placement variants (weak, tighter, random, ...)

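    # Sweep every node-placement variant against every RBF width (sigma).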
    for t in tests:
        rbf_nodes = get_radial_coordinates(t)
        centroids = Fixed(rbf_nodes['nodes'])
        for sig in sigma:

            RadialBasisNetwork = Network(X=training['X'],
                                         Y=training['Y'],
                                         sigma=sig,
                                         hidden_nodes=rbf_nodes['N'],
                                         centroids=centroids,
                                         initializer=RandomNormal(std=0.1))

            data = RadialBasisNetwork.train(
                epochs=10000,
                epoch_shuffle=True,
                #optimizer=LeastSquares())
                optimizer=DeltaRule(eta=0.001))

            prediction_noisy, residual_error_noisy = RadialBasisNetwork.predict(
                testing['X'], testing['Y'])
            prediction_clean, residual_error_clean = RadialBasisNetwork.predict(
                testing_clean['X'], testing_clean['Y'])

            print('residual error', residual_error_clean)
            print('residual error (noisy)', residual_error_noisy)
            plt.clf()
            plt.plot(testing['X'], testing['Y'], label='True')
            plt.plot(testing['X'],
                     prediction_noisy,
                     label='Prediction (noise)')
            #plt.plot(testing_clean['X'], prediction_clean, label='Prediction (no noise)')
            #plt.ylabel('sign(sin(2x))')
            plt.ylabel('sin(2x)')
            plt.xlabel('x')
            plt.scatter(rbf_nodes['nodes'], np.zeros(rbf_nodes['nodes'].size))
            plt.legend(loc='upper right')
            plot_centroids_1d(centroids, sig)

            path = './figures/task3.2/sin(2x)_sig=' + str(
                sig) + '_set=' + rbf_nodes['name']
            plt.savefig(path + '.png')
            print(data['config'])
            save_data(
                data['config'] + '\n\nresidual error (noisy)=' +
                str(residual_error_noisy) + '\nresidual error clean=' +
                str(residual_error_clean), path + '.txt')
Example no. 4
 def __init__(self):
     self.words = []
     self.labels = []
     self.docs_x = []
     self.docs_y = []
     self.stemmer = LancasterStemmer()
     self.data = []
     self.training = []
     self.output = []
     self.out_empty = []
     self.model = []
     self.count = -1
     self.say = ""
     self.Network = Network()
Example no. 5
 def test_longest_route(self):
     n = Network()
     json_file = r'C:\Users\ShravanB\Documents\College\CS242\CSAir\data\CSAir.json'
     with open(json_file, 'r') as f:
         data = json.loads(f.read())
     n.create_metros(data['metros'])
     n.create_routes(data['routes'])
     longest_route = "Longest Route: SYD - LAX \nDistance: 12051"
     self.assertEqual(n.get_longest_route(), longest_route)
Example no. 6
 def test_avg_city(self):
     n = Network()
     json_file = r'C:\Users\ShravanB\Documents\College\CS242\CSAir\data\CSAir.json'
     with open(json_file, 'r') as f:
         data = json.loads(f.read())
     n.create_metros(data['metros'])
     n.create_routes(data['routes'])
     avg_city = "Average City Population: 11796143"
     self.assertEqual(n.get_average_size(), avg_city)
Example no. 7
 def test_avg_route(self):
     n = Network()
     json_file = r'C:\Users\ShravanB\Documents\College\CS242\CSAir\data\CSAir.json'
     with open(json_file, 'r') as f:
         data = json.loads(f.read())
     n.create_metros(data['metros'])
     n.create_routes(data['routes'])
     avg_route = "Average Route Distance: 2300"
     self.assertEqual(n.get_average_route(), avg_route)
Example no. 8
 def test_shortest_route(self):
     n = Network()
     json_file = r'C:\Users\ShravanB\Documents\College\CS242\CSAir\data\CSAir.json'
     with open(json_file, 'r') as f:
         data = json.loads(f.read())
     n.create_metros(data['metros'])
     n.create_routes(data['routes'])
     shortest_route = "Shortest Route: WAS - NYC \nDistance: 334"
     self.assertEqual(n.get_shortest_route(), shortest_route)
Example no. 9
 def test_smallest_city(self):
     n = Network()
     json_file = r'C:\Users\ShravanB\Documents\College\CS242\CSAir\data\CSAir.json'
     with open(json_file, 'r') as f:
         data = json.loads(f.read())
     n.create_metros(data['metros'])
     n.create_routes(data['routes'])
     small_city = "Smallest Population: Essen - 589900 people"
     self.assertEqual(n.get_smallest_city(), small_city)
Example no. 10
 def test_city_hub(self):
     n = Network()
     json_file = r'C:\Users\ShravanB\Documents\College\CS242\CSAir\data\CSAir.json'
     with open(json_file, 'r') as f:
         data = json.loads(f.read())
     n.create_metros(data['metros'])
     n.create_routes(data['routes'])
     metro_hub = "Number of Routes : 6\n" \
                 "Istanbul\n" \
                 "Hong Kong"
     self.assertEqual(n.get_city_hubs(), metro_hub)
Example no. 11
 def test_url(self):
     n = Network()
     json_file = r'C:\Users\ShravanB\Documents\College\CS242\CSAir\data\CSAir.json'
     with open(json_file, 'r') as f:
         data = json.loads(f.read())
     n.create_metros(data['metros'])
     n.create_routes(data['routes'])
     url = "http://www.gcmap.com/mapui?P=SCL-LIM,+LIM-MEX,+LIM-BOG," \
           "+MEX-LAX,+MEX-CHI,+MEX-MIA,+MEX-BOG,+BOG-MIA,+BOG-SAO,+BOG-BUE," \
           "+BUE-SAO,+SAO-MAD,+SAO-LOS,+LOS-KRT,+LOS-FIH,+FIH-KRT,+FIH-JNB," \
           "+JNB-KRT,+KRT-CAI,+CAI-ALG,+CAI-IST,+CAI-BGW,+CAI-RUH,+ALG-MAD," \
           "+ALG-PAR,+ALG-IST,+MAD-NYC,+MAD-LON,+MAD-PAR,+LON-NYC,+LON-ESS," \
           "+LON-PAR,+PAR-ESS,+PAR-MIL,+MIL-ESS,+MIL-IST,+ESS-LED,+LED-MOW," \
           "+LED-IST,+MOW-THR,+MOW-IST,+IST-BGW,+BGW-THR,+BGW-KHI,+BGW-RUH," \
           "+THR-DEL,+THR-KHI,+THR-RUH,+RUH-KHI,+KHI-DEL,+KHI-BOM,+DEL-CCU," \
           "+DEL-MAA,+DEL-BOM,+BOM-MAA,+MAA-CCU,+MAA-BKK,+MAA-JKT,+CCU-HKG," \
           "+CCU-BKK,+BKK-HKG,+BKK-SGN,+BKK-JKT,+HKG-SHA,+HKG-TPE,+HKG-MNL," \
           "+HKG-SGN,+SHA-PEK,+SHA-ICN,+SHA-TYO,+SHA-TPE,+PEK-ICN,+ICN-TYO," \
           "+TYO-SFO,+TYO-OSA,+OSA-TPE,+TPE-MNL,+MNL-SFO,+MNL-SYD,+MNL-SGN," \
           "+SGN-JKT,+JKT-SYD,+SYD-LAX,+LAX-SFO,+LAX-CHI,+SFO-CHI,+CHI-YYZ," \
           "+CHI-ATL,+ATL-WAS,+ATL-MIA,+MIA-WAS,+WAS-YYZ,+WAS-NYC,+NYC-YYZ,+"
     self.assertEqual(n.get_url(), url)
Example no. 12
def getData(fileName):
    # NOTE: the opening of this snippet was truncated; reconstructed from the
    # parallel test-set loop below (the first 84 lines form the learning set).
    with open(fileName, 'r') as f:
        lines = f.readlines()
    learnData, testData = [], []

    for line in lines[:84]:
        tokens = line.split(',')
        newLine = []
        for i in range(0, len(tokens)):
            newLine.append(float(tokens[i]))
        learnData.append(newLine)

    for line in lines[84:]:
        tokens = line.split(',')
        newLine = []
        for i in range(0, len(tokens)):
            newLine.append(float(tokens[i]))
        testData.append(newLine)

    return (learnData, testData)

if __name__ == '__main__':
    '''
    P1. Build a system that approximates the quality of a concrete mixture
    based on the used ingredients.
    Input variables: 7
    Output variables: 3
    '''
    learnData, testData = getData("C:\\_MyFiles\\_FMI\\Workspace\\AI\\Lab5\\files\\data.data")

    net = Network(7, 3, [4])

    net.learn(LEARN_RATE, learnData)
    err = net.run(testData)
    err[0] = int(err[0])
    err[1] = int(err[1])
    err[2] = int(err[2])
    print('\nSLUMP, FLOW, 28-day Compressive Strength = ' + str(err))
Example no. 13
        # NOTE: snippet truncated above; moveWindow's signature reconstructed
        # from its assignment to mouseMoveEvent below.
        def moveWindow(e):
            if e.buttons() == Qt.LeftButton:
                self.move(self.pos() + e.globalPos() - self.clickPosition)
                self.clickPosition = e.globalPos()
                e.accept()
        # Change this when turning this into an exe
        def Finish():
            system('taskkill /f /im java.exe')
            QCoreApplication.instance().quit()
        self.ui.frame.mouseMoveEvent = moveWindow
        self.ui.pushButton_2.clicked.connect(Finish)
        self.ui.textEdit.setText("Start talking with the bot")
        self.ui.pushButton.clicked.connect(lambda: Clicked(bot))



    def mousePressEvent(self, event):
        self.clickPosition = event.globalPos()


if __name__ == "__main__":
    Net = Network()
    Net.RunServer()
    Talk.delete()
    bot = Bot()
    bot.read()
    bot.stem()
    bot.modelsetup()
    bot.setup()
    app = QApplication(sys.argv)
    window = Winner(bot,Net)
    sys.exit(app.exec_())
Example no. 14
def task333():
    training, testing = ballistic_data()
    #plt.scatter(testing['X'][:,0], testing['Y'][:,0], label='col1')
    #plt.scatter(testing['X'][:,1], testing['Y'][:,1], label='col2')
    #plt.legend()
    #plt.show()
    N_hidden_nodes = 10
    sigma = 0.1
    eta = 0.1
    eta_hidden = 0.02

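    # Place RBF centroids with leaky competitive learning over the input space.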
    centroids = LeakyCL(matrix=np.empty(
        (training['X'].shape[1], N_hidden_nodes)),
                        space=[-0.1, 0.9],
                        eta=eta_hidden)

    RadialBasisNetwork = Network(X=training['X'],
                                 Y=training['Y'],
                                 sigma=sigma,
                                 hidden_nodes=N_hidden_nodes,
                                 centroids=centroids,
                                 initializer=RandomNormal(std=0.1))

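    # Train the output weights online with the delta rule; batch least squares
    # is the commented-out alternative.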
    data = RadialBasisNetwork.train(
        epochs=2000,
        epoch_shuffle=True,
        #optimizer=LeastSquares())
        optimizer=DeltaRule(eta=eta))

    prediction, residual_error = RadialBasisNetwork.predict(
        testing['X'], testing['Y'])
    print('residual error:', residual_error)

    path = './figures/task3.3/ballist_N=' + str(
        N_hidden_nodes) + '_eta=' + str(eta) + '_sigma=' + str(sigma)

    padding = 0.1
    lo = min(prediction[:, 0].min(), testing['Y'][:, 0].min()) - padding
    hi = max(prediction[:, 0].max(), testing['Y'][:, 0].max()) + padding
    plt.clf()
    plt.axis([lo, hi, lo, hi])
    plt.plot([lo, hi], [lo, hi], '--k', linewidth=1, dashes=(5, 10))
    plt.scatter(prediction[:, 0], testing['Y'][:, 0], marker='x')
    plt.title('Angle/Distance')
    plt.ylabel('True')
    plt.xlabel('Predicted')
    plt.savefig(path + '_angledistance.png')

    padding = 0.1
    lo = min(prediction[:, 1].min(), testing['Y'][:, 1].min()) - padding
    hi = max(prediction[:, 1].max(), testing['Y'][:, 1].max()) + padding
    plt.clf()
    plt.axis([lo, hi, lo, hi])
    plt.plot([lo, hi], [lo, hi], '--k', linewidth=1, dashes=(5, 10))
    plt.scatter(prediction[:, 1], testing['Y'][:, 1], marker='x')
    plt.title('Velocity/Height')
    plt.ylabel('True')
    plt.xlabel('Predicted')
    plt.savefig(path + '_velocityheight.png')

    print(data['config'])
    save_data(data['config'] + '\n\nresidual error=' + str(residual_error),
              path + '.txt')

    plt.clf()
    plt.plot(np.arange(0, len(data['t_loss'])),
             data['t_loss'],
             label='training loss')
    plt.xlabel('Epochs')
    plt.ylabel('Total approximation error')
    plt.legend(loc='upper right')
    plt.savefig(path + '_learning.png')
Example no. 15
def task33():
    N_units = [{
        'N': 8,
        'name': 'Tight',
        'sigma': 0.5
    }, {
        'N': 12,
        'name': 'task31',
        'sigma': 1.0
    }]

    noise = [0, 0.1]

    for std in noise:
        training, testing, testing_clean = generate_data_task31(
            lambda x: np.sin(x), std)
        for hidden_layer in N_units:
            #centroids = VanillaCL(np.empty((training['X'].shape[1], hidden_layer['N'])), space=[0, 2*np.pi], eta=0.001)
            centroids = LeakyCL(np.empty(
                (training['X'].shape[1], hidden_layer['N'])),
                                space=[0, 2 * np.pi],
                                eta=0.001)

            RadialBasisNetwork = Network(X=training['X'],
                                         Y=training['Y'],
                                         sigma=hidden_layer['sigma'],
                                         hidden_nodes=hidden_layer['N'],
                                         centroids=centroids,
                                         initializer=RandomNormal(std=0.1))

            data = RadialBasisNetwork.train(
                epochs=10000,
                epoch_shuffle=True,
                #optimizer=LeastSquares())
                optimizer=DeltaRule(eta=0.001))

            prediction_noisy, residual_error_noisy = RadialBasisNetwork.predict(
                testing['X'], testing['Y'])
            prediction_clean, residual_error_clean = RadialBasisNetwork.predict(
                testing_clean['X'], testing_clean['Y'])

            print('residual error', residual_error_clean)
            print('residual error (noisy)', residual_error_noisy)

            plt.clf()
            plt.plot(testing['X'], testing['Y'], label='True')
            plt.plot(testing['X'],
                     prediction_noisy,
                     label='Prediction (noise)')
            #plt.plot(testing_clean['X'], prediction_clean, label='Prediction (no noise)')
            #plt.ylabel('sign(sin(2x))')
            plt.ylabel('sin(2x)')
            plt.xlabel('x')
            plt.scatter(centroids.get_matrix(),
                        np.zeros(hidden_layer['N']).reshape(-1, 1).T)
            plt.legend(loc='upper right')
            plot_centroids_1d(centroids, hidden_layer['sigma'])
            #plt.show()

            path = './figures/task3.3/sin(2x)_sigma=' + str(
                hidden_layer['sigma']
            ) + '_set=' + hidden_layer['name'] + '_noise=' + str(std)
            plt.savefig(path + '.png')
            print(data['config'])
            save_data(
                data['config'] + '\n\nresidual error (noisy)=' +
                str(residual_error_noisy) + '\nresidual error clean=' +
                str(residual_error_clean), path + '.txt')

            plt.clf()
            plt.plot(np.arange(0, len(data['t_loss'])),
                     data['t_loss'],
                     label='training loss')
            plt.xlabel('Epochs')
            plt.ylabel('Total approximation error')
            plt.legend(loc='upper right')
            plt.savefig(path + '_learning.png')
Example no. 16
 def test_mediators(self):
     n = Network("datasets/test/test_cbc.edges", is_directed=False,
                 communities_file="datasets/test/test_cbc.communities")
     n.print_communities()
     n.get_mediators_NBC()
Example no. 17
# NOTE: imports reconstructed from Example no. 23 below; connect_reservoir's
# origin is not shown in the source and is assumed to be available.
from src.Network import Network
from src.Neuron import IzhikevichNeuron
from src.SpikeTrain import PoissonSpikeTrain

import numpy as np

# TODO: push the network to "the edge of chaos"
N = 30  # number of neurons
dt = 0.1  # granularity of time
T = 12000  # ms to run the network for
T_steps = int(T / dt)  # time steps to run the network for
variables_to_monitor = []  #['v']

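# Reservoir of Izhikevich neurons: enforced neurons 0-2 act as inputs,
# neuron N-1 as the output.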
neurons = [IzhikevichNeuron() for i in range(N)]
enf_nrns = [0, 1, 2, N - 1]  # the list of indices of the enforced neurons
synapses = connect_reservoir(neurons,
                             inps=enf_nrns[:3],
                             outs=enf_nrns[3:],
                             prob=0.3)
net = Network(dt, neurons, synapses, variables_to_monitor)

# generate template input and output spike trains that repeat every T/10 ms
ist1 = PoissonSpikeTrain(T / 10).generate(11)  # in Hz
ist2 = PoissonSpikeTrain(T / 10).generate(15)
ist3 = PoissonSpikeTrain(T / 10).generate(13)
ost = PoissonSpikeTrain(T / 10).generate(14)
input_spike_train1 = np.hstack([ist1 + i * (T / 10) for i in range(20)])
input_spike_train2 = np.hstack([ist2 + i * (T / 10) for i in range(20)])
input_spike_train3 = np.hstack([ist3 + i * (T / 10) for i in range(20)])
output_spike_train = np.hstack([ost + i * (T / 10) for i in range(20)])

# fix the evolution of the specified neurons and force them to spike with the specified patterns
net.enforce_neurons(enf_nrns, [
    input_spike_train1, input_spike_train2, input_spike_train3,
    output_spike_train
])
Example no. 18
class Bot:
    def __init__(self):
        self.words = []
        self.labels = []
        self.docs_x = []
        self.docs_y = []
        self.stemmer = LancasterStemmer()
        self.data = []
        self.training = []
        self.output = []
        self.out_empty = []
        self.model = []
        self.count = -1
        self.say = ""
        self.Network = Network()

    def read(self):
        with open("src/models/intents.json") as f:
            self.data = load(f)
    def dump(self):
        with open("src/models/data.pickle", "wb") as f:
            dump((self.words, self.labels, self.training, self.output), f)
    def stem(self):
        for intent in self.data["intents"]:
            for pattern in intent["patterns"]:
                wrds = word_tokenize(pattern)
                self.words.extend(wrds)
                self.docs_x.append(wrds)
                self.docs_y.append(intent["tag"])

            if intent["tag"] not in self.labels:
                self.labels.append(intent["tag"])

        self.words = [self.stemmer.stem(w.lower()) for w in self.words if w != "?"]
        self.words = sorted(list(set(self.words)))
        self.labels = sorted(self.labels)
    def modelsetup(self):
        self.out_empty = [0 for _ in range(len(self.labels))]

        for x, doc in enumerate(self.docs_x):
            bag = []

            wrds = [self.stemmer.stem(w.lower()) for w in doc]

            for w in self.words:
                if w in wrds:
                    bag.append(1)
                else:
                    bag.append(0)

            output_row = self.out_empty[:]
            output_row[self.labels.index(self.docs_y[x])] = 1
            self.training.append(bag)
            self.output.append(output_row)

        self.training = array(self.training)
        self.output = array(self.output)
        self.dump()

    def setup(self):
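        # Build the tflearn model: bag-of-words input, two 10-unit hidden
        # layers, softmax over intent tags; reuse a saved model when one exists.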
        ops.reset_default_graph()
        net = input_data(shape=[None, len(self.training[0])])
        net = fully_connected(net, 10)
        net = fully_connected(net, 10)
        net = fully_connected(net, len(self.output[0]), activation="softmax")
        net = regression(net)
        self.model = DNN(net)
        if exists("src/models/model.tflearn.index"):
            self.model.load("src/models/model.tflearn")
        else:
            self.model.fit(self.training, self.output, n_epoch=1000, batch_size=8, show_metric=True)
            self.model.save("src/models/model.tflearn")
    def indexWord(self, x, word):
        x = x.split(" ")
        ch = ""
        for i in x:
            if i.find(word) != -1:
                ch = i
        return ch
    def bag_of_words(self, s, words):
        bag = [0 for _ in range(len(words))]
        translate = []
        s_words = word_tokenize(s)
        s_words = [self.stemmer.stem(word.lower()) for word in s_words]

        for se in s_words:
            for i, w in enumerate(words):
                if w == se:
                    bag[i] = 1
                if se not in words and se not in translate:
                    translate.append(se)

        return array(bag), translate
    def chat(self, x, ui):
        try:
            self.count += 1
            predinp, translate = self.bag_of_words(x, self.words)
            if translate:
                translate = self.indexWord(str(x), translate[0])
                print(translate)
            results = self.model.predict([predinp])
            results_index = argmax(results)
            tag = self.labels[results_index]
        except Exception as e:
            print(e)
            return  # results/tag are undefined past this point
        try:
            if results[0][results_index] > 0.4:
                for tg in self.data["intents"]:
                    if tg['tag'] == tag:
                        responses = tg['responses']
                self.say = choice(responses)
                if self.say == "Looking up":
                    self.say = self.Network.Connect(translate.upper())
                    ui.textEdit.setText(self.say)
                else:
                    ui.textEdit.setText(self.say)
            else:
                self.say = "Sorry, I can't understand. I am still learning, try again."
                ui.textEdit.setText(self.say)
        except Exception as e:
            print(e)
Example no. 19
#!/usr/bin/env python3

from src.TextCorpus import TextCorpus
from src.Text import Text
from src.Network import Network
from src.rule_set import english
import os

current_dir = os.path.dirname(os.path.abspath(__file__))
text_filename = os.path.join(
    current_dir,
    'datasets/bbc-news/news/tech/call-for-action-on-internet-scam.txt')

print('Analyzing ' + text_filename)
corpus = TextCorpus(os.path.join(current_dir, 'datasets/bbc-news.json'))
text = Text(corpus, open(text_filename, mode='r', encoding='utf-8').read())
print('Text of ' + str(len(text.terms)) + ' terms with average score of ' +
      str(text.avg_score))

net = Network(english)
net.merge(text)
net.save_graph('output/test')
net.log_to_html()
print('Graph saved to test-edges.xlsx!')
Example no. 20
# ___________________________________________________________NETWORK INIT
# Variables initialization
# ---------------------------------------------------
epoch = 1500  # setting training iterations
learning_rate = 0.3  # setting learning rate
layer1neurons = 8  # number of neurons in hidden layer 1
layer2neurons = 8  # number of neurons in hidden layer 2
layer3neurons = 8  # number of neurons in hidden layer 3

inputlayer_neurons = features_array_n.shape[1]  # number of features in the data set
# ---------------------------------------------------

# create the network with inputs, and desired outputs
nn = Network(features_array_n, target_array, layer1neurons, layer2neurons,
             layer3neurons)

# iterate epoch numbers of times
for i in range(epoch):
    nn.feedforward()  # run feedforward
    nn.backpropagate(learning_rate)  # run feedback
    nn.mse(nn.output, target_lst)  # compute mse

    print(str(i) + "/" + str(epoch))  # output which epoch the computer is at

# print("output")
# print(nn.output)
# ___________________________________________________________RESULT PROCESSING

print(nn.mse_lst)
Example no. 21
def run_cegar_guy_way(nnet_reader_object, which_acas_output,
                      number_of_layers_to_transfer_into_cegar_layer):
    # merge all arnodes in all the layers the user wishes to apply CEGAR to
    network = Network(nnet_reader_object, which_acas_output)

    network.preprocess_more_layers(
        number_of_layers_to_transfer_into_cegar_layer + 1)
    network.forward_activate_more_layers(
        number_of_layers_to_transfer_into_cegar_layer + 1)
    network.fully_activate_more_layers(
        number_of_layers_to_transfer_into_cegar_layer)

    while True:
        try:
            layer_number, table_number, list_of_keys_of_arnodes_to_merge = \
                network.decide_best_arnodes_to_merge()
            network.merge_list_of_arnodes(layer_number, table_number,
                                          list_of_keys_of_arnodes_to_merge)
        except Exception:  # no further merge candidates
            break

    while True:
        result = network.check_if_network_is_sat_or_unsat()
        if result == network.CODE_FOR_SAT:
            print('SAT')
            break
        elif result == network.CODE_FOR_UNSAT:
            print('UNSAT')
            break

        # we have a spurious counter example
        layer_number, table_number, key_in_table, partition_of_arnode_inner_nodes = \
            network.decide_best_arnodes_to_merge()
        network.split_arnode(layer_number, table_number, key_in_table,
                             partition_of_arnode_inner_nodes)
Example no. 22
def run_cegar_naive(nnet_reader_object, which_acas_output):
    """
    Naive algorithm: abstracts all the way, and only then starts to refine back.
    """
    network = Network(nnet_reader_object, which_acas_output)
    network.fully_activate_the_entire_network()

    # merge all arnodes
    while True:
        try:
            layer_number, table_number, list_of_keys_of_arnodes_to_merge = \
                network.decide_best_arnodes_to_merge()
            network.merge_list_of_arnodes(layer_number, table_number,
                                          list_of_keys_of_arnodes_to_merge)
        except Exception:  # no further merge candidates
            break

    while True:
        result = network.check_if_network_is_sat_or_unsat()
        if result == network.CODE_FOR_SAT:
            print('SAT')
            break
        elif result == network.CODE_FOR_UNSAT:
            print('UNSAT')
            break

        # we have a spurious counter example
        layer_number, table_number, key_in_table, partition_of_arnode_inner_nodes = \
            network.decide_best_arnodes_to_merge()
        network.split_arnode(layer_number, table_number, key_in_table,
                             partition_of_arnode_inner_nodes)
Example no. 23
from src.Network import Network
from src.Neuron import IzhikevichNeuron
from src.SpikeTrain import UniformSpikeTrain, PoissonSpikeTrain
from src.Synapse import SynapseSTDP
from src.plotting_functions import *
from matplotlib import pyplot as plt

neurons = [IzhikevichNeuron() for i in range(2)]
dt = 0.1
T = 20000  # ms to run the network for
T_steps = int(T / dt)  # time steps to run the network for
variables_to_monitor = ['v']
synapses = dict()
synapses[0, 1] = SynapseSTDP(neurons[0], neurons[1])
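# A single plastic synapse (spike-timing-dependent plasticity) from neuron 0 to neuron 1.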

net = Network(dt, neurons, synapses, variables_to_monitor)
net.set_plastic_synapses(list_of_tuples=[(0, 1)])
spike_train_uniform = UniformSpikeTrain(T).generate(10)
spike_train_poisson = PoissonSpikeTrain(T).generate(10)
net.enforce_neurons(indices=[0, 1],
                    spike_trains=[spike_train_uniform, spike_train_poisson])
# net.enforce_neurons(indices=[0], spike_trains=[spike_train_uniform])

net.run(T_steps)

fig2 = plot_spikes_and_weights(net)
plt.show(block=True)
Example no. 24
# NOTE: the opening of this snippet was truncated; the imports and parser setup
# below are reconstructed from the code that follows (the argument name
# '--threshold' is assumed from the use of args.threshold).
import argparse
import glob
import os
import re

from src.TextCorpus import TextCorpus
from src.Text import Text
from src.Network import Network
from src.rule_set import english

arg_parser = argparse.ArgumentParser()
arg_parser.add_argument(
    '--threshold',
    default=None,
    type=float,
    help='threshold (TF-IDF value) below which concepts will not be included in the graph')
args = arg_parser.parse_args()

current_dir = os.path.dirname(os.path.abspath(__file__))
df_filename = os.path.normpath(
    os.path.join(current_dir, 'datasets/bbc-news.json'))
texts_path = os.path.normpath(os.path.join(current_dir, 'input'))

print('Loading DF metrics from ' + df_filename)
corpus = TextCorpus(df_filename)

print('Creating English semantic network...')
net = Network(english, args.threshold)
print('Reading texts from ' + texts_path)
for filename in glob.iglob(os.path.join(texts_path, '**/*.*'), recursive=True):
    text = Text(corpus,
                open(filename, mode='r', encoding='utf-8').read(),
                name=re.search(r'[^\\/]+$', filename).group(0))
    print('Text of ' + str(len(text.terms)) + ' terms with average score of ' +
          str(text.avg_score))
    net.merge(text)

net.save_graph('output/output')
net.log_to_html()
print('Graph saved to ' + current_dir +
      '/output/output-edges.xlsx (output-nodes.xlsx)!')