Example #1
def main():
    start_time = time.time()
    data_directory = "/Users/soonerfan237/Desktop/MerckActivity/TrainingSet5/"
    files = glob.glob(data_directory + "ACT*.csv")
    activity_list = []
    for file in files:
        print(file)
        match = re.search(r'ACT([0-9]+)', file)
        activity_list.append(int(match.group(1)))

    feature_dict, molecule_dict_filter = CombineData.CombineData(
        data_directory)
    print("TOTAL MOLECULES: " + str(len(molecule_dict_filter)))
    molecule_dict_filter = NormalizeActivity.NormalizeActivity(
        molecule_dict_filter, activity_list)
    feature_dict_filter, molecule_dict_filter = FindVariableFeatures.FindVariableFeatures(
        data_directory, feature_dict, molecule_dict_filter)
    molecule_dict_filter = FindCorrelatedFeatures.FindCorrelatedFeatures(
        feature_dict_filter, molecule_dict_filter, activity_list)
    molecule_dict_filter = RemoveOutliers.RemoveOutliers(molecule_dict_filter)

    for i in activity_list:
        NeuralNet.NeuralNet(
            data_directory, i,
            molecule_dict_filter)  #i corresponds to activity number to predict
        #ConvNeuralNet.ConvNeuralNet(data_directory, i, molecule_dict_filter)
        GenerativeAdversarialNetwork.GenerativeAdversarialNetwork(
            data_directory, i, molecule_dict_filter, 100)
    elapsed_time = (time.time() - start_time) / 60
    print("This took " + str(elapsed_time) + " minutes.")
    print("DONE!")
Example #2
def mutateThatJohn(neuralNetToMutate, nodes_per_layer):
    #don't mutate the activation functions
    activation_funcs=neuralNetToMutate.activation_funcs
    #creates new neural network
    neuralNetToReturn= nn.NeuralNet(activation_funcs,nodes_per_layer)
    weightsOf=neuralNetToMutate.weights
    #turn weights to one dimension
    weights1d= turnThreeToOne(weightsOf)
    #get total amount of weights
    totalAmtOfWeights=len(weights1d)
    b=0
    c=0
    #loop until we get two different indices
    while c==b:
        b= random.randint(0,totalAmtOfWeights)
        c= random.randint(0,totalAmtOfWeights)
    #grab sublist from those indices
    if b<c:
        subList=weights1d[b:c]
    else:
        subList=weights1d[c:b]
    #reverse the list
    subList.reverse()
    #replace elements in the list with reversed list
    if b<c:
        weights1d[b:c]=subList
    else:
        weights1d[c:b]=subList
    #need it back in three dimensions
    weightsBack=turnOneToThree(weights1d, nodes_per_layer)
    #set mutated weights to new neural net
    neuralNetToReturn.weights=weightsBack
    #return mutated neural net
    return neuralNetToReturn
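The two helpers used above, turnThreeToOne and turnOneToThree, are not shown in this listing. A minimal sketch of what such flatten/restore helpers could look like, assuming the weights are nested Python lists indexed as [layer][node][input] (an assumption about this project's layout, not its actual code):

def turnThreeToOne(weights3d):
    # Flatten a [layer][node][input] nested list into one flat list.
    return [w for layer in weights3d for node in layer for w in node]

def turnOneToThree(weights1d, nodes_per_layer):
    # Rebuild the nested structure; layer i is assumed to hold
    # nodes_per_layer[i + 1] nodes with nodes_per_layer[i] weights each.
    weights3d, pos = [], 0
    for i in range(len(nodes_per_layer) - 1):
        layer = []
        for _ in range(nodes_per_layer[i + 1]):
            layer.append(weights1d[pos:pos + nodes_per_layer[i]])
            pos += nodes_per_layer[i]
        weights3d.append(layer)
    return weights3d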
Example #3
def q1Test(testFile):
    if 'sigmoid' in testFile.name and 'Activation' not in testFile.name:
        value = float(testFile.readline().strip())
        sPercep = NeuralNet.Perceptron()
        solution = sPercep.sigmoid(value)
    else:
        testFuncName = testFile.readline().strip()
        getData = getattr(NeuralNetUtil, testFuncName)
        examples, tests = getData()
        testRangeStart = testFile.readline().strip()
        testRangeEnd = testFile.readline().strip()
        testRangeStart = 0 if testRangeStart=='None' else int(testRangeStart)
        testRangeEnd = len(examples) if testRangeEnd=='None' else int(testRangeEnd)
        examples = examples[testRangeStart:testRangeEnd]
        
        if 'feedforward' in testFile.name:
            sNet = NeuralNet.NeuralNet([16,24,10])
            
            file = open('test_cases/nnet')
            net = cPickle.load(file)
            copyWeights(sNet,net)
            
            solution = []
            for example in examples:
                solution.append(sNet.feedForward(example[0]))
        elif 'Activation' in testFile.name:
            file = open('test_cases/percep')
            percep = cPickle.load(file)
            
            sPercep = NeuralNet.Perceptron(inSize = percep.inSize-1, weights = percep.weights)
            solution = []
            for example in examples:
                solution.append(sPercep.sigmoidActivation(example[0]))
    return solution
Example #4
def q4Test(testFile, module=NeuralNet):
    testFuncName = testFile.readline().strip()
    getData = getattr(NeuralNetUtil, testFuncName)
    examples = getData()
    testRangeStart = testFile.readline().strip()
    testRangeEnd = testFile.readline().strip()
    testRangeStart = 0 if testRangeStart == 'None' else int(testRangeStart)
    testRangeEnd = len(examples) if testRangeEnd == 'None' else int(
        testRangeEnd)
    examples = (examples[0][testRangeStart:testRangeEnd],
                examples[1][testRangeStart:testRangeEnd])

    alph = float(testFile.readline())
    weight = float(testFile.readline())

    file = open('test_cases/nnet', 'rb')
    net = pickle.load(file)

    sNet = NeuralNet.NeuralNet([16, 24, 10])
    copyWeights(sNet, net)

    solution = NeuralNet.buildNeuralNet(examples,
                                        alpha=alph,
                                        weightChangeThreshold=weight,
                                        startNNet=sNet)
    return solution[1]
Example #5
def runNeuralNet():
    import NeuralNet
    nn = NeuralNet.NeuralNet(trainingData='PLANKTON',
                             hiddenLayersSize=[59, 59],
                             activationFunctions=['sigmoid'] * 3)
    print([np.shape(ob) for ob in nn.Thetas])
    nn.train(maxNumIts=5000, regParams=[0.01] * 3, trainToMax=True)
Example #6
def genRandom(population, nodes_per_layer):
    neuralNets=[]
    layers=len(nodes_per_layer)
    for p in range(0,population):
        activation_funcs=[]
        for q in range(0,layers):
            activation_funcs.append(random.randint(0,9))
        net= nn.NeuralNet(activation_funcs,nodes_per_layer)
        neuralNets.append(net)
    return neuralNets
Example #7
    def __init__(self, screen, startPos, fitness=0.0): 
        # Call the parent's constructor
        super().__init__()
 
        # Create a player
        self.width = 40 #width of player
        self.height = 60 #height of player
        self.enemy = None #initialize the opponent
        self.enemyPos = None #Later, will be used to track where the opponent is

        self.maxHearts = 4 #max "health"
        self.numHearts = 4 #track how many hits we've taken

        self.numDeaths = 0 #track how many times we've died 
        self.numKills = 0 #track how many times we've killed the opponent
        self.numGoals = 0 #How many times have we made it to the other side of the board?
        self.numHits = 0 #How many times the player has made a successful attack
        self.runningDistance = 0 #How far the player has moved (running total per life)
        self.maxDistance = 0 #How far the player moved during the round
        self.fitness = fitness #the total fitness of this player

        self.sword = None
        self.isAttacking = False
        self.attackDelay = 30 #30 frames between each attack
        self.jumpDelay = 30
        self.respawnDelay = 30 #Don't redraw the player upon death for 30 frames

        self.direction = "right"
        self.image = pygame.image.load('Images/playerrightangry.png')

        # So our player can modify the overall screen
        self.screen = screen

        #The Neural Net (initialized)
        self.brain = None

        # Set a reference to the image rect.
        self.rect = self.image.get_rect()

        #coordinate point
        self.startPos = startPos
        self.startx = self.startPos[0]
        self.starty = self.startPos[1]

        # Set speed vector of player
        self.change_x = 0
        self.change_y = 0
 
        # List of sprites we can bump against
        self.level = None

        self.genomeInputs = 6
        self.genomeOutputs = 4
        self.brain = NeuralNet(self.genomeInputs, self.genomeOutputs)
        self.vision = []
Example #8
    def __init__(self, skip_neural=False):
        self.sensors = [Button(i) for i in cfg.SENSE_PINS]
        self.buffer_size = cfg.window_update_hz * cfg.window_size_s
        self.buffer = [0x00] * self.buffer_size
        self.idle = True
        self.score = 0.5
        self.last_NN_outputs = None

        if not skip_neural:
            self.neuralModel = NeuralNet(model_path=cfg.model_path)
            self.neuralModel.load_weights()
Example #9
def crossover(neuralNetwork1, neuralNetwork2, nodes_per_layer):
    #get the weights of each neural network
    weights1=neuralNetwork1.weights
    weights2=neuralNetwork2.weights
    #get the activation functions of each neural network
    activation1=neuralNetwork1.activation_funcs
    activation2=neuralNetwork2.activation_funcs
    #get number of activation functions
    actives=len(activation1)
    #get random int for one-point crossover of activation functions
    cross=random.randint(0,actives)
    activation_funcs=[]
    for a in range(0,actives):
        if a<cross:
            activation_funcs.append(activation1[a])
        else:
            activation_funcs.append(activation2[a])
    #create initial neural net to return
    net= nn.NeuralNet(activation_funcs,nodes_per_layer)
    #get total amount of weights
    totalAmtOfWeights=0
    for a in range(0,len(weights1)):
        for b in range(0,len(weights1[a])):
            for c in range(0,len(weights1[a][b])):
                totalAmtOfWeights+=1
    #grab two random ints for two-point crossover
    b=0
    c=0
    while b==c:
        b= random.randint(0,totalAmtOfWeights)
        c= random.randint(0,totalAmtOfWeights)
    #find the lower index
    if b<c:
        first=b
        second=c
    else:
        first=c
        second=b
    counter=0
    #crossover weights to child
    for a in range(0,len(weights1)):
        for b in range(0,len(weights1[a])):
            for c in range(0,len(weights1[a][b])):
                if counter<first:
                    net.weights[a][b][c]=weights1[a][b][c]
                elif counter>=first and counter<second:
                    net.weights[a][b][c]=weights2[a][b][c]
                else:
                    net.weights[a][b][c]=weights1[a][b][c]
                counter+=1
    #return child
    return net
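The two-point crossover of the flattened weights can also be exercised on its own with plain lists; the following self-contained sketch (hypothetical, independent of the nn.NeuralNet class used above) splices the middle segment from the second parent and keeps the rest from the first:

import random

def two_point_crossover(flat1, flat2):
    # Pick two distinct cut points, then copy parent 2 between them.
    total = len(flat1)
    b = c = 0
    while b == c:
        b, c = random.randint(0, total), random.randint(0, total)
    first, second = min(b, c), max(b, c)
    return flat1[:first] + flat2[first:second] + flat1[second:]

# Example: two parents of eight weights each.
print(two_point_crossover([0.0] * 8, [1.0] * 8))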
Example #10
def build_model(learning_rate, last_activation=softmax):
    num_of_features = 9
    dim_hidden_1 = 12
    dim_hidden_2 = 6
    dim_hidden_3 = 2
    layer = Layer(num_of_features, dim_hidden_1, ReLU)
    layer1 = Layer(dim_hidden_1, dim_hidden_2, tanh_f)
    layer2 = SkipLayer(dim_hidden_2, num_of_features)
    layer3 = Layer(dim_hidden_2, dim_hidden_3, last_activation)

    return NeuralNet([layer, layer1, layer2, layer3],
                     skip_layer=layer2,
                     learning_rate=learning_rate)
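Layer, SkipLayer, ReLU, tanh_f, and softmax here belong to this example's own project and are not shown. As a rough, hypothetical illustration only (assumed API and shapes, not the project's actual classes), a dense layer of this kind typically just stores a weight matrix and applies its activation on the forward pass:

import numpy as np

def ReLU(x):
    # Rectified linear unit, applied element-wise.
    return np.maximum(0.0, x)

class Layer:
    # Hypothetical dense layer: dim_in inputs -> dim_out outputs.
    def __init__(self, dim_in, dim_out, activation):
        self.W = np.random.randn(dim_in, dim_out) * 0.1
        self.b = np.zeros(dim_out)
        self.activation = activation

    def forward(self, x):
        return self.activation(x @ self.W + self.b)

# Example: 9 input features -> 12 hidden units, as in build_model above.
layer = Layer(9, 12, ReLU)
print(layer.forward(np.zeros(9)).shape)  # (12,)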
Example #11
def run_main(features, labels):

    max_iteration = FLAGS.max_iteration
    n_layer = FLAGS.n_layer  # the last layer is the output of network
    n_feat = FLAGS.n_feat
    n_nodes = FLAGS.n_nodes

    nn_model = nn.NeuralNet(n_layer, n_nodes, n_feat, FLAGS.func_num)
    # datatuple = tuple([features, labels, [], [], [], []])
    # nn_train.StochasticGradientDescent(datatuple, nn_model)

    w0 = dict_to_nparray(nn_model.model)

    print gradientCheck(gradfunc, objfunc, w0, [features[1]], [labels[1]],
                        max_iteration, nn_model)
Example #12
def genRandom(population, nodes_per_layer):
    neuralNets=[]
    layers=len(nodes_per_layer)
    for p in range(0,population):
        activation_funcs=[]
        for q in range(0,layers-1):
            #we only want tanh, arctan, or constant activation
            alpha=random.randint(0,2)
            #if 2, change to 3 which is constant
            if(alpha==2):
                activation_funcs.append(3)
            else:
                activation_funcs.append(alpha)
        net= nn.NeuralNet(activation_funcs,nodes_per_layer)
        neuralNets.append(net)
    return neuralNets
Example #13
def main():
    xOrNet = NN.NeuralNet(layers=[2, 3, 1])
    inpt = [1, 1]
    outpt = [0]

    xOrNet.updateInput(inpt)

    setInitialWeights(xOrNet)

    xOrNet.fireNet(func='sigmoid')
    print(AF.sigmoid(xOrNet.getOutput()))

    erOut = 0.0 - AF.sigmoid(xOrNet.getOutput())
    dOut = AF.dsigmoid(xOrNet.getOutput()) * erOut

    print(dOut)
Example #14
def q3Test(testFile,module=NeuralNet):
    testFuncName = testFile.readline().strip()
    getData = getattr(NeuralNetUtil, testFuncName)
    examples, tests = getData()
    testRangeStart = testFile.readline().strip()
    testRangeEnd = testFile.readline().strip()
    testRangeStart = 0 if testRangeStart=='None' else int(testRangeStart)
    testRangeEnd = len(examples) if testRangeEnd=='None' else int(testRangeEnd)
    examples = examples[testRangeStart:testRangeEnd]
    sNet = NeuralNet.NeuralNet([16,24,10])
    
    file = open('test_cases/nnet')
    net = cPickle.load(file)
    copyWeights(sNet,net)
    
    solution = sNet.backPropLearning(examples,0.1)
    return solution
Example #15
def generateOffspring(parents, populationSize, mutationRate):
    offspring = []
    for i in range(populationSize - len(parents)):
        # Randomly select 2 parents
        parentIndices = random.sample(range(0, len(parents)), 2)
        weightsOffSpring, biasVecOffSpring = crossover(
            parents[parentIndices[0]], parents[parentIndices[1]])
        newWeights, newBiasVec = mutate(weightsOffSpring, biasVecOffSpring,
                                        mutationRate)
        offspringMatrix = vectorToMat(numpy.array([newWeights]),
                                      numpy.array([parents[0].layers]))[0]
        offspringNN = NN.NeuralNet()
        offSpringBiasVectors = splitBiasVector(newBiasVec, offspringMatrix)
        offspringNN.setLayers(offspringMatrix)
        offspringNN.setBiasVectors(offSpringBiasVectors)
        offspring.append(offspringNN)
    return offspring
Example #16
    def __init__(self, root):

        self.root = root
        root.title("Movie Reccomendation")
        self.canvas = tk.Canvas(root, width=1000, height=1000)
        self.canvas.place(x=0, y=0, anchor=tk.NW)

        self.nMoviesPicked = 0
        self.nMaxMoviesPicked = 10  #The total number of movies the user will pick as "Liked" or "Not Liked"
        self.Network = NeuralNet.NeuralNet(dfMovieData)

        self.imDB = imdb.IMDb()

        self.font = "SourceCodePro"
        self.listMoviesReccomend = []
        self.movieSample = None

        self.placeButtons()
        self.askMovie()  # Start
Example #17
def main(viz=False):

    tic()
    data = load_mnist()
    print ("Data loaded in %.1fs" % toc())

    # Create a neural network with matching input/output dimensions
    #
    cfg = NeuralNetCfg(L1=1e-6,init_scale=0.05)
    cfg.input(data.Xshape)
    cfg.hidden(800,"logistic",dropout=0.5)
    cfg.hidden(800,"logistic",dropout=0.25)
    cfg.output(data.Yshape,"softmax")

    model = NeuralNet(cfg)

    # Rescale the data to match the network's domain/range
    #
    data.rescale(model.ideal_domain(),model.ideal_range())

    # Train the network
    #
    report_args = { 'verbose'   : True,
                    'interval'  : 5,       # how many epochs between progress reports (larger is faster)
                    'window_size' : "compact",
                    'visualize' : viz}

    trainer = TrainingRun(model,data,report_args,
                          learn_rate=2,
                          learn_rate_decay=.995,
                          momentum=[(0,.5),(400,0.9)],
                          batchsize=64)

    print "Memory available after data loaded:", memory_info(gc=True)

    tic()
    trainer.train(100)  # train for several epochs
    print ("Training took %.1fs" % toc())
Example #18
    def __init__(self, x, y, angle):
        #dimensions of car
        self.length = 20
        self.width = 15
        #Angle between center of bumper, car center of mass and side of bumper.
        self.cornerAngle = math.atan(self.width / self.length)
        #position of car
        self.x = x
        self.y = y
        #speed of wheels, and angle away from forward
        self.v = 0
        self.wheelAngle = 0
        #angle car makes with north, counterclockwise
        self.angle = angle
        #The distances to an obstruction at front, frontLeft, left, right, and frontRight
        self.fDist = 0
        self.flDist = 0
        self.lDist = 0
        self.rDist = 0
        self.frDist = 0
        #The car is coupled to a neural net: 1 hidden layer, 5 inputs (the above 5 distances),
        #8 neurons in the hidden layer, and two outputs, which determine if the car steers left or right.
        self.neuralnet = NeuralNet(3, 2, 1, 8)
Example #19
    def reproduce(self, other, crossRate, mutRate):
        weights1, weights2 = self.NN.getFlattenedValues(), other.NN.getFlattenedValues()
        #assert(len(weights1) == len(weights2))
        childWeights = copy.copy(weights1)
        #crossover stage
        if (random.random() < crossRate):
            pos = random.randint(0, len(childWeights) - 1)
            childWeights[pos:] = weights2[pos:]

        #mutation stage
        for i in range(len(childWeights)):
            if (random.random() < mutRate):
                weight = childWeights[i]
                changePercent = (2 * random.random()) - 1
                weight = weight * (1 + changePercent)  #mutate
                childWeights[i] = weight

        childNumInputs = self.NN.numInputs
        childNumOutputs = self.NN.numOutputs
        childNumHiddenLayerNeurons = self.NN.numLayerNeurons[:-1]
        childNN = NeuralNet.NeuralNet(childNumInputs, childNumOutputs,
                                      childNumHiddenLayerNeurons, childWeights)
        return individual(childNN)
Example #20
def main():
    start_time = time.time()
    time_periods = ['day', 'week', 'month', 'year']
    fileObject = open("stock_symbol_list.pickle", 'rb')
    symbols = pickle.load(fileObject)
    fileObject.close()
    #symbols = ['DVN', 'MMM', 'YYY', 'AAPL','ADBE','AMZN']
    #symbols = ['DVN']
    result_list = []
    random.shuffle(symbols)
    #for time_period in time_periods:
    #    result_list.append("==================")
    #    result_list.append(time_period)
    #    stockdata_dict, max_num_of_days = DownloadData.DownloadData(symbols[:1], time_period)
    #    stockdata_dict = RemoveOutliers.RemoveOutliers(stockdata_dict)
    #    for i in range(10, 1000, 10):
    #        if i < max_num_of_days:
    #            r_squared, rmse = NeuralNet.NeuralNet(stockdata_dict, max_num_of_days, i)
    #            result_list.append("num of days: "+str(i))
    #            result_list.append("r_squared: "+str(r_squared))
    #            result_list.append("rmse: " + str(rmse))

    #            fileObject = open("result_list.pickle",'wb') # open the file for writing
    #            pickle.dump(stockdata_dict,fileObject)
    #            fileObject.close()

    fileObject = open("stockdata_dict_year_FULL_tuesday.pickle", 'rb')
    max_num_of_days = 13
    stockdata_dict = pickle.load(fileObject)
    #stockdata_dict = RemoveOutliers.RemoveOutliers(stockdata_dict)
    fileObject.close()
    r_squared, rmse = NeuralNet.NeuralNet(stockdata_dict, max_num_of_days, 10)

    elapsed_time = (time.time() - start_time) / 60
    print("This took " + str(elapsed_time) + " minutes.")
    print("DONE!")
Example #21
import numpy as np
import copy
import NeuralNet
import load_datasets

# X = (hours sleeping, hours studying), y = score on test
X = ([2, 9], [1, 5], [3, 6])
y = np.array(([92], [86], [89]), dtype=float)

X = [[0,0,1], [0,1,1], [1,0,1], [1,1,1]]

y = [0, 1, 1, 0]

n= 1;

train_iris, train_labels_iris, test_iris, test_labels_iris = load_datasets.load_iris_dataset(0.03)
train_votes, train_labels_votes, test_votes, test_labels_votes = load_datasets.load_congressional_dataset(0.02)
train_monks, train_labels_monks, test_monks, test_labels_monks = load_datasets.load_monks_dataset(n)

train = train_votes
labels = train_labels_votes

NN = NeuralNet.NeuralNet(1, 2, len(train[0]), 1)
for i in xrange(1000):
    NN.train(train, labels)

print "Actual Output: \n" + str(labels)
print "Predicted Output: \n" + str(NN.forward(train).T[0])

Example #22
    # [0,1,0,1,1,1,0,0,0,0,0,0,1,1,2,1], 0))
        return frame, self.reward, self.game_over

    def get_frames(self):
        return np.array(list(self.frames))

    def get_ambient_data(self):
        return [self.ball_x - (self.paddle_x + self.PADDLE_WIDTH / 2)]


if __name__ == "__main__":
    game = CatchGame()
    NAME = 'weuler'

    brain = NeuralNet.NeuralNet([], [],
                                1,
                                5,
                                1,
                                saved_weight1=w1,
                                saved_weight2=w2)

    game.reset()
    input_t = game.get_frames()
    game_over = False

    report = []

    while not game_over:
        output = brain.get_output(game.get_ambient_data())

        if output > 0.5:
            action = 2
        else:
Example #23
        #print("Model weights after training",self.model.get_weights());

        return [train_loss_results, train_accuracy_results]

    def plotStats(self, train_loss_results, train_accuracy_results):
        fig, axes = plt.subplots(2, sharex=True, figsize=(12, 8))
        fig.suptitle('Training Metrics - Call, Raise, or Fold')

        axes[0].set_ylabel("Loss", fontsize=14)
        axes[0].plot(train_loss_results)

        axes[1].set_ylabel("Accuracy", fontsize=14)
        axes[1].set_xlabel("Epoch", fontsize=14)
        axes[1].plot(train_accuracy_results)

        plt.show()


if __name__ == '__main__':
    import tensorflow as tf
    import NeuralNet

    #pdb.set_trace();
    tf.enable_eager_execution()
    net = NeuralNet.NeuralNet(
        train=False,
        csvPath="/home/the2b/Documents/school/ai/project/src/test6.csv",
        dataUrl="http://127.0.0.1/test6.csv")
    trainingRes = net.trainModel(epochs=501, batchSize=72, bufferSize=10000)
    net.plotStats(trainingRes[0], trainingRes[1])
Example #24
__author__ = 'Aaron'

# IMPORTS
import sys

from States import *
from constants import *
from NeuralNet import *

# GLOBAL VARIABLES
clock = pygame.time.Clock()
ann = NeuralNet(NUM_INPUTS, NUM_OUTPUTS, NUM_HIDDEN, NUM_PER_HIDDEN)


# STATE MANAGER
class StateManager(object):
    def __init__(self, ann=None):
        """
        Initializes the state manager.
        Contains "global" variables to hold neural network and score.
        """
        self.ann = ann
        self.fitness = 0

        self.state = None
        self.go_to(MenuState())

    def go_to(self, state):
        self.state = state
        self.state.manager = self
Example #25
from NeuralNet import *
from Config import Config
import tflowtools as TFT
import sys
from CaseManager import *
import numpy as np
import mnist.mnist_basics as mnist
from Case import *
import CSVReader

if __name__ == '__main__':
    args = ArgumentParser.parseArgs()
    config = Config(args)
    caseManager = CaseManager(
        config,
        cfunc=lambda: config.src_function(*config.src_args),  # * unpacks list arguments
        vfrac=config.vfrac,
        tfrac=config.tfrac,
        case_fraction=config.case_fraction,
        src_function=config.src_function,
        src_args=config.src_args,
        src_path=config.src_file_path)

    nn = NeuralNet(config, caseManager)

    nn.do_training()

    # nn.do_testing()
    # TFT.fireup_tensorboard('probeview')
Example #26
import matplotlib.pyplot as plt
import sys
import load_datasets
import NeuralNet  # import the Neural Network class
import DecisionTree  # import the Decision Tree class

# import other files and classes if you have developed them
# import other libraries as needed, except those that do machine learning

decision_tree_iris = DecisionTree.DecisionTree()
decision_tree_congress = DecisionTree.DecisionTree()
decision_tree_monks1 = DecisionTree.DecisionTree()
decision_tree_monks2 = DecisionTree.DecisionTree()
decision_tree_monks3 = DecisionTree.DecisionTree()

rn_iris = NeuralNet.NeuralNet()
rn_congress = NeuralNet.NeuralNet()
rn_monks1 = NeuralNet.NeuralNet()
rn_monks2 = NeuralNet.NeuralNet()
rn_monks3 = NeuralNet.NeuralNet()

# Load/read the datasets
(train_iris, train_labels_iris, test_iris,
 test_labels_iris) = load_datasets.load_iris_dataset(0.7)
(train_congress, train_labels_congress, test_congress,
 test_labels_congress) = load_datasets.load_congressional_dataset(0.7)
(train_monks1, train_labels_monks1, test_monks1,
 test_labels_monks1) = load_datasets.load_monks_dataset(1)
(train_monks2, train_labels_monks2, test_monks2,
 test_labels_monks2) = load_datasets.load_monks_dataset(2)
(train_monks3, train_labels_monks3, test_monks3,
Example #27
import numpy as np
import matplotlib.pylab as plt
import matplotlib.animation as anima
import NeuralNet


def animate(i):
    NN.train([input_vector, true_outcomes], iterations=i)
    yar = NN.output_values
    ax1.clear()
    ax1.plot(input_vector, yar)


Fs = 100
f = 5
sample = 100
input_vector = np.arange(100., sample+100, 0.1)
test_vector = np.arange(0., sample+100, 0.1)
true_outcomes = np.sin(2 * np.pi * f * input_vector / Fs)
NN = NeuralNet.NeuralNet(len(input_vector), len(true_outcomes), 20)
fig = plt.figure()
ax1 = fig.add_subplot(1, 1, 1)
ani = anima.FuncAnimation(fig, animate, interval=1)
plt.show()

outcome = NN.predict(test_vector)
plt.plot(test_vector, outcome)
Example #28
from NeuralNet import *
inputs = 2
myRange = 1

myNet1 = NeuralNet(3, 3, inputs)
myNet2 = NeuralNet(3, 3, inputs)

myinput = myNet1.shapeInput([0, 0, 0, 0, 0, 0, 0, 0, 0, 0])

myNet1.updateNeurons(myinput, myRange)
myinput = myNet1.shapeInput([0, 0, 0, 0, 0, 0, 0, 0, 0, 0])

myNet2.updateNeurons(myinput, myRange)

myinput = myNet1.shapeInput(myNet2.speak())

###conversation

change = 0

for i in range(10000):
    myinput = myNet1.shapeInput(myNet2.winning)
    myNet1.updateNeurons(myinput, myRange)
    print "Person 1: ", myNet1.winning

    myinput = myNet1.shapeInput(myNet2.winning)
    myNet2.updateNeurons(myinput, myRange)
    print "Person 2: ", myNet2.winning

    distance = float(
        math.sqrt((((myNet1.winning[0] - myNet1.winning[1])**2)) +
Example #29
import numpy as np
import matplotlib.pyplot as plt
import random
import sys
#import load_datasets as loader
import NeuralNet  # import the Neural Network class
#import DecisionTree  # import the Decision Tree class
# import other files and classes if you have developed them
# import other libraries as needed, except those that do machine learning

nn = NeuralNet.NeuralNet(4, 3)

test_data_location = 'data/mnist_test.csv'
train_data_location = 'data/mnist_train.csv'

test1 = np.loadtxt(test_data_location,
                   encoding='utf-8',
                   dtype=int,
                   skiprows=1,
                   delimiter=',')
train1 = np.loadtxt(train_data_location,
                    encoding='utf-8',
                    dtype=int,
                    skiprows=1,
                    delimiter=',')

test_labels1 = test1[:, 0].astype(int)
train_labels1 = train1[:, 0].astype(int)
train1 = train1[:, 1:] / 255
test1 = test1[:, 1:] / 255
Example #30
def main_NN():
    n = 50
    np.random.seed(1)

    X, x_grid, y_grid, z, z_true = Franke_dataset(n, noise=0.0)
    z = np.reshape(z, (n * n, 1))

    iters = 5000
    lmbd = 0.0
    gamma = 1e-5

    n_categories = 1

    n_params = 5
    n_gammas = 3
    params = np.zeros(n_params)
    params[1:] = np.logspace(1, -2, n_params - 1)
    gammas = np.logspace(-5, -6, n_gammas)

    print(params)
    print(gammas)

    test_frac = 0.3
    n_test = int(test_frac * n**2)
    X_train, X_test, z_train, z_test = train_test_split(X,
                                                        z,
                                                        test_size=test_frac,
                                                        random_state=123)
    z_test_1d = np.ravel(z_test)
    train_single_NN = True
    # plot_surf(x_grid, y_grid, z.reshape(n,n), cm.coolwarm, 1)
    # plt.show()

    if train_single_NN == True:
        # config = [4,16,4]
        # config = [30,20,30,20,30,20]
        config = [100, 50]
        hidden_a_func = ['tanh', 'tanh']
        output_a_func = ''
        # config = [16,8,4]
        NN = NeuralNet(X_train, z_train, config, hidden_a_func, output_a_func,
                       'reg')
        NN.train(iters, gamma, lmbd=lmbd)
        z_pred = NN.predict_regression(X_test)

        r2_score = metrics.r2_score(z_test_1d, z_pred)
        mse = metrics.mean_squared_error(z_test_1d, z_pred)

        print("gamma =", gamma)
        print("lmbd =", lmbd)
        print("r2 =", r2_score)
        print("mse =", mse)

        print("--------------\n")

        # x_test, y_test = np.meshgrid(X_test[:,0], X_test[:,1])
        # n_test_1d = int(np.sqrt(n_test))
        # x_grid, y_grid = get_grid(X_test)

        # plot_surf(x_grid, y_grid, z_pred.reshape(n_test_1d, n_test_1d), cm.coolwarm)
        # plot_surf(x_grid, y_grid, z_test.reshape(n_test_1d, n_test_1d), cm.gray, alpha=0.5)
        # plt.show()

        # plt.imshow(z_pred.reshape(n,n))
        # plt.show()
        #
        # plt.imshow(z_train.reshape(n,n))
        # plt.show()

    # exit()
    config = [80, 60]
    hidden_a_func = ['sigmoid', 'tanh']
    NN_grid = NeuralNet(X_train, z_train, config, hidden_a_func, '', 'reg')
    NN_grid.grid_search(X_test, z_test_1d, params, gammas, 'reg', config)

    # Iterate over multiple hidden layer configurations
    # for i in range(1, 6):
    #     for j in range(0, i+1):
    #         for k in range(0, 1):
    #             config = [i, j, k]
    #             NN_grid.grid_search(X_test, y_test, params, gammas, config)

    best_accuracy, best_config, best_lmbd, best_gamma = NN_grid.return_params()

    print("\n--- Grid search done ---")
    print('Best accuracy:', best_accuracy)
    print("with configuration", best_config, "lmbd =", best_lmbd, "gamma =",
          best_gamma)