Example #1
 def start(self, newlistNN, initialNN, train, epochs, errorarray):
     '''Kieran Ringel
     Iterates over number of generations calling methods to perform selection, crossover and mutation.
     This creates a new population that replaces the previous population. The loss function is then calculated for
     all NN in the new generation. The best NN and its weights are saved, to see if the next generation will
     produce a fitter NN. The new population then replaces the old. Once all the generations have gone through, the
     best NN is returned.'''
     maxfitness = 999999      #tracks the lowest error seen so far; initialized high so the first NN replaces it
     generations = 20        #TUNE
     for gen in range(generations):    #loops through to keep updating the best NN every generation as well as the population
         newerrorarray = []
         newlistNN = []                          #start each generation with an empty new population
         while len(newlistNN) < len(initialNN):  #uses generational replacement
             parents = GeneticAlgorithm.selection(self, initialNN, errorarray)
             child = GeneticAlgorithm.crossover(self, parents)
             newlistNN.append(GeneticAlgorithm.mutation(self, child))
         for pop in range(len(newlistNN)):   #once new population has been generated, goes through to get errors
             GAerror = 0
             self.NN = newlistNN[pop]
             for i in range(epochs): #iterates for a number of epochs, generally kept low to prevent overfitting
                 train = train.sample(frac=1).reset_index(drop=True)
                 for row, trainpoints in train.iterrows():    #iterate through training data points
                    node_values = ff.feedforward(self, trainpoints)  #feeds forward to calculate all node values for NN
                    GAerror += self.calcerror(node_values[-1], trainpoints['class'])
             GAerror /= epochs * len(train)
             newerrorarray.append(GAerror)      #appends to the new generation's error list
             if GAerror <= maxfitness:       #if the most fit NN
                 bestNN = self.NN            #save NN
                 maxfitness = GAerror        #save error to be compared to
         errorarray = newerrorarray  #replaces population errors
         initialNN = newlistNN       #replaces population
     return(bestNN)
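The GeneticAlgorithm.selection, crossover and mutation helpers called above are not included in this example. For orientation, a minimal sketch of what such operators commonly look like on a flattened weight list (tournament selection, single-point crossover and Gaussian mutation) is shown below; the function names, parameters and constants are illustrative assumptions, not the repository's actual implementation.

import random

def tournament_selection(population, errors, k=3):
    #pick k random candidate indices and return the two with the lowest error as parents
    contenders = random.sample(range(len(population)), k)
    contenders.sort(key=lambda idx: errors[idx])
    return population[contenders[0]], population[contenders[1]]

def single_point_crossover(parents):
    #splice the two parent weight lists at a random cut point
    mother, father = parents
    cut = random.randrange(1, len(mother))
    return mother[:cut] + father[cut:]

def gaussian_mutation(child, rate=0.05, scale=0.1):
    #perturb each weight with small Gaussian noise at the given mutation rate
    return [w + random.gauss(0, scale) if random.random() < rate else w for w in child]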
Example #2
 def selection(self, target, cross, train, epochs):
     '''Kieran Ringel
     The loss function is calculated as the fitness for both the target NN and the trial NN produced by the DE. If the
     target NN performs better it remains in the population; otherwise the trial NN replaces it. Across all of the
     generations the NN with the best fitness is saved.'''
     tests = [target, cross]    #list of the initial target NN and the trial NN made by the DE
     errors = []
     for test in tests:  #gets the loss function for both
         error = 0       #reset the accumulated error for each candidate
         self.NN = test
         for i in range(epochs):  #iterates for a number of epochs, generally kept low to prevent overfitting
             train = train.sample(frac=1).reset_index(drop=True)
             for row, trainpoints in train.iterrows():    #iterate through training data points
                 node_values = ff.feedforward(self, trainpoints)  #feeds forward to calculate all node values for NN
                 error += self.calcerror(node_values[-1], trainpoints['class'])
         error /= epochs * len(train)
         errors.append(error)
     index = errors.index(min(errors))
     if min(errors) < DifferentialEvoluation.minimumerror:  #if it is the best NN that has been seen
         DifferentialEvoluation.bestNN = tests[index]       #sets the best NN to this NN
     return tests[index]    #returns the better NN
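The cross argument passed to selection is the trial NN built elsewhere by the differential evolution operators. For reference, a common way to construct such a trial vector (DE/rand/1 mutation followed by binomial crossover) is sketched below on flat weight lists; the names and the F/CR constants are illustrative assumptions, not taken from this repository.

import random

def de_trial_vector(target, population, F=0.5, CR=0.9):
    #DE/rand/1: pick three distinct donor vectors, none of them the target itself
    a, b, c = random.sample([p for p in population if p is not target], 3)
    jrand = random.randrange(len(target))   #force at least one gene to come from the mutant
    trial = []
    for j in range(len(target)):
        if random.random() < CR or j == jrand:
            trial.append(a[j] + F * (b[j] - c[j]))  #mutant gene
        else:
            trial.append(target[j])                 #gene inherited from the target
    return trial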
Example #3
 def test(self, test):
     '''Kieran Ringel
     Calls methods to feed forward through the trained NN and then calculate the error on the testing set'''
     tot_error = 0
     for row, testpoints in test.iterrows():
         node_values = ff.feedforward(self, testpoints)  #feeds forward to calculate all nodes for NN
         tot_error += self.calcerror(node_values[-1], testpoints['class'])   #looks at output nodes to calculate error
     print(tot_error/len(test))
     return(tot_error/len(test))
Example #4
 def train(self, train):
     '''Kieran Ringel
     Calls needed methods to feedforward and then back propagate the error'''
     epochs = 1     #TUNE
     #print('epochs', epochs)
     self.NN = self.initialNN    #sets NN to the initialized NN, so for cross validation each fold starts with a randomly initialized NN
     for i in range(epochs): #iterates for a number of epochs, generally kept low to prevent overfitting
         train = train.sample(frac=1).reset_index(drop=True)
         for row, trainpoints in train.iterrows():    #iterate through training data points
            node_values = ff.feedforward(self, trainpoints)  #feeds forward to calculate all node values for NN
            error = bp.backerror(self, node_values, trainpoints['class'])    #backpropagates the error on the output nodes
            bp.backpropagate(self, error, node_values, trainpoints)  #uses backpropagated error to change weights on the NN
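The bp.backerror and bp.backpropagate helpers are not shown in this example. As a rough sketch of the computation they would usually perform, assuming sigmoid activations and a squared-error loss (which may differ from the repository's actual choices), the output-node delta and a single gradient-descent weight update look like this:

def output_delta(output, target):
    #delta for a sigmoid output node under squared error: (o - t) * o * (1 - o)
    return (output - target) * output * (1 - output)

def update_weight(weight, delta, upstream_activation, learning_rate=0.01):
    #gradient-descent step for one weight feeding the node that produced delta
    return weight - learning_rate * delta * upstream_activation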
Example #5
    def train(self, train):
        '''Kieran Ringel
        Calls needed methods to feedforward and then back propagate the error'''
        #print(self.initialNN1)
        epochs = 1     #TUNE
        self.temp = []
        for pop in range(self.population):  # creates an array of NN
            self.temp.append(self.initNN(self.file, self.hlayers, self.hnodes, self.classification))
        self.initialNN = self.temp
        errorarray = []

        if self.type == "PSO":
            pso.__init__(self, self.initialNN, self.population, epochs, train)
        
        for pop in range(self.population):
            GAerror = 0
            self.NN = self.initialNN[pop]    #sets NN to the initialized NN, so for cross validation each fold starts
                                             #with a randomly initialized NN
            for i in range(epochs): #iterates for a number of epochs, generally kept low to prevent overfitting
                train = train.sample(frac=1).reset_index(drop=True)
                for row, trainpoints in train.iterrows():    #iterate through training data points
                    node_values = ff.feedforward(self, trainpoints)  #feeds forward to calculate all node values for NN

                    if self.type == "BP":
                        error = bp.backerror(self, node_values, trainpoints['class'])    #backpropagates the error on the output nodes
                        bp.backpropagate(self, error, node_values, trainpoints)  #uses backpropagated error to change weights on the NN
                    if self.type == "GA":
                        GAerror += self.calcerror(node_values[-1], trainpoints['class'])  #used to calculate the error for each NN for GA
            if self.type == "GA":
                GAerror /= epochs * len(train)
                errorarray.append(GAerror)           #adds to array of error correlating to initialized NNs
        if self.type == "GA":
            self.NN = ga.__init__(self, self.initialNN, train, epochs, errorarray)  #calls GA
        if self.type == "DE":
            self.NN = de.__init__(self, self.initialNN, train, epochs)    #calls DE
            print('returned best', self.NN)
Example #6
def MoveCars(env, nbrOfTimeStepsToTimeout, GA, dt, sensor, car, num,
             smallXYVariance, Chromosomes_Fitness, Chromosomes, Network_Arch,
             unipolarBipolarSelector, collison_value):
    carLocations = env.start_points  # Car Initial Location[X, Y] in [Meters]
    carHeadings = env.start_headings  # Car Initial Heading Counter Clock Wise[Degrees]
    steerAngles = env.start_steerAngles  # [Degrees] Counter Clock Wise(Same for all cars)

    # timesteps = 1
    # Old_Locations = []
    # for i in range(int(nbrOfTimeStepsToTimeout) - 1):
    #     l = []
    #     for j in range(2):
    #         l.append(0)
    #     Old_Locations.append(l)

    Generation_ids = 0  #At which generation
    Chromosome_ids = 1  #At which chromosome
    timeStepsDone = 0  #How many time steps passed
    prev_carLines = []
    BestFitnessChromoID = 1
    Car_Finished_Pool = 0
    nbrOfParentsToKeep = math.ceil(GA.PercentBestParentsToKeep * GA.populationSize / 100)  #For replacement

    All_Chromosomes = []  #All chromosome weights
    All_Chromosomes_Fitness = []  #Fitness of each chromosome (in terms of time)

    #To store things from surviving chromosomes in later on
    for i in range(GA.populationSize):
        l = []
        for j in range(GA.chromosomeLength):
            l.append(0)
        All_Chromosomes.append(l)
        All_Chromosomes_Fitness.append(0)

    # Iterating Generations
    while (1):
        # Move Car and Draw Environment - Get Sensor Readings and Collision State
        LifeTimes = 0  # In number of draw steps(multiple of GA.dt)
        sensor_readings = []
        y = 0
        print("Sensor readings: ")  #input sensor readings (angles - spectrum - distance) from the user
        for i in range(sensor.size):
            sensor_readings.append(int(input()))

        dist = min(sensor_readings)  #will be used to determine if there is a collision
        id = sensor_readings.index(dist)

        collison_bools = dist <= collison_value  #collision if the closest sensor reading is within the collision threshold

        timeStepsDone = timeStepsDone + 1

        # Increase lifetimes by 1

        # Update Fitness
        Fitness = LifeTimes  #fitness used as a measure of time steps (steps is an iteration of this while loop)
        LifeTimes = LifeTimes + 1
        Fitness += 1
        # If car is almost in same place after nbrOfTimeStepsToTimeout has passed, set rotating_around_my_self_bool
        rotating_around_my_self_bool = 0
        # if (LifeTimes >= nbrOfTimeStepsToTimeout):
        #     Old_Locations.append(carLocations)
        #     mean_x = statistics.mean(Old_Locations[:][0])
        #     mean_y = statistics.mean(Old_Locations[:][1])
        #     x = Old_Locations[0]
        #     for i in range(len(x)):
        #         try:
        #             x[i] = math.pow((x[i] - mean_x), 2)
        #         except OverflowError:
        #             x[i] = float('inf')
        #         var_x = statistics.mean(x)  # numpy.mean(( - mean_x) ^ 2)
        #         x = Old_Locations[1]
        #         for i in range(len(x)):
        #             try:
        #                 x[i] = math.pow((x[i] - mean_y), 2)
        #             except OverflowError:
        #                 x[i] = float('inf')
        #         var_y = statistics.mean(x)
        #
        #         if var_x <= smallXYVariance and var_y <= smallXYVariance:
        #             rotating_around_my_self_bool = 1
        # else:
        #     Old_Locations[LifeTimes - 1][0] = carLocations[0]
        #     Old_Locations[LifeTimes - 1][1] = carLocations[1]

        if (collison_bools):
            if (Fitness > max(Chromosomes_Fitness)):
                BestFitnessChromoID = Chromosome_ids  # Save Best Fitness

            Chromosomes_Fitness[Chromosome_ids] = Fitness

            if (Fitness >= GA.goodFitness):  #if fitness reaches the good-fitness threshold, save it
                Car_Finished_Pool = 1
                BestFitnessChromoID = Chromosome_ids

            # ResetCarAndLifeTime(carLocations, env, 0, carHeadings, steerAngles, LifeTimes, prev_carLines)

            if (Car_Finished_Pool != 1):
                Chromosome_ids = Chromosome_ids + 1

        elif (rotating_around_my_self_bool == 1):
            All_Chromosomes_Fitness[Chromosome_ids] = 0  # TODO Is this good ?
            # ResetCarAndLifeTime(carLocations, env, 0, carHeadings, steerAngles, LifeTimes, prev_carLines)

            if (Car_Finished_Pool != 1):
                Chromosome_ids = Chromosome_ids + 1
            rotating_around_my_self_bool = 0

        # Jump to car next Generation if necessary
        if (Chromosome_ids >= GA.populationSize and (Car_Finished_Pool != 1)):
            if (Generation_ids >= GA.nbrOfGenerations_max):
                Car_Finished_Pool = 1
                Chromosome_ids = BestFitnessChromoID
            else:
                # if (GA.replacement_option == 0)
                All_Chromosomes[(i - 1) * GA.populationSize:i * GA.populationSize] = Chromosomes
                x = 0
                for i in range((i - 1) * GA.populationSize, i * GA.populationSize):
                    All_Chromosomes_Fitness[i] = Chromosomes_Fitness[x]
                    x += 1

                y += 1
                tmp = All_Chromosomes_Fitness.copy()
                idx = numpy.argsort(tmp, kind='mergesort', axis=0).tolist()[::-1]
                idx2 = numpy.array(idx).tolist()[0:nbrOfParentsToKeep]
                ParentsToKeep = []
                for i in range(len(idx2)):
                    ParentsToKeep.append(All_Chromosomes[idx2[i]])

                tmp = Chromosomes_Fitness.copy()
                idx = numpy.argsort(tmp, kind='mergesort', axis=0).tolist()[::-1]
                idx2 = numpy.array(idx).tolist()[0:len(idx) - nbrOfParentsToKeep]
                Current_Chromosomes = []
                Current_Fitness = []
                for i in range(len(idx2)):
                    Current_Chromosomes.append(Chromosomes[idx2[i]])
                    Current_Fitness.append(Chromosomes_Fitness[idx2[i]])

                Chromosomes_Childs = ApplyGA(GA, Current_Chromosomes, Current_Fitness)
                Chromosomes = []
                for i in range(len(ParentsToKeep)):
                    Chromosomes.append(ParentsToKeep[i])
                for i in range(len(Chromosomes_Childs)):
                    Chromosomes.append(Chromosomes_Childs[i])

                Chromosome_ids = 1
                Generation_ids = Generation_ids + 1
                for i in range(len(Chromosomes_Fitness)):
                    Chromosomes_Fitness[i] = 0
                BestFitnessChromoID = 1
        current_chromosome = Chromosomes[Chromosome_ids]

        # Apply sensor reading to ANN to calculate steerAngle

        outputs = Feedforward(sensor_readings, current_chromosome, Network_Arch, unipolarBipolarSelector)
        steerAngles = numpy.pi / 2 * (outputs[1] - outputs[0])  #steering angle in radians, spanning -90 to +90 degrees
        frontWheel = []
        backWheel = []
        # 2D car steering physics(Calculate carLocation and carHeading)
        frontWheel.append(float(carLocations[0] + car.wheelBase / 2 * math.cos(carHeadings)))
        frontWheel.append(float(carLocations[1] + car.wheelBase / 2 * math.sin(carHeadings)))
        backWheel.append(float(carLocations[0] - car.wheelBase / 2 * math.cos(carHeadings)))
        backWheel.append(float(carLocations[1] - car.wheelBase / 2 * math.sin(carHeadings)))
        backWheel[0] = backWheel[0] + car.speed * dt * math.cos(carHeadings)
        backWheel[1] = backWheel[1] + car.speed * dt * math.sin(carHeadings)
        frontWheel[0] = frontWheel[0] + car.speed * dt * math.cos(carHeadings + steerAngles)
        frontWheel[1] = frontWheel[1] + car.speed * dt * math.sin(carHeadings + steerAngles)
        for i in range(len(carLocations)):
            carLocations[i] = (frontWheel[i] + backWheel[i]) / 2
        carHeadings = math.atan2(frontWheel[1] - backWheel[1],
                                 frontWheel[0] - backWheel[0])

        # print("Front Wheel: ", frontWheel)
        # print("Back Wheel: ", backWheel)
        print("Steering Angles: ", steerAngles)
Example #7
# Concatenating BERT and entity embeddings
X_train = np.concatenate((X_train, X_etrain), axis=1)
X_test = np.concatenate((X_test, X_etest), axis=1)
X_train = torch.from_numpy(X_train).float()   #convert the concatenated features to tensors for the PyTorch model
X_test = torch.from_numpy(X_test).float()

y_train = torch.nn.functional.one_hot(y_train.to(torch.int64), num_classes=2)
y_test = torch.nn.functional.one_hot(y_test.to(torch.int64), num_classes=2)
y_train = y_train.float()
y_test = y_test.float()

# Setting model parameters
learning_rate = 1e-4
epochs = 10

# Initialize model and loss function
model = Feedforward(X_train.shape[1], X_train.shape[0])
criterion = torch.nn.BCELoss()
optimizer = torch.optim.Adam(model.parameters(), lr=learning_rate)

# Print model summary
summary(model, input_size=(X_train.shape[1], X_train.shape[0]))

# Model loss evaluation before training
model.eval()
y_pred = model(X_test)
before_train = criterion(y_pred, y_test)
print('Test loss before training:', before_train.item())

# Model training
model.train()
for epoch in range(epochs):   
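    # The snippet is truncated here; a typical body for this training loop
    # (an assumption, not the original author's code) would be:
    optimizer.zero_grad()                     #reset gradients accumulated in the previous step
    y_pred = model(X_train)                   #forward pass over the training features
    loss = criterion(y_pred, y_train)         #BCE loss against the one-hot labels
    print('Epoch {}: train loss: {}'.format(epoch, loss.item()))
    loss.backward()                           #backpropagate the loss
    optimizer.step()                          #update the model weights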
Example #8
    def pso(self, initialNN, population, epochs, train):

        inertia = 15  #inertia to be tuned
        cog = 3.6  #cognitive component to be tuned
        soc = 7.4  #social component to be tuned

        constrict = 3.8  #constriction coefficient to be tuned
        exiterror = 54.2  #so we are not stuck in the loop forever; controls the while loop
        globalBestP = self.initialNN[0]  #initialize the best position to a random NN
        globalBestE = 10000000  #the best error
        swarm = [0] * self.population  #a population of NN

        for particle in range(len(swarm)):  #a population of NN is a swarm, so this is the same size as the population
            self.NN = self.initialNN[particle]  #a particle

            position = []  #holds the position of the particle
            numw = 0
            for layer in range(len(self.NN)):
                for node in range(len(self.NN[layer])):
                    for weight in range(len(self.NN[layer][node])):
                        numw += 1
                        position.append(self.NN[layer][node][weight])

            fit = 0  #calculates fitness for every NN
            for i in range(epochs):
                train = train.sample(frac=1).reset_index(drop=True)

                for row, trainpoints in train.iterrows():

                    node_values = ff.feedforward(self, trainpoints)
                    fit += PSO.fitness(self, node_values[-1],
                                       trainpoints['class'])
            fit /= epochs * len(train)
            velocity = [0] * numw
            for w in range(numw):  #computes a velocity for each weight of the particle's position
                velocity[w] = rand.uniform(0, 0.1)
            swarm[particle] = part(position, fit, velocity, position, fit)  #creates a particle object holding the info associated with this particle
            if swarm[particle].fitness < globalBestE:  #if fitness is better than global fitness, update fitness to new fitness and new best position is the associated position
                globalBestE = swarm[particle].fitness
                globalBestP = swarm[particle].position

        while (globalBestE > exiterror):

            for particle in range(len(swarm)):
                fit = 0
                for i in range(epochs):  #calculates fitness of position
                    train = train.sample(frac=1).reset_index(drop=True)
                    for row, trainpoints in train.iterrows():
                        node_values = ff.feedforward(self, trainpoints)
                        fit += PSO.fitness(self, node_values[-1],
                                           trainpoints['class'])
                fit /= epochs * len(train)
                swarm[particle].fitness = fit

                if swarm[particle].fitness < swarm[particle].bestfit:  #if fitness is better than the previous personal best, update the best fitness and position
                    swarm[particle].bestfit = swarm[particle].fitness
                    swarm[particle].bestposition = swarm[particle].position
                if swarm[particle].fitness < globalBestE:  #if fitness is better than global fitness, update fitness to new fitness and new best position is the associated position
                    globalBestE = swarm[particle].fitness
                    globalBestP = swarm[particle].position

                newV = [0] * numw
                newP = [0] * numw
                for w in range(numw):  #calculating new velocity and position
                    beta1 = cog * rand.uniform(0, 1)
                    beta2 = soc * rand.uniform(0, 1)
                    betatot = beta1 + beta2
                    aval = abs(betatot * (betatot - 4))
                    sqr = math.sqrt(aval)
                    X = (2 * constrict) / aval  #this is using the constriction coefficient to help PSO converge

                    newV[w] = X * ((inertia * swarm[particle].velocity[w]) +
                                   (beta1 * (swarm[particle].bestposition[w] - swarm[particle].position[w])) +
                                   (beta2 * (globalBestP[w] - swarm[particle].position[w])))  #CALCULATE VELOCITY
                    newP[w] = swarm[particle].position[w] + newV[w]

                swarm[particle].velocity = newV  #updating new velocity
                swarm[particle].position = newP  #updating new position

        return (globalBestP)
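Two notes on this snippet. Inside the while loop the fitness is evaluated with ff.feedforward, but self.NN is never rebuilt from the particle's updated position, so the evaluation does not reflect the new weights; if this fragment is reused, the position should be copied back into the network before each evaluation, for example with a helper like the sketch below (an assumption about the nested [layer][node][weight] layout, mirroring the flattening loop at the top of pso). Also, the constriction factor is computed here as 2 * constrict / aval; the usual Clerc-Kennedy form is chi = 2 * k / abs(2 - phi - sqrt(phi**2 - 4 * phi)) with phi = cog + soc > 4, which is presumably what the otherwise unused sqr term was intended for.

def position_to_weights(NN, position):
    #copy a flat position vector back into the nested [layer][node][weight] structure
    w_idx = 0
    for layer in range(len(NN)):
        for node in range(len(NN[layer])):
            for weight in range(len(NN[layer][node])):
                NN[layer][node][weight] = position[w_idx]
                w_idx += 1
    return NN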