def evalFunc_WSC(individual, Original, weights, constraints=[]):
    # Weighted structural complexity (WSC) fitness: a weighted sum of the
    # normalized role count, user-role assignments, role-permission
    # assignments, and missing accesses (accs). Infeasible individuals receive
    # a constant penalty of len(weights). The trailing comma returns a
    # one-element tuple, as DEAP expects fitness values to be sequences.
    userSize = Original.shape[0]
    permissionSize = Original.shape[1]
    if (constraints
            and not feasible(individual, userSize, permissionSize, constraints)):
        fitness = len(weights)
    else:
        numberOfRoles = len(individual[0])
        array = decoder.resolveRoleModelChromosomeIntoBoolArray(
            individual[0], userSize, permissionSize)
        conf, accs = matrixOps.compareMatrices(array, Original)
        numberOfUR = statistics.URCnt(individual[0])
        numberOfRP = statistics.RPCnt(individual[0])
        w1 = weights[0]
        w2 = weights[1]
        w3 = weights[2]
        w4 = weights[3]
        worstCase_numberOfRoles = min(userSize, permissionSize)
        numberOfRoles_normalized = utils.normalization(
            numberOfRoles, 1, worstCase_numberOfRoles)
        accs_normalized = utils.normalization(accs, 0, Original.sum())
        numberOfUR_normalized = utils.normalization(
            numberOfUR, userSize, userSize * worstCase_numberOfRoles)
        numberOfRP_normalized = utils.normalization(
            numberOfRP, permissionSize,
            permissionSize * worstCase_numberOfRoles)
        fitness = (w1 * numberOfRoles_normalized + w2 * numberOfUR_normalized +
                   w3 * numberOfRP_normalized + w4 * accs_normalized)
    return fitness,
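# The evaluation functions above and below rely on utils.normalization for
# min-max scaling into [0, 1]. The helper below is only a sketch of the
# assumed semantics (it is NOT the project's utils.normalization) and is kept
# under a distinct name so it does not shadow anything.
def _minmax_normalization_sketch(value, minimum, maximum):
    """Assumed behaviour: scale value linearly into [0, 1]."""
    if maximum == minimum:
        # Degenerate range; assumption: treat as already normalized.
        return 0.0
    return (value - minimum) / (maximum - minimum)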
def evalFunc_WSC_Star_RoleDis(individual, Original, weights, population,
                              constraints=[]):
    userSize = Original.shape[0]
    permissionSize = Original.shape[1]
    if (constraints
            and not feasible(individual, userSize, permissionSize, constraints)):
        fitness = len(weights)
    else:
        numberOfRoles = len(individual[0])
        array = decoder.resolveRoleModelChromosomeIntoBoolArray(
            individual[0], userSize, permissionSize)
        conf, accs = matrixOps.compareMatrices(array, Original)
        numberOfUR = statistics.URCnt(individual[0])
        numberOfRP = statistics.RPCnt(individual[0])

        # Average role count over the current population.
        count = 0
        for rm in population:
            count += len(rm[0])
        numberOfRolesInPop = count / len(population)

        # Distance of this individual's role count from the population
        # average; small deviations (< 3) are not penalized.
        roleDis = abs(numberOfRolesInPop - numberOfRoles)
        if (roleDis < 3):
            roleDis = 0

        w1 = weights[0]
        w2 = weights[1]
        w3 = weights[2]
        w4 = weights[3]
        w5 = weights[4]
        w6 = weights[5]
        worstCase_numberOfRoles = min(userSize, permissionSize)
        numberOfRoles_normalized = utils.normalization(
            numberOfRoles, 1, worstCase_numberOfRoles)
        accs_normalized = utils.normalization(accs, 0, Original.sum())
        conf_normalized = utils.normalization(conf, 0,
                                              Original.size - Original.sum())
        numberOfUR_normalized = utils.normalization(
            numberOfUR, userSize, userSize * worstCase_numberOfRoles)
        numberOfRP_normalized = utils.normalization(
            numberOfRP, permissionSize,
            permissionSize * worstCase_numberOfRoles)
        roleDis_normalized = utils.normalization(
            roleDis, 0,
            max(numberOfRolesInPop, permissionSize - numberOfRolesInPop,
                userSize - numberOfRolesInPop))
        fitness = (w1 * numberOfRoles_normalized + w2 * numberOfUR_normalized +
                   w3 * numberOfRP_normalized + w4 * accs_normalized +
                   w5 * conf_normalized + w6 * roleDis_normalized)
    return fitness,
def evalFunc_URCnt(individual, Original, constraints=[]):
    userSize = Original.shape[0]
    permissionSize = Original.shape[1]
    numberOfUR = statistics.URCnt(individual[0])
    if (constraints
            and not feasible(individual, userSize, permissionSize, constraints)):
        worstCase_numberOfRoles = min(userSize, permissionSize)
        fitness = worstCase_numberOfRoles
    else:
        fitness = numberOfUR
    return fitness,
def evalFunc_AssignmentCnt2(individual, Original, constraints=[]):
    userSize = Original.shape[0]
    permissionSize = Original.shape[1]
    if (constraints
            and not feasible(individual, userSize, permissionSize, constraints)):
        worstCase_numberOfRoles = min(userSize, permissionSize)
        fitness = worstCase_numberOfRoles * 2
    else:
        numberOfUR = statistics.URCnt(individual[0])
        numberOfRP = statistics.RPCnt(individual[0])
        fitness = numberOfUR + numberOfRP
    return fitness,
def getIndWithLowestURRPCnt(population, OriginalFile, topk=1):
    Original = numpy.matrix(parser.read(OriginalFile))
    #top_inds = population
    toplist = []
    for ind in population:
        count = statistics.URCnt(ind[0]) + statistics.RPCnt(ind[0])
        if (len(toplist) < topk):
            # Fill the list until it holds topk entries, kept sorted ascending.
            toplist.append([count, ind])
            toplist.sort(key=lambda tup: tup[0])
        elif (count < toplist[-1][0]):
            # Replace the current worst (largest) entry and re-sort.
            toplist[-1] = [count, ind]
            toplist.sort(key=lambda tup: tup[0])
    for t in toplist:
        print(str(t[0]) + ":\t" + str(t[1]))
def printStatistics(population, OriginalFile, topk=1):
    Original = numpy.matrix(parser.read(OriginalFile))
    #top_inds = population
    top_inds = tools.selNSGA2(population, k=topk)
    i = 0
    for top_ind in top_inds:
        conf = statistics.Conf(top_ind[0], Original)
        accs = statistics.Accs(top_ind[0], Original)
        roleCnt = statistics.RoleCnt(top_ind[0])
        urCnt = statistics.URCnt(top_ind[0])
        rpCnt = statistics.RPCnt(top_ind[0])
        print("\nTOP INDIVIDUAL: " + str(i))
        print("conf: " + str(conf))
        print("accs: " + str(accs))
        print("roleCnt: " + str(roleCnt))
        print("urCnt: " + str(urCnt))
        print("rpCnt: " + str(rpCnt))
        i += 1
def evalFunc_Int_AssignmentCnt(individual, Original, userAttributeValues,
                               constraints=[]):
    userSize = Original.shape[0]
    permissionSize = Original.shape[1]
    if (constraints
            and not feasible(individual, userSize, permissionSize, constraints)):
        worstCase_interp = 1
        fitness = worstCase_interp
    else:
        AssignmentCnt = statistics.URCnt(individual[0]) + statistics.RPCnt(
            individual[0])
        AssignmentCnt_normalized = utils.normalization(
            AssignmentCnt, userSize + permissionSize,
            userSize * permissionSize)
        interp = statistics.Interp(individual[0], userAttributeValues)
        fitness = AssignmentCnt_normalized - interp + 1
    return fitness,
def evolution_multi(Original, evalFunc, populationSize, CXPB, addRolePB,
                    removeRolePB, removeUserPB, removePermissionPB, addUserPB,
                    addPermissionPB, NGEN, freq, numberTopRoleModels,
                    optimization, fortin=False, untilSolutionFound=False,
                    pickleFile="", checkpoint=False, prevFiles="",
                    userAttributeValues=[], constraints=[],
                    printPopulations=False, pop_directory="", fixedRoleCnt=0):
    # Validations
    if (len(evalFunc) < 2):
        raise ValueError("Less than 2 objectives not possible")
    if (len(evalFunc) > 3):
        raise ValueError("More than 3 objectives not supported")
    if (not (populationSize % 4 == 0)):
        raise ValueError("Population size has to be a multiple of 4")

    logger.debug("Prepare evolutionary algorithm...")
    time = []
    results = defaultdict(list)
    genStart = 0
    population = []

    # Create Logbook
    logbook = tools.Logbook()

    # Register Optimization
    weights = ()
    for obj in evalFunc:
        if (obj == "FBasic" or obj == "FEdge"):
            weights += (1.0, )
        else:
            weights += (-1.0, )
    creator.create("FitnessMinMax", base.Fitness, weights=weights)
    creator.create("Individual", list, fitness=creator.FitnessMinMax)

    # Get Checkpoint
    '''if (checkpoint and len(prevFiles)!=0):
        prevFile = prevFiles[0]
        if (os.path.isfile(prevFile)):
            print("Read checkpoint...")
            cp = pickle.load(open(prevFile, "rb"))
            population = cp["population"]
            genStart = int(cp["generation"])
            Original = cp["Original"]
            results = cp["results"]
            time = cp["time"]
            prevFiles = cp["prevFiles"]
            prevFiles.append(prevFile)
            logbook = cp["logbook"]
            random.setstate(cp["rndstate"])
            print("DONE.\n")
        else:
            print("Checkpoint file does not exit")
    else:
        print("Use checkpoint: False")
        checkpoint = False'''

    userSize = int(Original.shape[0])
    permissionSize = int(Original.shape[1])

    # Toolbox
    toolbox = base.Toolbox()

    # Chromosome generator
    toolbox.register("chromosome", init.generateChromosome, userSize, userSize,
                     permissionSize, optimization=optimization,
                     fixedRoleCnt=fixedRoleCnt)

    # Structure initializers
    toolbox.register("individual", tools.initRepeat, creator.Individual,
                     toolbox.chromosome, 1)
    toolbox.register("population", tools.initRepeat, list, toolbox.individual)

    # Genetic Operators
    toolbox.register("evaluate", evals.evalFunc_Multi, Original=Original,
                     evalFunc=evalFunc,
                     userAttributeValues=userAttributeValues,
                     constraints=constraints)
    toolbox.register("mate", operators.mateFunc, optimization=optimization)
    toolbox.register("mutate", operators.mutFunc, addRolePB=addRolePB,
                     removeRolePB=removeRolePB, removeUserPB=removeUserPB,
                     removePermissionPB=removePermissionPB,
                     addUserPB=addUserPB, addPermissionPB=addPermissionPB,
                     userSize=userSize, permissionSize=permissionSize,
                     optimization=[optimization, optimization])
    if (fortin):
        toolbox.register("preselect", fortin2013.selTournamentFitnessDCD)
        toolbox.register("select", fortin2013.selNSGA2)
    else:
        toolbox.register("preselect", tools.selTournamentDCD)
        toolbox.register("select", tools.selNSGA2)

    # Register statistics
    statsConf = tools.Statistics(
        key=lambda ind: statistics.Conf(ind[0], Original))
    statsAccs = tools.Statistics(
        key=lambda ind: statistics.Accs(ind[0], Original))
    statsRoleCnt = tools.Statistics(key=lambda ind: statistics.RoleCnt(ind[0]))
    statsURCnt = tools.Statistics(key=lambda ind: statistics.URCnt(ind[0]))
    statsRPCnt = tools.Statistics(key=lambda ind: statistics.RPCnt(ind[0]))
    statsInterp = tools.Statistics(
        key=lambda ind: statistics.Interp(ind[0], userAttributeValues))
    mstats = None
    if (len(evalFunc) >= 2):
        statsFitness1 = tools.Statistics(key=lambda ind: ind.fitness.values[0])
        statsFitness2 = tools.Statistics(key=lambda ind: ind.fitness.values[1])
        mstats = tools.MultiStatistics(fitnessObj1=statsFitness1,
                                       fitnessObj2=statsFitness2,
                                       Conf=statsConf, Accs=statsAccs,
                                       RoleCnt=statsRoleCnt, URCnt=statsURCnt,
                                       RPCnt=statsRPCnt, Interp=statsInterp)
    if (len(evalFunc) == 3):
        statsFitness3 = tools.Statistics(key=lambda ind: ind.fitness.values[2])
        mstats = tools.MultiStatistics(fitnessObj1=statsFitness1,
                                       fitnessObj2=statsFitness2,
                                       fitnessObj3=statsFitness3,
                                       Conf=statsConf, Accs=statsAccs,
                                       RoleCnt=statsRoleCnt, URCnt=statsURCnt,
                                       RPCnt=statsRPCnt, Interp=statsInterp)
    mstats.register("avg", numpy.mean)
    mstats.register("std", numpy.std)
    mstats.register("min", numpy.min)
    mstats.register("max", numpy.max)

    logbook.header = "gen", "evals"
    for o in range(1, len(evalFunc) + 1):
        logbook.chapters["fitnessObj" +
                         str(o)].header = "min", "avg", "max", "std"
    logbook.chapters["Conf"].header = "min", "avg", "max", "std"
    logbook.chapters["Accs"].header = "min", "avg", "max", "std"
    logbook.chapters["RoleCnt"].header = "min", "avg", "max", "std"
    logbook.chapters["URCnt"].header = "min", "avg", "max", "std"
    logbook.chapters["RPCnt"].header = "min", "avg", "max", "std"
    logbook.chapters["Interp"].header = "min", "avg", "max", "std"

    # Creating the population
    if (not population):
        logger.info("Generate new population of " + str(populationSize) +
                    " individuals")
        population = toolbox.population(n=populationSize)

    solutionFound = [None, None, None]

    # Evaluate the individuals with an invalid fitness
    invalid_ind = [ind for ind in population if not ind.fitness.valid]
    fitnesses = toolbox.map(toolbox.evaluate, invalid_ind)
    for ind, fit in zip(invalid_ind, fitnesses):
        ind.fitness.values = fit
        if (fit[0] == 0):
            solutionFound[0] = 0
        if (fit[1] == 0):
            solutionFound[1] = 0
        if (max(fit) == 0):
            solutionFound[2] = 0

    # Save population in JSON file
    if (printPopulations):
        pop_subdirectory = pop_directory + "\\Generation_" + str(genStart)
        #if not os.path.exists(pop_subdirectory):
        #    os.makedirs(pop_subdirectory)
        utils.saveDiversity(genStart, population,
                            pop_subdirectory + "_diversity.json")
        utils.savePopulation(genStart, population,
                             pop_subdirectory + "_population.pkl")
        #visual.showBestResult(population, genStart, Original, pop_subdirectory+"\\Individual", "Individual", "Individual from Generation "+str(genStart), False, False, True, False)

    # Log statistics for first generation
    if ((len(logbook) == 0)
            or (logbook.pop(len(logbook) - 1)["gen"] != genStart)):
        record = mstats.compile(population)
        logbook.record(gen=genStart, evals=len(invalid_ind), **record)
        printText = "Generation " + str(genStart) + ":\t" + str(
            logbook.stream) + "\n"
        for o in range(1, len(evalFunc) + 1):
            printText += "\n" + str(
                logbook.chapters["fitnessObj" + str(o)].stream)
        printText += str(logbook.chapters["Conf"].stream) + "\n" \
            + str(logbook.chapters["Accs"].stream) + "\n" \
            + str(logbook.chapters["RoleCnt"].stream) + "\n" \
            + str(logbook.chapters["URCnt"].stream) + "\n" \
            + str(logbook.chapters["RPCnt"].stream) + "\n" \
            + str(logbook.chapters["Interp"].stream)
        logger.info(printText)

    # Begin the evolution
    logger.info("Start evolution...")
    start = datetime.datetime.now()
    logger.info("Start time: " + str(start))
    #hof = tools.HallOfFame(maxsize=1)

    # This is just to assign the crowding distance to the individuals,
    # no actual selection is done
    population = toolbox.select(population, len(population))

    generation = genStart + 1
    stop = False
    logger.info("Start evolution with Generation " + str(genStart))
    while ((not stop) and (generation <= genStart + NGEN)):
        # Vary the population
        offspring = toolbox.preselect(population, len(population))
        offspring = [toolbox.clone(ind) for ind in offspring]
        for ind1, ind2 in zip(offspring[::2], offspring[1::2]):
            if random.random() <= CXPB:
                toolbox.mate(ind1, ind2)
            toolbox.mutate(ind1)
            toolbox.mutate(ind2)
            del ind1.fitness.values, ind2.fitness.values
        #offspring = algorithms.varOr(population, toolbox, 100, cxpb=CXPB, mutpb=MUTPB)

        # Evaluate the individuals with an invalid fitness
        invalid_ind = [ind for ind in offspring if not ind.fitness.valid]
        fitnesses = toolbox.map(toolbox.evaluate, invalid_ind)
        for ind, fit in zip(invalid_ind, fitnesses):
            ind.fitness.values = fit
            if (not solutionFound[0] and fit[0] == 0):
                solutionFound[0] = generation
            if (not solutionFound[1] and fit[1] == 0):
                solutionFound[1] = generation
            if (not solutionFound[2] and max(fit) == 0):
                solutionFound[2] = generation
            #if (fit[0] == 0):
            #    stop = True

        # Select the next generation population
        population = toolbox.select(population + offspring, len(population))

        # Add Fitness values to results
        if generation % freq == 0:
            for ind in population:
                results[generation].append(ind.fitness.values)

        # Log statistics for generation
        record = mstats.compile(population)
        logbook.record(gen=generation, evals=len(invalid_ind), **record)
        printText = "Generation " + str(generation) + ":\t" + str(
            logbook.stream) + "\t"
        for o in range(1, len(evalFunc) + 1):
            printText += str(
                logbook.chapters["fitnessObj" + str(o)].stream) + "\t"
        printText += str(logbook.chapters["Conf"].stream) + "\t" \
            + str(logbook.chapters["Accs"].stream) + "\t" \
            + str(logbook.chapters["RoleCnt"].stream) + "\t" \
            + str(logbook.chapters["URCnt"].stream) + "\t" \
            + str(logbook.chapters["RPCnt"].stream) + "\t" \
            + str(logbook.chapters["Interp"].stream)
        logger.info(printText)

        if generation % int((genStart + NGEN) / 10) == 0:
            if (printPopulations):
                pop_subdirectory = pop_directory + "\\Generation_" + str(
                    generation)
                #if not os.path.exists(pop_subdirectory):
                #    os.makedirs(pop_subdirectory)
                utils.saveDiversity(generation, population,
                                    pop_subdirectory + "_diversity.json")
                utils.savePopulation(generation, population,
                                     pop_subdirectory + "_population.pkl")
                #visual.showBestResult(offspring, genStart, Original, pop_subdirectory+"\\Individual", "Individual", "Individual from Generation "+str(generation), False, False, True, False)

        generation += 1

    utils.printDiversity(pop_directory, int((genStart + NGEN) / 10))
    utils.savePopulation(generation, population,
                         pop_subdirectory + "_population.pkl")

    end = datetime.datetime.now()
    timediff = end - start
    time.append(timediff.total_seconds())
    generation -= 1

    # Print final population
    #visual.printpopulation(population)
    logger.info("==> Generation " + str(generation))
    logger.info("DONE.\n")

    # Set Checkpoint
    fileExt = "_M"
    for obj in evalFunc:
        fileExt += "_" + obj[:5]
    fileExt += "_" + str(len(population)) + "_" + str(generation)
    '''if (checkpoint):
        fileExt = "_cont_" + str(len(population)) + "_" + str(generation) + "_" + str(CXPB) + "_" + str(MUTPB_All)
        pickleFile = "Checkpoint" + fileExt + ".pkl"
        print("Save checkpoint into " + str(pickleFile))
        cp = dict(population=population, generation=generation,
                  rndstate=random.getstate(), Original=Original,
                  results=results, time=time, populationSize=populationSize,
                  CXPB=CXPB, prevFiles=prevFiles, MUTPB_All=MUTPB_All,
                  addRolePB=addRolePB, removeRolePB=removeRolePB,
                  removeUserPB=removeUserPB,
                  removePermissionPB=removePermissionPB, addUserPB=addUserPB,
                  addPermissionPB=addPermissionPB, logbook=logbook)
        pickle.dump(cp, open(pickleFile, "wb"), 2)
        print("DONE.\n")'''

    top = toolbox.select(population, k=numberTopRoleModels)
    return (population, results, generation, time, prevFiles, top, logbook,
            fileExt, solutionFound)
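# Hypothetical usage sketch for evolution_multi (illustrative values only; the
# accepted objective names for evalFunc and the semantics of parameters such
# as optimization and fixedRoleCnt depend on the project's evals/init/operators
# modules and are assumptions, not confirmed by this file):
#
#     Original = numpy.matrix(numpy.random.randint(0, 2, size=(20, 10)))
#     (population, results, generation, time, prevFiles, top, logbook,
#      fileExt, solutionFound) = evolution_multi(
#         Original, evalFunc=["RoleCnt", "URCnt"], populationSize=40,
#         CXPB=0.5, addRolePB=0.1, removeRolePB=0.1, removeUserPB=0.1,
#         removePermissionPB=0.1, addUserPB=0.1, addPermissionPB=0.1,
#         NGEN=50, freq=10, numberTopRoleModels=3, optimization=True)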
def evolution(Original, evalFunc, populationSize, tournsize, CXPB, MUTPB_All,
              addRolePB, removeRolePB, removeUserPB, removePermissionPB,
              addUserPB, addPermissionPB, NGEN, freq, numberTopRoleModels,
              optimization, untilSolutionFound=False, eval_weights=[],
              pickleFile="", checkpoint=False, prevFiles="",
              userAttributeValues=[], constraints=[], printPopulations=False,
              pop_directory="", fixedRoleCnt=0):
    logger.info("Prepare evolutionary algorithm...")
    time = []
    results = defaultdict(list)
    genStart = 0
    population = []

    # Create Logbook
    logbook = tools.Logbook()

    # Register Optimization
    if (evalFunc == "FBasic" or evalFunc == "FEdge"):
        creator.create("FitnessMinMax", base.Fitness,
                       weights=(1.0, ))  # MAXIMIZATION
    else:
        creator.create("FitnessMinMax", base.Fitness,
                       weights=(-1.0, ))  # MINIMIZATION
    creator.create("Individual", list, fitness=creator.FitnessMinMax)

    # Get Checkpoint
    '''if (checkpoint and len(prevFiles)!=0):
        prevFile = prevFiles[0]
        if (os.path.isfile(prevFile)):
            print("Read checkpoint...")
            cp = pickle.load(open(prevFile, "rb"))
            population = cp["population"]
            genStart = int(cp["generation"])
            Original = cp["Original"]
            results = cp["results"]
            time = cp["time"]
            prevFiles = cp["prevFiles"]
            prevFiles.append(prevFile)
            logbook = cp["logbook"]
            random.setstate(cp["rndstate"])
            print("DONE.\n")
        else:
            raise ValueError("Checkpoint file does not exist")
    else:
        print("Use checkpoint: False")
        checkpoint = False'''

    userSize = int(Original.shape[0])
    permissionSize = int(Original.shape[1])

    # Toolbox
    toolbox = base.Toolbox()

    # Register Chromosome Generator
    toolbox.register("chromosome", init.generateChromosome, maxRoles=userSize,
                     userSize=userSize, permissionSize=permissionSize,
                     optimization=optimization, fixedRoleCnt=fixedRoleCnt)

    # Register Individual and Population Initializers
    toolbox.register("individual", tools.initRepeat, creator.Individual,
                     toolbox.chromosome, 1)
    toolbox.register("population", tools.initRepeat, list, toolbox.individual)

    # Register Evaluation Function
    if (evalFunc == "FBasic"):
        toolbox.register("evaluate", evals.evalFunc_FBasic, Original=Original,
                         weights=eval_weights, constraints=constraints)
    elif (evalFunc == "FEdge"):
        toolbox.register("evaluate", evals.evalFunc_FEdge, Original=Original,
                         weights=eval_weights, constraints=constraints)
    elif (evalFunc == "FBasicMin"):
        toolbox.register("evaluate", evals.evalFunc_FBasicMin,
                         Original=Original, weights=eval_weights,
                         constraints=constraints)
    elif (evalFunc == "FEdgeMin"):
        toolbox.register("evaluate", evals.evalFunc_FEdgeMin,
                         Original=Original, weights=eval_weights,
                         constraints=constraints)
    elif (evalFunc == "FBasicMin_INT"):
        toolbox.register("evaluate", evals.evalFunc_FBasicMin_INT,
                         Original=Original, weights=eval_weights,
                         userAttributeValues=userAttributeValues,
                         constraints=constraints)
    elif (evalFunc == "FEdgeMin_INT"):
        toolbox.register("evaluate", evals.evalFunc_FEdgeMin_INT,
                         Original=Original, weights=eval_weights,
                         userAttributeValues=userAttributeValues,
                         constraints=constraints)
    elif (evalFunc == "WSC"):
        toolbox.register("evaluate", evals.evalFunc_WSC, Original=Original,
                         weights=eval_weights, constraints=constraints)
    elif (evalFunc == "WSC_Star"):
        toolbox.register("evaluate", evals.evalFunc_WSC_Star,
                         Original=Original, weights=eval_weights,
                         constraints=constraints)
    elif (evalFunc == "WSC_Star_RoleDis"):
        toolbox.register("evaluate", evals.evalFunc_WSC_Star_RoleDis,
                         Original=Original, weights=eval_weights,
                         constraints=constraints)
    elif (evalFunc == "Violations"):
        toolbox.register("evaluate", evals.evalFunc_Violations,
                         Original=Original, constraints=constraints)
    elif (evalFunc == "AvgRoleConf_A"):
        toolbox.register("evaluate",
                         evals.evalFunc_AvgRoleConfViolations_Availability,
                         Original=Original, constraints=constraints)
    elif (evalFunc == "Confidentiality"):
        toolbox.register("evaluate", evals.evalFunc_Confidentiality,
                         Original=Original, constraints=constraints)
    elif (evalFunc == "Availability"):
        toolbox.register("evaluate", evals.evalFunc_Availability,
                         Original=Original, constraints=constraints)
    elif (evalFunc == "RoleCnt"):
        toolbox.register("evaluate", evals.evalFunc_RoleCnt,
                         Original=Original, constraints=constraints)
    elif (evalFunc == "URCnt"):
        toolbox.register("evaluate", evals.evalFunc_URCnt, Original=Original,
                         constraints=constraints)
    elif (evalFunc == "RPCnt"):
        toolbox.register("evaluate", evals.evalFunc_RPCnt, Original=Original,
                         constraints=constraints)
    elif (evalFunc == "AvgRoleConf"):
        toolbox.register("evaluate", evals.evalFunc_AvgRoleConfViolations,
                         Original=Original, constraints=constraints)
    elif (evalFunc == "Interpretability"):
        toolbox.register("evaluate", evals.evalFunc_Interpretability,
                         Original=Original,
                         userAttributeValues=userAttributeValues,
                         constraints=constraints)
    else:
        raise ValueError('Evaluation function not known')

    # Register Variation Operators
    toolbox.register("mate", operators.mateFunc, optimization=optimization)
    toolbox.register("mutate", operators.mutFunc, addRolePB=addRolePB,
                     removeRolePB=removeRolePB, removeUserPB=removeUserPB,
                     removePermissionPB=removePermissionPB,
                     addUserPB=addUserPB, addPermissionPB=addPermissionPB,
                     userSize=userSize, permissionSize=permissionSize,
                     optimization=[optimization, optimization])
    toolbox.register("select", tools.selTournament, tournsize=tournsize)

    # Register Statistics
    statsFitness = tools.Statistics(key=lambda ind: ind.fitness.values[0])
    statsConf = tools.Statistics(
        key=lambda ind: statistics.Conf(ind[0], Original))
    statsAccs = tools.Statistics(
        key=lambda ind: statistics.Accs(ind[0], Original))
    statsRoleCnt = tools.Statistics(key=lambda ind: statistics.RoleCnt(ind[0]))
    statsURCnt = tools.Statistics(key=lambda ind: statistics.URCnt(ind[0]))
    statsRPCnt = tools.Statistics(key=lambda ind: statistics.RPCnt(ind[0]))
    statsInterp = tools.Statistics(
        key=lambda ind: statistics.Interp(ind[0], userAttributeValues))
    mstats = tools.MultiStatistics(Fitness=statsFitness, Conf=statsConf,
                                   Accs=statsAccs, RoleCnt=statsRoleCnt,
                                   URCnt=statsURCnt, RPCnt=statsRPCnt,
                                   Interp=statsInterp)
    mstats.register("avg", numpy.mean)
    mstats.register("std", numpy.std)
    mstats.register("min", numpy.min)
    mstats.register("max", numpy.max)

    logbook.header = "gen", "evals"
    logbook.chapters["Fitness"].header = "min", "avg", "max", "std"
    logbook.chapters["Conf"].header = "min", "avg", "max", "std"
    logbook.chapters["Accs"].header = "min", "avg", "max", "std"
    logbook.chapters["RoleCnt"].header = "min", "avg", "max", "std"
    logbook.chapters["URCnt"].header = "min", "avg", "max", "std"
    logbook.chapters["RPCnt"].header = "min", "avg", "max", "std"
    logbook.chapters["Interp"].header = "min", "avg", "max", "std"

    # Creating the population
    if (not population):
        logger.info("Generate new population of " + str(populationSize) +
                    " individuals")
        population = toolbox.population(n=populationSize)

    # Evaluate the individuals with an invalid fitness
    invalid_ind = [ind for ind in population if not ind.fitness.valid]
    if (evalFunc == "WSC_Star_RoleDis"):
        fitnesses = [
            toolbox.evaluate(population=population, individual=ind)
            for ind in invalid_ind
        ]
    else:
        fitnesses = toolbox.map(toolbox.evaluate, invalid_ind)
    for ind, fit in zip(invalid_ind, fitnesses):
        ind.fitness.values = fit

    # Save population in JSON file
    if (printPopulations):
        pop_subdirectory = pop_directory + "\\Generation_" + str(genStart)
        #if not os.path.exists(pop_subdirectory):
        #    os.makedirs(pop_subdirectory)
        utils.saveDiversity(genStart, population,
                            pop_subdirectory + "_diversity.json")
        utils.savePopulation(genStart, population,
                             pop_subdirectory + "_population.pkl")
        #visual.showBestResult(population, genStart, Original, pop_subdirectory+"\\Individual", "Individual", "Individual from Generation "+str(genStart), False, False, True, False)

    # Log statistics for first generation
    if ((len(logbook) == 0)
            or (logbook.pop(len(logbook) - 1)["gen"] != genStart)):
        record = mstats.compile(population)
        logbook.record(gen=genStart, evals=len(invalid_ind), **record)
        logger.info("Generation " + str(genStart) + ":\t" +
                    str(logbook.stream) + "\n" +
                    str(logbook.chapters["Fitness"].stream) + "\n" +
                    str(logbook.chapters["Conf"].stream) + "\n" +
                    str(logbook.chapters["Accs"].stream) + "\n" +
                    str(logbook.chapters["RoleCnt"].stream) + "\n" +
                    str(logbook.chapters["URCnt"].stream) + "\n" +
                    str(logbook.chapters["RPCnt"].stream) + "\n" +
                    str(logbook.chapters["Interp"].stream))

    # Begin the evolution
    logger.info("Start evolution...")
    start = datetime.datetime.now()
    logger.info("Start time: " + str(start))
    #hof = tools.HallOfFame(maxsize=1)

    generation = genStart + 1
    stop = False
    logger.info("Start evolution with Generation " + str(genStart))
    while ((not stop) and (generation <= genStart + NGEN)):
        population = toolbox.select(population, k=len(population))
        offspring = algorithms.varAnd(population, toolbox, cxpb=CXPB,
                                      mutpb=MUTPB_All)

        # Evaluate individuals that still need an evaluation
        invalid_ind = [ind for ind in offspring if not ind.fitness.valid]
        if (evalFunc == "WSC_Star_RoleDis"):
            fitnesses = [
                toolbox.evaluate(population=offspring, individual=ind)
                for ind in invalid_ind
            ]
        else:
            fitnesses = toolbox.map(toolbox.evaluate, invalid_ind)
        for ind, fit in zip(invalid_ind, fitnesses):
            ind.fitness.values = fit

            # Stop condition
            if (untilSolutionFound):
                array = decoder.resolveRoleModelChromosomeIntoBoolArray(
                    ind[0], Original.shape[0], Original.shape[1])
                conf, accs = matrixOps.compareMatrices(array, Original)
                stop = (conf + accs) == 0

        # Add Fitness values to results
        if generation % freq == 0:
            for ind in offspring:
                results[generation].append(ind.fitness.values)

        # Log statistics for generation
        '''record = stats.compile(offspring)
        logbook.record(gen=generation, evals=len(invalids), **record)
        logger.info("Generation "+str(generation)+":\t"+str(logbook.stream))'''
        record = mstats.compile(offspring)
        logbook.record(gen=generation, evals=len(invalid_ind), **record)
        logger.info("Generation " + str(generation) + ":\t" +
                    str(logbook.stream) + "\t" +
                    str(logbook.chapters["Fitness"].stream) + "\t\t" +
                    str(logbook.chapters["Conf"].stream) + "\t\t" +
                    str(logbook.chapters["Accs"].stream) + "\t\t" +
                    str(logbook.chapters["RoleCnt"].stream) + "\t\t" +
                    str(logbook.chapters["URCnt"].stream) + "\t\t" +
                    str(logbook.chapters["RPCnt"].stream) + "\t\t" +
                    str(logbook.chapters["Interp"].stream))

        if generation % int((genStart + NGEN) / 10) == 0:
            if (printPopulations):
                pop_subdirectory = pop_directory + "\\Generation_" + str(
                    generation)
                #if not os.path.exists(pop_subdirectory):
                #    os.makedirs(pop_subdirectory)
                utils.saveDiversity(generation, population,
                                    pop_subdirectory + "_diversity.json")
                #utils.savePopulation(generation, population, pop_subdirectory+"_population.pkl")
                #visual.showBestResult(offspring, genStart, Original, pop_subdirectory+"\\Individual", "Individual", "Individual from Generation "+str(generation), False, False, True, False)

        population = offspring
        generation += 1

    utils.printDiversity(pop_directory, int((genStart + NGEN) / 10))
    utils.savePopulation(generation, population,
                         pop_subdirectory + "_population.pkl")

    end = datetime.datetime.now()
    timediff = end - start
    time.append(timediff.total_seconds())
    generation -= 1

    # Print final population
    #visual.printpopulation(population)
    logger.info("==> Generation " + str(generation))
    logger.info("DONE.\n")

    # Set Checkpoint
    fileExt = "_S_" + evalFunc + "_" + str(len(population)) + "_" + str(
        generation)
    '''if (checkpoint):
        fileExt = "_cont_" + str(len(population)) + "_" + str(generation) + "_" + str(CXPB) + "_" + str(MUTPB_All)
        pickleFile = "Checkpoint" + fileExt + ".pkl"
        print("Save checkpoint into " + str(pickleFile))
        cp = dict(population=population, generation=generation,
                  rndstate=random.getstate(), Original=Original,
                  results=results, time=time, populationSize=populationSize,
                  CXPB=CXPB, prevFiles=prevFiles, MUTPB_All=MUTPB_All,
                  addRolePB=addRolePB, removeRolePB=removeRolePB,
                  removeUserPB=removeUserPB,
                  removePermissionPB=removePermissionPB, addUserPB=addUserPB,
                  addPermissionPB=addPermissionPB, logbook=logbook)
        pickle.dump(cp, open(pickleFile, "wb"), 2)
        print("DONE.\n")'''

    return (population, results, generation, time, prevFiles,
            tools.selBest(population, k=numberTopRoleModels), logbook, fileExt)
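# Hypothetical usage sketch for the single-objective evolution (illustrative
# values only; evalFunc must be one of the names dispatched above, e.g. "WSC"
# with four eval_weights; the input file name and the semantics of
# optimization are assumptions, and parser.read is expected to return an
# access-control matrix as in getIndWithLowestURRPCnt):
#
#     Original = numpy.matrix(parser.read("accessControlMatrix.csv"))
#     (population, results, generation, time, prevFiles, top,
#      logbook, fileExt) = evolution(
#         Original, evalFunc="WSC", populationSize=100, tournsize=3,
#         CXPB=0.5, MUTPB_All=0.2, addRolePB=0.1, removeRolePB=0.1,
#         removeUserPB=0.1, removePermissionPB=0.1, addUserPB=0.1,
#         addPermissionPB=0.1, NGEN=100, freq=10, numberTopRoleModels=5,
#         optimization=True, eval_weights=[0.25, 0.25, 0.25, 0.25])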