def mutation(self):
    """ Method that performs mutation on each offspring. """
    # initialize list of mutated individuals
    mutated_population = []
    # move the elite unchanged into the next population
    for i in range(self.elite_size):
        mutated_population.append(self.population[i])
    # copy the individuals to be mutated (everything after the elite)
    population_before_mutation = self.population.copy()[self.elite_size:]
    # use multiple processes to mutate individuals and compute fitness of the mutated individuals
    with concurrent.futures.ProcessPoolExecutor(max_workers=CORES_NUMBER) as executor:
        # process individuals
        results = [
            executor.submit(_multiprocessing_mutation, individual)
            for individual in population_before_mutation
        ]
        for future in concurrent.futures.as_completed(results):
            # store the result
            mutated_population.append(future.result())
    # assign probabilities to be selected
    self._assign_probabilities(mutated_population)

def crossover(self):
    """ Method that performs crossover in the population. Uses parents chosen by the selection operator. """
    # initialize list of individuals moving to the next population
    next_population = []
    # move the elite unchanged into the next population
    for i in range(self.elite_size):
        next_population.append(self.population[i])
    # use multiple processes to combine genetic information and compute fitness of the offspring
    with concurrent.futures.ProcessPoolExecutor(max_workers=CORES_NUMBER) as executor:
        # process parents
        results = [
            executor.submit(_multiprocessing_crossover, parents, self.mutations_number)
            for parents in self.parents
        ]
        for future in concurrent.futures.as_completed(results):
            # store the result
            next_population.append(future.result())
    # assign probabilities to be selected
    self._assign_probabilities(next_population)

def __init__(self, population_size, elite_size, mutations_number, use_palette):
    """ Constructor for the class Population. Stochastically generates the initial population.

    :param population_size: number of individuals in the population.
    :param elite_size: number of best individuals carried over unchanged.
    :param mutations_number: number of mutations applied per individual.
    :param use_palette: whether to use a colour palette when generating individuals.
    """
    # set population size
    self.population_size = population_size
    # set elite size
    self.elite_size = elite_size
    # set mutations number
    self.mutations_number = mutations_number
    # initialize list of individuals
    population = []
    # initialize total fitness
    total_fitness = 0
    # use multiple processes to create individuals and compute their fitness
    with concurrent.futures.ProcessPoolExecutor(max_workers=CORES_NUMBER) as executor:
        results = []
        # create progenitor
        individual = Individual(number_of_mutations=mutations_number, use_palette=use_palette)
        for i in range(self.population_size):
            # generate sibling
            sibling = Individual(use_palette=use_palette)
            # process sibling
            results.append(executor.submit(_multiprocessing_init, sibling))
        for future in concurrent.futures.as_completed(results):
            # store the result
            population.append(future.result())
    # sort the population by fitness value
    population = sorted(population, key=itemgetter('fitness'))
    # aggregate the fitness of the population elite
    for i in range(self.elite_size):
        total_fitness += population[i]['fitness']
    # auxiliary variable to scale the fitness value
    probability_scale = population[0]['fitness'] + population[self.elite_size - 1]['fitness']
    for i in range(self.population_size):
        # assign probability of being selected based on the fitness value of the individual
        probability = (probability_scale - population[i]['fitness']) / total_fitness if i < self.elite_size else 0
        population[i]['probability'] = probability
    # save the population
    self.population = population
    # initialize parents list
    self.parents = []

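# The Population methods above depend on a module-level core count and worker functions
# (CORES_NUMBER, _multiprocessing_init, _multiprocessing_mutation, _multiprocessing_crossover)
# that are not included in these snippets. ProcessPoolExecutor pickles whatever it submits,
# which is why plain module-level functions are used instead of instance methods. A minimal
# sketch of what such a worker might look like is below; the compute_fitness() call and the
# dict layout are assumptions inferred from the fitness/probability records used above, not
# the project's actual implementation.
import multiprocessing
import concurrent.futures
from operator import itemgetter

CORES_NUMBER = multiprocessing.cpu_count()  # assumed: one worker process per CPU core

def _multiprocessing_init(individual):
    # hypothetical worker: evaluate the individual in a separate process and return the
    # record shape expected by sorted(..., key=itemgetter('fitness')) above
    return {
        'individual': individual,
        'fitness': individual.compute_fitness(),  # assumed Individual method
        'probability': 0,
    }
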
def get_results(u):
    try:
        #_uuid_ = uuid.UUID(u)
        _uuid_ = u
        # look up the future (and worker pid) stored for this job id when it was submitted
        [future, pid] = pid_dict[_uuid_]
        # block until the job finishes and serialise its result
        jd = json.dumps({"job_id": str(_uuid_), "result": future.result()})
        print(jd)
        return jd
    except KeyError:
        print('Key not found')
    except ValueError:
        print('UUID not found')

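# get_results() above assumes a module-level pid_dict that maps a job id to the [future, pid]
# pair stored when the job was submitted. A minimal sketch of that submit side, under those
# assumptions, is shown below; submit_job and the executor name are illustrative only and
# not part of the original application.
import uuid
import json
import concurrent.futures

executor = concurrent.futures.ProcessPoolExecutor()
pid_dict = {}

def submit_job(func, *args):
    # hypothetical helper: run the work on the pool and remember the future under a fresh job id
    job_id = str(uuid.uuid4())
    future = executor.submit(func, *args)
    pid_dict[job_id] = [future, None]  # pid placeholder; the snippet does not show how the real pid is obtained
    return job_id
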
def processDir(self, inFolder, outFolder, isReverse=False):
    if isReverse:
        # reverse mode: unpack each packed record back into an individual file
        for root, dirs, fileList in os.walk(inFolder):
            for cfile in fileList:
                with open(os.path.join(inFolder, cfile), 'rb') as handle:
                    for line in handle:
                        id = line[0:8]
                        ext = self.stripTrailingSpaces(line[8:13])
                        data = line[13:].rstrip("\n")
                        # Python 2 style hex decode; on Python 3 use binascii.unhexlify(data)
                        deData = self.decompress(data.decode('hex'))
                        self.mHandle = open(os.path.join(outFolder, id + ext), "w")
                        self.mHandle.write(deData.rstrip("\n"))
                        self.mHandle.close()
    else:
        # forward mode: process every input file and pack the returned buffers into rolling output files
        index = 0
        self.mHandle = open(os.path.join(outFolder, self.addTrailing(str(index), "0")), "w")
        #self.mIDhandle = open("IDMap.txt","w")
        for root, dirs, fileList in os.walk(inFolder):
            with concurrent.futures.ThreadPoolExecutor(max_workers=100) as executor:
                requests = {
                    executor.submit(self.processFile, os.path.join(root, cfile), os.path.join(outFolder, cfile)): cfile
                    for cfile in fileList
                }
                for future in concurrent.futures.as_completed(requests):
                    pFile = ""
                    try:
                        # roll over to a new output file every mLimit records
                        if self.mID != 10000000 and self.mID % self.mLimit == 0:
                            index = index + 1
                            self.mHandle.close()
                            self.mHandle = open(os.path.join(outFolder, self.addTrailing(str(index), "0")), "w")
                            logging.info("Processed " + str(self.mID))
                        pFile, buffer = future.result()
                        if buffer:
                            id = os.path.basename(pFile)
                            name, ext = os.path.splitext(id)
                            #self.mIDhandle.write(pFile + "\t" + str(self.mID) + "\n")
                            self.mHandle.write(str(self.mID))
                            self.mHandle.write(self.resize(ext))
                            self.mHandle.write(buffer)
                            self.mHandle.write("\n")
                            self.mID = self.mID + 1
                    except Exception as ex:
                        logging.error("Failed to write sequence: " + str(ex) + ":" + pFile)
        #self.mIDhandle.close()
        self.mHandle.close()

def initiate_clients_consent(clients, host='127.0.0.1', port=4445):
    def _consent(client):
        # query the consent request endpoint with this client's challenge
        resp = client['session'].get(
            f'http://{host}:{port}/oauth2/auth/requests/consent?consent_challenge={client["login_challenge"]}',
            allow_redirects=False)
        if resp.ok:
            return client
        return None

    consent_clients = []
    with concurrent.futures.ThreadPoolExecutor(max_workers=100) as executor:
        futures = (executor.submit(_consent, c) for c in clients)
        for future in concurrent.futures.as_completed(futures):
            try:
                consent_clients.append(future.result())
            except Exception as e:
                test_logger.error(e)
    # drop the clients whose consent request failed
    return [x for x in consent_clients if x is not None]

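# initiate_clients_consent() expects each client dict to carry an authenticated requests
# session and the challenge captured earlier in the flow. A minimal usage sketch under that
# assumption is below; the session setup and the challenge value are illustrative only.
import requests

clients = [
    {'session': requests.Session(), 'login_challenge': 'example-challenge'},
]
consented = initiate_clients_consent(clients, host='127.0.0.1', port=4445)
print(len(consented), 'clients reached the consent step')
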
def getAlltheFrames(videoSource, videoName, filterToCheck, ssimIndex):
    #print("inside ","getAlltheFrames")
    frameCounter = 0
    success, firstframe = videoSource.read()
    fps = videoSource.get(cv2.CAP_PROP_FPS)
    resizedframe = cv2.resize(firstframe, (640, 360))
    executor = concurrent.futures.ThreadPoolExecutor(2000)
    totalFrame = videoSource.get(cv2.CAP_PROP_FRAME_COUNT)
    while success:
        success, frame = videoSource.read()
        frameName = "./output/" + str(videoName) + "_Frame_" + str(frameCounter) + ".jpg"
        #pixMarkedName = "./output/"+str(videoName)+"_Frame_"+str(frameCounter)+".jpg"
        maskedName = "./difference/" + str(videoName) + "_Frame_" + str(frameCounter) + ".jpg"
        if success:
            if toskiporNot(firstframe, frame, ssimIndex) or frameCounter == 0:
                print("Working on Frame number #", frameCounter + 1, "Out of ", int(totalFrame))
                future = executor.submit(countPixel, resizedframe, frameName, maskedName, filterToCheck)
                # _counter is a private ThreadPoolExecutor attribute, used here as a running thread number
                logging.debug("Thread No # " + str(executor._counter()))
                # calling result() right after submit blocks here, so frames are effectively processed one at a time
                logging.debug("Processed frame No #" + str(frameCounter + 1) + " and Bad Pixels No # " + str(future.result()))
            firstframe = frame
            frameCounter = frameCounter + 1
            resizedframe = cv2.resize(frame, (640, 360))
        else:
            break
    return frameCounter

def submit_result(self, funcs, *args, **kargs):
    # submit the callable and block immediately on its result, so this behaves like a synchronous call
    future = self.exe.submit(funcs, *args, **kargs)
    # Exe.Results.append( future.result())
    return future.result()

def read(self, key):
    with concurrent.futures.ThreadPoolExecutor() as executor:
        future = executor.submit(self.read_method, key)
        return future.result()

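# Both wrappers above submit a single callable and immediately block on future.result(),
# so they behave like synchronous calls executed on a pool thread. A minimal sketch of a
# class that could own submit_result() is below; the Runner name, the executor size and
# the pow example are illustrative assumptions, not the original code.
import concurrent.futures

class Runner:
    def __init__(self):
        # executor referenced as self.exe by submit_result()
        self.exe = concurrent.futures.ThreadPoolExecutor(max_workers=4)

    def submit_result(self, funcs, *args, **kargs):
        future = self.exe.submit(funcs, *args, **kargs)
        return future.result()

runner = Runner()
print(runner.submit_result(pow, 2, 10))  # blocks until the task completes -> 1024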