def init_population(self, num, target):
    """Seed the population with `num` mutated clones of the first
    worker's genome, then record the pool size for later repopulation."""
    seed_genome = self.population[0].genome
    for _ in range(num):
        clone = Worker.Worker(target, genome=seed_genome)
        clone.mutate_genome(self.age, 0.2, self.mutationRate)
        self.population.append(clone)
    self.populationPool = num
def preview_or_run_operations(self, really_delete, operations=None):
    """Preview operations or run operations (delete files).

    really_delete -- False previews the changes, True actually deletes
    operations -- mapping of operation name -> options; when None, built
                  from the operations currently selected in the GUI
    """
    assert(isinstance(really_delete, bool))
    import Worker
    self.start_time = None
    if operations is None:  # was `None == operations`
        operations = {}
        for operation in self.get_selected_operations():
            operations[operation] = self.get_operation_options(operation)
    assert(isinstance(operations, dict))
    if 0 == len(operations):
        GuiBasic.message_dialog(self.window,
                                _("You must select an operation"),
                                gtk.MESSAGE_WARNING, gtk.BUTTONS_OK)
        return
    try:
        self.set_sensitive(True)
        self.textbuffer.set_text("")
        self.progressbar.show()
        # Bug fix: removed leftover debug lines `print "STOP NOW"` and
        # `print self,stop_now` -- the latter was a typo (comma instead of
        # dot) that raised NameError on the undefined name `stop_now`.
        self.worker = Worker.Worker(self, really_delete, operations, self.stop_now)
    except:
        # top-level GUI boundary: log the traceback and surface the error
        traceback.print_exc()
        err = str(sys.exc_info()[1])
        self.append_text(err + "\n", 'error')
    else:
        self.start_time = time.time()
        worker = self.worker.run()
        gobject.idle_add(worker.next)
def evaluate(self, all):
    """Run disparity evaluation for all loaded image pairs or only the
    currently selected one.

    Args:
        all: when truthy, process every loaded pair; otherwise only the
             pair at self.idx.  (The name shadows the builtin ``all`` but
             is kept so keyword callers keep working.)
    """
    # Disable the buttons so the user cannot start a second run.
    self.btn_eval1.setEnabled(False)
    self.btn_evalall.setEnabled(False)
    self.progressBar.setValue(0)
    # Select the image paths to compute on.
    self.process_left_path = []
    self.process_right_path = []
    if all:  # was `all == True`; plain truthiness is the idiomatic test
        self.eval_all = True
        self.process_left_path = self.left_path
        self.process_right_path = self.right_path
        self.res_disp = self.disp_path
    else:
        self.process_left_path = [self.left_path[self.idx]]
        self.process_right_path = [self.right_path[self.idx]]
        self.res_disp = [self.disp_path[self.idx]]
    self.label_7.setText("Working")
    # Listen to worker to process image pairs.
    worker = Worker(self.process_left_path, self.process_right_path,
                    self.res_disp, self.idx)
    worker.signals.result.connect(self.track_progress)
    worker.signals.finished.connect(self.work_finished)
    # Execute.  Narrowed from a bare `except:` so KeyboardInterrupt and
    # SystemExit are no longer swallowed; the swallow itself is kept as the
    # original's deliberate best-effort behaviour.
    try:
        self.threadpool.start(worker)
    except Exception:
        pass
def start_threads(self):
    """Spawn NUM_THREADS Worker/QThread pairs, wire their signals to the
    GUI slots, and start each thread's event loop.

    Each worker gets its own port, starting at 5555 and incrementing by
    one per worker.  Workers and threads are stored together so neither
    is garbage-collected while running.
    """
    self.__workers_done = 0
    self.__threads = []
    self.__threadsstatus = []
    self.stat = []
    self.qtrs = []
    self.shifts = []
    self.a = [0,1,2,3]
    port = 5555  # base port; each worker listens on its own consecutive port
    for idx in range(self.NUM_THREADS):
        worker = Worker.Worker(idx, port)
        thread = QThread()
        thread.setObjectName('thread_' + str(idx))
        self.__threads.append((thread, worker))  # need to store worker too otherwise will be gc'd
        # TODO (translated): think through exactly how to surface the
        # multithreading statuses in the widget.
        self.__threadsstatus.append(self.sensor_im_1)
        # Per-worker state: status flag, quaternion, and shift vector.
        self.stat.append(-1)
        self.qtrs.append([1.,0.,0.,0.])
        self.shifts.append([0.,0.,0.])
        worker.moveToThread(thread)
        # Worker -> GUI signal wiring.
        worker.sig_shifts.connect(self.on_worker_shifts)
        worker.sig_qtr.connect(self.on_worker_qtr)
        worker.sig_status.connect(self.on_worker_status)
        # worker.sig_msg.connect(self.log_text.append)
        # control worker:
        self.sig_abort_workers.connect(worker.abort)
        # get read to start worker:
        # self.sig_start.connect(worker.work)  # needed due to PyCharm debugger bug (!); comment out next line
        thread.started.connect(worker.work)  #(self.port)
        port = port + 1
        thread.start()  # this will emit 'started' and start thread event loop
def __init__(self, parent=None):
    """
    ctor for the main tray widget. Initialises all the values properly and
    configures initial state.
    :param parent: Parent to create from
    :return: Sempy object
    """
    self.config = SempyConfig()
    self.settings = QSettings(QSettings.IniFormat, QSettings.UserScope,
                              "Sempy", "config")
    QSystemTrayIcon.__init__(self, QIcon("res/semaphore.png"), parent)
    # Only finish initialisation when a settings file with a saved token
    # exists; otherwise the tray icon is created but polling never starts.
    if os.path.exists(
            self.settings.fileName()) and self.settings.value("token"):
        self.current_info = {}
        self.last_info = {}
        self.enabled_repos = []
        self.token = str(self.settings.value("token"))
        self.interval = int(self.settings.value("interval"))
        self.logger = Logger(os.path.dirname(self.settings.fileName()))
        self.req_counter = 0
        self.update_enabled_repos()
        self.menu = self.create_menu(parent)
        self.setContextMenu(self.menu)
        logging.debug("Starting RequestThread")
        # Background worker that periodically refreshes repo info and
        # signals the menu to rebuild when each refresh completes.
        self.req_thread = Worker(interval=self.interval,
                                 function=self.update_enabled_repos)
        self.req_thread.done_signal.connect(self.update_menu)
        self.req_thread.start()
def login(self, account, password, status, proxy, host, port, use_http=False):
    '''Begin the login sequence: record the account, spin up the session
    worker thread, then queue the login action.'''
    self.account = e3.Account(account, password, status, host)
    session_worker = Worker.Worker('emesene2', self, proxy, use_http)
    session_worker.start()
    credentials = (account, password, status)
    self.add_action(e3.Action.ACTION_LOGIN, credentials)
def connect():
    '''Handle a new socket.io connection: create a client thread bound to
    this session id and start its worker.'''
    print("connected: create new thread")
    global clientThread
    clientThread = myapp.ClientThread(mainCls=mainCls)
    clientThread.set_thread_id(request.sid)
    session_worker = workerCls.Worker(request.sid, clientThread, socketio=socketio)
    session_worker.start()
def Start(self):
    """Start one Worker per configured website.

    Note: a failure in any site aborts the remaining ones (pre-existing
    all-or-nothing behaviour, kept); the error is logged, not re-raised.
    """
    try:
        for site in self.cfg['websites']:
            w = Worker(site, self.cfg)
            w.Start()
    except Exception as e:
        # lazy %-args instead of eager string interpolation
        logging.error('Manager Start error: %s', e)
def convert(self):
    """Kick off screenshot processing on the thread pool and wire up
    progress reporting for both per-item results and completion."""
    self.setAllDirs()
    job = Worker.Worker(self.processScreenshots)
    job.signals.result.connect(self.progressPrint)
    job.signals.finished.connect(
        lambda: self.progressPrint('Conversion complete'))
    self.threadpool.start(job)
def __init__(self, playerName, worker=None):
    """Create a player record.

    playerName -- display name of the player
    worker -- an existing Worker.Worker to reuse; when omitted or not a
              Worker instance, a fresh one is created

    Bug fix: the original test was ``type(worker) != type(Worker.Worker)``,
    which compares against the metaclass ``type`` itself and is True for
    every Worker instance -- so a supplied worker was always discarded and
    replaced with a new one.  ``isinstance`` expresses the intent.
    """
    self.playerName = playerName
    self.wins = 0
    self.loss = 0
    if isinstance(worker, Worker.Worker):
        self.worker = worker
    else:
        self.worker = Worker.Worker()
def load_entities(self):
    """Load unit sprite sheets and build the player and enemy entity
    lists plus their quadtrees.

    Player side gets 2 workers and `thesize` marines; enemy side gets
    `thesize` marauders.  Each player unit adds 1 to the population
    counter.
    """
    # Load entity images
    maraudersurf = pygame.image.load(
        os.path.join(self._cwdpath, "Images", "maraudert.png")).convert_alpha()
    scvsurf = pygame.image.load(
        os.path.join(self._cwdpath, "Images", "scv2.png")).convert_alpha()
    marinesurf = pygame.image.load(
        os.path.join(self._cwdpath, "Images", "marine.png")).convert_alpha()
    # Create units and enemies and set up data structures
    marinerect = pygame.Rect(700, 300, 45, 45)
    worker1 = Worker.Worker(15, scvsurf, marinerect, False)
    marinerect = pygame.Rect(700, 350, 45, 45)
    worker2 = Worker.Worker(15, scvsurf, marinerect, False)
    self._entitylist = []
    self._entitylist.append(worker1)
    self._playerinfo.givepopulation(1)
    self._entitylist.append(worker2)
    self._playerinfo.givepopulation(1)
    thesize = 5  # number of marines (player) and marauders (enemy) to spawn
    # Fill entity List structure
    for x in range(0, thesize):
        marinerect = pygame.Rect(100, 500 + x * 30, 50, 50)
        entity1 = Marine.Marine(15, marinesurf, marinerect, False)
        self._entitylist.append(entity1)
        self._playerinfo.givepopulation(1)
    # Initialize and fill the entity QuadTree
    self._entityquadtree = Quadtree.Quadtree()
    for x in range(0, len(self._entitylist)):
        self._entityquadtree.insertstart(self._entitylist[x])
    # Fill enemy List structure
    self._enemyentitylist = []
    for x in range(0, thesize):
        marinerect = pygame.Rect(400, 250 + x * 30, 50, 50)
        entity1 = Marauder.Marauder(15, maraudersurf, marinerect, True)
        self._enemyentitylist.append(entity1)
    # Initialize and fill the enemy QuadTree
    self._enemyentityquadtree = Quadtree.Quadtree()
    for x in range(0, len(self._enemyentitylist)):
        self._enemyentityquadtree.insertstart(self._enemyentitylist[x])
def __init__(self, host, worker=None):
    """Create a host record with an attached worker and empty card state.

    host -- host name for this record
    worker -- an existing Worker.Worker to reuse; when omitted or not a
              Worker instance, a fresh one is created

    Bug fix: ``type(worker) != type(Worker.Worker)`` compared against the
    metaclass ``type``, so any supplied worker instance was discarded and
    a new one created instead.  ``isinstance`` performs the intended check.
    """
    self.hostName = host
    if isinstance(worker, Worker.Worker):
        self.worker = worker
    else:
        self.worker = Worker.Worker()
    self.cards = []
    self.currentCard = None
    self.currentState = None
    self.removedCharacters = None
def init_population(self, num, target):
    """Fill the population with `num` copies of the seed genome, each with
    every gene flagged active, then mutated.

    Fixes: the inner index loop reused and shadowed the outer loop
    variable ``i`` (harmless here but a latent trap) and iterated by
    index; genes are now iterated directly.
    """
    genome = self.population[0].genome
    for _ in range(num):
        worker = Worker.Worker(target, genome=genome)
        # Ensure every gene ends with an active flag (trailing 1).
        for gene in worker.genome:
            if gene[-1] != 1:
                gene.append(1)
        worker.mutate_genome(0.2, canAdd=target.canAdd, canRemove=target.canRemove)
        self.population.append(worker)
def buildWorker(self, entityList, scvsurf, entityquadtree, enemyEntityList, enemyEntityQuadtree, playerInfo):
    """Spawn a new Worker near this building at the first free x-slot.

    Scans offsets 50..980 in steps of 20 and places the worker at the
    first offset where no existing entity shares the same x coordinate.
    Player workers cost 1 resource and add 1 population; enemy workers
    are free.  Returns after placing at most one worker (or without
    placing any if no slot is free or resources are insufficient).
    """
    #Create units and enemies and set up data structures
    # (1,2) = x,y coords on background surface, (3,4) = image size
    if not self.enemy and playerInfo.resources >= 1:
        for x in range(50, 1000, 20):
            spotTaken = False
            for entity in entityList:
                #Some entity is already in that location
                if self.x + x == entity.rect[0]:
                    spotTaken = True
            # If we try to place a worker in the same spot of another the game will crash,
            # here we have found an empty spot
            if spotTaken == False:
                # print(self.x + x, self.y + x)
                workerrect = pygame.Rect(self.x + x,self.y + x,45,45)
                worker1 = Worker.Worker(15,scvsurf,workerrect,self.enemy) #Handle if enemy/user was created
                entityList.append(worker1)
                entityquadtree.insertstart(worker1) #W/out Node is none error
                playerInfo.givepopulation(1)
                playerInfo.removeresources(1)
                #Stop looping
                return
    elif self.enemy: #Enemy
        for x in range(50, 1000, 20):
            spotTaken = False
            for enemyEntity in enemyEntityList:
                #Some entity is already in that location
                if self.x + x == enemyEntity.rect[0]:
                    spotTaken = True
            if spotTaken == False:
                workerrect = pygame.Rect(self.x + x,self.y + x,45,45)
                worker1 = Worker.Worker(15,scvsurf,workerrect,self.enemy) #Handle if enemy/user was created
                enemyEntityList.append(worker1)
                enemyEntityQuadtree.insertstart(worker1) #W/out Node is none error
                #Stop looping
                return
def assign_job(worker: "Worker.Worker", jobDesc: list):
    """ Set the Job of a worker to the description provided in a jobDesc.
    If no second element was provided, give the worker a Minimum Wage job,
    with the given name.

    worker -- object exposing set_job(name, wage)
    jobDesc -- [name, wage] or [name]; anything else prints an error

    Bug fix: the annotation was ``Worker.Worker()`` -- a *call* evaluated
    at definition time, constructing (and discarding) a Worker every time
    the module was imported.  A string annotation documents the intended
    type with no side effects.
    """
    length = len(jobDesc)
    if length == 2:
        worker.set_job(jobDesc[0], float(jobDesc[1]))
    elif length == 1:
        worker.set_job(jobDesc[0], MIN_WAGE)
    else:
        print("Error: the given description was too long/short")
def applyWorker(function, blockData, address):
    """Fan `function` out across four worker processes and return the
    first result posted to the shared queue, terminating the others.

    NOTE(review): q.get() blocks until some worker posts a result -- if no
    worker ever does, this call never returns.  If the posted result is
    falsy, the function returns None without terminating the workers.
    Both are pre-existing behaviours, kept unchanged.
    """
    q = Queue()
    workers = []
    for process in range(4):
        worker = Worker(target=function,
                        name='process_{}'.format(process),
                        args=(blockData, (process*1000)**2, address),
                        queue=q)
        workers.append(worker)
        worker.start()
    result = q.get()
    if result:  # was C-style `if(result)`
        for worker in workers:
            worker.terminate()
            print('{} Has been terminated'.format(worker.name))
        return result
def init_species(self, num):
    """Create the initial crossover and basic species pools.

    num -- number of crossover species to create

    Bug fix: the first loop was ``for i in range(0)`` and therefore never
    executed, silently ignoring ``num`` -- no crossover species were ever
    created even though self.species was reset.  It now iterates ``num``
    times, matching the parameter and the loop's own comment.
    """
    print("initializing...")
    self.species = []
    # Crossover species: one per requested worker.
    for i in range(num):
        # Create and store a new worker object, initialised per target config.
        worker = Worker.Worker(self.target)
        t = self.target.initializationType
        if t == "sequential":
            worker.initialize_sequential_genome()
        elif t == "random":
            worker.initialize_random_genome(self.target.initialSize)
        newSpecies = Crossover_Species.Species(worker, self.maxAge)
        newSpecies.init_population(self.speciesSize, self.target)
        self.species.append(newSpecies)
    # Basic species: twice the configured species size.
    for i in range(self.speciesSize * 2):
        worker = Worker.Worker(self.target)
        t = self.target.initializationType
        if t == "sequential":
            worker.initialize_sequential_genome()
        elif t == "random":
            worker.initialize_random_genome(self.target.initialSize)
        newSpecies = Basic_Species.Species(worker, self.maxAge)
        self.basicSpecies.append(newSpecies)
def doAnalysis(vectorSize=DEFAULT_VECTOR_SIZE, weightLimit=DEFAULT_WEIGHT_LIMIT, mergeLimit=DEFAULT_MERGE_LIMIT):
    """Load all news files, vectorise them, hierarchically cluster the
    vectors, print each cluster, and export the grouped result to disk.

    vectorSize -- dimensionality of the news vectors
    weightLimit -- keyword weight cutoff used during vectorisation
    mergeLimit -- merge threshold passed to the hierarchical clusterer

    Fixes: ``nameList == None`` -> ``is None``; removed a stray ``pass``
    and commented-out debug code; index loop -> ``enumerate``.
    """
    # Get the name list of files to be processed.
    nameList = getNameList()
    if nameList is None or len(nameList) == 0:
        print("No data for clustering")
        return
    # Load the data into the worker.
    worker = Worker.Worker()
    for filename in nameList:
        data = worker.loadData(filename)
        worker.addNews(data)
    keywordDict = worker.getKeyWordDict()
    print("len(keywordDict):" + str(len(keywordDict)))
    # Convert the news into vectors.
    newsVectorList = worker.vectorLization(vectorSize, weightLimit=weightLimit)
    # The full news set (indexable by the cluster output's news IDs).
    newsList = worker.getAllNews()
    # Clustering.
    cluster = Cluster.Cluster(newsVectorList=newsVectorList)
    newsMergedList = cluster.getHierResult(mergeLimit=mergeLimit)
    resultNewsList = list()
    for i, itemList in enumerate(newsMergedList):
        print("Cluster:" + str(i))
        tempNewsList = list()
        for newsID in itemList:
            news = newsList[newsID]
            print(news.toString())
            tempNewsList.append(news.toDict())
        resultNewsList.append(tempNewsList)
    exportResultToDisk(resultNewsList)
def execute(self):
    """Spin up the worker pool, enqueue one task per target plus one
    poison pill per worker, then block until the queue drains."""
    # get workers going; default to two workers per CPU core
    if self._workers == 0:
        self._workers = multiprocessing.cpu_count() * 2
    self.workers = []
    for _ in xrange(self._workers):
        self.workers.append(Worker.Worker(self.tasks, self.results))
    for w in self.workers:
        w.start()
    # enqueue one job per target
    for t in self._targets:
        self.tasks.put(TaskLogic.TaskLogic(t, self._proxy, self._type))
    # one poison pill (None) per worker so every thread shuts down
    for _ in xrange(self._workers):
        self.tasks.put(None)
    # wait for all tasks to be marked done
    self.tasks.join()
    return True
# Dispatch a named task from the command line.  `sendqueue` runs forever
# inside a daemon context on a fixed interval; any other known task runs
# once; unknown/missing tasks print the task list and exit.
args = dict(zip(argNames, sys.argv))
tasks = ['sendqueue', 'progressqueue']
if 'task' not in args:
    print('===> Not found task in tasks')
    for vtask in tasks:
        print('- ' + vtask)
    exit()
task = args['task']
if task in tasks:
    # words.Worker().sendqueue()
    # Resolve the task name to the bound Worker method of the same name.
    method_to_call = getattr(words.Worker(), task)
    if task == 'sendqueue':
        import daemon
        # Detach and loop forever, sending the queue every INTERVAL minutes.
        with daemon.DaemonContext():
            while True:
                method_to_call()
                #time.sleep(env.INTERVAL-14)
                time.sleep(env.INTERVAL * 60)
    else:
        method_to_call()
else:
    print('===> Not found task in tasks')
    for vtask in tasks:
        print('- ' + vtask)
    exit()
def update(self):
    '''
    The main function that gets called every X contains all the child
    table updating functions
    Args:
        None
    Returns:
        None
    '''
    def _success(worker):
        #Called when one of the workers is successfully completed
        return

    def _error(worker):
        #Called if there was an error
        logging.error('~~~~ Error with the {} ~~~~'.format(worker))

    def _midCheck():
        '''
        Monitors the middle man list for unfilled orders
        Args:
            None
        Returns:
            None
        '''
        headers = {'Accept': 'application/json',
                   'Authorization': self.trader.headers['Authorization']}
        for tick in self.midTicks:
            if tick.transID:
                try:
                    # transID is (side, order_id); poll the order state.
                    url = 'https://api.robinhood.com/orders' + '/' + tick.transID[1]
                    res = requests.get(url, headers = headers).json()
                    if tick.transID[0] == 'sell':
                        if res['state'] in ['partially_filled', 'filled', 'confirmed']:
                            self.sell(tick, fromMidPrice = float(res['price']))
                    else:
                        if res['state'] in ['partially_filled', 'filled']:
                            self.purchase(tick, fromMidPrice = float(res['price']))
                except Exception as e:
                    logging.info('~~~~ Mid Check Error: {} ~~~~'.format(e))

    def _tickUpdate(curList):
        '''
        Updates the tick objects in the respective list
        Args:
            curList (str): string name of list that is being updated
        Returns:
            None
        '''
        listDict = {'Hold': self.hTicks, 'Queue': self.qTicks}
        tickData = robinTicks(self.trader,
                              [tick.T for tick in listDict[curList]],
                              self.afterHours())
        if len(tickData) != len(listDict[curList]):
            logging.error('~~~~ {} and Fetch Lengths Do Not Match ~~~~'.format(curList))
            return
        else:
            for tickDict in tickData:
                try:
                    idx = [tick.T for tick in listDict[curList]].index(tickDict['Sym'])
                except ValueError:
                    return
                listDict[curList][idx].update(
                    data = tickDict['Data'],
                    purPrice = self.purPrice.value(),
                    spy = self.spy
                )

    def _queueCall():
        '''
        Performs all the necessaries for the Queue table, is put in a
        worker and executes in the background
        Args:
            None
        Returns:
            None
        '''
        if len(self.qTicks):
            _tickUpdate('Queue')
            #If actually trading, iterate through Queue and if the projected cost
            #doesn't exceed budget see if it meets purchasing criteria, else just update
            if not self.startBut.isEnabled():
                for tick in self.qTicks:
                    logging.info('Queue {}'.format(tick.T))
                    transPrice = tick.C * tick.PQ
                    try:
                        if self._dtCost + transPrice < self.budget and transPrice < float(self.buyingPower.text()) and transPrice < float(self.cash.text()):
                            if tick.toBuy(purPrice = self.purPrice.value(), spy = self.spy):
                                self._executeOrder(tick, orderType = 'Buy')
                    except TypeError:
                        # tick fields may be None before first fetch
                        pass

    def _holdCall():
        '''
        Performs all the necessaries for the Holdings table, is put in a
        worker and executes in the background
        Args:
            None
        Returns:
            None
        '''
        if len(self.hTicks):
            _tickUpdate('Hold')
            if not self.startBut.isEnabled():
                for tick in self.hTicks:
                    if tick.tradeable:
                        logging.info('Hold {}'.format(tick.T))
                        if tick.toSell(purPrice = self.purPrice.value(), spy = self.spy):
                            self._executeOrder(tick, 'Sell')

    #Robinhood portfolio and account info, creates an empty one if an error
    #is thrown such as having 0 in the portfolio
    try:
        self.portfolio = self.trader.portfolios()
        self.account = self.trader.get_account()['margin_balances']
    except IndexError:
        logging.info('~~~~ Portfolio Empty ~~~~')
        self.portfolio = {
            'equity': 0,
            'extended_hours_equity': 0,
        }
        self.account = {
            'unsettled_funds': 0,
            'start_of_day_dtbp': 0,
            'unallocated_margin_cash': 0
        }
    except (requests.exceptions.ConnectionError,
            requests.exceptions.HTTPError, TimeoutError) as e:
        logging.info('~~~~ Connection Error: {} ~~~~'.format(e))
        return
    #Updates the market tracker bar
    self.marketBar(fetchMarkets())
    now = datetime.datetime.now(self.tz).time()
    #Set the Equity to current value depending on if it's aH or not
    if self.afterHours(now):
        self.equity.setText('%.2f' % (float(self.portfolio['extended_hours_equity'])))
        #Disable Trading aH
        if not TESTING:
            if not self.startBut.isEnabled():
                self.tradeActs()
    else:
        self.equity.setText('%.2f' % (float(self.portfolio['equity'])))
        if self.portfolio['equity']:
            #Plt that stuff if it's during the trading day
            self.graphData[0].append(now.strftime('%H:%M:%S'))
            self.graphData[1].append(float(self.portfolio['equity']))
            xdict = dict(enumerate(self.graphData[0]))
            ax = self.graph.getAxis('bottom')
            ax.setTicks([xdict.items()])
            self.graph.plot(list(xdict.keys()), self.graphData[1],
                            pen = self.ePen, clear = False)
    self.buyingPower.setText('%.2f' % (float(self.account['start_of_day_dtbp'])))
    self.cash.setText('%.2f' % (float(self.account['unallocated_margin_cash'])))
    self.uFund.setText('%.2f' % (float(self.account['unsettled_funds'])))
    if not TESTING:
        if not self.startBut.isEnabled():
            #If end of day approaching, close out all positions regardless of profit
            if now > datetime.time(hour = 15, minute = 58, second = 0, tzinfo = self.tz):
                self.dump()
            #Safety-net for SEC guideline of >25000 on Non-Margin for day trading
            if self.marginSpin.value() < float(self.equity.text()) < self.marginSpin.value() + 100:
                if self.notYetWarned:
                    self.warn('Near Thresh')
                    self.notYetWarned = False
            if float(self.equity.text()) < self.marginSpin.value():
                logging.info('~~~~ Equity Fell Below Threshold ~~~~')
                self.warn('Below Thresh')
                self.tradeActs()
        self.purPrice.setMaximum(float(self.cash.text()))
    else:
        #Allow for dumping of stocks at end of the day if just testing,
        #if testing AH doesn't auto dump
        if not self.startBut.isEnabled():
            if self.startTime < datetime.time(hour = 16, minute = 0, second = 0, tzinfo = self.tz):
                if now > datetime.time(hour = 15, minute = 58, second = 0, tzinfo = self.tz):
                    self.dump()
    # Dispatch the background workers; each table only refreshes when it
    # has rows, saving work on empty tables.
    if len(self.hTicks) > 0:
        holdWorker = Worker(_holdCall)
        holdWorker.signals.finished.connect(lambda : _success('Hold'))
        holdWorker.signals.error.connect(lambda : _error('Hold'))
        self.pool.start(holdWorker)
        self.hModel.layoutChanged.emit()
        self.holding.viewport().update()
    #Only calls the update function if there's stuff in the table, saves memory
    if len(self.qTicks) > 0:
        queueWorker = Worker(_queueCall)
        queueWorker.signals.finished.connect(lambda : _success('Queue'))
        queueWorker.signals.error.connect(lambda : _error('Queue'))
        self.pool.start(queueWorker)
        self.qModel.layoutChanged.emit()
        self.queue.viewport().update()
    if len(self.midTicks) > 0:
        midWorker = Worker(_midCheck)
        midWorker.signals.finished.connect(lambda : _success('Middle'))
        midWorker.signals.error.connect(lambda : _error('Middle'))
        self.pool.start(midWorker)
def preview_or_clean(operations, really_clean):
    """Preview deletes and other changes"""
    callback = CliCallback()
    # Worker.run() returns a generator; each next() performs one unit of
    # work, so drain it until it reports False.
    work = Worker.Worker(callback, really_clean, operations).run()
    keep_going = True
    while keep_going:
        keep_going = work.next()
# MPI entry point: the highest-ranked process acts as the master, every
# other rank is a worker on a local partition of the graph.
from mpi4py import MPI
import time
from Worker import *
from Master import *
from IO import *
from graph import *

comm = MPI.COMM_WORLD
rank = comm.Get_rank()
size = comm.Get_size()
if rank == size - 1:
    # master process: coordinates the size-1 workers
    master = Master(rank, size - 1, comm)
    master.Run()
else:
    # worker process: load the local partition, then run one local step
    worker = Worker("../graph/large.graph", rank, size - 1, comm)
    worker.Local_Graph()
    worker.Local_Step()
# Build the A3C graph: shared counters, decayed learning rate, one global
# master worker plus num_workers trainers, all pinned to the CPU.
tf.reset_default_graph()
"""
Creates the master worker that maintains the master network.
We then initialize the workers array.
"""
global_episodes = tf.Variable(0, dtype=tf.int32, name='global_episodes', trainable=False)
total_frames = tf.Variable(0, dtype=tf.int32, name='total_frames', trainable=False)
# Learning rate decays polynomially to 10% of its start over half the run.
learning_rate = tf.train.polynomial_decay(LEARNING_RATE, total_frames, MAX_ITERATION // 2, LEARNING_RATE * 0.1)
with tf.device("/cpu:0"):
    summary_writer = tf.summary.FileWriter("./train/" + SUMMARY_NAME + str(BETA[BETA_i]))
    summary = Summary(summary_writer)
    # NOTE(review): the master gets the whole BETA sequence while each
    # trainer gets BETA[BETA_i] -- confirm this asymmetry is intended.
    master_worker = Worker('global', env, GAMMA, learning_rate, global_episodes, total_frames, model_path, False, False, num_workers, summary, BETA)
    workers = []
    for i in range(num_workers):
        print (i)
        workers.append(
            Worker(i, env, GAMMA, learning_rate, global_episodes, total_frames, model_path, render, save_img, num_workers, summary, BETA[BETA_i]))
"""
Initializes tensorflow variables
"""
with tf.Session(config=tf.ConfigProto(intra_op_parallelism_threads=1)) as session:
    saver = tf.train.Saver(max_to_keep=5)
    if load:
        print ("Loading....")
        c = tf.train.get_checkpoint_state(model_path)
""" Initializes tensorflow variables """ os.environ["CUDA_VISIBLE_DEVICES"]='0' #config = tf.ConfigProto() config = tf.ConfigProto(device_count={"CPU":4}) config.intra_op_parallelism_threads=4 config.inter_op_parallelism_threads=4 config.allow_soft_placement=True config.log_device_placement=False config.gpu_options.allow_growth = True with tf.Session(config=config) as session: with tf.device("/cpu:0"): summary_writer = tf.summary.FileWriter("./Summary/"+Constants.SUMMARY_NAME) summary = Summary(summary_writer, Constants.MODE) master_worker = Worker('global', session, learning_rate, epochs, epochs_test, total_graphs, train_nodes, test_nodes, summary) workers = [] for i in range(Constants.NUM_WORKER): print (i) workers.append(Worker(i, session, learning_rate, epochs, epochs_test, total_graphs, train_nodes, test_nodes, summary)) saver = tf.train.Saver(max_to_keep=1) if Constants.LOAD: print ("Loading....") c = tf.train.get_checkpoint_state(Constants.MODEL_PATH) saver.restore(session,c.model_checkpoint_path) print ("Graph loaded!") else: session.run(tf.global_variables_initializer()) coord = tf.train.Coordinator()
def AppExecute(self):
    """Run AppWorker on a fresh thread pool, reporting progress along the
    way and displaying the schedule when finished."""
    self.ThreadPool = QtCore.QThreadPool()
    job = Worker(self.AppWorker)
    job.signals.progress.connect(self.ProgressFunction)
    job.signals.finished.connect(self.DisplaySchedule)
    self.ThreadPool.start(job)
def StartWorker(self):
    """Create the background Worker thread, keep a reference on self so it
    is not garbage-collected, and start it."""
    import Worker
    worker_thread = Worker.Worker()
    self.wthread = worker_thread
    worker_thread.start()
# Simple forking-style FTP server: parse CLI params, bind the listen
# socket, and hand each accepted connection to a Worker thread.
import socket, sys, re, os

sys.path.append("../lib")  # for params
import params
import framed_socket, Worker

switchesVarDefaults = (
    (('-l', '--listenPort'), 'listenPort', 50001),
    (('-?', '--usage'), "usage", False),  # boolean (set if present)
)

progname = "ftp-server"
paramMap = params.parseParams(switchesVarDefaults)
listenPort = paramMap['listenPort']
listenAddr = ''  # Symbolic name meaning all available interfaces

if paramMap['usage']:
    params.usage()

s = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
s.bind((listenAddr, listenPort))
s.listen(1)  # allow only one outstanding request
# s is a factory for connected sockets
while True:
    conn, addr = s.accept()  # wait until incoming connection request (and accept it)
    # One Worker per connection; Worker handles the session on its own thread.
    Worker.Worker(conn,addr).start()
''' Get Work Days ''' WorkDays = [] for day in datastore["WorkDays"]: WorkDays.append(wd.WorkDay(day["Day"], day["StartTime"], day["EndTime"])) ''' Get Workers ''' Workers = [] for worker in datastore["Workers"]: newWorker = w.Worker(worker["Name"], worker["Hours"]) for day in worker["Availablity"]: newDay = w.AvailableDay(day["Day"], day["StartTime"], day["EndTime"]) newWorker.AvailableDayList.append(newDay) Workers.append(newWorker) ''' Assign workers to days ''' # Add workers to TimeSlots based on availibility for worker in Workers: for workDay in WorkDays:
def repopulate(self):
    """Select parent workers by age-scaled normalized fitness, breed
    children from random parent pairs, and rebuild the population from
    mutated children plus the surviving parents."""
    assert len(
        self.population
    ) >= 2, "There must be at least 2 workers in the self.population to repopulate!"
    # print("Initial population of self.population: ",len(self.population))
    # Set up local variables and loop through workers.
    minFitness = self.population[0].fitness
    maxFitness = self.population[0].fitness
    averageFitness = 0
    parentWorkers = []
    for worker in self.population:
        # Calculate average fitness, store min and max fitness values for normalization later.
        averageFitness += worker.fitness
        if worker.fitness < minFitness:
            minFitness = worker.fitness
        if worker.fitness > maxFitness:
            maxFitness = worker.fitness
    averageFitness /= len(self.population)
    # Guard against a zero-width fitness range (division below).
    # NOTE(review): the magic value 10 merely forces a non-zero denominator;
    # it would still divide by zero if maxFitness happened to equal 10 -- confirm.
    if minFitness - maxFitness == 0:
        minFitness = 10
    # Select part of the population to be parent candidates. Note that this is
    # not guaranteed to select any parents.  To combat this, there is a check
    # performed at the end of the loop to ensure there will always be at least
    # 2 parents.
    for worker in self.population:
        fitness = abs(
            (worker.fitness - minFitness) / (maxFitness - minFitness))
        # random() returns a value in the range [0.0,1.0) which means that the
        # higher the fitness value the higher the probability of parent selection.
        #print("fitness:",worker.fitness,"age:",self.age,"max:",self.maxAge)
        if float(fitness) >= self.age / self.maxAge:
            parentWorkers.append(worker)
    # Everyone was selected: nothing to replace.
    if len(parentWorkers) >= len(self.population):
        return
    # Nobody was selected: the species dies out.
    if len(parentWorkers) == 0:
        print("species died through repopulation\n", self.age, "\n", self.maxAge)
        print("num workers:", len(self.population))
        for worker in self.population:
            print(worker.fitness)
        self.completionFlag = True
        return
    # A single parent: clone-and-mutate it so breeding pairs exist.
    if len(parentWorkers) == 1:
        p1 = parentWorkers[0]
        p2 = Worker.Worker(target=p1.target, genome=p1.genome)
        p2.mutate_genome(self.age, 0.2, self.mutationRate)
        parentWorkers.append(p2)
    children = []
    # Loop until we have re-filled the population.
    while len(children) + len(parentWorkers) < self.populationPool:
        # Pick two distinct parents (p1 is popped so p2 differs, then re-added).
        p1, idx = self.select_random_worker(parentWorkers, minFitness, maxFitness)
        parentWorkers.pop(idx)
        p2, idx = self.select_random_worker(parentWorkers, minFitness, maxFitness)
        parentWorkers.append(p1)
        newGenome = p1.create_genome(p2)
        child = Worker.Worker(p1.target, genome=newGenome)
        children.append(child)
    # New population = mutated children + the parents themselves.
    self.population = []
    for worker in children:
        worker.mutate_genome(self.age, 0.2, self.mutationRate)
        self.population.append(worker)
    for worker in parentWorkers:
        self.population.append(worker)