def initialise_simulation(self, simulation):
    """Bind *simulation* to this runner and prime global state for it.

    Flushes the shared cache, loads the simulation's configuration
    (deep-copied so later mutation cannot corrupt the template), and
    loads the segment-table fixture the simulation names.
    """
    self.simulation = simulation
    # Entries cached by a previous simulation must not leak into this one.
    Cache.get_cache().flush()
    self.configurations.load_configuration_for_simulation(simulation)
    # Work on a private copy of the configuration dict.
    self.configurations.configurations_dict = deepcopy(
        self.configurations.configurations_dict
    )
    fixture_path = join(
        segment_table_dir_path, simulation.segment_table_file_name
    )
    SegmentTable.load(fixture_path)
def __init__(self, *args, **kwargs):
    """Initialize the pool: shared cache plus one migration queue per island.

    Each queue has maxsize=1 so an island hands over at most one pending
    migrant at a time instead of letting migrants pile up.
    """
    super().__init__(*args, **kwargs)
    self.cache = Cache.get_cache()
    self.migration_queues = [
        Queue(maxsize=1) for _ in range(self.total_num_islands)
    ]
def run(self):
    """Drive a full multi-island run and return the best hypothesis found.

    Either resumes a previous simulation or starts fresh island
    processes, supervises them to completion, gathers their results,
    and tears the processes down.
    """
    Cache.get_cache().flush()
    if self.resume:
        self.resume_simulation()
    else:
        self.processes = self.init_processes()
        for proc in self.processes:
            proc.start()
            self.logger.info('Started process {}'.format(proc.name))
    # NOTE(review): supervision and collection apply to both fresh and
    # resumed runs — confirm resume_simulation() populates self.processes.
    self.maintain_island_processes()
    self.collect_all_island_results()
    for proc in self.processes:
        self.logger.info('Joining process {}'.format(proc.name))
        proc.join()
        proc.terminate()
    return self.best_hypothesis
def __init__(self, simulation, migration_coordinator, result_queue,
             island_number, simulation_total_islands, max_generations,
             simulation_total_generations, initial_generation=0,
             initial_population=None):
    """Set up one GA island process.

    Args:
        simulation: the simulation definition this island evolves against.
        migration_coordinator: coordinator used to exchange migrants
            with the other islands.
        result_queue: queue on which this island reports its results.
        island_number: index of this island among all islands.
        simulation_total_islands: total number of islands in the run.
        max_generations: generation budget for this island.
        simulation_total_generations: generation budget for the whole
            simulation.
        initial_generation: generation counter to start from (nonzero
            when resuming).
        initial_population: optional population to resume from; its
            fitness values are invalidated so they are recomputed.
    """
    self.logger = Logger.get_logger()
    self.island_number = island_number
    self.simulation_total_islands = simulation_total_islands
    # Process name, e.g. "<prefix>_<island_number>".
    self.island_name = '{}_{}'.format(ga_config.PROCESS_NAME_PREFIX,
                                      self.island_number)
    # Seed before any stochastic initialization below.
    self.init_random_seed()
    self.cache = Cache.get_cache()
    self.initial_generation = initial_generation
    # Current generation; starts at the initial (resume) point.
    self.generation = initial_generation
    self.max_generations = max_generations
    self.simulation_total_generations = simulation_total_generations
    self.simulation = simulation
    self.migration_coordinator = migration_coordinator
    self.result_queue = result_queue
    self._population = None
    if initial_population:
        self.population = initial_population
        # Resumed individuals must be re-evaluated under this island's state.
        self.invalidate_all_population_fitness()
    self.var_and, self.crossover_rate, self.mutation_rate = self.init_crossover_mutation_rates(
    )
    self.target_hypothesis, self.target_hypothesis_energy = self.init_target_hypothesis(
    )
    self.new_individuals_in_generation = 0
    self.elite = None
    self.stats, self.hall_of_fame = self.init_stats()
    self.logbook = self.init_logbook()
    self._non_inf_fitness = None
    # Toolbox last: presumably it depends on the state initialized above
    # (rates, target) — TODO confirm.
    self.toolbox = self.init_toolbox()
cache = Cache(conf.settings['cachefile']) # Init Notifications class notify = Notifications() # Init Syncer class syncer = Syncer(conf.configs) # Ensure lock folder exists lock.ensure_lock_folder() # Init thread class thread = Thread() # Logic vars uploader_delay = cache.get_cache('uploader_bans') syncer_delay = cache.get_cache('syncer_bans') plex_monitor_thread = None sa_delay = cache.get_cache('sa_bans') ############################################################ # MISC FUNCS ############################################################ def init_notifications(): try: for notification_name, notification_config in conf.configs[ 'notifications'].items(): notify.load(**notification_config) except Exception:
from rule import get_context_regex from segment_table import LEFT_APPLICATION_BRACKET, LEFT_CENTER_BRACKET, LEFT_IDENTITY_BRACKET from segment_table import RIGHT_APPLICATION_BRACKET, RIGHT_CENTER_BRACKET, RIGHT_IDENTITY_BRACKET LEFT_BRACKETS = [ LEFT_APPLICATION_BRACKET, LEFT_CENTER_BRACKET, LEFT_IDENTITY_BRACKET ] RIGHT_BRACKETS = [ RIGHT_APPLICATION_BRACKET, RIGHT_CENTER_BRACKET, RIGHT_IDENTITY_BRACKET ] BRACKETS = RIGHT_BRACKETS + LEFT_BRACKETS configurations = Configuration() uniform_encoding = UniformEncoding() cache = Cache.get_cache() right_context_dfas = dict() left_context_dfas = dict() rule_transducers = dict() def clear_module(): right_context_dfas.clear() left_context_dfas.clear() rule_transducers.clear() # Wrappers for FAdo.reex.ParseReg1 and str2regexp(). # Needed when running multiple processes of simulations since dbm writes to the same shelve file by default. class ParseReg1MultiProcess(ParseReg1): def __init__(self, no_table=0, table=None):