def _initialize_worker(self):
    '''
    Initialize a worker process of the evolutionary algorithm.

    The workers are in charge of running the simulations with the
    parameters received from the master, so they only need the
    simulation-related configuration.
    '''
    logger.info('Parsing configuration file %s', self.config_file)
    self.config_options = ReadConfigFile(self.config_file)
    # The logger settings live in the 'algorithm' section, so make sure
    # the section exists before configuring logging.
    if 'algorithm' not in self.config_options:
        self.config_options['algorithm'] = dict()
    self._initialize_logger()
    # Default the key read unconditionally by _extract_parameters().
    # Previously only the master defaulted it, so a worker whose
    # configuration file lacked it crashed with a KeyError.
    if 'evaluated_individuals_file' not in self.config_options['algorithm']:
        self.config_options['algorithm']['evaluated_individuals_file'] = None
    if 'simulation' not in self.config_options:
        self.config_options['simulation'] = dict()
    # Set important undefined options
    if 'visualize_results' not in self.config_options['simulation']:
        self.config_options['simulation']['visualize_results'] = False
    if 'seed' not in self.config_options['simulation']:
        # Generate a seed only when none was configured, so a configured
        # seed is preserved for reproducibility.
        self.config_options['simulation']['seed'] = time.time()
    # Extract the 'parameter*' sections describing the search space.
    self._extract_parameters()
    # Workers only simulate: keep a private copy of the options without
    # the algorithm section.
    self.simulation_options = copy.deepcopy(self.config_options)
    self.simulation_options.pop('algorithm')
    return
def __init__(self, **kwargs):
    '''
    Constructor of the class. It creates a new simulation object.
    Exactly one of the two keyword parameters has to be defined.

    @param config_options Dictionary with the parameters of the simulations.
    @param config_file Name of the file where the simulation parameters are stored.
    '''
    if 'config_options' in kwargs:
        # The caller already parsed the configuration: use it as-is.
        self.config_options = kwargs.pop('config_options')
    elif 'config_file' in kwargs:
        # Parse the configuration file into an options dictionary.
        self.config_file = kwargs.pop('config_file')
        self.config_options = ReadConfigFile(self.config_file)
    else:
        logger.error(
            'Non-specified simulation configuration options or configuration file'
        )
        raise Exception('Non-DefinedSimulationConfig')
    # Forward any remaining keyword arguments along the MRO.
    super(CurrentSimulation, self).__init__(**kwargs)
    return
class CurrentSimulation(object):
    '''
    This class defines a simulation whose parameters are taken from the
    configuration file (or pre-parsed options dictionary) passed as a
    parameter.  It builds the cerebellar model, drives the simulation and
    offers visualization and analysis entry points.
    '''

    def __init__(self, **kwargs):
        '''
        Constructor of the class. It creates a new simulation object.
        Exactly one of the two keyword parameters has to be defined.

        @param config_options Dictionary with the parameters of the simulations.
        @param config_file Name of the file where the simulation parameters are stored.
        '''
        if 'config_options' in kwargs:
            self.config_options = kwargs.pop('config_options')
        elif 'config_file' in kwargs:
            self.config_file = kwargs.pop('config_file')
            self.config_options = ReadConfigFile(self.config_file)
        else:
            logger.error(
                'Non-specified simulation configuration options or configuration file'
            )
            raise Exception('Non-DefinedSimulationConfig')
        # Forward any remaining keyword arguments along the MRO.
        super(CurrentSimulation, self).__init__(**kwargs)
        return

    def initialize(self):
        '''
        Initialize all the objects needed for running the simulation:
        logging, the cerebellar model (live NEST run or saved replay),
        oscillatory/stimulation currents and the pattern generator.
        '''
        # Read simulation general options
        if 'simulation' not in self.config_options:
            self.config_options['simulation'] = dict()
        if 'log_file' in self.config_options['simulation']:
            Logger2File(logger, self.config_options['simulation']['log_file'])
        if 'verbosity' not in self.config_options['simulation']:
            self.config_options['simulation']['verbosity'] = 'debug'
            logger.warning(
                'Non-specified simulation verbosity. Using default value %s',
                self.config_options['simulation']['verbosity'])
        # Translate the textual verbosity into a numeric logging level.
        numeric_level = getattr(
            logging, self.config_options['simulation']['verbosity'].upper(),
            None)
        if not isinstance(numeric_level, int):
            # NOTE(review): the 'info' fallback below is dead code because
            # ValueError is raised unconditionally afterwards — confirm
            # whether the raise or the fallback is the intended behavior.
            self.config_options['simulation']['verbosity'] = 'info'
            numeric_level = getattr(
                logging,
                self.config_options['simulation']['verbosity'].upper(), None)
            logger.warning(
                'Invalid simulation verbosity. Using default value %s',
                self.config_options['simulation']['verbosity'])
            raise ValueError('Invalid log level: %s' %
                             self.config_options['simulation']['verbosity'])
        logger.setLevel(numeric_level)
        if 'use_mpi' not in self.config_options['simulation']:
            self.config_options['simulation']['use_mpi'] = False
        if 'time' in self.config_options['simulation']:
            self.simulation_time = self.config_options['simulation']['time']
        else:
            # Default total simulation time when none is configured.
            self.simulation_time = 1
        if 'visualize_animation' not in self.config_options['simulation']:
            self.config_options['simulation']['visualize_animation'] = False
        if 'visualize_results' not in self.config_options['simulation']:
            self.config_options['simulation']['visualize_results'] = False
        logger.debug('Simulation time fixed to %ss', self.simulation_time)
        # Keep a reference (not a copy) to the options as supplied; the
        # saved-replay branch below mutates self.config_options, and
        # run_simulation() consults this reference.
        self.new_config_options = self.config_options
        # Initialize cerebellar model
        logger.debug('Creating cerebellum generator')
        if 'run_simulation' in self.config_options[
                'simulation'] and self.config_options['simulation'][
                    'run_simulation']:
            # Nest has to be imported before mpi4py
            if self.config_options['simulation']['use_mpi']:
                import SpikingCerebellum.NestCerebellarModel as NestGenerator
            else:
                import SpikingCerebellum.NestCerebellarModelNoMPI as NestGenerator
            self.cerebellum = NestGenerator.NestCerebellarModel(
                config_dict=self.config_options)
        else:
            # Replay a previously saved simulation instead of running NEST.
            # Get the path of the config_file
            import SpikingCerebellum.SavedCerebellarModel as SavedGenerator
            data_path = self.config_options['simulation']['data_path']
            simulation_name = self.config_options['simulation'][
                'simulation_name']
            # Read the old configuration file being saved with the simulation
            # and containing specific network information
            #self.config_options = ReadConfigFile(data_path+'/'+'SimulationConfig.cfg')
            self.config_options['simulation']['run_simulation'] = False
            # Ignore original paths and names
            #self.config_options['simulation']['data_path'] = data_path
            #self.config_options['simulation']['simulation_name'] = simulation_name
            self.config_options['simulation']['record_to_file'] = False
            self.config_options['network'][
                'load_file'] = data_path + '/' + simulation_name + '/network.h5'
            self.cerebellum = SavedGenerator.SavedCerebellarModel(
                config_dict=self.config_options)
        logger.debug('Initializing cerebellum generator')
        self.cerebellum.initialize_simulation()
        #self.cerebellum.visualize_network()
        # Initialize oscillatory input current
        if 'oscillations' in self.config_options:
            logger.debug('Creating AC Current generator')
            self.cerebellum.add_ac_current(
                **self.config_options['oscillations'])
        # Initialize frequency stimulation input current
        if 'stimulation' in self.config_options:
            logger.debug('Creating DC Current generator')
            self.config_options['stimulation'][
                'simulation_time'] = self.simulation_time
            # Only the mossy fibers local to this process are counted.
            #self.config_options['stimulation']['number_of_fibers'] = self.cerebellum.mflayer.number_of_neurons
            self.config_options['stimulation'][
                'number_of_fibers'] = numpy.count_nonzero(
                    self.cerebellum.mflayer.is_local_node)
            self.config_options['stimulation'][
                'rng'] = self.cerebellum.get_global_py_rng()
            if (self.config_options['simulation']['record_to_file']):
                self.config_options['stimulation'][
                    'save_pattern_file'] = self.config_options['simulation'][
                        'data_path'] + '/stimulation_pattern.h5'
            else:
                self.config_options['stimulation']['save_pattern_file'] = None
            import Stimulation.PatternGenerator as PatternGenerator
            self.pattern_generator = PatternGenerator.PatternGenerator(
                **self.config_options['stimulation'])
            self.pattern_generator.initialize()
            # Pattern durations and their DC amplitudes, plus the cumulative
            # end times used by run_simulation() to segment the timeline.
            self.pattern_length, self.pattern_activations = self.pattern_generator.get_all_patterns(
            )
            self.pattern_length_cum = self.pattern_generator.pattern_length_cum
        # Initialize current pulse stimulation: every config section named
        # 'pulse_current*' or 'sin_current*' adds one current source.
        for keyid in self.config_options:
            if keyid.startswith('pulse_current'):
                logger.debug('Adding pulse current %s', keyid)
                self.cerebellum.add_pulse_current(**self.config_options[keyid])
            elif keyid.startswith('sin_current'):
                logger.debug('Adding sinusoidal current %s', keyid)
                self.cerebellum.add_ac_current(**self.config_options[keyid])
        # The simulation clock starts at time zero.
        self.current_time = 0.

    def run_simulation(self, **kwargs):
        '''
        Run the simulation according to the configuration file.

        @param end_time Time until when simulation will be run
        '''
        if 'end_time' in kwargs:
            end_time = kwargs.pop('end_time')
            if end_time > self.simulation_time:
                logger.warning(
                    'Simulation time is shorter than end_time. Simulating %ss',
                    self.simulation_time)
            # Never simulate past the configured total simulation time.
            end_time = min(end_time, self.simulation_time)
        else:
            end_time = self.simulation_time
        logger.info('Running the simulation from %ss until time %ss',
                    self.current_time, end_time)
        if 'stimulation' in self.config_options and self.new_config_options[
                'simulation']['run_simulation']:
            # Advance pattern by pattern: bisect locates the stimulation
            # patterns overlapping the [current_time, end_time] interval.
            init_index = bisect.bisect_left(self.pattern_length_cum,
                                            self.current_time)
            end_index = bisect.bisect_left(self.pattern_length_cum, end_time)
            for index in range(init_index, end_index + 1):
                sim_time = min(
                    self.pattern_length_cum[index] - self.current_time,
                    end_time - self.current_time)
                # Substitution with nest step_current_generator is prefered, but it runs slower.
                self.cerebellum.set_dc_current(
                    amplitude=self.pattern_activations[index])
                logger.debug('Running the simulation %ss until %ss', sim_time,
                             self.cerebellum.simulation_time + sim_time)
                self.cerebellum.simulate_network(sim_time)
                self.current_time = self.cerebellum.simulation_time
        else:
            # No stimulation (or replay mode): simulate the remaining
            # interval in a single call.
            sim_time = end_time - self.current_time
            logger.debug('Running the simulation %ss until %ss', sim_time,
                         self.cerebellum.simulation_time + sim_time)
            self.cerebellum.simulate_network(sim_time)
            self.current_time = self.cerebellum.simulation_time
        return

    def visualize_results(self):
        '''
        Visualize the results of the simulation in a static 1x4 figure:
        granular/Golgi raster plots, a weight histogram and a
        weight/activation plot.
        '''
        import Visualization.SimulFigure as SimulFigure
        import Visualization.AxesNeuronPropertyLine as AxesNeuronPropertyLine
        import Visualization.AxesPatternLine as AxesPatternLine
        import Visualization.AxesRasterPlot as AxesRasterPlot
        import Visualization.AxesWeightEvolutionLine as AxesWeightEvolutionLine
        import Visualization.AxesWeightHistogram as AxesWeightHistogram
        import Visualization.AxesWeightActivationPlot as AxesWeightActivationPlot
        import Visualization.AxesFiringOffset as AxesFiringOffset
        import Visualization.AxesActivationFiringOffset as AxesActivationFiringOffset
        import matplotlib.pylab
        # (A large commented-out alternative figure with Vm/Gexc/Ginh traces
        # and weight-evolution subplots was removed here; recover it from
        # version control if needed.)
        figure8 = SimulFigure.SimulFigure(simulation=self,
                                          numRows=1,
                                          numColumns=4,
                                          figsize=[23, 14],
                                          dpi=80)
        figure8.add_subplot(fig_position=1,
                            axes_type=AxesRasterPlot.AxesRasterPlot,
                            axes_parameters={
                                'data_provider': self.cerebellum,
                                'layer': 'grclayer',
                                'visible_data_only': True,
                                'show_legend': False,
                                'cell_index': range(100),
                                'x_length': 1.
                            })
        figure8.add_subplot(fig_position=2,
                            axes_type=AxesRasterPlot.AxesRasterPlot,
                            axes_parameters={
                                'data_provider': self.cerebellum,
                                'layer': 'goclayer',
                                'visible_data_only': True,
                                'show_legend': False,
                                'x_length': 1.
                            })
        figure8.add_subplot(fig_position=3,
                            axes_type=AxesWeightHistogram.AxesWeightHistogram,
                            axes_parameters={
                                'data_provider': self.cerebellum,
                                'layer': 'mfgocsynapsis',
                                'visible_data_only': True,
                                'target_indexes': [0],
                                'show_legend': False
                            })
        figure8.add_subplot(
            fig_position=4,
            axes_type=AxesWeightActivationPlot.AxesWeightActivationPlot,
            axes_parameters={
                'data_provider': self.cerebellum,
                'pattern_provider': self.pattern_generator,
                'layer': 'mfgocsynapsis',
                'show_legend': False
            })
        figure8.plot_at_time()
        matplotlib.pylab.show()

    def visualize_animation(self):
        '''
        Visualize the results of the simulation as a live 2x2 animation:
        mossy-fiber and Golgi raster plots, an activation/firing-offset
        plot and a weight histogram.
        '''
        import Visualization.SimulAnimation as SimulAnimation
        import Visualization.AxesNeuronPropertyLine as AxesNeuronPropertyLine
        import Visualization.AxesPatternLine as AxesPatternLine
        import Visualization.AxesRasterPlot as AxesRasterPlot
        import Visualization.AxesWeightEvolutionLine as AxesWeightEvolutionLine
        import Visualization.AxesWeightHistogram as AxesWeightHistogram
        import Visualization.AxesWeightActivationPlot as AxesWeightActivationPlot
        import Visualization.AxesFiringOffset as AxesFiringOffset
        import Visualization.AxesActivationFiringOffset as AxesActivationFiringOffset
        import Visualization.AxesReceptiveField as AxesReceptiveField
        import matplotlib.pylab
        # Adjust the frame_rate depending on whether the simulation is running at the same time
        # NOTE(review): both branches currently assign the same value, so the
        # run_simulation flag has no effect here — confirm intended rates.
        if self.config_options['simulation']['run_simulation']:
            frame_rate = 1.0
        else:
            frame_rate = 1.0
        animation = SimulAnimation.SimulAnimation(
            simulation=self,
            numRows=2,
            numColumns=2,
            blit=True,
            end_time=self.simulation_time,
            frame_rate=frame_rate,
            figsize=[23, 14],
            dpi=80)
        # (Commented-out alternative subplots — Vm/Gexc traces, firing
        # offsets, pattern lines, weight evolution and receptive fields —
        # were removed here; recover them from version control if needed.)
        animation.add_subplot(fig_position=1,
                              axes_type=AxesRasterPlot.AxesRasterPlot,
                              axes_parameters={
                                  'data_provider': self.cerebellum,
                                  'pattern_provider': self.pattern_generator,
                                  'layer': 'mflayer',
                                  'cell_index': range(100),
                                  'visible_data_only': True,
                                  'show_legend': False,
                                  'x_length': 1.
                              })
        animation.add_subplot(fig_position=2,
                              axes_type=AxesRasterPlot.AxesRasterPlot,
                              axes_parameters={
                                  'data_provider': self.cerebellum,
                                  'layer': 'goclayer',
                                  'visible_data_only': True,
                                  'show_legend': False,
                                  'x_length': 1.
                              })
        animation.add_subplot(
            fig_position=3,
            axes_type=AxesActivationFiringOffset.AxesActivationFiringOffset,
            axes_parameters={
                'data_provider': self.cerebellum,
                'oscillation_freq':
                self.config_options['oscillations']['frequency'],
                'pattern_provider': self.pattern_generator,
                'layer': 'mflayer',
                'visible_data_only': True,
                'show_legend': False
            })
        animation.add_subplot(
            fig_position=4,
            axes_type=AxesWeightHistogram.AxesWeightHistogram,
            axes_parameters={
                'data_provider': self.cerebellum,
                'layer': 'grcgocsynapsis',
                'visible_data_only': True,
                'show_legend': False
            })
        matplotlib.pylab.show()

    def analyze_MI(self):
        '''
        Run every 'mutual_information*' analysis section configured in the
        options and return the list of normalized mutual information values.
        '''
        if self.config_options['simulation']['use_mpi']:
            import Analysis.MutualInformation as MutualInformation
        else:
            import Analysis.MutualInformationNoMPI as MutualInformation
        # Extract every mutual information to explore
        parameter_keys = [
            key for key in self.config_options.keys()
            if key.startswith('mutual_information')
        ]
        mutual_information = []
        for key in parameter_keys:
            # Validate the mandatory options of the section.
            if not 'layer' in self.config_options[key]:
                logger.error(
                    'Layer name has not been specified in the mutual information section'
                )
                raise Exception('NonSpecifiedLayer')
            if not 'window_length' in self.config_options[key]:
                logger.error(
                    'Window length has not been specified in the mutual information section'
                )
                raise Exception('NonSpecifiedWindowLenght')
            if not 'time_bin' in self.config_options[key]:
                logger.error(
                    'time bin has not been specified in the mutual information section'
                )
                raise Exception('NonSpecifiedTimeBin')
            if not 'record_to_file' in self.config_options[key]:
                self.config_options[key]['record_to_file'] = False
            logger.info('Analyzing mutual information in section %s', key)
            MIAnalysis = MutualInformation.MutualInformation(
                data_provider=self.cerebellum,
                pattern_generator=self.pattern_generator,
                layer=self.config_options[key]['layer'],
                window_length=self.config_options[key]['window_length'],
                time_bin=self.config_options[key]['time_bin'])
            MIAnalysis.initialize()
            MIAnalysis.runAtTime(self.current_time)
            # Normalize by the maximum achievable mutual information.
            mutual_information.append(MIAnalysis.mutual_information /
                                      MIAnalysis.max_mutual_information)
            if self.config_options[key]['record_to_file']:
                filename = self.config_options['simulation'][
                    'data_path'] + '/' + self.config_options['simulation'][
                        'simulation_name'] + '/' + key
                logger.debug(
                    'Writing mutual information from section %s to file %s',
                    key, filename)
                MIAnalysis.writeToFile(file_name=filename)
        return mutual_information

    def analyze_av_MI(self):
        '''
        Run every 'individual_mutual_information*' analysis section and
        return the list of normalized per-cell mutual information values.
        '''
        if self.config_options['simulation']['use_mpi']:
            import Analysis.IndividualMI as IndividualMI
        else:
            import Analysis.IndividualMINoMPI as IndividualMI
        # Extract every mutual information to explore
        parameter_keys = [
            key for key in self.config_options.keys()
            if key.startswith('individual_mutual_information')
        ]
        mutual_information = []
        for key in parameter_keys:
            # Validate the mandatory options of the section.
            if not 'layer' in self.config_options[key]:
                logger.error(
                    'Layer name has not been specified in the mutual information section'
                )
                raise Exception('NonSpecifiedLayer')
            if not 'window_length' in self.config_options[key]:
                logger.error(
                    'Window length has not been specified in the mutual information section'
                )
                raise Exception('NonSpecifiedWindowLenght')
            if not 'time_bin' in self.config_options[key]:
                logger.error(
                    'time bin has not been specified in the mutual information section'
                )
                raise Exception('NonSpecifiedTimeBin')
            if not 'record_to_file' in self.config_options[key]:
                self.config_options[key]['record_to_file'] = False
            logger.info(
                'Analyzing individual mutual information in section %s', key)
            MIAnalysis = IndividualMI.IndividualMI(
                data_provider=self.cerebellum,
                pattern_generator=self.pattern_generator,
                layer=self.config_options[key]['layer'],
                window_length=self.config_options[key]['window_length'],
                time_bin=self.config_options[key]['time_bin'])
            MIAnalysis.initialize()
            MIAnalysis.runAtTime(self.current_time)
            # Normalize by the maximum achievable mutual information.
            mutual_information.append(MIAnalysis.mutual_information /
                                      MIAnalysis.max_mutual_information)
            if self.config_options[key]['record_to_file']:
                filename = self.config_options['simulation'][
                    'data_path'] + '/' + self.config_options['simulation'][
                        'simulation_name'] + '/' + key
                logger.debug(
                    'Writing individual mutual information from section %s to file %s',
                    key, filename)
                MIAnalysis.writeToFile(file_name=filename)
        return mutual_information

    def analyze_Hits(self):
        '''
        Run every 'hit_analysis*' section configured in the options and
        return the list of hit indexes.
        '''
        if self.config_options['simulation']['use_mpi']:
            # NOTE(review): both branches import the NoMPI implementation; by
            # analogy with analyze_MI the MPI branch presumably should import
            # an MPI-aware module — confirm before changing.
            import Analysis.HitAnalysisNoMPI as HitAnalysis
        else:
            import Analysis.HitAnalysisNoMPI as HitAnalysis
        # Extract every hit-analysis section to explore
        parameter_keys = [
            key for key in self.config_options.keys()
            if key.startswith('hit_analysis')
        ]
        hit_analysis = []
        for key in parameter_keys:
            # Validate the mandatory options of the section.
            if not 'layer' in self.config_options[key]:
                logger.error(
                    'Layer name has not been specified in the hit analysis section'
                )
                raise Exception('NonSpecifiedLayer')
            if not 'window_length' in self.config_options[key]:
                logger.error(
                    'Window length has not been specified in the hit analysis section'
                )
                raise Exception('NonSpecifiedWindowLenght')
            if not 'time_bin' in self.config_options[key]:
                logger.error(
                    'time bin has not been specified in the hit analysis section'
                )
                raise Exception('NonSpecifiedTimeBin')
            if not 'record_to_file' in self.config_options[key]:
                self.config_options[key]['record_to_file'] = False
            logger.info('Analyzing hit analysis in section %s', key)
            # 'Analysis' shadows the Analysis package name inside this method.
            Analysis = HitAnalysis.HitAnalysis(
                data_provider=self.cerebellum,
                pattern_generator=self.pattern_generator,
                layer=self.config_options[key]['layer'],
                window_length=self.config_options[key]['window_length'],
                time_bin=self.config_options[key]['time_bin'])
            Analysis.initialize()
            Analysis.runAtTime(self.current_time)
            hit_analysis.append(Analysis.hit_index)
            if self.config_options[key]['record_to_file']:
                filename = self.config_options['simulation'][
                    'data_path'] + '/' + self.config_options['simulation'][
                        'simulation_name'] + '/' + key
                logger.debug('Writing hit analysis from section %s to file %s',
                             key, filename)
                Analysis.writeToFile(file_name=filename)
        return hit_analysis

    def analyze_Hits_Top(self):
        '''
        Run every 'hit_top_analysis*' section configured in the options and
        return the list of top-N average values.
        '''
        if self.config_options['simulation']['use_mpi']:
            # NOTE(review): both branches import the NoMPI implementation —
            # confirm whether an MPI-aware module was intended here.
            import Analysis.HitTopAnalysisNoMPI as HitTopAnalysis
        else:
            import Analysis.HitTopAnalysisNoMPI as HitTopAnalysis
        # Extract every hit-top-analysis section to explore
        parameter_keys = [
            key for key in self.config_options.keys()
            if key.startswith('hit_top_analysis')
        ]
        hit_analysis = []
        for key in parameter_keys:
            # Build the keyword arguments of the analysis incrementally,
            # validating the mandatory options along the way.
            func_params = {
                'data_provider': self.cerebellum,
                'pattern_generator': self.pattern_generator
            }
            if not 'layer' in self.config_options[key]:
                logger.error(
                    'Layer name has not been specified in the hit analysis section'
                )
                raise Exception('NonSpecifiedLayer')
            else:
                func_params['layer'] = self.config_options[key]['layer']
            if not 'window_length' in self.config_options[key]:
                logger.error(
                    'Window length has not been specified in the hit analysis section'
                )
                raise Exception('NonSpecifiedWindowLenght')
            else:
                func_params['window_length'] = self.config_options[key][
                    'window_length']
            if not 'time_bin' in self.config_options[key]:
                logger.error(
                    'time bin has not been specified in the hit analysis section'
                )
                raise Exception('NonSpecifiedTimeBin')
            else:
                func_params['time_bin'] = self.config_options[key]['time_bin']
            if not 'record_to_file' in self.config_options[key]:
                self.config_options[key]['record_to_file'] = False
            # number_of_cells is optional.
            if 'number_of_cells' in self.config_options[key]:
                func_params['number_of_cells'] = self.config_options[key][
                    'number_of_cells']
            logger.info('Analyzing hit analysis in section %s', key)
            # 'Analysis' shadows the Analysis package name inside this method.
            Analysis = HitTopAnalysis.HitTopAnalysis(**func_params)
            Analysis.initialize()
            Analysis.runAtTime(self.current_time)
            hit_analysis.append(Analysis.top_n_average)
            if self.config_options[key]['record_to_file']:
                filename = self.config_options['simulation'][
                    'data_path'] + '/' + self.config_options['simulation'][
                        'simulation_name'] + '/' + key
                logger.debug('Writing hit analysis from section %s to file %s',
                             key, filename)
                Analysis.writeToFile(file_name=filename)
        return hit_analysis
class EvolutionaryAlgorithm(object):
    '''
    This class implements an evolutionary algorithm where the parameters are
    taken from the configuration file passed as a parameter.  Rank 0 of the
    MPI communicator acts as master and the remaining ranks as workers.
    '''

    # Mapping from configuration operator names to DEAP operator callables.
    operatorTranslatorDict = {
        'OnePoint': tools.cxOnePoint,
        'TwoPoint': tools.cxTwoPoint,
        'Gaussian': tools.mutGaussian,
        'MutUniform': mutUniformCustom,
        'Tournament': tools.selTournament
    }

    # Configuration parameter names accepted by each operator.
    operatorParamDict = {
        'OnePoint': [],
        'TwoPoint': [],
        'Gaussian': ['gaussian_mu', 'gaussian_sigma', 'gaussian_indpb'],
        'Tournament': ['tournament_size'],
        'MutUniform': ['uniform_indpb']
    }

    # Mapping from configuration parameter names to DEAP keyword names.
    paramTranslatorDict = {
        'gaussian_mu': 'mu',
        'gaussian_sigma': 'sigma',
        'gaussian_indpb': 'indpb',
        'uniform_indpb': 'indpb',
        'tournament_size': 'tournsize'
    }

    def __init__(self, **kwargs):
        '''
        Constructor of the class. It creates a new object.

        @param config_file Name of the file with the options of the model.
        '''
        # NOTE(review): this local 'logger' shadows the module-level logger
        # and is only visible inside this method.
        logger = logging.getLogger('EvolutionaryAlgorithm')
        if ('config_file' in kwargs):
            self.config_file = kwargs.pop('config_file')
        else:
            logger.error('Non-specified simulation config file')
            raise Exception('Non-DefinedSimulationConfig')
        super(EvolutionaryAlgorithm, self).__init__(**kwargs)
        return

    def initialize_searcher(self):
        '''
        Initialize all the objects needed for running the search.

        The rank-0 MPI process becomes the master and every other rank a
        worker; at least two MPI processes are required.
        '''
        self.comm = MPI.COMM_WORLD
        self.mpi_rank = self.comm.Get_rank()
        self.mpi_size = self.comm.Get_size()
        if (self.mpi_rank == 0):
            self._initialize_master()
        else:
            self._initialize_worker()
        # Without at least one worker no simulation can be evaluated.
        if (self.mpi_size == 1):
            logger.error("MPI Error. Only one MPI process has been created. No workers defined.")
            raise Exception('MPIError')
        return

    def _initialize_logger(self):
        # Configure the module logger from the 'algorithm' section of the
        # configuration options.
        if 'log_file' in self.config_options['algorithm']:
            Logger2File(logger, self.config_options['algorithm']['log_file'])
        if 'verbosity' not in self.config_options['algorithm']:
            self.config_options['algorithm']['verbosity'] = 'debug'
            logger.warning('Non-specified simulation verbosity. Using default value %s', self.config_options['algorithm']['verbosity'])
        # Translate the textual verbosity into a numeric logging level.
        numeric_level = getattr(logging, self.config_options['algorithm']['verbosity'].upper(), None)
        if not isinstance(numeric_level, int):
            # NOTE(review): the 'info' fallback below is dead code because
            # ValueError is raised unconditionally afterwards — confirm
            # whether the raise or the fallback is the intended behavior.
            self.config_options['algorithm']['verbosity'] = 'info'
            numeric_level = getattr(logging, self.config_options['algorithm']['verbosity'].upper(), None)
            logger.warning('Invalid simulation verbosity. Using default value %s', self.config_options['algorithm']['verbosity'])
            raise ValueError('Invalid log level: %s' % self.config_options['algorithm']['verbosity'])
        logger.setLevel(numeric_level)

    def _initialize_master(self):
        '''
        This function initializes the master process in the evolutionary algorithm.
        The master is in charge of providing individuals to the workers.
        Thus, the master reads the algorithm configuration and fills every
        undefined algorithm option with its default value.
        '''
        logger.info('Parsing configuration file %s', self.config_file)
        self.config_options = ReadConfigFile(self.config_file)
        if 'algorithm' not in self.config_options:
            self.config_options['algorithm'] = dict()
        self._initialize_logger()
        if 'number_of_repetitions' not in self.config_options['algorithm']:
            self.config_options['algorithm']['number_of_repetitions'] = 1
        # Number of generations
        if 'number_of_generations' not in self.config_options['algorithm']:
            self.config_options['algorithm']['number_of_generations'] = 1
            logger.warning('Non-specified number_of_generations parameter. Using default value %s', self.config_options['algorithm']['number_of_generations'])
        # If number of individual is not defined, the number of available cores will be used.
        if 'number_of_individual' not in self.config_options['algorithm']:
            self.config_options['algorithm']['number_of_individual'] = 64
            logger.warning('Non-specified number_of_individual parameter. Using default value %s', self.config_options['algorithm']['number_of_individual'])
        if 'fill_idle_nodes' not in self.config_options['algorithm']:
            self.config_options['algorithm']['fill_idle_nodes'] = False
            logger.warning('Non-specified fill_idle_nodes parameter. Using default value %s', self.config_options['algorithm']['fill_idle_nodes'])
        # Crossover probability
        if 'crossover_probability' not in self.config_options['algorithm']:
            self.config_options['algorithm']['crossover_probability'] = 1.
            logger.warning('Non-specified crossover_probability parameter. Using default value %s', self.config_options['algorithm']['crossover_probability'])
        # Crossover operator
        if 'crossover_operator' not in self.config_options['algorithm']:
            self.config_options['algorithm']['crossover_operator'] = 'OnePoint'
            logger.warning('Non-specified crossover_operator parameter. Using default value %s', self.config_options['algorithm']['crossover_operator'])
        # Mutation probability
        if 'mutation_probability' not in self.config_options['algorithm']:
            self.config_options['algorithm']['mutation_probability'] = 1.
            logger.warning('Non-specified mutation_probability parameter. Using default value %s', self.config_options['algorithm']['mutation_probability'])
        # Mutation operator
        if 'mutation_operator' not in self.config_options['algorithm']:
            self.config_options['algorithm']['mutation_operator'] = 'Gaussian'
            logger.warning('Non-specified mutation_operator parameter. Using default value %s', self.config_options['algorithm']['mutation_operator'])
        # Selection operator
        if 'selection_operator' not in self.config_options['algorithm']:
            self.config_options['algorithm']['selection_operator'] = 'Tournament'
            logger.warning('Non-specified selection_operator parameter. Using default value %s', self.config_options['algorithm']['selection_operator'])
        # Hall of fame size
        if 'hall_of_fame_size' not in self.config_options['algorithm']:
            self.config_options['algorithm']['hall_of_fame_size'] = 1
            logger.warning('Non-specified hall_of_fame_size parameter. Using default value %s', self.config_options['algorithm']['hall_of_fame_size'])
        # Loading from file
        if 'load_from_file' not in self.config_options['algorithm']:
            self.config_options['algorithm']['load_from_file'] = None
        # Saving state parameters
        if 'saving_file' not in self.config_options['algorithm']:
            self.config_options['algorithm']['saving_file'] = None
        if 'saving_step' not in self.config_options['algorithm']:
            self.config_options['algorithm']['saving_step'] = 1
            logger.warning('Non-specified saving_step parameter. Using default value %s', self.config_options['algorithm']['saving_step'])
        if 'evaluated_individuals_file' not in self.config_options['algorithm']:
            self.config_options['algorithm']['evaluated_individuals_file'] = None
        else:
            # Individuals are appended, so warn when the file already exists.
            if os.path.isfile(self.config_options['algorithm']['evaluated_individuals_file']):
                logger.warning('Evaluated individual file %s already exists. New individuals will be appended', self.config_options['algorithm']['evaluated_individuals_file'])
        # Initialize the simulation seeds if they have not been initialized
        if 'simulation' not in self.config_options:
            self.config_options['simulation'] = dict()
        if 'seed' not in self.config_options['simulation']:
            self.config_options['simulation']['seed'] = time.time()
        # Extract parameters to explore
        self._extract_parameters()
        # Initialize the evolutionary algorithm
        self._initialize_algorithm()
        # Initialize communication manager
        self.simulationQueue = Queue.Queue()
        self.completeQueue = Queue.Queue()
        self.managerThread = threading.Thread(target=self._manage_communications)
        return

    def _initialize_worker(self):
        '''
        This function initializes the worker process in the evolutionary algorithm.
        The workers are in charge of running the simulations with the parameters
        received from the master.
        '''
        logger.info('Parsing configuration file %s', self.config_file)
        self.config_options = ReadConfigFile(self.config_file)
        if 'algorithm' not in self.config_options:
            self.config_options['algorithm'] = dict()
        self._initialize_logger()
        if 'simulation' not in self.config_options:
            self.config_options['simulation'] = dict()
        # Set important undefined options
        if 'visualize_results' not in self.config_options['simulation']:
            self.config_options['simulation']['visualize_results'] = False
        if 'seed' not in self.config_options['simulation']:
            self.config_options['simulation']['seed'] = time.time()
        # Extract parameters to explore
        # NOTE(review): _extract_parameters reads
        # config_options['algorithm']['evaluated_individuals_file'], which is
        # only defaulted by _initialize_master — a worker whose configuration
        # lacks that key raises KeyError here. Confirm and default it.
        self._extract_parameters()
        # Make a copy of the simulation options and extract the algorithm section
        self.simulation_options = copy.deepcopy(self.config_options)
        self.simulation_options.pop('algorithm')
        return

    def _extract_parameters(self):
        # Extract every parameter to explore: each 'parameter*' section is
        # popped from the options and kept in self.parameter_dic.
        self.parameter_keys = [key for key in self.config_options.keys() if key.startswith('parameter')]
        self.parameter_dic = []
        key_list = []
        for key in self.parameter_keys:
            self.parameter_dic.append(self.config_options.pop(key))
            key_list.append(key)
        # Print the parameter names as a header of the evaluated-individuals
        # file (individuals are appended later during the search).
        if (self.config_options['algorithm']['evaluated_individuals_file'] is not None):
            # 'file' shadows the builtin of the same name.
            with open(self.config_options['algorithm']['evaluated_individuals_file'], 'a') as file:
                file.write('# ')
                for val in key_list:
                    file.write('%s\t' % val)
                file.write('\n')
        # Validate every extracted parameter section.
        for key, parameter in zip(self.parameter_keys, self.parameter_dic):
            # Check if the section and parameter exists
            if not 'section' in parameter:
                logger.error('Parameter section has not been specified in %s', key)
                raise Exception('NonSpecifiedSection')
            if parameter['section'] not in self.config_options:
                logger.error('Parameter section %s does not exist', parameter['section'])
                raise Exception('InvalidSection')
            if not 'parameter' in parameter:
                logger.error('Parameter name has not been specified in %s', key)
                raise Exception('NonSpecifiedParameter')
            if parameter['parameter'] not in self.config_options[parameter['section']]:
                logger.error('Parameter %s does not exist in section %s', parameter['parameter'], parameter['section'])
                raise Exception('InvalidParameter')
            if not 'min_value' in parameter:
                logger.error('Parameter minimum values has not been specified in %s', key)
                raise Exception('NonSpecifiedMinValue')
            if not 'max_value' in parameter:
                logger.error('Parameter maximum values has not been specified in %s', key)
                raise Exception('NonSpecifiedMaxValue')
            if not 'type' in parameter:
                logger.error('Parameter evolution type has not been specified in %s', key)
                raise Exception('NonSpecifiedType')
            if parameter['type'] not in ['geometric', 'arithmetic']:
                logger.error('Parameter evolution type %s has not been implemented. Only geometric and arithmetic are allowed so far', parameter['type'])
                raise Exception('InvalidType')

    def _get_operator(self, parameter):
        # Check if the specified operator is included and translate its
        # configured name into the corresponding DEAP callable.
        if parameter in self.operatorTranslatorDict:
            return self.operatorTranslatorDict[parameter]
        else:
            logger.error('The operator %s has not been mapped to an operator', parameter)
            raise Exception('Non-MappedOperator')

    def _get_operator_params(self, parameter, dicAlgorithm):
        # Retrieve the parameters of the operator.
out_params = list() param_dic = dict() if parameter in self.operatorParamDict: for param in self.operatorParamDict[parameter]: if param in dicAlgorithm: out_params.append(dicAlgorithm[param]) if param in self.paramTranslatorDict: param_dic[self.paramTranslatorDict[param]] = dicAlgorithm[param] else: logger.error('The required operator parameter %s has not a translation', param) raise Exception('Non-DefinedParameter') else: logger.error('The required operator parameter %s has not been set', param) raise Exception('Non-DefinedParameter') return param_dic def _get_unnormalized_values(self, individual): unnorm_values = [] for norm_value, param_dic in zip(individual,self.parameter_dic): min_value = param_dic['min_value'] max_value = param_dic['max_value'] if param_dic['type'] == 'arithmetic': value = norm_value*(max_value - min_value) + min_value elif param_dic['type'] == 'geometric': logmin = math.log10(abs(min_value)) logmax = math.log10(abs(max_value)) value = 10.0**(norm_value*(logmax - logmin))*min_value unnorm_values.append(value) return unnorm_values def _eval_fitness_funct(self,individual, seed): # Make a copy of the simulation config options local_config_options = copy.deepcopy(self.simulation_options) unnorm_values = self._get_unnormalized_values(individual) for unnorm, param_dic in zip(unnorm_values,self.parameter_dic): local_config_options[param_dic['section']][param_dic['parameter']] = unnorm local_config_options['simulation']['seed'] = seed logger.info('Running evaluation with seed %s and parameters %s', seed, self._get_unnormalized_values(individual)) # parent_conn, child_conn = multiprocessing.Pipe() # # p = multiprocessing.Process(target=helper_subprocess_simulation, args=(child_conn,local_config_options)) # p.start() # # # # # Catch SIGNINT just in case the parent process is killed before. # # import signal # # import sys # # # # def signal_term_handler(signal, frame): # # logger.info('Got %s. 
Killing running subprocesses',signal) # # if p.is_alive(): # Child still around? # # p.terminate() # kill it # # p.join() # # sys.exit(0) # # # # signal.signal(signal.SIGUSR2, signal_term_handler) # # signal.signal(signal.SIGINT, signal_term_handler) # # # signal.signal(signal.SIGKILL, signal_term_handler) # # signal.signal(signal.SIGTERM, signal_term_handler) # # mutual_information = parent_conn.recv() # p.join() mutual_information = helper_simulation(local_config_options) logger.info('Mutual information with seed %s and parameters %s: %s', seed, self._get_unnormalized_values(individual), mutual_information) return mutual_information def _initialize_algorithm(self): ''' Initialize the evolutionary algorithm based on the provided parameters. ''' self.num_generator = numpy.random.RandomState() # Create multiobjective optimization (maximize average MI and minimize Std) creator.create("FitnessMulti", base.Fitness, weights=(1.0,-1.0e-4)) # Each individual inherits from list and add the FitnessMulti fitness function creator.create("Individual", list, fitness=creator.FitnessMulti) # Attribute generator (each attribute will be the normalized value -or the logartihm-) toolbox.register("attr_float", self.num_generator.rand) # Structure initializers toolbox.register("individual", tools.initRepeat, creator.Individual, toolbox.attr_float, len(self.parameter_keys)) # Population initializers toolbox.register("population", tools.initRepeat, list, toolbox.individual) # Operator registering #toolbox.register("evaluate", self._evaluate_population) # Crossover operator operator = self._get_operator(self.config_options['algorithm']['crossover_operator']) paramOperator = self._get_operator_params(self.config_options['algorithm']['crossover_operator'], self.config_options['algorithm']) toolbox.register("mate", operator, **paramOperator) # Mutate operator operator = self._get_operator(self.config_options['algorithm']['mutation_operator']) paramOperator = 
self._get_operator_params(self.config_options['algorithm']['mutation_operator'], self.config_options['algorithm']) toolbox.register("mutate", operator, **paramOperator) toolbox.decorate("mutate", checkBounds()) # Selection operator operator = self._get_operator(self.config_options['algorithm']['selection_operator']) paramOperator = self._get_operator_params(self.config_options['algorithm']['selection_operator'], self.config_options['algorithm']) toolbox.register("select", operator, **paramOperator) def _evaluate_population(self, population): # Insert the population into the simulation queue and unlock it self.simulationQueue.put(population) self.simulationQueue.task_done() self.end_simulation = self.last_generation logger.info("Evaluating %i individuals",len(population)) return self.completeQueue.get() def _manage_communications(self): ''' This function manages the simulation queue, sending the simulations to the other MPI processes. It manages the two simulation queues (SimulationQueue -jobs to be done- and CompleteQueue -jobs finished-). This function is thought to be executed in a sepparate thread of the master process. ''' # Initialize SimulationMap and RunningDict simulationMap = dict() availableProcs = range(1,self.mpi_size) endedProcs = [] for ind in availableProcs: simulationMap[ind] = None runningDict = dict() individualDict = dict() # List with the simulations to be executed in this "batch" simulationList = [] request = None status = MPI.Status() population_size = 0 output_population = toolbox.population(n=0) data = numpy.empty(1, dtype=numpy.float64) ######################################## # Create requests with MPI.Irecv(....) 
######################################## request = self.comm.Irecv([data, MPI.DOUBLE], source=MPI.ANY_SOURCE, tag=MPI.ANY_TAG) # This loop end when every worker process has been finished while (len(endedProcs)!=(self.mpi_size-1)): Job_finished = request.Test(status) if Job_finished or (not availableProcs): if (not Job_finished): logger.debug('Waiting for something finished') request.Wait(status) # There is at least one simulation finished mpi_process = status.Get_source() tuple_ind = simulationMap[mpi_process] logger.debug('%s mutual information has been received from %s: %s', tuple_ind, mpi_process, data[0]) if tuple_ind not in runningDict: logger.warning('Error in data received from process %s',mpi_process) logger.warning('%s not exist in runningDict %s',tuple_ind,runningDict) else: runningDict[tuple_ind][0].append(data[0]) # If all the simulations with these parameters are done, get the average and std if (len(runningDict[tuple_ind][0])==self.config_options['algorithm']['number_of_repetitions']): individual = individualDict.pop(tuple_ind) individual.fitness.values = numpy.average(runningDict[tuple_ind][0]), numpy.std(runningDict[tuple_ind][0]) logger.debug('Fitness value calculated for individual %s: %s', individual, individual.fitness.values) output_population.append(individual) # Print fitness value for evaluated individual if (self.config_options['algorithm']['evaluated_individuals_file'] is not None): with open(self.config_options['algorithm']['evaluated_individuals_file'],'a') as file: param_values = self._get_unnormalized_values(individual) for val in param_values+list(individual.fitness.values): file.write('%s\t'%val) file.write('\n') is_from_queue = runningDict[tuple_ind][1] runningDict.pop(tuple_ind) logger.debug('%s extracted from the running dictionary', tuple_ind) # Check the number of individual to finish before unlocking the EA. 
if is_from_queue: population_size -= 1 if population_size==0: self.completeQueue.put(output_population) output_population = toolbox.population(n=0) self.completeQueue.task_done() logger.debug('Simulation batch has been finished') # Set the process as available simulationMap[mpi_process] = None availableProcs.append(mpi_process) ######################################## # Create requests with MPI.Irecv(....) ######################################## request = self.comm.Irecv([data, MPI.DOUBLE], source=MPI.ANY_SOURCE, tag=MPI.ANY_TAG) elif availableProcs: # There are available processors if simulationList: # There are simulations ready to be launched # Extract the first simulation and the first available process parameters, cur_seed, from_queue = simulationList.pop(0) proc_rank = availableProcs.pop(0) param_tuple = tuple(parameters) if param_tuple not in runningDict: runningDict[param_tuple] = ([],from_queue) logger.debug('%s inserted in the running dictionary', param_tuple) simulationMap[proc_rank] = param_tuple ######################################## # Send through MPI the parameters and the seed ######################################## logger.debug('Sending parameters %s and seed %s to process %s', parameters, cur_seed, proc_rank) data_send = numpy.empty(len(parameters)+1, dtype=numpy.float64) for idx, parameter in enumerate(parameters): data_send[idx] = parameter data_send[-1] = cur_seed self.comm.Send([data_send, MPI.DOUBLE], dest=proc_rank, tag=SIM_DATA) elif not self.simulationQueue.empty(): # There are available batch simulations in the simulation queue. 
# Add every individual/seed combination to the simulationList population = self.simulationQueue.get() logger.debug('New population to be evaluated received: %s',population) for ind in population: ind_key = tuple(ind) # Skip those individual already under evaluation if ind_key not in individualDict: population_size += 1 individualDict[ind_key] = ind for seed in range(self.config_options['simulation']['seed'],self.config_options['simulation']['seed']+self.config_options['algorithm']['number_of_repetitions']): simulationList.append((ind,seed,True)) elif self.end_simulation: proc_rank = availableProcs.pop(0) ######################################## # Send and ending signal to the worker ######################################## logger.debug('Sending ending signal to process %s', proc_rank) data_send = numpy.empty(len(self.parameter_dic)+1, dtype=numpy.float64) self.comm.Send([data_send, MPI.DOUBLE], dest=proc_rank, tag=SIM_EXIT) endedProcs.append(proc_rank) elif self.config_options['algorithm']['fill_idle_nodes']: # There is nothing to simulate. Generate a new individual logger.debug('There are idle nodes. Creating random individuals.') individual = toolbox.individual() ind_key = tuple(individual) # Skip those individual already under evaluation if ind_key not in individualDict: individualDict[ind_key] = individual for seed in range(self.config_options['simulation']['seed'],self.config_options['simulation']['seed']+self.config_options['algorithm']['number_of_repetitions']): simulationList.append((individual,seed,False)) else: # Nothing to do logger.debug('Sleeping 1') time.sleep(0.1) else: # Nothing to do logger.debug('Sleeping 2') time.sleep(0.1) def execute_search(self): ''' Initialize all the objects needed for running the simulation. ''' if (self.mpi_rank==0): self._execute_search_master() else: self._execute_search_worker() return def _execute_search_master(self): ''' The master node executes the genetic algorithm and provides simulation parameters to the workers. 
''' # Sequence of the parameter names param_names = [] for param_dic in self.parameter_dic: param_names.append(param_dic['section'] + '.' + param_dic['parameter']) # Initialize stats stats = tools.Statistics(lambda ind: ind.fitness.values) stats.register("avg", numpy.average, axis=0) stats.register("max", numpy.max, axis=0) stats.register("min", numpy.min, axis=0) stats.register("std", numpy.std, axis=0) # Initialize mapping to distribute the evaluations to the workers #toolbox.register("map", futures.map) # If we are in the last generation activate the flag self.last_generation = False self.end_simulation = False # Start simulation thread self.managerThread.start() # Load the previous algorithm state if self.config_options['algorithm']['load_from_file']: with open(self.config_options['algorithm']['load_from_file'], "r") as cp_file: cp = pickle.load(cp_file) self.population = cp["population"] start_gen = cp["generation"]+1 halloffame = cp["halloffame"] logbook = cp["logbook"] self.num_generator.set_state(cp["rndstate"]) else: self.num_generator.seed() self.population = toolbox.population(n=self.config_options['algorithm']['number_of_individual']) start_gen = 0 halloffame = tools.HallOfFame(maxsize=self.config_options['algorithm']['hall_of_fame_size']) logbook = tools.Logbook() logbook.header = "gen", "evals", "fitness" logbook.chapters["fitness"].header = "avg", "max", "min", "std" # Fill idle nodes with new random individual #if self.config_options['algorithm']['fill_idle_nodes']: # population = self._fill_idle_nodes(self.population) # Remove identical individual filt_offspring = [] [filt_offspring.append(ind) for ind in self.population if ind not in filt_offspring] self.population = filt_offspring logger.debug("Start of evolution") self.population = self._evaluate_population(self.population) halloffame.update(self.population) record = stats.compile(self.population) logbook.record(gen=0, evals=len(self.population), **record) # Saving evolution state if 
self.config_options['algorithm']['saving_file']: # Fill the dictionary using the dict(key=value[, ...]) constructor cp = dict(population=self.population, generation=0, halloffame=halloffame, logbook=logbook, rndstate=self.num_generator.get_state()) with open(self.config_options['algorithm']['saving_file'], "wb") as cp_file: pickle.dump(cp, cp_file) logger.info('Evolution state saved in file %s', self.config_options['algorithm']['saving_file']) logger.info('Parameter sequence: %s', param_names) logger.info('Hall of Fame:') for ind in halloffame: logger.info('Individual: %s. Fitness: %s', self._get_unnormalized_values(ind), ind.fitness.values) start_gen += 1; # Begin the evolution for gen in range(start_gen, self.config_options['algorithm']['number_of_generations']): logger.info("Generation %i", gen) # If we are in the last generation activate the flag self.last_generation = (gen == self.config_options['algorithm']['number_of_generations']-1) # Select the next generation individuals offspring = toolbox.select(self.population, k=self.config_options['algorithm']['number_of_individual']) # Clone the selected individuals offspring = list(map(toolbox.clone, offspring)) # Apply crossover and mutation on the offspring for child1, child2 in zip(offspring[::2], offspring[1::2]): if self.num_generator.rand() < self.config_options['algorithm']['crossover_probability']: orig_child1 = toolbox.clone(child1) orig_child2 = toolbox.clone(child2) toolbox.mate(child1, child2) if orig_child1!=child1: del child1.fitness.values if orig_child2!=child2: del child2.fitness.values for mutant in offspring: if self.num_generator.rand() < self.config_options['algorithm']['mutation_probability']: orig_mutant = toolbox.clone(mutant) toolbox.mutate(mutant, rand_generator=self.num_generator) if (mutant!=orig_mutant): del mutant.fitness.values # Remove identical individual filt_offspring = [] [filt_offspring.append(ind) for ind in offspring if ind not in filt_offspring] offspring = filt_offspring # 
Fill idle nodes with new random individual #if self.config_options['algorithm']['fill_idle_nodes']: # offspring = self._fill_idle_nodes(offspring) # Evaluate the individuals with an invalid fitness invalid_ind = [ind for ind in offspring if not ind.fitness.valid] valid_ind = [ind for ind in offspring if ind.fitness.valid] #for idx,ind in enumerate(offspring): # print idx, ind, ind.fitness.valid # Evaluate the population if (len(invalid_ind)>0): evaluated_pop = self._evaluate_population(invalid_ind) # The population is entirely replaced by the offspring self.population[:] = valid_ind[:] + evaluated_pop[:] # The population is extended with the offspring #self.population.extend(invalid_ind) halloffame.update(self.population) record = stats.compile(self.population) logbook.record(gen=gen, evals=len(self.population), **record) # Saving evolution state if self.config_options['algorithm']['saving_file'] and gen % self.config_options['algorithm']['saving_step'] == 0: # Fill the dictionary using the dict(key=value[, ...]) constructor cp = dict(population=self.population, generation=gen, halloffame=halloffame, logbook=logbook, rndstate=self.num_generator.get_state()) with open(self.config_options['algorithm']['saving_file'], "wb") as cp_file: pickle.dump(cp, cp_file) logger.info('Evolution state saved in file %s', self.config_options['algorithm']['saving_file']) logger.info('Statistics in generation %s. %s evaluations', gen, len(invalid_ind)) for key,value in record.items(): logger.info('%s: %s', key, value) logger.info('Parameter sequence: %s', param_names) best_ind = tools.selBest(self.population, 1)[0] logger.info("Best individual in current population is %s, %s",self._get_unnormalized_values(best_ind), best_ind.fitness.values) logger.info('Hall of Fame:') for ind in halloffame: logger.info('Individual: %s. 
Fitness: %s', self._get_unnormalized_values(ind), ind.fitness.values) logger.info("-- End of (successful) evolution --") return def _execute_search_worker(self): ''' Worker nodes receive parameter lists and simulate the network. ''' stay_working = True my_status = MPI.Status() while stay_working: # Receive the simulation parameters and seed data_recv = numpy.empty(len(self.parameter_keys)+1, dtype=numpy.float64) self.comm.Recv([data_recv, MPI.DOUBLE], source=0, tag=MPI.ANY_TAG, status = my_status) tag = my_status.Get_tag() # Check the tag if tag == SIM_EXIT: stay_working = False continue if tag != SIM_DATA: logger.warning('Unknown tag %s received in worker', tag) cur_seed = int(data_recv[-1]) parameters = data_recv[:-1].tolist() logger.debug('Received parameters %s and seed %s', parameters, cur_seed) # Launch the simulation with the parameters mutual_information = self._eval_fitness_funct(parameters, cur_seed) logger.debug('Sending mutual information value %s to process root process', mutual_information) send_array = numpy.array([mutual_information], dtype=numpy.float64) self.comm.Send([send_array, MPI.DOUBLE], dest=0, tag=SIM_DATA) return
def initialize_searcher(self):
    '''
    Initialize all the objects needed for running the search.

    Parses the configuration file, configures logging (file redirection and
    verbosity), fills every undefined option with its default value (logging
    a warning for each), extracts the parameters to explore, snapshots the
    simulation options and initializes the evolutionary algorithm.
    '''
    logger.info('Parsing configuration file %s', self.config_file)
    self.config_options = ReadConfigFile(self.config_file)
    if 'algorithm' not in self.config_options:
        self.config_options['algorithm'] = dict()
    # Alias to the nested dict: writes through it update self.config_options.
    alg_options = self.config_options['algorithm']

    # Optionally redirect log output to a file
    if 'log_file' in alg_options:
        Logger2File(logger, alg_options['log_file'])
    if 'verbosity' not in alg_options:
        alg_options['verbosity'] = 'debug'
        logger.warning(
            'Non-specified simulation verbosity. Using default value %s',
            alg_options['verbosity'])
    numeric_level = getattr(
        logging, alg_options['verbosity'].upper(), None)
    if not isinstance(numeric_level, int):
        # BUG FIX: the original raised ValueError right after installing the
        # fallback level, which made the fallback (and the warning announcing
        # it) dead code and reported the *corrected* value as invalid.
        # Fall back to 'info' as the warning promises instead of raising.
        alg_options['verbosity'] = 'info'
        numeric_level = getattr(
            logging, alg_options['verbosity'].upper(), None)
        logger.warning(
            'Invalid simulation verbosity. Using default value %s',
            alg_options['verbosity'])
    logger.setLevel(numeric_level)

    if 'simulation' not in self.config_options:
        self.config_options['simulation'] = dict()
    sim_options = self.config_options['simulation']
    # Set important undefined options
    if 'visualize_results' not in sim_options:
        sim_options['visualize_results'] = False
    if 'seed' not in sim_options:
        # Default seed: current wall-clock time
        sim_options['seed'] = time.time()

    # number_of_repetitions defaults silently (no warning in the original)
    if 'number_of_repetitions' not in alg_options:
        alg_options['number_of_repetitions'] = 1

    # Remaining algorithm options: (key, default, warning message) — the
    # message strings are preserved verbatim from the original code.
    warned_defaults = [
        ('number_of_generations', 1,
         'Non-specified number_of_generations parameter. Using default value %s'),
        ('number_of_cores', 1,
         'Non-specified number_of_cores parameter. Using default value %s'),
        # If number of individual is not defined, a fixed default is used.
        ('number_of_individual', 64,
         'Non-specified number_of_individual parameter. Using default value %s'),
        ('fill_idle_nodes', False,
         'Non-specified fill_idle_nodes parameter. Using default value %s'),
        ('crossover_probability', 1.,
         'Non-specified crossover_probability parameter. Using default value %s'),
        ('crossover_operator', 'OnePoint',
         'Non-specified crossover_operator parameter. Using default value %s'),
        ('mutation_probability', 1.,
         'Non-specified mutation_probability parameter. Using default value %s'),
        ('mutation_operator', 'Gaussian',
         'Non-specified mutation_operator parameter. Using default value %s'),
        ('selection_operator', 'Tournament',
         'Non-specified selection_operator parameter. Using default value %s'),
        ('hall_of_fame_size', 1,
         'Non-specified hall_of_fame_size parameter. Using default value %s'),
    ]
    for key, default, message in warned_defaults:
        if key not in alg_options:
            alg_options[key] = default
            logger.warning(message, alg_options[key])

    # Loading from file (None -> start a fresh evolution)
    if 'load_from_file' not in alg_options:
        alg_options['load_from_file'] = None
    # Saving state parameters (None -> checkpointing disabled)
    if 'saving_file' not in alg_options:
        alg_options['saving_file'] = None
    if 'saving_step' not in alg_options:
        alg_options['saving_step'] = 1
        logger.warning(
            'Non-specified saving_step parameter. Using default value %s',
            alg_options['saving_step'])

    # Extract parameters to explore
    self._extract_parameters()
    # Make a copy of the simulation options and extract the algorithm section
    self.simulation_options = copy.deepcopy(self.config_options)
    self.simulation_options.pop('algorithm')
    # Initialize the evolutionary algorithm
    self._initialize_algorithm()
    return
def _initialize_master(self):
    '''
    This function initializes the master process in the evolutionary
    algorithm. The master is in charge of providing individual to the
    workers. Thus, the master reads the algorithm configuration.
    '''
    logger.info('Parsing configuration file %s', self.config_file)
    self.config_options = ReadConfigFile(self.config_file)
    if 'algorithm' not in self.config_options:
        self.config_options['algorithm'] = dict()
    self._initialize_logger()

    # Short alias to the nested dict: writes through it land in
    # self.config_options['algorithm'].
    cfg = self.config_options['algorithm']

    def default_to(option, value, message=None):
        # Install `value` under `option` when absent; optionally warn.
        if option not in cfg:
            cfg[option] = value
            if message is not None:
                logger.warning(message, cfg[option])

    default_to('number_of_repetitions', 1)
    # Number of generations
    default_to('number_of_generations', 1,
               'Non-specified number_of_generations parameter. Using default value %s')
    # If number of individual is not defined, the number of available cores will be used.
    default_to('number_of_individual', 64,
               'Non-specified number_of_individual parameter. Using default value %s')
    default_to('fill_idle_nodes', False,
               'Non-specified fill_idle_nodes parameter. Using default value %s')
    # Crossover probability
    default_to('crossover_probability', 1.,
               'Non-specified crossover_probability parameter. Using default value %s')
    # Crossover operator
    default_to('crossover_operator', 'OnePoint',
               'Non-specified crossover_operator parameter. Using default value %s')
    # Mutation probability
    default_to('mutation_probability', 1.,
               'Non-specified mutation_probability parameter. Using default value %s')
    # Mutation operator
    default_to('mutation_operator', 'Gaussian',
               'Non-specified mutation_operator parameter. Using default value %s')
    # Selection operator
    default_to('selection_operator', 'Tournament',
               'Non-specified selection_operator parameter. Using default value %s')
    # Hall of fame size
    default_to('hall_of_fame_size', 1,
               'Non-specified hall_of_fame_size parameter. Using default value %s')
    # Loading from file
    default_to('load_from_file', None)
    # Saving state parameters
    default_to('saving_file', None)
    default_to('saving_step', 1,
               'Non-specified saving_step parameter. Using default value %s')

    if 'evaluated_individuals_file' not in cfg:
        cfg['evaluated_individuals_file'] = None
    elif os.path.isfile(cfg['evaluated_individuals_file']):
        logger.warning('Evaluated individual file %s already exists. New individuals will be appended',
                       cfg['evaluated_individuals_file'])

    # Initialize the simulation seeds if they have not been initialized
    sim_cfg = self.config_options.setdefault('simulation', dict())
    if 'seed' not in sim_cfg:
        sim_cfg['seed'] = time.time()

    # Extract parameters to explore
    self._extract_parameters()
    # Initialize the evolutionary algorithm
    self._initialize_algorithm()
    # Initialize communication manager
    self.simulationQueue = Queue.Queue()
    self.completeQueue = Queue.Queue()
    self.managerThread = threading.Thread(target=self._manage_communications)
    return
class EvolutionaryAlgorithm(object):
    '''
    This class implements an evolutionary algorithm where the parameters
    are taken from the configuration file passed as a parameter.
    '''

    # Translation from operator names (as written in the config file) to the
    # DEAP operator implementations.
    operatorTranslatorDict = {
        'OnePoint': tools.cxOnePoint,
        'TwoPoint': tools.cxTwoPoint,
        'Gaussian': tools.mutGaussian,
        'MutUniform': mutUniformCustom,
        'Tournament': tools.selTournament
    }

    # Configuration parameters required by each operator.
    operatorParamDict = {
        'OnePoint': [],
        'TwoPoint': [],
        'Gaussian': ['gaussian_mu', 'gaussian_sigma', 'gaussian_indpb'],
        'Tournament': ['tournament_size'],
        'MutUniform': ['uniform_indpb']
    }

    # Translation from configuration parameter names to the keyword argument
    # names expected by the corresponding DEAP operators.
    paramTranslatorDict = {
        'gaussian_mu': 'mu',
        'gaussian_sigma': 'sigma',
        'gaussian_indpb': 'indpb',
        'uniform_indpb': 'indpb',
        'tournament_size': 'tournsize'
    }

    def __init__(self, **kwargs):
        '''
        Constructor of the class. It creates a new object.

        @param config_file Name of the file with the options of the model.
        @raise Exception Non-DefinedSimulationConfig if config_file is missing.
        '''
        logger = logging.getLogger('EvolutionaryAlgorithm')
        if 'config_file' in kwargs:
            self.config_file = kwargs.pop('config_file')
        else:
            logger.error('Non-specified simulation config file')
            raise Exception('Non-DefinedSimulationConfig')
        super(EvolutionaryAlgorithm, self).__init__(**kwargs)
        return

    def _set_algorithm_default(self, key, value):
        '''
        Set a default value for an undefined "algorithm" option and emit the
        standard warning. The rendered message is identical to the previous
        per-option warnings ("Non-specified <key> parameter. Using default
        value <value>").
        '''
        if key not in self.config_options['algorithm']:
            self.config_options['algorithm'][key] = value
            logger.warning('Non-specified %s parameter. Using default value %s',
                           key, value)

    def initialize_searcher(self):
        '''
        Initialize all the objects needed for running the simulation.

        Reads the configuration file, configures the logger, fills every
        undefined algorithm/simulation option with its default value,
        extracts the parameters to explore and initializes the
        evolutionary algorithm.

        @raise ValueError If an invalid verbosity level is supplied.
        '''
        logger.info('Parsing configuration file %s', self.config_file)
        self.config_options = ReadConfigFile(self.config_file)

        if 'algorithm' not in self.config_options:
            self.config_options['algorithm'] = dict()

        if 'log_file' in self.config_options['algorithm']:
            Logger2File(logger, self.config_options['algorithm']['log_file'])

        if 'verbosity' not in self.config_options['algorithm']:
            self.config_options['algorithm']['verbosity'] = 'debug'
            logger.warning('Non-specified simulation verbosity. Using default value %s',
                           self.config_options['algorithm']['verbosity'])
        numeric_level = getattr(
            logging, self.config_options['algorithm']['verbosity'].upper(), None)
        if not isinstance(numeric_level, int):
            # Unknown level name: fall back to 'info', warn and abort.
            self.config_options['algorithm']['verbosity'] = 'info'
            numeric_level = getattr(
                logging, self.config_options['algorithm']['verbosity'].upper(), None)
            logger.warning('Invalid simulation verbosity. Using default value %s',
                           self.config_options['algorithm']['verbosity'])
            raise ValueError('Invalid log level: %s' %
                             self.config_options['algorithm']['verbosity'])
        logger.setLevel(numeric_level)

        if 'simulation' not in self.config_options:
            self.config_options['simulation'] = dict()

        # Set important undefined options
        if 'visualize_results' not in self.config_options['simulation']:
            self.config_options['simulation']['visualize_results'] = False
        if 'seed' not in self.config_options['simulation']:
            self.config_options['simulation']['seed'] = time.time()

        if 'number_of_repetitions' not in self.config_options['algorithm']:
            self.config_options['algorithm']['number_of_repetitions'] = 1

        # Number of generations
        self._set_algorithm_default('number_of_generations', 1)
        # Number of cores to be used in the evaluations
        self._set_algorithm_default('number_of_cores', 1)
        # Number of individuals of the population
        self._set_algorithm_default('number_of_individual', 64)
        # Whether idle nodes are filled with extra random individuals
        self._set_algorithm_default('fill_idle_nodes', False)
        # Crossover probability and operator
        self._set_algorithm_default('crossover_probability', 1.)
        self._set_algorithm_default('crossover_operator', 'OnePoint')
        # Mutation probability and operator
        self._set_algorithm_default('mutation_probability', 1.)
        self._set_algorithm_default('mutation_operator', 'Gaussian')
        # Selection operator
        self._set_algorithm_default('selection_operator', 'Tournament')
        # Hall of fame size
        self._set_algorithm_default('hall_of_fame_size', 1)

        # Loading from file (no warning when unset)
        if 'load_from_file' not in self.config_options['algorithm']:
            self.config_options['algorithm']['load_from_file'] = None

        # Saving state parameters
        if 'saving_file' not in self.config_options['algorithm']:
            self.config_options['algorithm']['saving_file'] = None
        self._set_algorithm_default('saving_step', 1)

        # Extract parameters to explore
        self._extract_parameters()

        # Make a copy of the simulation options and extract the algorithm section
        self.simulation_options = copy.deepcopy(self.config_options)
        self.simulation_options.pop('algorithm')

        # Initialize the evolutionary algorithm
        self._initialize_algorithm()
        return

    def _extract_parameters(self):
        '''
        Pop every "parameter*" section out of the configuration and validate
        that it fully describes a parameter to explore (section, parameter
        name, min/max values and evolution type).

        @raise Exception On any missing or invalid parameter field.
        '''
        # Extract every parameter to explore
        self.parameter_keys = [key for key in self.config_options.keys()
                               if key.startswith('parameter')]
        self.parameter_dic = []
        for key in self.parameter_keys:
            self.parameter_dic.append(self.config_options.pop(key))
        for key, parameter in zip(self.parameter_keys, self.parameter_dic):
            # Check if the section and parameter exists
            if 'section' not in parameter:
                logger.error('Parameter section has not been specified in %s', key)
                raise Exception('NonSpecifiedSection')
            if parameter['section'] not in self.config_options:
                logger.error('Parameter section %s does not exist',
                             parameter['section'])
                raise Exception('InvalidSection')
            if 'parameter' not in parameter:
                logger.error('Parameter name has not been specified in %s', key)
                raise Exception('NonSpecifiedParameter')
            if parameter['parameter'] not in self.config_options[parameter['section']]:
                logger.error('Parameter %s does not exist in section %s',
                             parameter['parameter'], parameter['section'])
                raise Exception('InvalidParameter')
            if 'min_value' not in parameter:
                logger.error('Parameter minimum values has not been specified in %s',
                             key)
                raise Exception('NonSpecifiedMinValue')
            if 'max_value' not in parameter:
                logger.error('Parameter maximum values has not been specified in %s',
                             key)
                raise Exception('NonSpecifiedMaxValue')
            if 'type' not in parameter:
                logger.error('Parameter evolution type has not been specified in %s',
                             key)
                raise Exception('NonSpecifiedType')
            if parameter['type'] not in ['geometric', 'arithmetic']:
                logger.error('Parameter evolution type %s has not been implemented. Only geometric and arithmetic are allowed so far',
                             parameter['type'])
                raise Exception('InvalidType')

    def _get_operator(self, parameter):
        '''
        Return the DEAP operator mapped to the given operator name.

        @raise Exception Non-MappedOperator if the name is unknown.
        '''
        if parameter in self.operatorTranslatorDict:
            return self.operatorTranslatorDict[parameter]
        logger.error('The operator %s has not been mapped to an operator',
                     parameter)
        raise Exception('Non-MappedOperator')

    def _get_operator_params(self, parameter, dicAlgorithm):
        '''
        Retrieve the keyword parameters required by the named operator from
        the algorithm configuration section, translated to the argument
        names DEAP expects.

        @raise Exception Non-DefinedParameter if a required parameter is
            missing or has no translation.
        '''
        param_dic = dict()
        if parameter in self.operatorParamDict:
            for param in self.operatorParamDict[parameter]:
                if param not in dicAlgorithm:
                    logger.error('The required operator parameter %s has not been set',
                                 param)
                    raise Exception('Non-DefinedParameter')
                if param not in self.paramTranslatorDict:
                    logger.error('The required operator parameter %s has not a translation',
                                 param)
                    raise Exception('Non-DefinedParameter')
                param_dic[self.paramTranslatorDict[param]] = dicAlgorithm[param]
        return param_dic

    def _get_unnormalized_values(self, individual):
        '''
        Map the normalized [0, 1] genes of an individual back to the real
        parameter ranges: linearly for 'arithmetic' parameters and on a
        logarithmic scale for 'geometric' ones.
        '''
        unnorm_values = []
        for norm_value, param_dic in zip(individual, self.parameter_dic):
            min_value = param_dic['min_value']
            max_value = param_dic['max_value']
            if param_dic['type'] == 'arithmetic':
                value = norm_value * (max_value - min_value) + min_value
            elif param_dic['type'] == 'geometric':
                logmin = math.log10(abs(min_value))
                logmax = math.log10(abs(max_value))
                value = 10.0**(norm_value * (logmax - logmin)) * min_value
            unnorm_values.append(value)
        return unnorm_values

    def _eval_fitness_funct(self, individual, seed):
        '''
        Run one simulation with the unnormalized parameters of the
        individual and the given seed, and return the resulting mutual
        information.
        '''
        # Make a copy of the simulation config options
        local_config_options = copy.deepcopy(self.simulation_options)
        # Unnormalize once and reuse for both the simulation and the logs.
        unnorm_values = self._get_unnormalized_values(individual)
        for unnorm, param_dic in zip(unnorm_values, self.parameter_dic):
            local_config_options[param_dic['section']][param_dic['parameter']] = unnorm
        local_config_options['simulation']['seed'] = seed
        logger.info('Running evaluation with seed %s and parameters %s',
                    seed, unnorm_values)
        mutual_information = helper_simulation(local_config_options)
        logger.info('Mutual information with seed %s and parameters %s: %s',
                    seed, unnorm_values, mutual_information)
        return mutual_information

    def _initialize_algorithm(self):
        '''
        Initialize the evolutionary algorithm based on the provided parameters.
        '''
        self.num_generator = numpy.random.RandomState()
        # Create multiobjective optimization (maximize average MI and minimize Std)
        creator.create("FitnessMulti", base.Fitness, weights=(1.0, -1.0e-4))
        # Each individual inherits from list and add the FitnessMulti fitness function
        creator.create("Individual", list, fitness=creator.FitnessMulti)
        # Attribute generator (each attribute will be the normalized value -or the logarithm-)
        toolbox.register("attr_float", self.num_generator.rand)
        # Structure initializers
        toolbox.register("individual", tools.initRepeat, creator.Individual,
                         toolbox.attr_float, len(self.parameter_keys))
        # Population initializers
        toolbox.register("population", tools.initRepeat, list, toolbox.individual)
        # Operator registering
        toolbox.register("evaluate", helper_eval_fitness_funct)
        # Crossover operator
        operator = self._get_operator(
            self.config_options['algorithm']['crossover_operator'])
        paramOperator = self._get_operator_params(
            self.config_options['algorithm']['crossover_operator'],
            self.config_options['algorithm'])
        toolbox.register("mate", operator, **paramOperator)
        # Mutate operator (decorated to keep genes inside the valid bounds)
        operator = self._get_operator(
            self.config_options['algorithm']['mutation_operator'])
        paramOperator = self._get_operator_params(
            self.config_options['algorithm']['mutation_operator'],
            self.config_options['algorithm'])
        toolbox.register("mutate", operator, **paramOperator)
        toolbox.decorate("mutate", checkBounds())
        # Selection operator
        operator = self._get_operator(
            self.config_options['algorithm']['selection_operator'])
        paramOperator = self._get_operator_params(
            self.config_options['algorithm']['selection_operator'],
            self.config_options['algorithm'])
        toolbox.register("select", operator, **paramOperator)

    def _fill_idle_nodes(self, population):
        '''
        Fill all the idle nodes with randomly generated individuals. It
        assumes every simulation to take approximately the same time.
        '''
        # Evaluate the individuals with an invalid fitness
        invalid_ind = [ind for ind in population if not ind.fitness.valid]
        filled_nodes = (
            len(invalid_ind) *
            self.config_options['algorithm']['number_of_repetitions']
        ) % self.config_options['algorithm']['number_of_cores']
        if filled_nodes != 0:
            new_size = int(
                (self.config_options['algorithm']['number_of_cores'] - filled_nodes) /
                self.config_options['algorithm']['number_of_repetitions'])
            logger.info('Adding %s new random individual to fill idle nodes',
                        new_size)
            new_pop = toolbox.population(n=new_size)
            population.extend(new_pop)
        return population

    def _evaluate_population(self, population):
        '''
        Evaluate every individual of the population, repeating each
        evaluation number_of_repetitions times with consecutive seeds, and
        store the (average, std) of the repetitions as the fitness.

        @return The same population, with fitness values assigned.
        '''
        n_rep = self.config_options['algorithm']['number_of_repetitions']
        # Repeat the evaluation as many times as number of repetitions:
        # each individual appears n_rep consecutive times.
        eval_population = []
        for ind in population:
            eval_population.extend([ind] * n_rep)
        seed_population = range(
            self.config_options['simulation']['seed'],
            self.config_options['simulation']['seed'] + n_rep) * len(population)
        self_population = [self] * (n_rep * len(population))
        # Evaluate the entire population
        fitnesses = list(
            toolbox.map(toolbox.evaluate, eval_population, self_population,
                        seed_population))
        # Calculate the average fitnesses. Since the repetitions of each
        # individual are consecutive in `fitnesses`, the reshape must be
        # row-major ('C'); the previous column-major ('F') reshape mixed
        # fitnesses of different individuals when n_rep > 1.
        fit_reshape = numpy.reshape(
            fitnesses, (len(population), n_rep), order='C').tolist()
        for ind, row in zip(population, fit_reshape):
            ind.fitness.values = numpy.average(row), numpy.std(row)
        logger.info("Evaluated %i individuals", len(population))
        return population

    def _save_state(self, generation, halloffame, logbook):
        '''Checkpoint the current evolution state to the configured saving file.'''
        # Fill the dictionary using the dict(key=value[, ...]) constructor
        cp = dict(population=self.population,
                  generation=generation,
                  halloffame=halloffame,
                  logbook=logbook,
                  rndstate=self.num_generator.get_state())
        with open(self.config_options['algorithm']['saving_file'], "wb") as cp_file:
            pickle.dump(cp, cp_file)
        logger.info('Evolution state saved in file %s',
                    self.config_options['algorithm']['saving_file'])

    def execute_search(self):
        '''
        Run the evolutionary search: create (or resume) the population,
        evaluate it and evolve it for the configured number of generations,
        periodically checkpointing the state.
        '''
        # Parameters sequence
        param_names = [param_dic['section'] + '.' + param_dic['parameter']
                       for param_dic in self.parameter_dic]

        # Initialize stats
        stats = tools.Statistics(lambda ind: ind.fitness.values)
        stats.register("avg", numpy.average, axis=0)
        stats.register("max", numpy.max, axis=0)
        stats.register("min", numpy.min, axis=0)
        stats.register("std", numpy.std, axis=0)

        # Initialize mapping to distribute the evaluations to the workers
        toolbox.register("map", futures.map)

        # Load the previous algorithm state
        if self.config_options['algorithm']['load_from_file']:
            # Open in binary mode: the checkpoint is dumped with "wb" below.
            with open(self.config_options['algorithm']['load_from_file'],
                      "rb") as cp_file:
                cp = pickle.load(cp_file)
            self.population = cp["population"]
            start_gen = cp["generation"] + 1
            halloffame = cp["halloffame"]
            logbook = cp["logbook"]
            self.num_generator.set_state(cp["rndstate"])
        else:
            self.num_generator.seed()
            self.population = toolbox.population(
                n=self.config_options['algorithm']['number_of_individual'])
            start_gen = 0
            halloffame = tools.HallOfFame(
                maxsize=self.config_options['algorithm']['hall_of_fame_size'])
            logbook = tools.Logbook()
            logbook.header = "gen", "evals", "fitness"
            logbook.chapters["fitness"].header = "avg", "max", "min", "std"

        # Fill idle nodes with new random individual
        if self.config_options['algorithm']['fill_idle_nodes']:
            # Previously referenced an undefined local `population`.
            self.population = self._fill_idle_nodes(self.population)

        logger.debug("Start of evolution")
        # Evaluate in place; the previous code rebound self.population to
        # the (None) return value of _evaluate_population.
        self._evaluate_population(self.population)
        halloffame.update(self.population)
        record = stats.compile(self.population)
        logbook.record(gen=0, evals=len(self.population), **record)

        # Saving evolution state
        if self.config_options['algorithm']['saving_file']:
            self._save_state(0, halloffame, logbook)

        logger.info('Parameter sequence: %s', param_names)
        logger.info('Hall of Fame:')
        for ind in halloffame:
            logger.info('Individual: %s. Fitness: %s',
                        self._get_unnormalized_values(ind), ind.fitness.values)

        start_gen += 1
        # Begin the evolution
        for gen in range(start_gen,
                         self.config_options['algorithm']['number_of_generations']):
            logger.info("Generation %i", gen)
            # Select the next generation individuals
            offspring = toolbox.select(
                self.population,
                k=self.config_options['algorithm']['number_of_individual'])
            # Clone the selected individuals
            offspring = list(map(toolbox.clone, offspring))

            # Apply crossover and mutation on the offspring
            for child1, child2 in zip(offspring[::2], offspring[1::2]):
                if self.num_generator.rand() < \
                        self.config_options['algorithm']['crossover_probability']:
                    toolbox.mate(child1, child2)
                    del child1.fitness.values
                    del child2.fitness.values
            for mutant in offspring:
                if self.num_generator.rand() < \
                        self.config_options['algorithm']['mutation_probability']:
                    toolbox.mutate(mutant, rand_generator=self.num_generator)
                    del mutant.fitness.values

            # Fill idle nodes with new random individual
            if self.config_options['algorithm']['fill_idle_nodes']:
                offspring = self._fill_idle_nodes(offspring)

            # Evaluate the individuals with an invalid fitness
            invalid_ind = [ind for ind in offspring if not ind.fitness.valid]
            self._evaluate_population(invalid_ind)

            # The population is extended with the offspring
            self.population.extend(invalid_ind)
            halloffame.update(self.population)
            record = stats.compile(self.population)
            logbook.record(gen=gen, evals=len(invalid_ind), **record)

            # Saving evolution state
            if self.config_options['algorithm']['saving_file'] and \
                    gen % self.config_options['algorithm']['saving_step'] == 0:
                self._save_state(gen, halloffame, logbook)

            logger.info('Statistics in generation %s. %s evaluations', gen,
                        len(invalid_ind))
            for key, value in record.items():
                logger.info('%s: %s', key, value)
            logger.info('Parameter sequence: %s', param_names)
            best_ind = tools.selBest(self.population, 1)[0]
            logger.info("Best individual in current population is %s, %s",
                        self._get_unnormalized_values(best_ind),
                        best_ind.fitness.values)
            logger.info('Hall of Fame:')
            for ind in halloffame:
                logger.info('Individual: %s. Fitness: %s',
                            self._get_unnormalized_values(ind),
                            ind.fitness.values)
        logger.info("-- End of (successful) evolution --")
def initialize_searcher(self):
    '''
    Initialize all the objects needed for running the simulation.

    Reads the configuration file, fills every undefined "simulation",
    "launcher" and "nest" option with its default value, and selects the
    launching function (qsub / MPI / serial) used by execute_search.

    @raise Exception Non-DefinedParallelEnvironment or Non-DefinedQueueName
        when qsub submission is requested without the required settings.
    '''
    logger.info('Parsing configuration file %s', self.config_file)
    self.config_options = ReadConfigFile(self.config_file)
    if 'simulation' in self.config_options:
        # A truthy 'debug' flag raises the logger to DEBUG level.
        if 'debug' in self.config_options[
                'simulation'] and self.config_options['simulation'][
                    'debug']:
            logger.setLevel(logging.DEBUG)
        # Set important undefined options
        if 'visualize_results' not in self.config_options['simulation']:
            self.config_options['simulation']['visualize_results'] = False
        if 'seed' not in self.config_options['simulation']:
            # NOTE(review): bare time() call — assumes 'from time import time'
            # at module level; other methods in this file use time.time().
            # Confirm which import the module actually performs.
            self.config_options['simulation']['seed'] = time()
    if 'launcher' not in self.config_options:
        self.config_options['launcher'] = dict()
    if 'number_of_repetitions' not in self.config_options['launcher']:
        self.config_options['launcher']['number_of_repetitions'] = 1
    # By default 1 process is used for the simulations
    if 'num_mpi_processes' not in self.config_options['launcher']:
        self.config_options['launcher']['num_mpi_processes'] = 1
    # By default qsub is not enabled. Launcher precedence:
    # qsub (if enabled) > MPI (if more than 1 process) > serial.
    if 'use_qsub' in self.config_options[
            'launcher'] and self.config_options['launcher']['use_qsub']:
        self.launch_funct = self._launch_qsub_simulation
    elif self.config_options['launcher']['num_mpi_processes'] > 1:
        self.config_options['launcher']['use_qsub'] = False
        self.launch_funct = self._launch_mpi_simulation
    else:
        self.config_options['launcher']['use_qsub'] = False
        self.launch_funct = self._launch_serial_simulation
    # By default 1 thread is used for the simulations
    if 'num_omp_threads' not in self.config_options['launcher']:
        self.config_options['launcher']['num_omp_threads'] = 1
    # Parallel qsub jobs (more than one OMP thread or MPI process) need an
    # explicit parallel environment to be submitted with.
    if 'parallel_environment' not in self.config_options['launcher'] and self.config_options['launcher']['use_qsub'] and \
            (self.config_options['launcher']['num_omp_threads'] > 1 or self.config_options['launcher']['num_mpi_processes'] > 1):
        logger.error(
            'Non-specified parallel environment for qsub job submission')
        raise Exception('Non-DefinedParallelEnvironment')
    # qsub submission also requires an explicit queue name.
    if 'queue_name' not in self.config_options[
            'launcher'] and self.config_options['launcher']['use_qsub']:
        logger.error('Non-specified queue name for qsub job submission')
        raise Exception('Non-DefinedQueueName')
    if 'mpi_host_file' not in self.config_options['launcher']:
        self.config_options['launcher']['mpi_host_file'] = None
    if 'mpi_launcher' not in self.config_options['launcher']:
        self.config_options['launcher']['mpi_launcher'] = 'mpirun'
    if 'python_exec' not in self.config_options['launcher']:
        self.config_options['launcher']['python_exec'] = 'python'
    if 'nest' not in self.config_options:
        self.config_options['nest'] = dict()
    # NEST defaults to one virtual process per MPI process.
    if 'number_of_virtual_processes' not in self.config_options['nest']:
        self.config_options['nest'][
            'number_of_virtual_processes'] = self.config_options[
                'launcher']['num_mpi_processes']
    return
class ParameterSearch(object): ''' This class defines launch succesive simulations to explore one or more parameters. ''' def __init__(self, **kwargs): ''' Constructor of the class. It creates a new simulation object. @param config_file Name of the file with the options of the model. ''' if ('config_file' in kwargs): self.config_file = kwargs.pop('config_file') else: logger.error('Non-specified simulation config file') raise Exception('Non-DefinedSimulationConfig') super(ParameterSearch, self).__init__(**kwargs) return def initialize_searcher(self): ''' Initialize all the objects needed for running the simulation. ''' logger.info('Parsing configuration file %s', self.config_file) self.config_options = ReadConfigFile(self.config_file) if 'simulation' in self.config_options: if 'debug' in self.config_options[ 'simulation'] and self.config_options['simulation'][ 'debug']: logger.setLevel(logging.DEBUG) # Set important undefined options if 'visualize_results' not in self.config_options['simulation']: self.config_options['simulation']['visualize_results'] = False if 'seed' not in self.config_options['simulation']: self.config_options['simulation']['seed'] = time() if 'launcher' not in self.config_options: self.config_options['launcher'] = dict() if 'number_of_repetitions' not in self.config_options['launcher']: self.config_options['launcher']['number_of_repetitions'] = 1 # By default 1 process is used for the simulations if 'num_mpi_processes' not in self.config_options['launcher']: self.config_options['launcher']['num_mpi_processes'] = 1 # By default qsub is not enabled if 'use_qsub' in self.config_options[ 'launcher'] and self.config_options['launcher']['use_qsub']: self.launch_funct = self._launch_qsub_simulation elif self.config_options['launcher']['num_mpi_processes'] > 1: self.config_options['launcher']['use_qsub'] = False self.launch_funct = self._launch_mpi_simulation else: self.config_options['launcher']['use_qsub'] = False self.launch_funct = 
self._launch_serial_simulation # By default 1 thread is used for the simulations if 'num_omp_threads' not in self.config_options['launcher']: self.config_options['launcher']['num_omp_threads'] = 1 if 'parallel_environment' not in self.config_options['launcher'] and self.config_options['launcher']['use_qsub'] and \ (self.config_options['launcher']['num_omp_threads'] > 1 or self.config_options['launcher']['num_mpi_processes'] > 1): logger.error( 'Non-specified parallel environment for qsub job submission') raise Exception('Non-DefinedParallelEnvironment') if 'queue_name' not in self.config_options[ 'launcher'] and self.config_options['launcher']['use_qsub']: logger.error('Non-specified queue name for qsub job submission') raise Exception('Non-DefinedQueueName') if 'mpi_host_file' not in self.config_options['launcher']: self.config_options['launcher']['mpi_host_file'] = None if 'mpi_launcher' not in self.config_options['launcher']: self.config_options['launcher']['mpi_launcher'] = 'mpirun' if 'python_exec' not in self.config_options['launcher']: self.config_options['launcher']['python_exec'] = 'python' if 'nest' not in self.config_options: self.config_options['nest'] = dict() if 'number_of_virtual_processes' not in self.config_options['nest']: self.config_options['nest'][ 'number_of_virtual_processes'] = self.config_options[ 'launcher']['num_mpi_processes'] return def _extract_parameters(self): # Extract every parameter to explore self.parameter_keys = [ key for key in self.config_options.keys() if key.startswith('parameter') ] self.parameter_dic = [] for key in self.parameter_keys: self.parameter_dic.append(self.config_options.pop(key)) for key, parameter in zip(self.parameter_keys, self.parameter_dic): # Check if the section and parameter exists if not 'section' in parameter: logger.error('Parameter section has not been specified in %s', key) raise Exception('NonSpecifiedSection') if parameter['section'] not in self.config_options: logger.error('Parameter section %s 
does not exist', parameter['section']) raise Exception('InvalidSection') if not 'parameter' in parameter: logger.error('Parameter name has not been specified in %s', key) raise Exception('NonSpecifiedParameter') if parameter['parameter'] not in self.config_options[ parameter['section']]: logger.error('Parameter %s does not exist in section %s', parameter['parameter'], parameter['section']) raise Exception('InvalidParameter') if not 'min_value' in parameter: logger.error( 'Parameter minimum values has not been specified in %s', key) raise Exception('NonSpecifiedMinValue') if not 'max_value' in parameter: logger.error( 'Parameter maximum values has not been specified in %s', key) raise Exception('NonSpecifiedMaxValue') if not 'step' in parameter: logger.error('Parameter step has not been specified in %s', key) raise Exception('NonSpecifiedStep') if not 'type' in parameter: logger.error( 'Parameter evolution type has not been specified in %s', key) raise Exception('NonSpecifiedType') if parameter['type'] not in ['geometric', 'arithmetic']: logger.error( 'Parameter evolution type %s has not been implemented. 
Only geometric and arithmetic are allowed so far', parameter['type']) raise Exception('InvalidType') def _generate_config_dicts(self): self._extract_parameters() # Generate the combinations of values value_list = [] for key, parameter in zip(self.parameter_keys, self.parameter_dic): if parameter['type'] == 'geometric': # Geometric series min_exponent = math.log(parameter['min_value'], parameter['step']) max_exponent = math.log(parameter['max_value'], parameter['step']) values = list( numpy.logspace(min_exponent, max_exponent, num=int(max_exponent - min_exponent) + 1, base=parameter['step'])) elif parameter['type'] == 'arithmetic': # Arithmetic series values = list( numpy.linspace( parameter['min_value'], parameter['max_value'], num=(parameter['max_value'] - parameter['min_value']) / float(parameter['step']) + 1)) value_list.append(values) if len(value_list): # Generate the combinations of values combinations = list(itertools.product(*value_list)) simulation_options = list() for tuple_act in combinations: for seed in range( self.config_options['simulation']['seed'], self.config_options['simulation']['seed'] + self.config_options['launcher'] ['number_of_repetitions']): # Copy the dictionary and change every single parameter options_copy = copy.deepcopy(self.config_options) logger.info( 'Setting parameters to the following values: %s', tuple_act) logger.debug('Using seed %s', seed) options_copy['simulation']['seed'] = seed sim_name = '_' + str(seed) for param_dic, value in zip(self.parameter_dic, tuple_act): options_copy[param_dic['section']][ param_dic['parameter']] = value sim_name += '_' + str(value) options_copy['simulation']['simulation_name'] += sim_name simulation_options.append(options_copy) else: simulation_options = [options_copy] return simulation_options def execute_search(self): ''' Initialize all the objects needed for running the simulation. 
''' simulation_options = self._generate_config_dicts() self.launch_funct(config_options=simulation_options) def _launch_serial_simulation(self, config_options): ''' Launch a new simulation according to the proposed method. ''' import FrequencySimulation for index, config in enumerate(config_options): logger.debug('Writing configuration file for job %s', index) self._save_configuration_file(config) logger.info('Launching simulation %s of %s', index, len(config_options)) # Create, initialize and launch the simulation logger.debug('Creating the simulation object') simulation = FrequencySimulation.FrequencySimulation( config_options=config) logger.info('Initializing the simulation') simulation.initialize() logger.info('Running the simulation') if self.config_options['simulation']['visualize_results']: simulation.visualize_results() else: simulation.run_simulation() logger.info('Simulation ended') logger.info('Analyzing results') simulation.analyze_results() logger.info('Analysis ended') def _launch_mpi_simulation(self, config_options): ''' Launch a new simulation according to the proposed method. @param index: Index of the simulation. 
@param config_options List of dictionaries with the configuration to be used for the simulation ''' for index, config in enumerate(config_options): logger.debug('Writing configuration file for job %s', index) file_name = self._save_configuration_file(config) mpi_command = [] # mpi_command.append(config_options['launcher']['mpi_launcher']) # mpi_command.append('-np') # mpi_command.append(str(config_options['launcher']['num_mpi_processes'])) mpi_command.append(str(config['launcher']['python_exec'])) mpi_command.append('./src/LaunchSimulation.py') mpi_command.append('-c ' + file_name) # Create, initialize and launch the simulation logger.info('Calling MPI process for simulation %s of %s', index, len(config_options)) logger.debug(mpi_command) subprocess.call(mpi_command) logger.info('Simulation ended') def _get_configuration_file_name(self, config_options): ''' Generate the name of the configuration file. ''' # Create configuration file if 'data_path' in config_options['simulation']: data_path = config_options['simulation']['data_path'] else: data_path = './results' config_options['simulation']['data_path'] = data_path if 'simulation_name' in config_options['simulation']: data_path += '/' + config_options['simulation']['simulation_name'] if not os.path.exists(data_path): logger.info('Creating result folder %s', data_path) os.makedirs(data_path) file_name = data_path + '/SimulationConfig.cfg' return file_name def _save_configuration_file(self, config_options): ''' Create the configuration file according to the config_options parameters. @param config_options Dictionary with the configuration to be used for the simulation ''' file_name = self._get_configuration_file_name(config_options) if os.path.isfile(file_name): logger.warning( 'A configuration file %s already exists. 
It will overwrite that simulation file', file_name) logger.debug('Writing configuration file %s', file_name) WriteConfigFile(config_options, file_name) return file_name def _launch_qsub_simulation(self, config_options): ''' Launch a qsub job to run a simulation by using config_options parameters. @param config_options List of dictionaries with the configuration to be used for the simulation ''' job_table_file = self.config_options['simulation'][ 'data_path'] + '/' + self.config_options['simulation'][ 'simulation_name'] + '.txt' f = open(job_table_file, 'w') for index, config in enumerate(config_options): if 'simulation_name' not in config['simulation']: config['simulation']['simulation_name'] = 'job' + str(index) logger.debug('Writing configuration file for job %s', index) file_name = self._save_configuration_file(config) # Writhe the file name into the job array table f.write(file_name + '\n') # python will convert \n to os.linesep f.close() # you can omit in most cases as the destructor will call if # Create the job submission script buf = '#!/bin/sh\n' buf += '#$ -S /bin/sh\n' buf += '#$ -t 1-' + str(len(config_options)) + '\n' buf += '#$ -N ' + self.config_options['simulation'][ 'simulation_name'] + '\n' buf += '#$ -o ' + self.config_options['simulation']['data_path'] + '/\n' # buf += '#$ -M [email protected]\n' # buf += '#$ -m ae\n' buf += '#$ -j y\n' buf += '#$ -cwd\n' buf += '#$ -V\n' buf += '#$ -v OMP_NUM_THREADS=' + str( self.config_options['launcher']['num_omp_threads']) + '\n' buf += '#$ -q ' + self.config_options['launcher']['queue_name'] + '\n' # buf += '#$ -l ' + self.config_options['launcher']['queue_name'] + ',h_rt=2:00:00,h_cpu=2:00:00\n' # Set maximum cpu time to 12 hours if (self.config_options['launcher']['num_omp_threads'] > 1 or self.config_options['launcher']['num_mpi_processes'] > 1): buf += '#$ -pe ' + self.config_options['launcher'][ 'parallel_environment'] + ' ' + str( self.config_options['launcher']['num_omp_threads'] * self. 
config_options['launcher']['num_mpi_processes']) + '\n' buf += '\nPARAM_FILE=' + job_table_file + '\n' buf += 'PARAM=$(cat $PARAM_FILE | head -n $SGE_TASK_ID | tail -n 1)\n\n' buf += 'mpirun -n ' + str( self.config_options['launcher']['num_mpi_processes'] ) + ' -ppn 1 ' + self.config_options['launcher'][ 'python_exec'] + ' ./src/LaunchSimulation.py -c $PARAM\n' logger.debug('Generated qsub script:') logger.debug(buf) logger.info('Launching qsub command') # Open a pipe to the qsub command. output, inputstr = popen2('qsub') # Send job_string to qsub inputstr.write(buf) inputstr.close() # Print your job and the response to the screen logger.info(output.read())
def _initialize_master(self): ''' This function initializes the master process in the evolutionary algorithm. The master is in charge of providing individual to the workers. Thus, the master reads the algorithm configuration. ''' logger.info('Parsing configuration file %s', self.config_file) self.config_options = ReadConfigFile(self.config_file) if 'algorithm' not in self.config_options: self.config_options['algorithm'] = dict() self._initialize_logger() if 'number_of_repetitions' not in self.config_options['algorithm']: self.config_options['algorithm']['number_of_repetitions'] = 1 # Loading from file if 'load_from_file' not in self.config_options['algorithm']: self.config_options['algorithm']['load_from_file'] = None # Initialize the simulation seeds if they have not been initialized if 'simulation' not in self.config_options: self.config_options['simulation'] = dict() if 'seed' not in self.config_options['simulation']: self.config_options['simulation']['seed'] = time.time() self._extract_parameters() # Saving state parameters if 'saving_file' not in self.config_options['algorithm']: self.config_options['algorithm']['saving_file'] = None else: # Remove saving file if self.config_options['algorithm'][ 'load_from_file'] is None and os.path.isfile( self.config_options['algorithm']['saving_file']): logger.info('Removing existing result file %s', self.config_options['algorithm']['saving_file']) os.remove(self.config_options['algorithm']['saving_file']) param_names = '# ' for param_dic in self.parameter_dic: param_names = param_names + param_dic[ 'section'] + '.' 
+ param_dic['parameter'] + '\t' param_names = param_names + 'Av.\tStd.\n' if self.config_options['algorithm']['saving_file'] is not None: with open(self.config_options['algorithm']['saving_file'], 'a') as fileid: logger.debug('Writing file header in the new file') fileid.write(param_names) self.population = self._generate_config_tuples() self.population = self._extract_finished_tuples(self.population) # Initialize communication manager self.simulationQueue = Queue.Queue() self.completeQueue = Queue.Queue() self.managerThread = threading.Thread( target=self._manage_communications) return
class ParameterSearch(object):
    '''
    This class implements exhaustive search into the parameter space taken
    from the configuration file passed as a parameter.
    '''

    def __init__(self, **kwargs):
        '''
        Constructor of the class. It creates a new object.

        @param config_file Name of the file with the options of the model.
        '''
        logger = logging.getLogger('ParameterSearch')
        if ('config_file' in kwargs):
            self.config_file = kwargs.pop('config_file')
        else:
            logger.error('Non-specified simulation config file')
            raise Exception('Non-DefinedSimulationConfig')
        super(ParameterSearch, self).__init__(**kwargs)
        return

    def initialize_searcher(self):
        '''
        Initialize all the objects needed for running the simulation.
        '''
        self.comm = MPI.COMM_WORLD
        self.mpi_rank = self.comm.Get_rank()
        self.mpi_size = self.comm.Get_size()
        # Bug fix: fail before initializing the master/worker so that no
        # side effect (such as removing an existing result file) happens
        # when the process layout is unusable. The original code raised
        # only after the initialization had already run.
        if (self.mpi_size == 1):
            logger.error(
                "MPI Error. Only one MPI process has been created. No workers defined."
            )
            raise Exception('MPIError')
        if (self.mpi_rank == 0):
            self._initialize_master()
        else:
            self._initialize_worker()
        return

    def _initialize_logger(self):
        '''
        Configure the module logger from the algorithm section options:
        optional log file and verbosity level.
        '''
        if 'log_file' in self.config_options['algorithm']:
            Logger2File(logger, self.config_options['algorithm']['log_file'])
        if 'verbosity' not in self.config_options['algorithm']:
            self.config_options['algorithm']['verbosity'] = 'debug'
            logger.warning(
                'Non-specified simulation verbosity. Using default value %s',
                self.config_options['algorithm']['verbosity'])
        numeric_level = getattr(
            logging, self.config_options['algorithm']['verbosity'].upper(),
            None)
        if not isinstance(numeric_level, int):
            # Bug fix: the original code fell back to the default level and
            # then raised ValueError anyway, making the fallback dead code.
            # Recover with the default level, as the warning states.
            self.config_options['algorithm']['verbosity'] = 'info'
            numeric_level = getattr(
                logging,
                self.config_options['algorithm']['verbosity'].upper(), None)
            logger.warning(
                'Invalid simulation verbosity. Using default value %s',
                self.config_options['algorithm']['verbosity'])
        logger.setLevel(numeric_level)

    def _initialize_master(self):
        '''
        This function initializes the master process in the evolutionary
        algorithm. The master is in charge of providing individuals to the
        workers. Thus, the master reads the algorithm configuration.
        '''
        logger.info('Parsing configuration file %s', self.config_file)
        self.config_options = ReadConfigFile(self.config_file)
        if 'algorithm' not in self.config_options:
            self.config_options['algorithm'] = dict()
        self._initialize_logger()
        if 'number_of_repetitions' not in self.config_options['algorithm']:
            self.config_options['algorithm']['number_of_repetitions'] = 1
        # Loading from file
        if 'load_from_file' not in self.config_options['algorithm']:
            self.config_options['algorithm']['load_from_file'] = None
        # Initialize the simulation seeds if they have not been initialized
        if 'simulation' not in self.config_options:
            self.config_options['simulation'] = dict()
        if 'seed' not in self.config_options['simulation']:
            # Bug fix: time.time() returns a float, but the seed is later
            # used as a range() boundary (one seed per repetition), which
            # requires an integer.
            self.config_options['simulation']['seed'] = int(time.time())
        self._extract_parameters()
        # Saving state parameters
        if 'saving_file' not in self.config_options['algorithm']:
            self.config_options['algorithm']['saving_file'] = None
        else:
            # Remove an existing result file unless we are resuming from it
            if self.config_options['algorithm'][
                    'load_from_file'] is None and os.path.isfile(
                        self.config_options['algorithm']['saving_file']):
                logger.info('Removing existing result file %s',
                            self.config_options['algorithm']['saving_file'])
                os.remove(self.config_options['algorithm']['saving_file'])
        # Build the header line with one column per explored parameter plus
        # the average and standard deviation of the fitness
        param_names = '# '
        for param_dic in self.parameter_dic:
            param_names = param_names + param_dic[
                'section'] + '.' + param_dic['parameter'] + '\t'
        param_names = param_names + 'Av.\tStd.\n'
        if self.config_options['algorithm']['saving_file'] is not None:
            with open(self.config_options['algorithm']['saving_file'],
                      'a') as fileid:
                logger.debug('Writing file header in the new file')
                fileid.write(param_names)
        self.population = self._generate_config_tuples()
        self.population = self._extract_finished_tuples(self.population)
        # Initialize communication manager
        self.simulationQueue = Queue.Queue()
        self.completeQueue = Queue.Queue()
        self.managerThread = threading.Thread(
            target=self._manage_communications)
        return

    def _initialize_worker(self):
        '''
        This function initializes the worker process in the evolutionary
        algorithm. The workers are in charge of running the simulations
        with the parameters received from the master.
        '''
        logger.info('Parsing configuration file %s', self.config_file)
        self.config_options = ReadConfigFile(self.config_file)
        if 'algorithm' not in self.config_options:
            self.config_options['algorithm'] = dict()
        self._initialize_logger()
        if 'simulation' not in self.config_options:
            self.config_options['simulation'] = dict()
        # Set important undefined options
        if 'visualize_results' not in self.config_options['simulation']:
            self.config_options['simulation']['visualize_results'] = False
        if 'seed' not in self.config_options['simulation']:
            # Bug fix: keep the seed an integer (see _initialize_master)
            self.config_options['simulation']['seed'] = int(time.time())
        # Extract parameters to explore
        self._extract_parameters()
        # Make a copy of the simulation options and extract the algorithm
        # section
        self.simulation_options = copy.deepcopy(self.config_options)
        self.simulation_options.pop('algorithm')
        return

    def _extract_parameters(self):
        '''
        Extract every explored parameter (top-level sections whose name
        starts with "parameter") from the configuration, and validate that
        each one defines an existing section/parameter pair, the minimum
        and maximum values, the number of values and the series type.

        @raise Exception When any of the required fields is missing or
        invalid.
        '''
        # Extract every parameter to explore
        self.parameter_keys = [
            key for key in self.config_options.keys()
            if key.startswith('parameter')
        ]
        self.parameter_dic = []
        for key in self.parameter_keys:
            self.parameter_dic.append(self.config_options.pop(key))
        for key, parameter in zip(self.parameter_keys, self.parameter_dic):
            # Check if the section and parameter exists
            if not 'section' in parameter:
                logger.error('Parameter section has not been specified in %s',
                             key)
                raise Exception('NonSpecifiedSection')
            if parameter['section'] not in self.config_options:
                logger.error('Parameter section %s does not exist',
                             parameter['section'])
                raise Exception('InvalidSection')
            if not 'parameter' in parameter:
                logger.error('Parameter name has not been specified in %s',
                             key)
                raise Exception('NonSpecifiedParameter')
            if parameter['parameter'] not in self.config_options[
                    parameter['section']]:
                logger.error('Parameter %s does not exist in section %s',
                             parameter['parameter'], parameter['section'])
                raise Exception('InvalidParameter')
            if not 'min_value' in parameter:
                logger.error(
                    'Parameter minimum values has not been specified in %s',
                    key)
                raise Exception('NonSpecifiedMinValue')
            if not 'max_value' in parameter:
                logger.error(
                    'Parameter maximum values has not been specified in %s',
                    key)
                raise Exception('NonSpecifiedMaxValue')
            if not 'num_values' in parameter:
                logger.error(
                    'Parameter number of values has not been specified in %s',
                    key)
                raise Exception('NonSpecifiedNumValues')
            if not 'type' in parameter:
                logger.error(
                    'Parameter evolution type has not been specified in %s',
                    key)
                raise Exception('NonSpecifiedType')
            if parameter['type'] not in ['geometric', 'arithmetic']:
                logger.error(
                    'Parameter evolution type %s has not been implemented. Only geometric and arithmetic are allowed so far',
                    parameter['type'])
                raise Exception('InvalidType')

    def _generate_config_tuples(self):
        '''
        Generate the population to evaluate: the cartesian product of the
        normalized values (num_values points in [0, 1]) of every explored
        parameter.

        @return A list of tuples with the normalized parameter values.
        '''
        value_list = []
        for key, parameter in zip(self.parameter_keys, self.parameter_dic):
            # Normalized arithmetic series; the real values are recovered
            # later with _get_unnormalized_values
            values = list(
                numpy.linspace(0.0, 1.0, num=parameter['num_values']))
            value_list.append(values)
        if len(value_list):
            # Generate the combinations of values
            combinations = list(itertools.product(*value_list))
        else:
            combinations = list()
        return combinations

    def _extract_finished_tuples(self, population):
        '''
        Remove from the population those individuals whose results are
        already stored in the load_from_file file (resuming a previous
        search).

        @param population List of tuples with normalized parameter values.
        @return The population without the already-simulated individuals.
        '''
        if self.config_options['algorithm'][
                'load_from_file'] is not None and os.path.isfile(
                    self.config_options['algorithm']['load_from_file']):
            loaded_values = [
                tuple(row) for row in numpy.loadtxt(
                    self.config_options['algorithm']['load_from_file'],
                    usecols=tuple(range(len(self.parameter_keys))))
            ]
            unnorm_population = [
                tuple(self._get_unnormalized_values(individual))
                for individual in population
            ]
            for row in loaded_values:
                RemIndex = -1
                for index, ind in enumerate(unnorm_population):
                    # Compare with a relative tolerance to absorb the
                    # text-file round trip of the stored values
                    if all(
                            abs((val1 - val2) / val1) < 1e-5
                            for val1, val2 in zip(row, ind)):
                        logger.debug(
                            '%s simulation is already loaded from the file. Removing simulation',
                            row)
                        # Bug fix: replaced a leftover Python 2 "print
                        # index" debug statement with a logger call
                        logger.debug('Removing simulation at index %s',
                                     index)
                        RemIndex = index
                        break
                if RemIndex != -1:
                    del unnorm_population[RemIndex]
                    del population[RemIndex]
        return population

    def _get_unnormalized_values(self, individual):
        '''
        Map the normalized values in [0, 1] of an individual to the real
        range of each explored parameter (linearly for arithmetic series,
        logarithmically for geometric series).

        @param individual Sequence of normalized parameter values.
        @return A list with the unnormalized parameter values.
        '''
        unnorm_values = []
        for norm_value, param_dic in zip(individual, self.parameter_dic):
            min_value = param_dic['min_value']
            max_value = param_dic['max_value']
            if param_dic['type'] == 'arithmetic':
                value = norm_value * (max_value - min_value) + min_value
            elif param_dic['type'] == 'geometric':
                logmin = math.log10(abs(min_value))
                logmax = math.log10(abs(max_value))
                value = 10.0**(norm_value * (logmax - logmin)) * min_value
            unnorm_values.append(value)
        return unnorm_values

    def _eval_fitness_funct(self, individual, seed):
        '''
        Run one simulation with the unnormalized parameter values of the
        individual and the given seed.

        @param individual Sequence of normalized parameter values.
        @param seed Seed of the simulation.
        @return The mutual information obtained from the simulation.
        '''
        # Make a copy of the simulation config options
        local_config_options = copy.deepcopy(self.simulation_options)
        unnorm_values = self._get_unnormalized_values(individual)
        for unnorm, param_dic in zip(unnorm_values, self.parameter_dic):
            local_config_options[param_dic['section']][
                param_dic['parameter']] = unnorm
        local_config_options['simulation']['seed'] = seed
        logger.info('Running evaluation with seed %s and parameters %s',
                    seed, self._get_unnormalized_values(individual))
        mutual_information = helper_simulation(local_config_options)
        logger.info('Mutual information with seed %s and parameters %s: %s',
                    seed, self._get_unnormalized_values(individual),
                    mutual_information)
        return mutual_information

    def _evaluate_population(self, population):
        '''
        Queue the population for simulation and block until every
        individual has been evaluated.

        @param population List of tuples with normalized parameter values.
        @return The evaluated population.
        '''
        # Insert the population into the simulation queue and unlock it
        self.simulationQueue.put(population)
        self.simulationQueue.task_done()
        self.end_simulation = self.last_generation
        logger.info("Evaluating %i individuals", len(population))
        return self.completeQueue.get()

    def _manage_communications(self):
        '''
        This function manages the simulation queue, sending the simulations
        to the other MPI processes. It manages the two simulation queues
        (SimulationQueue -jobs to be done- and CompleteQueue -jobs
        finished-). This function is thought to be executed in a separate
        thread of the master process.
        '''
        # Initialize SimulationMap and RunningDict
        simulationMap = dict()
        availableProcs = range(1, self.mpi_size)
        endedProcs = []
        for ind in availableProcs:
            simulationMap[ind] = None
        runningDict = dict()
        # List with the simulations to be executed in this "batch"
        simulationList = []
        status = MPI.Status()
        population_size = 0
        output_population = []
        data = numpy.empty(1, dtype=numpy.float64)
        # Non-blocking reception of fitness values from any worker
        request = self.comm.Irecv([data, MPI.DOUBLE],
                                  source=MPI.ANY_SOURCE,
                                  tag=MPI.ANY_TAG)
        # This loop ends when every worker process has been finished
        while (len(endedProcs) != (self.mpi_size - 1)):
            Job_finished = request.Test(status)
            if Job_finished or (not availableProcs):
                if (not Job_finished):
                    logger.debug('Waiting for something finished')
                    request.Wait(status)
                # There is at least one simulation finished
                mpi_process = status.Get_source()
                tuple_ind = simulationMap[mpi_process]
                logger.debug(
                    '%s mutual information has been received from %s: %s',
                    tuple_ind, mpi_process, data[0])
                if tuple_ind not in runningDict:
                    logger.warning('Error in data received from process %s',
                                   mpi_process)
                    logger.warning('%s not exist in runningDict %s',
                                   tuple_ind, runningDict)
                else:
                    runningDict[tuple_ind].append(data[0])
                    # If all the simulations with these parameters are done,
                    # get the average and std
                    if (len(runningDict[tuple_ind]) == self.config_options[
                            'algorithm']['number_of_repetitions']):
                        avMI = numpy.average(
                            runningDict[tuple_ind]), numpy.std(
                                runningDict[tuple_ind])
                        logger.debug(
                            'Fitness value calculated for individual %s: %s',
                            tuple_ind, avMI)
                        unnorm_val = self._get_unnormalized_values(tuple_ind)
                        unnorm_val.extend(list(avMI))
                        if self.config_options['algorithm'][
                                'saving_file'] is not None:
                            with open(
                                    self.config_options['algorithm']
                                    ['saving_file'], 'a') as fileid:
                                logger.debug(
                                    'Saving fitness value calculated to %s',
                                    self.config_options['algorithm']
                                    ['saving_file'])
                                numpy.savetxt(fileid,
                                              numpy.transpose(unnorm_val),
                                              fmt="%.15e",
                                              delimiter="\t",
                                              newline='\t')
                                fileid.write('\n')
                        output_population.append(tuple_ind)
                        runningDict.pop(tuple_ind)
                        logger.debug(
                            '%s extracted from the running dictionary',
                            tuple_ind)
                        # Check the number of individuals to finish before
                        # unlocking the EA
                        population_size -= 1
                        if population_size == 0:
                            self.completeQueue.put(output_population)
                            output_population = []
                            self.completeQueue.task_done()
                            logger.debug(
                                'Simulation batch has been finished')
                # Set the process as available
                simulationMap[mpi_process] = None
                availableProcs.append(mpi_process)
                # Restart the non-blocking reception
                request = self.comm.Irecv([data, MPI.DOUBLE],
                                          source=MPI.ANY_SOURCE,
                                          tag=MPI.ANY_TAG)
            elif availableProcs:
                # There are available processors
                if simulationList:
                    # There are simulations ready to be launched. Extract
                    # the first simulation and the first available process
                    parameters, cur_seed = simulationList.pop(0)
                    proc_rank = availableProcs.pop(0)
                    param_tuple = tuple(parameters)
                    if param_tuple not in runningDict:
                        runningDict[param_tuple] = []
                        logger.debug('%s inserted in the running dictionary',
                                     param_tuple)
                    simulationMap[proc_rank] = param_tuple
                    # Send through MPI the parameters and the seed
                    logger.debug(
                        'Sending parameters %s and seed %s to process %s',
                        parameters, cur_seed, proc_rank)
                    data_send = numpy.empty(len(parameters) + 1,
                                            dtype=numpy.float64)
                    for idx, parameter in enumerate(parameters):
                        data_send[idx] = parameter
                    data_send[-1] = cur_seed
                    self.comm.Send([data_send, MPI.DOUBLE],
                                   dest=proc_rank,
                                   tag=SIM_DATA)
                elif not self.simulationQueue.empty():
                    # There are available batch simulations in the
                    # simulation queue. Add every individual/seed
                    # combination to the simulationList
                    population = self.simulationQueue.get()
                    logger.debug(
                        'New population to be evaluated received: %s',
                        population)
                    for ind in population:
                        # Skip those individual already under evaluation
                        population_size += 1
                        for seed in range(
                                self.config_options['simulation']['seed'],
                                self.config_options['simulation']['seed'] +
                                self.config_options['algorithm']
                                ['number_of_repetitions']):
                            simulationList.append((ind, seed))
                elif self.end_simulation:
                    proc_rank = availableProcs.pop(0)
                    # Send an ending signal to the worker
                    logger.debug('Sending ending signal to process %s',
                                 proc_rank)
                    data_send = numpy.empty(len(self.parameter_dic) + 1,
                                            dtype=numpy.float64)
                    self.comm.Send([data_send, MPI.DOUBLE],
                                   dest=proc_rank,
                                   tag=SIM_EXIT)
                    endedProcs.append(proc_rank)
                else:
                    # Nothing to do
                    logger.debug('Sleeping 1')
                    time.sleep(0.1)
            else:
                # Nothing to do
                logger.debug('Sleeping 2')
                time.sleep(0.1)

    def execute_search(self):
        '''
        Initialize all the objects needed for running the simulation.
        '''
        if (self.mpi_rank == 0):
            self._execute_search_master()
        else:
            self._execute_search_worker()
        return

    def _execute_search_master(self):
        '''
        The master node executes the genetic algorithm and provides
        simulation parameters to the workers.
        '''
        # If we are in the last generation activate the flag
        self.last_generation = True
        self.end_simulation = False
        # Start simulation thread
        self.managerThread.start()
        logger.debug("Start of simulation")
        self.population = self._evaluate_population(self.population)
        logger.debug("Simulation ended")
        return

    def _execute_search_worker(self):
        '''
        Worker nodes receive parameter lists and simulate the network.
        '''
        stay_working = True
        my_status = MPI.Status()
        while stay_working:
            # Receive the simulation parameters and seed
            data_recv = numpy.empty(len(self.parameter_keys) + 1,
                                    dtype=numpy.float64)
            self.comm.Recv([data_recv, MPI.DOUBLE],
                           source=0,
                           tag=MPI.ANY_TAG,
                           status=my_status)
            tag = my_status.Get_tag()
            # Check the tag
            if tag == SIM_EXIT:
                stay_working = False
                continue
            if tag != SIM_DATA:
                logger.warning('Unknown tag %s received in worker', tag)
            cur_seed = int(data_recv[-1])
            parameters = data_recv[:-1].tolist()
            logger.debug('Received parameters %s and seed %s', parameters,
                         cur_seed)
            # Launch the simulation with the parameters
            mutual_information = self._eval_fitness_funct(parameters,
                                                          cur_seed)
            logger.debug(
                'Sending mutual information value %s to process root process',
                mutual_information)
            send_array = numpy.array([mutual_information],
                                     dtype=numpy.float64)
            self.comm.Send([send_array, MPI.DOUBLE], dest=0, tag=SIM_DATA)
        return