def __init__(self,
             interface_out_filename=mlu.default_interface_out_filename,
             interface_in_filename=mlu.default_interface_in_filename,
             interface_file_type=mlu.default_interface_file_type,
             interface_in_file_type=mlu.default_interface_in_file_type,
             interface_out_file_type=mlu.default_interface_out_file_type,
             **kwargs):
    '''
    Initialize the file interface.

    The output and input file types can be configured independently;
    when a specific type is not supported, the generic
    interface_file_type is used as a fallback for that direction.

    Raises:
        ValueError: If neither the specific nor the generic file type is
            supported. (Previously the error was only logged, after
            which the unset file-type attribute caused an AttributeError
            when building the total filename below.)
    '''
    super(FileInterface, self).__init__(**kwargs)
    self.out_file_count = 0
    self.in_file_count = 0
    # Resolve the output file type, preferring the specific option.
    if mlu.check_file_type_supported(interface_out_file_type):
        self.out_file_type = str(interface_out_file_type)
    elif mlu.check_file_type_supported(interface_file_type):
        self.out_file_type = str(interface_file_type)
    else:
        self.log.error('Output file type is not supported:' + interface_out_file_type)
        raise ValueError
    # Resolve the input file type, preferring the specific option.
    if mlu.check_file_type_supported(interface_in_file_type):
        self.in_file_type = str(interface_in_file_type)
    elif mlu.check_file_type_supported(interface_file_type):
        self.in_file_type = str(interface_file_type)
    else:
        self.log.error('Input file type is not supported:' + interface_in_file_type)
        raise ValueError
    self.out_filename = str(interface_out_filename)
    self.total_out_filename = self.out_filename + '.' + self.out_file_type
    self.in_filename = str(interface_in_filename)
    self.total_in_filename = self.in_filename + '.' + self.in_file_type
def __init__(self, filename, file_type='pkl', **kwargs):
    '''
    Load a differential evolution learner archive for visualization.

    Args:
        filename (str): Path of the learner archive to load.
        file_type (str): Archive file format. Defaults to 'pkl'.

    Raises:
        ValueError: If the archive's 'archive_type' entry is present but
            is not 'differential_evolution'.
    '''
    self.log = logging.getLogger(__name__)
    self.filename = str(filename)
    self.file_type = str(file_type)
    if not mlu.check_file_type_supported(self.file_type):
        self.log.error('GP training file type not supported' + repr(self.file_type))
    learner_dict = mlu.get_dict_from_file(self.filename, self.file_type)
    if 'archive_type' in learner_dict and not (learner_dict['archive_type'] == 'differential_evolution'):
        self.log.error('The archive appears to be the wrong type.' + repr(learner_dict['archive_type']))
        raise ValueError
    # Fixed: use .get so archives without an 'archive_type' entry (which
    # the guard above deliberately tolerates) do not raise KeyError here.
    self.archive_type = learner_dict.get('archive_type', None)
    self.num_generations = int(learner_dict['generation_count'])
    self.num_population_members = int(learner_dict['num_population_members'])
    self.num_params = int(learner_dict['num_params'])
    self.min_boundary = np.squeeze(np.array(learner_dict['min_boundary']))
    self.max_boundary = np.squeeze(np.array(learner_dict['max_boundary']))
    self.params_generations = np.array(learner_dict['params_generations'])
    self.costs_generations = np.array(learner_dict['costs_generations'])
    self.finite_flag = True
    # Scale parameters to [0, 1] relative to the search boundaries.
    self.param_scaler = lambda p: (p - self.min_boundary) / (self.max_boundary - self.min_boundary)
    self.scaled_params_generations = np.array(
        [[self.param_scaler(self.params_generations[inda, indb, :])
          for indb in range(self.num_population_members)]
         for inda in range(self.num_generations)])
    self.gen_numbers = np.arange(1, self.num_generations + 1)
    self.param_colors = _color_list_from_num_of_params(self.num_params)
    # One generation number per population member, for scatter plots.
    self.gen_plot = np.array(
        [np.full(self.num_population_members, ind, dtype=int)
         for ind in self.gen_numbers]).flatten()
def __init__(self, filename, file_type='pkl', **kwargs):
    '''
    Load a controller archive for visualization.

    Args:
        filename (str): Path of the controller archive to load.
        file_type (str): Archive file format. Defaults to 'pkl'.

    Raises:
        ValueError: If the archive's 'archive_type' entry is present but
            is not 'controller'.
    '''
    self.log = logging.getLogger(__name__)
    self.filename = str(filename)
    self.file_type = str(file_type)
    if not mlu.check_file_type_supported(self.file_type):
        self.log.error('GP training file type not supported' + repr(self.file_type))
    controller_dict = mlu.get_dict_from_file(self.filename, self.file_type)
    if 'archive_type' in controller_dict and not (
            controller_dict['archive_type'] == 'controller'):
        self.log.error('The archive appears to be the wrong type.')
        raise ValueError
    # Fixed: read the key after the validation above, and use .get so
    # archives without an 'archive_type' entry do not raise KeyError.
    self.archive_type = controller_dict.get('archive_type', None)
    self.num_in_costs = int(controller_dict['num_in_costs'])
    self.num_out_params = int(controller_dict['num_out_params'])
    self.out_params = np.array(controller_dict['out_params'])
    self.out_type = [x.strip() for x in list(controller_dict['out_type'])]
    self.in_costs = np.squeeze(np.array(controller_dict['in_costs']))
    self.in_uncers = np.squeeze(np.array(controller_dict['in_uncers']))
    self.in_bads = np.squeeze(list(controller_dict['in_bads']))
    self.best_index = int(controller_dict['best_index'])
    self.num_params = int(controller_dict['num_params'])
    self.min_boundary = np.squeeze(
        np.array(controller_dict['min_boundary']))
    self.max_boundary = np.squeeze(
        np.array(controller_dict['max_boundary']))
    # Fixed: the original tested min_boundary twice and never checked
    # max_boundary for finiteness.
    if np.all(np.isfinite(self.min_boundary)) and np.all(
            np.isfinite(self.max_boundary)):
        self.finite_flag = True
        # Scale parameters to [0, 1] relative to the search boundaries.
        self.param_scaler = lambda p: (p - self.min_boundary) / (
            self.max_boundary - self.min_boundary)
        self.scaled_params = np.array([
            self.param_scaler(self.out_params[ind, :])
            for ind in range(self.num_out_params)
        ])
    else:
        self.finite_flag = False
    self.unique_types = set(self.out_type)
    self.cost_colors = [
        _color_from_controller_name(x) for x in self.out_type
    ]
    self.in_numbers = np.arange(1, self.num_in_costs + 1)
    self.out_numbers = np.arange(1, self.num_out_params + 1)
    self.param_colors = _color_list_from_num_of_params(self.num_params)
def __init__(self,
             interface_out_filename=mlu.default_interface_out_filename,
             interface_in_filename=mlu.default_interface_in_filename,
             interface_file_type=mlu.default_interface_file_type,
             **kwargs):
    '''
    Initialize the file interface.

    A single file type is used for both the output and input exchange
    files.

    Raises:
        ValueError: If interface_file_type is not supported. (Previously
            the error was only logged, after which the unset file-type
            attributes caused an AttributeError when building the total
            filenames below.)
    '''
    super(FileInterface, self).__init__(**kwargs)
    self.out_file_count = 0
    self.in_file_count = 0
    if mlu.check_file_type_supported(interface_file_type):
        self.out_file_type = str(interface_file_type)
        self.in_file_type = str(interface_file_type)
    else:
        self.log.error('File out type is not supported:' + interface_file_type)
        raise ValueError
    self.out_filename = str(interface_out_filename)
    self.total_out_filename = self.out_filename + '.' + self.out_file_type
    self.in_filename = str(interface_in_filename)
    self.total_in_filename = self.in_filename + '.' + self.in_file_type
def __init__(self, filename, file_type='pkl', **kwargs):
    '''
    Load a controller archive for visualization.

    Args:
        filename (str): Path of the controller archive to load.
        file_type (str): Archive file format. Defaults to 'pkl'.

    Raises:
        ValueError: If the archive's 'archive_type' entry is present but
            is not 'controller'.
    '''
    self.log = logging.getLogger(__name__)
    self.filename = str(filename)
    self.file_type = str(file_type)
    if not mlu.check_file_type_supported(self.file_type):
        self.log.error('GP training file type not supported' + repr(self.file_type))
    controller_dict = mlu.get_dict_from_file(self.filename, self.file_type)
    if 'archive_type' in controller_dict and not (controller_dict['archive_type'] == 'controller'):
        self.log.error('The archive appears to be the wrong type.')
        raise ValueError
    # Fixed: read the key after the validation above, and use .get so
    # archives without an 'archive_type' entry do not raise KeyError.
    self.archive_type = controller_dict.get('archive_type', None)
    self.num_in_costs = int(controller_dict['num_in_costs'])
    self.num_out_params = int(controller_dict['num_out_params'])
    self.out_params = np.array(controller_dict['out_params'])
    self.out_type = [x.strip() for x in list(controller_dict['out_type'])]
    self.in_costs = np.squeeze(np.array(controller_dict['in_costs']))
    self.in_uncers = np.squeeze(np.array(controller_dict['in_uncers']))
    self.in_bads = np.squeeze(list(controller_dict['in_bads']))
    self.best_index = int(controller_dict['best_index'])
    self.num_params = int(controller_dict['num_params'])
    self.min_boundary = np.squeeze(np.array(controller_dict['min_boundary']))
    self.max_boundary = np.squeeze(np.array(controller_dict['max_boundary']))
    # Fixed: the original tested min_boundary twice and never checked
    # max_boundary for finiteness.
    if np.all(np.isfinite(self.min_boundary)) and np.all(np.isfinite(self.max_boundary)):
        self.finite_flag = True
        # Scale parameters to [0, 1] relative to the search boundaries.
        self.param_scaler = lambda p: (p - self.min_boundary) / (self.max_boundary - self.min_boundary)
        self.scaled_params = np.array([self.param_scaler(self.out_params[ind, :]) for ind in range(self.num_out_params)])
    else:
        self.finite_flag = False
    self.unique_types = set(self.out_type)
    self.cost_colors = [_color_from_controller_name(x) for x in self.out_type]
    self.in_numbers = np.arange(1, self.num_in_costs + 1)
    self.out_numbers = np.arange(1, self.num_out_params + 1)
    self.param_colors = _color_list_from_num_of_params(self.num_params)
def __init__(self, filename, file_type='pkl', **kwargs):
    '''
    Load a differential evolution learner archive for visualization.

    Args:
        filename (str): Path of the learner archive to load.
        file_type (str): Archive file format. Defaults to 'pkl'.

    Raises:
        ValueError: If the archive's 'archive_type' entry is present but
            is not 'differential_evolution'.
    '''
    self.log = logging.getLogger(__name__)
    self.filename = str(filename)
    self.file_type = str(file_type)
    if not mlu.check_file_type_supported(self.file_type):
        self.log.error('GP training file type not supported' + repr(self.file_type))
    learner_dict = mlu.get_dict_from_file(self.filename, self.file_type)
    if 'archive_type' in learner_dict and not (
            learner_dict['archive_type'] == 'differential_evolution'):
        self.log.error('The archive appears to be the wrong type.' +
                       repr(learner_dict['archive_type']))
        raise ValueError
    # Fixed: use .get so archives without an 'archive_type' entry (which
    # the guard above deliberately tolerates) do not raise KeyError here.
    self.archive_type = learner_dict.get('archive_type', None)
    self.num_generations = int(learner_dict['generation_count'])
    self.num_population_members = int(
        learner_dict['num_population_members'])
    self.num_params = int(learner_dict['num_params'])
    self.min_boundary = np.squeeze(np.array(learner_dict['min_boundary']))
    self.max_boundary = np.squeeze(np.array(learner_dict['max_boundary']))
    self.params_generations = np.array(learner_dict['params_generations'])
    self.costs_generations = np.array(learner_dict['costs_generations'])
    self.finite_flag = True
    # Scale parameters to [0, 1] relative to the search boundaries.
    self.param_scaler = lambda p: (p - self.min_boundary) / (
        self.max_boundary - self.min_boundary)
    self.scaled_params_generations = np.array([[
        self.param_scaler(self.params_generations[inda, indb, :])
        for indb in range(self.num_population_members)
    ] for inda in range(self.num_generations)])
    self.gen_numbers = np.arange(1, self.num_generations + 1)
    self.param_colors = _color_list_from_num_of_params(self.num_params)
    # One generation number per population member, for scatter plots.
    self.gen_plot = np.array([
        np.full(self.num_population_members, ind, dtype=int)
        for ind in self.gen_numbers
    ]).flatten()
def __init__(self, interface,
             max_num_runs=float('+inf'),
             target_cost=float('-inf'),
             max_num_runs_without_better_params=float('+inf'),
             controller_archive_filename=default_controller_archive_filename,
             controller_archive_file_type=default_controller_archive_file_type,
             archive_extra_dict=None,
             start_datetime=None,
             **kwargs):
    '''
    Initialize the controller.

    Args:
        interface (Interface): The interface used to exchange parameters
            and costs with the experiment.
        max_num_runs (float): Maximum number of runs; must be positive.
        target_cost (float): Halt when a cost at or below this is found.
        max_num_runs_without_better_params (float): Maximum number of
            runs without improvement; must be positive.
        controller_archive_filename (str): Base name for the archive
            file, or None to disable archiving.
        controller_archive_file_type (str): Archive file format.
        archive_extra_dict (dict): Extra entries merged into the archive.
        start_datetime (datetime.datetime): Start time; defaults to now.

    Raises:
        TypeError: If interface is not an mli.Interface.
        ValueError: If a numeric option is out of range or the archive
            file type is unsupported.
    '''
    #Make logger
    self.remaining_kwargs = mlu._config_logger(**kwargs)
    self.log = logging.getLogger(__name__)

    #Variables that are included in archive
    self.num_in_costs = 0
    self.num_out_params = 0
    self.num_last_best_cost = 0
    self.out_params = []
    self.out_type = []
    self.out_extras = []
    self.in_costs = []
    self.in_uncers = []
    self.in_bads = []
    self.in_extras = []
    self.best_cost = float('inf')
    self.best_uncer = float('nan')
    self.best_index = float('nan')
    self.best_params = float('nan')

    #Variables that used internally
    self.last_out_params = None
    self.curr_params = None
    self.curr_cost = None
    self.curr_uncer = None
    self.curr_bad = None
    self.curr_extras = None

    #Constants
    self.controller_wait = float(1)

    #Learner related variables
    self.learner_params_queue = None
    self.learner_costs_queue = None
    self.end_learner = None
    self.learner = None

    #Variables set by user
    #save interface and extract important variables
    if isinstance(interface, mli.Interface):
        self.interface = interface
    else:
        self.log.error('interface is not a Interface as defined in the MLOOP package.')
        raise TypeError
    self.params_out_queue = interface.params_out_queue
    self.costs_in_queue = interface.costs_in_queue
    self.end_interface = interface.end_event

    #Other options
    if start_datetime is None:
        self.start_datetime = datetime.datetime.now()
    else:
        # Fixed: datetime.datetime(start_datetime) always raised a
        # TypeError; accept the caller-supplied datetime directly.
        self.start_datetime = start_datetime
    self.max_num_runs = float(max_num_runs)
    if self.max_num_runs <= 0:
        # Fixed: referenced nonexistent attribute self.max_num_run,
        # which raised AttributeError and masked the real ValueError.
        self.log.error('Number of runs must be greater than zero. max_num_runs:' + repr(self.max_num_runs))
        raise ValueError
    self.target_cost = float(target_cost)
    self.max_num_runs_without_better_params = float(max_num_runs_without_better_params)
    if self.max_num_runs_without_better_params <= 0:
        self.log.error('Max number of repeats must be greater than zero. max_num_runs:' + repr(max_num_runs_without_better_params))
        raise ValueError
    if mlu.check_file_type_supported(controller_archive_file_type):
        self.controller_archive_file_type = controller_archive_file_type
    else:
        self.log.error('File in type is not supported:' + repr(controller_archive_file_type))
        raise ValueError
    if controller_archive_filename is None:
        self.controller_archive_filename = None
    else:
        if not os.path.exists(mlu.archive_foldername):
            os.makedirs(mlu.archive_foldername)
        self.controller_archive_filename = str(controller_archive_filename)
        self.total_archive_filename = (mlu.archive_foldername
                                       + self.controller_archive_filename + '_'
                                       + mlu.datetime_to_string(self.start_datetime)
                                       + '.' + self.controller_archive_file_type)
    self.archive_dict = {'archive_type': 'controller',
                         'num_out_params': self.num_out_params,
                         'out_params': self.out_params,
                         'out_type': self.out_type,
                         'out_extras': self.out_extras,
                         'in_costs': self.in_costs,
                         'in_uncers': self.in_uncers,
                         'in_bads': self.in_bads,
                         'in_extras': self.in_extras,
                         'max_num_runs': self.max_num_runs,
                         'start_datetime': mlu.datetime_to_string(self.start_datetime)}
    if archive_extra_dict is not None:
        self.archive_dict.update(archive_extra_dict)
    self.log.debug('Controller init completed.')
def __init__(
        self,
        interface,
        max_num_runs=float('+inf'),
        target_cost=float('-inf'),
        max_num_runs_without_better_params=float('+inf'),
        controller_archive_filename=default_controller_archive_filename,
        controller_archive_file_type=default_controller_archive_file_type,
        archive_extra_dict=None,
        start_datetime=None,
        **kwargs):
    '''
    Initialize the controller.

    Args:
        interface (Interface): The interface used to exchange parameters
            and costs with the experiment.
        max_num_runs (float): Maximum number of runs; must be positive.
        target_cost (float): Halt when a cost at or below this is found.
        max_num_runs_without_better_params (float): Maximum number of
            runs without improvement; must be positive.
        controller_archive_filename (str): Base name (optionally with a
            path) for the archive file, or None to disable archiving.
        controller_archive_file_type (str): Archive file format.
        archive_extra_dict (dict): Extra entries merged into the archive.
        start_datetime (datetime.datetime): Start time; defaults to now.

    Raises:
        TypeError: If interface is not an mli.Interface.
        ValueError: If a numeric option is out of range or the archive
            file type is unsupported.
    '''
    #Make logger
    self.remaining_kwargs = mlu._config_logger(**kwargs)
    self.log = logging.getLogger(__name__)

    #Variable that are included in archive
    self.num_in_costs = 0
    self.num_out_params = 0
    self.num_last_best_cost = 0
    self.out_params = []
    self.out_type = []
    self.out_extras = []
    self.in_costs = []
    self.in_uncers = []
    self.in_bads = []
    self.in_extras = []
    self.best_cost = float('inf')
    self.best_uncer = float('nan')
    self.best_index = float('nan')
    self.best_params = float('nan')

    #Variables that used internally
    self.last_out_params = None
    self.curr_params = None
    self.curr_cost = None
    self.curr_uncer = None
    self.curr_bad = None
    self.curr_extras = None

    #Constants
    self.controller_wait = float(1)

    #Learner related variables
    self.learner_params_queue = None
    self.learner_costs_queue = None
    self.end_learner = None
    self.learner = None

    #Variables set by user
    #save interface and extract important variables
    if isinstance(interface, mli.Interface):
        self.interface = interface
    else:
        self.log.error(
            'interface is not a Interface as defined in the M-LOOP package.'
        )
        raise TypeError
    self.params_out_queue = interface.params_out_queue
    self.costs_in_queue = interface.costs_in_queue
    self.end_interface = interface.end_event

    #Other options
    if start_datetime is None:
        self.start_datetime = datetime.datetime.now()
    else:
        # Fixed: datetime.datetime(start_datetime) always raised a
        # TypeError; accept the caller-supplied datetime directly.
        self.start_datetime = start_datetime
    self.max_num_runs = float(max_num_runs)
    if self.max_num_runs <= 0:
        # Fixed: referenced nonexistent attribute self.max_num_run,
        # which raised AttributeError and masked the real ValueError.
        self.log.error(
            'Number of runs must be greater than zero. max_num_runs:'
            + repr(self.max_num_runs))
        raise ValueError
    self.target_cost = float(target_cost)
    self.max_num_runs_without_better_params = float(
        max_num_runs_without_better_params)
    if self.max_num_runs_without_better_params <= 0:
        self.log.error(
            'Max number of repeats must be greater than zero. max_num_runs:'
            + repr(max_num_runs_without_better_params))
        raise ValueError
    if mlu.check_file_type_supported(controller_archive_file_type):
        self.controller_archive_file_type = controller_archive_file_type
    else:
        self.log.error('File in type is not supported:' +
                       repr(controller_archive_file_type))
        raise ValueError
    if controller_archive_filename is None:
        self.controller_archive_filename = None
    else:
        # Store self.controller_archive_filename without any path, but
        # include any path components in controller_archive_filename when
        # constructing the full path.
        controller_archive_filename = str(controller_archive_filename)
        self.controller_archive_filename = os.path.basename(
            controller_archive_filename)
        filename_suffix = mlu.generate_filename_suffix(
            self.controller_archive_file_type,
            file_datetime=self.start_datetime,
        )
        filename = controller_archive_filename + filename_suffix
        self.total_archive_filename = os.path.join(mlu.archive_foldername,
                                                   filename)
        # Include any path info from controller_archive_filename when
        # creating directory for archive files.
        archive_dir = os.path.dirname(self.total_archive_filename)
        if not os.path.exists(archive_dir):
            os.makedirs(archive_dir)
    self.archive_dict = {
        'mloop_version': __version__,
        'archive_type': 'controller',
        'num_out_params': self.num_out_params,
        'out_params': self.out_params,
        'out_type': self.out_type,
        'out_extras': self.out_extras,
        'in_costs': self.in_costs,
        'in_uncers': self.in_uncers,
        'in_bads': self.in_bads,
        'in_extras': self.in_extras,
        'max_num_runs': self.max_num_runs,
        'start_datetime': mlu.datetime_to_string(self.start_datetime)
    }
    if archive_extra_dict is not None:
        self.archive_dict.update(archive_extra_dict)
    self.log.debug('Controller init completed.')