def create_hyperopt_files(experiments_file_name):
    """Create the hyperopt template and parameter-range files.

    Reads the experiments config, writes the templates section to
    ``hyperopt_template.yaml`` and the tunable-parameter ranges to
    ``hyperopt_params.pcs`` (one line per parameter).

    Parameters
    ----------
    experiments_file_name : str
        Path to the experiments configuration file.
    """
    # Get templates and variants from the experiments config
    config_strings = create_config_strings(experiments_file_name)
    config_objects = create_config_objects(config_strings)
    templates, variants = create_templates_variants_from_config_objects(
        config_objects)

    # Create template string and save to file
    template_str = "{\n    templates: {\n"
    for key in templates:
        template_str += "        {:s}: {:s},\n".format(key, templates[key])
    template_str += "}}\n\n"
    with open('hyperopt_template.yaml', 'w') as template_file:
        template_file.write(template_str)

    # Create parameter ranges and save to .pcs-file.
    # Fold param variants into one dict: param -> list of possibilities
    parameter_ranges = dict()
    for param_dict in variants:
        for key in param_dict:
            if key not in parameter_ranges:
                parameter_ranges[key] = [param_dict[key]]
            elif param_dict[key] not in parameter_ranges[key]:
                parameter_ranges[key].append(param_dict[key])

    # Delete unnecessary stuff, add template name reminder
    parameter_ranges.pop('dataset_filename')
    parameter_ranges.pop('save_path')
    parameter_ranges['template_name'] = ['!ADD_TEMPLATE_FILE_NAME!']

    # Build string
    hyperopt_param_string = ""
    # .items() instead of Python-2-only .iteritems() (works on 2 and 3)
    for key, values in parameter_ranges.items():
        # take middle value as default value
        default_str = "[{:s}]".format(str(values[len(values) // 2]))
        if len(values) == 1:
            val_str = "{{{:s}}}".format(str(values[0]))
        else:
            is_integer = False
            if all(isinstance(val, numbers.Number) for val in values):
                # numeric parameter -> continuous range [lo, hi]
                val_str = "[{:s}, {:s}]".format(str(values[0]),
                    str(values[-1]))
                # go through float(): plain ints have no .is_integer()
                # method (pre-3.12 / Python 2), which would raise
                # AttributeError on an all-int range
                is_integer = all(float(val).is_integer() for val in values)
                if is_integer:
                    default_str += 'i'
            else:
                # categorical parameter -> set notation {a, b, c}
                val_str = str(values).replace('(', '{').replace(')', '}')
        line = "{:30s} {:30s} {:s}\n".format(str(key), val_str, default_str)
        line = line.replace("$", "**")
        # correct indentation
        line = line.replace(" [**", "[**")
        hyperopt_param_string += line

    with open('hyperopt_params.pcs', 'w') as param_file:
        param_file.write(hyperopt_param_string)
# NOTE(review): this file defines create_hyperopt_files twice; this later
# copy shadows the earlier one at import time — consider removing one.
def create_hyperopt_files(experiments_file_name):
    """Create the hyperopt template and parameter-range files.

    Reads the experiments config, writes the templates section to
    ``hyperopt_template.yaml`` and the tunable-parameter ranges to
    ``hyperopt_params.pcs`` (one line per parameter).

    Parameters
    ----------
    experiments_file_name : str
        Path to the experiments configuration file.
    """
    # Get templates and variants from the experiments config
    config_strings = create_config_strings(experiments_file_name)
    config_objects = create_config_objects(config_strings)
    templates, variants = create_templates_variants_from_config_objects(
        config_objects)

    # Create template string and save to file
    template_str = "{\n    templates: {\n"
    for key in templates:
        template_str += "        {:s}: {:s},\n".format(key, templates[key])
    template_str += "}}\n\n"
    with open('hyperopt_template.yaml', 'w') as template_file:
        template_file.write(template_str)

    # Create parameter ranges and save to .pcs-file.
    # Fold param variants into one dict: param -> list of possibilities
    parameter_ranges = dict()
    for param_dict in variants:
        for key in param_dict:
            if key not in parameter_ranges:
                parameter_ranges[key] = [param_dict[key]]
            elif param_dict[key] not in parameter_ranges[key]:
                parameter_ranges[key].append(param_dict[key])

    # Delete unnecessary stuff, add template name reminder
    parameter_ranges.pop('dataset_filename')
    parameter_ranges.pop('save_path')
    parameter_ranges['template_name'] = ['!ADD_TEMPLATE_FILE_NAME!']

    # Build string
    hyperopt_param_string = ""
    # .items() instead of Python-2-only .iteritems() (works on 2 and 3)
    for key, values in parameter_ranges.items():
        # take middle value as default value
        default_str = "[{:s}]".format(str(values[len(values) // 2]))
        if len(values) == 1:
            val_str = "{{{:s}}}".format(str(values[0]))
        else:
            is_integer = False
            if all(isinstance(val, numbers.Number) for val in values):
                # numeric parameter -> continuous range [lo, hi]
                val_str = "[{:s}, {:s}]".format(str(values[0]),
                    str(values[-1]))
                # go through float(): plain ints have no .is_integer()
                # method (pre-3.12 / Python 2), which would raise
                # AttributeError on an all-int range
                is_integer = all(float(val).is_integer() for val in values)
                if is_integer:
                    default_str += 'i'
            else:
                # categorical parameter -> set notation {a, b, c}
                val_str = str(values).replace('(', '{').replace(')', '}')
        line = "{:30s} {:30s} {:s}\n".format(str(key), val_str, default_str)
        line = line.replace("$", "**")
        # correct indentation
        line = line.replace(" [**", "[**")
        hyperopt_param_string += line

    with open('hyperopt_params.pcs', 'w') as param_file:
        param_file.write(hyperopt_param_string)
def train_hyperopt(params):
    """Run one fold with given parameters and return test misclass.

    Parameters
    ----------
    params : dict
        Hyperopt parameter assignment; must contain ``'template_name'``
        (popped here) plus the tunable parameters.

    Returns
    -------
    Final test misclassification value from the experiment's monitor
    channels (last entry of ``'test_misclass'``).
    """
    # Fixed seed so every hyperopt evaluation is comparable
    lasagne.random.set_rng(RandomState(9859295))
    template_name = params.pop('template_name')
    params = adjust_params_for_hyperopt(params)

    config_strings = create_config_strings(template_name)
    config_objects = create_config_objects(config_strings)
    templates, _ = create_templates_variants_from_config_objects(
        config_objects)
    processed_templates, params_without_template_params = process_templates(
        templates, params)
    final_params = process_parameters_by_templates(
        params_without_template_params, processed_templates)

    # go to directory above this source-file
    main_template_filename = os.path.dirname(
        os.path.abspath(os.path.dirname(__file__)))
    # then complete path to config
    main_template_filename = os.path.join(main_template_filename,
        "configs", "eegnet_template.yaml")
    with open(main_template_filename, 'r') as main_template_file:
        main_template_str = main_template_file.read()

    final_params['original_params'] = 'dummy'
    train_str = Template(main_template_str).substitute(final_params)

    # First parse: skip the layers section (it still contains the
    # in_sensors/in_rows/in_cols placeholders) so we can load the dataset.
    def do_not_load_constructor(loader, node):
        return None
    yaml.add_constructor(u'!DoNotLoad', do_not_load_constructor)
    modified_train_str = train_str.replace('layers: ', 'layers: !DoNotLoad ')
    train_dict = yaml_parse.load(modified_train_str)
    dataset = train_dict['dataset']
    dataset.load()
    dataset_provider = train_dict['dataset_provider']

    assert 'in_sensors' in train_str
    assert 'in_rows' in train_str
    assert 'in_cols' in train_str
    # Compute the topological view once instead of three times
    topo_shape = dataset.get_topological_view().shape
    train_str = train_str.replace('in_sensors', str(topo_shape[1]))
    train_str = train_str.replace('in_rows', str(topo_shape[2]))
    train_str = train_str.replace('in_cols', str(topo_shape[3]))

    # Second parse: placeholders resolved, layers included this time
    train_dict = yaml_parse.load(train_str)
    layers = train_dict['layers']
    final_layer = layers[-1]

    # turn off debug/info logging
    logging.getLogger("pylearn2").setLevel(logging.WARN)
    logging.getLogger("braindecode").setLevel(logging.WARN)

    exp = Experiment()
    exp.setup(final_layer, dataset_provider, **train_dict['exp_args'])
    exp.run()
    final_misclass = exp.monitor_chans['test_misclass'][-1]
    print("Result for")
    pprint(params)
    print("Final Test misclass: {:5.4f}".format(float(final_misclass)))
    return final_misclass
# NOTE(review): this file defines train_hyperopt twice; this later copy
# shadows the earlier one at import time — consider removing one.
def train_hyperopt(params):
    """Run one fold with given parameters and return test misclass.

    Parameters
    ----------
    params : dict
        Hyperopt parameter assignment; must contain ``'template_name'``
        (popped here) plus the tunable parameters.

    Returns
    -------
    Final test misclassification value from the experiment's monitor
    channels (last entry of ``'test_misclass'``).
    """
    # Fixed seed so every hyperopt evaluation is comparable
    lasagne.random.set_rng(RandomState(9859295))
    template_name = params.pop('template_name')
    params = adjust_params_for_hyperopt(params)

    config_strings = create_config_strings(template_name)
    config_objects = create_config_objects(config_strings)
    templates, _ = create_templates_variants_from_config_objects(
        config_objects)
    processed_templates, params_without_template_params = process_templates(
        templates, params)
    final_params = process_parameters_by_templates(
        params_without_template_params, processed_templates)

    # go to directory above this source-file
    main_template_filename = os.path.dirname(
        os.path.abspath(os.path.dirname(__file__)))
    # then complete path to config
    main_template_filename = os.path.join(main_template_filename,
        "configs", "eegnet_template.yaml")
    with open(main_template_filename, 'r') as main_template_file:
        main_template_str = main_template_file.read()

    final_params['original_params'] = 'dummy'
    train_str = Template(main_template_str).substitute(final_params)

    # First parse: skip the layers section (it still contains the
    # in_sensors/in_rows/in_cols placeholders) so we can load the dataset.
    def do_not_load_constructor(loader, node):
        return None
    yaml.add_constructor(u'!DoNotLoad', do_not_load_constructor)
    modified_train_str = train_str.replace('layers: ', 'layers: !DoNotLoad ')
    train_dict = yaml_parse.load(modified_train_str)
    dataset = train_dict['dataset']
    dataset.load()
    dataset_provider = train_dict['dataset_provider']

    assert 'in_sensors' in train_str
    assert 'in_rows' in train_str
    assert 'in_cols' in train_str
    # Compute the topological view once instead of three times
    topo_shape = dataset.get_topological_view().shape
    train_str = train_str.replace('in_sensors', str(topo_shape[1]))
    train_str = train_str.replace('in_rows', str(topo_shape[2]))
    train_str = train_str.replace('in_cols', str(topo_shape[3]))

    # Second parse: placeholders resolved, layers included this time
    train_dict = yaml_parse.load(train_str)
    layers = train_dict['layers']
    final_layer = layers[-1]

    # turn off debug/info logging
    logging.getLogger("pylearn2").setLevel(logging.WARN)
    logging.getLogger("braindecode").setLevel(logging.WARN)

    exp = Experiment()
    exp.setup(final_layer, dataset_provider, **train_dict['exp_args'])
    exp.run()
    final_misclass = exp.monitor_chans['test_misclass'][-1]
    print("Result for")
    pprint(params)
    print("Final Test misclass: {:5.4f}".format(float(final_misclass)))
    return final_misclass