    def grid_search(self, settings_file, filename):
        """Grid search for parameters/hyperparameters using the network.

        Args:
            settings_file (str): File with GridSearch setting ranges.
            filename (str): Output .csv file with entry data and resulting
                testing losses.
        """
        settings_set = SettingsParser('GridSearch').read(settings_file)
        keys = list(settings_set.keys())
        values = list(settings_set.values())
        # Only report the keys whose values actually vary across the search.
        range_keys = [keys[j] for j, range_ in enumerate(values)
                      if len(range_) > 1]
        print('\nGrid Search:', ','.join(range_keys))
        setting_combinations = list(product(*values))
        run_entries = []
        for i, setting_combination in enumerate(setting_combinations):
            print('\n\nRun {}:'.format(i + 1), end=' ')
            run_settings = {k: v for k, v in zip(keys, setting_combination)}
            # Record this run's settings; note the single combination is used
            # here, not the full list of combinations.
            entry = [str(val) for val in setting_combination]
            for key in range_keys:
                print('{} = {}'.format(key, run_settings[key]), end=' ')
            self.load_data(run_settings['inputs_name'],
                           run_settings['partitions_file'], run_settings)
            run_testing_loss = self.train_network(run_settings,
                                                  tag='_' + str(i + 1))
            entry.append('{0:.4f}'.format(run_testing_loss))
            run_entries.append(entry)
        with open(filename, 'w') as search_record:
            lines = [','.join(entry) + ';\n' for entry in run_entries]
            # Header row; each data row ends with the run's testing loss.
            lines.insert(0, ','.join(keys + ['testing_loss']) + ';\n')
            search_record.writelines(lines)
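The search space is simply the Cartesian product of every value list in the GridSearch settings. Below is a minimal standalone sketch of that expansion, using placeholder keys and values rather than the project's real settings:

from itertools import product

settings_set = {'learning_rate': ['0.01', '0.001'],
                'hidden_layers': ['16x16'],
                'epochs': ['100', '200']}
keys = list(settings_set.keys())
values = list(settings_set.values())
for i, combination in enumerate(product(*values)):
    run_settings = dict(zip(keys, combination))
    print('Run {}: {}'.format(i + 1, run_settings))
# Prints four runs: 2 learning rates x 1 architecture x 2 epoch counts.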
if __name__ == '__main__':
    description = 'Create collated molecules file.'
    argparser = argparse.ArgumentParser(description=description)
    argparser.add_argument('--settings_file', '-s', default='settings.cfg',
                           help='Filename of settings.')
    argparser.add_argument('--verbosity', '-v', default=0, action='count')
    argparser.add_argument('--export', '-E', action='store_true',
                           help='Export entries to csv.')
    args = argparser.parse_args()

    settings = SettingsParser('Collate').read(args.settings_file)
    settings['verbosity'] = args.verbosity
    input_name = settings['inputs_name']
    output_name = settings['outputs_name']
    energies_file = settings['energies_file']
    sys_elements = settings['sys_elements']
    assert sys_elements != ['None']

    collator = BatchCollator(output_name, settings, sys_elements)
    # Parse a single input file if it exists; otherwise treat input_name as a
    # location of loose molecule files.
    if os.path.isfile(input_name):
        collator.parse_molecules(input_name, energies_file)
    else:
        collator.parse_loose_molecules(input_name, energies_file)
    if args.export:
        collator.export_entries(input_name.split('.')[0] + '.csv')
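SettingsParser is defined elsewhere in the project, so the exact format of settings.cfg is not shown in this excerpt. As a rough, hypothetical illustration only (not the project's actual parser), an INI-style file with a [Collate] section holding the keys read above could be loaded with the standard library:

# Hypothetical sketch; the real SettingsParser may behave differently.
import configparser

config = configparser.ConfigParser()
config.read('settings.cfg')
if 'Collate' in config:
    collate = dict(config['Collate'])
    # Keys read by the block above: inputs_name, outputs_name,
    # energies_file, sys_elements.
    print(collate.get('inputs_name'))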
        raise ValueError('invalid keyword!')
    return np.asarray(pairs), np.asarray(triplets)


if __name__ == '__main__':
    description = 'Create fingerprints from collated molecules file.'
    argparser = argparse.ArgumentParser(description=description)
    argparser.add_argument('--settings_file', '-s', default='settings.cfg',
                           help='Filename of settings.')
    argparser.add_argument('--verbosity', '-v', default=0, action='count')
    argparser.add_argument('--export', '-E', action='store_true',
                           help='Export entries to csv.')
    args = argparser.parse_args()

    settings = SettingsParser('Fingerprint').read(args.settings_file)
    settings['verbosity'] = args.verbosity
    input_name = settings['inputs_name']
    output_name = settings['outputs_name']
    parameters_file = settings['parameters_file']
    sys_elements = settings['sys_elements']
    assert sys_elements != ['None']

    processor = FingerprintProcessor(output_name, parameters_file, settings)
    processor.process_collated(input_name)
    if args.export:
        processor.export_entries(input_name.split('.')[0] + '.csv')
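For context on the pair and triplet arrays returned above: fingerprints of this kind are commonly assembled from all atom pairs (radial terms) and atom triplets (angular terms). A toy sketch of enumerating those index groups for a small molecule, as an illustration only and not the project's actual fingerprint code:

from itertools import combinations

import numpy as np

n_atoms = 4
pairs = np.asarray(list(combinations(range(n_atoms), 2)))     # shape (6, 2)
triplets = np.asarray(list(combinations(range(n_atoms), 3)))  # shape (4, 3)
print(pairs.shape, triplets.shape)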
    argparser.add_argument('--export', '-e', action='store_true',
                           help='Export entries to csv.')
    argparser.add_argument('--GridSearch', '-g', action='store_true',
                           help='Initialize grid search with network.')
    # argparser.add_argument('--plot', '-p', action='store_true',
    #                        help='Plot and save figures.')
    argparser.add_argument('--force', '-f', action='store_true',
                           help='Force overwrite/merge. (prompt otherwise)')
    args = argparser.parse_args()
    return args


if __name__ == '__main__':
    args = initialize_argparser()
    action = args.action
    settings = SettingsParser(action).read(args.settings_file)
    settings['verbosity'] = args.verbosity
    inputs_name = settings['inputs_name']
    outputs_name = settings['outputs_name']
    force = args.force

    # Prompt before clobbering an existing output file, unless --force is set.
    if os.path.isfile(outputs_name) and not force:
        while True:
            reply = str(input('File "{}" exists. Merge/overwrite? (y/n) '
                              .format(outputs_name)).lower().strip())
            try:
                if reply[0] == 'y':
                    break
                elif reply[0] == 'n':
                    ind = 1
                    outputs_path = os.path.splitext(outputs_name)
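The 'n' branch above is truncated in this excerpt, but the ind = 1 counter together with os.path.splitext(outputs_name) suggests a numbered-filename fallback. A hypothetical standalone sketch of that pattern, an assumption rather than the original code:

import os


def next_free_name(outputs_name):
    # Return outputs_name with a numeric suffix that does not collide with an
    # existing file.
    root, ext = os.path.splitext(outputs_name)
    ind = 1
    while os.path.isfile('{}_{}{}'.format(root, ind, ext)):
        ind += 1
    return '{}_{}{}'.format(root, ind, ext)

# e.g. next_free_name('fingerprints.h5') could yield 'fingerprints_1.h5',
# or 'fingerprints_2.h5' if that name is already taken, and so on.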
if __name__ == '__main__':
    description = 'Train an artificial neural network.'
    argparser = argparse.ArgumentParser(description=description)
    argparser.add_argument('--settings_file', '-s', default='settings.cfg',
                           help='Filename of settings.')
    argparser.add_argument('--verbosity', '-v', default=0, action='count')
    argparser.add_argument('--grid', '-g', action='store_true',
                           help='Begin grid search.')
    args = argparser.parse_args()

    settings = SettingsParser('Network').read(args.settings_file)
    settings['verbosity'] = args.verbosity
    input_name = settings['inputs_name']
    partitions_file = settings['partitions_file']
    output_name = settings['outputs_name']

    network = Network(settings)
    if args.grid:
        network.grid_search(args.settings_file, output_name)
    else:
        network.load_data(input_name, partitions_file, settings)
        final_loss = network.train_network(settings)
        print('Final loss:', final_loss)
    # Fragment from inside a Network method (uses model, self, and tag, which
    # are undefined at module level), disabled here:
    # print(model.save_weights(self.final_weights_name.format(tag)))
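The disabled fragment above saves trained weights from inside the Network class. Assuming a Keras backend, which the model.save_weights call suggests but this excerpt does not confirm, tagged weight files can be written roughly like this (layer sizes and the filename pattern are placeholders):

# Hypothetical sketch; not the project's Network class.
from tensorflow.keras import Sequential
from tensorflow.keras.layers import Dense, Input

model = Sequential([Input(shape=(4,)),
                    Dense(8, activation='relu'),
                    Dense(1)])
tag = '_1'  # e.g. the per-run tag used by grid_search
model.save_weights('final_weights{}.weights.h5'.format(tag))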