def process_args(self, args):
    self.progress.process_args(args)
    self.data_reader.process_args(args)
    with self.data_reader:
        self.iter_range.process_args(args)

    self.task_callable = get_object(args.task_callable, path=['.'])
    if args.crawler_instance is not None:
        self.crawler = get_object(args.crawler_instance, path=['.'])
    else:
        self.crawler = WESTPACrawler()

def process_args(self, args):
    if args.construct_dataset:
        self.dsspec = FnDSSpec(self.h5filename, get_object(args.construct_dataset, path=['.']))
    elif args.dsspecs:
        self.dsspec = MultiDSSpec(
            [SingleSegmentDSSpec.from_string(dsspec, self.h5filename) for dsspec in args.dsspecs]
        )
    else:
        # we can only get here if a default dataset name was specified
        assert self.default_dsname
        self.dsspec = SingleSegmentDSSpec(self.h5filename, self.default_dsname)

def get_mapper_func(self, plugin_config):
    try:
        methodname = plugin_config['mapper_func']
    except KeyError:
        # no custom mapper function supplied
        return False

    mapper_func = extloader.get_object(methodname)
    log.info('loaded adaptive voronoi mapper function {!r}'.format(mapper_func))
    return mapper_func

def get_dfunc_method(self, plugin_config):
    try:
        methodname = plugin_config['dfunc_method']
    except KeyError:
        raise ConfigItemMissing('dfunc_method')

    dfunc_method = extloader.get_object(methodname)
    log.info('loaded adaptive voronoi dfunc method {!r}'.format(dfunc_method))
    return dfunc_method

def get_dfunc_method(self, plugin_config):
    try:
        methodname = plugin_config["dfunc_method"]
    except KeyError:
        raise ConfigItemMissing("dfunc_method")

    dfunc_method = extloader.get_object(methodname)
    log.info("loaded stringmethod dfunc method {!r}".format(dfunc_method))
    return dfunc_method

def process_args(self, args):
    self.progress.process_args(args)
    self.data_reader.process_args(args)
    with self.data_reader:
        self.iter_range.process_args(args)

    # Set the attributes according to arguments
    self.output_filename = args.output
    self.tm_filename = args.transition_matrix
    if args.postprocess_function:
        self.postprocess_function = get_object(args.postprocess_function, path=["."])

def get_dfunc_method(self, plugin_config):
    try:
        methodname = plugin_config['dfunc_method']
    except KeyError:
        raise ConfigItemMissing('dfunc_method')

    dfunc_method = extloader.get_object(methodname)
    log.info('loaded stringmethod dfunc method {!r}'.format(dfunc_method))
    return dfunc_method

def load_plugins(self):
    try:
        plugins_config = westpa.rc.config['west', 'plugins']
    except KeyError:
        return

    for plugin_config in plugins_config or []:
        plugin_name = plugin_config['plugin']
        if plugin_config.get('enabled', True):
            log.info('loading plugin {!r}'.format(plugin_name))
            plugin = extloader.get_object(plugin_name)(self, plugin_config)
            log.debug('loaded plugin {!r}'.format(plugin))

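# A minimal sketch (not from the source) of the structure load_plugins() expects under
# westpa.rc.config['west', 'plugins'] once the configuration has been parsed: a list of
# mappings, each with a dotted 'plugin' path and an optional 'enabled' flag. The dotted
# paths below are illustrative only.
example_plugins_config = [
    {'plugin': 'westext.stringmethod.StringDriver', 'enabled': True},  # loaded and instantiated
    {'plugin': 'my_package.my_plugin.MyDriver', 'enabled': False},     # skipped: disabled
]
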
def process_args(self, args):
    self.progress.process_args(args)
    self.data_reader.process_args(args)
    with self.data_reader:
        self.iter_range.process_args(args)

    predicate = get_object(args.predicate_function, path=['.'])
    if not callable(predicate):
        raise TypeError('predicate object {!r} is not callable'.format(predicate))
    self.predicate = predicate
    self.invert = bool(args.invert)
    self.include_ancestors = bool(args.include_ancestors)
    self.output_filename = args.output

def process_args(self, args):
    self.progress.process_args(args)
    self.data_reader.process_args(args)
    with self.data_reader:
        self.iter_range.process_args(args)

    predicate = get_object(args.predicate_function, path=['.'])
    if not callable(predicate):
        raise TypeError('predicate object {!r} is not callable'.format(predicate))
    self.predicate = predicate
    self.invert = bool(args.invert)
    self.include_ancestors = bool(args.include_ancestors)
    self.output_filename = args.output

def get_avgpos_method(self, plugin_config):
    try:
        methodname = plugin_config['avgpos_method']
    except KeyError:
        raise ConfigItemMissing('avgpos_method')

    if methodname.lower() == 'cartesian':
        avgpos_method = self.avgpos_cartesian
    else:
        avgpos_method = extloader.get_object(methodname)

    log.info('loaded stringmethod avgpos method {!r}'.format(avgpos_method))
    return avgpos_method

def get_avgpos_method(self, plugin_config):
    try:
        methodname = plugin_config["avgpos_method"]
    except KeyError:
        raise ConfigItemMissing("avgpos_method")

    if methodname.lower() == "cartesian":
        avgpos_method = self.avgpos_cartesian
    else:
        avgpos_method = extloader.get_object(methodname)

    log.info("loaded stringmethod avgpos method {!r}".format(avgpos_method))
    return avgpos_method

def get_string_method(self, plugin_config):
    try:
        methodname = plugin_config['string_method']
    except KeyError:
        raise ConfigItemMissing('string_method')

    if methodname.lower() == 'default':
        str_method = DefaultStringMethod
    else:
        str_method = extloader.get_object(methodname)

    assert issubclass(str_method, WESTStringMethod)
    log.debug('loaded stringmethod string method {!r}'.format(str_method))
    return str_method

def get_string_method(self, plugin_config):
    try:
        methodname = plugin_config["string_method"]
    except KeyError:
        raise ConfigItemMissing("string_method")

    if methodname.lower() == "default":
        str_method = DefaultStringMethod
    else:
        str_method = extloader.get_object(methodname)

    assert issubclass(str_method, WESTStringMethod)
    log.debug("loaded stringmethod string method {!r}".format(str_method))
    return str_method

def mapper_from_function(funcspec):
    '''Return a mapper constructed by calling a function in a named module.
    ``funcspec`` should be formatted as ``[PATH]:MODULE.FUNC``. This function loads MODULE,
    optionally adding PATH to the search path, then returns MODULE.FUNC().'''

    if ':' in funcspec:
        # split off the path component; maxsplit=1 keeps MODULE.FUNC intact even if PATH
        # itself contains colons
        (pathpart, funcpart) = funcspec.rsplit(':', 1)
        pathinfo = ['.'] + pathpart.split(':')
    else:
        funcpart = funcspec
        pathinfo = ['.']

    fn = get_object(funcpart, path=pathinfo)
    mapper = fn()
    log.debug('loaded {!r} from {!r}'.format(mapper, fn))
    return mapper

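# Usage sketch (hypothetical module and function names): given a file ./my_bins.py that
# defines construct_mapper() returning a bin mapper, either funcspec form below resolves
# the callable via get_object() and returns its result, per the docstring's
# [PATH]:MODULE.FUNC format.
mapper = mapper_from_function('my_bins.construct_mapper')                     # search '.' only
mapper = mapper_from_function('/path/to/scripts:my_bins.construct_mapper')   # add a search path
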
def process_args(self, args):
    self.plotscale = args.plotscale
    self.input_h5 = h5py.File(args.input, 'r')
    self.plot_output_filename = args.plot_output
    self.hdf5_output_filename = args.hdf5_output
    self.plot_contour = args.plot_contour

    if args.title:
        self.plottitle = args.title

    if args.range:
        self.plotrange = self.parse_range(args.range)

    if args.firstdim:
        self.dimensions.append(self.parse_dimspec(args.firstdim))

    if not args.firstdim:
        self.dimensions.append({'idim': 0, 'label': 'dimension 0'})

    if args.enerzero:
        lenerzero = args.enerzero.lower()
        if lenerzero not in ('min', 'max'):
            try:
                self.enerzero = float(args.enerzero)
            except ValueError:
                raise ValueError('invalid energy zero point {!r}'.format(args.enerzero))
        else:
            self.enerzero = lenerzero
    else:
        self.enerzero = 'min'

    self.avail_iter_start, self.avail_iter_stop = h5io.get_iter_range(self.input_h5['histograms'])
    try:
        self.avail_iter_step = h5io.get_iter_step(self.input_h5['histograms'])
    except KeyError:
        self.avail_iter_step = 1

    log.info(
        'HDF5 file {!r} contains data for iterations {} -- {} with a step of {}'.format(
            args.input, self.avail_iter_start, self.avail_iter_stop, self.avail_iter_step
        )
    )

    if args.postprocess_function:
        self.postprocess_function = get_object(args.postprocess_function, path=['.'])

def process_args(self, args):
    self.plotscale = args.plotscale
    self.input_h5 = h5py.File(args.input, 'r')
    self.plot_output_filename = args.plot_output
    self.hdf5_output_filename = args.hdf5_output
    self.plot_contour = args.plot_contour

    if args.title:
        self.plottitle = args.title

    if args.range:
        self.plotrange = self.parse_range(args.range)

    if args.firstdim:
        self.dimensions.append(self.parse_dimspec(args.firstdim))

    if not args.firstdim:
        self.dimensions.append({'idim': 0, 'label': 'dimension 0'})

    if args.enerzero:
        lenerzero = args.enerzero.lower()
        if lenerzero not in ('min', 'max'):
            try:
                self.enerzero = float(args.enerzero)
            except ValueError:
                raise ValueError('invalid energy zero point {!r}'.format(args.enerzero))
        else:
            self.enerzero = lenerzero
    else:
        self.enerzero = 'min'

    self.avail_iter_start, self.avail_iter_stop = h5io.get_iter_range(self.input_h5['histograms'])
    try:
        self.avail_iter_step = h5io.get_iter_step(self.input_h5['histograms'])
    except KeyError:
        self.avail_iter_step = 1

    log.info(
        'HDF5 file {!r} contains data for iterations {} -- {} with a step of {}'.format(
            args.input, self.avail_iter_start, self.avail_iter_stop, self.avail_iter_step
        )
    )

    if args.postprocess_function:
        self.postprocess_function = get_object(args.postprocess_function, path=['.'])

def process_args(self, args):
    self.progress.process_args(args)
    self.data_reader.process_args(args)

    # Necessary to open the file to get the current iteration
    # if we want to use the mapper in the file
    self.data_reader.open(mode='r+')
    self.n_iter = self.data_reader.current_iteration
    # If we decide to use this option for iteration selection:
    # getattr(args, 'bins_from_h5file', None) or self.data_reader.current_iteration

    with self.data_reader:
        self.dssynth.h5filename = self.data_reader.we_h5filename
        self.dssynth.process_args(args)
        if not args.config_from_file:
            self.binning.set_we_h5file_info(self.n_iter, self.data_reader)
            self.binning.process_args(args)

    self.output_filename = args.output

    if args.config_from_file:
        if not args.scheme:
            raise ValueError('A scheme must be specified.')
        else:
            self.load_config_from_west(args.scheme)
    elif args.states:
        self.parse_cmdline_states(args.states)
    elif args.states_from_file:
        self.load_state_file(args.states_from_file)
    elif args.states_from_function:
        self.load_states_from_function(get_object(args.states_from_function, path=['.']))

    if self.states and len(self.states) < 2:
        raise ValueError('zero, two, or more macrostates are required')

    # self.output_file = WESTPAH5File(args.output, 'w', creating_program=True)
    log.debug('state list: {!r}'.format(self.states))

    self.subsample = args.subsample if args.subsample is not None else False

def process_args(self, args):
    self.progress.process_args(args)
    self.data_reader.process_args(args)
    with self.data_reader:
        self.dssynth.h5filename = self.data_reader.we_h5filename
        self.dssynth.process_args(args)
        self.binning.process_args(args)

    if args.states:
        self.parse_cmdline_states(args.states)
    elif args.states_from_file:
        self.load_state_file(args.states_from_file)
    elif args.states_from_function:
        self.load_states_from_function(get_object(args.states_from_function, path=['.']))

    if self.states and len(self.states) < 2:
        raise ValueError('zero, two, or more macrostates are required')

    # self.output_file = WESTPAH5File(args.output, 'w', creating_program=True)
    self.output_filename = args.output

    log.debug('state list: {!r}'.format(self.states))

def process_args(self, args):
    self.progress.process_args(args)
    self.data_reader.process_args(args)
    with self.data_reader:
        self.dssynth.h5filename = self.data_reader.we_h5filename
        self.dssynth.process_args(args)
        self.binning.process_args(args)

    if args.states:
        self.parse_cmdline_states(args.states)
    elif args.states_from_file:
        self.load_state_file(args.states_from_file)
    elif args.states_from_function:
        self.load_states_from_function(get_object(args.states_from_function, path=['.']))

    if self.states and len(self.states) < 2:
        raise ValueError('zero, two, or more macrostates are required')

    # self.output_file = WESTPAH5File(args.output, 'w', creating_program=True)
    self.output_filename = args.output

    log.debug('state list: {!r}'.format(self.states))

def __init__(self, rc=None):
    super(ExecutablePropagator, self).__init__(rc)

    # A mapping of environment variables to template strings which will be
    # added to the environment of all children launched.
    self.addtl_child_environ = dict()

    # A mapping of executable name ('propagator', 'pre_iteration', 'post_iteration') to
    # a dictionary of attributes like 'executable', 'stdout', 'stderr', 'environ', etc.
    self.exe_info = {}
    self.exe_info['propagator'] = {}
    self.exe_info['pre_iteration'] = {}
    self.exe_info['post_iteration'] = {}
    self.exe_info['get_pcoord'] = {}
    self.exe_info['gen_istate'] = {}

    # A mapping of data set name ('pcoord', 'coord', 'com', etc.) to a dictionary of
    # attributes like 'loader', 'dtype', etc.
    self.data_info = {}
    self.data_info['pcoord'] = {}

    # Validate configuration
    config = self.rc.config
    for key in [
        ('west', 'executable', 'propagator', 'executable'),
        ('west', 'data', 'data_refs', 'segment'),
        ('west', 'data', 'data_refs', 'basis_state'),
        ('west', 'data', 'data_refs', 'initial_state'),
    ]:
        config.require(key)

    self.segment_ref_template = config['west', 'data', 'data_refs', 'segment']
    self.basis_state_ref_template = config['west', 'data', 'data_refs', 'basis_state']
    self.initial_state_ref_template = config['west', 'data', 'data_refs', 'initial_state']

    # Load additional environment variables for all child processes
    self.addtl_child_environ.update({k: str(v) for k, v in (config['west', 'executable', 'environ'] or {}).items()})

    # Load configuration items relating to child processes
    for child_type in ('propagator', 'pre_iteration', 'post_iteration', 'get_pcoord', 'gen_istate'):
        child_info = config.get(['west', 'executable', child_type])
        if not child_info:
            continue

        info_prefix = ['west', 'executable', child_type]

        # require executable to be specified if anything is specified at all
        config.require(info_prefix + ['executable'])

        self.exe_info[child_type]['executable'] = child_info['executable']
        self.exe_info[child_type]['stdin'] = child_info.get('stdin', os.devnull)
        self.exe_info[child_type]['stdout'] = child_info.get('stdout', None)
        self.exe_info[child_type]['stderr'] = child_info.get('stderr', None)
        self.exe_info[child_type]['cwd'] = child_info.get('cwd', None)

        if child_type not in ('propagator', 'get_pcoord', 'gen_istate'):
            self.exe_info[child_type]['enabled'] = child_info.get('enabled', True)
        else:
            # for consistency, propagator, get_pcoord, and gen_istate can never be disabled
            self.exe_info[child_type]['enabled'] = True

        # apply environment modifications specific to this executable
        self.exe_info[child_type]['environ'] = {k: str(v) for k, v in (child_info.get('environ') or {}).items()}

    log.debug('exe_info: {!r}'.format(self.exe_info))

    # Load configuration items relating to dataset input
    self.data_info['pcoord'] = {'name': 'pcoord', 'loader': pcoord_loader, 'enabled': True, 'filename': None}
    dataset_configs = config.get(['west', 'executable', 'datasets']) or []
    for dsinfo in dataset_configs:
        try:
            dsname = dsinfo['name']
        except KeyError:
            raise ValueError('dataset specifications require a ``name`` field')

        if dsname != 'pcoord':
            check_bool(dsinfo.setdefault('enabled', True))
        else:
            # can never disable pcoord collection
            dsinfo['enabled'] = True

        loader_directive = dsinfo.get('loader')
        if loader_directive:
            loader = get_object(loader_directive)
        elif dsname != 'pcoord':
            loader = aux_data_loader
        else:
            # no explicit loader given for pcoord: keep the default pcoord_loader
            loader = pcoord_loader
        dsinfo['loader'] = loader

        self.data_info.setdefault(dsname, {}).update(dsinfo)

    log.debug('data_info: {!r}'.format(self.data_info))

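# A minimal sketch (not from the source) of one entry under west.executable.datasets as the
# loop above consumes it after parsing; the auxiliary dataset name and loader path are
# hypothetical. 'name' is required, 'enabled' defaults to True, and 'loader', when given,
# is resolved with get_object(); otherwise aux_data_loader is used for non-pcoord datasets.
example_dataset_config = {
    'name': 'rmsd',                      # hypothetical auxiliary dataset name
    'enabled': True,
    'loader': 'my_loaders.rmsd_loader',  # hypothetical dotted path passed to get_object()
}
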
def __init__(self, sim_manager, plugin_config):
    super(StringDriver, self).__init__()

    if not sim_manager.work_manager.is_master:
        return

    self.work_manager = sim_manager.work_manager
    self.sim_manager = sim_manager
    self.data_manager = sim_manager.data_manager
    self.system = sim_manager.system

    # Parameters from config file
    self.windowsize = plugin_config.get("windowsize", 10)
    self.tensor_windowsize = plugin_config.get("tensor_windowsize", self.windowsize)
    self.update_interval = plugin_config.get("update_interval", 10)
    self.initial_update = plugin_config.get("initial_update", 20)
    self.priority = plugin_config.get("priority", 0)

    self.write_avg_pos = check_bool(plugin_config.get("write_avgpos", True))
    self.do_update = check_bool(plugin_config.get("do_update", True))
    self.init_from_data = check_bool(plugin_config.get("init_from_data", True))
    self.update_metric_tensor = check_bool(plugin_config.get("do_tensor_update", False))

    # Try to load a supplied function to calculate the metric tensor, if provided.
    # Otherwise, set 'tensor_func' to None and take care of it later.
    # NOTE: if no tensor_func is provided, the metric tensor is automatically set to None
    # (i.e. the default dfunc behavior is used).
    try:
        methodname = plugin_config["tensor_function"]
        self.tensor_func = extloader.get_object(methodname)
    except Exception:
        self.tensor_func = None

    self.dfunc = self.get_dfunc_method(plugin_config)

    # Load the method to calculate the average position in a bin.
    # If the method is defined in an external module, bind it to this instance.
    ap = self.get_avgpos_method(plugin_config)
    if hasattr(ap, "__self__"):
        # already a bound method (e.g. self.avgpos_cartesian)
        self.get_avgpos = ap
    else:
        self.get_avgpos = types.MethodType(ap, self)

    # Get the initial set of string centers
    centers = self.get_initial_centers()
    ndim = centers.shape[1]

    # Grab the inverse metric tensor from the h5 file or the system, if provided; otherwise set to None
    self.inv_tensor = self.get_initial_tensor()

    try:
        sm_params = self.system.sm_params
    except AttributeError as e:
        log.error(
            "String Driver Error: system does not define sm_params. "
            "This is required and should be added to the system definition; {}".format(e)
        )
        raise

    # Initialize the string
    str_method = self.get_string_method(plugin_config)

    try:
        self.strings = str_method(centers, **sm_params)
    except (TypeError, AssertionError) as e:
        log.error("String Driver Error: Failed during initialization of string method: {}".format(e))
        raise

    # Update the BinMapper
    self.update_bin_mapper()

    # Register callback
    sim_manager.register_callback(sim_manager.prepare_new_iteration, self.prepare_new_iteration, self.priority)

    westpa.rc.pstatus("-westext.stringmethod -----------------\n")
    westpa.rc.pstatus("windowsize: {}\n".format(self.windowsize))
    westpa.rc.pstatus("update interval: {}\n".format(self.update_interval))
    westpa.rc.pstatus("initial update: {}\n".format(self.initial_update))
    westpa.rc.pstatus("priority: {}\n".format(self.priority))
    westpa.rc.pstatus("write average positions: {}\n".format(self.write_avg_pos))
    westpa.rc.pstatus("do update: {}\n".format(self.do_update))
    westpa.rc.pstatus("initialize from WE data: {}\n".format(self.init_from_data))
    westpa.rc.pstatus("----------------------------------------\n")
    westpa.rc.pflush()

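# A minimal sketch (not from the source) of a stringmethod plugin_config mapping as
# StringDriver.__init__ and its get_* helpers read it above; only keys actually consumed
# by that code are shown, values mirror the defaults in __init__, and the dotted paths
# are hypothetical.
example_string_plugin_config = {
    'plugin': 'westext.stringmethod.StringDriver',  # consumed by the plugin loader, not by __init__
    'windowsize': 10,
    'update_interval': 10,
    'initial_update': 20,
    'do_update': True,
    'string_method': 'default',                     # resolves to DefaultStringMethod
    'avgpos_method': 'cartesian',                   # built-in averaging (self.avgpos_cartesian)
    'dfunc_method': 'my_system.dfunc',              # hypothetical dotted path for extloader.get_object()
}
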