def apply_postprocessing(data, instance=None, results=None): """ Apply post-processing steps. Required: instance: Problem instance. results: Optimization results object. """ # if not data.options.runtime.logging == 'quiet': sys.stdout.write('[%8.2f] Applying Pyomo postprocessing actions\n' % (time.time() - start_time)) sys.stdout.flush() # options are of type ConfigValue, not raw strings / atomics. for config_value in data.options.postprocess: postprocess = pyutilib.misc.import_file(config_value, clear_cache=True) if "pyomo_postprocess" in dir(postprocess): postprocess.pyomo_postprocess(data.options, instance, results) for ep in ExtensionPoint(IPyomoScriptPostprocess): ep.apply(options=data.options, instance=instance, results=results) if data.options.runtime.profile_memory >= 1 and pympler_available: mem_used = pympler.muppy.get_size(pympler.muppy.get_objects()) if mem_used > data.local.max_memory: data.local.max_memory = mem_used print(" Total memory = %d bytes upon termination" % mem_used)
def test_plugin_interface(self):
    """Exercise the plugin registry lifecycle for an extension point:
    registration, activation, deactivation, and weakref-based cleanup."""

    class IFoo(Interface):
        pass

    class myFoo(Plugin):
        implements(IFoo)

    ep = ExtensionPoint(IFoo)
    # Declaring the class registers it on the interface, but with no
    # instances yet the extension point is empty.
    self.assertEqual(ep.extensions(), [])
    self.assertEqual(IFoo._plugins, {myFoo: {}})
    self.assertEqual(len(ep), 0)

    a = myFoo()
    # A new instance is tracked (via weakref) but starts inactive, so it
    # is still not reported by the extension point.
    self.assertEqual(ep.extensions(), [])
    self.assertEqual(IFoo._plugins, {
        myFoo: {
            0: (weakref.ref(a), False)
        },
    })
    self.assertEqual(len(ep), 0)

    a.activate()
    # Activation flips the registry flag and exposes the instance.
    self.assertEqual(ep.extensions(), [a])
    self.assertEqual(IFoo._plugins, {
        myFoo: {
            0: (weakref.ref(a), True)
        },
    })
    self.assertEqual(len(ep), 1)

    a.deactivate()
    # Deactivation hides the instance again but keeps the registry entry.
    self.assertEqual(ep.extensions(), [])
    self.assertEqual(IFoo._plugins, {
        myFoo: {
            0: (weakref.ref(a), False)
        },
    })
    self.assertEqual(len(ep), 0)

    # Free a and make sure the garbage collector collects it (so
    # that the weakref will be removed from IFoo._plugins)
    a = None
    gc.collect()
    gc.collect()
    gc.collect()
    self.assertEqual(ep.extensions(), [])
    self.assertEqual(IFoo._plugins, {myFoo: {}})
    self.assertEqual(len(ep), 0)
def load_extensions(names, ep_type):
    """Import each named extension module and return the registered
    plugins (from the ``ep_type`` extension point) whose class is a
    SingletonPlugin subclass defined in one of those modules.
    """
    import pyomo.environ
    registered = ExtensionPoint(ep_type)
    collected = []
    for extension_name in names:
        module, _ = load_external_module(extension_name)
        assert module is not None
        for member_name, member in inspect.getmembers(module,
                                                      inspect.isclass):
            # Skip the base class itself: issubclass() returns True when
            # the member *is* SingletonPlugin, so filter on the name.
            if member_name == "SingletonPlugin":
                continue
            if not issubclass(member, SingletonPlugin):
                continue
            collected.extend(plugin for plugin in registered(all=True)
                             if isinstance(plugin, member))
    return tuple(collected)
def process_results(data, instance=None, results=None, opt=None):
    """
    Process optimization results.

    Required:
        instance: Problem instance.
        results: Optimization results object.
        opt: Optimizer object.
    """
    #
    if not data.options.runtime.logging == 'quiet':
        sys.stdout.write('[%8.2f] Processing results\n' %
                         (time.time() - start_time))
        sys.stdout.flush()
    #
    # Optionally echo the solver log file to stdout.
    if data.options.postsolve.print_logfile:
        print("")
        print("==========================================================")
        print("Solver Logfile: " + str(opt._log_file))
        print("==========================================================")
        print("")
        with open(opt._log_file, "r") as INPUT:
            for line in INPUT:
                sys.stdout.write(line)
        print("==========================================================")
        print("Solver Logfile - END")
        print("==========================================================")
    #
    try:
        # transform the results object into human-readable names.
        instance.solutions.store_to(results)
    except Exception:
        print("Problem updating solver results")
        raise
    #
    # When results are not being shown interactively, persist them to a
    # file whose name/format follow the postsolve options.
    if not data.options.postsolve.show_results:
        if data.options.postsolve.save_results:
            results_file = data.options.postsolve.save_results
        elif data.options.postsolve.results_format == 'yaml':
            results_file = 'results.yml'
        else:
            results_file = 'results.json'
        results.write(filename=results_file,
                      format=data.options.postsolve.results_format)
        if not data.options.runtime.logging == 'quiet':
            print("    Number of solutions: " + str(len(results.solution)))
            if len(results.solution) > 0:
                print("    Solution Information")
                print("      Gap: " + str(results.solution[0].gap))
                print("      Status: " + str(results.solution[0].status))
                if len(results.solution[0].objective) == 1:
                    key = list(results.solution[0].objective.keys())[0]
                    print("      Function Value: " + str(
                        results.solution[0].objective[key]['Value']))
            print("    Solver results file: " + results_file)
    #
    #ep = ExtensionPoint(IPyomoScriptPrintResults)
    if data.options.postsolve.show_results:
        print("")
        results.write(num=1,
                      format=data.options.postsolve.results_format)
        print("")
    #
    if data.options.postsolve.summary:
        print("")
        print("==========================================================")
        print("Solution Summary")
        print("==========================================================")
        if len(results.solution(0).variable) > 0:
            print("")
            display(instance)
            print("")
        else:
            print("No solutions reported by solver.")
    #
    for ep in ExtensionPoint(IPyomoScriptPrintResults):
        ep.apply(options=data.options, instance=instance, results=results)
    #
    for ep in ExtensionPoint(IPyomoScriptSaveResults):
        ep.apply(options=data.options, instance=instance, results=results)
    #
    # Optional memory profiling: track the high-water mark on data.local.
    if data.options.runtime.profile_memory >= 1 and pympler_available:
        global memory_data
        mem_used = pympler.muppy.get_size(pympler.muppy.get_objects())
        if mem_used > data.local.max_memory:
            data.local.max_memory = mem_used
        print("   Total memory = %d bytes following results processing" %
              mem_used)
def create_model(data):
    """
    Create instance of Pyomo model.

    Return:
        model:      Model object.
        instance:   Problem instance.
        symbol_map: Symbol map created when writing model to a file.
        filename:   Filename that a model instance was written to.
    """
    #
    if not data.options.runtime.logging == 'quiet':
        sys.stdout.write('[%8.2f] Creating model\n' %
                         (time.time() - start_time))
        sys.stdout.flush()
    #
    if data.options.runtime.profile_memory >= 1 and pympler_available:
        global memory_data
        mem_used = pympler.muppy.get_size(pympler.muppy.get_objects())
        data.local.max_memory = mem_used
        print("   Total memory = %d bytes prior to model construction" %
              mem_used)
    #
    # Find the Model objects
    #
    # Scan the user module for Model objects; the id() set guards against
    # counting the same object twice when it is bound to several names.
    _models = {}
    _model_IDS = set()
    for _name, _obj in iteritems(data.local.usermodel.__dict__):
        if isinstance(_obj, Model) and id(_obj) not in _model_IDS:
            _models[_name] = _obj
            _model_IDS.add(id(_obj))
    model_name = data.options.model.object_name
    if len(_models) == 1:
        _name = list(_models.keys())[0]
        if model_name is None:
            model_name = _name
        elif model_name != _name:
            msg = "Model '%s' is not defined in file '%s'!"
            raise SystemExit(msg % (model_name, data.options.model.filename))
    elif len(_models) > 1:
        if model_name is None:
            msg = "Multiple models defined in file '%s'!"
            raise SystemExit(msg % data.options.model.filename)
        elif not model_name in _models:
            msg = "Unknown model '%s' in file '%s'!"
            raise SystemExit(msg % (model_name, data.options.model.filename))

    # Either a model object exists in the user module, or a
    # 'pyomo_create_model' plugin builds one — never both.
    ep = ExtensionPoint(IPyomoScriptCreateModel)
    if model_name is None:
        if len(ep) == 0:
            msg = "A model is not defined and the 'pyomo_create_model' is not "\
                  "provided in module %s"
            raise SystemExit(msg % data.options.model.filename)
        elif len(ep) > 1:
            msg = 'Multiple model construction plugins have been registered in module %s!'
            raise SystemExit(msg % data.options.model.filename)
        else:
            model_options = data.options.model.options.value()
            tick = time.time()
            model = ep.service().apply(
                options=pyutilib.misc.Container(*data.options),
                model_options=pyutilib.misc.Container(*model_options))
            if data.options.runtime.report_timing is True:
                print("      %6.2f seconds required to construct instance" %
                      (time.time() - tick))
                data.local.time_initial_import = None
                tick = time.time()
    else:
        if model_name not in _models:
            msg = "Model '%s' is not defined in file '%s'!"
            raise SystemExit(msg % (model_name, data.options.model.filename))
        model = _models[model_name]
        if model is None:
            msg = "'%s' object is 'None' in module %s"
            raise SystemExit(msg % (model_name, data.options.model.filename))
        elif len(ep) > 0:
            msg = "Model construction function 'create_model' defined in " \
                  "file '%s', but model is already constructed!"
            raise SystemExit(msg % data.options.model.filename)
    #
    # Print model
    #
    for ep in ExtensionPoint(IPyomoScriptPrintModel):
        ep.apply(options=data.options, model=model)
    #
    # Create Problem Instance
    #
    ep = ExtensionPoint(IPyomoScriptCreateDataPortal)
    if len(ep) > 1:
        msg = 'Multiple model data construction plugins have been registered!'
        raise SystemExit(msg)
    if len(ep) == 1:
        modeldata = ep.service().apply(options=data.options, model=model)
    else:
        modeldata = DataPortal()

    if model._constructed:
        #
        # TODO: use a better test for ConcreteModel
        #
        instance = model
        if data.options.runtime.report_timing is True and not data.local.time_initial_import is None:
            print("      %6.2f seconds required to construct instance" %
                  (data.local.time_initial_import))
    else:
        tick = time.time()
        if len(data.options.data.files) > 1:
            #
            # Load a list of *.dat files
            #
            for file in data.options.data.files:
                suffix = (file).split(".")[-1]
                if suffix != "dat":
                    msg = 'When specifiying multiple data files, they must all ' \
                          'be *.dat files.  File specified: %s'
                    raise SystemExit(msg % str(file))
                modeldata.load(filename=file, model=model)
            instance = model.create_instance(
                modeldata,
                namespaces=data.options.data.namespaces,
                profile_memory=data.options.runtime.profile_memory,
                report_timing=data.options.runtime.report_timing)
        elif len(data.options.data.files) == 1:
            #
            # Load a *.dat file or process a *.py data file
            #
            suffix = (data.options.data.files[0]).split(".")[-1].lower()
            if suffix == "dat":
                instance = model.create_instance(
                    data.options.data.files[0],
                    namespaces=data.options.data.namespaces,
                    profile_memory=data.options.runtime.profile_memory,
                    report_timing=data.options.runtime.report_timing)
            elif suffix == "py":
                # A *.py data file either supplies a 'modeldata' object
                # directly, or relies on the pyomo_create_dataportal plugin.
                userdata = pyutilib.misc.import_file(
                    data.options.data.files[0], clear_cache=True)
                if "modeldata" in dir(userdata):
                    if len(ep) == 1:
                        msg = "Cannot apply 'pyomo_create_modeldata' and use the" \
                              " 'modeldata' object that is provided in the model"
                        raise SystemExit(msg)
                    if userdata.modeldata is None:
                        msg = "'modeldata' object is 'None' in module %s"
                        raise SystemExit(msg % str(data.options.data.files[0]))
                    modeldata = userdata.modeldata
                else:
                    if len(ep) == 0:
                        msg = "Neither 'modeldata' nor 'pyomo_create_dataportal' " \
                              'is defined in module %s'
                        raise SystemExit(msg % str(data.options.data.files[0]))
                modeldata.read(model)
                instance = model.create_instance(
                    modeldata,
                    namespaces=data.options.data.namespaces,
                    profile_memory=data.options.runtime.profile_memory,
                    report_timing=data.options.runtime.report_timing)
            elif suffix == "yml" or suffix == 'yaml':
                # NOTE(review): yaml.load without an explicit Loader is
                # unsafe on untrusted input unless yaml_load_args supplies
                # one — verify yaml_load_args at module level.
                modeldata = yaml.load(open(data.options.data.files[0]),
                                      **yaml_load_args)
                instance = model.create_instance(
                    modeldata,
                    namespaces=data.options.data.namespaces,
                    profile_memory=data.options.runtime.profile_memory,
                    report_timing=data.options.runtime.report_timing)
            else:
                raise ValueError("Unknown data file type: " +
                                 data.options.data.files[0])
        else:
            instance = model.create_instance(
                modeldata,
                namespaces=data.options.data.namespaces,
                profile_memory=data.options.runtime.profile_memory,
                report_timing=data.options.runtime.report_timing)
        if data.options.runtime.report_timing is True:
            print("      %6.2f seconds required to construct instance" %
                  (time.time() - tick))
    #
    # Apply instance-modification plugins and requested transformations.
    modify_start_time = time.time()
    for ep in ExtensionPoint(IPyomoScriptModifyInstance):
        if data.options.runtime.report_timing is True:
            tick = time.time()
        ep.apply(options=data.options, model=model, instance=instance)
        if data.options.runtime.report_timing is True:
            print("      %6.2f seconds to apply %s" %
                  (time.time() - tick, type(ep)))
            tick = time.time()
    #
    for transformation in data.options.transform:
        with TransformationFactory(transformation) as xfrm:
            instance = xfrm.create_using(instance)
            if instance is None:
                raise SystemExit("Unexpected error while applying "
                                 "transformation '%s'" % transformation)
    #
    if data.options.runtime.report_timing is True:
        total_time = time.time() - modify_start_time
        print("      %6.2f seconds required for problem transformations" %
              total_time)

    if logger.isEnabledFor(logging.DEBUG):
        print("MODEL INSTANCE")
        instance.pprint()
        print("")

    for ep in ExtensionPoint(IPyomoScriptPrintInstance):
        ep.apply(options=data.options, instance=instance)

    # Optionally write the instance to a file; instance.write() returns
    # the actual filename and a symbol-map id.
    fname = None
    smap_id = None
    if not data.options.model.save_file is None:
        if data.options.runtime.report_timing is True:
            write_start_time = time.time()

        if data.options.model.save_file == True:
            # save_file == True (not a name): derive the filename from
            # the first data file and the requested model format.
            if data.local.model_format in (ProblemFormat.cpxlp,
                                           ProblemFormat.lpxlp):
                fname = (data.options.data.files[0])[:-3] + 'lp'
            else:
                fname = (data.options.data.files[0])[:-3] + str(
                    data.local.model_format)
            format = data.local.model_format
        else:
            fname = data.options.model.save_file
            format = data.options.model.save_format

        io_options = {}
        if data.options.model.symbolic_solver_labels:
            io_options['symbolic_solver_labels'] = True
        if data.options.model.file_determinism != 1:
            io_options[
                'file_determinism'] = data.options.model.file_determinism
        (fname, smap_id) = instance.write(filename=fname,
                                          format=format,
                                          io_options=io_options)

        if not data.options.runtime.logging == 'quiet':
            if not os.path.exists(fname):
                print("ERROR: file " + fname + " has not been created!")
            else:
                print("Model written to file '" + str(fname) + "'")

        if data.options.runtime.report_timing is True:
            total_time = time.time() - write_start_time
            print("      %6.2f seconds required to write file" % total_time)

        if data.options.runtime.profile_memory >= 2 and pympler_available:
            print("")
            print("      Summary of objects following file output")
            post_file_output_summary = pympler.summary.summarize(
                pympler.muppy.get_objects())
            pympler.summary.print_(post_file_output_summary, limit=100)
            print("")

    for ep in ExtensionPoint(IPyomoScriptSaveInstance):
        ep.apply(options=data.options, instance=instance)

    if data.options.runtime.profile_memory >= 1 and pympler_available:
        mem_used = pympler.muppy.get_size(pympler.muppy.get_objects())
        if mem_used > data.local.max_memory:
            data.local.max_memory = mem_used
        print("   Total memory = %d bytes following Pyomo instance creation" %
              mem_used)

    return pyutilib.misc.Options(model=model,
                                 instance=instance,
                                 smap_id=smap_id,
                                 filename=fname,
                                 local=data.local)
def apply_preprocessing(data, parser=None):
    """
    Execute preprocessing files

    Required:
        parser: Command line parser object

    Returned:
        error: This is true if an error has occurred.
    """
    data.local = pyutilib.misc.Options()
    #
    if not data.options.runtime.logging == 'quiet':
        sys.stdout.write('[%8.2f] Applying Pyomo preprocessing actions\n' %
                         (time.time() - start_time))
        sys.stdout.flush()
    #
    global filter_excepthook
    #
    #
    # Setup solver and model
    #
    #
    if len(data.options.model.filename) == 0:
        parser.print_help()
        data.error = True
        return data
    #
    # Importing a preprocess file registers its side effects; the module
    # object itself is not used further here.
    if not data.options.preprocess is None:
        for config_value in data.options.preprocess:
            preprocess = pyutilib.misc.import_file(config_value,
                                                   clear_cache=True)
    #
    for ep in ExtensionPoint(IPyomoScriptPreprocess):
        ep.apply(options=data.options)
    #
    # Verify that files exist
    #
    for file in [data.options.model.filename
                ] + data.options.data.files.value():
        if not os.path.exists(file):
            raise IOError("File " + file + " does not exist!")
    #
    # Import the user model; filter_excepthook tells the global exception
    # hook that any failure happened during this user-module import.
    filter_excepthook = True
    tick = time.time()
    data.local.usermodel = pyutilib.misc.import_file(
        data.options.model.filename, clear_cache=True)
    data.local.time_initial_import = time.time() - tick
    filter_excepthook = False

    # For each model-API hook defined in the user module, register an
    # adapter plugin that forwards to the user function.  Each TMP is
    # instantiated immediately, so the closure over 'key' is bound to the
    # current loop value (no late-binding issue).
    usermodel_dir = dir(data.local.usermodel)
    data.local._usermodel_plugins = []
    for key in modelapi:
        if key in usermodel_dir:

            class TMP(Plugin):
                implements(modelapi[key], service=True)

                def __init__(self):
                    self.fn = getattr(data.local.usermodel, key)

                def apply(self, **kwds):
                    return self.fn(**kwds)

            tmp = TMP()
            data.local._usermodel_plugins.append(tmp)

    if 'pyomo_preprocess' in usermodel_dir:
        if data.options.model.object_name in usermodel_dir:
            msg = "Preprocessing function 'pyomo_preprocess' defined in file" \
                  " '%s', but model is already constructed!"
            raise SystemExit(msg % data.options.model.filename)
        getattr(data.local.usermodel,
                'pyomo_preprocess')(options=data.options)
    #
    return data
def cleanup():
    """Deactivate every plugin registered against any model-API
    extension point."""
    for interface in modelapi.values():
        for plugin in ExtensionPoint(interface):
            plugin.deactivate()
def create(self, args):
    """Instantiate the registered expression class with ``args``,
    reversing the argument order first when ``self._swap`` is set."""
    if self._swap:
        args = list(args)
        args.reverse()
    return self._cls(args)


def ExpressionFactory(name=None, args=None):
    """Create a registered expression by name, or list registered names.

    With ``name is None``, return an iterable of the names of all
    registered expression plugins.  Otherwise look up the named service
    and build the expression from ``args``.

    Bug fix: the argument default was the mutable literal ``[]``, which
    is evaluated once at definition time and shared across all calls; a
    callee that mutates it would corrupt later calls.  A ``None``
    sentinel replaced by a fresh list preserves the old interface.
    """
    if args is None:
        args = []
    ep = ExpressionFactory.ep
    if name is None:
        return map(lambda x: x.name, ep())
    return ep.service(name).create(args)
ExpressionFactory.ep = ExtensionPoint(IPyomoExpression)


class IModelComponent(Interface):
    """Extension-point interface for model component plugins."""
    pass

ModelComponentFactory = CreatePluginFactory(IModelComponent)

def register_component(cls, description):
    """Register ``cls`` as a model component plugin under its class
    name, with ``description`` as its documentation string."""
    # Defining the class performs the registration (implements/alias are
    # declarative hooks); the class object itself is not used afterward.
    class TMP(Plugin):
        implements(IModelComponent, service=False)
        alias(cls.__name__, description)
        component = cls
def convert_problem(args,
                    target_problem_type,
                    valid_problem_types,
                    has_capability=lambda x: False,
                    **kwds):
    """
    Convert a problem, defined by the 'args' tuple, into another
    problem.

    Returns a ``(problem_files, problem_type, symbol_map)`` tuple.

    Raises:
        ConverterError: if no valid/target types are given, the argument
            list is empty, the file suffix is unrecognized, or no
            registered converter can perform the conversion.
    """
    if len(valid_problem_types) == 0:
        raise ConverterError("No valid problem types")
    if not (target_problem_type is None or \
             target_problem_type in valid_problem_types):
        msg = "Problem type '%s' is not valid"
        raise ConverterError(msg % str(target_problem_type))
    if len(args) == 0:
        raise ConverterError("Empty argument list")
    #
    # Setup list of source problem types
    #
    tmp = args[0]
    if isinstance(tmp, basestring):
        fname = tmp.split(os.sep)[-1]
        if os.sep in fname:  #pragma:nocover
            fname = tmp.split(os.sep)[-1]
        source_ptype = [guess_format(fname)]
        # Bug fix: this test used "is [None]", an identity comparison
        # against a fresh list literal that is always False, making the
        # unknown-suffix error unreachable.  Use equality instead.
        if source_ptype == [None]:
            raise ConverterError("Unknown suffix type: " + tmp)
    else:
        source_ptype = args[0].valid_problem_types()
    #
    # Setup list of valid problem types, ordered so the target type is
    # tried first, then the source type, then everything else.
    #
    valid_ptypes = copy.copy(valid_problem_types)
    if target_problem_type is not None:
        valid_ptypes.remove(target_problem_type)
        valid_ptypes = [target_problem_type] + valid_ptypes
    if source_ptype[0] in valid_ptypes:
        valid_ptypes.remove(source_ptype[0])
        valid_ptypes = [source_ptype[0]] + valid_ptypes
    #
    # Iterate over the valid problem types, starting with the target type
    #
    # Apply conversion and return for first match
    #
    for ptype in valid_ptypes:
        for s_ptype in source_ptype:
            #
            # If the source and target types are equal, then simply the return
            # the args (return just the first element of the tuple if it has length
            # one.
            #
            if s_ptype == ptype:
                return (args, ptype, None)
            #
            # Otherwise, try to convert
            #
            for converter in ExtensionPoint(IProblemConverter):
                if converter.can_convert(s_ptype, ptype):
                    tmp = [s_ptype, ptype] + list(args)
                    tmp = tuple(tmp)
                    # propagate input keywords to the converter.
                    # Bug fix: previously the caller's kwds dict was
                    # aliased and mutated; copy it instead.
                    tmpkw = dict(kwds)
                    tmpkw['capabilities'] = has_capability
                    problem_files, symbol_map = converter.apply(*tmp, **tmpkw)
                    return problem_files, ptype, symbol_map

    msg = 'No conversion possible.  Source problem type: %s.  Valid target ' \
          'types: %s'
    raise ConverterError(msg % (str(source_ptype[0]),
                                list(map(str, valid_ptypes))))
def test_plugin_factory(self):
    """Exercise PluginFactory: service listing, construction, docs,
    class lookup, and activate/deactivate interaction with the
    single-service ExtensionPoint accessor."""

    class IFoo(Interface):
        pass

    ep = ExtensionPoint(IFoo)

    class myFoo(Plugin):
        implements(IFoo)
        alias('my_foo', 'myFoo docs')

    factory = PluginFactory(IFoo)
    # Lookup by alias; unknown names yield None/empty-string fallbacks.
    self.assertEqual(factory.services(), ['my_foo'])
    self.assertIsInstance(factory('my_foo'), myFoo)
    self.assertIsNone(factory('unknown'), None)
    self.assertEqual(factory.doc('my_foo'), 'myFoo docs')
    self.assertEqual(factory.doc('unknown'), '')
    self.assertIs(factory.get_class('my_foo'), myFoo)
    self.assertIsNone(factory.get_class('unknown'))

    a = myFoo()
    b = myFoo()
    # Fresh instances start disabled, so no unique service is available.
    self.assertFalse(a.enabled())
    self.assertFalse(b.enabled())
    self.assertIsNone(ep.service())
    a.activate()
    self.assertTrue(a.enabled())
    self.assertFalse(b.enabled())
    self.assertIs(ep.service(), a)
    # Factory-level deactivate applies to every instance of the alias.
    factory.deactivate('my_foo')
    self.assertFalse(a.enabled())
    self.assertFalse(b.enabled())
    self.assertIsNone(ep.service())
    b.activate()
    self.assertFalse(a.enabled())
    self.assertTrue(b.enabled())
    self.assertIs(ep.service(), b)

    # Note: Run the GC to ensure the instance created by
    # factory('my_foo') above has been removed.
    gc.collect()
    gc.collect()
    gc.collect()

    # Factory-level activate enables all remaining instances, making
    # service() ambiguous (two active services -> PluginError).
    factory.activate('my_foo')
    self.assertTrue(a.enabled())
    self.assertTrue(b.enabled())
    with self.assertRaisesRegex(
            PluginError,
            r"The ExtensionPoint does not have a unique service! "
            r"2 services are defined for interface 'IFoo' \(key=None\)."):
        self.assertIsNone(ep.service())
    a.deactivate()
    self.assertFalse(a.enabled())
    self.assertTrue(b.enabled())
    self.assertIs(ep.service(), b)

    # Activating/deactivating an unknown alias is a silent no-op.
    factory.activate('unknown')
    self.assertFalse(a.enabled())
    self.assertTrue(b.enabled())
    self.assertIs(ep.service(), b)
    factory.deactivate('unknown')
    self.assertFalse(a.enabled())
    self.assertTrue(b.enabled())
    self.assertIs(ep.service(), b)
def construct_scenario_instance(self,
                                scenario_name,
                                scenario_tree,
                                profile_memory=False,
                                output_instance_construction_time=False,
                                compile_instance=False,
                                verbose=False):
    """Build the Pyomo model instance for one scenario.

    The instance is constructed from, in priority order: a user model
    callback, a cloned concrete reference model, scenario-based data
    files (.dat/.yaml), or node-based .dat files merged through a
    DataPortal.

    Raises ValueError/RuntimeError on unknown scenarios or missing data
    files; re-raises (after logging) any construction failure.
    """
    assert not self._closed
    if not scenario_tree.contains_scenario(scenario_name):
        raise ValueError("ScenarioTree does not contain scenario "
                         "with name %s." % (scenario_name))

    scenario = scenario_tree.get_scenario(scenario_name)
    node_name_list = [n._name for n in scenario._node_list]

    if verbose:
        print("Creating instance for scenario=%s" % (scenario_name))

    scenario_instance = None

    try:
        if self._model_callback is not None:
            assert self._model_object is None
            try:
                _scenario_tree_arg = None
                # new callback signature
                if (self._scenario_tree_filename is not None) and \
                   self._scenario_tree_filename.endswith('.dat'):
                    # we started with a .dat file, so
                    # send the PySP scenario tree
                    _scenario_tree_arg = scenario_tree
                elif self._scenario_tree_model is not None:
                    # We started from a Pyomo
                    # scenario tree model instance, or a
                    # networkx tree.
                    _scenario_tree_arg = self._scenario_tree_model
                else:
                    # send the PySP scenario tree
                    _scenario_tree_arg = scenario_tree
                scenario_instance = self._model_callback(_scenario_tree_arg,
                                                         scenario_name,
                                                         node_name_list)
            except TypeError:
                # old callback signature
                # TODO:
                #logger.warning(
                #    "DEPRECATED: The 'pysp_instance_creation_callback' function "
                #    "signature has changed. An additional argument should be "
                #    "added to the beginning of the arguments list that will be "
                #    "set to the user provided scenario tree object when called "
                #    "by PySP (e.g., a Pyomo scenario tree model instance, "
                #    "a networkx tree, or a PySP ScenarioTree object.")
                scenario_instance = self._model_callback(scenario_name,
                                                         node_name_list)
        elif self._model_object is not None:
            if (not isinstance(self._model_object, AbstractModel)) or \
               (self._model_object.is_constructed()):
                # Concrete (already constructed) reference model: clone it.
                scenario_instance = self._model_object.clone()
            elif scenario_tree._scenario_based_data:
                assert self.data_directory() is not None
                scenario_data_filename = \
                    os.path.join(self.data_directory(),
                                 str(scenario_name))
                # JPW: The following is a hack to support
                #      initialization of block instances, which
                #      don't work with .dat files at the
                #      moment. Actually, it's not that bad of a
                #      hack - it just needs to be extended a bit,
                #      and expanded into the node-based data read
                #      logic (where yaml is completely ignored at
                #      the moment.
                if os.path.exists(scenario_data_filename+'.dat'):
                    scenario_data_filename = \
                        scenario_data_filename + ".dat"
                    data = None
                elif os.path.exists(scenario_data_filename+'.yaml'):
                    if not has_yaml:
                        raise ValueError(
                            "Found yaml data file for scenario '%s' "
                            "but he PyYAML module is not available"
                            % (scenario_name))
                    scenario_data_filename = \
                        scenario_data_filename+".yaml"
                    # NOTE(review): yaml.load without an explicit Loader
                    # is unsafe on untrusted data — consider safe_load.
                    with open(scenario_data_filename) as f:
                        data = yaml.load(f)
                else:
                    raise RuntimeError(
                        "Cannot find a data file for scenario '%s' "
                        "in directory: %s\nRecognized formats: .dat, "
                        ".yaml" % (scenario_name,
                                   self.data_directory()))
                if verbose:
                    print("Data for scenario=%s loads from file=%s"
                          % (scenario_name, scenario_data_filename))
                if data is None:
                    scenario_instance = \
                        self._model_object.create_instance(
                            filename=scenario_data_filename,
                            profile_memory=profile_memory,
                            report_timing=output_instance_construction_time)
                else:
                    scenario_instance = \
                        self._model_object.create_instance(
                            data,
                            profile_memory=profile_memory,
                            report_timing=output_instance_construction_time)
            else:
                # Node-based data: one .dat file per tree node on the
                # scenario's path, merged through a single DataPortal.
                assert self.data_directory() is not None
                data_files = []
                for node_name in node_name_list:
                    node_data_filename = \
                        os.path.join(self.data_directory(),
                                     str(node_name)+".dat")
                    if not os.path.exists(node_data_filename):
                        raise RuntimeError(
                            "Cannot find a data file for scenario tree "
                            "node '%s' in directory: %s\nRecognized "
                            "formats: .dat" % (node_name,
                                               self.data_directory()))
                    data_files.append(node_data_filename)

                scenario_data = DataPortal(model=self._model_object)
                for data_file in data_files:
                    if verbose:
                        print("Node data for scenario=%s partially "
                              "loading from file=%s"
                              % (scenario_name, data_file))
                    scenario_data.load(filename=data_file)

                scenario_instance = self._model_object.create_instance(
                    scenario_data,
                    profile_memory=profile_memory,
                    report_timing=output_instance_construction_time)
        else:
            raise RuntimeError("Unable to construct scenario instance. "
                               "Neither a reference model or callback "
                               "is defined.")

        # name each instance with the scenario name
        scenario_instance._name = scenario_name

        # apply each of the post-instance creation plugins. this
        # really shouldn't be associated (in terms of naming) with the
        # pyomo script - this should be rectified with a workflow
        # re-work. it is unclear how this interacts, or doesn't, with
        # the preprocessors.
        # NOTE(review): the first 'ep' assignment is immediately shadowed
        # by the loop variable, and 'reference_model' is not defined in
        # this method — this branch would raise NameError if any
        # IPyomoScriptModifyInstance plugin is registered; confirm
        # against callers before relying on it.
        ep = ExtensionPoint(IPyomoScriptModifyInstance)
        for ep in ExtensionPoint(IPyomoScriptModifyInstance):
            logger.warning(
                "DEPRECATED: IPyomoScriptModifyInstance extension "
                "point callbacks will be ignored by PySP in the future")
            ep.apply(options=None,
                     model=reference_model,
                     instance=scenario_instance)

        if compile_instance:
            from pyomo.repn.beta.matrix import \
                compile_block_linear_constraints
            compile_block_linear_constraints(
                scenario_instance,
                "_PySP_compiled_linear_constraints",
                verbose=verbose)

    except:
        logger.error("Failed to create model instance for scenario=%s"
                     % (scenario_name))
        raise

    return scenario_instance
class OptProblem(object):
    """
    A class that defines an application that
    can be optimized by a COLIN optimizer via system calls.

    Subclasses override ``create_point`` and the response methods
    (``function_value``, ``gradient``, ...) to define the problem;
    ``main`` drives one file-based evaluation.
    """

    # Extension point providing pluggable I/O managers (looked up by
    # format name in main()).
    io_manager = ExtensionPoint(IBlackBoxOptProblemIO)

    def __init__(self):
        """
        The constructor.  Derived classes should define
        the response types.

        By default, only function evaluations are supported
        in an OptProblem instance.
        """
        self.response_types = [response_enum.FunctionValue]

    def main(self, argv, format='colin'):
        """
        The main routine for parsing the command-line and executing
        the evaluation.

        Expects argv = [prog, input_file, output_file, log_file]: reads
        the point from input_file, computes the requested responses, and
        writes them to output_file via the format's I/O manager.
        """
        if len(argv) < 3:  #pragma:nocover
            print(argv[0] + " <input> <output> <log>")
            sys.exit(1)
        #
        # Get enum strings
        #
        self.response_str = list(map(str, self.response_types))
        #
        # Parse XML input file
        #
        iomngr = OptProblem.io_manager.service(format)
        if iomngr is None:
            raise ValueError("Unknown IO format '%s' for COLIN OptProblem"
                             % str(format))
        if not os.path.exists(argv[1]):
            raise IOError("Unknown input file '%s'" % argv[1])
        self._compute_prefix(argv[1])
        point = self.create_point()
        point, requests = iomngr.read(argv[1], point)
        self.validate(point)
        response = self._compute_results(point, requests)
        iomngr.write(argv[2], response)

    def create_point(self):
        """
        Create the point type for this domain.
        This method is over-written to customized
        an OptProblem for the search domain.
        """
        return None  #pragma:nocover

    def function_value(self, point):
        """
        Compute a function value.
        """
        return None  #pragma:nocover

    def function_values(self, point):  #pragma:nocover
        """
        Compute a list of function values.
        """
        val = self.function_value(point)
        if val is None:
            return []
        else:
            return [val]

    def gradient(self, point):
        """
        Compute a function gradient.
        """
        return []  #pragma:nocover

    def hessian(self, point):
        """
        Compute a function Hessian matrix.
        """
        return {}  #pragma:nocover

    def nonlinear_constraint_values(self, point):
        """
        Compute nonlinear constraint values.
        """
        return []  #pragma:nocover

    def jacobian(self, point):
        """
        Compute the Jacobian.
        """
        return {}  #pragma:nocover

    def _compute_results(self, point, requests):
        """
        Compute the requested results.

        Dispatches each request key to the matching response method;
        unsupported keys get an error string instead of a value.
        """
        response = {}
        for key in requests:
            if key not in self.response_str:
                response[
                    key] = "ERROR: Unsupported application request %s" % str(
                        key)
            #
            elif key == "FunctionValue":
                response[key] = self.function_value(point)
            elif key == "FunctionValues":
                response[key] = self.function_values(point)
            elif key == "Gradient":
                response[key] = self.gradient(point)
            elif key == "NonlinearConstraintValues":
                response[key] = self.nonlinear_constraint_values(point)
            elif key == "Jacobian":
                response[key] = self.jacobian(point)
            elif key == "Hessian":
                response[key] = self.hessian(point)
        #
        return response

    def _compute_prefix(self, filename):
        # Cache the input filename without its extension for use by
        # subclasses/IO managers.
        base, ext = os.path.splitext(filename)
        self.prefix = base

    def validate(self, point):  #pragma:nocover
        """
        This function should throw an exception if an error occurs
        """
        pass
class Model(SimpleBlock):
    """
    An optimization model.  By default, this defers construction of
    components until data is loaded.
    """

    # Extension point used by preprocess() consumers.
    preprocessor_ep = ExtensionPoint(IPyomoPresolver)

    # Attribute names reserved by the Block base class machinery.
    _Block_reserved_words = set()

    def __new__(cls, *args, **kwds):
        # Instantiating 'Model' directly is deprecated; silently redirect
        # to AbstractModel (subclasses construct normally).
        if cls != Model:
            return super(Model, cls).__new__(cls)

        logger.warning(
            """DEPRECATION WARNING: Using the 'Model' class is deprecated.  Please use the AbstractModel or ConcreteModel class instead.""")
        return AbstractModel.__new__(AbstractModel)

    def __init__(self, name='unknown', **kwargs):
        """Constructor"""
        #
        # NOTE: The 'ctype' keyword argument is not defined here.  Thus,
        # a model is treated as a 'Block' class type.  This simplifies
        # the definition of the block_data_objects() method, since we treat
        # Model and Block objects as the same.  Similarly, this avoids
        # the requirement to import PyomoModel.py in the block.py file.
        #
        SimpleBlock.__init__(self, **kwargs)
        self._name = name
        # Cached model statistics (lazily filled by compute_statistics()).
        self.statistics = Container()
        self.config = PyomoConfig()
        self.solutions = ModelSolutions(self)
        self.config.preprocessor = 'pyomo.model.simple_preprocessor'

    def compute_statistics(self, active=True):
        """
        Compute model statistics
        """
        # Statistics are computed at most once; a non-empty container is
        # treated as already populated.
        if len(self.statistics) > 0:
            return
        self.statistics.number_of_variables = 0
        self.statistics.number_of_constraints = 0
        self.statistics.number_of_objectives = 0
        for block in self.block_data_objects(active=active):
            # NOTE(review): these loops call self.component_map() rather
            # than block.component_map(), so the top-level components are
            # counted once per block -- looks like a bug; confirm against
            # upstream before relying on these counts.
            for data in self.component_map(Var, active=active).itervalues():
                self.statistics.number_of_variables += len(data)
            for data in self.component_map(Objective, active=active).itervalues():
                self.statistics.number_of_objectives += len(data)
            for data in self.component_map(Constraint, active=active).itervalues():
                self.statistics.number_of_constraints += len(data)

    def nvariables(self):
        """Return the number of variables (from cached statistics)."""
        self.compute_statistics()
        return self.statistics.number_of_variables

    def nconstraints(self):
        """Return the number of constraints (from cached statistics)."""
        self.compute_statistics()
        return self.statistics.number_of_constraints

    def nobjectives(self):
        """Return the number of objectives (from cached statistics)."""
        self.compute_statistics()
        return self.statistics.number_of_objectives

    def create_instance(self, filename=None, data=None, name=None,
                        namespace=None, namespaces=None,
                        profile_memory=0, report_timing=False,
                        **kwds):
        """
        Create a concrete instance of an abstract model, possibly using data
        read in from a file.

        Parameters
        ----------
        filename: `str`, optional
            The name of a Pyomo Data File that will be used to load data into
            the model.
        data: `dict`, optional
            A dictionary containing initialization data for the model to be
            used if there is no filename
        name: `str`, optional
            The name given to the model.
        namespace: `str`, optional
            A namespace used to select data.
        namespaces: `list`, optional
            A list of namespaces used to select data.
        profile_memory: `int`, optional
            A number that indicates the profiling level.
        report_timing: `bool`, optional
            Report timing statistics during construction.
        """
        #
        # Generate a warning if this is a concrete model but the
        # filename is specified.  A concrete model is already
        # constructed, so passing in a data file is a waste of time.
        #
        if self.is_constructed() and isinstance(filename, string_types):
            msg = "The filename=%s will not be loaded - supplied as an " \
                  "argument to the create_instance() method of a "\
                  "concrete instance with name=%s." % (filename, name)
            logger.warning(msg)

        # Deprecated keyword arguments are consumed (popped) so they do
        # not trip the unrecognized-kwds error below.
        if 'clone' in kwds:
            kwds.pop('clone')
            deprecation_warning(
                "Model.create_instance() no longer accepts the 'clone' "
                "argument: the base abstract model is always cloned.")
        if 'preprocess' in kwds:
            kwds.pop('preprocess')
            deprecation_warning(
                "Model.create_instance() no longer accepts the preprocess' "
                "argument: preprocessing is always deferred to when the "
                "model is sent to the solver")
        if kwds:
            msg = \
"""Model.create_instance() passed the following unrecognized keyword
arguments (which have been ignored):"""
            for k in kwds:
                msg = msg + "\n    '%s'" % (k, )
            logger.error(msg)

        # Calling create_instance() on an already-constructed model
        # degenerates to a clone.
        if self.is_constructed():
            deprecation_warning(
                "Cannot call Model.create_instance() on a constructed "
                "model; returning a clone of the current model instance.")
            return self.clone()

        if report_timing:
            pyomo.common.timing.report_timing()

        if name is None:
            name = self.name
        # 'filename' takes precedence over 'data' when both are given.
        if filename is not None:
            if data is not None:
                logger.warning(
                    "Model.create_instance() passed both 'filename' "
                    "and 'data' keyword arguments.  Ignoring the "
                    "'data' argument")
            data = filename
        if data is None:
            data = {}

        #
        # Clone the model and load the data
        #
        instance = self.clone()

        if name is not None:
            instance._name = name

        # If someone passed a rule for creating the instance, fire the
        # rule before constructing the components.
        if instance._rule is not None:
            instance._rule(instance)

        # Build the ordered namespace search list; None (the default
        # namespace) is always searched last.
        if namespaces:
            _namespaces = list(namespaces)
        else:
            _namespaces = []
        if namespace is not None:
            _namespaces.append(namespace)
        if None not in _namespaces:
            _namespaces.append(None)

        instance.load(data,
                      namespaces=_namespaces,
                      profile_memory=profile_memory)

        #
        # Indicate that the model is concrete/constructed
        #
        instance._constructed = True
        #
        # Change this class from "Abstract" to "Concrete".  It is
        # absolutely crazy that this is allowed in Python, but since the
        # AbstractModel and ConcreteModel are basically identical, we
        # can "reassign" the new concrete instance to be an instance of
        # ConcreteModel
        #
        instance.__class__ = ConcreteModel
        return instance

    def preprocess(self, preprocessor=None):
        """Apply the preprocess plugins defined by the user"""
        with PauseGC() as pgc:
            if preprocessor is None:
                preprocessor = self.config.preprocessor
            pyomo.common.PyomoAPIFactory(preprocessor)(self.config, model=self)

    def load(self, arg, namespaces=[None], profile_memory=0,
             report_timing=None):
        """
        Load the model with data from a file, dictionary or DataPortal object.
        """
        # NOTE(review): mutable default argument 'namespaces=[None]' --
        # appears safe because it is only iterated, never mutated; confirm.
        if report_timing is not None:
            deprecation_warning(
                "The report_timing argument to Model.load() is deprecated.  "
                "Use pyomo.common.timing.report_timing() to enable reporting "
                "construction timing")
        # Normalize the argument into a DataPortal (or handle the
        # deprecated SolverResults path and return early).
        if arg is None or isinstance(arg, basestring):
            dp = DataPortal(filename=arg, model=self)
        elif type(arg) is DataPortal:
            dp = arg
        elif type(arg) is dict:
            dp = DataPortal(data_dict=arg, model=self)
        elif isinstance(arg, SolverResults):
            if len(arg.solution):
                logger.warning(
                    """DEPRECATION WARNING: the Model.load() method is deprecated for loading solutions stored in SolverResults objects.  Call Model.solutions.load_from().""")
                self.solutions.load_from(arg)
            else:
                logger.warning(
                    """DEPRECATION WARNING: the Model.load() method is deprecated for loading solutions stored in SolverResults objects.  By default, results from solvers are immediately loaded into the original model instance.""")
            return
        else:
            # NOTE(review): message text is garbled ("model model ...
            # from with"); left unchanged here since it is runtime output.
            msg = "Cannot load model model data from with object of type '%s'"
            raise ValueError(msg % str(type(arg)))
        self._load_model_data(dp, namespaces, profile_memory=profile_memory)

    def _load_model_data(self, modeldata, namespaces, **kwds):
        """
        Load declarations from a DataPortal object.
        """
        #
        # As we are primarily generating objects here (and acyclic ones
        # at that), there is no need to run the GC until the entire
        # model is created.  Simple reference-counting should be
        # sufficient to keep memory use under control.
        #
        with PauseGC() as pgc:
            #
            # Unlike the standard method in the pympler summary
            # module, the tracker doesn't print 0-byte entries to pad
            # out the limit.
            #
            profile_memory = kwds.get('profile_memory', 0)

            if profile_memory >= 2 and pympler_available:
                # NOTE(review): mixes 'pympler.muppy.get_size' with bare
                # 'muppy.get_objects' -- presumably both names are
                # imported; verify at module top.
                mem_used = pympler.muppy.get_size(muppy.get_objects())
                print("")
                print("      Total memory = %d bytes prior to model "
                      "construction" % mem_used)

                if profile_memory >= 3:
                    gc.collect()
                    mem_used = pympler.muppy.get_size(muppy.get_objects())
                    print("      Total memory = %d bytes prior to model "
                          "construction (after garbage collection)" % mem_used)

            #
            # Do some error checking
            #
            for namespace in namespaces:
                if not namespace is None and not namespace in modeldata._data:
                    msg = "Cannot access undefined namespace: '%s'"
                    raise IOError(msg % namespace)

            #
            # Initialize each component in order.
            #
            for component_name, component in iteritems(self.component_map()):
                # Skip sub-models; only non-Model components are constructed.
                if component.ctype is Model:
                    continue
                self._initialize_component(modeldata, namespaces,
                                           component_name, profile_memory)

            # NOTE(review): dead timing/diagnostic code -- references
            # undefined names (start_time, clone_counter, clone_counters)
            # and would raise NameError if the guard were ever enabled.
            if False:
                total_time = time.time() - start_time
                if isinstance(component, IndexedComponent):
                    clen = len(component)
                else:
                    assert isinstance(component, Component)
                    clen = 1
                print("    %%6.%df seconds required to construct component=%s; %d indicies total" \
                          % (total_time>=0.005 and 2 or 0, component_name, clen) \
                          % total_time)
                tmp_clone_counter = expr_common.clone_counter
                if clone_counter != tmp_clone_counter:
                    clone_counter = tmp_clone_counter
                    print("             Cloning detected! (clone count: %d)" % clone_counters)

        # Note: As is, connectors are expanded when using command-line pyomo but not calling model.create(...) in a Python script.
        # John says this has to do with extension points which are called from commandline but not when writing scripts.
        # Uncommenting the next two lines switches this (command-line fails because it tries to expand connectors twice)
        #connector_expander = ConnectorExpander()
        #connector_expander.apply(instance=self)

        if profile_memory >= 2 and pympler_available:
            print("")
            print("      Summary of objects following instance construction")
            post_construction_summary = pympler.summary.summarize(
                pympler.muppy.get_objects())
            pympler.summary.print_(post_construction_summary, limit=100)
            print("")

    def _initialize_component(self, modeldata, namespaces, component_name,
                              profile_memory):
        # Construct a single component from the first namespace that
        # provides data for it.
        declaration = self.component(component_name)

        if component_name in modeldata._default:
            if declaration.ctype is not Set:
                declaration.set_default(modeldata._default[component_name])
        data = None

        for namespace in namespaces:
            if component_name in modeldata._data.get(namespace, {}):
                data = modeldata._data[namespace][component_name]
            if data is not None:
                break

        if __debug__ and logger.isEnabledFor(logging.DEBUG):
            _blockName = "Model" if self.parent_block() is None \
                else "Block '%s'" % self.name
            logger.debug("Constructing %s '%s' on %s from data=%s",
                         declaration.__class__.__name__,
                         declaration.name, _blockName, str(data))
        try:
            declaration.construct(data)
        except:
            # Log context for the failing component, then re-raise.
            err = sys.exc_info()[1]
            logger.error(
                "Constructing component '%s' from data=%s failed:\n    %s: %s",
                str(declaration.name), str(data).strip(),
                type(err).__name__, err)
            raise

        if __debug__ and logger.isEnabledFor(logging.DEBUG):
            _out = StringIO()
            declaration.pprint(ostream=_out)
            logger.debug("Constructed component '%s':\n    %s" % (
                declaration.name, _out.getvalue()))

        if profile_memory >= 2 and pympler_available:
            mem_used = pympler.muppy.get_size(pympler.muppy.get_objects())
            print("      Total memory = %d bytes following construction of component=%s" % (mem_used, component_name))

            if profile_memory >= 3:
                gc.collect()
                mem_used = pympler.muppy.get_size(pympler.muppy.get_objects())
                print("      Total memory = %d bytes following construction of component=%s (after garbage collection)" % (mem_used, component_name))

    def create(self, filename=None, **kwargs):
        """
        Create a concrete instance of this Model, possibly using data
        read in from a file.
        """
        # Deprecated alias for create_instance().
        logger.warning(
            """DEPRECATION WARNING: the Model.create() method is deprecated.  Call Model.create_instance() to create a concrete instance from an abstract model.  You do not need to call Model.create() for a concrete model.""")
        return self.create_instance(filename=filename, **kwargs)

    def transform(self, name=None, **kwds):
        # Deprecated: with no name, list known transformations; with a
        # name, apply that transformation in place.
        if name is None:
            logger.warning(
                """DEPRECATION WARNING: Model.transform() is deprecated.  Use the TransformationFactory iterator to get the list of known transformations.""")
            return list(TransformationFactory)

        logger.warning(
            """DEPRECATION WARNING: Model.transform() is deprecated.  Use TransformationFactory('%s') to construct a transformation object, or TransformationFactory('%s').apply_to(model) to directly apply the transformation to the model instance.""" % (
                name,
                name,
            ))
        xfrm = TransformationFactory(name)
        if xfrm is None:
            raise ValueError("Unknown model transformation '%s'" % name)
        return xfrm.apply_to(self, **kwds)
class PyomoModelChecker(SingletonPlugin):
    """
    Base class for model checkers.  Subclasses override check(),
    beginChecking() and endChecking(); the underscore-prefixed wrappers
    run the registered pre/post hooks and contain hook errors.
    """

    implements(IModelChecker, inherit=True)

    # Hooks run before/after each check() invocation.
    _prehooks = ExtensionPoint(IPreCheckHook)
    _posthooks = ExtensionPoint(IPostCheckHook)

    def __init__(self):
        # Runner/script for the check session currently in progress
        # (set by _beginChecking, used by problem()).
        self._currentRunner = None
        self._currentScript = None

    def _check(self, runner, script, info):
        """Run prehooks, the subclass check(), then posthooks."""
        # NOTE(review): these attributes (_runner/_script) are distinct
        # from _currentRunner/_currentScript and appear vestigial.
        self._runner = runner
        self._script = script

        for prehook in self._prehooks:
            prehook.precheck(runner, script, info)

        try:
            self.check(runner, script, info)
        except Exception:
            print(self.checkerLabel() + "ERROR during check call!")
            # Bare raise preserves the original traceback (the old
            # 'raise e' form discarded it on Python 2).
            raise

        for posthook in self._posthooks:
            posthook.postcheck(runner, script, info)

        self._runner = None
        self._script = None

    def check(self, runner, script, info):
        # Should be `pass` - checkers are not guaranteed to call
        # superclass when running their own check() methods
        pass

    def _beginChecking(self, runner, script):
        """Record the session context and invoke beginChecking()."""
        self._currentRunner = runner
        self._currentScript = script
        try:
            self.beginChecking(runner, script)
        except Exception:
            print(self.checkerLabel() + "ERROR during pre-check call!")

    def beginChecking(self, runner, script):
        pass

    def _endChecking(self, runner, script):
        """Invoke endChecking() and clear the session context."""
        try:
            self.endChecking(runner, script)
        except Exception:
            # Fixed copy-paste: this wrapper runs *after* the check.
            print(self.checkerLabel() + "ERROR during post-check call!")
        self._currentRunner = None
        self._currentScript = None

    def endChecking(self, runner, script):
        pass

    def _checkerName(self):
        # Extract the unqualified class name from repr(self.__class__).
        match = re.search(r"<class '([a-zA-Z0-9_\.]+)'>", str(self.__class__))
        return match.group(1).split(".")[-1]

    def _checkerPackage(self):
        # Extract the containing package name (third-from-last component).
        match = re.search(r"<class '([a-zA-Z0-9_\.]+)'>", str(self.__class__))
        return match.group(1).split(".")[-3]

    def checkerLabel(self):
        """Return the '[package::Checker] ' prefix used in output."""
        return "[" + self._checkerPackage() + "::" + self._checkerName() + "] "

    def checkerDoc(self):
        """Optional multi-line description printed in verbose mode."""
        return ""

    def problem(self, message="Error", runner=None, script=None, lineno=None):
        """Report a problem found by this checker at script:lineno."""
        if script is None:
            script = self._currentScript
        if runner is None:
            runner = self._currentRunner

        output = self.checkerLabel()
        if script is not None:
            output += script.filename() + ":"
            if lineno is not None:
                output += str(lineno) + ":"
        else:
            output += "<unknown>:"
        output += " " + message

        print(output)

        try:
            if runner.verbose:
                if len(self.checkerDoc()) > 0:
                    lines = textwrap.dedent(self.checkerDoc()).split("\n")
                    lines = filter((lambda x: len(x) > 0), lines)
                    for line in lines:
                        print(self.checkerLabel() + line)
                    # print() (was a bare 'print' expression - a py2
                    # leftover that is a no-op in py3; the intent was a
                    # trailing blank line)
                    print()
        except Exception:
            print(self.checkerLabel() + "ERROR during verbose info generation")
            print()
def BenderAlgorithmBuilder(options, scenario_tree):
    """
    Construct and initialize a ProgressiveHedging object for a
    Benders-style run: import/enable solution-writer and PH extension
    plugins named in `options`, build the solver manager, and call
    ph.initialize().

    Returns the initialized ProgressiveHedging object; on failure,
    releases any acquired resources and re-raises.
    """
    import pyomo.environ
    import pyomo.solvers.plugins.smanager.phpyro
    import pyomo.solvers.plugins.smanager.pyro
    # Hoisted out of the loops below (they were re-imported per iteration).
    import inspect
    import pyomo.common

    def _module_basename(extension_name):
        # Derive the sys.modules key for a user extension: drop a
        # trailing ".py" suffix and any leading directory components.
        # (The previous code used rstrip(".py"), which strips a
        # *character set* and mangled module names ending in '.', 'p'
        # or 'y'; its rfind() guard was also truthy for "not found".)
        name = extension_name
        if name.endswith(".py"):
            name = name[:-len(".py")]
        if name.find("/") != -1:
            name = name.split("/")[-1]
        return name

    # Disable all solution writer plugins up-front; the ones requested
    # via options are re-enabled below.
    solution_writer_plugins = ExtensionPoint(ISolutionWriterExtension)
    for plugin in solution_writer_plugins:
        plugin.disable()

    solution_plugins = []
    if len(options.solution_writer) > 0:
        for this_extension in options.solution_writer:
            if this_extension in sys.modules:
                print("User-defined PH solution writer module=" +
                      this_extension + " already imported - skipping")
            else:
                print("Trying to import user-defined PH "
                      "solution writer module=" + this_extension)
                # make sure "." is in the PATH.
                original_path = list(sys.path)
                sys.path.insert(0, '.')
                import_file(this_extension)
                print("Module successfully loaded")
                sys.path[:] = original_path  # restore to what it was

            # now that we're sure the module is loaded, re-enable this
            # specific plugin.  recall that all plugins are disabled
            # by default in phinit.py, for various reasons. if we want
            # them to be picked up, we need to enable them explicitly.
            module_to_find = _module_basename(this_extension)

            for name, obj in inspect.getmembers(sys.modules[module_to_find],
                                                inspect.isclass):
                # the second condition gets around goofyness related
                # to issubclass returning True when the obj is the
                # same as the test class.
                if issubclass(obj, pyomo.common.plugin.SingletonPlugin) and \
                   (name != "SingletonPlugin"):
                    for plugin in solution_writer_plugins(all=True):
                        if isinstance(plugin, obj):
                            plugin.enable()
                            solution_plugins.append(plugin)

    #
    # if any of the ww extension configuration options are specified
    # without the ww extension itself being enabled, halt and warn the
    # user - this has led to confusion in the past, and will save user
    # support time.
    #
    if (len(options.ww_extension_cfgfile) > 0) and \
       (options.enable_ww_extensions is False):
        raise ValueError("A configuration file was specified "
                         "for the WW extension module, but the WW extensions "
                         "are not enabled!")

    if (len(options.ww_extension_suffixfile) > 0) and \
       (options.enable_ww_extensions is False):
        raise ValueError("A suffix file was specified for the WW "
                         "extension module, but the WW extensions are not "
                         "enabled!")

    if (len(options.ww_extension_annotationfile) > 0) and \
       (options.enable_ww_extensions is False):
        raise ValueError("A annotation file was specified for the "
                         "WW extension module, but the WW extensions are not "
                         "enabled!")

    #
    # disable all plugins up-front. then, enable them on an as-needed
    # basis later in this function. the reason that plugins should be
    # disabled is that they may have been programmatically enabled in
    # a previous run of PH, and we want to start from a clean slate.
    #
    ph_extension_point = ExtensionPoint(IPHExtension)
    for plugin in ph_extension_point:
        plugin.disable()

    ph_plugins = []
    #
    # deal with any plugins. ww extension comes first currently,
    # followed by an option user-defined plugin. order only matters
    # if both are specified.
    #
    if options.enable_ww_extensions:
        import pyomo.pysp.plugins.wwphextension

        # explicitly enable the WW extension plugin - it may have been
        # previously loaded and/or enabled.
        ph_extension_point = ExtensionPoint(IPHExtension)

        for plugin in ph_extension_point(all=True):
            if isinstance(plugin,
                          pyomo.pysp.plugins.wwphextension.wwphextension):
                plugin.enable()
                ph_plugins.append(plugin)

                # there is no reset-style method for plugins in general,
                # or the ww ph extension in plugin in particular. if no
                # configuration or suffix filename is specified, set to
                # None so that remnants from the previous use of the
                # plugin aren't picked up.
                if len(options.ww_extension_cfgfile) > 0:
                    plugin._configuration_filename = options.ww_extension_cfgfile
                else:
                    plugin._configuration_filename = None
                if len(options.ww_extension_suffixfile) > 0:
                    plugin._suffix_filename = options.ww_extension_suffixfile
                else:
                    plugin._suffix_filename = None
                if len(options.ww_extension_annotationfile) > 0:
                    plugin._annotation_filename = options.ww_extension_annotationfile
                else:
                    plugin._annotation_filename = None

    if len(options.user_defined_extensions) > 0:
        for this_extension in options.user_defined_extensions:
            if this_extension in sys.modules:
                print("User-defined PH extension module=" +
                      this_extension + " already imported - skipping")
            else:
                print("Trying to import user-defined PH extension module=" +
                      this_extension)
                # make sure "." is in the PATH.
                original_path = list(sys.path)
                sys.path.insert(0, '.')
                import_file(this_extension)
                print("Module successfully loaded")
                # restore to what it was
                sys.path[:] = original_path

            # now that we're sure the module is loaded, re-enable this
            # specific plugin. recall that all plugins are disabled
            # by default in phinit.py, for various reasons. if we want
            # them to be picked up, we need to enable them explicitly.
            module_to_find = _module_basename(this_extension)

            for name, obj in inspect.getmembers(sys.modules[module_to_find],
                                                inspect.isclass):
                # the second condition gets around goofyness related
                # to issubclass returning True when the obj is the
                # same as the test class.
                if issubclass(obj, pyomo.common.plugin.SingletonPlugin) and \
                   (name != "SingletonPlugin"):
                    ph_extension_point = ExtensionPoint(IPHExtension)
                    for plugin in ph_extension_point(all=True):
                        if isinstance(plugin, obj):
                            plugin.enable()
                            ph_plugins.append(plugin)

    ph = None
    solver_manager = None
    try:
        # construct the solver manager.
        if options.verbose:
            print("Constructing solver manager of type=" +
                  options.solver_manager_type)
        solver_manager = SolverManagerFactory(options.solver_manager_type,
                                              host=options.pyro_host,
                                              port=options.pyro_port)

        if solver_manager is None:
            raise ValueError("Failed to create solver manager of "
                             "type=" + options.solver_manager_type +
                             " specified in call to PH constructor")

        ph = ProgressiveHedging(options)

        if isinstance(solver_manager,
                      pyomo.solvers.plugins.smanager.phpyro.SolverManager_PHPyro):

            if scenario_tree.contains_bundles():
                num_jobs = len(scenario_tree._scenario_bundles)
                if not _OLD_OUTPUT:
                    print("Bundle solver jobs available: " + str(num_jobs))
            else:
                num_jobs = len(scenario_tree._scenarios)
                if not _OLD_OUTPUT:
                    print("Scenario solver jobs available: " + str(num_jobs))

            servers_expected = options.phpyro_required_workers
            if (servers_expected is None):
                servers_expected = num_jobs

            # Only wait a bounded time for workers when no specific
            # worker count was demanded.
            timeout = options.phpyro_workers_timeout if \
                      (options.phpyro_required_workers is None) else \
                      None

            solver_manager.acquire_servers(servers_expected,
                                           timeout)

        ph.initialize(scenario_tree=scenario_tree,
                      solver_manager=solver_manager,
                      ph_plugins=ph_plugins,
                      solution_plugins=solution_plugins)

    except:
        # Best-effort cleanup, then re-raise the original exception.
        if ph is not None:
            ph.release_components()
        if solver_manager is not None:
            # 'ph' may still be None if ProgressiveHedging() itself
            # raised; the previous code dereferenced ph unconditionally
            # here, masking the original error with an AttributeError.
            shutdown_workers = (ph is not None) and ph._shutdown_pyro_workers
            if isinstance(solver_manager,
                          pyomo.solvers.plugins.smanager.phpyro.
                          SolverManager_PHPyro):
                solver_manager.release_servers(shutdown=shutdown_workers)
            elif isinstance(solver_manager,
                            pyomo.solvers.plugins.smanager.pyro.
                            SolverManager_Pyro):
                if shutdown_workers:
                    solver_manager.shutdown_workers()
        print("Failed to initialize progressive hedging algorithm")
        raise

    return ph
def test_deprecation(self):
    """Verify that deprecated plugin APIs emit the expected warnings."""
    # Environment-stack operations are deprecated no-ops that warn.
    out = StringIO()
    with LoggingIntercept(out):
        PluginGlobals.add_env(None)
    self.assertIn("Pyomo only supports a single global environment",
                  out.getvalue().replace('\n', ' '))

    out = StringIO()
    with LoggingIntercept(out):
        PluginGlobals.pop_env()
    self.assertIn("Pyomo only supports a single global environment",
                  out.getvalue().replace('\n', ' '))

    out = StringIO()
    with LoggingIntercept(out):
        PluginGlobals.clear()
    self.assertIn("Pyomo only supports a single global environment",
                  out.getvalue().replace('\n', ' '))

    class IFoo(Interface):
        pass

    # alias(subclass=...) is no longer supported and warns at class
    # definition time.
    out = StringIO()
    with LoggingIntercept(out):

        class myFoo(Plugin):
            alias('myFoo', subclass=True)

    self.assertIn("alias() function does not support the subclass",
                  out.getvalue().replace('\n', ' '))

    # implements(namespace=...) is deprecated: only the global
    # namespace is supported.
    out = StringIO()
    with LoggingIntercept(out):

        class myFoo(Plugin):
            implements(IFoo, namespace='here')

    self.assertIn("only supports a single global namespace.",
                  out.getvalue().replace('\n', ' '))

    class IGone(DeprecatedInterface):
        pass

    # Both implementing and querying a DeprecatedInterface warn with
    # the default message...
    out = StringIO()
    with LoggingIntercept(out):

        class myFoo(Plugin):
            implements(IGone)

    self.assertIn("The IGone interface has been deprecated",
                  out.getvalue().replace('\n', ' '))

    out = StringIO()
    with LoggingIntercept(out):
        ExtensionPoint(IGone).extensions()
    self.assertIn("The IGone interface has been deprecated",
                  out.getvalue().replace('\n', ' '))

    class ICustomGone(DeprecatedInterface):
        # Custom message overrides the default deprecation text.
        __deprecated_message__ = 'This interface is gone!'

    # ...and with the interface-specific message when one is declared.
    out = StringIO()
    with LoggingIntercept(out):

        class myFoo(Plugin):
            implements(ICustomGone)

    self.assertIn("This interface is gone!",
                  out.getvalue().replace('\n', ' '))

    out = StringIO()
    with LoggingIntercept(out):
        ExtensionPoint(ICustomGone).extensions()
    self.assertIn("This interface is gone!",
                  out.getvalue().replace('\n', ' '))
def construct_scenario_instance(self,
                                scenario_name,
                                scenario_tree,
                                profile_memory=False,
                                output_instance_construction_time=False,
                                compile_instance=False,
                                verbose=False):
    """
    Build the concrete model instance for one scenario.

    The instance comes from (in priority order): the user model
    callback, a clone of an already-constructed model object, or
    create_instance() driven by scenario- or node-based data files
    (.dat, or .yaml for scenario-based data).

    Raises ValueError/RuntimeError on unknown scenarios or missing
    data files; any construction failure is logged and re-raised.
    """
    assert not self._closed
    if not scenario_tree.contains_scenario(scenario_name):
        raise ValueError("ScenarioTree does not contain scenario "
                         "with name %s." % (scenario_name))

    scenario = scenario_tree.get_scenario(scenario_name)
    node_name_list = [n._name for n in scenario._node_list]

    if verbose:
        print("Creating instance for scenario=%s" % (scenario_name))

    scenario_instance = None

    try:
        if self._model_callback is not None:
            assert self._model_object is None
            try:
                _scenario_tree_arg = None
                # new callback signature
                if (self._scenario_tree_filename is not None) and \
                   self._scenario_tree_filename.endswith('.dat'):
                    # we started with a .dat file, so
                    # send the PySP scenario tree
                    _scenario_tree_arg = scenario_tree
                elif self._scenario_tree_model is not None:
                    # We started from a Pyomo
                    # scenario tree model instance, or a
                    # networkx tree.
                    _scenario_tree_arg = self._scenario_tree_model
                else:
                    # send the PySP scenario tree
                    _scenario_tree_arg = scenario_tree
                scenario_instance = self._model_callback(_scenario_tree_arg,
                                                         scenario_name,
                                                         node_name_list)
            except TypeError:
                # old callback signature
                # TODO:
                #logger.warning(
                #    "DEPRECATED: The 'pysp_instance_creation_callback' function "
                #    "signature has changed. An additional argument should be "
                #    "added to the beginning of the arguments list that will be "
                #    "set to the user provided scenario tree object when called "
                #    "by PySP (e.g., a Pyomo scenario tree model instance, "
                #    "a networkx tree, or a PySP ScenarioTree object.")
                scenario_instance = self._model_callback(scenario_name,
                                                         node_name_list)
        elif self._model_object is not None:
            # Already-constructed (or non-abstract) models are cloned.
            if (not isinstance(self._model_object, AbstractModel)) or \
               (self._model_object.is_constructed()):
                scenario_instance = self._model_object.clone()
            elif scenario_tree._scenario_based_data:
                assert self.data_directory() is not None
                scenario_data_filename = \
                    os.path.join(self.data_directory(),
                                 str(scenario_name))
                # JPW: The following is a hack to support
                #      initialization of block instances, which
                #      don't work with .dat files at the
                #      moment. Actually, it's not that bad of a
                #      hack - it just needs to be extended a bit,
                #      and expanded into the node-based data read
                #      logic (where yaml is completely ignored at
                #      the moment.
                if os.path.exists(scenario_data_filename + '.dat'):
                    scenario_data_filename = \
                        scenario_data_filename + ".dat"
                    data = None
                elif os.path.exists(scenario_data_filename + '.yaml'):
                    if not yaml_available:
                        raise ValueError(
                            "Found yaml data file for scenario '%s' "
                            "but the PyYAML module is not available"
                            % (scenario_name))
                    scenario_data_filename = \
                        scenario_data_filename+".yaml"
                    with open(scenario_data_filename) as f:
                        data = yaml.load(f, **yaml_load_args)
                else:
                    raise RuntimeError(
                        "Cannot find a data file for scenario '%s' "
                        "in directory: %s\nRecognized formats: .dat, "
                        ".yaml" % (scenario_name, self.data_directory()))
                if verbose:
                    print("Data for scenario=%s loads from file=%s"
                          % (scenario_name, scenario_data_filename))
                if data is None:
                    # .dat file: let create_instance() do the parsing.
                    scenario_instance = \
                        self._model_object.create_instance(
                            filename=scenario_data_filename,
                            profile_memory=profile_memory,
                            report_timing=output_instance_construction_time)
                else:
                    # yaml data was already parsed into a dict.
                    scenario_instance = \
                        self._model_object.create_instance(
                            data,
                            profile_memory=profile_memory,
                            report_timing=output_instance_construction_time)
            else:
                # Node-based data: accumulate one .dat file per tree node.
                assert self.data_directory() is not None
                data_files = []
                for node_name in node_name_list:
                    node_data_filename = \
                        os.path.join(self.data_directory(),
                                     str(node_name)+".dat")
                    if not os.path.exists(node_data_filename):
                        raise RuntimeError(
                            "Cannot find a data file for scenario tree "
                            "node '%s' in directory: %s\nRecognized "
                            "formats: .dat" % (node_name,
                                               self.data_directory()))
                    data_files.append(node_data_filename)

                scenario_data = DataPortal(model=self._model_object)
                for data_file in data_files:
                    if verbose:
                        print("Node data for scenario=%s partially "
                              "loading from file=%s"
                              % (scenario_name, data_file))
                    scenario_data.load(filename=data_file)

                scenario_instance = self._model_object.create_instance(
                    scenario_data,
                    profile_memory=profile_memory,
                    report_timing=output_instance_construction_time)
        else:
            raise RuntimeError("Unable to construct scenario instance. "
                               "Neither a reference model or callback "
                               "is defined.")

        # name each instance with the scenario name
        scenario_instance._name = scenario_name

        # apply each of the post-instance creation plugins. this
        # really shouldn't be associated (in terms of naming) with the
        # pyomo script - this should be rectified with a workflow
        # re-work. it is unclear how this interacts, or doesn't, with
        # the preprocessors.
        # (The previous code also bound 'ep' to the ExtensionPoint
        # itself immediately before shadowing it as the loop variable;
        # that dead assignment has been removed.)
        for ep in ExtensionPoint(IPyomoScriptModifyInstance):
            logger.warning(
                "DEPRECATED: IPyomoScriptModifyInstance extension "
                "point callbacks will be ignored by PySP in the future")
            # The previous code passed 'model=reference_model', an
            # undefined name that raised NameError whenever a callback
            # was registered; the reference model here is the factory's
            # model object.
            ep.apply(options=None,
                     model=self._model_object,
                     instance=scenario_instance)

        if compile_instance:
            from pyomo.repn.beta.matrix import \
                compile_block_linear_constraints
            compile_block_linear_constraints(
                scenario_instance,
                "_PySP_compiled_linear_constraints",
                verbose=verbose)

    except:
        logger.error("Failed to create model instance for scenario=%s"
                     % (scenario_name))
        raise

    return scenario_instance
def test_singleton_plugin_interface(self):
    """Exercise singleton plugin registration, inheritance, and services."""

    class IFoo(Interface):
        pass

    class mySingleton(SingletonPlugin):
        implements(IFoo)

    # Defining the class registers its (auto-created) singleton
    # instance as a non-service (False) entry in IFoo._plugins.
    ep = ExtensionPoint(IFoo)
    self.assertEqual(ep.extensions(), [])
    self.assertEqual(IFoo._plugins, {
        mySingleton: {
            0: (weakref.ref(mySingleton.__singleton__), False)
        },
    })
    self.assertIsNotNone(mySingleton.__singleton__)
    # A second instantiation of a singleton class is an error.
    with self.assertRaisesRegex(
            RuntimeError, 'Cannot create multiple singleton plugin instances'):
        mySingleton()

    class myDerivedSingleton(mySingleton):
        pass

    # Subclasses get their own, distinct singleton registration.
    self.assertEqual(ep.extensions(), [])
    self.assertEqual(
        IFoo._plugins, {
            mySingleton: {
                0: (weakref.ref(mySingleton.__singleton__), False)
            },
            myDerivedSingleton: {
                1: (weakref.ref(myDerivedSingleton.__singleton__), False)
            },
        })
    self.assertIsNotNone(myDerivedSingleton.__singleton__)
    self.assertIsNot(mySingleton.__singleton__,
                     myDerivedSingleton.__singleton__)

    class myDerivedNonSingleton(mySingleton):
        # Opting out of singleton behavior: no instance is auto-created.
        __singleton__ = False

    self.assertEqual(ep.extensions(), [])
    self.assertEqual(
        IFoo._plugins, {
            mySingleton: {
                0: (weakref.ref(mySingleton.__singleton__), False)
            },
            myDerivedSingleton: {
                1: (weakref.ref(myDerivedSingleton.__singleton__), False)
            },
            myDerivedNonSingleton: {},
        })
    self.assertIsNone(myDerivedNonSingleton.__singleton__)

    class myServiceSingleton(mySingleton):
        # service=True registers the singleton as an *active* service,
        # so it shows up in ep.extensions().
        implements(IFoo, service=True)

    self.assertEqual(ep.extensions(), [myServiceSingleton.__singleton__])
    self.assertEqual(
        IFoo._plugins, {
            mySingleton: {
                0: (weakref.ref(mySingleton.__singleton__), False)
            },
            myDerivedSingleton: {
                1: (weakref.ref(myDerivedSingleton.__singleton__), False)
            },
            myDerivedNonSingleton: {},
            myServiceSingleton: {
                2: (weakref.ref(myServiceSingleton.__singleton__), True)
            },
        })
    self.assertIsNotNone(myServiceSingleton.__singleton__)
def parse_command_line(args,
                       register_options_callback,
                       with_extensions=None,
                       **kwds):
    """Parse command-line arguments for a PySP script.

    Options are registered into a PySPConfigBlock via
    register_options_callback.  When with_extensions maps option names
    to extension-point interfaces, any extension modules the user names
    on the command line are loaded, their plugin-specific options are
    registered, and the command line is re-parsed so those options are
    recognized.

    Parameters
    ----------
    args : list
        Raw command-line arguments (e.g., sys.argv[1:]).
    register_options_callback : callable
        Called with a PySPConfigBlock to register the script's options.
    with_extensions : dict, optional
        Maps an option name to the extension-point interface whose
        plugins that option selects.
    **kwds
        Forwarded to argparse.ArgumentParser.

    Returns
    -------
    options                    when with_extensions is falsy
    (options, extensions)      when with_extensions is given

    Side effects: prints help and calls sys.exit(0) when -h/--help is
    requested.
    """
    import pysp.plugins
    pysp.plugins.load()
    from pysp.util.config import _domain_tuple_of_str

    # Record which plugin modules are already registered for each
    # extension point so they can be listed in the --help epilog.
    registered_extensions = {}
    if with_extensions is not None:
        for name in with_extensions:
            plugins = ExtensionPoint(with_extensions[name])
            for plugin in plugins(all=True):
                registered_extensions.setdefault(name, []).\
                    append(plugin.__class__.__module__)

    def _get_argument_parser(options):
        # if we modify this and don't copy it,
        # the this output will appear twice the second
        # time this function gets called
        _kwds = dict(kwds)
        if len(registered_extensions) > 0:
            assert with_extensions is not None
            epilog = _kwds.pop('epilog', "")
            if epilog != "":
                epilog += "\n\n"
            epilog += "Registered Extensions:\n"
            for name in registered_extensions:
                epilog += " - " + str(with_extensions[name].__name__) + ": "
                epilog += str(registered_extensions[name]) + "\n"
            _kwds['epilog'] = epilog
        ap = argparse.ArgumentParser(
            add_help=False,
            formatter_class=argparse.RawDescriptionHelpFormatter,
            **_kwds)
        options.initialize_argparse(ap)
        # add_help=False above so that -h can be deferred until after
        # extension options are registered; register it manually here.
        ap.add_argument("-h", "--help", dest="show_help",
                        action="store_true", default=False,
                        help="show this help message and exit")
        return ap

    #
    # Register options
    #
    options = PySPConfigBlock()
    register_options_callback(options)

    if with_extensions is not None:
        for name in with_extensions:
            configval = options.get(name, None)
            assert configval is not None
            assert configval._domain is _domain_tuple_of_str

    ap = _get_argument_parser(options)
    # First parse known args, then import any extension plugins
    # specified by the user, regenerate the options block and
    # reparse to pick up plugin specific registered options
    opts, _ = ap.parse_known_args(args=args)
    options.import_argparse(opts)
    extensions = {}
    if with_extensions is None:
        # BUG FIX: -h/--help was previously swallowed by a bare `pass`
        # in this branch; honor it as the option's help text promises.
        if opts.show_help:
            ap.print_help()
            sys.exit(0)
    else:
        # If no extension modules were requested there is nothing more
        # to load, so help can be shown immediately.
        if all(len(options.get(name).value()) == 0
               for name in with_extensions) and \
               opts.show_help:
            ap.print_help()
            sys.exit(0)
        for name in with_extensions:
            extensions[name] = load_extensions(
                options.get(name).value(),
                with_extensions[name])

        # regenerate the options
        options = PySPConfigBlock()
        register_options_callback(options)
        for name in extensions:
            for plugin in extensions[name]:
                if isinstance(plugin, PySPConfiguredObject):
                    plugin.register_options(options)
            # do a dummy access to option to prevent
            # a warning about it not being used
            options.get(name).value()
        ap = _get_argument_parser(options)
        opts = ap.parse_args(args=args)
        options.import_argparse(opts)
        for name in extensions:
            for plugin in extensions[name]:
                if isinstance(plugin, PySPConfiguredObject):
                    plugin.set_options(options)
        # Help is shown only now, after extension plugins have had a
        # chance to register their own options.
        if opts.show_help:
            ap.print_help()
            sys.exit(0)

    if with_extensions:
        for name in extensions:
            extensions[name] = sort_extensions_by_precedence(extensions[name])
        return options, extensions
    else:
        return options
class ModelCheckRunner(object):
    """Runs registered IModelChecker plugins against Pyomo model scripts."""

    # All IModelChecker plugins registered with the plugin framework.
    _checkers = ExtensionPoint(IModelChecker)

    def __init__(self):
        # ModelScript instances queued for checking.
        self.scripts = []

    def run(self, *args, **kwargs):
        """Run the enabled checkers over every registered script.

        Keyword arguments:
            script   -- path of a model script to add before running.
            verbose  -- if True, print the active checkers per package.
            checkers -- dict mapping checker package name to a list of
                        checker names to enable; all other checkers are
                        disabled.
        """
        from pyomo.checker.plugins.checker import ImmediateDataChecker, IterativeDataChecker, ImmediateTreeChecker, IterativeTreeChecker

        # Get args
        script = kwargs.pop("script", None)
        verbose = kwargs.pop("verbose", False)
        checkers = kwargs.pop("checkers", {})

        # Store args as necessary
        self.verbose = verbose

        # Add script, if given
        if script is not None:
            self.addScript(ModelScript(script))

        # Enable exactly the checkers named in `checkers`; everything
        # else is disabled.  Short-circuit keeps the dict lookup safe.
        if not checkers:
            print("WARNING: No checkers enabled!")
        for c in self._checkers(all=True):
            if c._checkerPackage() in checkers and \
                    c._checkerName() in checkers[c._checkerPackage()]:
                c.enable()
            else:
                c.disable()

        # Show checkers if requested.
        # BUG FIX: this block was dead code (guarded by `if False:`)
        # even though `verbose` is parsed and stored for this purpose;
        # it now honors the verbose flag.
        if verbose:
            printable = {}
            for c in self._checkers():
                printable.setdefault(c._checkerPackage(), []).append(
                    c._checkerName())
            for package in printable:
                print("{0}: {1}".format(package,
                                        " ".join(printable[package])))
            print("")

        # Pre-partition checkers by how they consume the script:
        # raw source text (data) vs. parsed AST (tree), and run-once
        # (immediate) vs. per-node (iterative).
        immDataCheckers = [c for c in self._checkers
                           if isinstance(c, ImmediateDataChecker)]
        iterDataCheckers = [c for c in self._checkers
                            if isinstance(c, IterativeDataChecker)]
        immTreeCheckers = [c for c in self._checkers
                           if isinstance(c, ImmediateTreeChecker)]
        iterTreeCheckers = [c for c in self._checkers
                            if isinstance(c, IterativeTreeChecker)]

        for script in self.scripts:
            # Read in the script and call data checkers
            data = script.read()
            for checker in immDataCheckers:
                checker._beginChecking(self, script)
                checker._check(self, script, data)
                checker._endChecking(self, script)

            # Get the data into a parse tree
            tree = ast.parse(data)
            for checker in immTreeCheckers:
                checker._beginChecking(self, script)
                checker._check(self, script, tree)
                checker._endChecking(self, script)

            # Start walking the tree, calling checkers along the way
            visitor = CheckingNodeVisitor(self, script,
                                          tc=iterTreeCheckers,
                                          dc=iterDataCheckers,
                                          pt=data)
            visitor.sendBegin()
            visitor.visit(tree)
            visitor.sendEnd()

    def addScript(self, script):
        """Queue a ModelScript object for checking."""
        self.scripts.append(script)