class ModelTrackerHook(SingletonPlugin):
    """Pre-check hook that tracks which script variables hold model objects.

    Maintains ``script.modelVars``, a dict mapping variable names to the
    name of the ``...Model`` factory call that produced them.
    """

    implements(IPreCheckHook)

    def precheck(self, runner, script, info):
        """Update ``script.modelVars`` based on an AST assignment node."""
        # Lazily create the tracking dict on the script object.
        if getattr(script, 'modelVars', None) is None:
            script.modelVars = {}

        # Only assignment statements are of interest.
        if not isinstance(info, ast.Assign):
            return

        value = info.value
        if isinstance(value, ast.Call):
            # Track targets assigned from a "...Model" constructor call.
            if isinstance(value.func, ast.Name) and \
                    value.func.id.endswith("Model"):
                for target in info.targets:
                    if isinstance(target, ast.Name):
                        script.modelVars[target.id] = value.func.id
                    elif isinstance(target, ast.Tuple):
                        for elt in target.elts:
                            if isinstance(elt, ast.Name):
                                script.modelVars[elt.id] = value.func.id
        else:
            # Assigned a non-call expression: stop tracking those names.
            for target in info.targets:
                if isinstance(target, ast.Name):
                    script.modelVars.pop(target.id, None)
                elif isinstance(target, ast.Tuple):
                    for elt in target.elts:
                        if isinstance(elt, ast.Name):
                            script.modelVars.pop(elt.id, None)
class examplephsolverserverextension(SingletonPlugin):
    """Example PH solver-server extension that announces every callback."""

    implements(phextension.IPHSolverServerExtension)

    def _report(self, phase, ph):
        # Uniform trace line tagged with the invoking worker's name.
        print(phase + " PHSOLVERSERVER CALLBACK INVOKED ON WORKER: " + ph.WORKERNAME)

    def pre_ph_initialization(self, ph):
        """Called before PH initialization."""
        self._report("PRE INITIALIZATION", ph)

    def post_instance_creation(self, ph):
        """Called after the instances have been created."""
        self._report("POST INSTANCE CREATION", ph)

    def post_ph_initialization(self, ph):
        """Called after PH initialization."""
        self._report("POST INITIALIZATION", ph)

    def pre_iteration_0_solve(self, ph):
        """Called before the iteration 0 solve begins."""
        self._report("PRE ITERATION 0 SOLVE", ph)

    def post_iteration_0_solve(self, ph):
        """Called after the iteration 0 solve is finished."""
        self._report("POST ITERATION 0 SOLVE", ph)

    def pre_iteration_k_solve(self, ph):
        """Called before the iteration k solve begins."""
        self._report("PRE ITERATION K SOLVE", ph)

    def post_iteration_k_solve(self, ph):
        """Called after the iteration k solve is finished."""
        self._report("POST ITERATION K SOLVE", ph)
class TrustRegionSolver(plugin.Plugin):
    """Trust region filter method for black box / glass box optimization.

    Solves nonlinear optimization problems containing external function
    calls through automatic construction of reduced models (ROM), also
    known as surrogate models.  Linear and quadratic reduced models are
    currently implemented.  See Eason, Biegler (2016) AIChE Journal for
    more details.

    Arguments:
    """
    # + param.CONFIG.generte_yaml_template()

    plugin.implements(IOptSolver)
    plugin.alias(
        'trustregion',
        doc='Trust region filter method for black box/glass box optimization')

    def available(self, exception_flag=True):
        """Check if solver is available.

        TODO: For now, it is always available. However, sub-solvers may not
        always be available, and so this should reflect that possibility.
        """
        return True

    def version(self):
        """Return a 3-tuple describing the solver version."""
        return __version__

    def solve(self, model, eflist, **kwds):
        """Run the trust-region algorithm on *model* using the external
        functions in *eflist*."""
        # Keyword options are not supported yet; a CONFIG object may be
        # reinstated later (see the commented-out lines).
        assert not kwds
        # config = param.CONFIG(kwds)
        return TRF(model, eflist)  # , config)
class testphextension(SingletonPlugin):
    """Test PH extension that prints a message from selected callbacks."""

    implements(phextension.IPHExtension)

    def reset(self, ph):
        """No state to reset."""
        pass

    def pre_ph_initialization(self, ph):
        """No-op before PH initialization."""
        pass

    def post_instance_creation(self, ph):
        """No-op after instance creation."""
        pass

    def post_ph_initialization(self, ph):
        print("Called after PH initialization!")

    def post_iteration_0_solves(self, ph):
        print("Called after the iteration 0 solves!")

    def post_iteration_0(self, ph):
        print("Called after the iteration 0 solves, averages computation, and weight computation")

    def pre_iteration_k_solves(self, ph):
        # this one does not do anything
        pass

    def post_iteration_k_solves(self, ph):
        print("Called after the iteration k solves!")

    def post_iteration_k(self, ph):
        print("Called after an iteration k has finished!")

    def post_ph_execution(self, ph):
        print("Called after PH has terminated!")
class TMP(Plugin):
    # Dynamically generated plugin class: ``key`` and ``data`` are free
    # variables captured from the enclosing scope — presumably a loop over
    # model-API entry points; verify against the surrounding function.
    implements(modelapi[key], service=True)

    def __init__(self):
        # Bind the user-supplied callback named ``key`` from the loaded
        # user model module.
        self.fn = getattr(data.local.usermodel, key)

    def apply(self, **kwds):
        # Forward all keyword arguments to the user callback.
        return self.fn(**kwds)
class ConnectorExpander(Plugin):
    """Pyomo-script hook that expands Connector objects on the instance."""

    implements(IPyomoScriptModifyInstance)

    def apply(self, **kwds):
        """Apply the 'core.expand_connectors' transformation in place and
        return the instance."""
        instance = kwds.pop('instance')
        connector_xform = TransformationFactory('core.expand_connectors')
        connector_xform.apply_to(instance, **kwds)
        return instance
class phboundextension(SingletonPlugin, _PHBoundExtensionImpl):
    # PH extension plugin wrapper; all behavior lives in
    # _PHBoundExtensionImpl.
    implements(phextension.IPHExtension)
    alias("phboundextension")

    def __init__(self):
        # Explicit base-class call rather than super() — presumably to
        # avoid Pyutilib SingletonPlugin/super() interaction; confirm
        # against the rest of the codebase.
        _PHBoundExtensionImpl.__init__(self)
class ConnectorExpander(Plugin):
    """Deprecated Pyomo-script hook that expands Connector objects."""

    implements(IPyomoScriptModifyInstance)

    @deprecated("Use of pyomo.connectors is deprecated. "
                "Its functionality has been replaced by pyomo.network.",
                version='5.6.9')
    def apply(self, **kwds):
        """Run 'core.expand_connectors' on the instance and return it."""
        instance = kwds.pop('instance')
        connector_xform = TransformationFactory('core.expand_connectors')
        connector_xform.apply_to(instance, **kwds)
        return instance
class BigM_Transformation_PyomoScript_Plugin(Plugin):
    """Pyomo-script hook that applies the GDP Big-M reformulation to the
    model instance."""

    implements(IPyomoScriptModifyInstance, service=True)

    def apply(self, **kwds):
        """Transform the instance in place with 'gdp.bigm'."""
        instance = kwds.pop('instance')
        # Not sure why the ModifyInstance callback started passing the
        # model along with the instance. We will ignore it.
        kwds.pop('model', None)
        bigm = TransformationFactory('gdp.bigm')
        return bigm.apply_to(instance, **kwds)
class Transformation(Plugin):
    """
    Base class for all model transformations.
    """

    implements(IModelTransformation, service=False)

    def __init__(self, **kwds):
        # Default the plugin name unless the caller supplied one.
        kwds.setdefault("name", "transformation")
        super(Transformation, self).__init__(**kwds)

    @deprecated(
        "Transformation.apply() has been deprecated. Please use either "
        "Transformation.apply_to() for in-place transformations or "
        "Transformation.create_using() for transformations that create a "
        "new, independent transformed model instance.")
    def apply(self, model, **kwds):
        """Deprecated dispatcher: in-place by default, out-of-place when
        inplace=False is passed."""
        if kwds.pop('inplace', True):
            self.apply_to(model, **kwds)
        else:
            return self.create_using(model, **kwds)

    def apply_to(self, model, **kwds):
        """
        Apply the transformation to the given model (in place).
        """
        timer = TransformationTimer(self, 'in-place')
        if not hasattr(model, '_transformation_data'):
            # First transformation on this model: attach bookkeeping.
            model._transformation_data = TransformationData()
        self._apply_to(model, **kwds)
        timer.report()

    def create_using(self, model, **kwds):
        """
        Create a new model transformed by this transformation.
        """
        timer = TransformationTimer(self, 'out-of-place')
        if not hasattr(model, '_transformation_data'):
            model._transformation_data = TransformationData()
        new_model = self._create_using(model, **kwds)
        timer.report()
        return new_model

    def _apply_to(self, model, **kwds):
        # Subclasses must override to implement the in-place transform.
        raise RuntimeError(
            "The Transformation.apply_to method is not implemented.")

    def _create_using(self, model, **kwds):
        # Default out-of-place behavior: clone, then transform the clone.
        instance = model.clone()
        self._apply_to(instance, **kwds)
        return instance
class UnknownDataManager(Plugin):
    # Placeholder data manager used for unrecognized data formats; it
    # always reports itself as unavailable.
    implements(IDataManager)

    def __init__(self, *args, **kwds):
        Plugin.__init__(self, **kwds)
        #
        # The 'type' is the class type of the solver instance
        #
        self.type = kwds["type"]

    def available(self):
        # Never available — this manager represents an unknown format.
        return False
class PyomoDataCommands(Plugin):
    """Data manager for Pyomo *.dat data command files."""

    alias("dat", "Pyomo data command file interface")
    implements(IDataManager, service=False)

    def __init__(self):
        self._info = []
        self.options = Options()

    def available(self):
        """The *.dat interface is always usable."""
        return True

    def initialize(self, **kwds):
        """Capture the target filename; stash remaining keywords as options."""
        self.filename = kwds.pop('filename')
        self.add_options(**kwds)

    def add_options(self, **kwds):
        """Merge keyword arguments into the option store."""
        self.options.update(kwds)

    def open(self):
        """Verify that a data command file was specified and exists."""
        if self.filename is None:  #pragma:nocover
            raise IOError("No filename specified")
        if not os.path.exists(self.filename):  #pragma:nocover
            raise IOError("Cannot find file '%s'" % self.filename)

    def close(self):
        pass

    def read(self):
        """
        This function does nothing, since executing Pyomo data commands
        both reads and processes the data all at once.
        """
        pass

    def write(self, data):  #pragma:nocover
        """
        This function does nothing, because we cannot write to a *.dat
        file.
        """
        pass

    def process(self, model, data, default):
        """
        Read Pyomo data commands and process the data.
        """
        _process_include(['include', self.filename],
                         model, data, default, self.options)

    def clear(self):
        """Drop any locally cached information."""
        self._info = []
class ConvexHull_Transformation_PyomoScript_Plugin(Plugin):
    """Plugin to automatically call the GDP Hull Reformulation within
    the Pyomo script.
    """

    implements(IPyomoScriptModifyInstance, service=True)

    def apply(self, **kwds):
        """Transform the instance in place with 'gdp.hull'."""
        instance = kwds.pop('instance')
        # Not sure why the ModifyInstance callback started passing the
        # model along with the instance. We will ignore it.
        kwds.pop('model', None)
        hull = TransformationFactory('gdp.hull')
        return hull.apply_to(instance, **kwds)
class FunctionTrackerHook(SingletonPlugin):
    """Check hooks tracking function definitions and their argument specs.

    ``script.functionDefs`` maps function names to their AST nodes;
    ``script.functionArgs`` is a stack of argument specs for functions
    currently being visited.
    """

    implements(IPreCheckHook)
    implements(IPostCheckHook)

    def precheck(self, runner, script, info):
        """Record definitions; alias or un-track names on assignment."""
        # Lazily create the tracking structures on the script object.
        if getattr(script, 'functionDefs', None) is None:
            script.functionDefs = {}
        if getattr(script, 'functionArgs', None) is None:
            script.functionArgs = []

        if isinstance(info, ast.FunctionDef):
            # New definition: remember the node and push its arguments.
            script.functionDefs[info.name] = info
            script.functionArgs.append(info.args)
        elif isinstance(info, ast.Assign):
            if isinstance(info.value, ast.Name):
                # 'x = f' makes x an alias of a tracked function f.
                if info.value.id in script.functionDefs:
                    for target in info.targets:
                        if isinstance(target, ast.Name):
                            script.functionDefs[target.id] = \
                                script.functionDefs[info.value.id]
            else:
                # Reassignment to a non-name shadows tracked functions.
                for target in info.targets:
                    if isinstance(target, ast.Name):
                        script.functionDefs.pop(target.id, None)

    def postcheck(self, runner, script, info):
        """Remove function args from the stack"""
        if isinstance(info, ast.FunctionDef):
            script.functionArgs.pop()
class ExpressionRegistration(Plugin):
    """Registers an expression class under a given type name.

    When *swap* is true, arguments are reversed before the expression is
    constructed (e.g. for mirrored operators).
    """

    implements(IPyomoExpression, service=False)

    def __init__(self, type, cls, swap=False):
        self._type = type
        self._cls = cls
        self._swap = swap

    def type(self):
        """Return the registered expression type name."""
        return self._type

    def create(self, args):
        """Instantiate the registered expression class with *args*."""
        if self._swap:
            args = list(reversed(args))
        return self._cls(args)
class examplephextension(SingletonPlugin):
    """Example PH extension that announces every callback it receives."""

    implements(phextension.IPHExtension)

    def _report(self, tag):
        # Uniform trace line for every callback.
        print(tag + " PH CALLBACK INVOKED")

    def reset(self, ph):
        """Invoked to reset the state of a plugin to that of post-construction"""
        self._report("RESET")

    def pre_ph_initialization(self, ph):
        """Called before PH initialization."""
        self._report("PRE INITIALIZATION")

    def post_instance_creation(self, ph):
        """Called after PH initialization has created the scenario
        instances, but before any PH-related
        weights/variables/parameters/etc are defined!"""
        self._report("POST INSTANCE CREATION")

    def post_ph_initialization(self, ph):
        """Called after PH initialization!"""
        self._report("POST INITIALIZATION")

    def post_iteration_0_solves(self, ph):
        """Called after the iteration 0 solves!"""
        self._report("POST ITERATION 0 SOLVE")

    def post_iteration_0(self, ph):
        """Called after the iteration 0 solves, averages computation, and
        weight computation"""
        self._report("POST ITERATION 0")

    def pre_iteration_k_solves(self, ph):
        """Called immediately before the iteration k solves!"""
        self._report("PRE ITERATION K SOLVE")

    def post_iteration_k_solves(self, ph):
        """Called after the iteration k solves!"""
        self._report("POST ITERATION K SOLVE")

    def post_iteration_k(self, ph):
        """Called after the iteration k is finished, after weights have
        been updated!"""
        self._report("POST ITERATION K")

    def post_ph_execution(self, ph):
        """Called after PH has terminated!"""
        self._report("POST EXECUTION")
class JSONSolutionWriter(SingletonPlugin):
    """Writes a scenario tree solution to '<prefix>_solution.json'."""

    implements(solutionwriter.ISolutionWriterExtension)

    def write(self, scenario_tree, output_file_prefix):
        """Serialize the scenario tree solution to JSON.

        The prefix selects how much PH-specific data is extracted:
        'ph' (parameters + statistics), 'postphef' (statistics only),
        'ef' (neither).
        """
        if not isinstance(scenario_tree, ScenarioTree):
            raise RuntimeError("JSONSolutionWriter write method expects ScenarioTree object - type of supplied object="+str(type(scenario_tree)))

        # prefix -> (include_ph_objective_parameters, include_variable_statistics)
        flag_map = {'ph': (True, True),
                    'postphef': (False, True),
                    'ef': (False, False)}
        if output_file_prefix not in flag_map:
            raise ValueError("JSONSolutionWriter requires an output prefix of 'ef', 'ph', or 'postphef' "
                             "to indicate whether ph specific parameter values should be extracted "
                             "from the solution")
        include_ph_objective_parameters, include_variable_statistics = \
            flag_map[output_file_prefix]

        output_filename = output_file_prefix + "_solution.json"

        results = {}
        results['scenario tree'] = extract_scenario_tree_structure(scenario_tree)
        results['scenario solutions'] = extract_scenario_solutions(
            scenario_tree,
            include_ph_objective_parameters=include_ph_objective_parameters)
        results['node solutions'] = extract_node_solutions(
            scenario_tree,
            include_ph_objective_parameters=include_ph_objective_parameters,
            include_variable_statistics=include_variable_statistics)

        with open(output_filename, 'w') as f:
            json.dump(results, f, indent=2)
        print("Scenario tree solution written to file="+output_filename)
class admm(SingletonPlugin, _AdaptiveRhoBase):
    # PH extension implementing an ADMM-style adaptive rho strategy;
    # every callback delegates directly to _AdaptiveRhoBase.
    implements(phextension.IPHExtension)
    alias("admm")

    def __init__(self):
        # Explicit base-class calls (not super()) throughout — presumably
        # due to Pyutilib SingletonPlugin/super() interaction; confirm.
        _AdaptiveRhoBase.__init__(self)

    def pre_ph_initialization(self, ph):
        _AdaptiveRhoBase.pre_ph_initialization(self, ph)

    def post_instance_creation(self, ph):
        _AdaptiveRhoBase.post_instance_creation(self, ph)

    def post_ph_initialization(self, ph):
        _AdaptiveRhoBase.post_ph_initialization(self, ph)

    def post_iteration_0_solves(self, ph):
        _AdaptiveRhoBase.post_iteration_0_solves(self, ph)

    def post_iteration_0(self, ph):
        _AdaptiveRhoBase.post_iteration_0(self, ph)

    def pre_iteration_k_solves(self, ph):
        _AdaptiveRhoBase.pre_iteration_k_solves(self, ph)

    def post_iteration_k_solves(self, ph):
        _AdaptiveRhoBase.post_iteration_k_solves(self, ph)

    def post_iteration_k(self, ph):
        _AdaptiveRhoBase.post_iteration_k(self, ph)

    def ph_convergence_check(self, ph):
        # The only callback whose return value matters to the caller.
        return _AdaptiveRhoBase.ph_convergence_check(self, ph)

    def post_ph_execution(self, ph):
        _AdaptiveRhoBase.post_ph_execution(self, ph)
class GJHSolver(ASL):
    """An interface to the AMPL GJH "solver" for evaluating a model at a
    point."""

    plugin.implements(IOptSolver)
    plugin.alias('contrib.gjh', doc='Interface to the AMPL GJH "solver"')

    def __init__(self, **kwds):
        # Force the ASL interface to run the 'gjh' executable and emit
        # symbolic labels so gjh output can be mapped back to the model.
        kwds['symbolic_solver_labels'] = True
        kwds['type'] = 'gjh'
        super(GJHSolver, self).__init__(**kwds)
        self.options.solver = 'gjh'
        self._metasolver = False

    # A hackish way to hold on to the model so that we can parse the
    # results.
    def _initialize_callbacks(self, model):
        self._model = model
        self._model._gjh_info = None
        super(GJHSolver, self)._initialize_callbacks(model)

    def _presolve(self, *args, **kwds):
        super(GJHSolver, self)._presolve(*args, **kwds)
        # Register the .gjh output (written next to the .sol file) so the
        # temp-file manager cleans it up.
        self._gjh_file = self._soln_file[:-3] + 'gjh'
        TempfileManager.add_tempfile(self._gjh_file, exists=False)

    def _postsolve(self):
        #
        # TODO: We should return the information using a better data
        # structure (ComponentMap? so that the GJH solver does not need
        # to be called with symbolic_solver_labels=True
        #
        self._model._gjh_info = readgjh(self._gjh_file)
        self._model = None  # drop the reference held since _initialize_callbacks
        return super(GJHSolver, self)._postsolve()
class myFoo(Plugin):
    # Minimal example plugin declaring the ICustomGone interface.
    implements(ICustomGone)
class JSONDictionary(Plugin):
    """Data manager that reads and writes Pyomo data as JSON files."""

    alias("json", "JSON file interface")
    implements(IDataManager, service=False)

    def __init__(self):
        self._info = {}
        self.options = Options()

    def available(self):
        """The JSON interface is always available."""
        return True

    def initialize(self, **kwds):
        """Capture the target filename; stash remaining keywords as options."""
        self.filename = kwds.pop('filename')
        self.add_options(**kwds)

    def add_options(self, **kwds):
        self.options.update(kwds)

    def open(self):
        if self.filename is None:
            raise IOError("No filename specified")

    def close(self):
        pass

    def read(self):
        """
        This function loads data from a JSON file and tuplizes the nested
        dictionaries and lists of lists.
        """
        if not os.path.exists(self.filename):
            raise IOError("Cannot find file '%s'" % self.filename)
        INPUT = open(self.filename, 'r')
        if six.PY2 and self.options.convert_unicode:
            def _byteify(data, ignore_dicts=False):
                # Recursively encode unicode strings as utf-8 bytes.
                if isinstance(data, six.text_type):
                    return data.encode('utf-8')
                if isinstance(data, list):
                    return [_byteify(item, True) for item in data]
                if isinstance(data, dict) and not ignore_dicts:
                    return dict((_byteify(key, True), _byteify(value, True))
                                for (key, value) in data.iteritems())
                return data
            jdata = json.load(INPUT, object_hook=_byteify)
        else:
            jdata = json.load(INPUT)
        INPUT.close()
        if jdata is None or len(jdata) == 0:
            raise IOError("Empty JSON data file")
        self._info = {}
        for key, value in jdata.items():
            self._info[key] = tuplize(value)

    def write(self, data):
        """
        This function creates a JSON file for the specified data.
        """
        with open(self.filename, 'w') as OUTPUT:
            jdata = {}
            if self.options.data is None:
                # Dump everything we were given.
                for key, value in data.items():
                    jdata[key] = detuplize(value)
            elif type(self.options.data) in (list, tuple):
                # Dump only the selected components.
                for key in self.options.data:
                    jdata[key] = detuplize(data[key], sort=self.options.sort)
            else:
                key = self.options.data
                jdata[key] = detuplize(data[key])
            json.dump(jdata, OUTPUT)

    def process(self, model, data, default):
        """
        Set the data for the selected components
        """
        if not self.options.namespace in data:
            data[self.options.namespace] = {}
        try:
            if self.options.data is None:
                for key in self._info:
                    self._set_data(data, self.options.namespace, key,
                                   self._info[key])
            elif type(self.options.data) in (list, tuple):
                for key in self.options.data:
                    self._set_data(data, self.options.namespace, key,
                                   self._info[key])
            else:
                key = self.options.data
                self._set_data(data, self.options.namespace, key,
                               self._info[key])
        except KeyError:
            raise IOError(
                "Data value for '%s' is not available in JSON file '%s'"
                % (key, self.filename))

    def _set_data(self, data, namespace, name, value):
        # Scalar values are stored under the None key by convention.
        if type(value) is dict:
            data[namespace][name] = value
        else:
            data[namespace][name] = {None: value}

    def clear(self):
        self._info = {}
class myFoo(Plugin):
    # Example plugin implementing IFoo, registered under the alias
    # 'my_foo'.
    implements(IFoo)
    alias('my_foo', 'myFoo docs')
class YamlDictionary(Plugin):
    """Data manager that reads and writes Pyomo data as YAML files."""

    alias("yaml", "YAML file interface")
    implements(IDataManager, service=False)

    def __init__(self):
        self._info = {}
        self.options = Options()

    def available(self):
        """Available only when the pyyaml package is importable."""
        return yaml_available

    def requirements(self):
        """Name of the third-party package this manager depends on."""
        return "pyyaml"

    def initialize(self, **kwds):
        """Capture the target filename; stash remaining keywords as options."""
        self.filename = kwds.pop('filename')
        self.add_options(**kwds)

    def add_options(self, **kwds):
        self.options.update(kwds)

    def open(self):
        if self.filename is None:
            raise IOError("No filename specified")

    def close(self):
        pass

    def read(self):
        """
        This function loads data from a YAML file and tuplizes the nested
        dictionaries and lists of lists.
        """
        if not os.path.exists(self.filename):
            raise IOError("Cannot find file '%s'" % self.filename)
        INPUT = open(self.filename, 'r')
        # NOTE(review): yaml.load without an explicit Loader can execute
        # arbitrary tags on untrusted input; consider yaml.safe_load.
        jdata = yaml.load(INPUT)
        INPUT.close()
        if jdata is None:
            raise IOError("Empty YAML file")
        self._info = {}
        for key, value in jdata.items():
            self._info[key] = tuplize(value)

    def write(self, data):
        """
        This function creates a YAML file for the specified data.
        """
        with open(self.filename, 'w') as OUTPUT:
            jdata = {}
            if self.options.data is None:
                # Dump everything we were given.
                for key, value in data.items():
                    jdata[key] = detuplize(value)
            elif type(self.options.data) in (list, tuple):
                # Dump only the selected components.
                for key in self.options.data:
                    jdata[key] = detuplize(data[key], sort=self.options.sort)
            else:
                key = self.options.data
                jdata[key] = detuplize(data[key])
            yaml.dump(jdata, OUTPUT)

    def process(self, model, data, default):
        """
        Set the data for the selected components
        """
        if not self.options.namespace in data:
            data[self.options.namespace] = {}
        try:
            if self.options.data is None:
                for key in self._info:
                    self._set_data(data, self.options.namespace, key,
                                   self._info[key])
            elif type(self.options.data) in (list, tuple):
                for key in self.options.data:
                    self._set_data(data, self.options.namespace, key,
                                   self._info[key])
            else:
                key = self.options.data
                self._set_data(data, self.options.namespace, key,
                               self._info[key])
        except KeyError:
            raise IOError(
                "Data value for '%s' is not available in YAML file '%s'"
                % (key, self.filename))

    def _set_data(self, data, namespace, name, value):
        # Scalar values are stored under the None key by convention.
        if type(value) is dict:
            data[namespace][name] = value
        else:
            data[namespace][name] = {None: value}

    def clear(self):
        self._info = {}
class myFoo(Plugin):
    # Example plugin implementing IFoo within the 'here' namespace.
    implements(IFoo, namespace='here')
class myFoo(Plugin):
    # Minimal example plugin declaring the IFoo interface.
    implements(IFoo)
class TMP(Plugin):
    # Dynamically generated plugin class: ``cls`` and ``description`` are
    # free variables captured from the enclosing scope — presumably a
    # component-registration loop; verify against the surrounding code.
    implements(IModelComponent, service=False)
    alias(cls.__name__, description)
    # The model component class this plugin exposes.
    component = cls
class myFoo(Plugin):
    # Minimal example plugin declaring the IGone interface.
    implements(IGone)
class mySingleton(SingletonPlugin):
    # Example singleton plugin declaring the IFoo interface.
    implements(IFoo)
class JSONSolutionLoaderExtension(PySPConfiguredExtension,
                                  PySPConfiguredObject,
                                  SingletonPlugin):
    """Loads a scenario tree solution from a JSON file (as produced by
    JSONSolutionSaverExtension)."""

    implements(IPySPSolutionLoaderExtension)

    @classmethod
    def _declare_options(cls, options=None):
        # Declare the options consumed by this extension; they appear on
        # the hosting config block with the "jsonloader_" prefix.
        if options is None:
            options = PySPConfigBlock()
        safe_declare_common_option(options, "input_name")
        safe_declare_common_option(options, "load_stages")
        return options

    _default_options_prefix = "jsonloader_"

    #
    # Note: Do not try to user super() or access the
    #       class name inside the __init__ method when
    #       a class derives from a SingletonPlugin. Due to
    #       how Pyutilib implements its Singleton type,
    #       the __class__ cell will be empty.
    #       (See: https://stackoverflow.com/questions/
    #             13126727/how-is-super-in-python-3-implemented)
    #
    def __init__(self):
        PySPConfiguredExtension.__init__(self)

    def load(self, manager):
        """Load the solution into *manager*'s scenario tree.

        Returns True when a solution was loaded, False when no input
        file was configured.  Raises ValueError when 'load_stages'
        exceeds the available stages, RuntimeError/KeyError when the
        file does not match the local scenario tree.
        """
        if self.get_option("input_name") is not None:
            stage_solutions = None
            # Do NOT open file in 'binary' mode when loading JSON
            # (produces an error in Python3)
            with open(self.get_option("input_name"), 'r') as f:
                stage_solutions = json.load(f)
            cntr = 0
            if self.get_option('load_stages') > len(manager.scenario_tree.stages):
                raise ValueError(
                    "The value of the %s option (%s) can not be greater than "
                    "the number of time stages in the local scenario tree (%s)"
                    % (self.get_full_option_name('load_stages'),
                       self.get_option('load_stages'),
                       len(manager.scenario_tree.stages)))
            if self.get_option('load_stages') > len(stage_solutions):
                raise ValueError(
                    "The value of the %s option (%s) can not be greater than "
                    "the number of time stages in the scenario tree solution "
                    "stored in %s (%s)"
                    % (self.get_full_option_name('load_stages'),
                       self.get_option('load_stages'),
                       self.get_option('input_name'),
                       len(stage_solutions)))
            # Pair local stages with stored stage solutions; zip_longest
            # exposes length mismatches in either direction.
            for stage, stage_solution in zip_longest(
                    manager.scenario_tree.stages, stage_solutions):
                if stage_solution is None:
                    # No more stored solutions.
                    break
                if (self.get_option('load_stages') <= 0) or \
                   (cntr+1 <= self.get_option('load_stages')):
                    if stage is None:
                        raise RuntimeError(
                            "Local scenario tree has fewer stages (%s) than what is "
                            "held by the solution loaded from file %s. Use the "
                            "option %s to limit the number of stages that "
                            "are loaded."
                            % (cntr,
                               self.get_option('input_name'),
                               self.get_full_option_name('load_stages')))
                    cntr += 1
                    for tree_node in stage.nodes:
                        try:
                            node_solution = stage_solution[tree_node.name]
                        except KeyError:
                            raise KeyError(
                                "Local scenario tree contains a tree node "
                                "that was not found in the solution at time"
                                "-stage %s: %s" % (cntr, tree_node.name))
                        load_node_solution(tree_node, node_solution)
                else:
                    # Reached the configured stage limit.
                    break
            print("Loaded scenario tree solution for %s time stages "
                  "from file %s" % (cntr, self.get_option('input_name')))
            return True

        # BUG FIX: this is the loader, so the message must say "loaded"
        # (the original said "saved", copy-pasted from the saver).
        print("No value was set for %s option 'input_name'. "
              "Nothing will be loaded." % (type(self).__name__))
        return False
class JSONSolutionSaverExtension(PySPConfiguredExtension,
                                 PySPConfiguredObject,
                                 SingletonPlugin):
    # Saves a scenario tree solution to a JSON file, one entry per time
    # stage, keyed by tree node name.

    implements(IPySPSolutionSaverExtension)

    @classmethod
    def _declare_options(cls, options=None):
        # Declare the options consumed by this extension; they appear on
        # the hosting config block with the "jsonsaver_" prefix.
        if options is None:
            options = PySPConfigBlock()
        safe_declare_common_option(options, "output_name")
        safe_declare_common_option(options, "save_stages")
        return options

    _default_options_prefix = "jsonsaver_"

    #
    # Note: Do not try to user super() or access the
    #       class name inside the __init__ method when
    #       a class derives from a SingletonPlugin. Due to
    #       how Pyutilib implements its Singleton type,
    #       the __class__ cell will be empty.
    #       (See: https://stackoverflow.com/questions/
    #             13126727/how-is-super-in-python-3-implemented)
    #
    def __init__(self):
        PySPConfiguredExtension.__init__(self)

    def save(self, manager):
        # Returns True when the solution was saved, False when no output
        # file was configured or a node without a solution was found.
        if self.get_option("output_name") is not None:
            stage_solutions = []
            # Do NOT open file in 'binary' mode when dumping JSON
            # (produces an error in Python3)
            with open(self.get_option('output_name'), 'w') as f:
                cntr = 0
                for stage in manager.scenario_tree.stages:
                    # save_stages <= 0 means "save all stages".
                    if (self.get_option('save_stages') <= 0) or \
                       (cntr+1 <= self.get_option('save_stages')):
                        cntr += 1
                        node_solutions = {}
                        for tree_node in stage.nodes:
                            _node_solution = extract_node_solution(tree_node)
                            if _node_solution is None:
                                print("No solution appears to be stored in node with "
                                      "name %s. No solution will be saved."
                                      % (tree_node.name))
                                return False
                            node_solutions[tree_node.name] = _node_solution
                        stage_solutions.append(node_solutions)
                    else:
                        break
                json.dump(stage_solutions, f, indent=2, sort_keys=True)
            print("Saved scenario tree solution for %s time stages "
                  "to file %s" % (cntr, self.get_option('output_name')))
            return True

        print("No value was set for %s option 'output_name'. "
              "Nothing will be saved." % (type(self).__name__))
        return False