def _declare_options(cls, options=None):
    """Declare this class's configuration options.

    If *options* is None a fresh PySPConfigBlock is created; otherwise
    the supplied block is updated in place. The block is returned.
    """
    options = PySPConfigBlock() if options is None else options
    safe_declare_common_option(
        options, "verbose", ap_group=_rho_group_label)
    return options
def _declare_options(cls, options=None):
    """Declare this class's configuration options.

    Only the common 'extension_precedence' option is registered on the
    (possibly freshly created) options block, which is then returned.
    """
    options = PySPConfigBlock() if options is None else options
    safe_declare_common_option(options, "extension_precedence")
    return options
class JSONSolutionSaverExtension(PySPConfiguredExtension,
                                 PySPConfiguredObject,
                                 SingletonPlugin):
    """Plugin that writes a scenario tree solution to a JSON file."""

    implements(IPySPSolutionSaverExtension)

    _declared_options = \
        PySPConfigBlock("Options declared for the "
                        "JSONSolutionSaverExtension class")

    safe_declare_common_option(_declared_options,
                               "output_name")
    safe_declare_common_option(_declared_options,
                               "save_stages")

    _default_prefix = "jsonsaver_"

    #
    # Note: Do not try to use super() or access the class name inside
    # the __init__ method when a class derives from a SingletonPlugin.
    # Due to how Pyutilib implements its Singleton type, the __class__
    # cell will be empty.
    # (See: https://stackoverflow.com/questions/
    #  13126727/how-is-super-in-python-3-implemented)
    #
    def __init__(self):
        PySPConfiguredExtension.__init__(self)

    def save(self, manager):
        """Dump node solutions per stage to the configured output file.

        Returns True on success, False when no output name is set or
        when any tree node has no stored solution.
        """
        if self.get_option("output_name") is None:
            print("No value was set for %s option 'output_name'. "
                  "Nothing will be saved." % (type(self).__name__))
            return False

        stage_limit = self.get_option('save_stages')
        stages_saved = 0
        stage_solutions = []
        # Do NOT open file in 'binary' mode when dumping JSON
        # (produces an error in Python3)
        with open(self.get_option('output_name'), 'w') as f:
            for stage in manager.scenario_tree.stages:
                # a non-positive limit means "save every stage"
                if (stage_limit > 0) and (stages_saved + 1 > stage_limit):
                    break
                stages_saved += 1
                node_solutions = {}
                for tree_node in stage.nodes:
                    solution = extract_node_solution(tree_node)
                    if solution is None:
                        print("No solution appears to be stored in node with "
                              "name %s. No solution will be saved."
                              % (tree_node.name))
                        return False
                    node_solutions[tree_node.name] = solution
                stage_solutions.append(node_solutions)
            json.dump(stage_solutions, f, indent=2, sort_keys=True)
        print("Saved scenario tree solution for %s time stages "
              "to file %s" % (stages_saved, self.get_option('output_name')))
        return True
def _declare_options(cls, options=None):
    """Declare the load-side options: 'input_name' and 'load_stages'."""
    options = PySPConfigBlock() if options is None else options
    for name in ("input_name", "load_stages"):
        safe_declare_common_option(options, name)
    return options
def _declare_options(cls, options=None):
    """Declare 'verbose' plus the prefixed subproblem solver options."""
    options = PySPConfigBlock() if options is None else options
    safe_declare_common_option(options,
                               "verbose",
                               ap_group=_admm_group_label)
    # subproblem solver-manager options are registered under a prefix
    # so they do not collide with top-level option names
    ScenarioTreeManagerSolverFactory.register_options(
        options,
        options_prefix="subproblem_",
        setup_argparse=False)
    return options
def _declare_options(cls, options=None):
    """Declare ADMM options: iteration limit, residual tolerances,
    algorithm and rho-strategy options, and the strategy selector."""
    options = PySPConfigBlock() if options is None else options
    safe_declare_common_option(options,
                               "max_iterations",
                               ap_group=_admm_group_label)
    # the primal and dual residual tolerances are declared identically
    for residual in ("primal", "dual"):
        safe_declare_unique_option(
            options,
            "%s_residual_relative_tolerance" % residual,
            PySPConfigValue(
                1.0e-4,
                domain=_domain_positive,
                description=("Relative %s-residual tolerance. "
                             "Default is 1e-4." % residual),
                doc=None,
                visibility=0),
            ap_group=_admm_group_label)
    ADMMAlgorithm._declare_options(options)
    for rstype in RhoStrategyFactory.registered_types.values():
        rstype._declare_options(options)
    assert 'adaptive' in RhoStrategyFactory.registered_types
    safe_declare_unique_option(
        options,
        "rho_strategy",
        PySPConfigValue(
            'adaptive',
            domain=_domain_must_be_str,
            description=("Rho update strategy. Choices are: %s. "
                         "Default is 'adaptive'."
                         % (str(sorted(
                             RhoStrategyFactory.registered_types.keys())))),
            doc=None,
            visibility=0),
        ap_group=_admm_group_label)
    return options
def _declare_options(cls, options=None):
    """Declare preprocessing and solve-output related common options."""
    options = PySPConfigBlock() if options is None else options
    for name in ("preprocess_fixed_variables",
                 "symbolic_solver_labels",
                 "output_times",
                 "verbose"):
        safe_declare_common_option(options, name)
    return options
def _declare_options(cls, options=None):
    """Declare scenario-instance construction and miscellaneous options."""
    options = PySPConfigBlock() if options is None else options
    # scenario instance construction
    for name in ("objective_sense_stage_based",
                 "output_instance_construction_time",
                 "compile_scenario_instances"):
        safe_declare_common_option(options, name)
    # various
    for name in ("verbose", "profile_memory"):
        safe_declare_common_option(options, name)
    return options
def _declare_options(cls, options=None):
    """Declare the DDSIP interface options.

    Fix: the 'firststage_suffix' description read "used to identity
    first-stage variables" — corrected to "identify".
    """
    if options is None:
        options = PySPConfigBlock()
    safe_declare_unique_option(
        options,
        "firststage_suffix",
        PySPConfigValue(
            "__DDSIP_FIRSTSTAGE",
            domain=_domain_must_be_str,
            description=("The suffix used to identify first-stage "
                         "variables to DDSIP. Default is "
                         "'__DDSIP_FIRSTSTAGE'"),
            doc=None,
            visibility=0),
        ap_group=_ddsip_group_label)
    safe_declare_unique_option(
        options,
        "config_file",
        PySPConfigValue(
            None,
            domain=_domain_must_be_str,
            description=("The name of a partial DDSIP configuration file "
                         "that contains option specifications unrelated to "
                         "the problem structure. If specified, the contents "
                         "of this file will be appended to the "
                         "configuration created by this solver interface. "
                         "Default is None."),
            doc=None,
            visibility=0),
        ap_group=_ddsip_group_label)
    safe_declare_common_option(options,
                               "verbose",
                               ap_group=_ddsip_group_label)
    return options
def _declare_options(cls, options=None):
    """Declare the DDSIP interface options.

    Fix: the 'firststage_suffix' description read "used to identity
    first-stage variables" — corrected to "identify".
    """
    if options is None:
        options = PySPConfigBlock()
    safe_declare_unique_option(
        options,
        "firststage_suffix",
        PySPConfigValue(
            "__DDSIP_FIRSTSTAGE",
            domain=_domain_must_be_str,
            description=("The suffix used to identify first-stage "
                         "variables to DDSIP. Default is "
                         "'__DDSIP_FIRSTSTAGE'"),
            doc=None,
            visibility=0),
        ap_group=_ddsip_group_label)
    safe_declare_unique_option(
        options,
        "config_file",
        PySPConfigValue(
            None,
            domain=_domain_must_be_str,
            description=("The name of a partial DDSIP configuration file "
                         "that contains option specifications unrelated to "
                         "the problem structure. If specified, the contents "
                         "of this file will be appended to the "
                         "configuration created by this solver interface. "
                         "Default is None."),
            doc=None,
            visibility=0),
        ap_group=_ddsip_group_label)
    safe_declare_common_option(options,
                               "verbose",
                               ap_group=_ddsip_group_label)
    return options
def _declare_options(cls, options=None):
    """Declare solver-manager options (not scenario-tree-manager options)
    plus the scenario tree preprocessor's own options."""
    options = PySPConfigBlock() if options is None else options
    # options for controlling the solver manager
    # (not the scenario tree manager)
    for name in ("solver_manager_pyro_host",
                 "solver_manager_pyro_port",
                 "solver_manager_pyro_shutdown"):
        safe_declare_common_option(options, name)
    ScenarioTreePreprocessor._declare_options(options)
    return options
def _declare_options(cls, options=None):
    """Declare EF options: CVaR / chance-constraint parameters plus the
    common solve- and solver-manager-related options."""
    options = PySPConfigBlock() if options is None else options
    safe_declare_unique_option(
        options,
        "cvar_weight",
        PySPConfigValue(
            1.0,
            domain=_domain_nonnegative,
            description=("The weight associated with the CVaR term in "
                         "the risk-weighted objective "
                         "formulation. If the weight is 0, then "
                         "*only* a non-weighted CVaR cost will appear "
                         "in the EF objective - the expected cost "
                         "component will be dropped. Default is 1.0."),
            doc=None,
            visibility=0),
        ap_group=_ef_group_label)
    safe_declare_unique_option(
        options,
        "generate_weighted_cvar",
        PySPConfigValue(
            False,
            domain=bool,
            description=("Add a weighted CVaR term to the "
                         "primary objective. Default is False."),
            doc=None,
            visibility=0),
        ap_group=_ef_group_label)
    safe_declare_unique_option(
        options,
        "risk_alpha",
        PySPConfigValue(
            0.95,
            domain=_domain_unit_interval,
            description=("The probability threshold associated with "
                         "CVaR (or any future) risk-oriented "
                         "performance metrics. Default is 0.95."),
            doc=None,
            visibility=0),
        ap_group=_ef_group_label)
    safe_declare_unique_option(
        options,
        "cc_alpha",
        PySPConfigValue(
            0.0,
            domain=_domain_unit_interval,
            description=("The probability threshold associated with a "
                         "chance constraint. The RHS will be one "
                         "minus this value. Default is 0."),
            doc=None,
            visibility=0),
        ap_group=_ef_group_label)
    safe_declare_unique_option(
        options,
        "cc_indicator_var",
        PySPConfigValue(
            None,
            domain=_domain_must_be_str,
            description=("The name of the binary variable to be used "
                         "to construct a chance constraint. Default "
                         "is None, which indicates no chance "
                         "constraint."),
            doc=None,
            visibility=0),
        ap_group=_ef_group_label)
    # common solve / solver-manager options (no argparse group)
    for name in ("solver",
                 "solver_io",
                 "solver_manager",
                 "solver_options",
                 "disable_warmstart",
                 "solver_manager_pyro_host",
                 "solver_manager_pyro_port",
                 "solver_manager_pyro_shutdown"):
        safe_declare_common_option(options, name)
    # common options grouped under the EF banner
    for name in ("verbose", "output_times", "output_solver_results"):
        safe_declare_common_option(options,
                                   name,
                                   ap_group=_ef_group_label)
    return options
def _declare_options(cls, options=None):
    """Declare this class's configuration options ('verbose' only)."""
    options = PySPConfigBlock() if options is None else options
    safe_declare_common_option(options, "verbose")
    return options
def _declare_options(cls, options=None):
    """Declare the solve- and I/O-related common options."""
    options = PySPConfigBlock() if options is None else options
    # solve and I/O related
    for name in ("symbolic_solver_labels",
                 "solver_options",
                 "solver",
                 "solver_io",
                 "solver_manager",
                 "disable_warmstart",
                 "disable_advanced_preprocessing",
                 "output_solver_log",
                 "keep_solver_files",
                 "comparison_tolerance_for_fixed_variables"):
        safe_declare_common_option(options, name)
    return options
def _declare_options(cls, options=None):
    """Declare options controlling the SD solver interface.

    Fix: the 'eval_seed1' description previously claimed "Default is
    None, meaning no seed will be provided", contradicting the actual
    declared default (2668655841019641); the text now states the real
    default.
    """
    if options is None:
        options = PySPConfigBlock()
    safe_declare_unique_option(
        options,
        "stopping_rule_tolerance",
        PySPConfigValue(
            "nominal",
            domain=_domain_sd_tolerance,
            description=("Stopping rule tolerance used by the SD solver. "
                         "Must be one of: %s. Default is 'nominal'."
                         % (str(_domain_sd_tolerance._values))),
            doc=None,
            visibility=0),
        ap_group=_sd_group_label)
    safe_declare_unique_option(
        options,
        "single_replication",
        PySPConfigValue(
            False,
            domain=bool,
            description=("Disables multiple replication procedure in "
                         "SD and uses a single replication."),
            doc=None,
            visibility=0),
        ap_group=_sd_group_label)
    safe_declare_unique_option(
        options,
        "print_cycle",
        PySPConfigValue(
            100,
            domain=_domain_positive_integer,
            description=("Number of iterations between output of "
                         "solution data to screen and file."),
            doc=None,
            visibility=0),
        ap_group=_sd_group_label)
    safe_declare_unique_option(
        options,
        "eval_run_flag",
        PySPConfigValue(
            False,
            domain=bool,
            description=("Set to evaluate on the run. This should be "
                         "only used for instances with relatively complete "
                         "recourse. This flag is not recommended because "
                         "accurate function evaluations are unnecessarily "
                         "time consuming. It is best to use a large print "
                         "cycle when this option is activated."),
            doc=None,
            visibility=0),
        ap_group=_sd_group_label)
    safe_declare_unique_option(
        options,
        "eval_flag",
        PySPConfigValue(
            False,
            domain=bool,
            description=("Set to get an estimated objective function value "
                         "for the final incumbent of each replication."),
            doc=None,
            visibility=0),
        ap_group=_sd_group_label)
    safe_declare_unique_option(
        options,
        "eval_seed1",
        PySPConfigValue(
            2668655841019641,
            domain=int,
            # description corrected to match the declared default above
            description=("Random number seed for re-sampling omegas during "
                         "optimality test. "
                         "Default is 2668655841019641."),
            doc=None,
            visibility=0),
        ap_group=_sd_group_label)
    safe_declare_unique_option(
        options,
        "eval_error",
        PySPConfigValue(
            0.01,
            domain=_domain_positive,
            description=("Objective evaluation is accurate to within "
                         "this much, with 95%% confidence. Default is 0.01."),
            doc=None,
            visibility=0),
        ap_group=_sd_group_label)
    safe_declare_unique_option(
        options,
        "mean_dev",
        PySPConfigValue(
            0.05,
            domain=_domain_positive,
            description=("Solution tolerance for deciding the usage of "
                         "mean solution. Default is 0.05."),
            doc=None,
            visibility=0),
        ap_group=_sd_group_label)
    safe_declare_unique_option(
        options,
        "min_iterations",
        PySPConfigValue(
            None,
            domain=_domain_nonnegative_integer,
            description=("Number of iterations which must pass before "
                         "optimality is checked. Default is None, meaning "
                         "no minimum is given."),
            doc=None,
            visibility=0),
        ap_group=_sd_group_label)
    safe_declare_unique_option(
        options,
        "max_iterations",
        PySPConfigValue(
            5000,
            domain=_domain_positive_integer,
            description=("Maximum number of iterations for any given "
                         "problem. Default is 5000."),
            doc=None,
            visibility=0),
        ap_group=_sd_group_label)
    safe_declare_common_option(options,
                               "verbose",
                               ap_group=_sd_group_label)
    return options
class ScenarioTreePreprocessor(PySPConfiguredObject):
    """Tracks and executes preprocessing of scenario (and bundle)
    instances between solves.

    Fixes relative to the original:
      - remove_scenario referenced the undefined name 'scenario_name'
        (NameError) and deleted two flag dictionaries twice (the second
        delete raised KeyError); it also never removed the stored
        scenario objective (a leak).
      - remove_bundle re-assigned the scenario->bundle mapping instead
        of deleting it, leaving stale entries that trip the asserts in
        add_scenario / add_bundle.
      - clear_update_flags iterated nonexistent 'self.instances'.
      - preprocess_bundles compared the bound method
        'solver.problem_format' (not called) to ProblemFormat.nl,
        which is always False.
    """

    _declared_options = \
        PySPConfigBlock("Options declared for the "
                        "ScenarioTreePreprocessor class")

    safe_declare_common_option(_declared_options,
                               "disable_advanced_preprocessing")
    safe_declare_common_option(_declared_options,
                               "preprocess_fixed_variables")

    #
    # various
    #

    safe_declare_common_option(_declared_options,
                               "output_times")
    safe_declare_common_option(_declared_options,
                               "verbose")

    def __init__(self, *args, **kwds):
        super(ScenarioTreePreprocessor, self).__init__(*args, **kwds)
        self._scenario_solver = {}
        self._scenario_instance = {}
        self._scenario_objective = {}

        #
        # Bundle related objects
        #
        self._bundle_instances = {}
        self._bundle_solvers = {}
        self._bundle_scenarios = {}
        self._scenario_to_bundle_map = {}
        self._bundle_first_preprocess = {}

        # maps between instance name and a list of variable (name,
        # index) pairs
        self.fixed_variables = {}
        self.freed_variables = {}
        # indicates update status of instances since the last
        # preprocessing round
        self.objective_updated = {}
        self.all_constraints_updated = {}
        self.constraints_updated_list = {}

    def add_scenario(self, scenario, scenario_instance, scenario_solver):
        """Register a scenario instance/solver pair for preprocessing."""
        assert scenario._name not in self._scenario_instance
        assert scenario._name not in self._scenario_to_bundle_map

        self._scenario_instance[scenario._name] = scenario_instance
        self._scenario_solver[scenario._name] = scenario_solver
        self._scenario_objective[scenario._name] = \
            scenario._instance_objective
        self.fixed_variables[scenario._name] = []
        self.freed_variables[scenario._name] = []
        # a freshly added scenario must be fully preprocessed once
        # (the original set these two flags twice; the redundant
        # assignments were removed - no behavior change)
        self.objective_updated[scenario._name] = True
        self.all_constraints_updated[scenario._name] = True
        self.constraints_updated_list[scenario._name] = []

        if not self._options.disable_advanced_preprocessing:
            scenario_instance = self._scenario_instance[scenario._name]
            for block in scenario_instance.block_data_objects(active=True):
                block._gen_obj_ampl_repn = False
                block._gen_con_ampl_repn = False
                block._gen_obj_canonical_repn = False
                block._gen_con_canonical_repn = False

    def remove_scenario(self, scenario):
        """Unregister a scenario and drop all of its preprocessing state."""
        assert scenario._name in self._scenario_instance
        assert scenario._name not in self._scenario_to_bundle_map

        if self._options.disable_advanced_preprocessing:
            # BUG FIX: this previously referenced the undefined name
            # 'scenario_name' and raised NameError
            scenario_instance = self._scenario_instance[scenario._name]
            for block in scenario_instance.block_data_objects(active=True):
                block._gen_obj_ampl_repn = False
                block._gen_con_ampl_repn = False
                block._gen_obj_canonical_repn = False
                block._gen_con_canonical_repn = False

        del self._scenario_instance[scenario._name]
        del self._scenario_solver[scenario._name]
        # BUG FIX: the stored objective was previously never removed
        del self._scenario_objective[scenario._name]
        del self.fixed_variables[scenario._name]
        del self.freed_variables[scenario._name]
        # BUG FIX: objective_updated and all_constraints_updated were
        # previously deleted twice; the second delete raised KeyError
        del self.objective_updated[scenario._name]
        del self.all_constraints_updated[scenario._name]
        del self.constraints_updated_list[scenario._name]

    def add_bundle(self, bundle, bundle_instance, bundle_solver):
        """Register a bundle instance/solver pair and map its scenarios."""
        assert bundle._name not in self._bundle_instances
        self._bundle_instances[bundle._name] = bundle_instance
        self._bundle_solvers[bundle._name] = bundle_solver
        self._bundle_scenarios[bundle._name] = list(bundle._scenario_names)
        self._bundle_first_preprocess[bundle._name] = True
        for scenario_name in self._bundle_scenarios[bundle._name]:
            assert scenario_name in self._scenario_instance
            assert scenario_name not in self._scenario_to_bundle_map
            self._scenario_to_bundle_map[scenario_name] = bundle._name

    def remove_bundle(self, bundle):
        """Unregister a bundle and unmap its scenarios."""
        assert bundle._name in self._bundle_instances
        for scenario_name in self._bundle_scenarios[bundle._name]:
            assert scenario_name in self._scenario_instance
            assert scenario_name in self._scenario_to_bundle_map
            # BUG FIX: this previously re-assigned the mapping instead
            # of deleting it, leaving stale scenario->bundle entries
            del self._scenario_to_bundle_map[scenario_name]
        del self._bundle_instances[bundle._name]
        del self._bundle_solvers[bundle._name]
        del self._bundle_scenarios[bundle._name]
        del self._bundle_first_preprocess[bundle._name]

    def clear_update_flags(self, name=None):
        """Reset the objective/constraint update flags for one scenario
        (or for all registered scenarios when name is None)."""
        if name is not None:
            self.objective_updated[name] = False
            self.all_constraints_updated[name] = False
            self.constraints_updated_list[name] = []
        else:
            # BUG FIX: 'self.instances' does not exist on this class;
            # iterate the registered scenario instances instead
            for key in self._scenario_instance:
                self.objective_updated[key] = False
                self.all_constraints_updated[key] = False
                self.constraints_updated_list[key] = []

    def has_fixed_variables(self, name=None):
        """Return True if any (or the named) scenario has pending
        fixed-variable updates."""
        if name is None:
            for val in itervalues(self.fixed_variables):
                if len(val) > 0:
                    return True
            return False
        else:
            return len(self.fixed_variables[name]) > 0

    def has_freed_variables(self, name=None):
        """Return True if any (or the named) scenario has pending
        freed-variable updates."""
        if name is None:
            for val in itervalues(self.freed_variables):
                if len(val) > 0:
                    return True
            return False
        else:
            return len(self.freed_variables[name]) > 0

    def clear_fixed_variables(self, name=None):
        """Clear pending fixed-variable updates for one or all scenarios."""
        if name is None:
            for key in self.fixed_variables:
                self.fixed_variables[key] = []
        else:
            if name in self.fixed_variables:
                self.fixed_variables[name] = []
            else:
                raise KeyError("KeyError: %s" % name)

    def clear_freed_variables(self, name=None):
        """Clear pending freed-variable updates for one or all scenarios."""
        if name is None:
            for key in self.freed_variables:
                self.freed_variables[key] = []
        else:
            if name in self.freed_variables:
                self.freed_variables[name] = []
            else:
                raise KeyError("KeyError: %s" % name)

    #
    # Preprocess scenarios (ignoring bundles even if they exists)
    #

    def preprocess_scenarios(self, scenarios=None):
        """Preprocess the named scenarios (all scenarios by default),
        ignoring any bundle structure."""
        start_time = time.time()

        if scenarios is None:
            scenarios = self._scenario_instance.keys()

        if self._options.verbose:
            print("Preprocessing %s scenarios" % len(scenarios))

        if self._options.verbose:
            if len(self._bundle_instances) > 0:
                print("Preprocessing scenarios without bundles. Bundle "
                      "preprocessing dependencies will be lost. Scenario "
                      "preprocessing flags must be reset before preprocessing "
                      "bundles.")

        for scenario_name in scenarios:
            self._preprocess_scenario(scenario_name,
                                      self._scenario_solver[scenario_name])
            # We've preprocessed the instance, reset the relevant flags
            self.clear_update_flags(scenario_name)
            self.clear_fixed_variables(scenario_name)
            self.clear_freed_variables(scenario_name)

        end_time = time.time()

        if self._options.output_times:
            print("Scenario preprocessing time=%.2f seconds"
                  % (end_time - start_time))

    #
    # Preprocess bundles (and the scenarios they depend on)
    #

    def preprocess_bundles(self,
                           bundles=None,
                           force_preprocess_bundle_objective=False,
                           force_preprocess_bundle_constraints=False):
        """Preprocess the named bundles (all bundles by default) along
        with the scenarios they depend on."""
        start_time = time.time()

        if len(self._bundle_instances) == 0:
            raise RuntimeError(
                "Unable to preprocess scenario bundles. Bundling "
                "does not seem to be activated.")

        if bundles is None:
            bundles = self._bundle_instances.keys()

        if self._options.verbose:
            print("Preprocessing %s bundles" % len(bundles))

        preprocess_bundle_objective = 0b01
        preprocess_bundle_constraints = 0b10

        for bundle_name in bundles:

            preprocess_bundle = 0
            solver = self._bundle_solvers[bundle_name]
            for scenario_name in self._bundle_scenarios[bundle_name]:

                if self.objective_updated[scenario_name]:
                    preprocess_bundle |= preprocess_bundle_objective
                if ((len(self.fixed_variables[scenario_name]) > 0) or \
                    (len(self.freed_variables[scenario_name]) > 0)) and \
                   self._options.preprocess_fixed_variables:
                    preprocess_bundle |= \
                        preprocess_bundle_objective | \
                        preprocess_bundle_constraints
                if self._bundle_first_preprocess[bundle_name]:
                    preprocess_bundle |= \
                        preprocess_bundle_objective | \
                        preprocess_bundle_constraints
                    self._bundle_first_preprocess[bundle_name] = False

                self._preprocess_scenario(scenario_name, solver)

                # We've preprocessed the instance, reset the relevant flags
                self.clear_update_flags(scenario_name)
                self.clear_fixed_variables(scenario_name)
                self.clear_freed_variables(scenario_name)

            if force_preprocess_bundle_objective:
                preprocess_bundle |= preprocess_bundle_objective
            if force_preprocess_bundle_constraints:
                preprocess_bundle |= preprocess_bundle_constraints

            if preprocess_bundle:

                bundle_ef_instance = \
                    self._bundle_instances[bundle_name]

                # BUG FIX: 'problem_format' is a method; the original
                # compared the bound method itself to ProblemFormat.nl
                # (always False), so bundles always took the canonical
                # branch even for NL-format solvers
                if solver.problem_format() == ProblemFormat.nl:
                    idMap = {}
                    if preprocess_bundle & preprocess_bundle_objective:
                        ampl_preprocess_block_objectives(bundle_ef_instance,
                                                         idMap=idMap)
                    if preprocess_bundle & preprocess_bundle_constraints:
                        ampl_preprocess_block_constraints(bundle_ef_instance,
                                                          idMap=idMap)
                else:
                    idMap = {}
                    if preprocess_bundle & preprocess_bundle_objective:
                        canonical_preprocess_block_objectives(
                            bundle_ef_instance,
                            idMap=idMap)
                    if preprocess_bundle & preprocess_bundle_constraints:
                        canonical_preprocess_block_constraints(
                            bundle_ef_instance,
                            idMap=idMap)

        end_time = time.time()

        if self._options.output_times:
            print("Bundle preprocessing time=%.2f seconds"
                  % (end_time - start_time))

    def _preprocess_scenario(self, scenario_name, solver):
        """Preprocess a single scenario instance based on its pending
        update flags, doing the minimal amount of work required."""
        assert scenario_name in self._scenario_instance
        scenario_objective_active = self._scenario_objective[
            scenario_name].active
        # because the preprocessor will skip the scenario objective if
        # it is part of a bundle and not active
        self._scenario_objective[scenario_name].activate()
        def _cleanup():
            # restore the objective's original active status
            if not scenario_objective_active:
                self._scenario_objective[scenario_name].deactivate()
        scenario_instance = self._scenario_instance[scenario_name]
        instance_fixed_variables = self.fixed_variables[scenario_name]
        instance_freed_variables = self.freed_variables[scenario_name]
        instance_all_constraints_updated = \
            self.all_constraints_updated[scenario_name]
        instance_constraints_updated_list = \
            self.constraints_updated_list[scenario_name]
        instance_objective_updated = self.objective_updated[scenario_name]
        persistent_solver_in_use = isinstance(solver, PersistentSolver)

        if (not instance_objective_updated) and \
           (not instance_fixed_variables) and \
           (not instance_freed_variables) and \
           (not instance_all_constraints_updated) and \
           (len(instance_constraints_updated_list) == 0):
            # instance is already preprocessed, nothing needs to be done
            if self._options.verbose:
                print("No preprocessing necessary for scenario %s"
                      % (scenario_name))
            _cleanup()
            return

        if (instance_fixed_variables or instance_freed_variables) and \
           (self._options.preprocess_fixed_variables):
            if self._options.verbose:
                print("Running full preprocessing for scenario %s"
                      % (scenario_name))
            if solver.problem_format() == ProblemFormat.nl:
                ampl_expression_preprocessor({}, model=scenario_instance)
            else:
                canonical_expression_preprocessor({}, model=scenario_instance)
            # We've preprocessed the entire instance, no point in
            # checking anything else
            _cleanup()
            return

        if instance_objective_updated:
            if self._options.verbose:
                print("Preprocessing objective for scenario %s"
                      % (scenario_name))
            # if only the objective changed, there is minimal work to do.
            if solver.problem_format() == ProblemFormat.nl:
                ampl_preprocess_block_objectives(scenario_instance)
            else:
                canonical_preprocess_block_objectives(scenario_instance)
            if persistent_solver_in_use and \
               solver.instance_compiled():
                solver.compile_objective(scenario_instance)

        if (instance_fixed_variables or instance_freed_variables) and \
           (persistent_solver_in_use):
            if self._options.verbose:
                print("Compiling fixed status updates in persistent solver "
                      "for scenario %s" % (scenario_name))
            # it can be the case that the solver plugin no longer has an
            # instance compiled, depending on what state the solver plugin
            # is in relative to the instance. if this is the case, just
            # don't compile the variable bounds.
            if solver.instance_compiled():
                variables_to_change = \
                    instance_fixed_variables + instance_freed_variables
                solver.compile_variable_bounds(
                    scenario_instance,
                    vars_to_update=variables_to_change)

        if instance_all_constraints_updated:
            if self._options.verbose:
                print("Preprocessing all constraints for scenario %s"
                      % (scenario_name))
            if solver.problem_format() == ProblemFormat.nl:
                idMap = {}
                for block in scenario_instance.block_data_objects(
                        active=True,
                        descend_into=True):
                    ampl_preprocess_block_constraints(block, idMap=idMap)
            else:
                idMap = {}
                for block in scenario_instance.block_data_objects(
                        active=True,
                        descend_into=True):
                    canonical_preprocess_block_constraints(block, idMap=idMap)
        elif len(instance_constraints_updated_list) > 0:
            if self._options.verbose:
                print("Preprocessing constraint list (size=%s) for "
                      "scenario %s" % (len(instance_constraints_updated_list),
                                       scenario_name))
            idMap = {}
            if solver.problem_format() == ProblemFormat.nl:
                repn_name = "_ampl_repn"
                repn_func = generate_ampl_repn
            else:
                repn_name = "_canonical_repn"
                repn_func = generate_canonical_repn
            for constraint_data in instance_constraints_updated_list:
                if isinstance(constraint_data, LinearCanonicalRepn):
                    continue
                block = constraint_data.parent_block()
                # Get/Create the ComponentMap for the repn storage
                if not hasattr(block, repn_name):
                    setattr(block, repn_name, ComponentMap())
                getattr(block, repn_name)[constraint_data] = \
                    repn_func(constraint_data.body, idMap=idMap)

        _cleanup()

    def get_solver_keywords(self):
        """Return extra solve() keywords implied by the current options."""
        kwds = {}
        if not self._options.disable_advanced_preprocessing:
            if not self._options.preprocess_fixed_variables:
                kwds['output_fixed_variable_bounds'] = True
        return kwds
class PySPConfiguredExtension(PySPConfiguredObject):
    # Base class for PySP extension plugins configured through a
    # PySPConfigBlock. Each subclass's options are registered under a
    # class-specific prefix (see extension_prefix) so multiple
    # extensions can share one options block without name collisions.

    _declared_options = \
        PySPConfigBlock("Options declared for the "
                        "PySPConfiguredExtension class")

    safe_declare_common_option(_declared_options,
                               "extension_precedence")

    # Subclasses may override this to set the option-name prefix;
    # None means "derive the prefix from the class name".
    _default_prefix = None

    def __init__(self):
        PySPConfiguredObject.__init__(self,
                                      None,
                                      prefix=self.extension_prefix())
        if not isinstance(self, SingletonPlugin):
            raise TypeError("PySPConfiguredExtension implementations must "
                            "inherit from SingletonPlugin")

    @classmethod
    def extension_prefix(cls):
        """Return the option-name prefix for this extension class.

        Walks the MRO and returns the first non-None '_default_prefix'
        found; otherwise falls back to the lowercased class name
        followed by an underscore."""
        bases = inspect.getmro(cls)
        assert bases[-1] is object
        for base in bases:
            if getattr(base, '_default_prefix', None) is not None:
                return base._default_prefix
        return cls.__name__.lower() + "_"

    @classmethod
    def register_options(cls, *args, **kwds):
        """cls.register_options([options]) -> options. Fills an options
        block with all registered options for this class. The optional
        argument 'options' can be a previously existing options block,
        which would be both updated and returned by this function. The
        optional flag 'prefix' can be set to indicate that all class
        options should be registered with the given prefix prepended to
        their original name."""
        if 'prefix' not in kwds:
            kwds['prefix'] = cls.extension_prefix()
        return super(PySPConfiguredExtension, cls).\
            register_options(*args, **kwds)

    @classmethod
    def extract_options(cls,
                        options,
                        prefix=None,
                        srcprefix=None,
                        error_if_missing=True):
        """Copy the set of registered options for this class from an
        existing options block and return a new options block with only
        those values. This method will preserve the _userSet status of
        all options.

        The optional flag 'prefix' can be set to indicate that all
        registered class options in the returned options object will
        have a name prepended with the given prefix.

        The optional flag 'srcprefix' can be set to indicate that all
        registered class options on the input options object have a
        name prepended with the given prefix.

        The optional flag 'error_if_missing' controls whether or not an
        exception is raised when registered options are missing from
        the input options object."""
        if prefix is None:
            prefix = cls.extension_prefix()
        if srcprefix is None:
            srcprefix = cls.extension_prefix()
        return super(PySPConfiguredExtension, cls).extract_options(
            options,
            prefix=prefix,
            srcprefix=srcprefix,
            error_if_missing=error_if_missing)

    @classmethod
    def extract_user_options_to_dict(cls,
                                     options,
                                     prefix=None,
                                     srcprefix=None,
                                     error_if_missing=True,
                                     sparse=False):
        """Copy the set of registered options for this class from an
        existing options block and return a dictionary of options
        (name -> value) with those values. This method will preserve
        the _userSet status of all options.

        The optional flag 'prefix' can be set to indicate that all
        registered class options will have a name prepended with the
        given prefix in the output dictionary.

        The optional flag 'srcprefix' can be set to indicate that all
        registered class options on the input options object have a
        name prepended with the given prefix.

        The optional flag 'error_if_missing' controls whether or not an
        exception is raised when registered options are missing from
        the input options object.

        The optional flag 'sparse' controls whether non user-set values
        should be included in the returned dictionary."""
        if prefix is None:
            prefix = cls.extension_prefix()
        if srcprefix is None:
            srcprefix = cls.extension_prefix()
        return super(PySPConfiguredExtension, cls).\
            extract_user_options_to_dict(
                options,
                prefix=prefix,
                srcprefix=srcprefix,
                error_if_missing=error_if_missing,
                sparse=sparse)

    @classmethod
    def update_options_from_argparse(cls,
                                     options,
                                     ap_data,
                                     prefix=None,
                                     srcprefix=None,
                                     skip_userset=False,
                                     error_if_missing=True):
        """Update the input options object by extracting all registered
        options for this class from an argparse Namespace object.

        This method cannot determine if the values on the argparse
        Namespace object were set explicitly or are defaults.
        Therefore, the _userSet status will be updated on all options
        that are found. The method only compares the names against the
        attributes found on the argparse Namespace object. No other
        form of validation is performed.

        The optional flag 'prefix' can be set to indicate that all
        registered class options will have a name prepended with the
        given prefix on the updated options object.

        The optional flag 'srcprefix' can be set to indicate that
        registered class option names should be prepended with the
        given prefix when searching for items on the argparse Namespace
        object.

        The optional flag 'skip_userset' can be set to indicate that
        options with the _userSet flag already set to True (on the
        options object being updated) should be skipped when loading
        options from the argparse Namespace object.

        The optional flag 'error_if_missing' controls whether or not an
        exception is raised when registered option names are missing
        from the argparse Namespace object."""
        if prefix is None:
            prefix = cls.extension_prefix()
        if srcprefix is None:
            srcprefix = cls.extension_prefix()
        return super(PySPConfiguredExtension, cls).\
            update_options_from_argparse(
                options,
                ap_data,
                prefix=prefix,
                srcprefix=srcprefix,
                skip_userset=skip_userset,
                error_if_missing=error_if_missing)

    @classmethod
    def validate_options(cls,
                         options,
                         prefix=None,
                         error_if_missing=True):
        """Validate that all registered options can be found in the
        options block and that their option definitions are the same.

        The optional flag 'prefix' can be set to indicate that all
        registered class options will have a name prepended with the
        given prefix.

        The optional flag 'error_if_missing' can be used to control
        whether or not an exception is raised when registered options
        are missing."""
        if prefix is None:
            prefix = cls.extension_prefix()
        return super(PySPConfiguredExtension, cls).validate_options(
            options,
            prefix=prefix,
            error_if_missing=error_if_missing)
class ScenarioTreeManagerWorkerPyro(_ScenarioTreeManagerWorker,
                                    ScenarioTreeManager,
                                    PySPConfiguredObject):
    """A scenario tree manager worker that lives on a Pyro-based
    scenario tree server. It constructs and manages the scenario
    (or bundle) instances assigned to it by the client."""

    _declared_options = \
        PySPConfigBlock("Options declared for the "
                        "ScenarioTreeManagerWorkerPyro class")

    #
    # scenario instance construction
    #
    safe_declare_common_option(_declared_options,
                               "objective_sense_stage_based")
    safe_declare_common_option(_declared_options,
                               "output_instance_construction_time")
    safe_declare_common_option(_declared_options,
                               "compile_scenario_instances")

    #
    # various
    #
    safe_declare_common_option(_declared_options,
                               "verbose")
    safe_declare_common_option(_declared_options,
                               "profile_memory")

    def __init__(self, *args, **kwds):
        super(ScenarioTreeManagerWorkerPyro, self).__init__(*args, **kwds)
        self._modules_imported = None
        # The name of the scenario tree server owning this worker
        self._server_name = None
        # So we have access to real scenario and bundle probabilities
        self._full_scenario_tree = None
        self._worker_name = None

    #
    # Abstract methods for ScenarioTreeManager:
    #

    def _init(self,
              server_name,
              full_scenario_tree,
              worker_name,
              worker_init,
              modules_imported):
        """One-time setup: compress the full scenario tree down to the
        scenarios (or bundles) this worker is responsible for, build
        their instances, and link them into the compressed tree."""
        # check to make sure no base class has implemented _init
        try:
            super(ScenarioTreeManagerWorkerPyro, self)._init()
        except NotImplementedError:
            pass
        else:
            assert False, "developer error"

        self._modules_imported = modules_imported
        # The name of the scenario tree server owning this worker
        self._server_name = server_name
        # So we have access to real scenario and bundle probabilities
        self._full_scenario_tree = full_scenario_tree
        self._worker_name = worker_name

        # Collect the flat list of scenario names this worker must
        # construct, regardless of whether it was initialized with
        # scenarios or with bundles of scenarios.
        scenarios_to_construct = []
        if worker_init.type_ == WorkerInitType.Scenarios:
            assert type(worker_init.names) in (list, tuple)
            assert len(worker_init.names) > 0
            assert worker_init.data is None
            if self._options.verbose:
                print("Constructing worker with name %s for scenarios: %s"
                      % (worker_name, str(worker_init.names)))
            scenarios_to_construct.extend(worker_init.names)
        elif worker_init.type_ == WorkerInitType.Bundles:
            assert type(worker_init.names) in (list, tuple)
            assert type(worker_init.data) is dict
            assert len(worker_init.names) > 0
            if self._options.verbose:
                print("Constructing worker with name %s for bundle list:"
                      % (worker_name))
                for bundle_name in worker_init.names:
                    assert type(worker_init.data[bundle_name]) in (list, tuple)
                    print(" - %s: %s" % (bundle_name,
                                         worker_init.data[bundle_name]))
            for bundle_name in worker_init.names:
                assert type(worker_init.data[bundle_name]) in (list, tuple)
                scenarios_to_construct.extend(worker_init.data[bundle_name])

        # compress the scenario tree to reflect those instances for
        # which this ph solver server is responsible for constructing.
        self._scenario_tree = \
            self._full_scenario_tree.make_compressed(scenarios_to_construct,
                                                     normalize=False)
        self._instances = \
            self._full_scenario_tree._scenario_instance_factory.\
            construct_instances_for_scenario_tree(
                self._scenario_tree,
                output_instance_construction_time=\
                    self._options.output_instance_construction_time,
                profile_memory=self._options.profile_memory,
                compile_scenario_instances=\
                    self._options.compile_scenario_instances)

        # with the scenario instances now available, have the scenario
        # tree compute the variable match indices at each node.
        self._scenario_tree.linkInInstances(
            self._instances,
            objective_sense=self._options.objective_sense_stage_based,
            create_variable_ids=True)

        # All scenarios are expected to share a single objective sense.
        self._objective_sense = \
            self._scenario_tree._scenarios[0]._objective_sense
        assert all(_s._objective_sense == self._objective_sense
                   for _s in self._scenario_tree._scenarios)

        #
        # Create bundle if needed
        #
        if worker_init.type_ == WorkerInitType.Bundles:
            for bundle_name in worker_init.names:
                assert not self._scenario_tree.contains_bundle(bundle_name)
                self.add_bundle(bundle_name, worker_init.data[bundle_name])
                assert self._scenario_tree.contains_bundle(bundle_name)

    # Override the implementation on _ScenarioTreeManagerWorker
    def _close_impl(self):
        """Close the worker, warning about any user-set options that
        were never read (likely misspelled or misplaced options)."""
        super(ScenarioTreeManagerWorkerPyro, self)._close_impl()
        ignored_options = dict((_c._name, _c.value(False))
                               for _c in self._options.unused_user_values())
        if len(ignored_options):
            print("")
            print("*** WARNING: The following options were explicitly "
                  "set but never accessed by worker %s: "
                  % (self._worker_name))
            for name in ignored_options:
                print(" - %s: %s" % (name, ignored_options[name]))
            print("*** If you believe this is a bug, please report it "
                  "to the PySP developers.")
            print("")

    def _invoke_function_impl(self,
                              function_name,
                              module_name,
                              invocation_type=InvocationType.Single,
                              function_args=(),
                              function_kwds=None):
        """Invoke an external function on this worker's scenarios.
        'invocation_type' may arrive serialized as a (key, data)
        tuple; see below."""
        start_time = time.time()

        if self._options.verbose:
            print("Received request to invoke external function"
                  "=" + function_name + " in module=" + module_name)

        # pyutilib.Enum can not be serialized depending on the
        # serializer type used by Pyro, so we just transmit it
        # as a (key, data) tuple in that case
        if type(invocation_type) is tuple:
            _invocation_type_key, _invocation_type_data = invocation_type
            assert isinstance(_invocation_type_key, string_types)
            invocation_type = getattr(InvocationType,
                                      _invocation_type_key)
            if _invocation_type_data is not None:
                assert isinstance(invocation_type, _EnumValueWithData)
                invocation_type = invocation_type(_invocation_type_data)

        result = self._invoke_function_by_worker(
            function_name,
            module_name,
            invocation_type=invocation_type,
            function_args=function_args,
            function_kwds=function_kwds)

        end_time = time.time()
        if self._options.output_times or \
           self._options.verbose:
            print("External function invocation time=%.2f seconds"
                  % (end_time - start_time))

        return result

    def _invoke_method_impl(self,
                            method_name,
                            method_args=(),
                            method_kwds=None):
        """Invoke a named method on this worker object itself."""
        start_time = time.time()

        if self._options.verbose:
            print("Received request to invoke method="+method_name)

        if method_kwds is None:
            method_kwds = {}
        result = getattr(self, method_name)(*method_args, **method_kwds)

        end_time = time.time()
        if self._options.output_times or \
           self._options.verbose:
            print("Method invocation time=%.2f seconds"
                  % (end_time - start_time))

        return result

    # implemented by _ScenarioTreeManagerWorker
    #def _add_bundle_impl(...)

    # implemented by _ScenarioTreeManagerWorker
    #def _remove_bundle_impl(...)

    #
    # Override the invoke_function and invoke_method interface methods
    # on ScenarioTreeManager
    # ** NOTE **: These versions are meant to be invoked locally.
    #             The client-side will always invoke the *_impl
    #             methods, which do not accept the async or
    #             oneway keywords. When invoked here, the
    #             async and oneway keywords behave like they
    #             do for the Serial solver manager (they are
    #             a dummy interface)
    #

    def invoke_function(self,
                        function_name,
                        module_name,
                        invocation_type=InvocationType.Single,
                        function_args=(),
                        function_kwds=None,
                        async=False,
                        oneway=False):
class ExtensiveFormAlgorithm(PySPConfiguredObject):
    """Builds, writes, and solves the extensive form (deterministic
    equivalent) of the stochastic program held by a scenario tree
    manager.

    After solve() returns, the following attributes hold the results
    for scripting use: objective, objective_sense, gap, bound,
    termination_condition, solver_status, solution_status,
    solver_results, pyomo_solve_time, solve_time. They are reset to
    'undefined' at the start of each solve() call."""

    _declared_options = \
        PySPConfigBlock("Options declared for the "
                        "ExtensiveFormAlgorithm class")

    safe_declare_unique_option(
        _declared_options,
        "cvar_weight",
        PySPConfigValue(
            1.0,
            domain=_domain_nonnegative,
            description=("The weight associated with the CVaR term in "
                         "the risk-weighted objective "
                         "formulation. If the weight is 0, then "
                         "*only* a non-weighted CVaR cost will appear "
                         "in the EF objective - the expected cost "
                         "component will be dropped. Default is 1.0."),
            doc=None,
            visibility=0),
        ap_group=_ef_group_label)
    safe_declare_unique_option(
        _declared_options,
        "generate_weighted_cvar",
        PySPConfigValue(
            False,
            domain=bool,
            description=("Add a weighted CVaR term to the "
                         "primary objective. Default is False."),
            doc=None,
            visibility=0),
        ap_group=_ef_group_label)
    safe_declare_unique_option(
        _declared_options,
        "risk_alpha",
        PySPConfigValue(
            0.95,
            domain=_domain_unit_interval,
            description=("The probability threshold associated with "
                         "CVaR (or any future) risk-oriented "
                         "performance metrics. Default is 0.95."),
            doc=None,
            visibility=0),
        ap_group=_ef_group_label)
    safe_declare_unique_option(
        _declared_options,
        "cc_alpha",
        PySPConfigValue(
            0.0,
            domain=_domain_unit_interval,
            description=("The probability threshold associated with a "
                         "chance constraint. The RHS will be one "
                         "minus this value. Default is 0."),
            doc=None,
            visibility=0),
        ap_group=_ef_group_label)
    safe_declare_unique_option(
        _declared_options,
        "cc_indicator_var",
        PySPConfigValue(
            None,
            domain=_domain_must_be_str,
            description=("The name of the binary variable to be used "
                         "to construct a chance constraint. Default "
                         "is None, which indicates no chance "
                         "constraint."),
            doc=None,
            visibility=0),
        ap_group=_ef_group_label)
    safe_declare_unique_option(
        _declared_options,
        "mipgap",
        PySPConfigValue(
            None,
            domain=_domain_unit_interval,
            description=("Specifies the mipgap for the EF solve."),
            doc=None,
            visibility=0),
        ap_group=_ef_group_label)
    safe_declare_unique_option(
        _declared_options,
        "solver",
        PySPConfigValue(
            "cplex",
            domain=_domain_must_be_str,
            description=("Specifies the solver used to solve the "
                         "extensive form model. Default is cplex."),
            doc=None,
            visibility=0),
        ap_group=_ef_group_label)
    safe_declare_unique_option(
        _declared_options,
        "solver_io",
        PySPConfigValue(
            None,
            domain=_domain_must_be_str,
            description=("The type of IO used to execute the "
                         "solver. Different solvers support different "
                         "types of IO, but the following are common "
                         "options: lp - generate LP files, nl - "
                         "generate NL files, python - direct Python "
                         "interface, os - generate OSiL XML files."),
            doc=None,
            visibility=0),
        ap_group=_ef_group_label)
    safe_declare_unique_option(
        _declared_options,
        "solver_manager",
        PySPConfigValue(
            'serial',
            domain=_domain_must_be_str,
            description=("The type of solver manager used to "
                         "coordinate solves. Default is serial."),
            doc=None,
            visibility=0),
        ap_group=_ef_group_label)
    safe_declare_unique_option(
        _declared_options,
        "solver_options",
        PySPConfigValue(
            (),
            domain=_domain_tuple_of_str_or_dict,
            description=("Persistent solver options used when "
                         "solving the extensive form model. This "
                         "option can be used multiple times from "
                         "the command line to specify more than "
                         "one solver option."),
            doc=None,
            visibility=0),
        ap_kwds={'action': 'append'},
        ap_group=_ef_group_label)
    safe_declare_unique_option(
        _declared_options,
        "disable_warmstart",
        PySPConfigValue(
            False,
            domain=bool,
            description=("Disable warm-start of EF solves. "
                         "Default is False."),
            doc=None,
            visibility=0),
        ap_group=_ef_group_label)
    safe_declare_unique_option(
        _declared_options,
        "pyro_host",
        PySPConfigValue(
            None,
            domain=_domain_must_be_str,
            description=("The hostname to bind on when searching "
                         "for a Pyro nameserver."),
            doc=None,
            visibility=0),
        ap_group=_ef_group_label)
    safe_declare_unique_option(
        _declared_options,
        "pyro_port",
        PySPConfigValue(
            None,
            domain=_domain_nonnegative_integer,
            description=("The port to bind on when searching for "
                         "a Pyro nameserver."),
            doc=None,
            visibility=0),
        ap_group=_ef_group_label)
    safe_declare_unique_option(
        _declared_options,
        "pyro_shutdown",
        PySPConfigValue(
            False,
            domain=bool,
            description=(
                "Attempt to shut down all Pyro-related components "
                "associated with the Pyro name server used by any scenario "
                "tree manager or solver manager. Components to shutdown "
                "include the name server, dispatch server, and any "
                "scenariotreeserver or pyro_mip_server processes. Note "
                "that if Pyro4 is in use the nameserver will always "
                "ignore this request."),
            doc=None,
            visibility=0),
        ap_group=_ef_group_label)
    safe_declare_unique_option(
        _declared_options,
        "pyro_shutdown_workers",
        PySPConfigValue(
            False,
            domain=bool,
            description=(
                "Upon exit, send shutdown requests to all worker "
                "processes that were acquired through the dispatcher. "
                "This typically includes scenariotreeserver processes "
                "(used by the Pyro scenario tree manager) and pyro_mip_server "
                "processes (used by the Pyro solver manager). This leaves "
                "any dispatchers and namservers running as well as any "
                "processes registered with the dispather that were not "
                "acquired for work by this client."),
            doc=None,
            visibility=0),
        ap_group=_ef_group_label)
    safe_declare_common_option(_declared_options,
                               "symbolic_solver_labels",
                               ap_group=_ef_group_label)
    safe_declare_common_option(_declared_options,
                               "output_solver_log",
                               ap_group=_ef_group_label)
    safe_declare_common_option(_declared_options,
                               "verbose",
                               ap_group=_ef_group_label)
    safe_declare_common_option(_declared_options,
                               "output_times",
                               ap_group=_ef_group_label)
    safe_declare_common_option(_declared_options,
                               "keep_solver_files",
                               ap_group=_ef_group_label)
    safe_declare_common_option(_declared_options,
                               "output_solver_results",
                               ap_group=_ef_group_label)

    def __enter__(self):
        return self

    def __exit__(self, *args):
        self.close()

    def close(self):
        """Release the EF instance, solver, and solver manager, and
        reset all result attributes to 'undefined'."""
        self.destroy_ef()
        if self._solver is not None:
            self._solver.deactivate()
        if self._solver_manager is not None:
            # Optionally ask a Pyro solver manager to shut down its
            # acquired worker processes before deactivating it.
            if isinstance(self._solver_manager,
                          pyomo.solvers.plugins.smanager.\
                          pyro.SolverManager_Pyro):
                if self.get_option("pyro_shutdown_workers"):
                    self._solver_manager.shutdown_workers()
            self._solver_manager.deactivate()
        self._solver_manager = None

        self._manager = None
        self.objective = undefined
        self.objective_sense = undefined
        self.gap = undefined
        self.termination_condition = undefined
        self.solver_status = undefined
        self.solution_status = undefined
        self.solver_results = undefined
        self.pyomo_solve_time = undefined
        self.solve_time = undefined

    def __init__(self, manager, *args, **kwds):
        import pyomo.solvers.plugins.smanager.pyro
        super(ExtensiveFormAlgorithm, self).__init__(*args, **kwds)

        # TODO: after PH moves over to the new code
        #if not isinstance(manager, ScenarioTreeManager):
        #    raise TypeError("ExtensiveFormAlgorithm requires an instance of the "
        #                    "ScenarioTreeManager interface as the "
        #                    "second argument")
        if not manager.initialized:
            raise ValueError("ExtensiveFormAlgorithm requires a scenario tree "
                             "manager that has been fully initialized")

        self._manager = manager
        self.instance = None
        self._solver_manager = None
        self._solver = None

        # The following attributes will be modified by the
        # solve() method. For users that are scripting, these
        # can be accessed after the solve() method returns.
        # They will be reset each time solve() is called.
        ############################################
        self.objective = undefined
        self.gap = undefined
        self.termination_condition = undefined
        self.solver_status = undefined
        self.solution_status = undefined
        self.solver_results = undefined
        self.pyomo_solve_time = undefined
        self.solve_time = undefined
        ############################################

        self._solver = SolverFactory(self.get_option("solver"),
                                     solver_io=self.get_option("solver_io"))
        if isinstance(self._solver, UnknownSolver):
            raise ValueError("Failed to create solver of type="+
                             self.get_option("solver")+
                             " for use in extensive form solve")
        if len(self.get_option("solver_options")) > 0:
            if self.get_option("verbose"):
                print("Initializing ef solver with options="
                      +str(list(self.get_option("solver_options"))))
            # NOTE(review): multiple options are joined with no
            # separator here; confirm whether " ".join was intended
            # when more than one solver option is supplied.
            self._solver.set_options("".join(
                self.get_option("solver_options")))
        if self.get_option("mipgap") is not None:
            if (self.get_option("mipgap") < 0.0) or \
               (self.get_option("mipgap") > 1.0):
                raise ValueError("Value of the mipgap parameter for the EF "
                                 "solve must be on the unit interval; "
                                 "value specified=" +
                                 str(self.get_option("mipgap")))
            self._solver.options.mipgap = float(self.get_option("mipgap"))

        solver_manager_type = self.get_option("solver_manager")
        if solver_manager_type == "phpyro":
            print("*** WARNING ***: PHPyro is not a supported solver "
                  "manager type for the extensive-form solver. "
                  "Falling back to serial.")
            solver_manager_type = 'serial'

        self._solver_manager = SolverManagerFactory(
            solver_manager_type,
            host=self.get_option("pyro_host"),
            port=self.get_option("pyro_port"))
        if self._solver_manager is None:
            # Report the solver manager type that failed (previously
            # this message reported the 'solver' option by mistake).
            raise ValueError("Failed to create solver manager of type="
                             + solver_manager_type +
                             " for use in extensive form solve")

    def build_ef(self):
        """Construct the extensive form instance, optionally adding a
        weighted CVaR term and/or a chance constraint."""
        self.destroy_ef()
        if self.get_option("verbose"):
            print("Creating extensive form instance")
        start_time = time.time()

        # then validate the associated parameters.
        generate_weighted_cvar = False
        cvar_weight = None
        risk_alpha = None
        if self.get_option("generate_weighted_cvar"):
            generate_weighted_cvar = True
            cvar_weight = self.get_option("cvar_weight")
            risk_alpha = self.get_option("risk_alpha")

        self.instance = create_ef_instance(
            self._manager.scenario_tree,
            verbose_output=self.get_option("verbose"),
            generate_weighted_cvar=generate_weighted_cvar,
            cvar_weight=cvar_weight,
            risk_alpha=risk_alpha,
            cc_indicator_var_name=self.get_option("cc_indicator_var"),
            cc_alpha=self.get_option("cc_alpha"))

        if self.get_option("verbose") or self.get_option("output_times"):
            print("Time to construct extensive form instance=%.2f seconds"
                  % (time.time() - start_time))

    def destroy_ef(self):
        """Remove scenario sub-blocks from the EF instance and
        re-activate the per-scenario objectives."""
        if self.instance is not None:
            for scenario in self._manager.scenario_tree.scenarios:
                self.instance.del_component(scenario.name)
                scenario._instance_objective.activate()
        self.instance = None

    def write(self, filename):
        """Write the EF instance to 'filename', appending a suffix
        derived from the solver's problem format if none of .nl/.lp/.mps
        is present. Returns (filename, smap_id)."""
        if self.instance is None:
            raise RuntimeError(
                "The extensive form instance has not been constructed. "
                "Call the build_ef() method to construct it.")

        suf = os.path.splitext(filename)[1]
        if suf not in ['.nl', '.lp', '.mps']:
            if self._solver.problem_format() == ProblemFormat.cpxlp:
                filename += '.lp'
            elif self._solver.problem_format() == ProblemFormat.nl:
                filename += '.nl'
            elif self._solver.problem_format() == ProblemFormat.mps:
                filename += '.mps'
            else:
                raise ValueError("Could not determine output file format. "
                                 "No recognized ending suffix was provided "
                                 "and no format was indicated by the "
                                 "--solver-io option.")

        start_time = time.time()
        if self.get_option("verbose"):
            print("Starting to write extensive form")

        smap_id = write_ef(self.instance,
                           filename,
                           self.get_option("symbolic_solver_labels"))

        print("Extensive form written to file="+filename)
        if self.get_option("verbose") or self.get_option("output_times"):
            print("Time to write output file=%.2f seconds"
                  % (time.time() - start_time))

        return filename, smap_id

    def solve(self,
              check_status=True,
              exception_on_failure=True,
              io_options=None):
        """Solve the EF instance and populate the result attributes.

        Returns True when the solve failed a status check (only
        possible when check_status=True and exception_on_failure=False);
        returns False on success. Raises RuntimeError when the EF has
        not been built, or on a failed status check when
        exception_on_failure=True."""
        if self.instance is None:
            raise RuntimeError(
                "The extensive form instance has not been constructed. "
                "Call the build_ef() method to construct it.")

        start_time = time.time()
        if self.get_option("verbose"):
            print("Queuing extensive form solve")

        # Reset all result attributes for this solve.
        self.objective = undefined
        self.gap = undefined
        self.bound = undefined
        self.pyomo_solve_time = undefined
        self.solve_time = undefined
        self.termination_condition = undefined
        self.solver_status = undefined
        self.solution_status = undefined
        self.solver_results = undefined

        if isinstance(self._solver, PersistentSolver):
            self._solver.set_instance(
                self.instance,
                symbolic_solver_labels=self.get_option(
                    "symbolic_solver_labels"))

        solve_kwds = {}
        solve_kwds['load_solutions'] = False
        if self.get_option("keep_solver_files"):
            solve_kwds['keepfiles'] = True
        if self.get_option("symbolic_solver_labels"):
            solve_kwds['symbolic_solver_labels'] = True
        if self.get_option("output_solver_log"):
            solve_kwds['tee'] = True
        if io_options is not None:
            solve_kwds.update(io_options)

        self.objective_sense = \
            find_active_objective(self.instance).sense

        if (not self.get_option("disable_warmstart")) and \
           (self._solver.warm_start_capable()):
            action_handle = self._solver_manager.queue(self.instance,
                                                       opt=self._solver,
                                                       warmstart=True,
                                                       **solve_kwds)
        else:
            action_handle = self._solver_manager.queue(self.instance,
                                                       opt=self._solver,
                                                       **solve_kwds)

        if self.get_option("verbose"):
            print("Waiting for extensive form solve")
        results = self._solver_manager.wait_for(action_handle)

        if self.get_option("verbose"):
            print("Done with extensive form solve - loading results")

        if self.get_option("output_solver_results"):
            print("Results for ef:")
            results.write(num=1)

        self.solver_results = results
        if hasattr(results.solver,"user_time") and \
           (not isinstance(results.solver.user_time,
                           UndefinedData)) and \
           (results.solver.user_time is not None):
            # the solve time might be a string, or might
            # not be - we eventually would like more
            # consistency on this front from the solver
            # plugins.
            self.solve_time = \
                float(results.solver.user_time)
        elif hasattr(results.solver,"time"):
            self.solve_time = \
                float(results.solver.time)
        else:
            self.solve_time = undefined

        if hasattr(results,"pyomo_solve_time"):
            self.pyomo_solve_time = \
                results.pyomo_solve_time
        else:
            # BUG FIX: was assigned to 'self.pyomo_solve_times' (typo),
            # leaving 'pyomo_solve_time' stale from the reset above.
            self.pyomo_solve_time = undefined

        self.termination_condition = \
            results.solver.termination_condition
        self.solver_status = \
            results.solver.status

        if len(results.solution) > 0:
            assert len(results.solution) == 1

            self.instance.solutions.load_from(results)

            solution0 = results.solution(0)
            if hasattr(solution0, "gap") and \
               (solution0.gap is not None):
                self.gap = solution0.gap
            else:
                self.gap = undefined

            self.solution_status = solution0.status

            if self.get_option("verbose"):
                print("Storing solution in scenario tree")

            for scenario in self._manager.scenario_tree.scenarios:
                scenario.update_solution_from_instance()
            self._manager.scenario_tree.snapshotSolutionFromScenarios()
            self.objective = self._manager.scenario_tree.\
                             findRootNode().\
                             computeExpectedNodeCost()
            if self.gap is not undefined:
                if self.objective_sense == pyomo.core.base.minimize:
                    self.bound = self.objective - self.gap
                else:
                    self.bound = self.objective + self.gap
        else:
            self.objective = undefined
            self.gap = undefined
            self.bound = undefined
            self.solution_status = undefined

        failure = False
        if check_status:
            if not ((self.solution_status == SolutionStatus.optimal) or \
                    (self.solution_status == SolutionStatus.feasible)):
                failure = True
                if self.get_option("verbose") or \
                   exception_on_failure:
                    msg = ("EF solve failed solution status check:\n"
                           "Solver Status: %s\n"
                           "Termination Condition: %s\n"
                           "Solution Status: %s\n"
                           % (self.solver_status,
                              self.termination_condition,
                              self.solution_status))
                    if self.get_option("verbose"):
                        print(msg)
                    if exception_on_failure:
                        raise RuntimeError(msg)
        else:
            if self.get_option("verbose"):
                print("EF solve completed. Skipping status check.")

        if self.get_option("verbose") or self.get_option("output_times"):
            print("Time to solve and load results for the "
                  "extensive form=%.2f seconds"
                  % (time.time()-start_time))

        return failure
class JSONSolutionLoaderExtension(PySPConfiguredExtension,
                                  PySPConfiguredObject,
                                  SingletonPlugin):
    """Plugin that loads a scenario tree solution from a JSON file
    previously written by JSONSolutionSaverExtension."""

    implements(IPySPSolutionLoaderExtension)

    _declared_options = \
        PySPConfigBlock("Options declared for the "
                        "JSONSolutionLoaderExtension class")

    safe_declare_common_option(_declared_options,
                               "input_name")
    safe_declare_common_option(_declared_options,
                               "load_stages")

    _default_prefix = "jsonloader_"

    #
    # Note: Do not try to use super() or access the
    #       class name inside the __init__ method when
    #       a class derives from a SingletonPlugin. Due to
    #       how Pyutilib implements its Singleton type,
    #       the __class__ cell will be empty.
    #       (See: https://stackoverflow.com/questions/
    #             13126727/how-is-super-in-python-3-implemented)
    #
    def __init__(self):
        PySPConfiguredExtension.__init__(self)

    def load(self, manager):
        """Load a scenario tree solution from the configured
        'input_name' JSON file into the manager's scenario tree,
        limited to 'load_stages' stages when that option is positive.

        Returns True when a solution was loaded, False when no
        'input_name' was configured. Raises ValueError when
        'load_stages' exceeds the number of stages available, and
        KeyError when the local tree has a node missing from the
        stored solution."""

        if self.get_option("input_name") is not None:
            stage_solutions = None
            # Do NOT open file in 'binary' mode when loading JSON
            # (produces an error in Python3)
            with open(self.get_option("input_name"), 'r') as f:
                stage_solutions = json.load(f)
            cntr = 0
            if self.get_option('load_stages') > len(manager.scenario_tree.stages):
                raise ValueError(
                    "The value of the %s option (%s) can not be greater than "
                    "the number of time stages in the local scenario tree (%s)"
                    % (self.get_full_option_name('load_stages'),
                       self.get_option('load_stages'),
                       len(manager.scenario_tree.stages)))
            if self.get_option('load_stages') > len(stage_solutions):
                raise ValueError(
                    "The value of the %s option (%s) can not be greater than "
                    "the number of time stages in the scenario tree solution "
                    "stored in %s (%s)"
                    % (self.get_full_option_name('load_stages'),
                       self.get_option('load_stages'),
                       self.get_option('input_name'),
                       len(stage_solutions)))
            # zip_longest so we can detect a stored solution with more
            # stages than the local tree (stage is None in that case).
            for stage, stage_solution in zip_longest(manager.scenario_tree.stages,
                                                     stage_solutions):
                if stage_solution is None:
                    break
                if (self.get_option('load_stages') <= 0) or \
                   (cntr+1 <= self.get_option('load_stages')):
                    if stage is None:
                        raise RuntimeError(
                            "Local scenario tree has fewer stages (%s) than what is "
                            "held by the solution loaded from file %s. Use the "
                            "option %s to limit the number of stages that "
                            "are loaded." % (cntr,
                                             self.get_option('input_name'),
                                             self.get_full_option_name('load_stages')))
                    cntr += 1
                    for tree_node in stage.nodes:
                        try:
                            node_solution = stage_solution[tree_node.name]
                        except KeyError:
                            raise KeyError(
                                "Local scenario tree contains a tree node "
                                "that was not found in the solution at time"
                                "-stage %s: %s" % (cntr, tree_node.name))
                        load_node_solution(tree_node, node_solution)
                else:
                    break
            print("Loaded scenario tree solution for %s time stages "
                  "from file %s" % (cntr, self.get_option('input_name')))
            return True

        # BUG FIX: message previously said "Nothing will be saved."
        # (copy-paste from the saver extension); this is the loader.
        print("No value was set for %s option 'input_name'. "
              "Nothing will be loaded." % (type(self).__name__))
        return False
def _declare_options(cls, options=None):
    """Declare and return the options used by the EF algorithm.

    When 'options' is None a fresh PySPConfigBlock is created;
    otherwise the declarations are added to the supplied block,
    which is returned either way."""
    if options is None:
        options = PySPConfigBlock()

    # Options unique to the EF algorithm (risk / chance-constraint
    # configuration) keep their full definitions.
    safe_declare_unique_option(
        options,
        "cvar_weight",
        PySPConfigValue(
            1.0,
            domain=_domain_nonnegative,
            description=(
                "The weight associated with the CVaR term in "
                "the risk-weighted objective "
                "formulation. If the weight is 0, then "
                "*only* a non-weighted CVaR cost will appear "
                "in the EF objective - the expected cost "
                "component will be dropped. Default is 1.0."
            ),
            doc=None,
            visibility=0),
        ap_group=_ef_group_label)
    safe_declare_unique_option(
        options,
        "generate_weighted_cvar",
        PySPConfigValue(
            False,
            domain=bool,
            description=(
                "Add a weighted CVaR term to the "
                "primary objective. Default is False."
            ),
            doc=None,
            visibility=0),
        ap_group=_ef_group_label)
    safe_declare_unique_option(
        options,
        "risk_alpha",
        PySPConfigValue(
            0.95,
            domain=_domain_unit_interval,
            description=(
                "The probability threshold associated with "
                "CVaR (or any future) risk-oriented "
                "performance metrics. Default is 0.95."
            ),
            doc=None,
            visibility=0),
        ap_group=_ef_group_label)
    safe_declare_unique_option(
        options,
        "cc_alpha",
        PySPConfigValue(
            0.0,
            domain=_domain_unit_interval,
            description=(
                "The probability threshold associated with a "
                "chance constraint. The RHS will be one "
                "minus this value. Default is 0."
            ),
            doc=None,
            visibility=0),
        ap_group=_ef_group_label)
    safe_declare_unique_option(
        options,
        "cc_indicator_var",
        PySPConfigValue(
            None,
            domain=_domain_must_be_str,
            description=(
                "The name of the binary variable to be used "
                "to construct a chance constraint. Default "
                "is None, which indicates no chance "
                "constraint."
            ),
            doc=None,
            visibility=0),
        ap_group=_ef_group_label)

    # Common solver / solver-manager options, declared in the same
    # order as before (no argparse group).
    for _common_name in ("solver",
                         "solver_io",
                         "solver_manager",
                         "solver_options",
                         "disable_warmstart",
                         "solver_manager_pyro_host",
                         "solver_manager_pyro_port",
                         "solver_manager_pyro_shutdown"):
        safe_declare_common_option(options, _common_name)

    # Common output/verbosity options grouped under the EF label.
    for _common_name in ("verbose",
                         "output_times",
                         "output_solver_results"):
        safe_declare_common_option(options,
                                   _common_name,
                                   ap_group=_ef_group_label)

    return options
class ScenarioTreeServerPyro(TaskWorker, PySPConfiguredObject):
    """A Pyro task worker that hosts scenario tree workers.

    It responds to 'ScenarioTreeServerPyro_*' control actions (setup,
    initialize, release, reset, shutdown) and dispatches any other
    action name as a method call on a named, previously initialized
    worker."""

    # Maps name to a registered worker class to instantiate
    _registered_workers = {}

    _declared_options = \
        PySPConfigBlock("Options declared for the "
                        "ScenarioTreeServerPyro class")

    #
    # scenario instance construction
    #
    safe_declare_common_option(_declared_options,
                               "model_location")
    safe_declare_common_option(_declared_options,
                               "scenario_tree_location")

    #
    # scenario tree generation
    #
    safe_declare_common_option(_declared_options,
                               "scenario_tree_random_seed")
    safe_declare_common_option(_declared_options,
                               "scenario_tree_downsample_fraction")

    #
    # various
    #
    safe_declare_common_option(_declared_options,
                               "verbose")

    @classmethod
    def get_registered_worker_type(cls, name):
        """Return the worker class registered under 'name'; raise
        KeyError when no such registration exists."""
        if name in cls._registered_workers:
            return cls._registered_workers[name]
        raise KeyError("No worker type has been registered under the name "
                       "'%s' for ScenarioTreeServerPyro" % (name))

    def __init__(self, *args, **kwds):

        # add for purposes of diagnostic output.
        kwds["name"] = ("ScenarioTreeServerPyro_%d@%s"
                        % (os.getpid(), socket.gethostname()))
        kwds["caller_name"] = kwds["name"]
        self._modules_imported = kwds.pop('modules_imported', {})

        TaskWorker.__init__(self, **kwds)
        # This classes options get updated during the "setup" phase
        options = self.register_options()
        PySPConfiguredObject.__init__(self, options)

        self.type = self.WORKERNAME
        self.block = True
        self.timeout = None
        self._worker_map = {}

        #
        # These will be used by all subsequent workers created
        # by this server. Their creation can eat up a nontrivial
        # amount of initialization time when a large number of
        # workers are created on this server, so we only create
        # them once.
        #
        self._scenario_instance_factory = None
        self._full_scenario_tree = None

    def reset(self):
        """Close the shared instance factory, drop the full scenario
        tree, and remove all workers hosted by this server."""
        if self._scenario_instance_factory is not None:
            self._scenario_instance_factory.close()
        self._scenario_instance_factory = None
        self._full_scenario_tree = None
        for worker_name in list(self._worker_map):
            self.remove_worker(worker_name)

    def remove_worker(self, name):
        """Close the named worker and remove it from the worker map."""
        self._worker_map[name].close()
        del self._worker_map[name]

    def process(self, data):
        """Top-level task entry point: dispatch to _process and turn
        any exception into a TaskProcessingError return value so the
        dispatcher is informed instead of the server crashing."""
        self._worker_task_return_queue = self._current_task_client
        try:
            return self._process(data)
        except:
            logger.error("Scenario tree server %s caught an exception of type "
                         "%s while processing a task. Going idle."
                         % (self.WORKERNAME, sys.exc_info()[0].__name__))
            traceback.print_exception(*sys.exc_info())
            self._worker_error = True
            return TaskProcessingError(traceback.format_exc())

    def _process(self, data):
        """Handle one task: either a control action for this server or
        a method invocation on a hosted worker."""
        data = pyutilib.misc.Bunch(**data)
        result = None
        if not data.action.startswith('ScenarioTreeServerPyro_'):
            # Non-control action: forward to the named worker.
            #with PauseGC() as pgc:
            result = getattr(self._worker_map[data.worker_name],
                             data.action)(*data.args, **data.kwds)

        elif data.action == 'ScenarioTreeServerPyro_setup':
            # One-time server setup: apply transmitted options, build
            # the shared instance factory and the full scenario tree.
            options = self.register_options()
            for name, val in iteritems(data.options):
                options.get(name).set_value(val)
            self.set_options(options)
            self._options.verbose = self._options.verbose | self._verbose
            assert self._scenario_instance_factory is None
            assert self._full_scenario_tree is None
            if self._options.verbose:
                print("Server %s received setup request."
                      % (self.WORKERNAME))
                print("Options:")
                self.display_options()

            # Make sure these are not archives
            assert os.path.exists(self._options.model_location)
            assert (self._options.scenario_tree_location is None) or \
                os.path.exists(self._options.scenario_tree_location)
            self._scenario_instance_factory = \
                ScenarioTreeInstanceFactory(
                    self._options.model_location,
                    scenario_tree_location=self._options.scenario_tree_location,
                    verbose=self._options.verbose)

            #
            # Try prevent unnecessarily re-imported the model module
            # if other callbacks are in the same location
            #
            self._modules_imported[self._scenario_instance_factory.\
                                   _model_location] = \
                self._scenario_instance_factory._model_module
            self._modules_imported[self._scenario_instance_factory.\
                                   _model_filename] = \
                self._scenario_instance_factory._model_module

            self._full_scenario_tree = \
                self._scenario_instance_factory.generate_scenario_tree(
                    downsample_fraction=self._options.scenario_tree_downsample_fraction,
                    random_seed=self._options.scenario_tree_random_seed)

            if self._full_scenario_tree is None:
                raise RuntimeError("Unable to launch scenario tree worker - "
                                   "scenario tree construction failed.")

            result = True

        elif data.action == "ScenarioTreeServerPyro_initialize":
            # Create and initialize a named worker against the shared
            # full scenario tree.
            worker_name = data.worker_name
            if self._options.verbose:
                print("Server %s received request to initialize "
                      "scenario tree worker with name %s."
                      % (self.WORKERNAME, worker_name))

            assert self._scenario_instance_factory is not None
            assert self._full_scenario_tree is not None

            if worker_name in self._worker_map:
                raise RuntimeError(
                    "Server %s Cannot initialize worker with name '%s' "
                    "because a worker already exists with that name."
                    % (self.WORKERNAME, worker_name))

            worker_type = self._registered_workers[data.worker_type]
            options = worker_type.register_options()
            for name, val in iteritems(data.options):
                options.get(name).set_value(val)

            #
            # Depending on the Pyro serializer, the namedtuple
            # may be been converted to a tuple
            #
            if not isinstance(data.worker_init, WorkerInit):
                assert type(data.worker_init) is tuple
                data.worker_init = WorkerInit(type_=data.worker_init[0],
                                              names=data.worker_init[1],
                                              data=data.worker_init[2])

            # replace enum string representation with the actual enum
            # object now that we've unserialized the Pyro data
            worker_init = WorkerInit(type_=getattr(WorkerInitType,
                                                   data.worker_init.type_),
                                     names=data.worker_init.names,
                                     data=data.worker_init.data)

            self._worker_map[worker_name] = worker_type(options)
            self._worker_map[worker_name].initialize(self.WORKERNAME,
                                                     self._full_scenario_tree,
                                                     worker_name,
                                                     worker_init,
                                                     self._modules_imported)

            result = True

        elif data.action == "ScenarioTreeServerPyro_release":
            if self._options.verbose:
                print("Server %s releasing worker: %s"
                      % (self.WORKERNAME, data.worker_name))
            self.remove_worker(data.worker_name)
            result = True

        elif data.action == "ScenarioTreeServerPyro_reset":
            if self._options.verbose:
                print("Server %s received reset request"
                      % (self.WORKERNAME))
            self.reset()
            result = True

        elif data.action == "ScenarioTreeServerPyro_shutdown":
            if self._options.verbose:
                print("Server %s received shutdown request"
                      % (self.WORKERNAME))
            self.reset()
            self._worker_shutdown = True
            result = True

        else:
            raise ValueError("Server %s: Invalid command: %s"
                             % (self.WORKERNAME, data.action))

        return result