Example #1
def solver_call_separation(model_data, config, solver, solve_data, is_global):
    """
    Solve the separation problem.
    """
    save_dir = config.subproblem_file_directory

    if is_global or config.bypass_local_separation:
        backup_solvers = deepcopy(config.backup_global_solvers)
    else:
        backup_solvers = deepcopy(config.backup_local_solvers)
    backup_solvers.insert(0, solver)
    solver_status_dict = {}
    while len(backup_solvers) > 0:
        solver = backup_solvers.pop(0)
        nlp_model = model_data.separation_model

        # === Initialize the separation problem from the current master solution
        initialize_separation(model_data, config)

        if not solver.available():
            raise RuntimeError("Solver %s is not available." % solver)
        try:
            results = solver.solve(nlp_model, tee=config.tee)
        except ValueError as err:
            if 'Cannot load a SolverResults object with bad status: error' in str(
                    err):
                solve_data.termination_condition = tc.error
                return True
            else:
                raise
        solver_status_dict[str(solver)] = results.solver.termination_condition
        solve_data.termination_condition = results.solver.termination_condition
        solve_data.results = results
        # === Process result
        is_violation(model_data, config, solve_data)

        if solve_data.termination_condition in globally_acceptable or \
                (not is_global and solve_data.termination_condition in locally_acceptable):
            return False

        # Otherwise, no acceptable solution was found; continue with the backup
        # solvers unless the time limit has been reached
        elapsed = get_main_elapsed_time(model_data.timing)
        if config.time_limit:
            if elapsed >= config.time_limit:
                return True

    # === None of the subsolvers returned an acceptable status; write this
    #     separation instance to file so the user can debug it
    if save_dir and config.keepfiles:
        objective = str(
            list(nlp_model.component_data_objects(Objective,
                                                  active=True))[0].name)
        name = os.path.join(
            save_dir, config.uncertainty_set.type + "_" + nlp_model.name +
            "_separation_" + str(model_data.iteration) + "_obj_" + objective +
            ".bar")
        nlp_model.write(name, io_options={'symbolic_solver_labels': True})
        output_logger(config=config,
                      separation_error=True,
                      filename=name,
                      iteration=model_data.iteration,
                      objective=objective,
                      status_dict=solver_status_dict)
    return True
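
The fallback logic above (try the primary solver, then each backup in order, recording termination conditions) can be illustrated with a generic Pyomo helper. This is a simplified sketch only: the helper name, the default solver names, and the set of acceptable termination conditions are illustrative assumptions, not PyROS internals.

# Generic sketch of the solver-fallback pattern (hypothetical helper, not PyROS code).
import pyomo.environ as pyo
from pyomo.opt import TerminationCondition as tc


def solve_with_fallback(model, solver_names=('ipopt', 'baron'),
                        acceptable=(tc.optimal, tc.locallyOptimal,
                                    tc.globallyOptimal)):
    """Try each solver in order; stop at the first acceptable termination."""
    statuses = {}
    for name in solver_names:
        opt = pyo.SolverFactory(name)
        if not opt.available(exception_flag=False):
            continue  # skip solvers that are not installed
        results = opt.solve(model, load_solutions=False)
        statuses[name] = results.solver.termination_condition
        if results.solver.termination_condition in acceptable:
            model.solutions.load_from(results)  # keep the successful solution
            return results, statuses
    return None, statuses  # no solver produced an acceptable status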
Example #2
    def solve(self, model, first_stage_variables, second_stage_variables,
              uncertain_params, uncertainty_set, local_solver, global_solver,
              **kwds):
        """Solve the model.

        Parameters
        ----------
        model: ConcreteModel
            A ``ConcreteModel`` object representing the deterministic
            model, cast as a minimization problem.
        first_stage_variables: List[Var]
            The list of ``Var`` objects referenced in ``model``
            representing the design variables.
        second_stage_variables: List[Var]
            The list of ``Var`` objects referenced in ``model``
            representing the control variables.
        uncertain_params: List[Param]
            The list of ``Param`` objects referenced in ``model``
            representing the uncertain parameters.  MUST be ``mutable``.
            Assumes entries are provided in consistent order with the
            entries of 'nominal_uncertain_param_vals' input.
        uncertainty_set: UncertaintySet
            ``UncertaintySet`` object representing the uncertainty space
            that the final solutions will be robust against.
        local_solver: Solver
            ``Solver`` object to utilize as the primary local NLP solver.
        global_solver: Solver
            ``Solver`` object to utilize as the primary global NLP solver.

        """

        # === Add the explicit arguments to the config
        config = self.CONFIG(kwds.pop('options', {}))
        config.first_stage_variables = first_stage_variables
        config.second_stage_variables = second_stage_variables
        config.uncertain_params = uncertain_params
        config.uncertainty_set = uncertainty_set
        config.local_solver = local_solver
        config.global_solver = global_solver

        dev_options = kwds.pop('dev_options', {})
        config.set_value(kwds)
        config.set_value(dev_options)

        # === Validate kwarg inputs
        validate_kwarg_inputs(model, config)

        # === Validate ability of grcs RO solver to handle this model
        if not model_is_valid(model):
            raise AttributeError(
                "This model structure is not currently handled by the ROSolver."
            )

        # === Define nominal point if not specified
        if len(config.nominal_uncertain_param_vals) == 0:
            config.nominal_uncertain_param_vals = list(
                p.value for p in config.uncertain_params)
        elif len(config.nominal_uncertain_param_vals) != len(
                config.uncertain_params):
            raise AttributeError(
                "The nominal_uncertain_param_vals list must be the same "
                "length as the uncertain_params list")

        # === Create data containers
        model_data = ROSolveResults()
        model_data.timing = Bunch()

        # === Set up logger for logging results
        with time_code(model_data.timing, 'total', is_main_timer=True):
            config.progress_logger.setLevel(logging.INFO)

            # === PREAMBLE
            output_logger(config=config,
                          preamble=True,
                          version=str(self.version()))

            # === DISCLAIMER
            output_logger(config=config, disclaimer=True)

            # === A block to hold list-type data to make cloning easy
            util = Block(concrete=True)
            util.first_stage_variables = config.first_stage_variables
            util.second_stage_variables = config.second_stage_variables
            util.uncertain_params = config.uncertain_params

            model_data.util_block = unique_component_name(model, 'util')
            model.add_component(model_data.util_block, util)
            # Note:  model.component(model_data.util_block) is util

            # === Validate uncertainty set happens here, requires util block for Cardinality and FactorModel sets
            validate_uncertainty_set(config=config)

            # === Deactivate objective on model
            for o in model.component_data_objects(Objective):
                o.deactivate()

            # === Note: cloning below emits a logger warning due to the inactive objective
            model_data.original_model = model
            # === For keeping track of variables after cloning
            cname = unique_component_name(model_data.original_model,
                                          'tmp_var_list')
            src_vars = list(
                model_data.original_model.component_data_objects(Var))
            setattr(model_data.original_model, cname, src_vars)
            model_data.working_model = model_data.original_model.clone()

            # === Add objective expressions
            identify_objective_functions(model_data.working_model, config)

            # === Put model in standard form
            transform_to_standard_form(model_data.working_model)

            # === Replace variable bounds depending on uncertain params with
            #     explicit inequality constraints
            replace_uncertain_bounds_with_constraints(
                model_data.working_model,
                model_data.working_model.util.uncertain_params)

            # === Add decision rule information
            add_decision_rule_variables(model_data, config)
            add_decision_rule_constraints(model_data, config)

            # === Move bounds on control variables to explicit ineq constraints
            wm_util = model_data.working_model

            # === Assuming all other Var objects in the model are state variables
            fsv = ComponentSet(
                model_data.working_model.util.first_stage_variables)
            ssv = ComponentSet(
                model_data.working_model.util.second_stage_variables)
            sv = ComponentSet()
            model_data.working_model.util.state_vars = []
            for v in model_data.working_model.component_data_objects(Var):
                if v not in fsv and v not in ssv and v not in sv:
                    model_data.working_model.util.state_vars.append(v)
                    sv.add(v)

            # Bounds on second-stage variables and state variables are separation
            # objectives; they are brought in this way as explicit constraints
            for c in model_data.working_model.util.second_stage_variables:
                turn_bounds_to_constraints(c, wm_util, config)

            for c in model_data.working_model.util.state_vars:
                turn_bounds_to_constraints(c, wm_util, config)

            # === Collect the explicit variable-bound constraints into ssv_bounds
            wm_util.ssv_bounds = []
            for c in model_data.working_model.component_data_objects(
                    Constraint, descend_into=True):
                if "bound_con" in c.name:
                    wm_util.ssv_bounds.append(c)

            # === Solve and load solution into model
            pyros_soln, final_iter_separation_solns = ROSolver_iterative_solve(
                model_data, config)

            return_soln = ROSolveResults()
            if pyros_soln is not None and final_iter_separation_solns is not None:
                if config.load_solution and \
                        (pyros_soln.pyros_termination_condition is pyrosTerminationCondition.robust_optimal or
                         pyros_soln.pyros_termination_condition is pyrosTerminationCondition.robust_feasible):
                    load_final_solution(model_data, pyros_soln.master_soln,
                                        config)

                # === Return time info
                model_data.total_cpu_time = get_main_elapsed_time(
                    model_data.timing)
                iterations = pyros_soln.total_iters + 1

                # === Return config to user
                return_soln.config = config
                # Report the negative of the objective value if it was originally maximize, since we use the minimize form in the algorithm
                if next(model.component_data_objects(
                        Objective)).sense == maximize:
                    negation = -1
                else:
                    negation = 1
                if config.objective_focus == ObjectiveType.nominal:
                    return_soln.final_objective_value = negation * value(
                        pyros_soln.master_soln.master_model.obj)
                elif config.objective_focus == ObjectiveType.worst_case:
                    return_soln.final_objective_value = negation * value(
                        pyros_soln.master_soln.master_model.zeta)
                return_soln.pyros_termination_condition = pyros_soln.pyros_termination_condition

                return_soln.time = model_data.total_cpu_time
                return_soln.iterations = iterations

                # === Remove util block
                model.del_component(model_data.util_block)

                del pyros_soln.util_block
                del pyros_soln.working_model
            else:
                return_soln.pyros_termination_condition = pyrosTerminationCondition.robust_infeasible
                return_soln.final_objective_value = None
                return_soln.time = get_main_elapsed_time(model_data.timing)
                return_soln.iterations = 0
        return return_soln
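
A call to this solve method goes through Pyomo's SolverFactory("pyros") interface. The following minimal usage sketch rests on stated assumptions: a toy two-variable model, a BoxSet uncertainty set, and locally available ipopt and baron executables.

# Minimal usage sketch (assumes Pyomo with PyROS plus working ipopt/baron installations).
import pyomo.environ as pyo
import pyomo.contrib.pyros as pyros

# Toy deterministic model: x is a first-stage (design) variable,
# z is a second-stage (control) variable, q is a mutable uncertain parameter.
m = pyo.ConcreteModel()
m.x = pyo.Var(bounds=(0, 10))
m.z = pyo.Var(bounds=(0, 10))
m.q = pyo.Param(initialize=1.0, mutable=True)
m.con = pyo.Constraint(expr=m.x + m.z >= m.q)
m.obj = pyo.Objective(expr=m.x**2 + m.z, sense=pyo.minimize)

pyros_solver = pyo.SolverFactory("pyros")
results = pyros_solver.solve(
    model=m,
    first_stage_variables=[m.x],
    second_stage_variables=[m.z],
    uncertain_params=[m.q],
    uncertainty_set=pyros.BoxSet(bounds=[(0.5, 1.5)]),
    local_solver=pyo.SolverFactory("ipopt"),
    global_solver=pyo.SolverFactory("baron"),
    objective_focus=pyros.ObjectiveType.worst_case,
)
print(results.pyros_termination_condition, results.final_objective_value)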
Example #3
def ROSolver_iterative_solve(model_data, config):
    '''
    GRCS algorithm implementation.

    :param model_data: container holding the deterministic working model and timing information
    :param config: ConfigBlock for the instance being solved
    :return: the updated model data object and the list of separation solutions
        from the final iteration (None, None if coefficient matching proves
        robust infeasibility)
    '''

    # === The "violation" e.g. uncertain parameter values added to the master problem are nominal in iteration 0
    #     User can supply a nominal_uncertain_param_vals if they want to set nominal to a certain point,
    #     Otherwise, the default init value for the params is used as nominal_uncertain_param_vals
    violation = list(p for p in config.nominal_uncertain_param_vals)

    # === Do coefficient matching
    constraints = [
        c for c in model_data.working_model.component_data_objects(Constraint)
        if c.equality and c not in ComponentSet(
            model_data.working_model.util.decision_rule_eqns)
    ]
    model_data.working_model.util.h_x_q_constraints = ComponentSet()
    for c in constraints:
        coeff_matching_success, robust_infeasible = coefficient_matching(
            model=model_data.working_model,
            constraint=c,
            uncertain_params=model_data.working_model.util.uncertain_params,
            config=config)
        if not coeff_matching_success and not robust_infeasible:
            raise ValueError(
                "Equality constraint \"%s\" cannot be guaranteed to be robustly feasible, "
                "given the current partitioning between first-stage, second-stage and state variables. "
                "You might consider editing this constraint to reference some second-stage "
                "and/or state variable(s)." % c.name)
        elif not coeff_matching_success and robust_infeasible:
            config.progress_logger.info(
                "PyROS has determined that the model is robust infeasible. "
                "One reason for this is that equality constraint \"%s\" cannot be satisfied "
                "against all realizations of uncertainty, "
                "given the current partitioning between first-stage, second-stage and state variables. "
                "You might consider editing this constraint to reference some (additional) second-stage "
                "and/or state variable(s)." % c.name)
            return None, None

    # h(x,q) == 0 becomes h'(x) == 0
    for c in model_data.working_model.util.h_x_q_constraints:
        c.deactivate()

    # === Build the master problem and master problem data container object
    master_data = master_problem_methods.initial_construct_master(model_data)

    # === If using p_robustness, add ConstraintList for additional constraints
    if config.p_robustness:
        master_data.master_model.p_robust_constraints = ConstraintList()

    # === Add scenario_0
    master_data.master_model.scenarios[0, 0].transfer_attributes_from(
        master_data.original.clone())
    if len(master_data.master_model.scenarios[
            0, 0].util.uncertain_params) != len(violation):
        raise ValueError(
            "Length of the nominal uncertain parameter realization does not "
            "match the number of uncertain parameters on the model.")

    # === Set the nominal uncertain parameters to the violation values
    for i, v in enumerate(violation):
        master_data.master_model.scenarios[
            0, 0].util.uncertain_params[i].value = v

    # === Add objective function (assuming minimization of costs) with nominal second-stage costs
    if config.objective_focus is ObjectiveType.nominal:
        master_data.master_model.obj = Objective(
            expr=master_data.master_model.scenarios[0,
                                                    0].first_stage_objective +
            master_data.master_model.scenarios[0, 0].second_stage_objective)
    elif config.objective_focus is ObjectiveType.worst_case:
        # === Worst-case cost objective
        master_data.master_model.zeta = Var(initialize=value(
            master_data.master_model.scenarios[0, 0].first_stage_objective +
            master_data.master_model.scenarios[0, 0].second_stage_objective))
        master_data.master_model.obj = Objective(
            expr=master_data.master_model.zeta)
        master_data.master_model.scenarios[0, 0].epigraph_constr = Constraint(
            expr=master_data.master_model.scenarios[0,
                                                    0].first_stage_objective +
            master_data.master_model.scenarios[0, 0].second_stage_objective <=
            master_data.master_model.zeta)
        master_data.master_model.scenarios[
            0,
            0].util.first_stage_variables.append(master_data.master_model.zeta)

    # === Add deterministic constraints to ComponentSet on original so that these become part of separation model
    master_data.original.util.deterministic_constraints = \
        ComponentSet(c for c in master_data.original.component_data_objects(Constraint, descend_into=True))

    # === Make separation problem model once before entering the solve loop
    separation_model = separation_problem_methods.make_separation_problem(
        model_data=master_data, config=config)

    # === Create separation problem data container object and add information to catalog during solve
    separation_data = SeparationProblemData()
    separation_data.separation_model = separation_model
    # last point separated in the separation problem
    separation_data.points_separated = []
    # realizations the master problem is explicitly robust against
    separation_data.points_added_to_master = [config.nominal_uncertain_param_vals]
    # list of constraint violations for each iteration
    separation_data.constraint_violations = []
    separation_data.total_global_separation_solves = 0  # number of times global solve is used
    separation_data.timing = master_data.timing  # timing object

    # === Keep track of subsolver termination statuses from each iteration
    separation_data.separation_problem_subsolver_statuses = []

    # === Nominal information
    nominal_data = Block()
    nominal_data.nom_fsv_vals = []
    nominal_data.nom_ssv_vals = []
    nominal_data.nom_first_stage_cost = 0
    nominal_data.nom_second_stage_cost = 0
    nominal_data.nom_obj = 0

    # === Time information
    timing_data = Block()
    timing_data.total_master_solve_time = 0
    timing_data.total_separation_local_time = 0
    timing_data.total_separation_global_time = 0
    timing_data.total_dr_polish_time = 0

    dr_var_lists_original = []
    dr_var_lists_polished = []

    k = 0
    while config.max_iter == -1 or k < config.max_iter:
        master_data.iteration = k

        # === Add p-robust constraint if iteration > 0
        if k > 0 and config.p_robustness:
            master_problem_methods.add_p_robust_constraint(
                model_data=master_data, config=config)

        # === Solve Master Problem
        config.progress_logger.info("PyROS working on iteration %s..." % k)
        master_soln = master_problem_methods.solve_master(
            model_data=master_data, config=config)
        #config.progress_logger.info("Done solving Master Problem!")
        master_soln.master_problem_subsolver_statuses = []

        # === Keep track of total time and subsolver termination conditions
        timing_data.total_master_solve_time += get_time_from_solver(
            master_soln.results)
        timing_data.total_master_solve_time += get_time_from_solver(
            master_soln.feasibility_problem_results)

        master_soln.master_problem_subsolver_statuses.append(
            master_soln.results.solver.termination_condition)

        # === Check for robust infeasibility or error or time-out in master problem solve
        if master_soln.master_subsolver_results[
                1] is pyrosTerminationCondition.robust_infeasible:
            term_cond = pyrosTerminationCondition.robust_infeasible
            output_logger(config=config, robust_infeasible=True)
        elif master_soln.pyros_termination_condition is pyrosTerminationCondition.subsolver_error:
            term_cond = pyrosTerminationCondition.subsolver_error
        else:
            term_cond = None
        if term_cond == pyrosTerminationCondition.subsolver_error or \
                term_cond == pyrosTerminationCondition.robust_infeasible:
            update_grcs_solve_data(pyros_soln=model_data,
                                   k=k,
                                   term_cond=term_cond,
                                   nominal_data=nominal_data,
                                   timing_data=timing_data,
                                   separation_data=separation_data,
                                   master_soln=master_soln)
            return model_data, []
        # === Check if time limit reached
        elapsed = get_main_elapsed_time(model_data.timing)
        if config.time_limit:
            if elapsed >= config.time_limit:
                output_logger(config=config, time_out=True, elapsed=elapsed)
                update_grcs_solve_data(
                    pyros_soln=model_data,
                    k=k,
                    term_cond=pyrosTerminationCondition.time_out,
                    nominal_data=nominal_data,
                    timing_data=timing_data,
                    separation_data=separation_data,
                    master_soln=master_soln)
                return model_data, []

        # === Save nominal information
        if k == 0:
            for val in master_soln.fsv_vals:
                nominal_data.nom_fsv_vals.append(val)

            for val in master_soln.ssv_vals:
                nominal_data.nom_ssv_vals.append(val)

            nominal_data.nom_first_stage_cost = master_soln.first_stage_objective
            nominal_data.nom_second_stage_cost = master_soln.second_stage_objective
            nominal_data.nom_obj = value(master_data.master_model.obj)

        # === Decision rule polishing (skipped when decision_rule_order == 0, when
        #     there are no second-stage variables, or on the first iteration)
        if (config.decision_rule_order != 0
                and len(config.second_stage_variables) > 0
                and k != 0):
            # === Save initial values of the DR variables before polishing
            for varslist in master_data.master_model.scenarios[
                    0, 0].util.decision_rule_vars:
                vals = []
                for dvar in varslist.values():
                    vals.append(dvar.value)
                dr_var_lists_original.append(vals)

            polishing_results = master_problem_methods.minimize_dr_vars(
                model_data=master_data, config=config)
            timing_data.total_dr_polish_time += get_time_from_solver(
                polishing_results)

            # === Save DR variable values after polishing
            for varslist in master_data.master_model.scenarios[
                    0, 0].util.decision_rule_vars:
                vals = []
                for dvar in varslist.values():
                    vals.append(dvar.value)
                dr_var_lists_polished.append(vals)

        # === Set up for the separation problem
        separation_data.opt_fsv_vals = [
            v.value for v in master_soln.master_model.scenarios[
                0, 0].util.first_stage_variables
        ]
        separation_data.opt_ssv_vals = master_soln.ssv_vals

        # === Provide master model scenarios to separation problem for initialization options
        separation_data.master_scenarios = master_data.master_model.scenarios

        if config.objective_focus is ObjectiveType.worst_case:
            separation_model.util.zeta = value(master_soln.master_model.obj)

        # === Solve Separation Problem
        separation_data.iteration = k
        separation_data.master_nominal_scenario = master_data.master_model.scenarios[
            0, 0]

        separation_data.master_model = master_data.master_model

        separation_solns, violating_realizations, constr_violations, is_global, \
            local_sep_time, global_sep_time = \
                separation_problem_methods.solve_separation_problem(model_data=separation_data, config=config)

        for sep_soln_list in separation_solns:
            for s in sep_soln_list:
                separation_data.separation_problem_subsolver_statuses.append(
                    s.termination_condition)

        if is_global:
            separation_data.total_global_separation_solves += 1

        timing_data.total_separation_local_time += local_sep_time
        timing_data.total_separation_global_time += global_sep_time

        separation_data.constraint_violations.append(constr_violations)

        if not any(s.found_violation for solve_data_list in separation_solns
                   for s in solve_data_list):
            separation_data.points_separated = []
        else:
            separation_data.points_separated = violating_realizations

        # === Check if time limit reached
        elapsed = get_main_elapsed_time(model_data.timing)
        if config.time_limit:
            if elapsed >= config.time_limit:
                output_logger(config=config, time_out=True, elapsed=elapsed)
                termination_condition = pyrosTerminationCondition.time_out
                update_grcs_solve_data(pyros_soln=model_data,
                                       k=k,
                                       term_cond=termination_condition,
                                       nominal_data=nominal_data,
                                       timing_data=timing_data,
                                       separation_data=separation_data,
                                       master_soln=master_soln)
                return model_data, separation_solns

        # === Exit with subsolver_error if any separation subsolve returned an
        #     unacceptable termination status
        local_solve_term_conditions = {
            TerminationCondition.optimal, TerminationCondition.locallyOptimal,
            TerminationCondition.globallyOptimal
        }
        global_solve_term_conditions = {
            TerminationCondition.optimal, TerminationCondition.globallyOptimal
        }
        if (is_global and any((s.termination_condition not in global_solve_term_conditions)
                                  for sep_soln_list in separation_solns for s in sep_soln_list)) or \
            (not is_global and any((s.termination_condition not in local_solve_term_conditions)
                                  for sep_soln_list in separation_solns for s in sep_soln_list)):
            termination_condition = pyrosTerminationCondition.subsolver_error
            update_grcs_solve_data(pyros_soln=model_data,
                                   k=k,
                                   term_cond=termination_condition,
                                   nominal_data=nominal_data,
                                   timing_data=timing_data,
                                   separation_data=separation_data,
                                   master_soln=master_soln)
            return model_data, separation_solns

        # === Check if we terminate due to robust optimality or feasibility
        if not any(s.found_violation for sep_soln_list in separation_solns
                   for s in sep_soln_list) and is_global:
            if config.solve_master_globally and config.objective_focus is ObjectiveType.worst_case:
                output_logger(config=config, robust_optimal=True)
                termination_condition = pyrosTerminationCondition.robust_optimal
            else:
                output_logger(config=config, robust_feasible=True)
                termination_condition = pyrosTerminationCondition.robust_feasible
            update_grcs_solve_data(pyros_soln=model_data,
                                   k=k,
                                   term_cond=termination_condition,
                                   nominal_data=nominal_data,
                                   timing_data=timing_data,
                                   separation_data=separation_data,
                                   master_soln=master_soln)
            return model_data, separation_solns

        # === Add block to master at violation
        master_problem_methods.add_scenario_to_master(master_data,
                                                      violating_realizations)
        separation_data.points_added_to_master.append(violating_realizations)

        k += 1

    output_logger(config=config, max_iter=True)
    update_grcs_solve_data(pyros_soln=model_data,
                           k=k,
                           term_cond=pyrosTerminationCondition.max_iter,
                           nominal_data=nominal_data,
                           timing_data=timing_data,
                           separation_data=separation_data,
                           master_soln=master_soln)

    # === Max iterations reached; still return the solution objects from the final iteration
    return model_data, separation_solns
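
Stripped of the PyROS-specific bookkeeping, the loop above follows the standard cutting-set structure: solve a master problem over the scenarios accumulated so far, ask the separation problem for a violated uncertain-parameter realization, and either terminate or add that realization to the master. The schematic below is illustrative only; solve_master and solve_separation are hypothetical callables standing in for the PyROS master and separation routines.

# Schematic GRCS / cutting-set loop (illustrative only; not PyROS internals).
def grcs_loop(solve_master, solve_separation, nominal_realization, max_iter=50):
    scenarios = [nominal_realization]             # realizations the master is robust against
    for k in range(max_iter):
        master_sol = solve_master(scenarios)      # design robust to the current scenarios
        violation = solve_separation(master_sol)  # worst-case realization, or None
        if violation is None:
            return master_sol, k, "robust"        # no violated realization: certify robustness
        scenarios.append(violation)               # add the violating point and re-solve
    return None, max_iter, "max_iter"             # iteration limit reached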
Example #4
def solver_call_master(model_data, config, solver, solve_data):
    '''
    Interface to the subordinate NLP solver(s) for the master problem.

    :param model_data: master problem data container
    :param config: ConfigBlock for the instance being solved
    :param solver: primary subordinate solver to try first
    :param solve_data: master solution data container to populate
    :return: the populated master solution object
    '''
    nlp_model = model_data.master_model
    master_soln = solve_data
    solver_term_cond_dict = {}

    if config.solve_master_globally:
        backup_solvers = deepcopy(config.backup_global_solvers)
    else:
        backup_solvers = deepcopy(config.backup_local_solvers)
    backup_solvers.insert(0, solver)

    if not solver.available():
        raise RuntimeError("NLP solver %s is not available." % solver)

    higher_order_decision_rule_efficiency(config, model_data)

    while len(backup_solvers) > 0:
        solver = backup_solvers.pop(0)
        try:
            results = solver.solve(nlp_model, tee=config.tee)
        except ValueError as err:
            if 'Cannot load a SolverResults object with bad status: error' in str(err):
                # `results` was never assigned because solve() raised, so build a
                # fresh object (SolverResults from pyomo.opt) to record the error
                results = SolverResults()
                results.solver.termination_condition = tc.error
                results.solver.message = str(err)
                master_soln.results = results
                master_soln.pyros_termination_condition = \
                    pyrosTerminationCondition.subsolver_error
                return master_soln, ()
            else:
                raise
        solver_term_cond_dict[str(solver)] = str(
            results.solver.termination_condition)
        master_soln.termination_condition = results.solver.termination_condition
        master_soln.pyros_termination_condition = None  # determined later in the algorithm
        master_soln.fsv_vals = list(
            v.value
            for v in nlp_model.scenarios[0, 0].util.first_stage_variables)

        if config.objective_focus is ObjectiveType.nominal:
            master_soln.ssv_vals = list(
                v.value
                for v in nlp_model.scenarios[0, 0].util.second_stage_variables)
            master_soln.second_stage_objective = value(
                nlp_model.scenarios[0, 0].second_stage_objective)
        else:
            idx = max(nlp_model.scenarios.keys())[0]
            master_soln.ssv_vals = list(
                v.value
                for v in nlp_model.scenarios[idx,
                                             0].util.second_stage_variables)
            master_soln.second_stage_objective = value(
                nlp_model.scenarios[idx, 0].second_stage_objective)
        master_soln.first_stage_objective = value(
            nlp_model.scenarios[0, 0].first_stage_objective)

        master_soln.nominal_block = nlp_model.scenarios[0, 0]
        master_soln.results = results
        master_soln.master_model = nlp_model

        master_soln.master_subsolver_results = process_termination_condition_master_problem(
            config=config, results=results)

        if not master_soln.master_subsolver_results[0]:
            return master_soln

    # === At this point, all sub-solvers have been tried and none returned an acceptable status or return code
    save_dir = config.subproblem_file_directory
    if save_dir and config.keepfiles:
        name = os.path.join(
            save_dir,
            config.uncertainty_set.type + "_" + model_data.original.name +
            "_master_" + str(model_data.iteration) + ".bar")
        nlp_model.write(name, io_options={'symbolic_solver_labels': True})
        output_logger(config=config,
                      master_error=True,
                      status_dict=solver_term_cond_dict,
                      filename=name,
                      iteration=model_data.iteration)
    master_soln.pyros_termination_condition = pyrosTerminationCondition.subsolver_error
    return master_soln
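
When every subsolver fails, the routine above dumps the master model to a BARON-format file for offline debugging. The helper below isolates that dump step as a stand-alone sketch; the function name and path handling are illustrative assumptions, while Model.write with symbolic_solver_labels is standard Pyomo.

# Sketch of the debug-file dump used above (hypothetical helper and paths).
import os


def dump_subproblem(model, save_dir, tag="master", iteration=0):
    """Write a failed Pyomo subproblem in BARON .bar format with readable labels."""
    os.makedirs(save_dir, exist_ok=True)
    filename = os.path.join(save_dir, "%s_%s_%d.bar" % (model.name, tag, iteration))
    # symbolic_solver_labels keeps the original component names in the output file
    model.write(filename, io_options={'symbolic_solver_labels': True})
    return filename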