Example 1
def _build_equality_set(model):
    """Construct an equality set map.

    Maps all variables to the set of variables that are linked to them by
    equality. Mapping takes place using id(). That is, if you have x = y, then
    you would have id(x) -> ComponentSet([x, y]) and id(y) -> ComponentSet([x,
    y]) in the mapping.

    """
    # Map of variables to their equality set (ComponentSet)
    eq_var_map = ComponentMap()

    # Loop through all the active constraints in the model
    for constraint in model.component_data_objects(
            ctype=Constraint, active=True, descend_into=True):
        eq_linked_vars = _get_equality_linked_variables(constraint)
        if not eq_linked_vars:
            continue  # if we get an empty tuple, skip to next constraint.
        v1, v2 = eq_linked_vars
        # set1 defaults to a fresh equality set containing both variables;
        # set2 only needs to be iterable for the update below, so a plain
        # tuple default is enough.
        set1 = eq_var_map.get(v1, ComponentSet((v1, v2)))
        set2 = eq_var_map.get(v2, (v2,))

        # if set1 and set2 are equivalent, skip to next constraint.
        if set1 is set2:
            continue

        # add all elements of set2 to set1
        set1.update(set2)
        # Update all elements to point to set1
        for v in set1:
            eq_var_map[v] = set1

    return eq_var_map
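
A minimal usage sketch of the map this builds (hypothetical three-variable model; it assumes the module's _get_equality_linked_variables helper recognizes plain x == y constraints):

# Hypothetical illustration of the result of _build_equality_set.
from pyomo.environ import ConcreteModel, Constraint, Var

m = ConcreteModel()
m.x = Var()
m.y = Var()
m.z = Var()
m.c1 = Constraint(expr=m.x == m.y)
m.c2 = Constraint(expr=m.y == m.z)

eq_var_map = _build_equality_set(m)
# All three variables point to the same ComponentSet, so updating it once
# updates the equality set seen through every key.
assert eq_var_map[m.x] is eq_var_map[m.y] is eq_var_map[m.z]
assert len(eq_var_map[m.x]) == 3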
Example 2
    def _getXorConstraint(self, disjunction):
        # Put the disjunction constraint on its parent block and
        # determine whether it is an OR or XOR constraint.

        # We never do this for just a DisjunctionData because we need
        # to know about the index set of its parent component. So if
        # we called this on a DisjunctionData, we did something wrong.
        assert isinstance(disjunction, Disjunction)
        parent = disjunction.parent_block()
        if hasattr(parent, "_gdp_transformation_info"):
            infodict = parent._gdp_transformation_info
            if type(infodict) is not dict:
                raise GDP_Error(
                    "Component %s contains an attribute named "
                    "_gdp_transformation_info. The transformation requires "
                    "that it can create this attribute!" % parent.name)
            try:
                # On the off-chance that another GDP transformation went
                # first, the infodict may exist, but the specific map we
                # want will not be present
                orConstraintMap = infodict['disjunction_or_constraint']
            except KeyError:
                orConstraintMap = infodict['disjunction_or_constraint'] \
                                  = ComponentMap()
        else:
            infodict = parent._gdp_transformation_info = {}
            orConstraintMap = infodict['disjunction_or_constraint'] \
                              = ComponentMap()

        # If the Constraint already exists, return it
        if disjunction in orConstraintMap:
            return orConstraintMap[disjunction]

        # add the XOR (or OR) constraint to the parent block (with a unique
        # name). It's indexed if this is an IndexedDisjunction, and not
        # otherwise.
        orC = Constraint(disjunction.index_set()) if \
            disjunction.is_indexed() else Constraint()
        # The name used to indicate whether these were OR or XOR
        # disjunctions; however, now that Disjunctions are allowed to mix
        # the two, we can no longer make that distinction in the name.
        #    nm = '_xor' if xor else '_or'
        nm = '_xor'
        orCname = unique_component_name(
            parent, '_gdp_bigm_relaxation_' + disjunction.name + nm)
        parent.add_component(orCname, orC)
        orConstraintMap[disjunction] = orC
        return orC
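
For context, the returned container starts out empty; a hedged sketch of how the calling transformation typically fills it (disjunction_data and index are illustrative names):

# Illustrative only: the two-tuple (expr, 1) form declares the equality
# expr == 1, i.e., exactly one indicator variable may be active.
or_expr = sum(disjunct.indicator_var for disjunct in disjunction_data.disjuncts)
orC.add(index, (or_expr, 1))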
Example 3
    def _transform_disjunct(self, obj, transBlock, varSet, localVars):
        if hasattr(obj, "_gdp_transformation_info"):
            infodict = obj._gdp_transformation_info
            # If the user has something with our name that is not a dict, we
            # scream. If they have a dict with this name then we are just going
            # to use it...
            if type(infodict) is not dict:
                raise GDP_Error(
                    "Disjunct %s contains an attribute named "
                    "_gdp_transformation_info. The transformation requires "
                    "that it can create this attribute!" % obj.name)
        else:
            infodict = obj._gdp_transformation_info = {}
        # deactivated means either we've already transformed it or the user
        # deactivated it
        if not obj.active:
            if obj.indicator_var.is_fixed():
                if value(obj.indicator_var) == 0:
                    # The user cleanly deactivated the disjunct: there
                    # is nothing for us to do here.
                    return
                else:
                    raise GDP_Error(
                        "The disjunct %s is deactivated, but the "
                        "indicator_var is fixed to %s. This makes no sense." %
                        (obj.name, value(obj.indicator_var)))
            if not infodict.get('relaxed', False):
                raise GDP_Error(
                    "The disjunct %s is deactivated, but the "
                    "indicator_var is not fixed and the disjunct does not "
                    "appear to have been relaxed. This makes no sense." %
                    (obj.name, ))

        if 'chull' in infodict:
            # we've transformed it (with CHull), so don't do it again.
            return

        # add reference to original disjunct to info dict on
        # transformation block
        relaxedDisjuncts = transBlock.relaxedDisjuncts
        relaxationBlock = relaxedDisjuncts[len(relaxedDisjuncts)]
        relaxationBlockInfo = relaxationBlock._gdp_transformation_info = {
            'src': obj,
            'srcVars': ComponentMap(),
            'srcConstraints': ComponentMap(),
            'boundConstraintToSrcVar': ComponentMap(),
        }
        infodict['chull'] = chull = {
            'relaxationBlock': relaxationBlock,
            'relaxedConstraints': ComponentMap(),
            'disaggregatedVars': ComponentMap(),
            'bigmConstraints': ComponentMap(),
        }

        # if this is a disjunctData from an indexed disjunct, we are
        # going to want to check at the end that the container is
        # deactivated if everything in it is. So we save it in our
        # dictionary of things to check if it isn't there already.
        disjParent = obj.parent_component()
        if disjParent.is_indexed() and \
           disjParent not in transBlock.disjContainers:
            transBlock.disjContainers.add(disjParent)

        # add the disaggregated variables and their bigm constraints
        # to the relaxationBlock
        for var in varSet:
            lb = var.lb
            ub = var.ub
            if lb is None or ub is None:
                raise GDP_Error("Variables that appear in disjuncts must be "
                                "bounded in order to use the chull "
                                "transformation! Missing bound for %s." %
                                (var.name))

            disaggregatedVar = Var(within=Reals,
                                   bounds=(min(0, lb), max(0, ub)),
                                   initialize=var.value)
            # naming conflicts are possible here since this is a bunch
            # of variables from different blocks coming together, so we
            # get a unique name
            disaggregatedVarName = unique_component_name(
                relaxationBlock, var.local_name)
            relaxationBlock.add_component(disaggregatedVarName,
                                          disaggregatedVar)
            chull['disaggregatedVars'][var] = disaggregatedVar
            relaxationBlockInfo['srcVars'][disaggregatedVar] = var

            bigmConstraint = Constraint(transBlock.lbub)
            relaxationBlock.add_component(disaggregatedVarName + "_bounds",
                                          bigmConstraint)
            if lb:
                bigmConstraint.add('lb',
                                   obj.indicator_var * lb <= disaggregatedVar)
            if ub:
                bigmConstraint.add('ub',
                                   disaggregatedVar <= obj.indicator_var * ub)
            chull['bigmConstraints'][var] = bigmConstraint
            relaxationBlockInfo['boundConstraintToSrcVar'][
                bigmConstraint] = var

        for var in localVars:
            lb = var.lb
            ub = var.ub
            if lb is None or ub is None:
                raise GDP_Error("Variables that appear in disjuncts must be "
                                "bounded in order to use the chull "
                                "transformation! Missing bound for %s." %
                                (var.name))
            if value(lb) > 0:
                var.setlb(0)
            if value(ub) < 0:
                var.setub(0)

            # naming conflicts are possible here since this is a bunch
            # of variables from different blocks coming together, so we
            # get a unique name
            conName = unique_component_name(relaxationBlock,
                                            var.local_name + "_bounds")
            bigmConstraint = Constraint(transBlock.lbub)
            relaxationBlock.add_component(conName, bigmConstraint)
            bigmConstraint.add('lb', obj.indicator_var * lb <= var)
            bigmConstraint.add('ub', var <= obj.indicator_var * ub)
            chull['bigmConstraints'][var] = bigmConstraint
            relaxationBlockInfo['boundConstraintToSrcVar'][
                bigmConstraint] = var

        var_substitute_map = dict(
            (id(v), newV) for v, newV in iteritems(chull['disaggregatedVars']))
        zero_substitute_map = dict(
            (id(v), NumericConstant(0))
            for v, newV in iteritems(chull['disaggregatedVars']))
        zero_substitute_map.update(
            (id(v), NumericConstant(0)) for v in localVars)

        # Transform each component within this disjunct
        self._transform_block_components(obj, obj, infodict,
                                         var_substitute_map,
                                         zero_substitute_map)

        # deactivate disjunct so we know we've relaxed it
        obj._deactivate_without_fixing_indicator()
        infodict['relaxed'] = True
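
A self-contained micro-example (plain Pyomo, hypothetical names) of the indicator-scaled bound rows the loop above creates for a single variable with bounds (2, 10):

# Illustrative sketch of the per-variable output of the disaggregation loop.
from pyomo.environ import Binary, ConcreteModel, Constraint, Var

blk = ConcreteModel()
blk.indicator_var = Var(domain=Binary)               # stands in for obj.indicator_var
blk.x_disagg = Var(bounds=(min(0, 2), max(0, 10)))   # disaggregated copy of x
blk.x_disagg_bounds = Constraint(['lb', 'ub'])       # mirrors Constraint(transBlock.lbub)
blk.x_disagg_bounds.add('lb', blk.indicator_var * 2 <= blk.x_disagg)
blk.x_disagg_bounds.add('ub', blk.x_disagg <= blk.indicator_var * 10)
# With indicator_var == 0 both rows force x_disagg to 0; with indicator_var
# == 1 they recover the original bounds 2 <= x <= 10.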
Example 4
    def _transformDisjunctionData(self, obj, transBlock, index):
        # Convex hull doesn't work if this is an OR ("at least one")
        # disjunction, so if xor is False, give up.
        if not obj.xor:
            raise GDP_Error(
                "Cannot do convex hull transformation for "
                "disjunction %s with or constraint. Must be an xor!" %
                obj.name)

        parent_component = obj.parent_component()
        transBlock.disjContainers.add(parent_component)
        orConstraint, disaggregationConstraint \
            = self._getDisjunctionConstraints(parent_component)

        # We first go through and collect all the variables that we
        # are going to disaggregate.
        varOrder_set = ComponentSet()
        varOrder = []
        varsByDisjunct = ComponentMap()
        for disjunct in obj.disjuncts:
            # This is crazy, but if the disjunct has been previously
            # relaxed, the disjunct *could* be deactivated.
            not_active = not disjunct.active
            if not_active:
                disjunct._activate_without_unfixing_indicator()
            try:
                disjunctVars = varsByDisjunct[disjunct] = ComponentSet()
                for cons in disjunct.component_data_objects(
                        Constraint,
                        active=True,
                        sort=SortComponents.deterministic,
                        descend_into=Block):
                    # we aren't going to disaggregate fixed
                    # variables. This means there is trouble if they are
                    # unfixed later...
                    for var in EXPR.identify_variables(cons.body,
                                                       include_fixed=False):
                        # Note the use of a list so that we will
                        # eventually disaggregate the vars in a
                        # deterministic order (the order that we found
                        # them)
                        disjunctVars.add(var)
                        if var not in varOrder_set:
                            varOrder.append(var)
                            varOrder_set.add(var)
            finally:
                if not_active:
                    disjunct._deactivate_without_fixing_indicator()

        # We will only disaggregate variables that either
        #  1) appear in multiple disjuncts, or
        #  2) are not declared on this disjunct and are not themselves
        #     already-disaggregated variables (living on the transformation
        #     block)
        varSet = []
        localVars = ComponentMap((d, []) for d in obj.disjuncts)
        for var in varOrder:
            disjuncts = [d for d in varsByDisjunct if var in varsByDisjunct[d]]
            if len(disjuncts) > 1:
                varSet.append(var)
            elif self._contained_in(var, disjuncts[0]):
                localVars[disjuncts[0]].append(var)
            elif self._contained_in(var, transBlock):
                # There is nothing to do here: these are already
                # disaggregated vars that can/will be forced to 0 when
                # their disjunct is not active.
                pass
            else:
                varSet.append(var)

        # Now that we know who we need to disaggregate, we will do it
        # while we also transform the disjuncts.
        or_expr = 0
        for disjunct in obj.disjuncts:
            or_expr += disjunct.indicator_var
            self._transform_disjunct(disjunct, transBlock, varSet,
                                     localVars[disjunct])
        orConstraint.add(index, (or_expr, 1))

        for i, var in enumerate(varSet):
            disaggregatedExpr = 0
            for disjunct in obj.disjuncts:
                if 'chull' not in disjunct._gdp_transformation_info:
                    if not disjunct.indicator_var.is_fixed() \
                            or value(disjunct.indicator_var) != 0:
                        raise RuntimeError(
                            "GDP chull: disjunct was not relaxed, but "
                            "does not appear to be correctly deactivated.")
                    continue
                disaggregatedVar = disjunct._gdp_transformation_info['chull'][
                    'disaggregatedVars'][var]
                disaggregatedExpr += disaggregatedVar
            if type(index) is tuple:
                consIdx = index + (i, )
            elif parent_component.is_indexed():
                consIdx = (index, ) + (i, )
            else:
                consIdx = i

            disaggregationConstraint.add(consIdx, var == disaggregatedExpr)
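
A hedged end-to-end sketch of what the two methods above produce when the transformation that owns them is applied (it is registered as 'gdp.chull' in the Pyomo version this excerpt appears to come from):

# Hedged sketch; model and constraint contents are illustrative.
from pyomo.environ import ConcreteModel, TransformationFactory, Var
from pyomo.gdp import Disjunction

m = ConcreteModel()
m.x = Var(bounds=(0, 10))
m.d = Disjunction(expr=[[m.x <= 2], [m.x >= 8]])
TransformationFactory('gdp.chull').apply_to(m)
# The transformed model now contains, per disjunct, a disaggregated copy of
# x with its indicator-scaled bound rows (from _transform_disjunct), plus an
# XOR row (sum of indicators == 1) and a disaggregation row
# (x == sum of the disaggregated copies) added by _transformDisjunctionData.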
Example 5
    def _getDisjunctionConstraints(self, disjunction):
        # Put the disjunction constraint on its parent block

        # We never do this for just a DisjunctionData because we need
        # to know about the index set of its parent component. So if
        # we called this on a DisjunctionData, we did something wrong.
        assert isinstance(disjunction, Disjunction)
        parent = disjunction.parent_block()
        if hasattr(parent, "_gdp_transformation_info"):
            infodict = parent._gdp_transformation_info
            if type(infodict) is not dict:
                raise GDP_Error(
                    "Component %s contains an attribute named "
                    "_gdp_transformation_info. The transformation requires "
                    "that it can create this attribute!" % parent.name)
            try:
                # On the off-chance that another GDP transformation went
                # first, the infodict may exist, but the specific map we
                # want will not be present
                orConstraintMap = infodict['disjunction_or_constraint']
            except KeyError:
                orConstraintMap = infodict['disjunction_or_constraint'] \
                                  = ComponentMap()
            try:
                disaggregationConstraintMap = infodict[
                    'disjunction_disaggregation_constraints']
            except KeyError:
                disaggregationConstraintMap = infodict[
                    'disjunction_disaggregation_constraints'] \
                    = ComponentMap()
        else:
            infodict = parent._gdp_transformation_info = {}
            orConstraintMap = infodict['disjunction_or_constraint'] \
                              = ComponentMap()
            disaggregationConstraintMap = infodict[
                'disjunction_disaggregation_constraints'] \
                = ComponentMap()

        if disjunction in disaggregationConstraintMap:
            disaggregationConstraint = disaggregationConstraintMap[disjunction]
        else:
            # add the disaggregation constraint
            disaggregationConstraint \
                = disaggregationConstraintMap[disjunction] = Constraint(Any)
            parent.add_component(
                unique_component_name(parent, '_gdp_chull_relaxation_' + \
                                      disjunction.name + '_disaggregation'),
                disaggregationConstraint)

        # If the Constraint already exists, return it
        if disjunction in orConstraintMap:
            orC = orConstraintMap[disjunction]
        else:
            # add the XOR (or OR) constraint to the parent block (with a
            # unique name). It's indexed if this is an IndexedDisjunction,
            # and not otherwise.
            orC = Constraint(disjunction.index_set()) if \
                  disjunction.is_indexed() else Constraint()
            parent.add_component(
                unique_component_name(
                    parent,
                    '_gdp_chull_relaxation_' + disjunction.name + '_xor'), orC)
            orConstraintMap[disjunction] = orC

        return orC, disaggregationConstraint
Example 6
    def _apply_to(self, model, detect_fixed_vars=True):
        """Apply the transformation to the given model."""
        # Generate the equality sets
        eq_var_map = _build_equality_set(model)

        # Detect and process fixed variables.
        if detect_fixed_vars:
            _fix_equality_fixed_variables(model)

        # Generate aggregation infrastructure
        model._var_aggregator_info = Block(
            doc="Holds information for the variable aggregation "
            "transformation system.")
        z = model._var_aggregator_info.z = VarList(doc="Aggregated variables.")
        # Map of the aggregate var to the equality set (ComponentSet)
        z_to_vars = model._var_aggregator_info.z_to_vars = ComponentMap()
        # Map of variables to their corresponding aggregate var
        var_to_z = model._var_aggregator_info.var_to_z = ComponentMap()
        processed_vars = ComponentSet()

        # TODO: This iteration is sorted by the variable name of the key in
        # order to preserve determinism. Unfortunately, var.name is currently
        # an expensive operation.
        for var, eq_set in sorted(eq_var_map.items(),
                                  key=lambda tup: tup[0].name):
            if var in processed_vars:
                continue  # Skip already-processed variables

            # This would be weird. The variable hasn't been processed, but is
            # in the map. Raise an exception.
            assert var_to_z.get(var, None) is None

            z_agg = z.add()
            z_to_vars[z_agg] = eq_set
            var_to_z.update(ComponentMap((v, z_agg) for v in eq_set))

            # Set the bounds of the aggregate variable based on the bounds of
            # the variables in its equality set.
            z_agg.setlb(max_if_not_None(v.lb for v in eq_set if v.has_lb()))
            z_agg.setub(min_if_not_None(v.ub for v in eq_set if v.has_ub()))

            # Set the fixed status of the aggregate var
            fixed_vars = [v for v in eq_set if v.fixed]
            if fixed_vars:
                # Check to make sure all the fixed values are the same.
                if any(var.value != fixed_vars[0].value
                       for var in fixed_vars[1:]):
                    raise ValueError(
                        "Aggregate variable for equality set is fixed to "
                        "multiple different values: %s" % (fixed_vars,))
                z_agg.fix(fixed_vars[0].value)

                # Check that the fixed value lies within bounds.
                if z_agg.has_lb() and z_agg.value < value(z_agg.lb):
                    raise ValueError(
                        "Aggregate variable for equality set is fixed to "
                        "a value less than its lower bound: %s < LB %s" %
                        (z_agg.value, value(z_agg.lb))
                    )
                if z_agg.has_ub() and z_agg.value > value(z_agg.ub):
                    raise ValueError(
                        "Aggregate variable for equality set is fixed to "
                        "a value greater than its upper bound: %s > UB %s" %
                        (z_agg.value, value(z_agg.ub))
                    )
            else:
                # Set the value to be the average of the values within the
                # bounds only if the value is not already fixed.
                values_within_bounds = [
                    v.value for v in eq_set if (
                        v.value is not None and
                        ((z_agg.has_lb() and v.value >= value(z_agg.lb))
                         or not z_agg.has_lb()) and
                        ((z_agg.has_ub() and v.value <= value(z_agg.ub))
                         or not z_agg.has_ub())
                    )]
                num_vals = len(values_within_bounds)
                z_agg.value = (
                    sum(val for val in values_within_bounds) / num_vals) \
                    if num_vals > 0 else None

            processed_vars.update(eq_set)

        # Do the substitution
        substitution_map = {id(var): z_var
                            for var, z_var in var_to_z.items()}
        for constr in model.component_data_objects(
            ctype=Constraint, active=True
        ):
            new_body = ExpressionReplacementVisitor(
                substitute=substitution_map
            ).dfs_postorder_stack(constr.body)
            constr.set_value((constr.lower, new_body, constr.upper))

        for objective in model.component_data_objects(
            ctype=Objective, active=True
        ):
            new_expr = ExpressionReplacementVisitor(
                substitute=substitution_map
            ).dfs_postorder_stack(objective.expr)
            objective.set_value(new_expr)
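
A hedged usage sketch for the transformation whose _apply_to is shown above (the registration name 'contrib.aggregate_vars' is an assumption based on the surrounding module):

# Hedged sketch: x and y are linked by equality, so both are replaced by a
# single aggregate variable z in the objective and constraints.
from pyomo.environ import (ConcreteModel, Constraint, Objective,
                           TransformationFactory, Var)

m = ConcreteModel()
m.x = Var(bounds=(0, 4))
m.y = Var(bounds=(1, 5))
m.link = Constraint(expr=m.x == m.y)
m.obj = Objective(expr=m.x + 2 * m.y)
TransformationFactory('contrib.aggregate_vars').apply_to(m)
z = m._var_aggregator_info.z
# z[1] takes the tightest bounds of its equality set: lb = 1, ub = 4.
assert z[1].lb == 1 and z[1].ub == 4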
Example 7
    def _bigM_relax_disjunct(self, obj, transBlock, bigM, suffix_list):
        if hasattr(obj, "_gdp_transformation_info"):
            infodict = obj._gdp_transformation_info
            # If the user has something with our name that is not a dict, we
            # scream. If they have a dict with this name then we are just going
            # to use it...
            if type(infodict) is not dict:
                raise GDP_Error(
                    "Disjunct %s contains an attribute named "
                    "_gdp_transformation_info. The transformation requires "
                    "that it can create this attribute!" % obj.name)
        else:
            infodict = obj._gdp_transformation_info = {}

        # deactivated -> either we've already transformed it or the user
        # deactivated it
        if not obj.active:
            if obj.indicator_var.is_fixed():
                if value(obj.indicator_var) == 0:
                    # The user cleanly deactivated the disjunct: there
                    # is nothing for us to do here.
                    return
                else:
                    raise GDP_Error(
                        "The disjunct %s is deactivated, but the "
                        "indicator_var is fixed to %s. This makes no sense." %
                        (obj.name, value(obj.indicator_var)))
            if not infodict.get('relaxed', False):
                raise GDP_Error(
                    "The disjunct %s is deactivated, but the "
                    "indicator_var is not fixed and the disjunct does not "
                    "appear to have been relaxed. This makes no sense." %
                    (obj.name, ))

        if 'bigm' in infodict:
            # we've transformed it (with BigM), so don't do it again.
            return

        # add reference to original disjunct to info dict on transformation
        # block
        relaxedDisjuncts = transBlock.relaxedDisjuncts
        relaxationBlock = relaxedDisjuncts[len(relaxedDisjuncts)]
        relaxationBlock._gdp_transformation_info = {
            'src': obj,
            'srcConstraints': ComponentMap(),
        }

        # add reference to transformation block on original disjunct
        assert 'bigm' not in infodict
        infodict['bigm'] = {
            'relaxationBlock': relaxationBlock,
            'relaxedConstraints': ComponentMap()
        }

        # if this is a disjunctData from an indexed disjunct, we are
        # going to want to check at the end that the container is
        # deactivated if everything in it is. So we save it in our
        # dictionary of things to check if it isn't there already.
        disjParent = obj.parent_component()
        if disjParent.is_indexed() and \
           disjParent not in transBlock.disjContainers:
            transBlock.disjContainers.add(disjParent)

        # This is crazy, but if the disjunction has been previously
        # relaxed, the disjunct *could* be deactivated.  This is a big
        # deal for CHull, as it uses the component_objects /
        # component_data_objects generators.  For BigM, that is OK,
        # because we never use those generators with active=True.  I am
        # only noting it here for the future when someone (me?) is
        # comparing the two relaxations.
        #
        # Transform each component within this disjunct
        self._transform_block_components(obj, obj, infodict, bigM, suffix_list)

        # deactivate disjunct so we know we've relaxed it
        obj._deactivate_without_fixing_indicator()
        infodict['relaxed'] = True
Example 8
def add_outer_approximation_cuts(var_values, duals, solve_data, config):
    """Add outer approximation cuts to the linear GDP model."""
    m = solve_data.linear_GDP
    GDPopt = m.GDPopt_utils
    sign_adjust = -1 if GDPopt.objective.sense == minimize else 1

    # copy values over
    for var, val in zip(GDPopt.working_var_list, var_values):
        if val is not None and not var.fixed:
            var.value = val

    # TODO some kind of special handling if the dual is phenomenally small?
    config.logger.debug('Adding OA cuts.')

    nonlinear_constraints = ComponentSet(GDPopt.working_nonlinear_constraints)
    counter = 0
    for constr, dual_value in zip(GDPopt.working_constraints_list, duals):
        if dual_value is None or constr not in nonlinear_constraints:
            continue

        # Determine if the user pre-specified that OA cuts should not be
        # generated for the given constraint.
        parent_block = constr.parent_block()
        ignore_set = getattr(parent_block, 'GDPopt_ignore_OA', None)
        config.logger.debug('Ignore_set %s' % ignore_set)
        if (ignore_set and
            (constr in ignore_set or constr.parent_component() in ignore_set)):
            config.logger.debug(
                'OA cut addition for %s skipped because it is in '
                'the ignore set.' % constr.name)
            continue

        config.logger.debug("Adding OA cut for %s with dual value %s" %
                            (constr.name, dual_value))

        # TODO make this more efficient by not having to use differentiate()
        # at each iteration.
        constr_vars = list(EXPR.identify_variables(constr.body))
        jac_list = differentiate(constr.body, wrt_list=constr_vars)
        jacobians = ComponentMap(zip(constr_vars, jac_list))

        # Create a block on which to put outer approximation cuts.
        oa_utils = parent_block.component('GDPopt_OA')
        if oa_utils is None:
            oa_utils = parent_block.GDPopt_OA = Block(
                doc="Block holding outer approximation cuts "
                "and associated data.")
            oa_utils.GDPopt_OA_cuts = ConstraintList()
            oa_utils.GDPopt_OA_slacks = VarList(bounds=(0, config.max_slack),
                                                domain=NonNegativeReals,
                                                initialize=0)

        oa_cuts = oa_utils.GDPopt_OA_cuts
        slack_var = oa_utils.GDPopt_OA_slacks.add()
        oa_cuts.add(expr=copysign(1, sign_adjust * dual_value) *
                    (value(constr.body) + sum(
                        value(jacobians[var]) * (var - value(var))
                        for var in constr_vars)) + slack_var <= 0)
        counter += 1

    config.logger.info('Added %s OA cuts' % counter)
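
Written out, each cut added above is a first-order (outer) approximation of the nonlinear constraint around the current point:

# Illustrative form of the cut for a nonlinear constraint g(x) <= 0,
# linearized at the current point x_bar:
#     sign * ( g(x_bar) + sum_j dg/dx_j(x_bar) * (x_j - x_bar_j) ) + s <= 0
# where sign = copysign(1, sign_adjust * dual_value) orients the cut using
# the constraint's dual, and s is the nonnegative, bounded slack variable
# drawn from GDPopt_OA_slacks.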
Example 9
    def solve(self, model, **kwds):
        """Solve the model.

        Warning: this solver is still in beta. Keyword arguments subject to
        change. Undocumented keyword arguments definitely subject to change.

        This function performs all of the GDPopt solver setup and problem
        validation. It then calls upon helper functions to construct the
        initial master approximation and iteration loop.

        Args:
            model (Block): a Pyomo model or block to be solved

        """
        config = self.CONFIG(kwds.pop('options', {}))
        config.set_value(kwds)
        solve_data = GDPoptSolveData()
        created_GDPopt_block = False

        old_logger_level = config.logger.getEffectiveLevel()
        try:
            if config.tee and old_logger_level > logging.INFO:
                # If the logger does not already include INFO, include it.
                config.logger.setLevel(logging.INFO)
            config.logger.info("---Starting GDPopt---")

            # Create a model block on which to store GDPopt-specific utility
            # modeling objects.
            if hasattr(model, 'GDPopt_utils'):
                raise RuntimeError(
                    "GDPopt needs to create a Block named GDPopt_utils "
                    "on the model object, but an attribute with that name "
                    "already exists.")
            else:
                created_GDPopt_block = True
                model.GDPopt_utils = Block(
                    doc="Container for GDPopt solver utility modeling objects")

            solve_data.original_model = model

            solve_data.working_model = clone_orig_model_with_lists(model)
            GDPopt = solve_data.working_model.GDPopt_utils
            record_original_model_statistics(solve_data, config)

            solve_data.current_strategy = config.strategy

            # Reformulate integer variables to binary
            reformulate_integer_variables(solve_data.working_model, config)

            # Save ordered lists of main modeling components, so that data can
            # be easily transferred between future model clones.
            build_ordered_component_lists(solve_data.working_model)
            record_working_model_statistics(solve_data, config)
            solve_data.results.solver.name = 'GDPopt ' + str(self.version())

            # Save model initial values. These are used later to initialize NLP
            # subproblems.
            solve_data.initial_var_values = list(
                v.value for v in GDPopt.working_var_list)

            # Store the initial model state as the best solution found. If we
            # find no better solution, then we will restore from this copy.
            solve_data.best_solution_found = solve_data.initial_var_values

            # Validate the model to ensure that GDPopt is able to solve it.
            if not model_is_valid(solve_data, config):
                return

            # Maps used to keep track of certain generated constraints
            GDPopt.oa_cut_map = ComponentMap()

            # Integer cuts exclude particular discrete decisions
            GDPopt.integer_cuts = ConstraintList(doc='integer cuts')

            # Feasible integer cuts exclude discrete realizations that have
            # been explored via an NLP subproblem. Depending on model
            # characteristics, the user may wish to revisit NLP subproblems
            # (with a different initialization, for example). Therefore, these
            # cuts are not enabled by default, unless the initial model has no
            # discrete decisions.

            # Note: these cuts will only exclude integer realizations that are
            # not already in the primary GDPopt_integer_cuts ConstraintList.
            GDPopt.no_backtracking = ConstraintList(
                doc='explored integer cuts')

            # Set up iteration counters
            solve_data.master_iteration = 0
            solve_data.mip_iteration = 0
            solve_data.nlp_iteration = 0

            # set up bounds
            solve_data.LB = float('-inf')
            solve_data.UB = float('inf')
            solve_data.iteration_log = {}

            # Flag indicating whether the solution improved in the past
            # iteration or not
            solve_data.feasible_solution_improved = False

            # Initialize the master problem
            GDPopt_initialize_master(solve_data, config)

            # Algorithm main loop
            GDPopt_iteration_loop(solve_data, config)

            # Update values in working model
            copy_var_list_values(
                from_list=solve_data.best_solution_found,
                to_list=GDPopt.working_var_list,
                config=config)
            GDPopt.objective_value.set_value(
                value(solve_data.working_objective_expr, exception=False))

            # Update values in original model
            copy_var_list_values(
                GDPopt.orig_var_list,
                solve_data.original_model.GDPopt_utils.orig_var_list,
                config)

            solve_data.results.problem.lower_bound = solve_data.LB
            solve_data.results.problem.upper_bound = solve_data.UB

        finally:
            config.logger.setLevel(old_logger_level)
            if created_GDPopt_block:
                model.del_component('GDPopt_utils')
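
A hedged usage sketch for this solve() entry point; 'strategy' and 'tee' appear in the excerpt above, while the sub-solver option names and availability are assumptions:

# Hedged sketch of driving the solver shown above.
from pyomo.environ import SolverFactory

# 'm' is a Pyomo model containing Disjunctions; keyword arguments are
# absorbed into self.CONFIG via **kwds as shown in solve().
results = SolverFactory('gdpopt').solve(
    m,
    strategy='LOA',       # logic-based outer approximation
    mip_solver='glpk',    # assumed available
    nlp_solver='ipopt',   # assumed available
    tee=True)
print(results.problem.lower_bound, results.problem.upper_bound)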