Example 1
def disjunctify(model, indicator_name, disjunct_name, LHS_disjunct_set,
                RHS_disjunct_set):
    """Turn paired LHS/RHS constraint groups into indexed disjuncts and
    disjunctions attached to ``model`` under the given component names."""
    assert len(LHS_disjunct_set) == len(RHS_disjunct_set)

    dset = list(range(len(LHS_disjunct_set)))

    def _disjunct_rule(disjunct, i, flag):
        # flag == 1 selects the LHS constraint group, flag == 0 the RHS group.
        if flag:
            con_lists = LHS_disjunct_set
        else:
            con_lists = RHS_disjunct_set

        disjunct.c = pe.ConstraintList()
        for k, cik in con_lists[i]:
            # Copy each constraint into the disjunct and deactivate the original.
            disjunct.c.add(pe.inequality(cik.lower, cik.body, cik.upper))
            cik.deactivate()

    # Attach the indexed Disjunct to the model under indicator_name.
    setattr(model, indicator_name,
            gdp.Disjunct(dset, [0, 1], rule=_disjunct_rule))
    _dj = getattr(model, indicator_name)

    # Define the disjunction: for each index, exactly one of the two
    # disjuncts (LHS or RHS group) must hold.
    def _disjunction_rule(model, i):
        return [_dj[i, 0], _dj[i, 1]]

    setattr(model, disjunct_name,
            gdp.Disjunction(dset, rule=_disjunction_rule))
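
For context, a minimal usage sketch of the helper above, assuming pe/gdp are the usual pyomo.environ/pyomo.gdp imports; the model, the (key, constraint) groupings and the component names are illustrative, not taken from the original code:

# Hypothetical usage sketch of disjunctify(); names and data are illustrative.
import pyomo.environ as pe
import pyomo.gdp as gdp

model = pe.ConcreteModel()
model.x = pe.Var(bounds=(0, 10))
model.lhs_con = pe.Constraint(expr=model.x <= 2)
model.rhs_con = pe.Constraint(expr=model.x >= 8)

# One disjunction: either the "LHS" group holds (x <= 2) or the "RHS" group (x >= 8).
LHS = [[('lo', model.lhs_con)]]
RHS = [[('hi', model.rhs_con)]]
disjunctify(model, 'regime_disjuncts', 'regime_disjunction', LHS, RHS)

# The original constraints are now deactivated and live inside the two disjuncts.
pe.TransformationFactory('gdp.bigm').apply_to(model, bigM=100)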
Example 2
    def test_with_gdp(self, name: str, opt_class: Type[PersistentSolver]):
        opt: PersistentSolver = opt_class()
        if not opt.available():
            raise unittest.SkipTest

        m = pe.ConcreteModel()
        m.x = pe.Var(bounds=(-10, 10))
        m.y = pe.Var(bounds=(-10, 10))
        m.obj = pe.Objective(expr=m.y)
        m.d1 = gdp.Disjunct()
        m.d1.c1 = pe.Constraint(expr=m.y >= m.x + 2)
        m.d1.c2 = pe.Constraint(expr=m.y >= -m.x + 2)
        m.d2 = gdp.Disjunct()
        m.d2.c1 = pe.Constraint(expr=m.y >= m.x + 1)
        m.d2.c2 = pe.Constraint(expr=m.y >= -m.x + 1)
        m.disjunction = gdp.Disjunction(expr=[m.d2, m.d1])
        pe.TransformationFactory("gdp.bigm").apply_to(m)

        res = opt.solve(m)
        self.assertAlmostEqual(res.best_feasible_objective, 1)
        self.assertAlmostEqual(m.x.value, 0)
        self.assertAlmostEqual(m.y.value, 1)

        opt: PersistentSolver = opt_class()
        opt.use_extensions = True
        res = opt.solve(m)
        self.assertAlmostEqual(res.best_feasible_objective, 1)
        self.assertAlmostEqual(m.x.value, 0)
        self.assertAlmostEqual(m.y.value, 1)
Example 3
    def test_disjunct_not_in_active_disjunction(self):
        m = pe.ConcreteModel()
        m.x = pe.Var()
        m.d1 = gdp.Disjunct()
        m.d1.c = pe.Constraint(expr=m.x == 1)
        m.d2 = gdp.Disjunct()
        m.d2.c = pe.Constraint(expr=m.x == 0)
        m.disjunction = gdp.Disjunction(expr=[m.d1, m.d2])
        m.disjunction.deactivate()
        with self.assertRaisesRegex(
                gdp.GDP_Error, '.*While it participates in a Disjunction, '
                'that Disjunction is currently deactivated.*'):
            pe.TransformationFactory('gdp.bigm').apply_to(m)
Example 4
    def test_disjunct_not_in_active_disjunction(self):
        m = pe.ConcreteModel()
        m.x = pe.Var()
        m.d1 = gdp.Disjunct()
        m.d1.c = pe.Constraint(expr=m.x == 1)
        m.d2 = gdp.Disjunct()
        m.d2.c = pe.Constraint(expr=m.x == 0)
        m.disjunction = gdp.Disjunction(expr=[m.d1, m.d2])
        m.disjunction.deactivate()
        pe.TransformationFactory('gdp.bigm').apply_to(m)
        log = StringIO()
        with LoggingIntercept(log, 'pyomo.gdp', logging.WARNING):
            check_model_algebraic(m)
        self.assertRegex(
            log.getvalue(), '.*While it participates in a Disjunction, '
            'that Disjunction is currently deactivated.*')
Example 5
    def generate_nn_guard_pyo(model: pyo.ConcreteModel,
                              input,
                              nn: torch.nn.Sequential,
                              action_ego=0,
                              M=1e2):
        model.nn_constraints = pyo.ConstraintList()
        gurobi_vars = []
        gurobi_vars.append(input)
        for i, layer in enumerate(nn):
            if type(layer) is torch.nn.Linear:
                layer_size = int(layer.out_features)
                v = pyo.Var(range(layer_size),
                            name=f"layer_{i}",
                            within=pyo.Reals)
                model.add_component(name=f"layer_{i}", val=v)
                weights = layer.weight.data.numpy()
                if layer.bias is not None:
                    bias = layer.bias.data.numpy()
                else:
                    bias = np.zeros(layer_size)

                # Affine layer: v[j] == W[j, :] @ previous_layer + b[j]
                for j in range(layer_size):
                    model.nn_constraints.add(
                        v[j] == sum(gurobi_vars[-1][k] * weights[j, k]
                                    for k in range(weights.shape[1])) +
                        bias[j])
                gurobi_vars.append(v)
            elif type(layer) is torch.nn.ReLU:
                layer_size = int(nn[i - 1].out_features)
                v = pyo.Var(range(layer_size),
                            name=f"layer_{i}",
                            within=pyo.NonNegativeReals)
                model.add_component(name=f"layer_{i}", val=v)

                z = pyo.Var(range(layer_size),
                            name=f"relu_{i}",
                            within=pyo.Binary)
                model.add_component(name=f"relu_{i}", val=z)
                # Alternative big-M encoding of ReLU (kept for reference):
                # for j in range(layer_size):
                #     model.nn_constraints.add(expr=v[j] >= gurobi_vars[-1][j])
                #     model.nn_constraints.add(expr=v[j] <= gurobi_vars[-1][j] + M * z[j])
                #     model.nn_constraints.add(expr=v[j] >= 0)
                #     model.nn_constraints.add(expr=v[j] <= M - M * z[j])

                # GDP encoding of ReLU: either v == x with x >= 0, or v == 0 with x <= 0.
                for j in range(layer_size):
                    # model.nn_constraints.add(expr=v[j] <= gurobi_vars[-1][j])
                    dis = gdp.Disjunction(expr=[[
                        v[j] >= gurobi_vars[-1][j], v[j] <= gurobi_vars[-1][j],
                        gurobi_vars[-1][j] >= 0
                    ], [v[j] == 0, gurobi_vars[-1][j] <= 0]])
                    model.add_component(f"relu_{i}_{j}", dis)
                gurobi_vars.append(v)
                """
                y = Relu(x)
                0 <= z <= 1, z is integer
                y >= x
                y <= x + Mz
                y >= 0
                y <= M - Mz"""
        for i in range(len(gurobi_vars[-1])):
            if i == action_ego:
                continue
            model.nn_constraints.add(
                gurobi_vars[-1][action_ego] >= gurobi_vars[-1][i])
        if model.component("obj"):
            model.del_component("obj")
        model.obj = pyo.Objective(expr=gurobi_vars[-1][action_ego],
                                  sense=pyo.minimize)
        TransformationFactory('gdp.bigm').apply_to(model, bigM=M)
        result = Experiment.solve(model, solver=Experiment.use_solver)
        if (result.solver.status
                == SolverStatus.ok) and (result.solver.termination_condition
                                         == TerminationCondition.optimal):
            return True
        elif (result.solver.termination_condition
              == TerminationCondition.infeasible
              or result.solver.termination_condition
              == TerminationCondition.infeasibleOrUnbounded):
            # log_infeasible_constraints(model)
            return False
        else:
            print(f"Solver status: {result.solver.status}")
            return False
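
The commented-out block and the closing docstring above describe the classic big-M encoding of y = ReLU(x); the active loop builds an equivalent pyomo.gdp Disjunction per neuron instead. For reference, a self-contained sketch of that big-M encoding for a single scalar value (the variable names and the bound M = 100 are illustrative, not taken from the snippet):

# Minimal big-M ReLU sketch; M must dominate the true range of x.
import pyomo.environ as pyo

M = 100.0
m = pyo.ConcreteModel()
m.x = pyo.Var(bounds=(-M, M))               # pre-activation value
m.y = pyo.Var(within=pyo.NonNegativeReals)  # y = ReLU(x)
m.z = pyo.Var(within=pyo.Binary)            # z = 1 selects the "x <= 0" branch

# y >= x and y >= 0 always hold; the M-terms switch which side is tight.
m.c1 = pyo.Constraint(expr=m.y >= m.x)
m.c2 = pyo.Constraint(expr=m.y <= m.x + M * m.z)
m.c3 = pyo.Constraint(expr=m.y <= M * (1 - m.z))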
Example 6
        def set_deadline_condition(model, case, session):
            return model.SESSION_DATES[
                session] <= model.CASE_DEADLINES[case] + (
                    (1 - model.SESSION_ASSIGNED[case, session]) * model.M)

        model.APPLY_DEADLINE = pe.Constraint(model.TASKS,
                                             rule=set_deadline_condition)

        def no_case_overlap(model, case1, case2, session):
            # Either case1 finishes before case2 starts or vice versa,
            # relaxed by big-M when either case is not assigned to the session.
            slack = (2 - model.SESSION_ASSIGNED[case1, session]
                     - model.SESSION_ASSIGNED[case2, session]) * model.M
            return [
                model.CASE_START_TIME[case1, session] + model.CASE_DURATION[case1]
                <= model.CASE_START_TIME[case2, session] + slack,
                model.CASE_START_TIME[case2, session] + model.CASE_DURATION[case2]
                <= model.CASE_START_TIME[case1, session] + slack,
            ]

        model.DISJUNCTIONS_RULE = pyogdp.Disjunction(model.DISJUNCTIONS,
                                                     rule=no_case_overlap)

        def theatre_util(model, session):
            return model.CASES_IN_SESSION[session] == \
                   sum([model.SESSION_ASSIGNED[case, session] for case in model.CASES])

        model.THEATRE_UTIL = pe.Constraint(model.SESSIONS, rule=theatre_util)

        pe.TransformationFactory("gdp.bigm").apply_to(model)

        return model

    def solve(self, solver_name, options=None, solver_path=None, local=True):

        if solver_path is not None:
            solver = pe.SolverFactory(solver_name, executable=solver_path)
Example 7
import pyomo.environ as pe
import pyomo.gdp as gdp
from premixing_models import premixing_model

m = pe.ConcreteModel()
m.pre_mix = premixing_model(model='hybrid')

m.Reynolds = pe.Var(within=pe.PositiveReals)

# Disjuncts
m.pre_mix.re_turbulent.re_cons = pe.Constraint(expr=m.Reynolds >= 1e4)
m.pre_mix.re_transition.re_cons = pe.Constraint(expr=m.Reynolds <= 9e3)

# Disjunctions
m.regim = gdp.Disjunction(
    expr=[m.pre_mix.re_turbulent, m.pre_mix.re_transition])

m.BigM = pe.Suffix(direction=pe.Suffix.LOCAL)
m.BigM[None] = 1000

bigM = pe.TransformationFactory("gdp.bigm")
bigM.apply_to(m)

m.pprint()
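
Here the big-M value is supplied through the BigM Suffix; an equivalent way (a small sketch with the same value) is to pass it straight to the transformation:

# Equivalent to the Suffix-based setup above: pass the big-M constant
# directly when applying the transformation.
pe.TransformationFactory("gdp.bigm").apply_to(m, bigM=1000)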
Example 8
    def __init__(self):

        self.model = m = pe.ConcreteModel()  # main model

        self.alpha = 0.72  # compressor coefficient
        self.eta = 0.75  # compressor efficiency
        self.gamma = 0.23077  # ratio of constant pressure heat capacity to constant volume heat capacity
        self.cp = 35.0  # heat capacity
        self.heat_of_reaction = -15
        self.volume_conversion = dict()
        self.volume_conversion[9] = 0.1
        self.volume_conversion[10] = 0.05
        self.reactor_volume = 100
        self.electricity_cost = 0.255
        self.cooling_cost = 700
        self.heating_cost = 8000
        self.purity_demand = 0.9  #purity demand in product stream
        self.demand = 1.0  # flowrate restriction on product flow
        self.flow_feed_lb = 0.5
        self.flow_feed_ub = 5
        self.flow_feed_temp = 3
        self.flow_feed_pressure = 1
        self.cost_flow_1 = 795.6
        self.cost_flow_2 = 1009.8
        self.price_of_product = 7650
        self.price_of_byproduct = 642.6
        self.cheap_reactor_fixed_cost = 100
        self.cheap_reactor_variable_cost = 5
        self.expensive_reactor_fixed_cost = 250
        self.expensive_reactor_variable_cost = 10
        self.heat_unit_match = 0.00306
        self.capacity_redundancy = 1.2
        self.antoine_unit_trans = 7500.6168
        self.K = 0.415
        self.delta_H = 26.25
        self.reactor_relation = 0.9
        self.purity_demand = 0.9
        self.fix_electricity_cost = 175
        self.two_stage_fix_cost = 50

        m.streams = pe.Set(initialize=list(range(1, 34)), ordered=True)
        m.components = pe.Set(initialize=['H2', 'CO', 'CH3OH', 'CH4'],
                              ordered=True)
        m.flows = pe.Var(m.streams, bounds=(0, 20))
        m.temps = pe.Var(m.streams, bounds=(3, 9))
        m.pressures = pe.Var(m.streams, bounds=(0.1, 15))
        m.component_flows = pe.Var(m.streams, m.components, bounds=(0, 20))

        flow_1 = dict()
        flow_1['H2'] = 0.6
        flow_1['CO'] = 0.25
        flow_1['CH4'] = 0.15
        m.flow_1_composition = pe.Param(m.components,
                                        initialize=flow_1,
                                        default=0)
        flow_2 = dict()
        flow_2['H2'] = 0.65
        flow_2['CO'] = 0.30
        flow_2['CH4'] = 0.05
        m.flow_2_composition = pe.Param(m.components,
                                        initialize=flow_2,
                                        default=0)

        m.pressures[13].setlb(2.5)
        m.temps[13].setlb(4.23)
        m.temps[13].setub(8.73)
        m.temps[18].setlb(5.23)
        m.temps[18].setub(8.73)
        m.flows[22].setlb(0.1)
        m.flows[22].setub(1.0)
        m.temps[23].fix(4)
        m.temps[25].fix(4)

        self.inlet_streams = dict()
        self.outlet_streams = dict()
        self.vapor_outlets = dict()
        self.liquid_outlets = dict()

        self.inlet_streams[3] = 4
        self.inlet_streams[4] = 5
        self.inlet_streams[5] = 7
        self.inlet_streams[6] = 8
        self.inlet_streams[7] = 11
        self.inlet_streams[8] = 12
        self.inlet_streams[9] = 15
        self.inlet_streams[10] = 14
        self.inlet_streams[11] = 18
        self.inlet_streams[12] = 19
        self.inlet_streams[13] = 20
        self.inlet_streams[14] = 22
        self.inlet_streams[15] = 24
        self.inlet_streams[16] = 27
        self.inlet_streams[17] = 28
        self.inlet_streams[18] = 30
        self.inlet_streams[19] = 31
        self.inlet_streams['feed_mixer'] = (1, 2)
        self.inlet_streams['feed_splitter'] = 3
        self.inlet_streams['compressed_feed_mixer'] = (6, 9)
        self.inlet_streams['recycle_feed_mixer'] = (10, 33)
        self.inlet_streams['reactor_feed_splitter'] = 13
        self.inlet_streams['reactor_product_mixer'] = (16, 17)
        self.inlet_streams['purge_splitter'] = 21
        self.inlet_streams['recycle_compressor_splitter'] = 26
        self.inlet_streams['recycle_compressor_mixer'] = (29, 32)

        self.outlet_streams[3] = 6
        self.outlet_streams[4] = 7
        self.outlet_streams[5] = 8
        self.outlet_streams[6] = 9
        self.outlet_streams[7] = 12
        self.outlet_streams[8] = 13
        self.outlet_streams[9] = 17
        self.outlet_streams[10] = 16
        self.outlet_streams[11] = 19
        self.outlet_streams[12] = 20
        self.outlet_streams[14] = 23
        self.outlet_streams[15] = 25
        self.outlet_streams[16] = 29
        self.outlet_streams[17] = 30
        self.outlet_streams[18] = 31
        self.outlet_streams[19] = 32
        self.outlet_streams['feed_mixer'] = 3
        self.outlet_streams['feed_splitter'] = (4, 5)
        self.outlet_streams['compressed_feed_mixer'] = 10
        self.outlet_streams['recycle_feed_mixer'] = 11
        self.outlet_streams['reactor_feed_splitter'] = (14, 15)
        self.outlet_streams['reactor_product_mixer'] = 18
        self.outlet_streams['purge_splitter'] = (26, 24)
        self.outlet_streams['recycle_compressor_splitter'] = (27, 28)
        self.outlet_streams['recycle_compressor_mixer'] = 33

        self.vapor_outlets[13] = 21
        self.liquid_outlets[13] = 22

        def _total_flow(_m, _s):
            return _m.flows[_s] == sum(_m.component_flows[_s, _c]
                                       for _c in _m.components)

        m.total_flow_con = pe.Constraint(m.streams, rule=_total_flow)

        m.purity_con = pe.Constraint(
            expr=m.component_flows[23, 'CH3OH'] >= self.purity_demand *
            m.flows[23])

        # ************************************
        # Feed
        # ************************************
        m.cheap_feed_disjunct = gdp.Disjunct()
        self.build_equal_streams(m.cheap_feed_disjunct, 1, 3)
        self.build_stream_doesnt_exist_con(m.cheap_feed_disjunct, 2)
        m.cheap_feed_disjunct.feed_cons = c = pe.ConstraintList()
        c.add(m.component_flows[1, 'H2'] == m.flow_1_composition['H2'] *
              m.flows[1])
        c.add(m.component_flows[1, 'CO'] == m.flow_1_composition['CO'] *
              m.flows[1])
        c.add(m.component_flows[1, 'CH4'] == m.flow_1_composition['CH4'] *
              m.flows[1])
        c.add(m.flows[1] >= self.flow_feed_lb)
        c.add(m.flows[1] <= self.flow_feed_ub)
        c.add(m.temps[1] == self.flow_feed_temp)
        c.add(m.pressures[1] == self.flow_feed_pressure)

        m.expensive_feed_disjunct = gdp.Disjunct()
        self.build_equal_streams(m.expensive_feed_disjunct, 2, 3)
        self.build_stream_doesnt_exist_con(m.expensive_feed_disjunct, 1)
        m.expensive_feed_disjunct.feed_cons = c = pe.ConstraintList()
        c.add(m.component_flows[2, 'H2'] == m.flow_2_composition['H2'] *
              m.flows[2])
        c.add(m.component_flows[2, 'CO'] == m.flow_2_composition['CO'] *
              m.flows[2])
        c.add(m.component_flows[2, 'CH4'] == m.flow_2_composition['CH4'] *
              m.flows[2])
        c.add(m.flows[2] >= self.flow_feed_lb)
        c.add(m.flows[2] <= self.flow_feed_ub)
        c.add(m.temps[2] == self.flow_feed_temp)
        c.add(m.pressures[2] == self.flow_feed_pressure)

        m.feed_disjunctions = gdp.Disjunction(
            expr=[m.cheap_feed_disjunct, m.expensive_feed_disjunct])

        # ************************************
        # Feed compressors
        # ************************************
        m.single_stage_feed_compressor_disjunct = gdp.Disjunct()
        self.build_equal_streams(m.single_stage_feed_compressor_disjunct, 3, 4)
        self.build_stream_doesnt_exist_con(
            m.single_stage_feed_compressor_disjunct, 5)
        self.build_stream_doesnt_exist_con(
            m.single_stage_feed_compressor_disjunct, 7)
        self.build_stream_doesnt_exist_con(
            m.single_stage_feed_compressor_disjunct, 8)
        self.build_stream_doesnt_exist_con(
            m.single_stage_feed_compressor_disjunct, 9)
        self.build_equal_streams(m.single_stage_feed_compressor_disjunct, 6,
                                 10)
        self.build_compressor(m.single_stage_feed_compressor_disjunct, 3)

        m.two_stage_feed_compressor_disjunct = gdp.Disjunct()
        self.build_equal_streams(m.two_stage_feed_compressor_disjunct, 3, 5)
        self.build_equal_streams(m.two_stage_feed_compressor_disjunct, 9, 10)
        self.build_stream_doesnt_exist_con(
            m.two_stage_feed_compressor_disjunct, 4)
        self.build_stream_doesnt_exist_con(
            m.two_stage_feed_compressor_disjunct, 6)
        self.build_compressor(m.two_stage_feed_compressor_disjunct, 4)
        self.build_cooler(m.two_stage_feed_compressor_disjunct, 5)
        self.build_compressor(m.two_stage_feed_compressor_disjunct, 6)
        m.two_stage_feed_compressor_disjunct.equal_electric_requirements = pe.Constraint(
            expr=m.two_stage_feed_compressor_disjunct.compressor_4.
            electricity_requirement == m.two_stage_feed_compressor_disjunct.
            compressor_6.electricity_requirement)
        m.two_stage_feed_compressor_disjunct.exists = pe.Var(bounds=(0, 1))
        m.two_stage_feed_compressor_disjunct.exists_con = pe.Constraint(
            expr=m.two_stage_feed_compressor_disjunct.exists == 1)

        m.feed_compressor_disjunction = gdp.Disjunction(expr=[
            m.single_stage_feed_compressor_disjunct,
            m.two_stage_feed_compressor_disjunct
        ])

        self.build_mixer(m, 'recycle_feed_mixer')
        self.build_cooler(m, 7)
        self.build_heater(m, 8)

        # ************************************
        # Reactors
        # ************************************
        m.expensive_reactor = gdp.Disjunct()
        self.build_equal_streams(m.expensive_reactor, 13, 15)
        self.build_equal_streams(m.expensive_reactor, 17, 18)
        self.build_stream_doesnt_exist_con(m.expensive_reactor, 14)
        self.build_stream_doesnt_exist_con(m.expensive_reactor, 16)
        self.build_reactor(m.expensive_reactor, 9)
        m.expensive_reactor.exists = pe.Var(bounds=(0, 1))
        m.expensive_reactor.exists_con = pe.Constraint(
            expr=m.expensive_reactor.exists == 1)
        m.expensive_reactor.composition_cons = c = pe.ConstraintList()
        for _comp in m.components:
            c.add(m.component_flows[17, _comp] >= 0.01)

        m.cheap_reactor = gdp.Disjunct()
        self.build_equal_streams(m.cheap_reactor, 13, 14)
        self.build_equal_streams(m.cheap_reactor, 16, 18)
        self.build_stream_doesnt_exist_con(m.cheap_reactor, 15)
        self.build_stream_doesnt_exist_con(m.cheap_reactor, 17)
        self.build_reactor(m.cheap_reactor, 10)
        m.cheap_reactor.exists = pe.Var(bounds=(0, 1))
        m.cheap_reactor.exists_con = pe.Constraint(
            expr=m.cheap_reactor.exists == 1)
        m.cheap_reactor.composition_cons = c = pe.ConstraintList()
        for _comp in m.components:
            c.add(m.component_flows[16, _comp] >= 0.01)

        m.reactor_disjunction = gdp.Disjunction(
            expr=[m.expensive_reactor, m.cheap_reactor])

        self.build_expansion_valve(m, 11)
        self.build_cooler(m, 12)
        self.build_flash(m, 13)
        self.build_heater(m, 14)
        self.build_splitter(m, 'purge_splitter')
        self.build_heater(m, 15)

        # ************************************
        # Recycle compressors
        # ************************************
        m.single_stage_recycle_compressor_disjunct = gdp.Disjunct()
        self.build_equal_streams(m.single_stage_recycle_compressor_disjunct,
                                 26, 27)
        self.build_equal_streams(m.single_stage_recycle_compressor_disjunct,
                                 29, 33)
        self.build_stream_doesnt_exist_con(
            m.single_stage_recycle_compressor_disjunct, 28)
        self.build_stream_doesnt_exist_con(
            m.single_stage_recycle_compressor_disjunct, 30)
        self.build_stream_doesnt_exist_con(
            m.single_stage_recycle_compressor_disjunct, 31)
        self.build_stream_doesnt_exist_con(
            m.single_stage_recycle_compressor_disjunct, 32)
        self.build_compressor(m.single_stage_recycle_compressor_disjunct, 16)

        m.two_stage_recycle_compressor_disjunct = gdp.Disjunct()
        self.build_equal_streams(m.two_stage_recycle_compressor_disjunct, 26,
                                 28)
        self.build_equal_streams(m.two_stage_recycle_compressor_disjunct, 32,
                                 33)
        self.build_stream_doesnt_exist_con(
            m.two_stage_recycle_compressor_disjunct, 27)
        self.build_stream_doesnt_exist_con(
            m.two_stage_recycle_compressor_disjunct, 29)
        self.build_compressor(m.two_stage_recycle_compressor_disjunct, 17)
        self.build_cooler(m.two_stage_recycle_compressor_disjunct, 18)
        self.build_compressor(m.two_stage_recycle_compressor_disjunct, 19)
        m.two_stage_recycle_compressor_disjunct.equal_electric_requirements = pe.Constraint(
            expr=m.two_stage_recycle_compressor_disjunct.compressor_17.
            electricity_requirement == m.two_stage_recycle_compressor_disjunct.
            compressor_19.electricity_requirement)
        m.two_stage_recycle_compressor_disjunct.exists = pe.Var(bounds=(0, 1))
        m.two_stage_recycle_compressor_disjunct.exists_con = pe.Constraint(
            expr=m.two_stage_recycle_compressor_disjunct.exists == 1)

        m.recycle_compressor_disjunction = gdp.Disjunction(expr=[
            m.single_stage_recycle_compressor_disjunct,
            m.two_stage_recycle_compressor_disjunct
        ])

        # ************************************
        # Objective
        # ************************************

        e = 0
        e -= self.cost_flow_1 * m.flows[1]
        e -= self.cost_flow_2 * m.flows[2]
        e += self.price_of_product * m.flows[23]
        e += self.price_of_byproduct * m.flows[25]
        e -= self.cheap_reactor_variable_cost * self.reactor_volume * m.cheap_reactor.exists
        e -= self.cheap_reactor_fixed_cost * m.cheap_reactor.exists
        e -= self.expensive_reactor_variable_cost * self.reactor_volume * m.expensive_reactor.exists
        e -= self.expensive_reactor_fixed_cost * m.expensive_reactor.exists
        e -= (
            self.fix_electricity_cost + self.electricity_cost
        ) * m.single_stage_feed_compressor_disjunct.compressor_3.electricity_requirement
        e -= self.two_stage_fix_cost * m.two_stage_feed_compressor_disjunct.exists
        e -= (
            self.fix_electricity_cost + self.electricity_cost
        ) * m.two_stage_feed_compressor_disjunct.compressor_4.electricity_requirement
        e -= (
            self.fix_electricity_cost + self.electricity_cost
        ) * m.two_stage_feed_compressor_disjunct.compressor_6.electricity_requirement
        e -= self.cooling_cost * m.two_stage_feed_compressor_disjunct.cooler_5.heat_duty
        e -= (
            self.fix_electricity_cost + self.electricity_cost
        ) * m.single_stage_recycle_compressor_disjunct.compressor_16.electricity_requirement
        e -= self.two_stage_fix_cost * m.two_stage_recycle_compressor_disjunct.exists
        e -= (
            self.fix_electricity_cost + self.electricity_cost
        ) * m.two_stage_recycle_compressor_disjunct.compressor_17.electricity_requirement
        e -= (
            self.fix_electricity_cost + self.electricity_cost
        ) * m.two_stage_recycle_compressor_disjunct.compressor_19.electricity_requirement
        e -= self.cooling_cost * m.two_stage_recycle_compressor_disjunct.cooler_18.heat_duty
        e -= self.cooling_cost * m.cooler_7.heat_duty
        e -= self.heating_cost * m.heater_8.heat_duty
        e -= self.cooling_cost * m.cooler_12.heat_duty
        e -= self.heating_cost * m.heater_14.heat_duty
        e -= self.heating_cost * m.heater_15.heat_duty
        m.objective = pe.Objective(expr=-e)
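
The constructor above only builds the GDP flowsheet model. A hedged sketch of how it might be solved (the enclosing class name "MethanolModel", the big-M value, and the solver choice are assumptions, not shown in the snippet):

# Hypothetical usage sketch: "MethanolModel" stands in for the class whose
# __init__ appears above. Reformulate the disjunctions with big-M (the value
# 100 is an assumed bound) and pass the resulting MINLP to a suitable solver.
flowsheet = MethanolModel()
m = flowsheet.model
pe.TransformationFactory('gdp.bigm').apply_to(m, bigM=100)
results = pe.SolverFactory('bonmin').solve(m, tee=True)
print(pe.value(m.objective))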
Example 9
def opt_click_work():
    #retrieve values from sliders for capacity and Tmin
    for slidi in range(0,len(sliders)):
        dftd.loc[slidi, ('Tmin')]=sliders[slidi].value[0]
        dftd.loc[slidi, ('capacity')]=sliders[slidi].value[1]
    #retrieve values from sliders for supply costs   
    for slidj in range(0,len(sliderSCost)):
        dftd.loc[slidj, ('supplycost')]=sliderSCost[slidj].value/100 # read in cents, convert to dollars
       
    # pre-compute cost data per station-terminal pair (in dataframe matrix algebra)
    TruckLoads=dfdemandsarray/TruckCapacity
    MileageCost=2*dfdistance*CostPerMile*TruckLoads #round-trip
    #TripHours=TruckLoads*((2*dftime)+ExtraTime)/60
    TripHours=TruckLoads*((2*dftime)+dftd.extratime)/60
    TimeCost=TripHours*CostPerHour
    TruckCost=CostTruckMonth*TripHours/TruckHours
    SupplyCost=dfdemandsarray*dftd.supplycost
    TotalCost=SupplyCost+TruckCost+TimeCost+MileageCost
    #convert TotalCost into Indexed Dictionary variable for passing to pyomo as the objective function
    N = list(TotalCost.index.map(int))
    M = list(TotalCost.columns.map(int))
    dTotalCost = {(n, m):TotalCost.at[n,m] for n in N for m in M}

    #Create pyomo model
    model = pyomo.ConcreteModel()
    N = list(TotalCost.index.map(int))
    M = list(TotalCost.columns.map(int))
    model.Stations = range(1+max(N))
    model.Terminals = range(1+max(M))
    #decision variables - forcing to binary makes all supply to station from a single terminal
    model.x = pyomo.Var(model.Stations, model.Terminals, within=pyomo.Binary) ## to set as continuous, bounds=(0.0,1.0)
    #objective function - minimize total cost to supply/deliver to each station
    model.obj=pyomo.Objective(expr=sum(dTotalCost[n,m]*model.x[n,m] for n in model.Stations for m in model.Terminals))
    #constraints
    model.constraints = pyomo.ConstraintList()
    # force terminal selection for each station to sum to 1 (forces demand to be met)
    for n in model.Stations:
        model.constraints.add(sum( model.x[n,m] for m in model.Terminals) == 1.0 )
    # establish volume constraints at each terminal
    for m in model.Terminals: 
        model.constraints.add(expr=sum( model.x[n,m]*dfdemands.demands[n] for n in model.Stations) <= dftd.capacity[m]) 
    # force EITHER >= minimum or zero offtake from each terminal, using pyomo.gdp Disjunction
    # need a disjunction pair for each terminal where min or zero offtake is imposed
    model.d = gdp.Disjunction(
        model.Terminals,
        rule=lambda mdl, m: [
            sum(mdl.x[n, m] * dfdemands.demands[n] for n in mdl.Stations) == 0,
            sum(mdl.x[n, m] * dfdemands.demands[n] for n in mdl.Stations) >= dftd.Tmin[m],
        ])
    # transform the model using "big M" methodology so that it can be solved with open-source cbc solver
    xfrm=pyomo.TransformationFactory('gdp.bigm')
    xfrm.apply_to(model)    
    #run model
    solver = pyomo.SolverFactory('glpk')
    solver.options['tmlim'] = 45  # stop solver if not converged in 45 seconds
    results = solver.solve(model)
    # check for convergence
    if results.solver.termination_condition == TerminationCondition.infeasible: #means over-constrained
        objtext='infeasible'
        divright.text=objtext
        cdsInitStations.data['color']=cdsInitStations.data['failcolor']
        return()
    if results.solver.termination_condition == TerminationCondition.feasible:  # means that solver timed out
        objtext='failed to converge'
        divright.text=objtext
        cdsInitStations.data['color']=cdsInitStations.data['failcolor']
        return()
    objtext='Total cost = '+'${:,.0f}'.format((results['Problem'][0])['Lower bound']) + '/month'
    divright.text=objtext
    # retrieve model solution
    DFTerminalAssignment = pd.DataFrame()
    for n in N:
        for m in M:

            DFTerminalAssignment.at[n,m] = int(model.x[(n,m)].value) # don't convert to integer if x is continuous

    DFtn=DFTerminalAssignment.idxmax(axis=1).to_frame(name="color")
    DFtn=DFtn['color'].map(dftd.set_index('terminalnumber')['color'])
    cpg=(100*(TotalCost*DFTerminalAssignment).sum(axis=1) /dfdemands['demands']).round(2).to_frame(name="cpg")
    #StationMap=DFstations.merge(DFtn,left_index=True, right_index=True)
    DFstations.color=DFtn
    StationMap=DFstations.merge(cpg,left_index=True, right_index=True) 
    dftname=DFTerminalAssignment.idxmax(axis=1).to_frame(name="TName")
    dftname=dftname['TName'].map(dftd.set_index('terminalnumber')['terminalname'])
    StationMap=StationMap.merge(dftname,left_index=True, right_index=True) 
    cdsInitStations.data=StationMap
    
    # summarize data by terminal
    OutputTerminalTable=pd.DataFrame() # dftd.terminalname.to_frame(name="terminalname")
    OptTruckReq=TripHours*DFTerminalAssignment/TruckHours
    OutputTerminalTable['Trucks']=OptTruckReq.sum(axis=0)
    OutputTerminalTable['Demand']=(dfdemandsarray*DFTerminalAssignment).sum(axis=0)/1000
    OutputTerminalTable['Stations']=DFTerminalAssignment.sum(axis=0)
    OutputTerminalTable['Trips']=(TruckLoads*DFTerminalAssignment).sum(axis=0)
    tmap=dftd.merge(OutputTerminalTable,left_index=True, right_index=True)
    cdsTerminals.data=tmap
    columns = [
        TableColumn(field="terminalname", title="Terminal"),
        TableColumn(field="Trucks", title="Trucks"),
        TableColumn(field="Stations", title="Stations supplied"),
        TableColumn(field="Trips", title="# of trips"),
        TableColumn(field="Demand", title="Demand kgal/mo"),
    ]
    OutputTable=DataTable(source=cdsTerminals, columns=columns, width=600, height=600,index_position=None,sizing_mode='fixed')
Example 10
# Variables
m.x1 = pe.Var(within=pe.PositiveReals)
m.x2 = pe.Var(bounds=(0, 5))

# Outer constraints
m.ocons = pe.Constraint(expr=m.x1**2 + m.x2**2 >= 2)

# Disjuncts and disjunction
m.Re1 = gdp.Disjunct()
m.Re2 = gdp.Disjunct()

m.Re1.x1_cons1 = pe.Constraint(expr=pe.exp(m.x1) >= pe.exp(2))
m.Re2.x1_cons2 = pe.Constraint(expr=m.x1 <= 1)

m.r1or2 = gdp.Disjunction(expr=[m.Re1, m.Re2])

# Objective
m.obj = pe.Objective(expr=3 * m.x1 + 4 * m.x2)

#m.pprint()

# Solution

m.BigM = pe.Suffix(direction=pe.Suffix.LOCAL)
m.BigM[None] = 1000

# Apply the hull (convex hull) reformulation; "gdp.chull" is the legacy name
# kept here (newer Pyomo releases call it "gdp.hull"). Note the BigM Suffix
# above is only consulted by the "gdp.bigm" transformation.
hull = pe.TransformationFactory("gdp.chull")
hull.apply_to(m)

opt = SolverFactory("bonmin")
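
The snippet stops after creating the solver. A hedged completion, assuming bonmin is installed and SolverFactory was imported from pyomo.environ earlier in the script:

# Illustrative completion, not part of the original snippet.
results = opt.solve(m, tee=True)
print(results.solver.termination_condition)
print('x1 =', pe.value(m.x1), 'x2 =', pe.value(m.x2), 'obj =', pe.value(m.obj))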