def _get_primal_values(self):
    n = glp_get_num_cols(self.problem)
    # For mixed-integer problems the MIP solution has to be queried instead of
    # the LP relaxation.
    if glp_get_num_int(self.problem) == 0:
        return [glp_get_col_prim(self.problem, i + 1) for i in range(n)]
    else:
        return [glp_mip_col_val(self.problem, i + 1) for i in range(n)]
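# Hedged usage sketch (not part of the interface above): how _get_primal_values()
# is usually reached through optlang's public API after an optimize() call. The
# module path is the stock optlang GLPK backend; model and variable names are
# illustrative.
def _example_primal_values():
    from optlang.glpk_interface import Model, Variable, Constraint, Objective
    x = Variable('x', lb=0)
    y = Variable('y', lb=0)
    model = Model(name='primal_demo')
    model.add([Constraint(x + y, ub=10, name='c1')])
    model.objective = Objective(2 * x + y, direction='max')
    model.optimize()
    # Model.primal_values maps the column-ordered list returned by
    # _get_primal_values() back onto variable names.
    return model.primal_values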
def _add_constraints(self, constraints, sloppy=False):
    super(Model, self)._add_constraints(constraints, sloppy=sloppy)
    for constraint in constraints:
        constraint._problem = None  # This needs to be done in order to not trigger constraint._get_expression()
        glp_add_rows(self.problem, 1)
        index = glp_get_num_rows(self.problem)
        glp_set_row_name(self.problem, index, str(constraint.name).encode())

        num_cols = glp_get_num_cols(self.problem)
        index_array = ffi.new("int[{}]".format(num_cols + 1))  # intArray(num_cols + 1)
        value_array = ffi.new("double[{}]".format(num_cols + 1))  # doubleArray(num_cols + 1)

        num_vars = 0  # constraint.variables is too expensive for large problems
        offset, coef_dict, _ = parse_optimization_expression(constraint, linear=True)
        num_vars = len(coef_dict)
        for i, (var, coef) in enumerate(coef_dict.items()):
            index_array[i + 1] = var._index
            value_array[i + 1] = float(coef)

        glp_set_mat_row(self.problem, index, num_vars, index_array, value_array)
        constraint._problem = self
        self._glpk_set_row_bounds(constraint)
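# The helper _glpk_set_row_bounds() called above is not part of this excerpt. A
# minimal sketch of what it has to do, assuming GLPK's row-type constants
# (GLP_FR/GLP_UP/GLP_LO/GLP_FX/GLP_DB) and glp_set_row_bnds(); the actual
# implementation may differ.
def _glpk_set_row_bounds_sketch(self, constraint):
    if constraint.lb is None and constraint.ub is None:
        glp_set_row_bnds(self.problem, constraint._index, GLP_FR, 0.0, 0.0)
    elif constraint.lb is None:
        glp_set_row_bnds(self.problem, constraint._index, GLP_UP, 0.0, float(constraint.ub))
    elif constraint.ub is None:
        glp_set_row_bnds(self.problem, constraint._index, GLP_LO, float(constraint.lb), 0.0)
    elif constraint.lb == constraint.ub:
        glp_set_row_bnds(self.problem, constraint._index, GLP_FX, float(constraint.lb), 0.0)
    else:
        glp_set_row_bnds(self.problem, constraint._index, GLP_DB, float(constraint.lb), float(constraint.ub))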
def _add_variables(self, variables):
    for variable in variables:
        glp_add_cols(self.problem, 1)
        index = glp_get_num_cols(self.problem)
        glp_set_col_name(self.problem, index, str(variable.name).encode())
        variable.problem = self
        self._glpk_set_col_bounds(variable)
        glp_set_col_kind(self.problem, variable._index, _VTYPE_TO_GLPK_VTYPE[variable.type])
    super(Model, self)._add_variables(variables)
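# The _VTYPE_TO_GLPK_VTYPE mapping used above is not included in this excerpt. A
# plausible definition, assuming GLPK's column-kind constants, pairs optlang
# variable types with GLP_CV/GLP_IV/GLP_BV (the *_SKETCH names mark this as an
# illustration, not the module's actual table):
_GLPK_VTYPE_TO_VTYPE_SKETCH = {
    GLP_CV: 'continuous',
    GLP_IV: 'integer',
    GLP_BV: 'binary',
}
_VTYPE_TO_GLPK_VTYPE_SKETCH = {vtype: kind for kind, vtype in _GLPK_VTYPE_TO_VTYPE_SKETCH.items()}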
def get_linear_coefficients(self, variables):
    if self.problem is not None:
        num_cols = glp_get_num_cols(self.problem.problem)
        ia = ffi.new("int[{}]".format(num_cols + 1))
        da = ffi.new("double[{}]".format(num_cols + 1))
        nnz = glp_get_mat_row(self.problem.problem, self._index, ia, da)
        coefs = dict.fromkeys(variables, 0.0)
        coefs.update({
            self.problem._variables[ia[i + 1] - 1]: da[i + 1]
            for i in range(nnz)
            if self.problem._variables[ia[i + 1] - 1] in variables
        })
        return coefs
    else:
        raise Exception(
            "Can't get coefficients from solver if constraint is not in a model"
        )
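# Hedged usage sketch: querying the stored coefficients of selected variables once
# the constraint is attached to a model (all names below are illustrative).
def _example_get_linear_coefficients():
    from optlang.glpk_interface import Model, Variable, Constraint
    x = Variable('x', lb=0)
    y = Variable('y', lb=0)
    model = Model(name='coef_demo')
    model.add([Constraint(3 * x + 4 * y, ub=10, name='c1')])
    model.update()  # push the queued constraint into the GLPK problem
    # Returns {x: 3.0, y: 4.0}; variables not present in the row map to 0.0.
    return model.constraints['c1'].get_linear_coefficients([x, y])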
def _get_expression(self):
    if self.problem is not None:
        col_num = glp_get_num_cols(self.problem.problem)
        ia = ffi.new("int[{}]".format(col_num + 1))  # intArray(col_num + 1)
        da = ffi.new("double[{}]".format(col_num + 1))  # doubleArray(col_num + 1)
        nnz = glp_get_mat_row(self.problem.problem, self._index, ia, da)
        constraint_variables = [
            self.problem._variables[glp_get_col_name(self.problem.problem, ia[i])]
            for i in range(1, nnz + 1)
        ]
        expression = symbolics.add([
            symbolics.mul((symbolics.Real(da[i]), constraint_variables[i - 1]))
            for i in range(1, nnz + 1)
        ])
        self._expression = expression
    return self._expression
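# Hedged usage sketch: _get_expression() backs the lazily evaluated `expression`
# property, so the symbolic form is only rebuilt from the GLPK row when requested.
# `model` is assumed to already contain a constraint named 'c1'.
def _example_constraint_expression(model):
    constraint = model.constraints['c1']
    return constraint.expression  # e.g. 3.0*x + 4.0*y, reconstructed from the row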
def set_linear_coefficients(self, coefficients):
    if self.problem is not None:
        problem = self.problem.problem
        num_cols = glp_get_num_cols(problem)
        ia = ffi.new("int[{}]".format(num_cols + 1))
        va = ffi.new("double[{}]".format(num_cols + 1))
        # glp_get_mat_row returns the number of nonzero entries in this row.
        nnz = glp_get_mat_row(self.problem.problem, self._index, ia, va)
        variables_and_coefficients = {
            var.name: coeff for var, coeff in six.iteritems(coefficients)
        }
        final_variables_and_coefficients = {
            glp_get_col_name(problem, ia[i]): va[i] for i in range(1, nnz + 1)
        }
        final_variables_and_coefficients.update(variables_and_coefficients)
        ia = ffi.new("int[{}]".format(num_cols + 1))
        va = ffi.new("double[{}]".format(num_cols + 1))
        for i, (name, coeff) in enumerate(six.iteritems(final_variables_and_coefficients)):
            ia[i + 1] = self.problem._variables[name]._index
            va[i + 1] = float(coeff)
        glp_set_mat_row(problem, self._index, len(final_variables_and_coefficients), ia, va)
    else:
        raise Exception(
            "Can't change coefficients if constraint is not associated with a model."
        )
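# Hedged usage sketch: overwriting a single coefficient in an existing row while
# leaving the remaining entries untouched. `model` is assumed to contain a
# variable 'x' and a constraint 'c1' built from it, as in the earlier sketches.
def _example_set_linear_coefficients(model):
    x = model.variables['x']
    constraint = model.constraints['c1']
    constraint.set_linear_coefficients({x: 7.5})  # other coefficients keep their values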
def _get_reduced_costs(self):
    if self.is_integer:
        raise ValueError("Dual values are not well-defined for integer problems")
    n = glp_get_num_cols(self.problem)
    return [glp_get_col_dual(self.problem, i + 1) for i in range(n)]
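# Hedged usage sketch: reduced costs are exposed on the model once a purely
# continuous (LP) problem has been optimized; the guard above raises ValueError
# for mixed-integer problems instead.
def _example_reduced_costs(model):
    model.optimize()
    return model.reduced_costs  # delegates to _get_reduced_costs()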
def __init__(self, problem=None, *args, **kwargs):
    super(Model, self).__init__(*args, **kwargs)
    self.configuration = Configuration()

    if problem is None:
        self.problem = glp_create_prob()
        glp_create_index(self.problem)
        if self.name is not None:
            glp_set_prob_name(self.problem, str(self.name).encode())
    else:
        try:
            self.problem = problem
            glp_create_index(self.problem)
        except TypeError:
            raise TypeError("Provided problem is not a valid GLPK model.")
        row_num = glp_get_num_rows(self.problem)
        col_num = glp_get_num_cols(self.problem)

        for i in range(1, col_num + 1):
            var = Variable(
                glp_get_col_name(self.problem, i),
                lb=glp_get_col_lb(self.problem, i),
                ub=glp_get_col_ub(self.problem, i),
                problem=self,
                type=_GLPK_VTYPE_TO_VTYPE[glp_get_col_kind(self.problem, i)]
            )
            # This avoids adding the variable to the glpk problem
            super(Model, self)._add_variables([var])
        variables = self.variables

        for j in range(1, row_num + 1):
            ia = ffi.new("int[{}]".format(col_num + 1))
            da = ffi.new("double[{}]".format(col_num + 1))
            nnz = glp_get_mat_row(self.problem, j, ia, da)
            constraint_variables = [variables[ia[i] - 1] for i in range(1, nnz + 1)]

            # Since constraint expressions are lazily retrieved from the solver
            # they don't have to be built here
            # lhs = _unevaluated_Add(*[da[i] * constraint_variables[i - 1]
            #                         for i in range(1, nnz + 1)])
            lhs = 0

            glpk_row_type = glp_get_row_type(self.problem, j)
            if glpk_row_type == GLP_FX:
                row_lb = glp_get_row_lb(self.problem, j)
                row_ub = row_lb
            elif glpk_row_type == GLP_LO:
                row_lb = glp_get_row_lb(self.problem, j)
                row_ub = None
            elif glpk_row_type == GLP_UP:
                row_lb = None
                row_ub = glp_get_row_ub(self.problem, j)
            elif glpk_row_type == GLP_DB:
                row_lb = glp_get_row_lb(self.problem, j)
                row_ub = glp_get_row_ub(self.problem, j)
            elif glpk_row_type == GLP_FR:
                row_lb = None
                row_ub = None
            else:
                raise Exception(
                    "Currently, optlang does not support glpk row type %s"
                    % str(glpk_row_type)
                )

            if isinstance(lhs, int):
                lhs = symbolics.Integer(lhs)
            elif isinstance(lhs, float):
                lhs = symbolics.Real(lhs)

            constraint_id = glp_get_row_name(self.problem, j)
            for variable in constraint_variables:
                try:
                    self._variables_to_constraints_mapping[variable.name].add(constraint_id)
                except KeyError:
                    self._variables_to_constraints_mapping[variable.name] = set([constraint_id])

            super(Model, self)._add_constraints(
                [Constraint(lhs, lb=row_lb, ub=row_ub, name=constraint_id,
                            problem=self, sloppy=True)],
                sloppy=True
            )

        term_generator = (
            (glp_get_obj_coef(self.problem, index), variables[index - 1])
            for index in range(1, glp_get_num_cols(self.problem) + 1)
        )
        self._objective = Objective(
            symbolics.add([
                symbolics.mul((symbolics.Real(term[0]), term[1]))
                for term in term_generator if term[0] != 0.
            ]),
            problem=self,
            direction={GLP_MIN: 'min', GLP_MAX: 'max'}[glp_get_obj_dir(self.problem)]
        )
    glp_scale_prob(self.problem, GLP_SF_AUTO)
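# Hedged usage sketch: wrapping an existing GLPK problem object, e.g. one read from
# a CPLEX-LP file. This assumes the bindings used above also expose glp_read_lp
# with GLPK's C signature glp_read_lp(prob, parm, fname); the file name is
# illustrative.
def _example_model_from_glpk_problem(path="model.lp"):
    prob = glp_create_prob()
    glp_read_lp(prob, ffi.NULL, path.encode())
    # __init__ walks the columns and rows of `prob`, mirrors them as optlang
    # Variable and Constraint objects, and rebuilds the objective from the
    # nonzero objective coefficients.
    return Model(problem=prob)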
def term_generator():
    for index in range(1, glp_get_num_cols(self.problem.problem) + 1):
        coeff = glp_get_obj_coef(self.problem.problem, index)
        if coeff != 0.:
            yield (symbolics.Real(coeff), variables[index - 1])
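# Hedged sketch of how a generator like the one above is typically consumed: the
# nonzero objective terms are folded into a single symbolic expression, mirroring
# the objective reconstruction in Model.__init__ (the wrapper name is illustrative).
def _example_objective_expression(term_generator):
    return symbolics.add([
        symbolics.mul((coeff, variable)) for coeff, variable in term_generator()
    ])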