def history_routes_load(rmp, routes):
    # load previously generated routes from disk and add them to the RMP as columns
    with open('../problem1_route.pkl', 'rb') as pkl2:
        new_routes = pickle.load(pkl2)
    n = len(new_routes)
    for i in range(51, n):
        v = new_routes[i]
        m = len(routes) + 1
        routes[m] = {}
        routes[m]['demand'] = v['demand']
        routes[m]['column'] = v['column']
        routes[m]['distance'] = v['distance']
        routes[m]['route'] = v['route']
        added_column = gp.Column(routes[m]['column'], rmp.getConstrs())
        routes[m]['var'] = rmp.addVar(column=added_column, obj=routes[m]['distance'])
    rmp.update()
    return rmp, routes
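# Counterpart sketch (not part of the original snippet): one way the routes dict with
# the keys used above could be written to disk so that history_routes_load can read it
# back later. The file name matches the one hard-coded in history_routes_load; the
# helper name history_routes_save is hypothetical.
import pickle

def history_routes_save(routes, path='../problem1_route.pkl'):
    # keep only plain data; the Gurobi Var stored under 'var' cannot be pickled
    plain = {k: {key: v[key] for key in ('demand', 'column', 'distance', 'route')}
             for k, v in routes.items()}
    with open(path, 'wb') as pkl:
        pickle.dump(plain, pkl)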
def add_balance_slack(self):
    """Add slack variables to the balance constraints."""
    # constr_list = [self.m.getConstrByName("balance1[%s]" % (i)) for i in range(self.node_num)]
    # constr_list += [self.m.getConstrByName("balance2[%s]" % (i)) for i in range(self.node_num)]
    constr_list = [self.bound_const1[i] for i in range(self.node_num)]
    constr_list += [self.bound_const2[i] for i in range(self.node_num)]
    self.slack1 = {}
    self.slack2 = {}
    for n in range(self.node_num):
        self.slack1[n] = self.m.addVar(
            column=gb.Column(coeffs=[1, 1],
                             constrs=[self.bound_const1[n], self.bound_const2[n]]),
            name="slack1[%s]" % (n))
        self.slack2[n] = self.m.addVar(
            column=gb.Column(coeffs=[-1, -1],
                             constrs=[self.bound_const1[n], self.bound_const2[n]]),
            name="slack2[%s]" % (n))
def addColumn(self, objective, newPattern):
    """Add a new pattern-use variable (column) to the master problem."""
    ctName = 'PatternUseVar[%s]' % len(self.model.getVars())
    newColumn = gu.Column(newPattern, self.model.getConstrs())
    self.model.addVar(vtype=gu.GRB.INTEGER, lb=0, obj=objective,
                      column=newColumn, name=ctName)
    self.model.update()
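# Minimal, self-contained sketch (not from the snippet above) of the pattern that
# addColumn relies on: start a restricted master with trivial patterns, then attach a
# priced-out pattern as a gurobipy Column. All data here (demands, new_pattern) are
# hypothetical placeholders; in a real run the pattern would come from a knapsack
# pricing problem driven by the duals of the covering constraints.
import gurobipy as gu

demands = [15, 10, 8]                                   # demand per item size (made up)
master = gu.Model('cutting_stock_rmp')
use = [master.addVar(obj=1.0, name='PatternUseVar[%s]' % i) for i in range(len(demands))]
cover = [master.addConstr(use[i] >= demands[i], name='cover[%s]' % i)
         for i in range(len(demands))]
master.update()

new_pattern = [2, 1, 0]                                 # pieces of each size cut by the pattern
new_column = gu.Column(new_pattern, master.getConstrs())
master.addVar(vtype=gu.GRB.INTEGER, lb=0, obj=1.0, column=new_column,
              name='PatternUseVar[%s]' % len(master.getVars()))
master.update()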
def add_variables(model, variables, data):
    """
    Adds a variable to the existing master model
    :param model: Master model object
    :param variables: A deque with variable objects (named tuples that hold: arc, no, objective, flow)
    :param data: Problem data (nodes, periods, commodities, arcs)
    :return: Nothing
    """
    nodes, periods, commodities, arcs = data.nodes, data.periods, data.commodities, data.arcs
    constraints = model.getConstrs()
    for count_arc, arc in enumerate(data.arcs):
        for count_col, variable in enumerate(variables[count_arc]):
            no, obj = model.numvars + count_arc + count_col, variable.objective
            flow = variable.flow
            # one pair of flow-balance coefficients per non-zero flow entry, plus the arc constraint
            coefficients = np.zeros(shape=2 * np.sum(flow > 1e-6) + 1, dtype=float)
            node_in, node_out = get_2d_index(arc, nodes)
            constrs = []
            for c in range(commodities):
                for t in range(periods):
                    # node_out goes first
                    if flow[t, c] > 1e-6:
                        idx = len(constrs)
                        coefficients[idx] = -flow[t, c]
                        idx = len(constrs) + 1
                        coefficients[idx] = +flow[t, c]
                        # node_out index of constraint of (c, t)
                        idx = get_1d_index(idx1=node_out, idx2=c + 1, idx3=t,
                                           width2=commodities, width3=periods)
                        constrs.append(constraints[idx])
                        idx = get_1d_index(idx1=node_in, idx2=c + 1, idx3=t,
                                           width2=commodities, width3=periods)
                        constrs.append(constraints[idx])
            # coefficient of 1 in the arc constraint
            idx = nodes * periods * commodities + count_arc
            coefficients[len(constrs)] = 1.
            constrs.append(constraints[idx])
            column = grb.Column(coefficients, constrs)
            model.addVar(lb=0., ub=1., obj=obj, column=column,
                         name='col_{}_{}'.format(count_arc, no))
    model.update()
def add_column(self, routes):
    for route in routes:
        fea = self.path_eva_vrptw(route[1:])
        if not fea:
            print('infeasible', route[1:])
            continue
        temp_length = len(self.routes)
        added_column = gp.Column(self.routes[temp_length]['column'], self.rmp.getConstrs())
        self.routes[temp_length]['var'] = self.rmp.addVar(
            column=added_column,
            obj=self.routes[temp_length]['distance'],
            ub=1, lb=0)
def optimize(self):
    counter = 1
    while counter < self._num_iteration:
        # solve the LP relaxation of the current master
        self.relax = self.master.relax()
        self.relax.optimize()
        lam_cons = np.array([ele.Pi for ele in self.relax.getConstrs()])

        # pricing (knapsack) subproblem driven by the duals
        self.y = np.array([
            self.slave.addVar(vtype="I", lb=0) for _ in range(self.num_cons)
        ])
        self.slave.update()
        self.slave.addConstr(self.y @ np.array(self.length) <= b_stock)
        self.slave.setObjective(-self.y @ lam_cons)
        self.slave.optimize()

        # get the new column from the pricing problem
        new_cut_pattern = [int(ele.X) for ele in self.y]

        # add the new pattern, with early termination if it already exists
        if new_cut_pattern not in self.all_cut_pattern.tolist():
            self.all_cut_pattern = np.concatenate(
                (self.all_cut_pattern, np.array(new_cut_pattern).reshape(1, -1)))
        else:
            print("Early termination: the cutting pattern {} already exists.".format(new_cut_pattern))
            print("Terminate at iteration {}.\n".format(counter))
            break

        # add the new variable (column) to the master
        new_col = gb.Column()
        for i in range(self.num_cons):
            new_col.addTerms(new_cut_pattern[i], self.cons[i])
        self.x = np.append(self.x, None)
        self.x[self.num_vars - 1] = self.master.addVar(vtype="I", obj=1, column=new_col)
        self.master.update()
        counter += 1

    # once the iteration limit or early termination is reached, optimize the original problem
    self.master.optimize()
    self.res_x = np.array([int(self.x[i].X) for i in range(self.num_vars)])
def add_path(self, path, reduced_cost):
    """Add path to the master problem.

    Arguments:
        path: the path to be added
        reduced_cost: the path reduced cost
    """
    path_t = tuple(path)
    if path_t in self.paths_set:
        return
    self.paths.append(path_t)
    self.paths_set.add(path_t)
    n_customers = len(self.customers)
    path[-1] = n_customers
    coeffs = np.zeros(n_customers + 1)
    coeffs[path] = 1
    # recover the original path cost from its reduced cost and the duals it was priced against
    cost = reduced_cost + sum(self.espprc.duals[path[:-1]])
    self.model.addVar(obj=cost,
                      name=f"v{len(self.paths) - 1}",
                      column=gp.Column(coeffs, self.model.getConstrs()))
def column_generation(n, demands, capacity, distances, duals, MP_branch):
    SP_branch = SubProblem(n, demands, capacity, distances, duals)
    SP_branch.build_model()
    SP_branch.optimize()
    new_MP = None
    newAssing = [SP_branch.y[i].x for i in SP_branch.y]  # new route
    obj = get_min_dist(newAssing, distances)             # cost of the new route
    if obj + SP_branch.modelo.ObjVal < 0.0:
        newColumn = gp.Column(newAssing, MP_branch.modelo.getConstrs())
        MP_branch.modelo.addVar(vtype=GRB.BINARY, obj=obj, column=newColumn)
        MP_branch.modelo.update()
        MP_branch.RelaxOptimize()
        best_cost = MP_branch.getCosts()
        routes = MP_branch.modelo.getA().toarray()
        new_MP = copy_model(best_cost, routes, MP_branch)
    return new_MP
def branch(branch_cost, branch_routes, n, demands, capacity, distances, duals,
           solution_to_branch, MP_to_copy, queue, best_inc_obj):
    frac_ixs = []
    for ix, val in enumerate(solution_to_branch):
        if 0.0 < val < 1.0:
            frac_ixs.append(ix)
    A_mp = MP_to_copy.modelo.getA().toarray()
    locations_index = list(MP_to_copy.locations_index)
    for comb in combinations(frac_ixs, 2):
        SP_1 = SubProblem(n, demands, capacity, distances, duals)
        SP_2 = SubProblem(n, demands, capacity, distances, duals)
        SP_1.build_model()
        SP_2.build_model()
        s1_and_s2 = [A_mp[i - 1, comb[0]] == 1 and A_mp[i - 1, comb[1]] == 1
                     for i in range(len(MP_to_copy.locations_index))]
        s1_not_s2 = [A_mp[i - 1, comb[0]] == 1 and A_mp[i - 1, comb[1]] == 0
                     for i in range(len(MP_to_copy.locations_index))]
        for i in locations_index:
            locations_prime = [x for x in locations_index if x != i]
            for j in locations_prime:
                if s1_and_s2[i - 1] and s1_not_s2[j - 1]:
                    # SP_1.modelo.addConstr(SP_1.y[i - 1] + SP_1.y[j - 1] == 2)
                    # SP_2.modelo.addConstr(SP_2.y[i - 1] + SP_2.y[j - 1] == 1)
                    SP_1.modelo.addConstr(SP_1.y[i - 1] == 1)
                    SP_1.modelo.addConstr(SP_1.y[j - 1] == 1)
                    SP_2.modelo.addConstr(SP_2.y[i - 1] == 1)
                    SP_2.modelo.addConstr(SP_2.y[j - 1] == 0)
        MP_1, MP_2 = copy_models(branch_cost, branch_routes, MP_to_copy)
        SP_1.modelo.update()
        SP_1.optimize()
        if SP_1.modelo.Status == 2:
            newAssing = [SP_1.y[i].x for i in SP_1.y]     # new assignment
            obj = get_min_dist(newAssing, distances)      # cost of the new route
            if obj + SP_1.modelo.ObjVal < 0.0:
                newColumn = gp.Column(newAssing, MP_1.modelo.getConstrs())
                MP_1.modelo.addVar(vtype=GRB.BINARY, obj=obj, column=newColumn)
                MP_1.modelo.update()
                MP_1.RelaxOptimize()
                mp1_cost = MP_1.getCosts()
                mp1_routes = MP_1.modelo.getA().toarray()
                if MP_1.relax_modelo.ObjVal <= best_inc_obj:
                    queue.insert(MP_1.relax_modelo.ObjVal,
                                 copy_model(mp1_cost, mp1_routes, MP_1))
        SP_2.modelo.update()
        SP_2.optimize()
        if SP_2.modelo.Status == 2:
            newAssing = [SP_2.y[i].x for i in SP_2.y]     # new assignment
            obj = get_min_dist(newAssing, distances)      # cost of the new route
            if obj + SP_2.modelo.ObjVal < 0.0:
                newColumn = gp.Column(newAssing, MP_2.modelo.getConstrs())
                MP_2.modelo.addVar(vtype=GRB.BINARY, obj=obj, column=newColumn)
                MP_2.modelo.update()
                MP_2.RelaxOptimize()
                mp2_cost = MP_2.getCosts()
                mp2_routes = MP_2.modelo.getA().toarray()
                if MP_2.relax_modelo.ObjVal <= best_inc_obj:
                    queue.insert(MP_2.relax_modelo.ObjVal,
                                 copy_model(mp2_cost, mp2_routes, MP_2))
    return queue
objective2 = grb.LinExpr(coef3, var3)
cgsp_model.setObjective(objective2, grb.GRB.MAXIMIZE)
cgsp_model.update()
ob = cgsp_model.getObjective()
# print(ob)
cgsp_model.write('cgsp.lp')

# Column generation
K = len(set_I) + 1
while True:
    rmp_model.optimize()
    print('RMP_Objective : ', rmp_model.ObjVal)
    rmp_model.write('day2_rmp.mps')
    dual = get_dual(rmp_model)   # get the duals from rmp_model
    update_obj(dual)             # refresh the CGSP objective with the new duals
    cgsp_model.optimize()
    x_values = cgsp_model.x
    print('CGSP_Objective : ', cgsp_model.ObjVal)
    if cgsp_model.ObjVal <= 1.001:
        # no column with negative reduced cost (1 - ObjVal) is left
        break
    else:
        col = grb.Column()
        for i in range(1, n):
            col.addTerms(x_values[i - 1], temp[i])
        y_var[K] = rmp_model.addVar(obj=1, vtype=grb.GRB.CONTINUOUS,
                                    name="y_var[%d]" % K, column=col)
        rmp_model.update()
        rmp_model.write('updated.lp')
        K += 1
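# A possible implementation of the get_dual helper used in the loop above (an
# assumption -- only its name appears in the snippet): it collects the duals (Pi) of
# the RMP constraints after the LP solve, which update_obj then uses to rebuild the
# CGSP objective over coef3 / var3.
def get_dual(model):
    return [c.Pi for c in model.getConstrs()]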
# create a copy to use the FeasRelax feature later
feasmodel1 = feasmodel.copy()

# clear the objective
feasmodel.setObjective(0.0)

# add slack (artificial) variables
for c in feasmodel.getConstrs():
    sense = c.sense
    if sense != '>':
        feasmodel.addVar(obj=1.0, name="ArtN_" + c.constrName,
                         column=gp.Column([-1], [c]))
    if sense != '<':
        feasmodel.addVar(obj=1.0, name="ArtP_" + c.constrName,
                         column=gp.Column([1], [c]))

# optimize modified model
feasmodel.optimize()
feasmodel.write('feasopt.lp')

# use the FeasRelax feature: relaxobjtype=0 (minimize the sum of violations),
# minrelax=True, vrelax=False (keep variable bounds), crelax=True (relax constraints)
feasmodel1.feasRelaxS(0, True, False, True)
def column_generation(benchmark, param, node, verbose_print):
    model, x, A, sc, rj, dj = node['value']
    S = A.shape[1]
    n_iter = 1                    # number of total iterations
    verbose_print('Starting column generation')
    cgtime = process_time()       # start counting time
    while True:
        # solve the continuous relaxation of the current master
        master = model.relax()
        # write & solve master - RLP
        # master.write(param['PATH']['model_path'] + 'master_model.rlp')
        master.optimize()

        # compute the dual variables lambda_j
        lambda_j = np.array([const.Pi for const in master.getConstrs()])

        # compute reduced cost
        # rc = np.array([sc[s] - A[:, s] @ lambda_j for s in range(S)])
        # if rc < 1e-9:
        #     break

        # ** PRICING ALGORITHM **
        # call the pricing subproblem to look for columns with negative reduced cost
        new_schedules = pricing_algorithm(benchmark, rj, dj, lambda_j,
                                          param['PARAMETERS']['nnc'])
        if not len(new_schedules):
            # exit: there is no new schedule to add
            verbose_print('No new schedule to add, the current solution is already optimal')
            node['value'][2], node['value'][3] = A.copy(), sc.copy()
            return

        # update A with the new schedules; checking for column existence here would be
        # counterproductive because the algorithm would not move on to the next column
        A = np.concatenate((A, new_schedules), axis=1)

        # derive the next variable index from the name of the last variable, so that in
        # a branch-and-bound tree two variables never take the same name
        last_var_name = x[len(x) - 1].getAttr(gp.GRB.Attr.VarName)
        var_idx = int(re.search(r'(\d+)', last_var_name).group(0))

        for i in range(A.shape[1] - S):   # for each new column
            new_sched = A[:, S + i].copy()
            # compute the schedule cost and append it to sc
            new_cost = sched_cost(np.where(new_sched)[0],
                                  benchmark['p'], benchmark['w'], True)
            sc = np.append(sc, new_cost)
            # create a new Column with the corresponding coefficients and constraints
            # (the leading 1 inserted below covers the first constraint of the model)
            new_col = gp.Column(
                list(np.ones(len(np.where(new_sched)[0]) + 1)),
                list(np.array(model.getConstrs())[np.where(np.insert(new_sched, 0, 1))[0]]))
            # add a variable for the new column of the set cover
            x[S + i] = model.addVar(obj=new_cost, vtype=gp.GRB.BINARY,
                                    name=f'x_s[{var_idx + i + 1}]', column=new_col)

        S = A.shape[1]                    # re-assign the number of schedules
        # model.reset()
        model.update()                    # update the set-cover model
        n_iter += 1

        # print partial time
        part_time = int(process_time() - cgtime)
        verbose_print(f'Iteration: {n_iter}\n'
                      f'Partial time: {int(part_time / 60)} min {part_time % 60} s')