def make_graph(data):
    """
    Makes a networkx graph from the data

    :param data:    problem data
    :return:        graph object
    """
    graph = nx.DiGraph()
    edge_list = get_2d_index(data.arcs, data.nodes)
    cost = data.variable_cost
    graph.add_weighted_edges_from(
        zip(edge_list[0] - 1, edge_list[1] - 1, cost))
    arc = 0
    for n1, n2 in zip(*get_2d_index(data.arcs, data.nodes)):
        graph[n1 - 1][n2 - 1]['arc_id'] = arc
        arc += 1
    return graph

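# A minimal, self-contained sketch of the same networkx pattern on a made-up
# 3-node instance, showing how 'weight' and 'arc_id' end up on each edge. The
# endpoints and costs below are illustrative only, not taken from any data file.
def _example_make_graph():
    import networkx as nx
    graph = nx.DiGraph()
    # Zero-based endpoints and per-arc variable costs, as make_graph produces
    tails, heads, cost = [0, 0, 1], [1, 2, 2], [1.5, 4.0, 2.5]
    graph.add_weighted_edges_from(zip(tails, heads, cost))
    for arc_id, (n1, n2) in enumerate(zip(tails, heads)):
        graph[n1][n2]['arc_id'] = arc_id
    # Each edge now carries both its cost and its arc index
    print graph[0][1]
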
def heuristic_main(data, return_primal=False, track_time=False, pi_only=False):
    if track_time:
        start = time.time()
    model = None
    arc_popularity = np.zeros(shape=(data.periods, data.arcs.size),
                              dtype=float)
    network_duals = np.empty(shape=(data.periods, data.nodes,
                                    data.commodities), dtype=float)
    demand = data.demand
    period_demand = demand.sum(axis=1)
    commodity_demand = demand.sum(axis=0)
    for commodity in xrange(data.commodities):
        origin, destination = get_2d_index(data.od_pairs[commodity],
                                           data.nodes)
        model, flow, node_duals = solve_shortest_path(
            data, cost=data.variable_cost, commodity=commodity, model=model)
        arc_popularity[:, np.nonzero(flow)] += \
            commodity_demand[commodity] / demand.sum()
        for period in xrange(data.periods):
            period_cost = data.variable_cost + \
                data.fixed_cost[period, :] / (data.commodities * 0.1)
            model, flow, node_duals_period = solve_shortest_path(
                data, cost=period_cost, commodity=commodity, model=model)
            arc_popularity[period, np.nonzero(flow)] += \
                demand[period, commodity] / period_demand[period]
            network_duals[period, :, commodity] = \
                (node_duals + node_duals_period) / 2
        reset_model(origin, destination, model)
    if pi_only:
        return network_duals
    # Collect, for each arc, the period that maximizes the frequency of the
    # arc in the shortest path solutions
    max_periods = np.empty_like(data.arcs, dtype=int)
    max_periods[np.where(arc_popularity.max(axis=0) > 0)] = \
        arc_popularity.argmax(axis=0)[np.where(arc_popularity.max(axis=0) > 0)]
    max_periods[np.where(arc_popularity.max(axis=0) <= 0)] = -1
    if return_primal:
        objective, primal_solution, model = solve_reduced_problem(
            data=data, fixed=max_periods, return_primal=True)
        print 'objective: {}'.format(objective)
        if track_time:
            stop = time.time()
            print 'Heuristic time: {} seconds'.format(stop - start)
        return objective, network_duals, primal_solution, model
    else:
        objective, model = solve_reduced_problem(
            data=data, fixed=max_periods, return_primal=False)
        print 'objective: {}'.format(objective)
        if track_time:
            stop = time.time()
            print 'Heuristic time: {} seconds'.format(stop - start)
        return objective, network_duals, model

def add_variables(model, variables, data):
    """
    Adds variables to the existing master model

    :param model:       Master model object
    :param variables:   A deque with variable objects (named tuples that
                        hold: arc, no, objective, flow)
    :param data:        Problem data
    :return:            Nothing
    """
    nodes, periods, commodities, arcs = \
        data.nodes, data.periods, data.commodities, data.arcs
    constraints = model.getConstrs()
    for count_arc, arc in enumerate(data.arcs):
        for count_col, variable in enumerate(variables[count_arc]):
            no, obj = model.numvars + count_arc + count_col, \
                variable.objective
            flow = variable.flow
            coefficients = np.zeros(shape=2 * np.sum(flow > 10e-6) + 1,
                                    dtype=float)
            node_in, node_out = get_2d_index(arc, nodes)
            constrs = []
            for c in xrange(commodities):
                for t in xrange(periods):
                    # node_out goes first
                    if flow[t, c] > 1e-6:
                        idx = len(constrs)
                        coefficients[idx] = -flow[t, c]
                        coefficients[idx + 1] = +flow[t, c]
                        # node_out: index of the constraint of (c, t)
                        idx = get_1d_index(idx1=node_out, idx2=c + 1, idx3=t,
                                           width2=commodities, width3=periods)
                        constrs.append(constraints[idx])
                        idx = get_1d_index(idx1=node_in, idx2=c + 1, idx3=t,
                                           width2=commodities, width3=periods)
                        constrs.append(constraints[idx])
            idx = nodes * periods * commodities + count_arc
            coefficients[len(constrs)] = 1.
            constrs.append(constraints[idx])
            column = grb.Column(coefficients, constrs)
            model.addVar(lb=0., ub=1., obj=obj, column=column,
                         name='col_{}_{}'.format(count_arc, no))
    model.update()

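# A minimal sketch of the column mechanics add_variables relies on: a new
# variable enters existing constraints through a grb.Column that pairs
# coefficients with constraint objects. The model and numbers below are
# throwaway illustrations, not part of the solver.
def _example_add_column():
    import gurobipy as grb
    m = grb.Model('column_demo')
    x = m.addVar(ub=10., obj=1., name='x')
    m.update()
    c1 = m.addConstr(x <= 4., name='c1')
    c2 = m.addConstr(2. * x <= 6., name='c2')
    m.update()
    # New variable with coefficient 1 in c1 and 3 in c2, and cost 5
    col = grb.Column([1., 3.], [c1, c2])
    m.addVar(lb=0., ub=1., obj=5., column=col, name='entering_col')
    m.update()
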
def heuristic_main(data):
    """
    Naive heuristic: solves a sequence of single-period capacitated
    problems, fixing opened arcs as it iterates through the periods
    """
    arcs, nodes, periods = data.arcs.size, data.nodes, data.periods
    commodities = data.commodities
    arc_origins, arc_destinations = get_2d_index(data.arcs, nodes)
    model = make_model(data)
    open_arcs = np.zeros((periods, arcs), dtype=np.double)
    objective = 0.
    for t in xrange(periods):
        fixed_cost, variable_cost = data.fixed_cost[t, :], data.variable_cost
        demand = data.demand[t, :]
        flow, arc_open = model._flow, model._arc_open
        for arc in xrange(arcs):
            i, j = arc_origins[arc], arc_destinations[arc]
            con_name = 'cap_{}-{}'.format(i, j)
            con = model.getConstrByName(con_name)
            # We pay only for arcs that are not already open
            if arc_open[arc].lb == 0:
                arc_open[arc].obj = fixed_cost[arc]
            for c in xrange(commodities):
                model.chgCoeff(con, flow[c, arc], demand[c])
                flow[c, arc].obj = variable_cost[arc] * demand[c]
        model.optimize()
        if model.status == grb.GRB.status.INFEASIBLE:
            model.computeIIS()
            print 'model is infeasible'
            model.write(str(model.ModelName) + '_{}.ilp'.format(t))
        if model.SolCount > 0:
            objective += model.objVal
            # If we use an arc that has not been opened before, mark it as
            # open now and keep it open from this period onwards
            for count, var in enumerate(arc_open):
                if var.X > 0.1:
                    var.lb = 1.
                    var.obj = 0.
                    if np.sum(open_arcs[:t, count]) < 10e-5:
                        open_arcs[t, count] = 1.
        print 'Period : {} Objective value: {}'.format(t, objective)
    return objective, open_arcs

def get_lagrange_cost(dual_prices, arc_pointer, data):
    """
    Returns the coefficients of the flow variables for the arc subproblem

    :param dual_prices:     dual prices of each node for each commodity in
                            each period
    :param arc_pointer:     index of the arc whose subproblem is built
    :param data:            problem data
    :return:                a numpy array of cost coefficients
    """
    arc_no = data.arcs[arc_pointer]
    origin, destination = get_2d_index(arc_no, data.nodes)
    origin, destination = origin - 1, destination - 1
    arc_cost = data.variable_cost[arc_pointer]
    flow_cost = arc_cost * data.demand
    lagrange_diff = dual_prices[:, origin, :] - dual_prices[:, destination, :]
    flow_cost -= lagrange_diff
    return flow_cost

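# Toy check of the coefficient formula above: for one arc (i, j) with unit
# cost c, the flow coefficient of (period t, commodity k) is
# c * demand[t, k] - (pi[t, i, k] - pi[t, j, k]). All numbers are made up.
def _example_lagrange_cost():
    import numpy as np
    arc_cost = 2.0                            # c for arc (i, j)
    demand = np.array([[3.0, 1.0]])           # shape (periods, commodities)
    pi = np.zeros((1, 2, 2))                  # (periods, nodes, commodities)
    pi[0, 0, :] = [1.0, 0.5]                  # duals at origin i = 0
    pi[0, 1, :] = [0.2, 0.1]                  # duals at destination j = 1
    flow_cost = arc_cost * demand - (pi[:, 0, :] - pi[:, 1, :])
    print flow_cost                           # [[ 5.2  1.6]]
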
def make_local_branching_model(data, open_arcs, cutoff):
    """
    Constructs a local branching model that searches a kappa-sized
    neighborhood per period, starting from the feasible solution open_arcs

    :param data:        Problem data
    :param open_arcs:   binary solution that defines the neighborhood
    :param cutoff:      cutoff value for feasible solutions
    """
    commodities, arcs, capacity, variable_cost, nodes, demand = \
        data.commodities, data.arcs.size, data.capacity, \
        data.variable_cost, data.nodes, data.demand
    origins, destinations = data.origins, data.destinations
    periods, fixed_cost = data.periods, data.fixed_cost
    flow = np.empty(shape=(periods, commodities, arcs), dtype=object)
    arc_open = np.empty(shape=(periods, arcs), dtype=object)
    capacities = np.empty(shape=(periods, arcs), dtype=object)
    arc_origins, arc_destinations = get_2d_index(data.arcs, nodes)
    model = grb.Model('local_branching')
    for period in xrange(periods):
        for arc in xrange(arcs):
            i, j = arc_origins[arc], arc_destinations[arc]
            arc_open[period, arc] = model.addVar(
                vtype=grb.GRB.BINARY, obj=fixed_cost[period, arc],
                name='open_arc{}-{}_{}'.format(i, j, period))
            for h in xrange(commodities):
                flow[period, h, arc] = model.addVar(
                    obj=variable_cost[arc] * demand[period, h], lb=0.,
                    ub=min(1., capacity[arc] / demand[period, h]),
                    vtype=grb.GRB.CONTINUOUS,
                    name='flow{}.{},{}_{}'.format(h, i, j, period))
    model._arc_open = arc_open
    model._flow = flow
    lazy_cons = []
    priority = [t for t in xrange(periods) for arc in xrange(arcs)]
    for period in xrange(periods):
        for arc in xrange(arcs):
            # Add initial vector of binary variables previously found
            arc_open[period, arc].start = open_arcs[period, arc]
            i, j = arc_origins[arc], arc_destinations[arc]
            capacities[period, arc] = model.addConstr(
                grb.quicksum(
                    grb.LinExpr(demand[period, h], flow[period, h, arc])
                    for h in xrange(commodities)) <= capacity[arc] *
                grb.quicksum(arc_open[t, arc] for t in xrange(period + 1)),
                name='cap_{}-{}_{}'.format(i, j, period))
            if period <= data.periods / 2:
                # Strengthening (lazy) constraints: flow on an arc is only
                # allowed once the arc has opened
                for h in xrange(commodities):
                    if open_arcs[:period + 1, arc].sum(axis=0) > 0.5:
                        lazy_cons.append(model.addConstr(
                            flow[period, h, arc] <= grb.quicksum(
                                arc_open[:period + 1, arc])))
    for h in xrange(commodities):
        for n in xrange(nodes):
            rhs = 0.
            if n == origins[h]:
                rhs = 1.
            if n == destinations[h]:
                rhs = -1.
            in_arcs = get_2d_index(data.arcs, nodes)[1] == n + 1
            out_arcs = get_2d_index(data.arcs, nodes)[0] == n + 1
            for t in xrange(periods):
                lhs = grb.quicksum(flow[t, h, out_arcs]) - \
                    grb.quicksum(flow[t, h, in_arcs])
                model.addConstr(lhs=lhs, rhs=rhs, sense=grb.GRB.EQUAL,
                                name='demand_n{}c{}p{}'.format(n, h, t))
    weights = np.array([np.exp(-t) for t in xrange(periods)])
    weights = weights[::-1] / weights.sum()
    for arc in xrange(arcs):
        model.addSOS(type=grb.GRB.SOS_TYPE1,
                     vars=arc_open[:, arc].tolist(), wts=weights)
        # model.addConstr(
        #     grb.quicksum(arc_open[t, arc] for t in xrange(periods)) <= 1,
        #     name='sum_{}'.format(arc))
    model.setAttr('BranchPriority', arc_open.flatten().tolist(), priority)
    arcs_in_search = np.zeros(arcs)
    for arc in xrange(arcs):
        if open_arcs[:, arc].sum() > 0.5:
            max_period = open_arcs[:, arc].argmax()
            if max_period < 3:
                end = min(periods, max_period + 9)
                lhs = grb.quicksum([
                    arc_open[period, arc]
                    for period in xrange(max_period, end)])
                model.addConstr(lhs >= 1,
                                name='local_search.{}'.format(arc))
                arcs_in_search[arc] = 1
    model._capacities = capacities
    model._arcs_in_search = arcs_in_search
    model.params.Threads = 1
    model.params.LogFile = ""
    model.params.presolve = 0
    # model.params.cuts = 0
    # model.setParam('OutputFlag', 0)
    model.params.TimeLimit = 500
    model.params.NodeLimit = 100
    model.params.MIPFocus = 1
    model.params.MIPGap = 0.01
    model.params.Cutoff = cutoff + 0.0001
    model.params.ImproveStartTime = 200
    for con in lazy_cons:
        con.Lazy = 3
    # model.write('local_branching.lp')
    return model

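# Quick look at the SOS1 weights computed above: exponentially decaying
# values, reversed so later periods get the larger weight, then normalized.
# Gurobi uses these weights to order branching within each SOS1 set.
def _example_sos_weights():
    import numpy as np
    periods = 4
    weights = np.array([np.exp(-t) for t in xrange(periods)])
    weights = weights[::-1] / weights.sum()
    print weights  # increasing in t, sums to 1
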
def make_model(data, fixed_cost):
    """
    Formulates the regular (single-period) MCND problem
    """
    commodities, arcs, capacity, variable_cost, nodes, demand = \
        data.commodities, data.arcs.size, data.capacity, \
        data.variable_cost, data.nodes, np.amax(data.demand, axis=0)
    origins, destinations = data.origins, data.destinations
    flow = np.empty(shape=(commodities, arcs), dtype=object)
    arc_open = np.empty(shape=arcs, dtype=object)
    capacities = np.empty(shape=arcs, dtype=object)
    arc_origins, arc_destinations = get_2d_index(data.arcs, nodes)
    # Use a float dtype explicitly: zeros_like(data.origins) would inherit
    # an integer dtype and silently truncate the ratios stored below
    demand_factor = np.zeros_like(data.origins, dtype=float)
    model = grb.Model('MCND')
    model.params.Threads = 1
    model.params.LogFile = ""
    lazy_cons = []
    for arc in xrange(arcs):
        i, j = arc_origins[arc], arc_destinations[arc]
        arc_open[arc] = model.addVar(vtype=grb.GRB.BINARY,
                                     obj=fixed_cost[arc],
                                     name='open_arc{}-{}'.format(i, j))
        for h in xrange(commodities):
            # Scale the flow cost by the demand factor of each commodity
            # (peak demand over average demand)
            demand_factor[h] = demand[h] / np.average(data.demand[:, h])
            flow[h, arc] = model.addVar(
                obj=variable_cost[arc] * data.demand[0, h] * demand_factor[h],
                lb=0., ub=1., vtype=grb.GRB.CONTINUOUS,
                name='flow{}.{},{}'.format(h, i, j))
    model._arc_open = arc_open
    model._flow = flow
    for arc in xrange(arcs):
        for h in xrange(commodities):
            lazy_cons.append(model.addConstr(flow[h, arc] <= arc_open[arc]))
        i, j = arc_origins[arc], arc_destinations[arc]
        capacities[arc] = model.addConstr(
            grb.quicksum(
                grb.LinExpr(data.demand[0, h] * demand_factor[h],
                            flow[h, arc])
                for h in xrange(commodities)) <= capacity[arc] *
            arc_open[arc], name='cap_{}-{}'.format(i, j))
    for h in xrange(commodities):
        for n in xrange(nodes):
            rhs = 0.
            if n == origins[h]:
                rhs = 1.
            if n == destinations[h]:
                rhs = -1.
            in_arcs = get_2d_index(data.arcs, nodes)[1] == n + 1
            out_arcs = get_2d_index(data.arcs, nodes)[0] == n + 1
            lhs = grb.quicksum(flow[h, out_arcs]) - \
                grb.quicksum(flow[h, in_arcs])
            model.addConstr(lhs=lhs, rhs=rhs, sense=grb.GRB.EQUAL,
                            name='demand_n{}c{}'.format(n, h))
    model._capacities = capacities
    model.setParam('OutputFlag', 0)
    model.params.BarConvTol = .1
    model.params.NodeLimit = 1000
    model.params.TimeLimit = 10
    model.params.MIPGap = 0.01
    for con in lazy_cons:
        con.Lazy = 3
    # model.write(str(model.ModelName) + '.lp')
    return model

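# Sketch of the peak-to-average scaling used in make_model: flows are priced
# at the period-0 demand inflated by (peak demand) / (average demand) per
# commodity. The toy demand matrix below is illustrative only.
def _example_demand_factor():
    import numpy as np
    demand = np.array([[2., 8.], [4., 2.], [6., 2.]])  # (periods, commodities)
    peak = np.amax(demand, axis=0)                     # [6., 8.]
    factor = peak / np.average(demand, axis=0)         # [1.5, 2.0]
    print demand[0, :] * factor                        # effective demand: [3., 16.]
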
def heuristic_main(data):
    """
    Heuristic that implements the following logic.
    1.  Formulate a single-period instance
    2.  Select the maximum demand for each commodity
    3.  Multiply the variable costs by the ratio of (total demand) /
        (peak demand) of each commodity. This makes the variable costs
        commodity-dependent.
    4.  Select a weighted average of the costs of all periods
    5.  Solve a single-period problem with these data. Store the arcs that
        open in a set (potential arcs)
    6.  Select the period of total maximum demand and solve again a
        single-period problem, as before
    7.  For each period t, solve a single-period problem, with the
        restrictions that (i) arcs that opened in previous periods remain
        open and (ii) only arcs from the set of potential arcs are allowed
        to open. The demand of each period is replaced by a smoothed
        average of the next few periods
    8.  Finally, solve the entire problem by fixing the binary variables
        across the horizon.
    :return: objective function value of the heuristic and values of the
             arc variables
    """
    arcs, nodes, periods = data.arcs.size, data.nodes, data.periods
    commodities = data.commodities
    arc_origins, arc_destinations = get_2d_index(data.arcs, nodes)
    weights = np.empty(shape=periods, dtype=float)
    if periods > 1:
        # Note: assumes periods > 2; with exactly two periods the last
        # term divides by zero
        weights[0], weights[1], weights[2:] = 0.7, 0.2, 0.1 / (periods - 2)
    else:
        weights[0], weights[1:] = 1, 0.
    fixed_cost = np.average(data.fixed_cost, axis=0, weights=weights)
    upper_bounds = np.ones(shape=(data.periods, data.arcs.size))
    lower_bounds = np.zeros_like(upper_bounds)
    # Store the arcs that are open in the single-shot problem
    open_arcs = np.zeros(shape=(periods, arcs), dtype=int)
    # Take into account the peak demand only initially
    model = make_model(data, fixed_cost)
    model.optimize()
    print_model_status(model)
    potential_arcs = set()
    first_arcs = model._arc_open
    for arc in xrange(data.arcs.size):
        if first_arcs[arc].x > 0.001:
            potential_arcs.add(str(first_arcs[arc].VarName))
    # Open all arcs, taking the max demand per commodity
    modify_model(model, data)
    model.optimize()
    print_model_status(model)
    all_arcs = model._arc_open
    flow = model._flow
    # Close arcs that were not opened in the single-shot problem
    if model.SolCount > 0:
        for arc, var in enumerate(all_arcs):
            var.lb = 0.
            arc_flow = sum([flow[h, arc].x for h in xrange(data.commodities)])
            if arc_flow > 10e-5:
                potential_arcs.add(str(var.VarName))
            else:
                var.ub = 0
    # With this set of arcs (potential_arcs), solve single-period problems
    # with modified demand (so that it takes into account the demand of
    # future periods). Do this to keep the arc opening variables only
    alpha = 1.
    for t in xrange(periods):
        t_max = min(t + 1, data.periods - 1)
        fixed_cost = alpha * data.fixed_cost[t, :] + (1 - alpha) * \
            np.average(data.fixed_cost[t_max:, :], axis=0)
        variable_cost = data.variable_cost * \
            np.max(data.demand[t, :]) / np.average(data.demand[t, :])
        demand = data.demand[t, :]
        arc_open = model._arc_open
        for arc in xrange(arcs):
            i, j = arc_origins[arc], arc_destinations[arc]
            con_name = 'cap_{}-{}'.format(i, j)
            con = model.getConstrByName(con_name)
            if arc_open[arc].varName not in potential_arcs:
                arc_open[arc].ub = 0.
            # We don't pay for arcs that are already open
            if np.sum(open_arcs[:t, arc], axis=0) > 0.5:
                arc_open[arc].obj = 0.
            else:
                arc_open[arc].obj = fixed_cost[arc]
            for c in xrange(commodities):
                model.chgCoeff(con, flow[c, arc], demand[c])
                flow[c, arc].obj = variable_cost[arc] * demand[c]
        model.optimize()
        for arc in xrange(arcs):
            if arc_open[arc].x > 0.01:
                lower_bounds[t, arc] = 1.
                if np.sum(open_arcs[:t, arc]) < 10e-5:
                    open_arcs[t, arc] = 1
            else:
                upper_bounds[t, arc] = 0
        alpha -= 1. / periods
    # Solve single-period problems, selecting among arcs that were open in
    # the initial problem. Arcs that opened in previous periods are free
    objective = 0.
    flow_cost = np.zeros(shape=(periods, commodities), dtype=float)
    for t in xrange(periods):
        fixed_cost, variable_cost = data.fixed_cost[t, :], data.variable_cost
        demand = data.demand[t, :]
        flow = model._flow
        arc_open = model._arc_open
        for arc in xrange(arcs):
            arc_open[arc].ub = upper_bounds[t, arc]
            arc_open[arc].lb = lower_bounds[t, arc]
            i, j = arc_origins[arc], arc_destinations[arc]
            con_name = 'cap_{}-{}'.format(i, j)
            con = model.getConstrByName(con_name)
            # We don't pay for arcs that are already open
            if np.sum(open_arcs[:t, arc], axis=0) > 0.5:
                arc_open[arc].obj = 0.
            else:
                arc_open[arc].obj = fixed_cost[arc]
            for c in xrange(commodities):
                model.chgCoeff(con, flow[c, arc], demand[c])
                flow[c, arc].obj = variable_cost[arc] * demand[c]
        model.optimize()
        if model.status == grb.GRB.status.INFEASIBLE:
            model.computeIIS()
            print 'model is infeasible'
            model.write(str(model.ModelName) + '_{}.ilp'.format(t))
        if model.SolCount > 0:
            objective += model.objVal
            # If we use an arc that has not been opened before, mark it as
            # open now and keep it open from this period onwards
            for count, var in enumerate(all_arcs):
                if var.X > 0.1:
                    var.lb = 1.
                    if np.sum(open_arcs[:t, count]) < 10e-5:
                        open_arcs[t, count] = 1.
                    for c in xrange(commodities):
                        if flow[c, count].X > 0.0001:
                            flow_cost[t, c] += flow[c, count].X * \
                                flow[c, count].obj
        print 'Period : {} Objective value: {}'.format(t, objective)
    return objective, open_arcs, flow_cost

def populate_master(data, open_arcs=None):
    """
    Function that populates the Benders Master problem
    :param data:        Problem data structure
    :param open_arcs:   If given, it is a feasible solution used as a MIP
                        start
    :rtype:             Gurobi model object
    """
    master = Model('master-model')
    arcs, periods = xrange(data.arcs.size), xrange(data.periods)
    commodities = xrange(data.commodities)
    graph, origins, destinations = data.graph, data.origins, \
        data.destinations
    variables = np.empty(shape=(data.periods, data.arcs.size), dtype=object)
    bin_vars_idx = np.empty_like(variables, dtype=int)
    continuous_variables = np.empty(shape=(len(periods), len(commodities)),
                                    dtype=object)
    cont_vars_idx = np.empty_like(continuous_variables, dtype=int)
    start_given = open_arcs is not None
    count = 0
    # Length of the shortest path, and the shortest path itself
    arc_com, arc_obj = [], []
    lbs = [shortest_path_length(graph, origins[com], destinations[com],
                                'weight') for com in commodities]
    sps = [shortest_path(graph, origins[com], destinations[com],
                         'weight') for com in commodities]
    # Re-solve each shortest path with one arc removed at a time; record
    # the arc whose removal increases the path length the most
    for com in commodities:
        incr, best_arc = 0., 0
        for n1, n2 in zip(sps[com], sps[com][1:]):
            weight = graph[n1][n2]['weight']
            graph[n1][n2]['weight'] = 10000. * weight
            spl = shortest_path_length(graph, origins[com],
                                       destinations[com], 'weight')
            if spl > incr:
                incr = spl
                best_arc = graph[n1][n2]['arc_id']
            graph[n1][n2]['weight'] = weight
        arc_com.append(best_arc)
        arc_obj.append(incr)
    # Add variables
    for period in periods:
        for arc in arcs:
            # Binary arc variables
            variables[period, arc] = master.addVar(
                vtype=GRB.BINARY, obj=data.fixed_cost[period, arc],
                name='arc_open{}_{}'.format(period, arc))
            bin_vars_idx[period, arc] = count
            count += 1
        for com in commodities:
            lb = lbs[com] * data.demand[period, com]
            # Continuous flow_cost variables (eta)
            continuous_variables[period, com] = master.addVar(
                lb=lb, obj=1., vtype=GRB.CONTINUOUS,
                name='flow_cost{}'.format((period, com)))
            cont_vars_idx[period, com] = count
            count += 1
    master.update()
    # If a feasible solution is given, use it as a variable hint
    if start_given:
        for period in periods:
            for arc in arcs:
                variables[period, arc].VarHintVal = open_arcs[period, arc]
                variables[period, arc].VarHintPri = 1
    # Add constraints
    # Add origin-destination cuts for each commodity
    cuts_org, cuts_dest = set(), set()
    for commodity in commodities:
        arc_origin = data.origins[commodity]
        arc_destination = data.destinations[commodity]
        if arc_origin not in cuts_org:
            out_origin = get_2d_index(data.arcs, data.nodes)[0] - 1 == \
                arc_origin
            master.addConstr(lhs=np.sum(variables[0, out_origin]), rhs=1.,
                             sense=GRB.GREATER_EQUAL,
                             name='origins_c{}'.format(commodity))
            cuts_org.add(arc_origin)
        if arc_destination not in cuts_dest:
            in_dest = get_2d_index(data.arcs, data.nodes)[1] - 1 == \
                arc_destination
            master.addConstr(lhs=np.sum(variables[0, in_dest]), rhs=1.,
                             sense=GRB.GREATER_EQUAL,
                             name='destinations_c{}'.format(commodity))
            cuts_dest.add(arc_destination)
    # An arc can open at most once over the horizon
    for arc in arcs:
        master.addSOS(GRB.SOS_TYPE1, variables[:, arc].tolist(),
                      list(periods)[::-1])
    # Add extra constraints for lower bound improvement
    for com in commodities:
        arc = arc_com[com]
        base_coeffs = lbs[com] - arc_obj[com]
        for period in periods:
            lhs = LinExpr()
            coeffs = [cf * data.demand[period, com]
                      for cf in [base_coeffs] * (period + 1)]
            lhs.addTerms(coeffs, variables[:period + 1, arc].tolist())
            lhs.add(-continuous_variables[period, com])
            lhs.addConstant(arc_obj[com] * data.demand[period, com])
            master.addConstr(lhs, sense=GRB.LESS_EQUAL, rhs=0,
                             name='strengthening_{}{}'.format(period, com))
    master.params.LazyConstraints = 1
    master.params.TimeLimit = 7200
    master.params.Threads = 2
    # Find feasible solutions quickly; works better
    master.params.BranchDir = 1
    # Store the variables inside the model; we cannot access them later!
    master._variables = np.array(master.getVars())
    master._cont_vars_idx = cont_vars_idx
    master._bin_vars_idx = bin_vars_idx
    return master

def populate_dual_subproblem(data, upper_cost=None, flow_cost=None):
    """
    Function that populates the Benders Dual Subproblem, as suggested by
    the paper "Minimal Infeasible Subsystems and Benders' cuts" by
    Fischetti, Salvagnin and Zanette.
    :param data:        Problem data structure
    :param upper_cost:  Link setup decisions fixed in the master
    :param flow_cost:   The cost of the continuous variables of the master
                        problem, as explained in the paper
    :return:            Numpy array of Gurobi model objects
    """
    # Gurobi model objects
    subproblems = np.empty(shape=(data.periods, data.commodities),
                           dtype=object)
    # Construct the model for period/commodity (0, 0), then copy it and
    # change the coefficients for the rest
    dual_subproblem = Model('dual_subproblem_(0,0)')
    # Ranges we are going to need
    arcs, periods, commodities = xrange(data.arcs.size), \
        xrange(data.periods), xrange(data.commodities)
    # Origins and destinations of commodities
    origins, destinations = data.origins, data.destinations
    # We use arrays to store both variable indexes and variable objects.
    # Why both? Gurobi won't let us get the values of individual variables
    # within a callback; we can only get the values of a large array of
    # variables, in the order they were initially defined. To separate
    # them into variable categories, we have to use index arrays
    flow_index = np.zeros(shape=data.nodes, dtype=int)
    flow_duals = np.empty_like(flow_index, dtype=object)
    ubounds_index = np.zeros(shape=len(arcs), dtype=int)
    ubounds_duals = np.empty_like(ubounds_index, dtype=object)
    # Makes sure we don't add variables more than once
    flow_duals_names = set()
    if upper_cost is None:
        upper_cost = np.zeros(shape=(len(periods), len(arcs)), dtype=float)
    if flow_cost is None:
        flow_cost = np.zeros(shape=(len(periods), len(commodities)),
                             dtype=float)
    # Populate all variables in one loop, keeping track of their indexes
    # Data for period = 0, com = 0
    count = 0
    for arc in arcs:
        ubounds_duals[arc] = dual_subproblem.addVar(
            obj=-upper_cost[0, arc], lb=0.,
            name='ubound_dual_a{}'.format(arc))
        ubounds_index[arc] = count
        count += 1
        start_node, end_node = get_2d_index(data.arcs[arc], data.nodes)
        start_node, end_node = start_node - 1, end_node - 1
        for node in (start_node, end_node):
            var_name = 'flow_dual_n{}'.format(node)
            if var_name not in flow_duals_names:
                flow_duals_names.add(var_name)
                obj, ub = 0., GRB.INFINITY
                if data.origins[0] == node:
                    obj = 1.
                if data.destinations[0] == node:
                    obj = -1.
                    ub = 0.
                flow_duals[node] = dual_subproblem.addVar(
                    obj=obj, lb=0., name=var_name)
                flow_index[node] = count
                count += 1
    opt_var = dual_subproblem.addVar(obj=-flow_cost[0, 0], lb=0.,
                                     name='optimality_var')
    dual_subproblem.params.Threads = 2
    dual_subproblem.params.LogFile = ""
    dual_subproblem.update()
    # Add constraints
    demand = data.demand[0, 0]
    for arc in arcs:
        start_node, end_node = get_2d_index(data.arcs[arc], data.nodes)
        start_node, end_node = start_node - 1, end_node - 1
        lhs = flow_duals[start_node] - flow_duals[end_node] - \
            ubounds_duals[arc] - opt_var * data.variable_cost[arc] * demand
        dual_subproblem.addConstr(lhs <= 0., name='flow_a{}'.format(arc))
    # Original Fischetti model: normalization constraint
    lhs = quicksum(ubounds_duals) + opt_var
    dual_subproblem.addConstr(lhs == 1, name='normalization_constraint')
    # Store variable indices
    dual_subproblem._ubounds_index = ubounds_index
    dual_subproblem._flow_index = flow_index
    dual_subproblem._all_variables = np.array(dual_subproblem.getVars())
    dual_subproblem._flow_duals = np.take(dual_subproblem._all_variables,
                                          flow_index)
    dual_subproblem._ubound_duals = np.take(dual_subproblem._all_variables,
                                            ubounds_index)
    dual_subproblem.setParam('OutputFlag', 0)
    dual_subproblem.modelSense = GRB.MAXIMIZE
    dual_subproblem.update()
    subproblems[0, 0] = dual_subproblem
    for period, com in product(periods, commodities):
        if (period, com) != (0, 0):
            model = dual_subproblem.copy()
            optimality_var = model.getVarByName('optimality_var')
            optimality_var.Obj = -flow_cost[period, com]
            demand = data.demand[period, com]
            for node in xrange(data.nodes):
                variable = model.getVarByName('flow_dual_n{}'.format(node))
                if origins[com] == node:
                    obj = 1.
                elif destinations[com] == node:
                    obj = -1.
                else:
                    obj = 0.
                variable.obj = obj
            for arc in arcs:
                variable = model.getVarByName('ubound_dual_a{}'.format(arc))
                variable.Obj = -np.sum(upper_cost[:period + 1, arc])
                constraint = model.getConstrByName('flow_a{}'.format(arc))
                model.chgCoeff(constraint, optimality_var,
                               -demand * data.variable_cost[arc])
            model._all_variables = np.array(model.getVars())
            model.update()
            subproblems[period, com] = model
    return subproblems

def populate_dual_subproblem(data):
    """
    Function that populates the Benders subproblem in its primal (flow)
    form, following the paper "Minimal Infeasible Subsystems and Benders'
    cuts" by Fischetti, Salvagnin and Zanette.
    :param data:    Problem data structure
    :return:        Numpy array of Gurobi model objects
    """
    # Gurobi model objects
    subproblems = np.empty(shape=(data.periods, data.commodities),
                           dtype=object)
    # Construct the model for period/commodity (0, 0), then copy it and
    # change the coefficients for the rest
    subproblem = Model('subproblem_(0,0)')
    # Ranges we are going to need
    arcs, periods, commodities, nodes = xrange(data.arcs.size), xrange(
        data.periods), xrange(data.commodities), xrange(data.nodes)
    # Other data
    demand, var_cost = data.demand, data.variable_cost
    # Origins and destinations of commodities
    origins, destinations = data.origins, data.destinations
    # We keep an array of variable objects: Gurobi won't let us get the
    # values of individual variables within a callback; we can only get
    # the values of a large array of variables, in the order they were
    # initially defined
    flow_vars = np.empty_like(arcs, dtype=object)
    # Populate all variables in one loop
    # Data for period = 0, com = 0
    for arc in arcs:
        flow_vars[arc] = subproblem.addVar(obj=demand[0, 0] * var_cost[arc],
                                           lb=0., ub=1.,
                                           name='flow_a{}'.format(arc))
    subproblem.update()
    # Add flow balance constraints
    for node in nodes:
        out_arcs = get_2d_index(data.arcs, data.nodes)[0] == node + 1
        in_arcs = get_2d_index(data.arcs, data.nodes)[1] == node + 1
        lhs = quicksum(flow_vars[out_arcs]) - quicksum(flow_vars[in_arcs])
        subproblem.addConstr(lhs == 0., name='flow_bal{}'.format(node))
    subproblem.update()
    # Store variables
    subproblem._all_variables = flow_vars.tolist()
    # Set parameters
    subproblem.setParam('OutputFlag', 0)
    subproblem.modelSense = GRB.MINIMIZE
    subproblem.params.Threads = 2
    subproblem.params.LogFile = ""
    subproblem.update()
    subproblems[0, 0] = subproblem
    for period, com in product(periods, commodities):
        if (period, com) != (0, 0):
            model = subproblem.copy()
            model.ModelName = 'subproblem_({},{})'.format(period, com)
            flow_cost = data.demand[period, com] * var_cost
            model.setObjective(LinExpr(flow_cost.tolist(), model.getVars()))
            model.setAttr('rhs', model.getConstrs(), [0.0] * data.nodes)
            model._all_variables = model.getVars()
            model.update()
            subproblems[period, com] = model
    return subproblems

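# Minimal sketch of the copy-and-patch pattern used by both
# populate_dual_subproblem variants: clone a base model, then fetch
# variables by name and update their objective coefficients instead of
# rebuilding everything. Model and values below are throwaway examples.
def _example_copy_and_patch():
    import gurobipy as grb
    base = grb.Model('base')
    x = base.addVar(obj=1., name='x')
    base.update()
    base.addConstr(x <= 1., name='cap')
    base.update()
    clone = base.copy()
    clone.getVarByName('x').Obj = 7.  # only the coefficient changes
    clone.update()
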
def solve_reduced_problem(data, fixed, model=None, return_primal=False,
                          node_limit=1000, track_time=False):
    """
    Solves a multi-period capacitated network design problem where some
    arcs are fixed open. If an arc is fixed in period t, and t is the
    first period in which it is fixed, the arc can open any time in period
    t or after.
    :param data:    problem data
    :param fixed:   arcs/periods that are fixed to 1
    :return:        solution value (perhaps we should extend it to return
                    the solution)
    """
    if track_time:
        start = time.time()
    commodities, arcs, capacity, variable_cost, fixed_cost, nodes, demand, \
        periods = data.commodities, data.arcs, data.capacity, \
        data.variable_cost, data.fixed_cost, data.nodes, data.demand, \
        data.periods
    origins, destinations = get_2d_index(arcs, nodes)
    if not model:
        model = grb.Model('reduced-problem')
        model.setParam('OutputFlag', 1)
        model.setParam("TimeLimit", 100.)
        model.setParam("Threads", 2)
        model.setParam("NodeLimit", node_limit)
        model.setParam('MIPGap', 0.01)
        model.setParam("Heuristics", 1.0)
        model._flow = np.empty(shape=(periods, commodities, arcs.size),
                               dtype=object)
        model._arc_open = np.empty(shape=(periods, arcs.size), dtype=object)
        flow, arc_open = model._flow, model._arc_open
        for t in xrange(periods):
            for arc in xrange(arcs.size):
                i, j = origins[arc], destinations[arc]
                arc_open[t, arc] = model.addVar(
                    vtype=grb.GRB.BINARY, obj=fixed_cost[t, arc],
                    name='open_({},{})_t{}'.format(i, j, t + 1))
                # if fixed[arc] <= t: maybe do this later
                for h in xrange(commodities):
                    flow[t, h, arc] = model.addVar(
                        lb=0., ub=min(1., capacity[arc] / demand[t, h]),
                        obj=variable_cost[arc] * demand[t, h],
                        name='flow_c{0:d}_({1:d},{2:d})_t{3:d}'.format(
                            h + 1, i, j, t + 1))
        model.update()
        # Arc capacity and unique setup constraints
        for arc, t in product(xrange(arcs.size), xrange(periods)):
            i, j = origins[arc], destinations[arc]
            model.addConstr(
                grb.quicksum(
                    grb.LinExpr(demand[t, h], flow[t, h, arc])
                    for h in xrange(commodities)) <= capacity[arc] *
                grb.quicksum(arc_open[s, arc] for s in xrange(t + 1)),
                'cap_({0:d},{1:d})_t{2:d}'.format(i, j, t + 1))
            lhs = grb.quicksum(arc_open[l, arc] for l in xrange(0, t + 1))
            rhs = 1.
            name = 'unique_setup({0:d},{1:d})_t{2:d}'.format(i, j, t + 1)
            sign = grb.GRB.LESS_EQUAL if fixed[arc] <= t + 1 else \
                grb.GRB.EQUAL
            model.addConstr(lhs=lhs, rhs=rhs, name=name, sense=sign)
        # Flow conservation constraints
        for commodity in xrange(commodities):
            origin, destination = get_2d_index(data.od_pairs[commodity],
                                               nodes)
            for node in xrange(nodes):
                rhs = 0.
                if node + 1 == origin:
                    rhs = 1.
                if node + 1 == destination:
                    rhs = -1.
                in_arcs = get_2d_index(arcs, nodes)[1] == node + 1
                out_arcs = get_2d_index(arcs, nodes)[0] == node + 1
                for period in xrange(periods):
                    lhs = grb.quicksum(
                        flow[period, commodity, out_arcs]) - grb.quicksum(
                        flow[period, commodity, in_arcs])
                    model.addConstr(
                        lhs=lhs, sense=grb.GRB.EQUAL, rhs=rhs,
                        name='node_{}_c{}_t{}'.format(
                            node + 1, commodity + 1, period + 1))
        model.update()
    else:
        model.setParam("Nodes", 100.)
        model.setParam("TimeLimit", 100.)
        # Update the sense of the unique setup constraints; the model is
        # already populated
        for arc, t in product(xrange(arcs.size), xrange(periods)):
            i, j = origins[arc], destinations[arc]
            name = 'unique_setup({0:d},{1:d})_t{2:d}'.format(i, j, t + 1)
            constraint = model.getConstrByName(name)
            sign = grb.GRB.LESS_EQUAL if fixed[arc] <= t + 1 else \
                grb.GRB.EQUAL
            constraint.setAttr("Sense", sign)
    model.optimize()
    if DEBUG:
        model.write('trial.lp')
        for var in model.getVars():
            if str(var.VarName[0]) == 'f' and var.X > 0.001:
                name = var.VarName.split('_')
                print 'Arc: \t {} \t Commodity: {} \t Period: {} \t ' \
                      'Value: \t {}'.format(
                          name[2], int(name[1].replace('c', '')),
                          int(name[3][1]),
                          var.X * demand[int(name[3][1]) - 1,
                                         int(name[1].replace('c', '')) - 1])
    if return_primal:
        flow, arc_open = model._flow, model._arc_open
        # The namedtuple class is used here as a plain namespace
        primal_solution = namedtuple('Solution', 'objective flow arc_open')
        primal_solution.flow = np.zeros_like(flow, dtype=float)
        primal_solution.arc_open = np.ones(shape=arcs.size,
                                           dtype=int) * (-1)
        print 'Problem status: {}'.format(model.status)
        for arc in xrange(arcs.size):
            collect_flow = False
            for period in xrange(periods):
                if model._arc_open[period, arc].X > 0.5:
                    primal_solution.arc_open[arc] = period
                    collect_flow = True
                if collect_flow:
                    for commodity in xrange(commodities):
                        if flow[period, commodity, arc].X > 10e-5:
                            primal_solution.flow[period, commodity, arc] = \
                                flow[period, commodity, arc].X
        primal_solution.objective = model.getObjective().getValue()
        return primal_solution.objective, primal_solution, model
    if track_time:
        stop = time.time()
        print 'Heuristic time: {} seconds'.format(stop - start)
    return model.getObjective().getValue(), model

def solve_shortest_path(data, commodity, cost, model=None):
    """
    Solves a capacitated shortest path problem for a single commodity and
    a single period. The network solver of Gurobi is used. If a model is
    already defined, we just update the cost coefficients and the
    source/destination constraints. Otherwise, the model is populated from
    scratch.
    April 2015, Ioannis Fragkos

    :param data:        Problem data: commodities, nodes, arcs, od_pairs,
                        periods, capacity, fixed_cost, variable_cost,
                        demand
    :param commodity:   commodity whose shortest path is solved
    :param cost:        either the variable cost or the sum of fixed and
                        variable cost
    :return:            Model object, primal solution
    """
    nodes, arcs = data.nodes, data.arcs
    origin, destination = get_2d_index(data.od_pairs[commodity], nodes)
    counter = 0
    flow = np.empty_like(arcs, dtype=object)
    flow_solution = np.empty_like(arcs, dtype=float)
    dual_solution = np.empty(shape=nodes, dtype=float)
    if model:
        variables, constraints = model.getVars(), model.getConstrs()
        new_cost = cost.reshape(arcs.size).tolist()
        new_objective = grb.LinExpr(new_cost, variables)
        model.setObjective(new_objective)
        if constraints[origin - 1].RHS != 1.0:
            constraints[origin - 1].setAttr('rhs', 1.0)
            constraints[destination - 1].setAttr('rhs', -1.0)
    else:
        model = grb.Model('shortest_path')
        model.setParam('OutputFlag', 0)
        for arc in arcs:
            arc_from, arc_to = get_2d_index(arc, nodes)
            flow[counter] = model.addVar(
                lb=0.0, ub=1.0, obj=cost[counter], vtype=grb.GRB.CONTINUOUS,
                name='x({},{})'.format(arc_from, arc_to))
            counter += 1
        model.update()
        for node in xrange(nodes):
            rhs = 0.
            if node + 1 == origin:
                rhs = 1.
            if node + 1 == destination:
                rhs = -1.
            in_arcs = get_2d_index(arcs, nodes)[1] == node + 1
            out_arcs = get_2d_index(arcs, nodes)[0] == node + 1
            lhs = grb.quicksum(flow[out_arcs]) - grb.quicksum(flow[in_arcs])
            model.addConstr(lhs=lhs, sense=grb.GRB.EQUAL, rhs=rhs,
                            name='node_{}'.format(node + 1))
        model.update()
    model.optimize()
    # Collect primal and dual solutions
    for arc in xrange(arcs.size):
        arc_from, arc_to = get_2d_index(arcs[arc], nodes)
        flow_solution[arc] = model.getVarByName(
            'x({},{})'.format(arc_from, arc_to)).x
        if DEBUG and flow_solution[arc] > 0.01:
            print 'Flow: {} Arc: ({},{})'.format(
                flow_solution[arc], arc_from, arc_to)
    constraints = model.getConstrs()
    for node in xrange(nodes):
        dual_solution[node] = constraints[node].Pi
        if DEBUG and dual_solution[node] > 0.01:
            print 'Dual of node {}: {}'.format(node + 1,
                                               dual_solution[node])
    return model, flow_solution, dual_solution

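# Tiny LP illustrating the dual extraction at the end of solve_shortest_path:
# after optimizing a continuous model, each constraint exposes its dual value
# through the .Pi attribute. The model below is a throwaway illustration.
def _example_read_duals():
    import gurobipy as grb
    m = grb.Model('duals_demo')
    m.setParam('OutputFlag', 0)
    x = m.addVar(obj=3., name='x')   # minimize 3x with x >= 0
    m.update()
    c = m.addConstr(x >= 2., name='lb')
    m.optimize()
    print c.Pi  # 3.0: the marginal cost of tightening the constraint
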
def lagrange_relaxation(data, time_limit=7200):
    """
    Calculates the arc-based lagrange relaxation of the multi-period
    multi-commodity network design problem
    Ioannis Fragkos, March 2015
    """
    start_time = time.time()
    # Initialize the Lagrange upper bound and the dual prices
    pi_iter = heuristic_main(data, pi_only=True)
    upper_bound, incumbent_arcs, incumbent_flow_cost = heuristic(data, 4)
    heur_model = make_local_branching_model(
        data, open_arcs=incumbent_arcs, cutoff=upper_bound)
    lower_bound, heuristic_objective = 0., upper_bound
    # Initialize dual prices
    pi_best = np.empty(pi_iter.shape)
    np.copyto(pi_best, pi_iter)
    # Initialize quantities needed for adding/removing arcs
    srt_idx = np.argsort(np.diff(data.fixed_cost, axis=0), axis=1)
    is_same, cut_off = False, 0.7
    # Initialize models used for delaying arcs
    models = []
    for period in xrange(data.periods):
        model = Model(data, period)
        model.objective = incumbent_flow_cost[period, :].sum()
        inc = incumbent_arcs[:period + 1, :].sum(axis=0)
        for arc in xrange(data.arcs.size):
            if inc[arc] == 0.:
                start, end = data.arc_org[arc], data.arc_dest[arc]
                m_vars = model.vars.select('*', start, end)
                model.model.setAttr('ub', m_vars, [inc[arc]] * len(m_vars))
        models.append(model)
    # Initialize the violations vector, defined as
    # b(t, i, k) - sum(out_arcs(t, k, ij)) + sum(in_arcs(t, k, ji))
    violations = np.zeros(shape=(data.periods, data.nodes,
                                 data.commodities))
    primal_violations = np.zeros(violations.shape)
    # Initialize the data structure that holds columns to be added to the
    # master. We add at most 50 columns per arc and iteration. Also,
    # initialize the tolerance after which we switch to column generation
    columns_to_add = np.empty(data.arcs.shape, dtype=object)
    hash_cols = np.empty(data.arcs.shape, dtype=object)
    cg_tol = 0.0
    for arc in xrange(data.arcs.size):
        columns_to_add[arc] = deque(maxlen=50)
        hash_cols[arc] = set()
    # Define the subgradient parameters
    max_iter, omega, max_lb_iter, decrease_factor, check_heuristic_iter = \
        1000, 1.99, 50, 0.99, 300
    # Initialize the counter for lower-bound change, the step-switching
    # iteration, and the deflection parameter
    lb_iter, step_switch, alpha = 10, 100, 0.2
    # Initialize the array of the constant term
    # (that adds or subtracts the dual variables)
    const_array = np.zeros(pi_iter.shape)
    # Initialize the primal solution of the relaxation
    # (NOT of the original problem); the namedtuple class is used as a
    # plain namespace
    primal_solution = namedtuple('Solution',
                                 'objective_value open_arc flow')
    primal_solution.flow = np.zeros(shape=(data.periods, data.commodities,
                                           data.arcs.size))
    primal_solution.open_arc = np.zeros(shape=(data.periods,
                                               data.arcs.size))
    primal_solution.objective_value = upper_bound
    primal_variable_cost = np.outer(data.demand,
                                    data.variable_cost).reshape(
        data.periods, data.commodities, data.arcs.size)
    kappa = 4
    open_arcs = np.zeros(primal_solution.open_arc.shape)
    # Arc popularity in a batch of iterations
    arc_popularity = np.zeros(shape=(data.periods, data.arcs.size),
                              dtype=float)
    local_arc_popularity = np.zeros(arc_popularity.shape)
    # Used for arc popularity as well
    max_periods = np.empty(data.arcs.shape, dtype=int)
    denominator = np.array(xrange(1, max_iter + 1), dtype=float)
    denominator **= kappa
    denominator = denominator.cumsum()
    # Initialize the switch to column generation
    solve_cg = False
    # We have to initialize some elements to +1/-1 depending on the
    # origin/destination pair of each commodity
    origins, destinations = get_2d_index(data.od_pairs, data.nodes)
    origins -= 1
    destinations -= 1
    for commodity in xrange(data.commodities):
        origin, destination = origins[commodity], destinations[commodity]
        const_array[:, origin, commodity] += 1
        const_array[:, destination, commodity] -= 1
    # Here is the main subgradient loop
    for iteration in xrange(max_iter):
        # Initialize the objective function value of this iteration and of
        # a possible ip solution; reset the violations
        obj_val_iter = 0.
        violations *= 0.
        primal_violations *= 0.
        if time.time() - start_time > time_limit:
            print 'Lagrange time limit reached. Breaking out!'
            break
        # Update the approximate primal solution
        numerator = denominator[iteration - 1] if iteration - 1 >= 0 else 0
        primal_solution.flow *= numerator / denominator[iteration]
        primal_solution.open_arc *= numerator / denominator[iteration]
        # Solve a subproblem for each arc
        for arc in xrange(len(data.arcs)):
            arc_origin, arc_dest = get_2d_index(data.arcs[arc], data.nodes)
            # Get the vector of lagrange costs
            flow_coeffs = get_lagrange_cost(dual_prices=pi_iter,
                                            arc_pointer=arc, data=data)
            # Solve the subproblem and get back the solution vector
            subproblem_sol = solve_subproblem(
                lagrange_cost=flow_coeffs, demand=data.demand,
                fixed_cost=data.fixed_cost[:, arc],
                capacity=data.capacity[arc], period=data.periods - 1)
            # Add the term to the objective function of this iteration
            obj_val_iter += subproblem_sol.objective_value
            # Check if the subproblem solution prices out, and add columns
            # to the pool
            primal_objective = 0 if subproblem_sol.open_period == -1 else \
                np.sum(subproblem_sol.flow * data.demand) * \
                data.variable_cost[arc] + \
                data.fixed_cost[subproblem_sol.open_period, arc]
            if solve_cg:
                variable = Variable(primal_objective, subproblem_sol.flow,
                                    subproblem_sol.open_period)
                hashed_var = hashlib.sha1(variable.flow).hexdigest()
                if hashed_var not in hash_cols[arc]:
                    columns_to_add[arc].append(variable)
                    hash_cols[arc].update([hashed_var])
            # Update the approximate primal solution and arc popularity
            primal_solution.flow[:, :, arc] += (
                (float(iteration) ** kappa) /
                denominator[iteration]) * subproblem_sol.flow
            if subproblem_sol.open_period >= 0:
                primal_solution.open_arc[subproblem_sol.open_period, arc] \
                    += (float(iteration) ** kappa) / denominator[iteration]
                if np.random.random() > 0.01:
                    local_arc_popularity[
                        subproblem_sol.open_period, arc] = \
                        primal_solution.open_arc[
                            subproblem_sol.open_period, arc]
                else:
                    local_arc_popularity[
                        subproblem_sol.open_period, arc] = 1.
            elif np.random.random() > 0.01:
                # open_period is -1 here, so this touches the last period
                # via negative indexing
                local_arc_popularity[subproblem_sol.open_period, arc] = \
                    primal_solution.open_arc[
                        subproblem_sol.open_period, arc]
            # Update node violations
            violations[:, arc_origin - 1, :] += subproblem_sol.flow
            violations[:, arc_dest - 1, :] -= subproblem_sol.flow
            primal_violations[:, arc_origin - 1, :] += \
                primal_solution.flow[:, :, arc]
            primal_violations[:, arc_dest - 1, :] -= \
                primal_solution.flow[:, :, arc]
        # Update overall violations
        for commodity in xrange(data.commodities):
            origin, destination = origins[commodity], \
                destinations[commodity]
            violations[:, origin, commodity] -= 1
            violations[:, destination, commodity] += 1
            primal_violations[:, origin, commodity] -= 1
            primal_violations[:, destination, commodity] += 1
        # Add the constant term to the objective function
        obj_val_iter += np.sum(np.multiply(const_array, pi_iter))
        # Evaluate the primal objective
        if iteration > 0:
            primal_solution.objective_value = get_primal_objective(
                primal_solution, data.fixed_cost, primal_variable_cost)
        # Increase the lower bound counter
        lb_iter += 1
        # Check if the lower bound has improved
        if obj_val_iter > lower_bound:
            lower_bound = obj_val_iter
            omega = min(1.05 * omega, 1.2)
            alpha *= 1.05
            np.copyto(pi_best, pi_iter)
            np.copyto(arc_popularity, local_arc_popularity)
        else:
            alpha *= 0.95
        # If the primal solution has small violations, we stop
        # (remember to add solve_cg = False here)
        max_viol = np.abs(primal_violations).max()
        if max_viol < 0.01:
            for _ in xrange(2):
                heuristic_objective, incumbent_arcs, heur_model = \
                    solve_local_branching_model(
                        data, incumbent_arcs, lb_model=heur_model,
                        cutoff=upper_bound)
                if heuristic_objective < upper_bound:
                    print 'New upper bound found from heuristic: ' \
                          '{}'.format(heuristic_objective)
                    upper_bound = heuristic_objective
            print 'Stopping due to very small primal violations'
            break
        # Reduce the omega multiplier
        if lb_iter > max_lb_iter:
            omega = max(omega * decrease_factor, 10e-2)
            lb_iter = 0
        # Check if we can find a better upper bound, based on arc
        # popularity
        if iteration > 0 and iteration % check_heuristic_iter == 0:
            print 'checking heuristic.. iteration {}'.format(iteration)
            max_periods[np.where(arc_popularity.max(axis=0) > 0.2)] = \
                arc_popularity.argmax(axis=0)[
                    np.where(arc_popularity.max(axis=0) > 0.2)]
            max_periods[np.where(arc_popularity.max(axis=0) <= 0.2)] = -1
            # Round the primal solution and define the search neighborhood
            open_arcs = get_rounded_solution(primal_solution.open_arc,
                                             cut_off)
            diff_arcs = incumbent_arcs != open_arcs
            cut_off = max(0.1, cut_off - 0.1)
            # Call the add/remove arcs heuristic
            incumbent_arcs, heuristic_objective, is_same = \
                lagrange_heuristic(data, models, srt_idx, incumbent_arcs,
                                   upper_bound, diff_arcs)
            upper_bound = min(upper_bound, heuristic_objective)
        if iteration in (max_iter - 1, ):
            print 'Lower Bound: {}'.format(lower_bound)
            print 'Calculation Time: {}'.format(time.time() - start_time)
            improved = False
            heuristic_objective, incumbent_arcs, heur_model = \
                solve_fixing_model(data, heur_model, primal_solution, 0.1,
                                   upper_bound, incumbent_arcs)
            # Record whether the upper bound improved
            improved = heuristic_objective < upper_bound
            upper_bound = min(upper_bound, heuristic_objective)
            heuristic_objective, incumbent_arcs, heur_model = \
                solve_local_branching_model(
                    data, incumbent_arcs, lb_model=heur_model,
                    cutoff=upper_bound)
            # Before applying the lagrange heuristic, we need to pass the
            # incumbent objective and the upper bounds to the
            # single-period models
            # Record again whether the upper bound improved
            improved = improved or (heuristic_objective < upper_bound)
            # We only need to update the models if there is an
            # improvement, i.e., a new solution
            if improved:
                flow_vars = np.array([
                    var.X for var in heur_model.getVars()
                    if 'flow' in var.VarName
                ]).reshape(data.periods, data.arcs.size, data.commodities)
                arc_open = np.cumsum(incumbent_arcs, axis=0)
                for t in xrange(data.periods):
                    model = models[t]
                    model.set_objective_and_bounds(
                        data, flow_vars[t, :, :].flatten(), arc_open[t, :])
            for _ in xrange(4):
                upper_bound = min(upper_bound, heuristic_objective)
                diff_arcs = incumbent_arcs != open_arcs
                incumbent_arcs, heuristic_objective, is_same = \
                    lagrange_heuristic(data, models, srt_idx,
                                       incumbent_arcs, upper_bound,
                                       diff_arcs)
                upper_bound = min(upper_bound, heuristic_objective)
                heuristic_objective, incumbent_arcs, heur_model = \
                    solve_fixing_model(data, heur_model, primal_solution,
                                       0.3, upper_bound, incumbent_arcs)
                upper_bound = min(upper_bound, heuristic_objective)
        if heuristic_objective < upper_bound:
            print 'New upper bound found from heuristic: {}'.format(
                heuristic_objective)
            upper_bound = heuristic_objective
            arc_popularity *= 0
            is_same = False
        else:
            is_same = True
        # Calculate the relative gap
        gap = (upper_bound - lower_bound) / upper_bound
        # # True the first time we switch
        # if gap < cg_tol and not solve_cg:
        #     solve_cg = True
        #     master_model = master.make_master(
        #         data, heur_solution=heuristic_solution)
        # Subgradient step
        if iteration < step_switch:
            step, squared_viol = get_subgradient_step(
                'polyak', violations, lower_bound=obj_val_iter,
                upper_bound=upper_bound, omega=omega)
            alpha_hat = step * (iteration + 1)
        else:
            step, squared_viol = get_subgradient_step(
                'harmonic', violations, alpha_hat=alpha_hat,
                iteration=iteration)
        # Update the search direction - deflected subgradient!
        pi_iter -= step * (alpha * primal_violations +
                           (1 - alpha) * violations)
        if iteration % 20 == 0:
            print iteration, lower_bound, max_viol, round(
                time.time() - start_time, 0)
    print "Upper bound: {} Lower bound: {}".format(upper_bound, lower_bound)
    # exact_cg(model=master_model, data=data, columns_to_add=columns_to_add)
    return lower_bound

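# Hedged sketch of the two step rules requested from get_subgradient_step,
# whose implementation is not shown in this file. The formulas below are
# assumptions consistent with the arguments passed above: a Polyak-style
# step omega * (UB - LB) / ||g||^2, and a harmonic step alpha_hat / (k + 1).
def _example_subgradient_steps():
    import numpy as np
    violations = np.array([1.0, -2.0, 0.5])       # toy subgradient g
    squared_viol = np.square(violations).sum()    # ||g||^2 = 5.25
    lower_bound, upper_bound, omega = 90., 100., 1.5
    polyak_step = omega * (upper_bound - lower_bound) / squared_viol
    alpha_hat, iteration = polyak_step, 10
    harmonic_step = alpha_hat / (iteration + 1)
    print polyak_step, harmonic_step
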
def make_adjacency_matrix(graph, data):
    """
    Returns an adjacency matrix of the following structure:
    first index:    arc pointer, i.e., order of arc as it is read from the
                    input file (zero-based)
    second index:   commodity pointer, i.e., order of commodity as it is
                    read from the input file (zero-based)
    third index:    for a specific arc and commodity, there exists a path
                    from the origin to all other nodes. The third index
                    stands for the entry of each path in which the arc
                    participates
    value:          for an (arc, commodity) pair, the vector of nodes
                    shows in which nodes' path balance equations the arc
                    participates. An arc "participates" in a path if the
                    arc is adjacent to the path (it either starts from a
                    node in the path and goes away, or it ends in the path
                    from outside the path)
    Note: the returned values (nodes) are 1-index based, because we need
    to indicate both the node label AND whether the arc has a positive or
    negative sign. In zero-based indexing we cannot do that for node 0.
    """
    paths = np.empty(shape=data.commodities, dtype=object)
    # The last element of the last dimension of the adjacency matrix holds
    # the actual length of each path
    adjacency_matrix = np.zeros(
        shape=(data.arcs.size, data.commodities, data.nodes + 1), dtype=int)
    # origins and destinations are 1-based; all functions assume 0-based
    origins, destinations = get_2d_index(data.od_pairs, data.nodes)
    arc_origins, arc_destinations = get_2d_index(data.arcs, data.nodes)
    for index, origin in enumerate(origins):
        # For each commodity, calculate a shortest path from its origin to
        # all the nodes. paths[index] is a dictionary of the form
        # {node_label: [origin, ..., node_label], ...}
        paths[index] = nx.single_source_dijkstra_path(graph, origin - 1)
        # Remove the path to the destination; this constraint is redundant
        del paths[index][destinations[index] - 1]
        # Add each path to the adjacency matrix of each arc
        for path in paths[index].values():
            for node in path:
                # Find arcs that depart from or arrive at this node
                # (pointers of arcs that have the node as origin/dest)
                node_origins_ptr = np.where(arc_origins == node + 1)
                node_destinations_ptr = np.where(
                    arc_destinations == node + 1)
                if node_origins_ptr[0].size > 0:
                    for arc_pointer in np.nditer(node_origins_ptr):
                        arc_end = get_2d_index(data.arcs[arc_pointer],
                                               data.nodes)[1] - 1
                        if arc_end not in path:
                            length = adjacency_matrix[arc_pointer, index,
                                                      data.nodes]
                            # Adding +1 for 1-based indexing
                            adjacency_matrix[arc_pointer, index, length] = \
                                path[len(path) - 1] + 1
                            adjacency_matrix[arc_pointer, index,
                                             data.nodes] += 1
                if node_destinations_ptr[0].size > 0:
                    for arc_pointer in np.nditer(node_destinations_ptr):
                        arc_start = get_2d_index(data.arcs[arc_pointer],
                                                 data.nodes)[0] - 1
                        if arc_start not in path:
                            length = adjacency_matrix[arc_pointer, index,
                                                      data.nodes]
                            adjacency_matrix[arc_pointer, index, length] = \
                                -(path[len(path) - 1] + 1)
                            adjacency_matrix[arc_pointer, index,
                                             data.nodes] += 1
    return adjacency_matrix

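# Toy run of the networkx call that drives make_adjacency_matrix: the
# returned dictionary maps every reachable node to the node sequence of a
# shortest path from the source. The graph below is illustrative only.
def _example_dijkstra_paths():
    import networkx as nx
    g = nx.DiGraph()
    g.add_weighted_edges_from([(0, 1, 1.0), (1, 2, 1.0), (0, 2, 5.0)])
    paths = nx.single_source_dijkstra_path(g, 0)
    print paths  # {0: [0], 1: [0, 1], 2: [0, 1, 2]}
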
def make_local_branching_model(data, kappa, open_arcs, cutoff, model=None):
    """
    Constructs a local branching model that searches a kappa-sized
    neighborhood per period, starting from the feasible solution open_arcs

    :param data:        Problem data
    :param kappa:       Local branching neighborhood size (per period)
    :param open_arcs:   binary solution that defines the neighborhood
    :param cutoff:      cutoff value for feasible solutions
    :param model:       if given, an already populated model
    """
    commodities, arcs, capacity, variable_cost, nodes, demand = \
        data.commodities, data.arcs.size, data.capacity, \
        data.variable_cost, data.nodes, data.demand
    origins, destinations = data.origins, data.destinations
    periods, fixed_cost = data.periods, data.fixed_cost
    if model is None:
        flow = np.empty(shape=(periods, commodities, arcs), dtype=object)
        arc_open = np.empty(shape=(periods, arcs), dtype=object)
        capacities = np.empty(shape=(periods, arcs), dtype=object)
        arc_origins, arc_destinations = get_2d_index(data.arcs, nodes)
        model = grb.Model('local_branching')
        for period in xrange(periods):
            for arc in xrange(arcs):
                i, j = arc_origins[arc], arc_destinations[arc]
                arc_open[period, arc] = model.addVar(
                    vtype=grb.GRB.BINARY, obj=fixed_cost[period, arc],
                    name='open_arc{}-{}_{}'.format(i, j, period))
                for h in xrange(commodities):
                    flow[period, h, arc] = model.addVar(
                        obj=variable_cost[arc] * demand[period, h],
                        lb=0., ub=min(1., capacity[arc] / demand[period, h]),
                        vtype=grb.GRB.CONTINUOUS,
                        name='flow{}.{},{}_{}'.format(h, i, j, period))
        model._arc_open = arc_open
        model._flow = flow
        model.update()
        for period in xrange(periods):
            for arc in xrange(arcs):
                # Add initial vector of binary variables previously found
                arc_open[period, arc].start = open_arcs[period, arc]
                i, j = arc_origins[arc], arc_destinations[arc]
                capacities[period, arc] = model.addConstr(
                    grb.quicksum(
                        grb.LinExpr(demand[period, h],
                                    flow[period, h, arc])
                        for h in xrange(commodities)) <= capacity[arc] *
                    grb.quicksum(arc_open[t, arc]
                                 for t in xrange(period + 1)),
                    name='cap_{}-{}_{}'.format(i, j, period))
        for h in xrange(commodities):
            for n in xrange(nodes):
                rhs = 0.
                if n == origins[h]:
                    rhs = 1.
                if n == destinations[h]:
                    rhs = -1.
                in_arcs = get_2d_index(data.arcs, nodes)[1] == n + 1
                out_arcs = get_2d_index(data.arcs, nodes)[0] == n + 1
                for t in xrange(periods):
                    lhs = grb.quicksum(flow[t, h, out_arcs]) - \
                        grb.quicksum(flow[t, h, in_arcs])
                    model.addConstr(
                        lhs=lhs, rhs=rhs, sense=grb.GRB.EQUAL,
                        name='demand_n{}c{}p{}'.format(n, h, t))
        for arc in xrange(arcs):
            model.addConstr(
                grb.quicksum(arc_open[t, arc]
                             for t in xrange(periods)) <= 1,
                name='sum_{}'.format(arc))
        # Local branching constraints: the Hamming distance from open_arcs
        # within each period is at most kappa
        for period in xrange(periods):
            lhs = grb.quicksum([
                arc_open[period, arc] for arc in xrange(arcs)
                if open_arcs[period, arc] == 0])
            lhs += grb.quicksum([
                1. - arc_open[period, arc] for arc in xrange(arcs)
                if open_arcs[period, arc] == 1])
            model.addConstr(lhs <= kappa,
                            name='local_branch.{}'.format(period))
        model._capacities = capacities
        model._open_arcs_vals = open_arcs
        model._time = time.time()
        model.setParam('OutputFlag', 0)
        model.params.TimeLimit = 100.
        model.params.NodeLimit = 500
        model.params.MIPGap = 0.01
        model.params.Threads = 2
        model.params.Heuristics = 1.
        model.params.Cutoff = cutoff + 0.0001
    else:
        arc_open = model._arc_open
        for period in xrange(periods):
            constr_name = 'local_branch.{}'.format(period)
            constr = model.getConstrByName(constr_name)
            # The Hamming distance constraint, with its constant term
            # moved to the right-hand side, reads
            # sum_{open=0} x - sum_{open=1} x <= kappa - sum(open_arcs)
            constr.rhs = kappa - np.sum(open_arcs[period, :])
            for arc in xrange(arcs):
                val = 1. if open_arcs[period, arc] < 0.001 else -1.
                model.chgCoeff(constr, arc_open[period, arc], val)
        model.update()
    model.optimize(local_branching_callback)
    print 'solutions found: {}'.format(model.SolCount)
    n_sols = min(model.SolCount, 10)
    solutions = np.zeros(shape=(n_sols, periods, arcs), dtype=int)
    for sol in xrange(n_sols):
        model.setParam('SolutionNumber', sol)
        for period in xrange(periods):
            for arc in xrange(arcs):
                if arc_open[period, arc].Xn > 0:
                    solutions[sol, period, arc] = 1
    objective = model.ObjVal if n_sols else np.infty
    return objective, model

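# Arithmetic behind the local branching right-hand side used above: the
# Hamming distance sum_{x_bar=0} x + sum_{x_bar=1} (1 - x) <= kappa becomes,
# after moving the constant term, sum_{x_bar=0} x - sum_{x_bar=1} x <=
# kappa - sum(x_bar). The toy vectors below are illustrative only.
def _example_local_branching_rhs():
    import numpy as np
    x_bar = np.array([1, 0, 1, 0])      # incumbent solution for one period
    x = np.array([1, 1, 0, 0])          # candidate solution
    kappa = 2
    hamming = np.abs(x - x_bar).sum()   # 2
    lhs = x[x_bar == 0].sum() - x[x_bar == 1].sum()
    rhs = kappa - x_bar.sum()
    print hamming <= kappa, lhs <= rhs  # both True; the forms agree
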
def make_master(data, heur_solution):
    """
    Populates the master model
    :param data:            problem data
    :param heur_solution:   initial heuristic solution that is to be added
                            to the model
    :return:                master model
    """
    master_model = grb.Model("Master-Problem")
    master_model.params.OutputFlag = 0
    arcs, nodes, periods, commodities = \
        data.arcs, data.nodes, data.periods, data.commodities
    master_model._arc_index = np.empty(shape=arcs.size, dtype=object)
    origins, destinations = get_2d_index(data.od_pairs, nodes)
    heuristic = np.empty(shape=arcs.size, dtype=object)
    heur_coeffs = heur_solution.flow
    for arc in xrange(arcs.size):
        master_model._arc_index[arc] = array.array('i')
        heuristic[arc] = master_model.addVar(
            lb=0.0, ub=1.0, obj=heur_solution.objective,
            vtype=grb.GRB.CONTINUOUS, name='heur_var{}'.format(arc))
    master_model.update()
    count = 0
    for node in xrange(nodes):
        in_arcs = get_2d_index(arcs, nodes)[1] == node + 1
        out_arcs = get_2d_index(arcs, nodes)[0] == node + 1
        for commodity in xrange(commodities):
            for period in xrange(periods):
                rhs = 0.
                if node + 1 == origins[commodity]:
                    rhs = 1.
                if node + 1 == destinations[commodity]:
                    rhs = -1.
                lhs = grb.quicksum(
                    heur_coeffs[period, commodity, out_arcs] *
                    heuristic[out_arcs]) - \
                    grb.quicksum(
                        heur_coeffs[period, commodity, in_arcs] *
                        heuristic[in_arcs])
                master_model.addConstr(
                    lhs=lhs, sense=grb.GRB.GREATER_EQUAL, rhs=rhs,
                    name='p-n-c{},{},{}'.format(
                        period + 1, node + 1, commodity + 1))
                # Record the signed constraint index per arc. Note that
                # the sign convention is ambiguous when count == 0, since
                # -0 == 0
                for arc in np.where(out_arcs)[0]:
                    master_model._arc_index[arc].append(count)
                for arc in np.where(in_arcs)[0]:
                    master_model._arc_index[arc].append(-count)
                count += 1
    for arc in xrange(arcs.size):
        master_model.addConstr(heuristic[arc] <= 1.,
                               name='convexity_{}'.format(arc))
    master_model._arc_open = np.zeros(shape=(periods, arcs.size),
                                      dtype=float)
    master_model._convex_duals = np.zeros_like(arcs, dtype=float)
    master_model._node_duals = np.zeros(shape=(nodes, commodities, periods))
    master_model.update()
    return master_model