def solve_model(scores, parents, solver, cycle_finding, gomory_cut, sink_heuristic, **kwargs):
    # Variables to be globally used throughout ilp_model.cussens module
    # Not using class here because we want everything to be accessed as cussens.<name>
    global scores_input, parents_input, solver_input, cycle_finding_input, gomory_cut_input
    scores_input = scores
    parents_input = parents
    solver_input = solver
    cycle_finding_input = cycle_finding
    gomory_cut_input = gomory_cut

    # Generate initial problem
    initial_problem = main_model.model_writer(scores_input, parents_input)

    # Generate solver options for the solver selected
    solver_options = generate_solver_options(solver_input, gomory_cut_input)

    # Branch-and-Cut
    optimal_solution_found = False
    solver_results = None
    problem_list = []
    objective_upper_bound = float(-sys.maxint)  # Arbitrarily small negative number (negative of the largest int supported by the system)
    best_solution = None
    best_solution_solver_results = None

    # Add the initial formulation to problem_list
    problem_list.append(initial_problem)

    global current_problem

    if sink_heuristic:
        best_cutoff_value = -sys.maxint

    while len(problem_list) > 0:
        print ''
        print 'Current best solution = ' + str(objective_upper_bound)
        if sink_heuristic:
            print 'Current cutoff value = ' + str(best_cutoff_value)
        print 'Current number of problems in the list: ' + str(len(problem_list))

        # Pop out the first problem on problem_list
        current_problem = problem_list.pop(0)

        solver_results = bayene.ilp_solver.call_solver(current_problem.main_model, solver_options, solver=solver, warmstart=True)

        # If the subproblem is found infeasible, prune it and go back to the beginning of the loop
        if solver_results.solver.termination_condition == TerminationCondition.infeasible:
            continue
        else:
            print 'Current problem solved successfully, Objective Value = ' + str(current_problem.main_model.objective())

        # New problem to take all the cuts and heuristics results
        new_problem = copy.deepcopy(current_problem)

        ########################
        #### Cutting Planes ####
        ########################

        # Cluster (Sub-IP)
        new_problem_cluster_cut_applied = False
        print('Searching for CLUSTER CUTS..')

        # Get all the non-zero variables in the main model
        # (the non-integer check used for branching is done further below)
        current_non_zero_solution = {}
        for key, value in current_problem.main_model.chosen_parent_variable.iteritems():
            if float(value.value) > 0:
                current_non_zero_solution[key] = float(value.value)

        cluster_cuts_sub_ip_problem = cluster_cut_model.model_writer(current_non_zero_solution, len(scores_input), parents_input)
        cluster_cuts_sub_ip_solver_options = {}
        cluster_cuts_sub_ip_solver_options["LogFile"] = ''

        # Send the cluster cut IP problem to the solver
        cluster_cuts_sub_ip_solver_results = bayene.ilp_solver.call_solver(cluster_cuts_sub_ip_problem.main_model, cluster_cuts_sub_ip_solver_options, solver=solver)

        if cluster_cuts_sub_ip_solver_results.solver.termination_condition == TerminationCondition.optimal and cluster_cuts_sub_ip_problem.main_model.objective() > -1:
            # Check if found cluster size > 0
            cluster_members = [cluster_node for cluster_node in range(len(scores_input)) if cluster_cuts_sub_ip_problem.main_model.cluster_member_variable[cluster_node].value > 0]
            if len(cluster_members) > 0:
                print('Adding CLUSTER cuts to new problem.')
                new_problem.add_cluster_cuts(cluster_members)
                new_problem_cluster_cut_applied = True
            else:
                print('NO CLUSTER cuts applicable.')
        else:
            print('NO CLUSTER cuts applicable. Cluster Cut Sub-IP could not be solved.')

        # Cycle cuts
        new_problem_cycle_cut_applied = False
        if cycle_finding_input:
            print('Searching for CYCLE cuts..')
            cycles_found = find_cycles(current_problem.main_model)
            if len(cycles_found) > 0:
                print('Adding CYCLE cuts to new problem.')
                new_problem.add_cycle_cuts(cycles_found)
                new_problem_cycle_cut_applied = True
            else:
                print('NO CYCLE cuts applicable.')

        ####################
        #### Heuristics ####
        ####################

        # Sink-Finding Heuristic
        if sink_heuristic:
            heuristic_total_score, heuristic_solutions, sink_heuristic_found = find_sink_heuristic(current_problem)

        # If any cut got applied, re-queue the tightened problem and solve it again
        if new_problem_cluster_cut_applied or new_problem_cycle_cut_applied:
            if sink_heuristic and sink_heuristic_found:
                print 'Sink heuristic solution score = ' + str(heuristic_total_score)
                # Use the total score obtained as the cutoff value
                if heuristic_total_score > best_cutoff_value:
                    best_cutoff_value = heuristic_total_score
                    solver_options["Cutoff"] = best_cutoff_value
                # Clear all the current variable values
                new_problem.main_model.chosen_parent_variable.reset()
                # Insert the solutions found from sink-finding for warmstart
                for heuristic_key, heuristic_value in heuristic_solutions.iteritems():
                    new_problem.main_model.chosen_parent_variable[(heuristic_key[0], heuristic_key[1])].set_value(heuristic_value)
            problem_list.append(new_problem)
            continue

        # If current_problem's objective value is lower than or equal to objective_upper_bound, move on to the next problem
        if float(current_problem.main_model.objective()) <= objective_upper_bound:
            print('Objective value is LOWER than or EQUAL to the incumbent.')
            continue

        all_solutions_integer = True
        variable_to_branch_key = -1
        non_integer_closeness_to_one = sys.maxint
        for parent_key, parent_value in current_problem.main_model.chosen_parent_variable.iteritems():
            if parent_value.value > 0:
                if parent_value.value < 1:
                    all_solutions_integer = False
                    # Smaller the closer
                    if (1.0 - float(parent_value.value)) < non_integer_closeness_to_one:
                        variable_to_branch_key = copy.deepcopy(parent_key)
                        non_integer_closeness_to_one = 1.0 - float(parent_value.value)

        if all_solutions_integer:
            print('INTEGER solution found!')
            objective_upper_bound = float(current_problem.main_model.objective())
            best_solution = copy.deepcopy(current_problem)
            best_solution_solver_results = solver_results
        else:
            # Branch on the fractional variable whose value is closest to 1
            print 'Branching on ' + str(variable_to_branch_key) + ', ' + str(current_problem.main_model.chosen_parent_variable[variable_to_branch_key].value)
            print('BRANCHING..')
            new_problem_branch_1 = copy.deepcopy(current_problem)
            new_problem_branch_2 = copy.deepcopy(current_problem)
            # One problem bounds the variable at or below the floor of the fractional value ('leq'), the other at or above its ceiling ('geq')
            new_problem_branch_1.add_branching(variable_to_branch_key, 'leq')
            new_problem_branch_2.add_branching(variable_to_branch_key, 'geq')
            problem_list.append(new_problem_branch_1)
            problem_list.append(new_problem_branch_2)

    # Guard against the case where no feasible (integer) solution was ever found
    if best_solution is not None:
        print 'Final Objective Value: ' + str(best_solution.main_model.objective())
    else:
        print 'No feasible solution found.'
    return best_solution, best_solution_solver_results
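
# Illustrative sketch (hypothetical helper, not called by solve_model above): the branch-and-cut
# loop picks the fractional chosen_parent_variable whose LP value is closest to 1 as the branching
# variable. The same rule, applied to a plain {key: value} dict instead of Pyomo variables, could
# look like this.
def _select_branching_key_example(fractional_values):
    # fractional_values: dict mapping a variable key to its LP relaxation value in [0, 1].
    # Returns the key of the strictly fractional variable closest to 1, or None if all are integral.
    best_key = None
    best_distance_to_one = float('inf')
    for key, value in fractional_values.iteritems():
        if 0.0 < value < 1.0 and (1.0 - value) < best_distance_to_one:
            best_key = key
            best_distance_to_one = 1.0 - value
    return best_key

# Example: _select_branching_key_example({(0, 1): 1.0, (2, 3): 0.8, (4, 0): 0.4}) returns (2, 3).
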
def solve_model(scores, parents, solver, cycle_finding, gomory_cut, sink_heuristic, **kwargs):
    # Variables to be globally used throughout ilp_model.cussens module
    # Not using class here because we want everything to be accessed as cussens.<name>
    global scores_input, parents_input, solver_input, cycle_finding_input, gomory_cut_input
    scores_input = scores
    parents_input = parents
    solver_input = solver
    cycle_finding_input = cycle_finding
    gomory_cut_input = gomory_cut

    # Generate initial problem
    current_problem = main_model.model_writer(scores_input, parents_input)

    # Generate solver options for the solver selected
    solver_options = generate_solver_options(solver_input, gomory_cut_input)

    # Solution Process Control
    optimal_solution_found = False
    solver_results = None
    best_solution = None
    best_cutoff_value = -sys.maxint
    objective_progress = []
    heuristic_progress = []

    while not optimal_solution_found:
        # Print an empty line between iterations for better readability
        print ''
        if sink_heuristic:
            print 'Current cutoff value = ' + str(best_cutoff_value)

        # Send the current problem to the solver
        solver_results = bayene.ilp_solver.call_solver(current_problem.main_model, solver_options, solver=solver, warmstart=True)

        # If the problem is found infeasible, stop the solving process
        if solver_results.solver.termination_condition == TerminationCondition.infeasible:
            optimal_solution_found = True
            continue
        else:
            print 'Current problem solved successfully, Objective Value = ' + str(current_problem.main_model.objective())

        # Get all the non-zero variables in the main model
        current_non_zero_solution = {}
        # We need float() and .value because non_zero_value here is a Pyomo object (don't make a fuss with Pyomo functions)
        for non_zero_key, non_zero_value in current_problem.main_model.chosen_parent_variable.iteritems():
            if float(non_zero_value.value) > 0.0:
                current_non_zero_solution[(non_zero_key[0], non_zero_key[1])] = float(non_zero_value.value)

        ########################
        #### Cutting Planes ####
        ########################

        # Cluster (Sub-IP)
        # Generate IP Cluster Cut Finding Model
        print 'Searching for CLUSTER CUTS..'
        cluster_cut_applied = False

        # Generate cluster cut sub-IP problem
        cluster_cuts_sub_ip_problem = cluster_cut_model.model_writer(current_non_zero_solution, len(scores_input), parents_input)
        cluster_cuts_sub_ip_solver_options = {}
        cluster_cuts_sub_ip_solver_options["LogFile"] = ''

        # Send the cluster cut IP problem to the solver
        cluster_cuts_sub_ip_solver_results = bayene.ilp_solver.call_solver(cluster_cuts_sub_ip_problem.main_model, cluster_cuts_sub_ip_solver_options, solver=solver)

        # If the problem was properly solved (objective should be strictly larger than -1)
        if cluster_cuts_sub_ip_solver_results.solver.termination_condition == TerminationCondition.optimal \
                and cluster_cuts_sub_ip_problem.main_model.objective() > -1:
            # Check if found cluster size > 0
            cluster_members = [node_key for node_key in xrange(len(scores_input)) if float(cluster_cuts_sub_ip_problem.main_model.cluster_member_variable[node_key].value) > 0]
            if len(cluster_members) > 0:
                print 'Adding CLUSTER cuts to the current problem.'
                current_problem.add_cluster_cuts(cluster_members)
                cluster_cut_applied = True
            else:
                print 'NO CLUSTER cuts applicable.'
        else:
            print 'NO CLUSTER cuts applicable. Cluster Cut Sub-IP could not be solved.'

        # Cycle cuts
        cycle_cut_applied = False
        if cycle_finding_input:
            print 'Searching for CYCLE cuts..'
            cycles_found = find_cycles(current_problem.main_model)
            if len(cycles_found) > 0:
                print 'Adding CYCLE cuts to the current problem.'
                current_problem.add_cycle_cuts(cycles_found)
                cycle_cut_applied = True
            else:
                print 'NO CYCLE cuts applicable.'

        ####################
        #### Heuristics ####
        ####################

        # Sink-Finding Heuristic
        if sink_heuristic:
            print 'Performing Sink-finding algorithm..'
            heuristic_total_score, heuristic_solutions, sink_heuristic_found = find_sink_heuristic(current_problem)

        #####################################
        #### Moving on to Next Iteration ####
        #####################################

        # If either a cluster or a cycle cut got applied, we should solve the problem again
        if cluster_cut_applied or cycle_cut_applied:
            objective_progress.append(current_problem.main_model.objective())

            # If we have a heuristic solution, warm-start current_problem with it
            # so the solver can make use of the solution.
            if sink_heuristic and sink_heuristic_found:
                print 'Sink heuristic solution score = ' + str(heuristic_total_score)
                # Use the total score obtained as the cutoff value
                if heuristic_total_score > best_cutoff_value:
                    best_cutoff_value = heuristic_total_score
                    # solver_options["Cutoff"] = best_cutoff_value
                    heuristic_progress.append(best_cutoff_value)
                # Clear all the current variable values
                current_problem.main_model.chosen_parent_variable.reset()
                # Insert the solutions found from sink-finding for warmstart
                for heuristic_key, heuristic_value in heuristic_solutions.iteritems():
                    current_problem.main_model.chosen_parent_variable[(heuristic_key[0], heuristic_key[1])].set_value(heuristic_value)

            # Go to the next iteration of this loop
            continue

        # If we don't need to solve the problem again, the optimal solution has been found.
        print 'INTEGER solution found!'
        best_solution = copy.deepcopy(current_problem)
        optimal_solution_found = True

    # Guard against the case where the initial problem was infeasible and no solution exists
    if best_solution is not None:
        print 'Final Objective Value: ' + str(best_solution.main_model.objective())
        print 'Number of Cluster Cut Iteration: ' + str(best_solution.add_cluster_cuts_count)
        print 'Number of Cycle Cut Iteration: ' + str(best_solution.add_cycle_cuts_count)
        print 'Number of Total Cycle Cuts: ' + str(best_solution.add_cycle_total_count)
    else:
        print 'No feasible solution found.'
    return best_solution, solver_results, objective_progress, heuristic_progress
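
# Illustrative sketch (hypothetical helper, not part of the original module): one way the two
# progress lists returned by the cutting-plane solve_model above could be inspected after a run.
# objective_progress records the relaxation objective at each iteration where a cut was added;
# heuristic_progress records the cutoff value each time the sink-finding heuristic improved it,
# so the two lists are generally not the same length.
def _summarize_progress_example(objective_progress, heuristic_progress):
    for iteration, objective_value in enumerate(objective_progress):
        print 'Cut iteration ' + str(iteration) + ': relaxation objective = ' + str(objective_value)
    for improvement, cutoff_value in enumerate(heuristic_progress):
        print 'Heuristic improvement ' + str(improvement) + ': cutoff = ' + str(cutoff_value)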