def hyperheuristicSolverHH(kp: Knapsack, items: List[Item], hh: Hyperheuristic, stopCritaria = 10):
    """Solve a knapsack instance by repeatedly applying the heuristic chosen by a hyperheuristic.

    Args:
        kp: Knapsack instance; NOTE it is mutated in place as heuristics are applied.
        items: candidate items; also mutated in place by the applied heuristics.
        hh: hyperheuristic that selects the next simple heuristic from the item state.
        stopCritaria: number of consecutive no-op heuristic applications before stopping.
            (Parameter name kept as-is for backward compatibility with keyword callers.)

    Returns:
        Tuple (kp_best, mh_best): the best knapsack state reached (by getValue()) and the
        Metaheuristic recording the accepted heuristic sequence that produced it.
    """
    # Prepare the HH variables
    hh.reset()
    mh = Metaheuristic()
    kp_best = kp.copy()
    mh_best = mh.copy()
    countNone = 0
    while countNone < stopCritaria:
        # Ask the hyperheuristic which simple heuristic to try next
        nextHeuristic = hh.getHeuristic(items)
        # Apply the heuristic to the (shared, mutable) kp/items state
        nextItem = SimpleHeuristic(nextHeuristic).apply(kp, items)
        if nextItem is None:
            # Heuristic could not place an item: count it toward the stop criterion
            countNone += 1
            continue
        countNone = 0
        # Accept the heuristic and record it in the sequence
        mh.addHeuristic(nextHeuristic)
        # Keep a snapshot of the best solution reached so far
        if kp_best.getValue() < kp.getValue():
            kp_best = kp.copy()
            mh_best = mh.copy()
    # Return the best solution reached
    return kp_best, mh_best
def RandomSearch(kp: Knapsack, items: List[Item], stopCriteria=10):
    """Random-search solver: repeatedly try a random heuristic and keep only strict improvements.

    Args:
        kp: starting knapsack state (the passed-in object is not mutated; candidates
            are applied to copies and the local reference is rebound on acceptance).
        items: candidate items (copied per attempt for the same reason).
        stopCriteria: number of consecutive rejected attempts before stopping.

    Returns:
        Tuple (kp, mh): the best knapsack reached and the Metaheuristic recording
        the sequence of accepted heuristics.
    """
    # Initialize the variables
    mh = Metaheuristic()
    heuristics = list(heuristicComparison.keys())
    countNone = 0
    while countNone < stopCriteria:
        # Choose the next heuristic uniformly at random
        nextHeuristic = np.random.choice(heuristics)
        # Work on copies so a rejected attempt leaves the current state untouched
        kp_candidate = kp.copy()
        items_candidate = items.copy()
        nextItem = SimpleHeuristic(nextHeuristic).apply(
            kp_candidate, items_candidate)
        if nextItem is None or kp_candidate.getValue() <= kp.getValue():
            # Reject: no item placed, or no strict improvement in value
            countNone += 1
            continue
        countNone = 0
        # Accept: adopt the improved candidate state
        kp = kp_candidate
        items = items_candidate
        mh.addHeuristic(nextHeuristic)
    return kp, mh
def SimulatedAnnealing(kp: Knapsack, items: List[Item], n_iterations=100, temp=200, stopCriteria=10):
    """Simulated-annealing solver over sequences of simple heuristics.

    Args:
        kp: starting knapsack state (candidates are applied to copies; the local
            reference is rebound when a move is accepted).
        items: candidate items (copied per attempt).
        n_iterations: iteration budget; raised to at least 2 * len(items).
        temp: initial temperature; the schedule is temp / (i + 1).
        stopCriteria: consecutive no-op/rejected moves before early stop.

    Returns:
        Tuple (kp_best, mh_best): best knapsack state seen during the search and the
        Metaheuristic with the heuristic sequence that produced it.
    """
    # Initialization of the variables
    mh = Metaheuristic()
    heuristics = list(heuristicComparison.keys())
    countNone = 0
    kp_best = kp.copy()
    mh_best = Metaheuristic()
    # Guarantee a budget proportional to the instance size
    n_iterations = max(n_iterations, 2 * len(items))
    for i in range(n_iterations):
        if countNone == stopCriteria:
            # Stop criteria met
            break
        # Choose the next heuristic uniformly at random
        nextHeuristic = np.random.choice(heuristics)
        kp_candidate = kp.copy()
        items_candidate = items.copy()
        nextItem = SimpleHeuristic(nextHeuristic).apply(
            kp_candidate, items_candidate)
        if nextItem is None:
            # Heuristic does not change the instance
            countNone += 1
            continue
        countNone = 0
        if kp_best.getValue() < kp_candidate.getValue():
            # Candidate improves on the best solution seen: snapshot it.
            # mh_best is the current accepted sequence plus this new heuristic.
            kp_best = kp_candidate.copy()
            mh_best = mh.copy()
            mh_best.addHeuristic(nextHeuristic)
        # Metropolis acceptance probability with temperature schedule temp/(i+1);
        # diff > 0 means the candidate is worse than the current state.
        diff = kp.getValue() - kp_candidate.getValue()
        t = temp / (i + 1)
        if -10 <= -diff / t and -diff / t <= 0:
            metropolis = np.exp(-diff / t)
        elif -diff / t <= -10:
            # exp underflows toward 0 for very negative exponents
            metropolis = 0
        else:
            # Positive exponent (candidate better): always accept below anyway
            metropolis = 1
        # Acceptance criteria: always take improvements, sometimes take worse moves
        if diff < 0 or np.random.rand() <= metropolis:
            kp = kp_candidate
            items = items_candidate
            mh.addHeuristic(nextHeuristic)
        else:
            countNone += 1
    # Return the best solution reached
    return kp_best, mh_best
def solveMetaheuristic(method: str, kp: Knapsack, items: List[Item], saveMetaheuristic=False, fileName='traindata.csv', overwrite=False):
    """Run the named metaheuristic solver on a knapsack instance.

    Args:
        method: either 'SimulatedAnnealing' or 'RandomSearch'; anything else yields 0.
        kp: knapsack instance to solve.
        items: candidate items (a copy is kept for saving the training data).
        saveMetaheuristic: when True, persist the heuristic sequence via mh.saveMetaheuristic.
        fileName: CSV file the sequence is written to.
        overwrite: whether to overwrite an existing file.

    Returns:
        The value of the solved knapsack, or 0 for an unrecognized method name.
    """
    capacity = kp.getCapacity()
    original_items = items.copy()
    # Dispatch table of supported solvers
    solvers = {
        'SimulatedAnnealing': SimulatedAnnealing,
        'RandomSearch': RandomSearch,
    }
    chosen = solvers.get(method)
    if chosen is None:
        return 0
    kp, mh = chosen(kp, items)
    # Optionally persist the sequence of heuristics as training data
    if saveMetaheuristic:
        mh.saveMetaheuristic(capacity, original_items, fileName, overwrite)
    return kp.getValue()
# Obtain the path to each test instance
instances = obtainFilenames(tapia_path, testDataset)
for instance in instances:
    # Load the instance definition
    n, W, weights, profits = loadInstance(instance)
    for method, iterations in zip(methods, methodIterations):
        total_value, total_time = 0, 0
        for run in range(iterations):
            # Fresh knapsack and item list for every run
            kp = Knapsack(W)
            items = generateItemList(weights, profits)
            start = perf_counter()
            if method == 'Hyperheuristic':
                # Run the trained HH for this run index
                # (assumes HH holds one hyperheuristic per run — confirm against setup)
                kp, mh = hyperheuristicSolverHH(kp, items, HH[run])
                result = kp.getValue()
            else:
                # Delegate to the generic solver for every other method
                result = solver(method, kp, items)
            elapsed = perf_counter() - start
            total_value += result
            total_time += elapsed
        # Record per-method averages over all runs
        resultsTestDict[method].append(total_value / iterations)
        resultsTestDict[f'{method}_time'].append(total_time / iterations)
# Persist the aggregated results
saveDictCSV(resultPath, resultsTestDict)