def run_rbf_optimization(
        mus,
        sigmas,
        ids,
        num_clusters=2,
        num_objectives=3,
        verbose=False,
        optimization_iterations=10,
        # Generalized: solver locations were hard-coded absolute paths in the
        # body; they are now parameters with the original values as defaults,
        # so existing callers are unaffected but other machines can override.
        minlp_solver_path='/home/amarildo/Bonmin-stable/build/bin/bonmin',
        nlp_solver_path='/home/amarildo/Bonmin-stable/build/bin/ipopt',
):
    """Search for a hard cluster assignment of agents using RBFOpt.

    Treats the flattened (num_clusters x num_agents) assignment matrix as a
    black-box optimization variable in [0, 1] and minimizes the sum of the
    per-cluster losses returned by the weight-assignment optimizer.

    Args:
        mus, sigmas, ids: per-agent data forwarded to
            make_weights_assignment_function.
        num_clusters: number of clusters (rows of the assignment matrix).
        num_objectives: dimensionality of each cluster's weight vector.
        verbose: forwarded to the inner weight optimizer.
        optimization_iterations: iterations for the inner weight optimization.
        minlp_solver_path, nlp_solver_path: paths to the Bonmin/Ipopt
            binaries used by RBFOpt.

    Returns:
        (best_assignment, best_weights, loss_value) on success, or
        (0, 0, 0) if the `rbfopt` package is not installed.
    """
    # Narrow try: only the optional-dependency import can raise ImportError;
    # the original wrapped the whole body, which could also have masked
    # ImportErrors raised by unrelated code.
    try:
        import rbfopt
    except ImportError:
        print("RBF optimizer not installed")
        return 0, 0, 0

    settings = rbfopt.RbfoptSettings(
        minlp_solver_path=minlp_solver_path,
        nlp_solver_path=nlp_solver_path)
    num_agents = len(mus)
    weight_calculator = make_weights_assignment_function(
        mus,
        sigmas,
        ids,
        num_objectives=num_objectives,
        verbose=verbose,
        num_iters=optimization_iterations)

    def obj_func(assignment):
        # Total loss over all clusters for one flat assignment vector.
        assignment = assignment.reshape((num_clusters, num_agents))
        obj = 0
        for i in range(assignment.shape[0]):
            w, loss = weight_calculator(assignment[i])
            obj += loss
        return obj

    num_opt_params = num_clusters * num_agents
    # All variables are real-valued ('R') and bounded in [0, 1].
    bb = rbfopt.RbfoptUserBlackBox(num_opt_params,
                                   np.array([0] * num_opt_params),
                                   np.array([1] * num_opt_params),
                                   np.array(['R'] * num_opt_params),
                                   obj_func)
    alg = rbfopt.RbfoptAlgorithm(settings, bb)
    val, x, itercount, evalcount, fast_evalcount = alg.optimize()
    print("Value:", val)
    print("Assignment:")
    print(x)
    print("Iteration Count:", itercount)
    print("Evaluation Count:", evalcount)
    print("Fast Evaluation Count:", fast_evalcount)

    # Recompute per-cluster weights and the total loss for the best point.
    loss_value = 0.
    best_weights = np.zeros(shape=(num_clusters, num_objectives))
    best_assignment = x.reshape((num_clusters, num_agents))
    for i in range(num_clusters):
        w, loss = weight_calculator(best_assignment[i])
        best_weights[i] = w
        loss_value += loss
    return best_assignment, best_weights, loss_value
def run_hierarchical_clustering(mus,
                                sigmas,
                                ids,
                                num_clusters=2,
                                num_objectives=3,
                                verbose=False,
                                optimization_iterations=10,
                                display=False,
                                criterion='maxclust_monocrit'):
    """Cluster agents with complete-linkage agglomerative clustering.

    Each agent is one-hot encoded; the distance between two groups is the
    loss the weight-assignment optimizer reports for their union, so merges
    greedily keep the summed cluster losses small.

    Args:
        mus, sigmas, ids: per-agent data forwarded to
            make_weights_assignment_function.
        num_clusters: number of flat clusters extracted from the dendrogram.
        num_objectives: dimensionality of each cluster's weight vector.
        verbose: forwarded to the inner weight optimizer.
        optimization_iterations: iterations for the inner weight optimization.
        display: if True, plot the dendrogram before extracting clusters.
        criterion: NOTE(review) — currently only printed; fcluster is always
            called with 'maxclust' (using 'maxclust_monocrit' would require a
            monocrit array). Kept for interface compatibility.

    Returns:
        (best_assignment, best_weights, loss_value): binary
        cluster-by-agent matrix, per-cluster weight vectors, and the summed
        per-cluster losses.
    """
    num_agents = len(mus)
    weight_calculator = make_weights_assignment_function(
        mus,
        sigmas,
        ids,
        num_objectives=num_objectives,
        verbose=verbose,
        num_iters=optimization_iterations)
    # One-hot encoding of agents: row i marks agent i alone.
    X = np.eye(num_agents)

    def distance_func(i, j):
        # Group distance = optimizer loss on the union of the two groups.
        assignment = np.logical_or(i, j)
        w, dist = weight_calculator(assignment)
        return dist

    Z = linkage(
        X,
        # 'complete': dissimilarity is the max distance across all pairs of
        # records between two clusters.
        method='complete',
        metric=distance_func)
    # You can peek into the Z matrix to see how clusters are merged.
    if display:
        # Calculate the full dendrogram and visualize it.
        plt.figure(figsize=(30, 10))
        # Fix: the original used the undefined name `agent_to_data`, raising
        # NameError whenever display=True. Label leaves with the agent ids
        # instead (assumes `ids` holds one label per agent — TODO confirm
        # against callers).
        dendrogram(Z, labels=list(ids))
        plt.show()
    print(criterion)
    clusters = fcluster(Z, num_clusters, criterion='maxclust')

    # Convert 1-based flat cluster labels to a binary assignment matrix.
    best_assignment = np.zeros(shape=(num_clusters, num_agents))
    for i in range(num_agents):
        best_assignment[clusters[i] - 1, i] = 1.

    # Calculate weights and loss value for the chosen partition.
    best_weights = np.zeros(shape=(num_clusters, num_objectives))
    loss_value = 0.
    for i in range(num_clusters):
        w, loss = weight_calculator(best_assignment[i])
        best_weights[i] = w
        loss_value += loss
    return best_assignment, best_weights, loss_value
def solve_exact(mus,
                sigmas,
                ids,
                num_clusters=2,
                num_objectives=3,
                verbose=False,
                optimization_iterations=10):
    """Brute-force the optimal partition of agents into num_clusters groups.

    Enumerates every set partition produced by `algorithm_u` and keeps the
    one with the smallest summed per-cluster loss. Exponential in the number
    of agents — only viable for small problems.

    Args:
        mus, sigmas, ids: per-agent data forwarded to
            make_weights_assignment_function.
        num_clusters: number of non-empty groups in each partition.
        num_objectives: dimensionality of each cluster's weight vector.
        verbose: forwarded to the inner weight optimizer.
        optimization_iterations: iterations for the inner weight optimization.

    Returns:
        (best_assignment, best_omega, min_loss): binary cluster-by-agent
        matrix, per-cluster weight vectors, and the minimal summed loss.
    """
    num_agents = len(mus)
    assert num_clusters <= num_agents
    weight_calculator = make_weights_assignment_function(
        mus,
        sigmas,
        ids,
        num_objectives=num_objectives,
        verbose=verbose,
        num_iters=optimization_iterations)

    min_loss = np.inf
    candidate_assignment = np.zeros((num_clusters, num_agents))
    candidate_omega = np.zeros((num_clusters, num_objectives))
    # Fix: the original bound best_assignment/best_omega only inside the
    # loop, so an empty partition generator caused NameError at return.
    best_assignment = np.zeros((num_clusters, num_agents))
    best_omega = np.zeros((num_clusters, num_objectives))

    agents = np.arange(num_agents)
    for partition in algorithm_u(agents, num_clusters):
        candidate_loss = 0
        for i, p in enumerate(partition):
            # Rebuild row i as the indicator vector of group p.
            candidate_assignment[i] = 0
            for index in p:
                candidate_assignment[i, index] = 1
            w, loss = weight_calculator(candidate_assignment[i])
            candidate_omega[i] = w
            candidate_loss += loss
        if candidate_loss < min_loss:
            min_loss = candidate_loss
            # Copy: the candidate buffers are reused on the next iteration.
            best_assignment = np.copy(candidate_assignment)
            best_omega = np.copy(candidate_omega)
    return best_assignment, best_omega, min_loss
def em_clustering(mus,
                  sigmas,
                  ids,
                  lamb=1,
                  num_clusters=2,
                  num_objectives=3,
                  max_iterations=100,
                  tolerance=1e-7,
                  verbose=False,
                  optimization_iterations=10,
                  cluster_iterations=1):
    """EM-style soft clustering of agents over shared objective weights.

    Alternates two steps until the soft assignment stabilizes:
      1. For the current soft assignment `p`, fit one weight vector
         (`omega[i]`) per cluster via the weight-assignment optimizer.
      2. For the current omegas, recompute each agent's soft membership as a
         softmax over `-lamb * loss` across clusters (`lamb` acts as an
         inverse temperature), while accumulating the hard-min loss.

    The whole procedure is restarted `cluster_iterations` times from random
    initializations and the lowest-loss assignment seen anywhere is kept.

    Args:
        mus, sigmas, ids: per-agent data; assumed index-aligned so that
            (mus[i], sigmas[i], ids[i]) describes agent i — TODO confirm.
        lamb: softmax inverse-temperature for the assignment update.
        num_clusters: number of clusters.
        num_objectives: dimensionality of each cluster's weight vector.
        max_iterations: cap on EM iterations per restart.
        tolerance: max-abs change in `p` below which a restart converges.
        verbose: forwarded to the inner weight optimizer.
        optimization_iterations: iterations for the inner weight optimization.
        cluster_iterations: number of random restarts.

    Returns:
        (best_assignment, best_weights, min_loss): the best soft
        cluster-by-agent matrix seen, weights refit to it, and its loss.
        NOTE(review): best_assignment stays None if the loop body never
        runs (e.g. max_iterations=0), which would fail at the final refit.
    """
    num_agents = len(mus)
    # One loss function per agent, evaluated at candidate weight vectors.
    loss_functions = []
    for i, mu in enumerate(mus):
        loss_functions.append(make_loss_function(mu, sigmas[i], ids[i]))
    weight_calculator = make_weights_assignment_function(
        mus,
        sigmas,
        ids,
        num_objectives=num_objectives,
        verbose=verbose,
        num_iters=optimization_iterations)
    min_loss = np.inf
    best_assignment = None
    for c_it in range(cluster_iterations):
        # initial assignment: random memberships, normalized per agent
        # (columns of p sum to 1).
        p = np.random.uniform(size=(num_clusters, num_agents))
        p = p / np.sum(p, axis=0)
        # Best Assignment
        # NOTE(review): p2 is overwritten below with np.mean(p, axis=1) and
        # never read afterwards — appears to be leftover experimentation.
        p2 = np.ones(num_clusters)
        p2 /= np.sum(p2)
        omega = np.zeros(shape=(num_clusters, num_objectives))
        it = 0
        diff = np.inf
        prev_assignment = None
        while it < max_iterations:
            it += 1
            # find best omega for assignment
            # equal to minimizing a function for each separate cluster
            for i in range(num_clusters):
                w, loss = weight_calculator(p[i])
                omega[i] = w
            #print("Omegas at iteration %d: " % it)
            # print(omega)
            # find best assignment for given Omega
            # best assignment is hard
            # p2 = np.zeros_like(p)
            loss_value = 0
            for j, mu in enumerate(mus):
                p[:, j] = 0.
                minimum = np.inf
                index = -1
                for i in range(num_clusters):
                    loss = loss_functions[j](omega[i])
                    if loss < minimum:
                        index = i
                        minimum = loss
                    # Unnormalized log-responsibility of cluster i for agent j.
                    p[i, j] = -lamb * loss
                # Softmax over clusters with max-subtraction for numerical
                # stability.
                p[:, j] -= np.max(p[:, j])
                p[:, j] = np.exp(p[:, j])
                p[:, j] /= np.sum(p[:, j])
                # Loss is accumulated from the hard (argmin) cluster choice.
                loss_value += minimum
            p2 = np.mean(p, axis=1)
            # print(p2)
            # print("Assignment at iteration %d: " % it)
            # print(p)
            # print("Loss Value ", loss_value)
            # Track the best assignment across all iterations and restarts.
            if loss_value < min_loss:
                min_loss = loss_value
                best_assignment = np.copy(p)
            # Convergence check: max elementwise change since last iteration.
            if prev_assignment is not None:
                diff = np.max(np.abs(prev_assignment - p))
                if diff < tolerance:
                    print("Converged at Iteration %d:" % it)
                    break
            # perturbation of assignment
            prev_assignment = np.copy(p)
            # p += np.random.normal(scale=0.1, size=p.shape)
            # p = np.clip(p, 0, 1)
            # p = p / np.sum(p, axis=0)
        if it == max_iterations:
            # NOTE(review): also triggers when convergence happens exactly on
            # the final iteration, since `it` reaches max_iterations either way.
            print("Finished %d iterations without converging" % max_iterations)
    # Refit the per-cluster weights to the best assignment found.
    best_weights = np.zeros(shape=(num_clusters, num_objectives))
    for i in range(num_clusters):
        w, loss = weight_calculator(best_assignment[i])
        best_weights[i] = w
    return best_assignment, best_weights, min_loss
def run_soft_optimization(mus,
                          sigmas,
                          ids,
                          num_clusters=2,
                          num_objectives=3,
                          verbose=False,
                          optimization_iterations=10,
                          num_iters=10):
    """Soft cluster assignment via multi-start SLSQP.

    Minimizes sqrt(sum of per-cluster losses) over a continuous
    (num_clusters x num_agents) assignment, constrained so each agent's
    memberships lie in [0, 1] and sum to 1, restarting SLSQP from
    `num_iters` random initializations and keeping the best successful run.

    Args:
        mus, sigmas, ids: per-agent data forwarded to
            make_weights_assignment_function.
        num_clusters: number of clusters.
        num_objectives: dimensionality of each cluster's weight vector.
        verbose: passed to SLSQP's `disp` option.
        optimization_iterations: iterations for the inner weight optimization.
        num_iters: number of successful SLSQP restarts to collect.

    Returns:
        (best_assignment, best_weights, loss_value) for the best restart.

    Raises:
        RuntimeError: if SLSQP fails on every attempt (the original code
        would have looped forever in this case).
    """
    num_agents = len(mus)
    weight_calculator = make_weights_assignment_function(
        mus,
        sigmas,
        ids,
        num_objectives=num_objectives,
        # Per-evaluation optimizer output is silenced here; `verbose` only
        # controls the SLSQP display below (as in the original).
        verbose=False,
        num_iters=optimization_iterations)

    def obj_func(assignment):
        # Summed per-cluster loss of the flat soft assignment; the sqrt
        # rescales the objective for SLSQP.
        obj = 0
        for i in range(num_clusters):
            w, loss = weight_calculator(
                assignment[i * num_agents:(i + 1) * num_agents])
            obj += loss
        return np.sqrt(obj)

    e = np.ones(num_clusters)
    e_2 = np.ones(num_agents)

    def simplex(p):
        # Equality constraint: each agent's memberships (columns) sum to 1.
        p = p.reshape((num_clusters, num_agents))
        res = np.dot(e, p) - e_2
        return np.sum(res**2)

    cons = ({'type': 'eq', 'fun': simplex})
    bounds = [(0, 1)] * (num_clusters * num_agents)

    evaluations = []
    successes = 0
    attempts = 0
    # Fix: the original `while i < num_iters` only advanced on res.success,
    # so a consistently failing optimizer spun forever. Cap total attempts.
    max_attempts = 10 * num_iters
    pbar = tqdm(total=num_iters, desc="Assignment Optimization")
    while successes < num_iters and attempts < max_attempts:
        attempts += 1
        # Random column-normalized starting point.
        p = np.random.uniform(size=(num_clusters, num_agents))
        p = p / np.sum(p, axis=0)
        x0 = p.flatten()
        res = optimize.minimize(obj_func,
                                x0,
                                method='SLSQP',
                                constraints=cons,
                                bounds=bounds,
                                options={
                                    'ftol': 1e-2,
                                    'disp': verbose
                                })
        if res.success:
            evaluations.append((res.x, obj_func(res.x)))
            pbar.update(1)
            successes += 1
    pbar.close()

    if not evaluations:
        raise RuntimeError(
            "SLSQP failed on every restart (%d attempts)" % max_attempts)
    # Fix: the original built np.array(evaluations) from (vector, scalar)
    # pairs — a ragged construction that modern NumPy rejects. Select the
    # best run in plain Python instead.
    best_x, _ = min(evaluations, key=lambda ev: ev[1])

    best_assignment = best_x.reshape((num_clusters, num_agents))
    loss_value = 0.
    best_weights = np.zeros(shape=(num_clusters, num_objectives))
    for i in range(num_clusters):
        w, loss = weight_calculator(best_assignment[i])
        best_weights[i] = w
        loss_value += loss
    return best_assignment, best_weights, loss_value
def run_pso_clustering(mus,
                       sigmas,
                       ids,
                       num_clusters=2,
                       num_objectives=3,
                       verbose=False,
                       optimization_iterations=10,
                       num_particles=10):
    """Optimize the cluster assignment with particle-swarm optimization.

    Each particle is a flattened (num_clusters x num_agents) soft
    assignment; the swarm minimizes the summed per-cluster losses subject
    to box constraints in [0, 1] and a per-agent sum-to-one constraint.

    Returns (best_assignment, best_weights, loss_value), or (0, 0, 0) if
    the `psopy` package is not available.
    """
    try:
        from psopy import minimize
    except ImportError:
        print("Psopy not installed: ")
        return 0, 0, 0

    num_agents = len(mus)
    weight_calculator = make_weights_assignment_function(
        mus,
        sigmas,
        ids,
        num_objectives=num_objectives,
        verbose=verbose,
        num_iters=optimization_iterations)

    def obj_func(flat):
        # Total loss of one particle: sum of per-cluster optimizer losses.
        grid = flat.reshape((num_clusters, num_agents))
        total = 0
        for row in range(grid.shape[0]):
            _, row_loss = weight_calculator(grid[row])
            total += row_loss
        return total

    def simplex(flat):
        # 0 when every agent's memberships sum to one, -1 when violated.
        grid = flat.reshape((num_clusters, num_agents))
        return 0 if np.isclose(np.sum(grid, axis=0) - 1., 0.).all() else -1

    cons = ({
        'type': 'ineq',
        'fun': lambda q: np.min(1 - q)
    }, {
        'type': 'ineq',
        'fun': lambda q: np.min(q)
    }, {
        'type': 'eq',
        'fun': simplex
    })

    # Seed each particle with a random column-normalized soft assignment.
    x0 = np.zeros((num_particles, num_clusters * num_agents))
    for k in range(num_particles):
        seed = np.random.uniform(size=(num_clusters, num_agents))
        seed = seed / np.sum(seed, axis=0)
        x0[k] = seed.flatten()

    res = minimize(obj_func,
                   x0,
                   constraints=cons,
                   options={
                       'g_rate': 1.,
                       'l_rate': 1.,
                       'max_velocity': 4.,
                       'stable_iter': 50
                   })
    print(res.x)

    # Refit weights and total loss for the swarm's best position.
    best_assignment = res.x.reshape((num_clusters, num_agents))
    best_weights = np.zeros(shape=(num_clusters, num_objectives))
    loss_value = 0.
    for row in range(num_clusters):
        w, row_loss = weight_calculator(best_assignment[row])
        best_weights[row] = w
        loss_value += row_loss
    return best_assignment, best_weights, loss_value