def __init__(self, sid, pid, channels, pump):
    self.ssid = sid[0]
    self.parties = sid[1]
    self.delta = sid[2]
    self.pump = pump
    self.internal_run_queue = {}
    self.internal_delay = 0
    self.pid_to_queue = {}

    # TODO: change the SID so that the leaks are the same
    # print('Simulated')
    # Spawn the internal (simulated) real-world UC experiment
    self.sim_channels, static, _pump = createWrappedUC(
        [('F_chan', Syn_Channel)],
        ProtocolWrapper,
        Syn_FWrapper,
        Syn_Bracha_Protocol,
        DummyWrappedAdversary)

    self.sim_sid = (sid[0], sid[1], sid[2])
    self.sim_pump = _pump
    static.write((('sid', self.sim_sid), ('crupt',)))

    # Message handlers: ideal-world channels (p2a, z2a, w2a, f2a)
    # plus the channels of the internal simulation
    handlers = {
        channels['p2a']: self.party_msg,
        channels['z2a']: self.env_msg,
        channels['w2a']: self.wrapper_msg,
        channels['f2a']: self.func_msg,
        self.sim_channels['p2z']: self.sim_party_msg,
        self.sim_channels['a2z']: self.sim_adv_msg,
        self.sim_channels['f2z']: self.sim_func_msg,
        self.sim_channels['w2z']: self.sim_wrapper_msg,
    }
    ITM.__init__(self, sid, pid, channels, handlers)
def __init__(self, sid, pid, channels, pump, prot, poly, importargs):
    self.ssid = sid[0]
    self.parties = sid[1]
    self.pump = pump
    self.prot = prot

    # Maintain a copy of the ideal-world wrapper queue
    self.internal_run_queue = []
    self.internal_delay = 0
    self.sim_run_queue = {}

    # Track the index in the queue for each party's output
    self.pid_to_queue = {}

    # Whether input was provided to the functionality
    self.dealer_input = None
    self.total_extra_delay_added = 0
    self.log = logging.getLogger("\033[1mRBC_Simulator\033[0m")
    self.sim_leaks = []
    self.party_output_value = None
    self.expect_output = False

    handlers = {
        channels['p2a']: self.party_msg,
        channels['z2a']: self.env_msg,
        channels['w2a']: self.wrapper_msg,
        channels['f2a']: self.func_msg,
    }
    ITM.__init__(self, sid, pid, channels, handlers, poly, importargs)

    # Spawn UC experiment of the real world (local to the simulator)
    self.sim_channels, static, _pump = createWrappedUC(
        [('F_chan', Async_Channel)],
        wrappedProtocolWrapper(prot),
        Async_FWrapper,
        DummyWrappedAdversary,
        poly,
        importargs={'ctx': self.ctx, 'impflag': False})

    # Forward the same 'sid' to the simulation
    # TODO: forward crupt parties as well
    # TODO: so far comm.py enforces corruption in the simulation as well
    # TODO: possibly wait to do the `static.write` below until execuc.py
    #       tells us who the corrupted parties are
    self.sim_sid = (sid[0], sid[1], sid[2])
    self.sim_pump = _pump
    static.write((('sid', self.sim_sid), ('crupt',)))

    # Register handlers for the internal simulation's channels as well
    self.handlers.update({
        self.sim_channels['p2z']: self.sim_party_msg,
        self.sim_channels['a2z']: self.sim_adv_msg,
        self.sim_channels['f2z']: self.sim_func_msg,
        self.sim_channels['w2z']: self.sim_wrapper_msg,
    })
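# Illustration only (not the framework's API): the `handlers` dictionaries
# built in the two constructors above implement a plain channel -> callback
# dispatch table, which the ITM base class presumably consults when a channel
# fires. A stripped-down sketch of that pattern, with strings standing in for
# channel objects, looks like this:
class ChannelDispatcher:
    def __init__(self, handlers):
        # Map each channel to the method that should process its messages.
        self.handlers = dict(handlers)

    def dispatch(self, channel, msg):
        # Look up the callback registered for the channel and invoke it.
        return self.handlers[channel](msg)


d = ChannelDispatcher({
    'p2a': lambda m: print('party -> adversary:', m),
    'z2a': lambda m: print('env   -> adversary:', m),
})
d.dispatch('p2a', ('commit', 0))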
def do_experiments(dataset):
    X, y = dataset.data, dataset.target
    dataset_name = dataset.DESCR.split('\n')[0]
    if dataset_name.startswith("Iris"):
        # Iris has duplicate data points. That messes up our
        # MeanNN implementation.
        from scipy.spatial.distance import pdist, squareform
        dist = squareform(pdist(X))
        # Indices of points that duplicate an earlier point (zero distance)
        doubles = np.unique(np.where(np.tril(dist - 1, -1) == -1)[0])
        mask = np.ones(X.shape[0], dtype=bool)
        mask[doubles] = False
        X = X[mask]
        y = y[mask]

    n_clusters = len(np.unique(y))
    print("\n\nDataset %s samples: %d, features: %d, clusters: %d"
          % (dataset_name, X.shape[0], X.shape[1], n_clusters))
    print("=" * 70)

    classes = [ITM(n_clusters=n_clusters, infer_dimensionality=False),
               ITM(n_clusters=n_clusters, infer_dimensionality=True),
               # AgglomerativeClustering(linkage='ward', n_clusters=n_clusters),
               KMeans(n_clusters=n_clusters)]
    names = ["ITM", "ITM ID", "KMeans"]

    for clusterer, method in zip(classes, names):
        start = time()
        clusterer.fit(X)
        y_pred = clusterer.labels_
        ari = adjusted_rand_score(y, y_pred)
        ami = adjusted_mutual_info_score(y, y_pred)
        nmi = normalized_mutual_info_score(y, y_pred)
        objective = tree_information(X, y_pred)
        runtime = time() - start
        print("%-15s ARI: %.3f, AMI: %.3f, NMI: %.3f objective: %.3f time: %.2f"
              % (method, ari, ami, nmi, objective, runtime))

        # Compute and plot the confusion matrix comparing y and y_pred
        cnf_matrix = confusion_matrix(y, y_pred)
        np.set_printoptions(precision=2)
        plt.figure()
        plot_confusion_matrix(cnf_matrix, classes=sorted(np.unique(y)),
                              title='Confusion Matrix for ' + method)
        plt.savefig('Confusion Matrix for ' + dataset_name + ' ' + method)
        plt.show()

    i_gt = tree_information(X, y)
    print("GT objective: %.3f" % i_gt)
def do_experiments(dataset):
    X, y = dataset.data, dataset.target
    dataset_name = dataset.DESCR.split('\n')[0]
    if dataset_name.startswith("Iris"):
        # Iris has duplicate data points. That messes up our
        # MeanNN implementation.
        from scipy.spatial.distance import pdist, squareform
        dist = squareform(pdist(X))
        # Indices of points that duplicate an earlier point (zero distance)
        doubles = np.unique(np.where(np.tril(dist - 1, -1) == -1)[0])
        mask = np.ones(X.shape[0], dtype=bool)
        mask[doubles] = False
        X = X[mask]
        y = y[mask]

    n_clusters = len(np.unique(y))
    print("\n\nDataset %s samples: %d, features: %d, clusters: %d"
          % (dataset_name, X.shape[0], X.shape[1], n_clusters))
    print("=" * 70)

    classes = [ITM(n_clusters=n_clusters),
               ITM(n_clusters=n_clusters, infer_dimensionality=True),
               Ward(n_clusters=n_clusters),
               KMeans(n_clusters=n_clusters)]
    names = ["ITM", "ITM ID", "Ward", "KMeans"]

    for clusterer, method in zip(classes, names):
        start = time()
        clusterer.fit(X)
        y_pred = clusterer.labels_
        ari = adjusted_rand_score(y, y_pred)
        ami = adjusted_mutual_info_score(y, y_pred)
        nmi = normalized_mutual_info_score(y, y_pred)
        objective = tree_information(X, y_pred)
        runtime = time() - start
        print("%-15s ARI: %.3f, AMI: %.3f, NMI: %.3f objective: %.3f time: %.2f"
              % (method, ari, ami, nmi, objective, runtime))

    i_gt = tree_information(X, y)
    print("GT objective: %.3f" % i_gt)
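# A hypothetical driver for the do_experiments(dataset) variants above. It
# assumes the module-level imports they rely on are in place (numpy as np,
# time, matplotlib.pyplot as plt, sklearn's KMeans and clustering metrics,
# and the project-local ITM, tree_information, plot_confusion_matrix
# helpers), and that dataset.DESCR starts with the dataset name
# ("Iris ...") as in the sklearn version the code was written against.
from sklearn.datasets import load_iris, load_digits

if __name__ == "__main__":
    for load in (load_iris, load_digits):
        do_experiments(load())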
def __init__(self, k, bits, crupt, sid, pid, channels, pump, poly, importargs):
    self.crupt = crupt
    self.ssid = sid[0]
    self.committer = sid[1]
    self.receiver = sid[2]
    self.table = {}
    self.revtable = {}
    self.receiver_random = None
    self.receiver_state = 1

    handlers = {
        channels['p2a']: self.party_msg,
        channels['f2a']: self.func_msg,
        channels['z2a']: self.env_msg,
    }
    ITM.__init__(self, k, bits, sid, pid, channels, handlers, poly, pump, importargs)
def do_experiments(X, y):
    n_clusters = len(np.unique(y))
    classes = [ITM(n_clusters=n_clusters, infer_dimensionality=False),
               KMeans(n_clusters=n_clusters)]
    names = ["ITM", "KMeans"]

    for clusterer, method in zip(classes, names):
        clusterer.fit(X)
        y_pred = clusterer.labels_
        title = 'Clustering Using ' + method
        # Scatter plot of the first two features, colored by cluster label
        plt.scatter(X[:, 0], X[:, 1], c=y_pred, s=50, cmap='viridis')
        plt.title(title)
        plt.show()
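# A minimal usage sketch for do_experiments(X, y) above: cluster a small
# synthetic 2-D dataset and plot the assignments. Assumes numpy as np,
# matplotlib.pyplot as plt, sklearn's KMeans, and the project-local ITM
# estimator are already imported.
from sklearn.datasets import make_blobs

X, y = make_blobs(n_samples=300, centers=3, cluster_std=0.8, random_state=0)
do_experiments(X, y)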