def main(): root = "./data/" trans = transforms.Compose([ transforms.ToTensor(), ]) train_set = dset.FashionMNIST(root=root, train=True, transform=trans, download=True) torch.manual_seed(4) n_samples = 1600 dt_range = np.linspace(start=0.05, num=20, stop=1) s_range = np.linspace(start=0.01, num=20, stop=0.6) X_train_val, Y_train_val = filter_by_label( x=train_set.data, y=train_set.targets, labels=[6, 9], n_samples=n_samples, device=DEVICE, ) X, Y, X_transf, Y_transf = split_train_val(X_train_val, Y_train_val, n_samples) k = len(Y.unique()) print(Y.sum(), Y_transf.sum()) models = [ KMeans(n_clusters=k), SpectralClustering( n_clusters=k, lmode="rw", similarity="gaussian_zp", assign_labels="kmeans", ), Hierarchical(n_clusters=k), ] test_robustness( X, Y, X_transf, Y_transf, models=models, dt_range=dt_range, s_range=s_range, lb=1 / 255, mutation_rate=0.1, path=PATH + "/TransferfashionMNIST/", )
def main(): root = "./data/" trans = transforms.Compose([ transforms.ToTensor(), ]) train_set = dset.FashionMNIST(root=root, train=True, transform=trans, download=True) torch.manual_seed(4) n_samples = 800 dt_range = np.linspace(start=0.1, num=19, stop=1) s_range = np.linspace(start=0.1, num=21, stop=0.6) X, Y = filter_by_label( x=train_set.data, y=train_set.targets, labels=[6, 9], n_samples=n_samples, device=DEVICE, ) k = len(Y.unique()) models = [ KMeans(n_clusters=k), SpectralClustering( n_clusters=k, lmode="rw", similarity="gaussian_zp", assign_labels="kmeans", ), Hierarchical(n_clusters=k), ] test_robustness( X, Y, models=models, dt_range=dt_range, s_range=s_range, lb=1 / 255, path="./exportfashionMNIST/", )
def main():
    """Run the robustness experiment on precomputed CIFAR-10 features.

    Loads feature/label tensors previously saved for classes 0, 1 and 6,
    reshapes the features to the 3-D layout the clustering wrappers expect
    (via ``unsqueeze(2)``), and runs ``test_robustness`` for three clustering
    models, writing results under ``PATH + "cifar/"``.
    """
    device = DEVICE

    raw_features = torch.load("./data/cifar10/cifar_features_classes016.pt")
    raw_labels = torch.load("./data/cifar10/cifar_labels_classes016.pt")
    # Add a trailing singleton dimension: (n, d) -> (n, d, 1).
    X = raw_features.unsqueeze(2).to(device)
    Y = raw_labels.to(device)

    dt_range = np.linspace(start=0.01, stop=1.5, num=20)
    s_range = np.linspace(start=0.01, num=20, stop=0.6)

    k = len(Y.unique())

    models = [
        KMeans(n_clusters=k, max_tol=1e-05, max_iter=500),
        SpectralClustering(
            n_clusters=k,
            lmode="rw",
            similarity="gaussian_zp",
            assign_labels="kmeans",
        ),
        Hierarchical(n_clusters=k),
    ]

    test_robustness(
        X,
        Y,
        y_target=6,
        models=models,
        dt_range=dt_range,
        box=(0, 6),
        s_range=s_range,
        lb=1 / 255,
        path=PATH + "cifar/",
    )
"--dataset", type=str, default="89", help="DIGIT dataset to analyze. Available: '89' or '14']", ) (opts, args) = op.parse_args(sys.argv[1:]) X = np.load("comparison/DIGIT/X_org_{}.npy".format(opts.dataset)) Xadv_s = np.load("comparison/DIGIT/X_adv_{}.npy".format(opts.dataset)) X = torch.from_numpy(X).unsqueeze(2) Xadv_s = torch.from_numpy(Xadv_s).unsqueeze(2) eps_s = Xadv_s - X h = Hierarchical(n_clusters=2) model = ClusteringWrapper3Dto2D(h) yhat = model.fit_predict(X) yadv_s = model.fit_predict(Xadv_s) set_seed(55) T = ConstrainedAdvPoisoningGlobal( delta=(Xadv_s - X).norm(float("inf")) / 2, s=1, clst_model=model, lb=1.0, G=150, mutation_rate=0.2, crossover_rate=0.85, zero_rate=0.35, domain_cons=[0, 16],
# NOTE(review): this chunk opens with the tail of a function defined before the
# visible region (the `return X, Y` belongs to it — presumably the end of
# `filter_data`; its original indentation is unknown), so the code is left
# byte-identical rather than reformatted. The script part that follows loads
# ResNet-18 CIFAR features and labels, selects the class triple (0, 1, 6) —
# two alternatives are kept as commented-out candidates — seeds the RNG,
# filters 1600 samples per the triple, fits three clustering models, prints
# each model's adjusted mutual information against the true labels as a
# sanity check, and saves the filtered features/labels for reuse by the CIFAR
# robustness experiment.
X = dt_x[perm].flatten(1) Y = dt_y[perm].flatten(0) return X, Y X = torch.load( "./src/classification/cifar_features/cifar_features_resnet18.pt" ).detach() Y = torch.load("./src/classification/cifar_features/cifar_labels.pt").detach() # best_tp = (6, 1, 7) # best_tp = (4, 6, 9) best_tp = (0, 1, 6) i, j, k = best_tp set_seed(4) x, y = filter_data(X, Y, i, j, k, n=1600) k = len(y.unique()) models = [ KMeans(n_clusters=k, max_tol=1e-05, max_iter=500), SpectralClustering( n_clusters=k, lmode="rw", similarity="gaussian_zp", assign_labels="kmeans" ), Hierarchical(n_clusters=k), ] for m in models: cls = m.fit_predict(x) print(adjusted_mutual_info_score(cls, y)) torch.save(x, "./data/cifar10/cifar_features_classes016.pt") torch.save(y, "./data/cifar10/cifar_labels_classes016.pt")