def __init__(self, params):
    # Unpack the public parameters shared by all protocol participants.
    # params = (pairing setup P, common reference string, issuer verification
    # key, encryption key, hash) — component semantics inferred from names;
    # confirm against the setup code that produces `params`.
    self.P, self.crs, self.vkI, self.ek, self.H = params
    # P = (group order, bilinear group, G1 generator, G2 generator, pairing map)
    self.order, self.G, self.g1, self.g2, self.e = self.P
    # Parameter tuples consumed by the signature and SoK sub-schemes.
    self.param_sig = (self.order, self.G, self.g1, self.g2, self.e)
    self.param_sok = (self.order, self.G, self.g1)
    self.Sig = USPS()  # structure-preserving signature scheme
    self.sok = SOK()   # signature of knowledge
def test_usps_proof():
    """Exercise a GS proof that a USPS signature verifies while hiding the
    signer's public key, the signature, and all but the first message.

    Fix: the original committed M[1] with a duplicate dict key
    ({"group": 1, "group": 1, ...}); the duplicate is removed — the resulting
    dict is identical, so behavior is unchanged.
    """
    gsp = GSProof()
    gsp.ExtGen()
    params = gsp.P
    sps = USPS()
    sk, pk = sps.keygen(params)
    gz, gr = pk[0], pk[1]  # the two fixed elements of the USPS public key
    pki = pk[2:]           # per-message key elements
    m = [
        gsp.G.hashG1(b"Hello World!"),
        gsp.G.hashG1(b"Hello me!"),
        gsp.G.hashG1(b"Hello us!"),
    ]
    # Pad the message vector with the identity element up to the key size,
    # or bail out when there are more messages than the key supports.
    if len(m) < len(pk) - 2:
        for _ in range(len(pk) - 2 - len(m)):
            m.append(G1Elem.inf(gsp.G))
    elif len(pk) - 2 < len(m):
        return
    sig = sps.sign(params, sk, m)
    print("Does the signature verify?", sps.verify(params, pk, m, sig))
    # Commit to every public-key element in group G2.
    PK = [gsp.Commit({"group": 2, "type": "com", "value": pk[i]})
          for i in range(len(pk))]
    # First message committed (hidden); the remaining two stay public.
    M = [
        gsp.Commit({"group": 1, "type": "com", "value": m[0]}),
        gsp.Commit({"group": 1, "type": "pub", "value": m[1]}),
        gsp.Commit({"group": 1, "type": "pub", "value": m[2]}),
    ]
    # `m` was already padded above, so this loop adds nothing extra here;
    # it mirrors the padding for the committed representation.
    for _ in range(len(pk) - 2 - len(m)):
        M.append(gsp.Commit({"group": 1, "type": "pub",
                             "value": G1Elem.inf(gsp.G)}))
    # Both signature components are committed (hidden).
    SIG = [gsp.Commit({"group": 1, "type": "com", "value": sig[i]})
           for i in range(len(sig))]
    verify, res = proof_usps_hidesigandsigner(gsp, PK, M, SIG)
    assert verify
class GSPO_prov():
    """Provider role of the GSPO protocol: holds a signing key, registers
    with the authority, and issues/verifies user credentials.
    (Role semantics inferred from method names — confirm against the spec.)
    """

    def __init__(self, params):
        # params = (pairing setup P, CRS, issuer vk, encryption key, hash).
        self.P, self.crs, self.vkI, self.ek, self.H = params
        self.order, self.G, self.g1, self.g2, self.e = self.P
        self.param_sig = (self.order, self.G, self.g1, self.g2, self.e)
        self.param_sok = (self.order, self.G, self.g1)
        self.Sig = USPS()  # provider's signature scheme
        self.sok = SOK()   # signature of knowledge

    def KeyGen(self):
        #print("GSPO_prov: KeyGen")
        # Generate the provider's signing/verification key pair.
        self.sk, self.vk = self.Sig.keygen(self.param_sig)

    def GJoin(self, auth):
        #print("GSPO_prov: GJoin")
        # Register vk with the authority; keep (h_i, vk, sig0) as public key.
        self.hi, self.vk, self.sig0 = auth.GRegister(self.vk)
        self.pk = (self.hi, self.vk, self.sig0)

    def GJoint(self, auth, t):
        #print("GSPO_prov: GJoint")
        # Obtain the authority's certificate for epoch/time t.
        self.t, self.sigt = auth.GCertify(self.pk, t)

    def GIssue(self, m, pkui, pi_ui):
        #print("GSPO_prov: GIssue")
        # Sign [pkui] + m only when the user's SoK on pkui verifies.
        # Returns (sig, (t, sigt)) on success, -1 on authentication failure
        # (callers must check for the sentinel).
        if self.sok.verify(self.param_sok, pkui, self.hi, pi_ui):
            msg = [pkui]
            msg.extend(m)
            sig = self.Sig.sign(self.param_sig, self.sk, msg)
            return sig, (self.t, self.sigt)
        #print("--------------------------------------------- GSPO_prov: authentication error")
        return -1

    def GVerify(self, auth, pk_uv, pi_uv, pi_iuv):
        #print("GSPO_prov: GVerify")
        # Returns 1 iff the SoK on pk_uv and every GS proof in pi_iuv verify;
        # returns 0 otherwise (res is multiplied by each proof's result).
        res = 0
        if self.sok.verify(self.param_sok, pk_uv, self.hi, pi_uv, pi_iuv):
            res = 1
            import time
            tverify = time.time()
            for i in range(len(pi_iuv)):
                #print("proof #"+str(i)+":", pi_iuv[i][1][0])
                proof = pi_iuv[i]
                # Each entry bundles the proof elements and its equation.
                pi, equation = proof
                pi2_v1, pi2_w1, pi1_v2, pi1_w2 = pi
                eq, X, Y, C, T = equation
                tt1 = time.time()
                boolean = auth.GS.Verify(eq, X, Y, C, pi2_v1, pi2_w1, pi1_v2, pi1_w2)
                #print("---", time.time()-tt1)
                if (boolean != 1):
                    print("error proof", i, boolean)
                # A single failing proof (boolean == 0) zeroes the result.
                res *= boolean
            tverifyend = time.time()
            #print("proof verification time:", tverifyend - tverify)
        return res
def __init__(self, params):
    # Unpack the shared public parameters.
    # params = (pairing setup P, CRS, issuer verification key, encryption
    # key, hash) — semantics inferred from names; confirm against setup code.
    self.P, self.crs, self.vkI, self.ek, self.H = params
    self.order, self.G, self.g1, self.g2, self.e = self.P
    # Only the second CRS component (v1, indexed twice below) is needed here.
    _, self.v1, _, _, _, _ = self.crs
    self.param_sig = (self.order, self.G, self.g1, self.g2, self.e)
    self.param_sok = (self.order, self.G, self.g1)
    # Encryption parameters are derived from the two v1 elements.
    self.param_enc = (self.order, self.G, self.v1[0], self.v1[1])
    self.Sig_auth = BSPS()   # authority-side signature scheme
    self.Sig_prov = USPS()   # provider-side signature scheme
    self.sok = SOK()         # signature of knowledge
    self.CSEnc = CSEnc()     # encryption scheme (presumably Cramer-Shoup — confirm)
def get_data_loader(ds_name, root_path, batch_size=32, tfs=None, train_flag=True, dss_name=None):
    """Build a shuffled DataLoader for the requested dataset.

    Args:
        ds_name: dataset key; must be in the module-level _DS_NAME.
        root_path: directory under which datasets are stored/downloaded.
        batch_size: loader batch size.
        tfs: torchvision transform pipeline applied to each sample.
        train_flag: True -> training split, False -> test split.
        dss_name: sub-domain for the 'office' dataset (must be in _DSS_NAME).

    Returns:
        torch.utils.data.DataLoader with shuffle=True.

    Raises:
        Exception: if ds_name is not a supported dataset.

    Fix: the 'mnist' and 'svhn' branches were byte-identical duplicated
    code (both load pre-exported PNG folders named "<ds_name>_png"); they
    are merged into one branch with identical behavior. Dead commented-out
    code removed.
    """
    global _DS_NAME, _DSS_NAME
    if ds_name not in _DS_NAME:
        raise Exception('Unsupported data set')
    if ds_name in ('mnist', 'svhn'):
        # Both datasets were exported to per-class PNG folders under
        # "<ds_name>_png/{training,testing}/".
        split = "training" if train_flag else "testing"
        ds = ImageFolderWithPaths(os.path.join(root_path, ds_name + "_png/" + split + "/"),
                                  transform=tfs)
    elif ds_name == 'svhn_transfer':
        # Same layout but without the "_png" suffix in the directory name.
        split = "training" if train_flag else "testing"
        ds = ImageFolderWithPaths(os.path.join(root_path, ds_name + "/" + split + "/"),
                                  transform=tfs)
    elif ds_name == 'usps':
        ds = USPS(root_path, train=train_flag, transform=tfs, download=True)
    elif ds_name == 'cifar':
        ds = datasets.CIFAR10(os.path.join(root_path, ds_name),
                              train=train_flag, transform=tfs, download=True)
    elif ds_name == 'office':
        # Office layout: root/office/{train,test}/<sub-domain>/...
        # Modification may be required, rewrite it via OFFICE class
        assert dss_name is not None and dss_name in _DSS_NAME
        tmp_path = os.path.join(root_path, ds_name)
        tmp_path = os.path.join(tmp_path, 'train' if train_flag else 'test')
        ds = datasets.ImageFolder(os.path.join(tmp_path, dss_name), transform=tfs)
    return DataLoader(ds, batch_size=batch_size, shuffle=True)
tf.app.flags.DEFINE_integer('log_step', 10000, 'Logging period in terms of iteration')

# MNIST -> USPS domain-adaptation experiment configuration.
NUM_CLASSES = 10
TRAIN_FILE = 'mnist'
TEST_FILE = 'usps'
# Printed three times so the source->target pairing is unmissable in long
# training logs (Python 2 statement-form print).
print TRAIN_FILE + ' ---------------------------------------> ' + TEST_FILE
print TRAIN_FILE + ' ---------------------------------------> ' + TEST_FILE
print TRAIN_FILE + ' ---------------------------------------> ' + TEST_FILE

# Deterministically subsample 2000 MNIST / 1800 USPS training images.
# NOTE(review): seeding with hash(<str>) is only reproducible where string
# hashing is deterministic (Python 2, or PYTHONHASHSEED fixed) — confirm.
np.random.seed(hash('mnist2000_usps1800') & 0xffffffff)
mnist_select = np.random.choice(60000, size=2000, replace=False)
usps_select = np.random.choice(7291, size=1800, replace=False)
TRAIN = MNIST('data/mnist', select=mnist_select, split='train', shuffle=True)
VALID = USPS('data/usps', select=usps_select, split='train', shuffle=True)
# NOTE(review): TEST reuses the USPS *train* split with the same selection
# as VALID, only unshuffled — verify this is intentional.
TEST = USPS('data/usps', select=usps_select, split='train', shuffle=False)

FLAGS = tf.app.flags.FLAGS
MAX_STEP = 10000


def decay(start_rate, epoch, num_epochs):
    # Inverse-decay schedule: start_rate / (1 + 0.001*epoch)^0.75.
    # num_epochs is accepted but unused in this formula.
    return start_rate / pow(1 + 0.001 * epoch, 0.75)


def adaptation_factor(x):
    # Ramp-up factor lamb = 2/(1 + e^(-10x)) - 1, rising from 0 toward 1
    # as x grows (the rest of this function lies outside this excerpt).
    #return 1.0
    #return 0.25
    den = 1.0 + math.exp(-10 * x)
    lamb = 2.0 / den - 1.0
# USPS -> MNIST open-set configuration: the MNIST target side is loaded with
# unk=True (unknown/open classes), the USPS source side with unk=False.
TRAIN_FILE = 'usps'
TEST_FILE = 'mnist'
# Printed three times for visibility in long logs (Python 2 print statement).
print TRAIN_FILE + ' ---------------------------------------> ' + TEST_FILE
print TRAIN_FILE + ' ---------------------------------------> ' + TEST_FILE
print TRAIN_FILE + ' ---------------------------------------> ' + TEST_FILE
# Disabled subsampling experiment kept for reference (no-op string literal).
'''
np.random.seed(hash('mnist')&0xffffffff)
mnist_select=np.random.choice(60000,size=60000,replace=False)
np.random.seed(hash('usps')&0xffffffff)
svhn_select=np.random.choice(73257,size=73257,replace=False)
'''
#TRAIN=SVHN('data/svhn',split='train',unk=False,shuffle=True)
#VALID=MNIST('data/mnist',split='train',unk=True,shuffle=True)
#TEST=MNIST('data/mnist',split='train',unk=True,shuffle=False)
TRAIN = USPS('data/usps', split='train', unk=False, shuffle=True)
VALID = MNIST('data/mnist', split='train', unk=True, shuffle=True)
# NOTE(review): TEST uses the MNIST *train* split, unshuffled — confirm.
TEST = MNIST('data/mnist', split='train', unk=True, shuffle=False)

FLAGS = tf.app.flags.FLAGS
MAX_STEP = 10000


def decay(start_rate, epoch, num_epochs):
    # Inverse-decay schedule: start_rate / (1 + 0.001*epoch)^0.75.
    # num_epochs is accepted but unused in this formula.
    return start_rate / pow(1 + 0.001 * epoch, 0.75)


def adaptation_factor(x):
    # Sigmoid-style ramp-up factor (function continues beyond this excerpt).
    #return 1.0
    #return 0.25
    den = 1.0 + math.exp(-10 * x)
elif dset_name == "mnist":
    # Print dataset size and grayscale mean/std scaled into [0,1] — these
    # are the values typically fed to transforms.Normalize.
    dataset = torchvision.datasets.MNIST(root=dset_dir, train=True)
    print(list(dataset.train_data.size()))
    # NOTE(review): .train_data is deprecated in newer torchvision (.data) —
    # confirm the pinned torchvision version still exposes it.
    print(dataset.train_data.float().mean() / 255)
    print(dataset.train_data.float().std() / 255)
elif dset_name == "mnistm":
    from mnistm import MNISTM
    dataset = MNISTM(root=dset_dir, train=True)
    print(list(dataset.train_data.size()))
    # MNIST-M is color: report statistics per channel (last axis here).
    for dim in range(3):
        print(dim)
        print(dataset.train_data[:, :, :, dim].float().mean() / 255)
        print(dataset.train_data[:, :, :, dim].float().std() / 255)
elif dset_name == "svhn":
    dataset = torchvision.datasets.SVHN(root=dset_dir, download=True, split='train')
    # SVHN .data is channel-first; reducing over axes (0, 2, 3) yields
    # one mean/std per channel.
    print(dataset.data.shape)
    print(dataset.data.mean(axis=(0, 2, 3)) / 255)
    print(dataset.data.std(axis=(0, 2, 3)) / 255)
elif dset_name == "usps":
    from usps import USPS
    dataset = USPS(root=dset_dir, train=True, download=True)
    # Materialize the images into one ndarray before computing global stats.
    dataset = np.asarray([np.asarray(img) for img in dataset.images])
    print(dataset.shape)
    print(dataset.mean() / 255)
    print(dataset.std() / 255)
def proof_usps_hidesigandsigner(self, M=None):
    """Create a GS proof that a USPS signature verifies while the public key,
    the signature, and all messages but the first stay hidden.

    Args:
        M: list of messages, each either bytes (hashed into G1) or a G1Elem.
           Defaults to three demo byte strings.

    Returns:
        (verify, res): verify is 1 when the proof verifies, 0 otherwise;
        on success res is [[pi2_v1, pi2_w1, pi1_v2, pi1_w2],
        [eq_type, X1, Y1, C1, T_eq]], otherwise [].

    Fixes vs. original: mutable list default argument replaced with the
    None sentinel; the two bare `return` statements (which made callers
    unpacking two values crash on None) now return (0, []) consistently;
    malformed '\"\"\"\"' docstring opener repaired.
    """
    if M is None:
        M = [b"Hello World!", b"Hello me!", b"Hello us!"]
    #print("SPS: Prove")
    self.ExtGen()
    params = (self.G, self.order, self.g1, self.g2, self.e)
    sps = USPS()
    sk, pk = sps.keygen(params)
    gz, gr, pki = pk
    # Normalize messages to G1 elements; reject anything else.
    m = []
    for msg in M:
        if type(msg) == bytes:
            m.append(self.G.hashG1(msg))
        elif type(msg) == G1Elem:
            m.append(msg)
        else:
            return 0, []
    # NOTE(review): the original mixes sps.n and sps.nb_msg as the message
    # bound — confirm against the USPS class that they denote the same value.
    if len(m) < sps.n:
        # Pad with the identity element up to the key's message capacity.
        for _ in range(sps.nb_msg - len(m)):
            m.append(G1Elem.inf(self.G))
    elif sps.nb_msg < len(m):
        return 0, []
    sig = sps.sign(params, sk, m)
    if sps.verify(params, pk, m, sig) == 0:
        print("Signature does not verify")
        return 0, []
    print(len(m))
    # X/Y are the G1/G2 variables of the pairing-product equation ("com"
    # entries are committed/hidden, "pub" entries public); A/B are the
    # public constants paired against them (identity elements here).
    X = [{"type": "com", "value": sig[0]},
         {"type": "com", "value": sig[1]}]
    B = [{"type": "pub", "value": G2Elem.inf(self.G)},
         {"type": "pub", "value": G2Elem.inf(self.G)}]
    A = [{"type": "pub", "value": G1Elem.inf(self.G)},
         {"type": "pub", "value": G1Elem.inf(self.G)}]
    Y = [{"type": "com", "value": gz},
         {"type": "com", "value": gr}]
    for i in range(len(m)):
        # Only the first message is public; the rest stay hidden.
        if i == 0:
            X.append({"type": "pub", "value": m[i]})
        else:
            X.append({"type": "com", "value": m[i]})
        B.append({"type": "pub", "value": G2Elem.inf(self.G)})
    for j in range(len(pki)):
        Y.append({"type": "com", "value": pki[j]})
        A.append({"type": "pub", "value": G1Elem.inf(self.G)})
    # C is the identity exponent matrix: X[i] is paired with Y[i] only.
    C = []
    for i in range(len(X)):
        row = []
        for j in range(len(Y)):
            var = Bn(0)
            if i == j:
                var = Bn(1)
            row.append(var)
        C.append(row)
    print(C)
    success, res = self.CommitProof_eq("PPE", X, B, A, Y, C, GTElem.zero(self.G))
    verify = 0
    if success:
        eq_type, X1, Y1, C1, T_eq, pi2_v1_ap, pi2_w1_ap, pi1_v2_ap, pi1_w2_ap = res
        # Re-randomize the proof before verification/publication.
        pi2_v1, pi2_w1, pi1_v2, pi1_w2 = self.Randomize(
            eq_type, pi2_v1_ap, pi2_w1_ap, pi1_v2_ap, pi1_w2_ap)
        verify = self.Verify(eq_type, X1, Y1, C1, pi2_v1, pi2_w1, pi1_v2, pi1_w2)
        if verify:
            res = [[pi2_v1, pi2_w1, pi1_v2, pi1_w2],
                   [eq_type, X1, Y1, C1, T_eq]]
    print(success, verify)
    print()
    return verify, res
class GSPO_user():
    """User role of the GSPO protocol: derives per-provider pseudonyms from
    the user secret key, authenticates via signatures of knowledge, and
    assembles the final aggregated proof.
    (Role semantics inferred from method names — confirm against the spec.)
    """

    def __init__(self, params):
        # params = (pairing setup P, CRS, issuer vk, encryption key, hash).
        self.P, self.crs, self.vkI, self.ek, self.H = params
        self.order, self.G, self.g1, self.g2, self.e = self.P
        # Only the second CRS component (v1) is used, for encryption params.
        _, self.v1, _, _, _, _ = self.crs
        self.param_sig = (self.order, self.G, self.g1, self.g2, self.e)
        self.param_sok = (self.order, self.G, self.g1)
        self.param_enc = (self.order, self.G, self.v1[0], self.v1[1])
        self.Sig_auth = BSPS()   # authority-side signature scheme
        self.Sig_prov = USPS()   # provider-side signature scheme
        self.sok = SOK()         # signature of knowledge
        self.CSEnc = CSEnc()     # encryption scheme

    def GKeyGen(self):
        #print("GSPO_user: GKeyGen")
        # User secret key: a random scalar modulo the group order.
        self.sku = self.order.random()

    def GDerive(self, pk):
        #print("GSPO_user: GDerive")
        # Verify the authority's certificate on (g1*0, h) and, if valid,
        # derive the user's pseudonym h * sku. Returns None when the
        # certificate does not verify.
        h, vk, sig0 = pk
        if self.Sig_auth.verify(self.param_sig, self.vkI, [self.g1 * 0, h], vk, sig0):
            return h * self.sku

    def GAuthenticate(self, pk_x, pk_ux, m=""):
        #print("GSPO_user: Gauthenticate")
        # Prove knowledge of sku linking pk_ux to h (a SoK over message m).
        # Returns None when the certificate or pseudonym check fails.
        h, vk, sig0 = pk_x
        if self.Sig_auth.verify(self.param_sig, self.vkI, [self.g1 * 0, h], vk, sig0) and pk_ux == h * self.sku:
            pi_ux = self.sok.prove(self.param_sok, pk_ux, h, self.sku, m)
            return pi_ux

    def GFinalize(self, auth, pki, m, sig_t, pkv, t):
        #print("GSPO_user: GFinalize")
        # Check every certificate/signature in the chain, then encrypt the
        # issuer handle and build the aggregated proofs. Returns
        # (pk_uv, pi_uv, pi_iuv) on success, None (implicitly) otherwise.
        h_i, vk_i, sig0_i = pki
        h_v, vk_v, sig0_v = pkv
        sig_i, c_it = sig_t
        ti, sigt_i = c_it
        pk_ui = self.GDerive(pki)
        pk_uv = self.GDerive(pkv)
        # Issuer certificate at epoch 0.
        bool_i0 = self.Sig_auth.verify(self.param_sig, self.vkI, [self.g1 * 0, h_i], vk_i, sig0_i)
        #print("bool_i0", bool_i0)
        # Issuer certificate at the claimed epoch t.
        bool_it = self.Sig_auth.verify(self.param_sig, self.vkI, [self.g1 * t, h_i], vk_i, sigt_i)
        #print("bool_it", bool_it)
        # Verifier-side certificate at epoch 0.
        bool_v = self.Sig_auth.verify(self.param_sig, self.vkI, [self.g1 * 0, h_v], vk_v, sig0_v)
        #print("bool_v", bool_v)
        # Issuer's signature over [pk_ui] + m.
        msg_sig_i = [pk_ui]
        msg_sig_i.extend(m)
        bool_sig_i = self.Sig_prov.verify(self.param_sig, vk_i, msg_sig_i, sig_i)
        #print("bool_sig_i", bool_sig_i)
        if bool_i0 and bool_it and bool_v and bool_sig_i and ti == t:
            #print("--- all signatures verify")
            # Encrypt the issuer handle h_i under the authority's key.
            c_pk, r = self.CSEnc.enc(self.param_enc, auth.ek, h_i)
            from proofs_aggreg import prepare_proofs
            verify, pi_iuv = prepare_proofs(auth, self.vkI, pki, pkv, m, sig_t, t,
                                            pk_ui, pk_uv, self.sku, c_pk, auth.ek, r)
            #authentication proof on pk_uv
            pi_uv = self.GAuthenticate(pkv, pk_uv, pi_iuv)
            return pk_uv, pi_uv, pi_iuv
train=False, download=True, transform=transforms.Compose([ transforms.Resize(32), transforms.RandomAffine(rot, translate=(trans, trans)), transforms.ToTensor(), transforms.Normalize((0.5, ), (0.5, )) ])), batch_size=64, shuffle=True) usps_tr = torch.utils.data.DataLoader(USPS( '../data', train=True, download=True, transform=transforms.Compose([ transforms.Resize(32), transforms.RandomAffine(rot, translate=(trans, trans)), transforms.ToTensor(), transforms.Normalize((0.5, ), (0.5, )) ])), batch_size=64, shuffle=True) usps_te = torch.utils.data.DataLoader(USPS( '../data', train=False, download=True, transform=transforms.Compose([ transforms.Resize(32), transforms.RandomAffine(rot, translate=(trans, trans)), transforms.ToTensor(),