def test_misleading_signature():
    # Make sure the verification code fails when there's no metadata but
    # there are commas elsewhere in the visibility label (which could be
    # mistaken for metadata separators)
    table_prefix = 'table'
    num_tables = 5
    pubkey, _ = SIGNATURES[0].test_keys()
    verifier = AccumuloVerifier(pubkey)

    for i in range(0, NUM_ITERS):
        conn = FakeConnection()
        tables = []
        inputs = [(table_prefix + str(random.randint(0, num_tables)),
                   _random_mutation(
                       default_vis=('",%s,%s,"' % (
                           str(random.randint(0, 10000000)),
                           str(random.randint(0, 10000000))))))
                  for _ in range(SIZE)]

        for table, mutation in inputs:
            if not conn.table_exists(table):
                tables.append(table)
                conn.create_table(table)
            conn.write(table, mutation)

        for table in tables:
            for entry in conn.scan(table):
                try:
                    verifier.verify_entry(entry)
                    ok_(False, 'unsigned entry somehow verified')
                except VerificationException as ve:
                    print ve.msg
                    ok_(True, 'success')
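# The tests in this section rely on a _random_mutation() helper that is not
# shown here. The sketch below is hypothetical: it illustrates what such a
# helper plausibly does (build a one-cell mutation with random fields, using
# default_vis as the visibility label), but the real implementation may
# differ in field sizes, cell count, and naming. It assumes the module's
# existing imports of random and pyaccumulo's Mutation.
def _random_mutation_sketch(default_vis=''):
    # One randomly-generated cell; default_vis becomes the cell's
    # visibility label (empty by default, comma-laden in the test above)
    mut = Mutation('row' + str(random.randint(0, 1000000)))
    mut.put(cf='cf' + str(random.randint(0, 100)),
            cq='cq' + str(random.randint(0, 100)),
            cv=default_vis,
            ts=random.randint(0, 1000000),
            val='val' + str(random.randint(0, 1000000)))
    return mut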
def test_no_metadata():
    # Make sure the verification code fails when there's no metadata
    table_prefix = 'table'
    num_tables = 5
    pubkey, _ = SIGNATURES[0].test_keys()
    verifier = AccumuloVerifier(pubkey)

    for i in range(0, NUM_ITERS):
        conn = FakeConnection()
        tables = []
        inputs = [(table_prefix + str(random.randint(0, num_tables)),
                   _random_mutation())
                  for _ in range(SIZE)]

        for table, mutation in inputs:
            if not conn.table_exists(table):
                tables.append(table)
                conn.create_table(table)
            conn.write(table, mutation)

        for table in tables:
            for entry in conn.scan(table):
                try:
                    verifier.verify_entry(entry)
                    ok_(False, 'unsigned entry somehow verified')
                except VerificationException:
                    ok_(True, 'success')
def test_signer_id_and_table():
    # Make sure writing with the signer ID and signing the table work together
    table_prefix = 'table'
    num_tables = 5
    signers = dict((sc.name + 'ID', (sc.test_keys()[0], sc))
                   for sc in SIGNATURES)
    verifier = AccumuloVerifier(DummySignaturePKI())

    for i in range(0, NUM_ITERS):
        all_inputs = []
        tables = []
        conn = FakeConnection()

        for sc in SIGNATURES:
            pubkey, privkey = sc.test_keys()
            signer = AccumuloSigner(privkey, sig_f=sc,
                                    signerID=sc.name + 'ID')
            inputs = [(table_prefix + str(random.randint(0, num_tables)),
                       _random_mutation())
                      for _ in range(SIZE)]
            all_inputs += inputs

            for table, mutation in inputs:
                if not conn.table_exists(table):
                    tables.append(table)
                    conn.create_table(table)
                signer.sign_mutation(mutation, table=table)
                conn.write(table, mutation)

        for table in tables:
            for entry in conn.scan(table):
                try:
                    verifier.verify_entry(entry, table=table)
                except VerificationException as ve:
                    ok_(False, 'entry failed to verify: %s' % ve.msg)
def test_acc_store(self):
    """
    Test that the AccumuloAttrKeyStore correctly returns attributes
    and users on a small, hard-coded test case.
    """
    conn = FakeConnection()
    store = AccumuloAttrKeyStore(conn)
    keys1 = [KeyInfo('attr A', 1, 'metadata', 'keywrap', 0),
             KeyInfo('attr A', 2, 'metadata', 'keywarp', 0),
             KeyInfo('attr B', 23, 'meatdata', 'wheycap', 0)]
    store.batch_insert('user1', keys1)
    keys2 = [KeyInfo('attr B', 23, 'meatdata', 'wheycap', 0),
             KeyInfo('attr C', 12, 'metadata', 'otherwrap', 0),
             KeyInfo('attr D', 10, 'meatdata', 'newwrap', 0)]
    store.batch_insert('user2', keys2)

    self.assertEqual(store.users_by_attribute('attr A'), ['user1'])
    self.assertEqual(set(store.users_by_attribute('attr B')),
                     set(['user1', 'user2']))
    self.assertEqual(store.users_by_attribute('attr C'), ['user2'])
    self.assertEqual(store.users_by_attribute('attr D'), ['user2'])
    self.assertEqual(set(store.attributes_by_user('user1')),
                     set(['attr A', 'attr B']))
    self.assertEqual(set(store.attributes_by_user('user2')),
                     set(['attr B', 'attr C', 'attr D']))
def test_aliasing_acc(self):
    """
    Make sure aliasing isn't a problem (mostly relevant for local maps,
    but testing it on AccumuloAttrKeyStore for completeness)
    """
    # The problem is as follows:
    # 1) call x = users_by_attribute()
    # 2) call delete_user()
    # 3) use x
    conn = FakeConnection()
    store = AccumuloAttrKeyStore(conn)
    keys1 = [KeyInfo('attr A', 1, 'metadata', 'keywrap', 0),
             KeyInfo('attr A', 2, 'metadata', 'keywarp', 0),
             KeyInfo('attr B', 23, 'meatdata', 'wheycap', 0)]
    store.batch_insert('user1', keys1)
    keys2 = [KeyInfo('attr B', 23, 'meatdata', 'wheycap', 0),
             KeyInfo('attr C', 12, 'metadata', 'otherwrap', 0),
             KeyInfo('attr D', 10, 'meatdata', 'newwrap', 0)]
    store.batch_insert('user2', keys2)

    users = store.users_by_attribute('attr B')
    self.assertEqual(set(['user1', 'user2']), set(users))

    store.delete_user('attr B', 'user1')
    store.delete_attr('user1', 'attr B')

    # The list fetched before the deletes must not change under the caller
    self.assertEqual(set(['user1', 'user2']), set(users))
def test_del_acc_store(self):
    """
    Make sure AccumuloAttrKeyStores correctly delete users and attributes.
    """
    conn = FakeConnection()
    store = AccumuloAttrKeyStore(conn)
    keys1 = [KeyInfo('attr A', 1, 'metadata', 'keywrap', 0),
             KeyInfo('attr A', 2, 'metadata', 'keywarp', 0),
             KeyInfo('attr B', 23, 'meatdata', 'wheycap', 0)]
    store.batch_insert('user1', keys1)
    keys2 = [KeyInfo('attr B', 23, 'meatdata', 'wheycap', 0),
             KeyInfo('attr C', 12, 'metadata', 'otherwrap', 0),
             KeyInfo('attr D', 10, 'meatdata', 'newwrap', 0)]
    store.batch_insert('user2', keys2)

    store.delete_user('attr B', 'user1')
    store.delete_attr('user1', 'attr B')

    self.assertEqual(store.users_by_attribute('attr A'), ['user1'])
    self.assertEqual(store.users_by_attribute('attr B'), ['user2'])
    self.assertEqual(store.users_by_attribute('attr C'), ['user2'])
    self.assertEqual(store.users_by_attribute('attr D'), ['user2'])
    self.assertEqual(store.attributes_by_user('user1'), ['attr A'])
    self.assertEqual(set(store.attributes_by_user('user2')),
                     set(['attr B', 'attr C', 'attr D']))
def test_acc_store_many(self):
    """
    Test that the AccumuloAttrKeyStore correctly returns attributes
    and users on randomly-generated data.
    """
    for _ in xrange(self.num_iters):
        conn = FakeConnection()
        store = AccumuloAttrKeyStore(conn)
        users_by_attr = {}
        attrs_by_user = {}

        # Generate some random user IDs
        names = ('user' + str(random.randint(0, 1000000000))
                 for _ in xrange(self.names))
        for name in names:
            if name not in attrs_by_user:
                attrs_by_user[name] = set([])
            name_batch = []

            # Generate some random metadata
            metadatas = ('meta' + str(random.randint(0, 1000000000))
                         for _ in xrange(self.metadatas))
            for metadata in metadatas:
                # Generate some random attributes
                attrs = ('attr' + str(random.randint(0, 1000000000))
                         for _ in xrange(self.attrs))
                for attr in attrs:
                    if attr not in users_by_attr:
                        users_by_attr[attr] = set([])
                    users_by_attr[attr].add(name)
                    attrs_by_user[name].add(attr)

                    # Generate some random versions
                    vers = (random.randint(0, 1000000000)
                            for _ in xrange(self.vers))
                    for vrs in vers:
                        # Generate a random keywrap
                        keywrap = 'key' + str(random.randint(0, 1000000000))
                        info = KeyInfo(attr, vrs, metadata, keywrap, 0)
                        name_batch.append(info)

            store.batch_insert(name, name_batch)

        for attr, users in users_by_attr.iteritems():
            self.assertEqual(users, set(store.users_by_attribute(attr)))
        for user, attrs in attrs_by_user.iteritems():
            self.assertEqual(attrs, set(store.attributes_by_user(user)))
@classmethod
def new(cls, elems, lbound, rbound, coin=BaseCoin(),
        conn_info=ConnInfo('localhost', 42424, 'root', 'secret'),
        table='__ADS_metadata___', elemclass=IntElem):
    """ Create a new skiplist that stores all of its data inside an
        Accumulo instance.

        Arguments:
        cls - the class implementing this class method
        elems - the elements to create the skiplist over
        lbound, rbound - the left and right boundary elements of the list
        coin - the source of randomness to use (see pace.ads.skiplist.coin)
        conn_info - how to connect to the Accumulo instance being used
        table - the name of the table to store the ADS in
        elemclass - the class to use to store the elements in the skiplist
    """
    sl = cls(None, lbound, rbound, coin)

    if conn_info is not None:
        # For connecting to a live Accumulo instance
        conn = Accumulo(host=conn_info.host,
                        port=conn_info.port,
                        user=conn_info.user,
                        password=conn_info.password)
    else:
        # For testing/debug
        conn = FakeConnection()

    sl.conn = conn
    sl.table = table
    sl.elemclass = elemclass

    if not conn.table_exists(table):
        conn.create_table(table)

    right = cls.nodeclass.newnode(sl, None, None, rbound, True)
    left = cls.nodeclass.newnode(sl, None, right, lbound, True)
    sl.root = left

    for elem in elems:
        sl.insert(elem)

    return sl
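# A minimal usage sketch for new(), shown commented out because the concrete
# skiplist subclass name (AccumuloSkipList here) and the IntElem-wrapped
# bounds are assumptions not confirmed by this section. Passing
# conn_info=None exercises the FakeConnection testing path documented above
# instead of opening a live Accumulo connection.
#
#     sl = AccumuloSkipList.new(elems=[3, 1, 4, 5, 9],
#                               lbound=IntElem(0),
#                               rbound=IntElem(100),
#                               conn_info=None,
#                               table='demo_ads')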
def _check_sign_table(sigClass, cfg_file):
    # Make sure including the table's name in the signature works correctly
    table_prefix = 'table'
    num_tables = 5
    pubkey, privkey = sigClass.test_keys()

    for i in range(0, NUM_ITERS):
        conn = FakeConnection()
        conf = new_config(cfg_file, conn)
        signer = AccumuloSigner(privkey, sig_f=sigClass, conf=conf)
        verifier = AccumuloVerifier(pubkey, conf=conf)
        inputs = [(table_prefix + str(random.randint(0, num_tables)),
                   _random_mutation())
                  for _ in range(SIZE)]

        conf.start_batch()
        for table, mutation in inputs:
            if not conn.table_exists(table):
                conn.create_table(table)
            signer.sign_mutation(mutation, table=table)
            conn.write(table, mutation)
        conf.end_batch()

        tables = set(table for table, _ in inputs)

        for table in tables:
            if table != '__metadata_table__':
                for entry in conn.scan(table):
                    try:
                        verifier.verify_entry(entry, table=table)
                    except VerificationException as ve:
                        print ve.msg
                        errmsg = 'entry failed to verify'
                        if ve.cell is not None:
                            errmsg += ':\nrow: %s\nval: %s' % (ve.cell.row,
                                                               ve.cell.val)
                        ok_(False, errmsg)

        # reset the file so it can be reused in the next iteration
        cfg_file.seek(0)
def _check_sign_and_read(sigClass, cfg_file):
    # Make sure writing & reading a signature verifies correctly with no
    # extra features, using a FakeConnection in place of a live Accumulo
    # instance.
    table_prefix = 'table'
    num_tables = 5
    pubkey, privkey = sigClass.test_keys()

    for i in range(0, NUM_ITERS):
        conn = FakeConnection()
        conf = new_config(cfg_file, conn)
        signer = AccumuloSigner(privkey, sig_f=sigClass, conf=conf)
        verifier = AccumuloVerifier(pubkey, conf=conf)
        inputs = [(table_prefix + str(random.randint(0, num_tables)),
                   _random_mutation())
                  for _ in range(SIZE)]

        conf.start_batch()
        for table, mutation in inputs:
            if not conn.table_exists(table):
                conn.create_table(table)
            signer.sign_mutation(mutation)
            conn.write(table, mutation)
        conf.end_batch()

        tables = set(table for table, _ in inputs)

        for table in tables:
            if table != '__metadata_table__':
                for entry in conn.scan(table):
                    try:
                        verifier.verify_entry(entry)
                    except VerificationException:
                        ok_(False, 'entry failed to verify')

        # reset the file so it can be reused in the next iteration
        cfg_file.seek(0)
def test_empty_store(self):
    """
    Make sure each attribute/user store correctly returns the empty list
    when appropriate.
    """
    conn = FakeConnection()
    acc_store = AccumuloAttrKeyStore(conn)
    loc_attr_user_map = LocalAttrUserMap({})
    loc_user_attr_map = LocalUserAttrMap({})

    self.assertEqual(loc_attr_user_map.users_by_attribute('not found'), [])
    self.assertEqual(loc_user_attr_map.attributes_by_user('not found'), [])
    self.assertEqual(acc_store.users_by_attribute('not found'), [])
    self.assertEqual(acc_store.attributes_by_user('not found'), [])

    self.assertEqual(loc_attr_user_map.users_by_attribute(''), [])
    self.assertEqual(loc_user_attr_map.attributes_by_user(''), [])
    self.assertEqual(acc_store.users_by_attribute(''), [])
    self.assertEqual(acc_store.attributes_by_user(''), [])

    loc_attr_user_map2 = LocalAttrUserMap({'a': []})
    self.assertEqual(loc_attr_user_map2.users_by_attribute('a'), [])

    loc_user_attr_map2 = LocalUserAttrMap({'a': []})
    self.assertEqual(loc_user_attr_map2.attributes_by_user('a'), [])
def _run_search(self, config, row, cols, correct_cells):
    ''' Tests the encrypting search functionality
    '''
    # create range & mutations to search for
    mut1 = Mutation('arow')
    mut1.put(cf='cf1', cq='cq1', cv='', ts=1, val='val1')
    mut1.put(cf='cf2', cq='cq2', cv='', ts=2, val='val2')
    mut1.put(cf='cf1', cq='cq1', cv='', ts=3, val='val3')
    mut1.put(cf='cf2', cq='cq3', cv='', ts=4, val='val4')
    mut1.put(cf='cf3', cq='cq4', cv='', ts=5, val='val5')

    mut2 = Mutation('brow')
    mut2.put(cf='cf1', cq='cq1', cv='', ts=6, val='val1')
    mut2.put(cf='cf2', cq='cq2', cv='', ts=7, val='val2')

    ae = AccumuloEncrypt(StringIO(config), self.pki)
    enc_muts1 = ae.encrypt(mut1)
    enc_muts2 = ae.encrypt(mut2)
    enc_row, enc_cols = ae.encrypt_search(row, cols)

    # write mutations along fake connection
    conn = FakeConnection()
    conn.create_table('enc_test')
    for mut in enc_muts1 + enc_muts2:
        conn.write('enc_test', mut)

    # retrieve encrypted cells with search
    dec_cells = []
    for c in conn.scan('enc_test',
                       scanrange=Range(srow=enc_row, erow=enc_row,
                                       sinclude=True, einclude=True),
                       cols=enc_cols):
        dec_cells.append(ae.decrypt(c))

    self.assertEqual(sorted(dec_cells), sorted(correct_cells))
def test_with_accumulo_conn(self):
    ''' Tests the interplay with a fake accumulo connection
    '''
    all_sections = '[row]\n' + \
                   'key_id = table1\n' + \
                   'encryption = Pycrypto_AES_CFB\n' + \
                   '[colQualifier]\n' + \
                   'key_id = table1\n' + \
                   'encryption = Pycrypto_AES_CFB\n' + \
                   '[colFamily]\n' + \
                   'key_id = Pycrypto_AES_CFB\n' + \
                   'encryption = Pycrypto_AES_CFB\n' + \
                   '[colVisibility]\n' + \
                   'key_id = table1\n' + \
                   'encryption = Pycrypto_AES_CFB\n' + \
                   '[value]\n' + \
                   'key_id = Pycrypto_AES_CFB\n' + \
                   'encryption = Pycrypto_AES_CFB'

    # create mutation
    mut = Mutation('row1')
    mut.put(cf='cf1', cq='cq1', cv='cv1', ts=12345, val='val1')
    mut.put(cf='cf2', cq='cq2', cv='', ts=67890, val='val2')

    ae = AccumuloEncrypt(StringIO(all_sections), self.pki)
    enc_muts = ae.encrypt(mut)

    # write mutation along fake connection
    conn = FakeConnection()
    conn.create_table('enc_test')
    conn.write('enc_test', enc_muts[0])
    conn.write('enc_test', enc_muts[1])

    # create ground truth
    conn.create_table('ground')
    conn.write('ground', mut)

    # retrieve encrypted mutation
    dec_cells = []
    for c in conn.scan('enc_test'):
        dec_cells.append(ae.decrypt(c))

    gt_cells = []
    for c in conn.scan('ground'):
        gt_cells.append(c)

    self.assertEqual(sorted(gt_cells), sorted(dec_cells))
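# Note on the config string above (a reading of this test, not a spec): each
# section ([row], [colFamily], [colQualifier], [colVisibility], [value])
# names one part of the Accumulo cell to protect, with 'encryption' choosing
# the cipher and 'key_id' choosing which key the PKI serves. Using
# 'Pycrypto_AES_CFB' as a key_id for colFamily and value appears to work
# because the dummy PKI later in this section registers symmetric keys under
# the algorithm names ("Pycrypto_AES_CFB", ...) as well as under "table1".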
def _attr_gen():
    conn = FakeConnection()
    return AccumuloAttrKeyStore(conn)
def _acc_gen():
    conn = FakeConnection()
    return AccumuloKeyStore(conn)
def __init__(self, conn=None, terms=None):
    """
    Arguments:
    conn - connection to AccumuloKeyStore. Can be a connection to a live
           Accumulo instance. Defaults to a FakeConnection()
    terms - (list) attributes to insert keys for - can be 'a'-'e'.
            Allows for the creation of a 'limited' PKI for demos and
            testing. Defaults to ['a', 'b', 'c', 'd', 'e']

    Note: For key names we recommend using a combination of the algorithm
    being used and the specific table name, along the lines of
    AES_CBC__table__, not the names listed below.
    """
    # Initialize the new FakeConnection here so Python doesn't create a
    # new hidden global variable for the default argument
    if conn is None:
        conn = FakeConnection()
    if terms is None:
        terms = ['a', 'b', 'c', 'd', 'e']

    SYM_KEYS_TO_INSERT = {
        "table1": [(1, b'Sixteen by1e key')],
        "Pycrypto_AES_CFB": [(1, b'Sixteen by1e key'),
                             (2, b'Sixteen by2e key'),
                             (3, b'Sixteen by3e key')],
        "Pycrypto_AES_CBC": [(1, b'Sixteen bb1e key')],
        "Pycrypto_AES_OFB": [(1, b'Sixteen bc1e key'),
                             (2, b'Sixteen bc2e key'),
                             (3, b'Sixteen bc3e key')],
        "Pycrypto_AES_CTR": [(1, b'Sixteen bd1e key')],
        "Pycrypto_AES_GCM": [(1, b'Sixteen be1e key'),
                             (2, b'Sixteen be2e key')],
        "Pycrypto_AES_SIV": [(1, b'Sixteen byte keySixteen byte key')]
    }

    ATTR_KEYS_TO_INSERT = {
        "VIS_Identity": [('a', 1, b'Sixteen bate k1y'),
                         ('a', 2, b'Sixteen bate k2y'),
                         ('a', 3, b'Sixteen bate k3y'),
                         ('b', 1, b'Sixteen bbte k1y'),
                         ('b', 2, b'Sixteen bbte k2y'),
                         ('c', 1, b'Sixteen bcte key'),
                         ('d', 1, b'Sixteen bdte k1y'),
                         ('d', 2, b'Sixteen bdte k2y'),
                         ('d', 3, b'Sixteen bdte k3y'),
                         ('d', 4, b'Sixteen bdte k4y'),
                         ('e', 1, b'Sixteen bete key')],
        "VIS_AES_CFB": [('a', 1, b'Sixteen bate key'),
                        ('b', 2, b'Sixteen bbte k2y'),
                        ('b', 3, b'Sixteen bbte k3y'),
                        ('c', 1, b'Sixteen bcte key'),
                        ('d', 2, b'Sixteen bdte key'),
                        ('d', 3, b'Sixteen bdte key'),
                        ('d', 4, b'Sixteen bdte key'),
                        ('d', 5, b'Sixteen bdte key'),
                        ('e', 1, b'Sixteen bete key')],
        "VIS_AES_CBC": [('a', 1, b'Sixteen bate k1y'),
                        ('a', 2, b'Sixteen bate k2y'),
                        ('a', 3, b'Sixteen bate k3y'),
                        ('a', 5, b'Sixteen bate k5y'),
                        ('b', 1, b'Sixteen bbte k1y'),
                        ('b', 2, b'Sixteen bbte k2y'),
                        ('b', 3, b'Sixteen bbte k3y'),
                        ('c', 3, b'Sixteen bcte k3y'),
                        ('c', 4, b'Sixteen bcte k4y'),
                        ('c', 5, b'Sixteen bcte k5y'),
                        ('d', 1, b'Sixteen bdte k1y'),
                        ('d', 2, b'Sixteen bdte k2y'),
                        ('e', 1, b'Sixteen bete key')],
        "VIS_AES_OFB": [('a', 2, b'Sixteen bate k2y'),
                        ('a', 3, b'Sixteen bate k3y'),
                        ('a', 4, b'Sixteen bate k4y'),
                        ('a', 5, b'Sixteen bate k5y'),
                        ('b', 1, b'Sixteen bbte k1y'),
                        ('b', 2, b'Sixteen bbte k2y'),
                        ('b', 3, b'Sixteen bbte k3y'),
                        ('b', 4, b'Sixteen bbte k4y'),
                        ('c', 2, b'Sixteen bcte k2y'),
                        ('c', 3, b'Sixteen bcte k3y'),
                        ('d', 2, b'Sixteen bdte key'),
                        ('e', 1, b'Sixteen bete key')],
        "VIS_AES_CTR": [('a', 1, b'Sixteen bate k1y'),
                        ('a', 3, b'Sixteen bate k3y'),
                        ('a', 4, b'Sixteen bate k4y'),
                        ('b', 1, b'Sixteen bbte k1y'),
                        ('b', 2, b'Sixteen bbte k3y'),
                        ('b', 3, b'Sixteen bbte k3y'),
                        ('c', 2, b'Sixteen bcte key'),
                        ('d', 1, b'Sixteen bdte key'),
                        ('e', 3, b'Sixteen bete k3y'),
                        ('e', 5, b'Sixteen bete k5y')],
        "VIS_AES_GCM": [('a', 1, b'Sixteen bate key'),
                        ('b', 2, b'Sixteen bbte key'),
                        ('c', 1, b'Sixteen bcte k1y'),
                        ('c', 2, b'Sixteen bcte k2y'),
                        ('c', 3, b'Sixteen bcte k3y'),
                        ('d', 1, b'Sixteen bdte k1y'),
                        ('d', 2, b'Sixteen bdte k2y'),
                        ('d', 4, b'Sixteen bdte k4y'),
                        ('e', 5, b'Sixteen bete key')]
    }

    # remove existing symmetric key tables
    for metadata in SYM_KEYS_TO_INSERT.keys():
        if conn.table_exists(metadata):
            conn.delete_table(metadata)

    # remove existing attribute key tables
    for metadata in ATTR_KEYS_TO_INSERT.keys():
        if conn.table_exists(metadata):
            conn.delete_table(metadata)

    # generate RSA key
    RSA_key = RSA.generate(3072)
    super(DummyEncryptionPKI, self).__init__(conn, 'one', RSA_key)

    # add symmetric keys
    for (algorithm, keys) in SYM_KEYS_TO_INSERT.iteritems():
        for ver, key in keys:
            self._acc_keystore.insert(
                str(self._user_id),
                KeyInfo(attr='',
                        vers=ver,
                        metadata=algorithm,
                        keywrap=key_utils.wrap_key(key, self._rsa_key),
                        keylen=len(key)))

    # add attribute keys
    keys_to_insert = []
    for (algorithm, keys) in ATTR_KEYS_TO_INSERT.iteritems():
        for attr, vers, key in keys:
            if attr in terms:
                keys_to_insert.append(
                    KeyInfo(attr=attr,
                            vers=vers,
                            metadata=algorithm,
                            keywrap=key_utils.wrap_key(key, self._rsa_key),
                            keylen=len(key)))
    self._acc_keystore.batch_insert(str(self._user_id), keys_to_insert)
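# A minimal usage sketch for the DummyEncryptionPKI constructor above, shown
# commented out: build a 'limited' PKI over a FakeConnection that only
# carries attribute keys for the terms 'a' and 'b'. Note that construction
# generates a fresh 3072-bit RSA key, which is relatively slow, so test code
# typically creates one instance and reuses it across cases.
#
#     conn = FakeConnection()
#     pki = DummyEncryptionPKI(conn=conn, terms=['a', 'b'])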