def _check_avoid_aliasing(self, ks):
    """
    Ensure values returned by a key store are snapshots: they must not
    change retroactively when the key store itself is later modified
    (mostly an issue for the dummy implementation).
    """
    # The dangerous sequence of operations:
    #   1) x = get_metadatas()
    #   2) remove_revoked_keys()
    #   3) interact with x
    keys = [KeyInfo(attr, vers, meta, wrap, 0)
            for (attr, vers, meta, wrap) in [
                ('A', 1, 'metadata', 'wrap1'),
                ('A', 2, 'metadata', 'wrap2'),
                ('A', 3, 'metadata', 'wrap3'),
                ('A', 4, 'metadata', 'wrap4'),
                ('A', 1, 'betadata', 'wrap5'),
                ('A', 2, 'betadata', 'wrap6'),
                ('A', 3, 'betadata', 'wrap7'),
                ('B', 1, 'metadata', 'wrap8'),
                ('B', 2, 'metadata', 'wrap9'),
                ('B', 3, 'metadata', 'wrap0')]]
    for user in ('user1', 'user2', 'user3'):
        ks.batch_insert(user, keys)
    metas = ks.get_metadatas('user1', 'A')
    expected = set(['metadata', 'betadata'])
    self.assertEqual(set(metas), expected)
    # Removing keys must not mutate the previously returned collection
    ks.remove_revoked_keys('user1', 'metadata', 'A')
    self.assertEqual(set(metas), expected)
def _check_repeat_cell_key(self, ks):
    """
    Verify that after two consecutive writes to the same ID and
    metadata, the most recently written keywrap is the one returned.

    Arguments:
    self - a simulator of a test object (see DummyTest above)
    ks - a fresh object matching the AbstractKeyStore interface
    """
    first = KeyInfo('', 1, 'metadata', 'keywrap1', 0)
    second = KeyInfo('', 1, 'metadata', 'keywrap2', 0)
    ks.insert('test', first)
    ks.insert('test', second)
    # The second write wins
    self.assertEqual(ks.retrieve('test', '', 1, 'metadata'), 'keywrap2')
def _check_writes_reads(self, ks_gen):
    """
    Make sure reads & writes work with iterated, random data.

    Arguments:
    self - a simulator of a test object (see DummyTest above)
    ks_gen - a function of no arguments that, when called, generates a
             fresh object matching the AbstractKeyStore interface
    """
    for _ in xrange(ITERS):
        ks = ks_gen()
        attr = ''   # no attr
        vers = 1    # static version
        expected = []
        # Insert random (user, metadata) -> keywrap entries
        for _ in xrange(NAMES):
            name = 'user' + str(random.randint(0, 1000000000))
            for _ in xrange(METADATAS):
                metadata = 'meta' + str(random.randint(0, 1000000000))
                keywrap = 'key' + str(random.randint(0, 1000000000))
                expected.append((name, metadata, keywrap))
                ks.insert(name, KeyInfo(attr, vers, metadata, keywrap, 0))
        # Everything inserted must read back unchanged
        for name, metadata, keywrap in expected:
            self.assertEqual(ks.retrieve(name, attr, vers, metadata),
                             keywrap)
def _check_not_found(self, ks):
    """
    Make sure a key store without a matching element raises the
    appropriate exception.

    Arguments:
    self - a simulator of a test object (see DummyTest above)
    ks - a fresh object matching the AbstractKeyStore interface
    """
    def expect(exc_type, fn, *args):
        # Run fn(*args) and insist that it raises exc_type
        try:
            fn(*args)
            self.assertTrue(False, 'failed to raise error')
        except exc_type:
            pass

    # Inserting malformed key info must fail
    expect(PKIStorageError, ks.insert, 'user',
           KeyInfo('invalid', '1', 'version', 'string', 0))
    # Lookups in an empty store must fail
    expect(PKILookupError, ks.retrieve, 'user', 0, 'not', 'found')
    expect(PKILookupError, ks.retrieve, 'user', 0, 'still_not', 'found')
    # Try looking for non-existent data in a real table
    ks.insert('user', KeyInfo('attr', 0, 'meta', 'keywrap', 0))
    expect(PKILookupError, ks.retrieve, 'user', 'newattr', 12, 'meta')
    # Try fetching the latest version of a nonexistent key
    expect(PKILookupError, ks.retrieve_latest_version,
           'nope', 'not', 'found')
    expect(PKILookupError, ks.retrieve_latest_version_number,
           'not', 'found')
def _check_batch_insert_double(self, ks):
    """
    Check that batch adding of attribute keys works with two elements.

    Arguments:
    self - a simulator of a test object (see DummyTest above)
    ks - a fresh object matching the AbstractKeyStore interface
    """
    ks.batch_insert('test', [
        KeyInfo('attr A', 1, 'metadata', 'keywrap', 0),
        KeyInfo('attr A', 2, 'metadata', 'keywarp', 0),
    ])
    # Both versions must be retrievable independently
    self.assertEqual(ks.retrieve('test', 'attr A', 1, 'metadata'),
                     'keywrap')
    self.assertEqual(ks.retrieve('test', 'attr A', 2, 'metadata'),
                     'keywarp')
def _check_remkeys_multi_meta(self, ks):
    """
    Make sure basic key removal works when multiple metadatas exist:
    only the removed (user, metadata, attr) group disappears.
    """
    metadata_keys = [KeyInfo('A', v, 'metadata', 'wrap' + str(v), 0)
                     for v in xrange(1, 5)]
    betadata_keys = [KeyInfo('A', v, 'betadata', 'wrap' + str(v + 4), 0)
                     for v in xrange(1, 4)]
    ks.batch_insert('user', metadata_keys + betadata_keys)
    ks.remove_revoked_keys('user', 'metadata', 'A')
    try:
        ks.batch_retrieve('user', 'metadata', 'A')
        self.assertTrue(False, 'Should fail to retrieve deleted keys')
    except PKILookupError:
        pass
    # Keys under the other metadata must survive untouched
    remaining = ks.batch_retrieve('user', 'betadata', 'A')
    self.assertEqual(set(remaining), set(betadata_keys))
def _check_empty_fields(self, ks):
    """
    Make sure key stores work with empty strings in every field.

    Arguments:
    self - a simulator of a test object (see DummyTest above)
    ks - a fresh object matching the AbstractKeyStore interface
    """
    empty_info = KeyInfo('', 0, '', '', 0)
    ks.insert('', empty_info)
    self.assertEqual(ks.retrieve('', '', 0, ''), '')
def _check_remkeys(self, ks):
    """
    Make sure basic key removal functionality works: once a group of
    keys is removed, retrieving it raises PKILookupError.
    """
    keys = [KeyInfo('A', v, 'metadata', 'wrap' + str(v), 0)
            for v in xrange(1, 5)]
    ks.batch_insert('user', keys)
    ks.remove_revoked_keys('user', 'metadata', 'A')
    try:
        ks.batch_retrieve('user', 'metadata', 'A')
        self.assertTrue(False, 'Should fail to retrieve deleted keys')
    except PKILookupError:
        pass
def _check_most_recent_num(self, ks):
    """
    Check that retrieve_latest_version_number returns the highest
    version even when keys are inserted in shuffled order.
    """
    keys = [KeyInfo('A', v, 'metadata', 'wrap' + str(v), 0)
            for v in xrange(1, 11)]
    random.shuffle(keys)
    ks.batch_insert('user', keys)
    latest = ks.retrieve_latest_version_number(metadata='metadata',
                                               attr='A')
    self.assertEqual(latest, 10)
def _check_write_read_attr(self, ks):
    """
    Make sure that when we write something, we can read it back.

    Arguments:
    self - a simulator of a test object (see DummyTest above)
    ks - a fresh object matching the AbstractKeyStore interface
    """
    info = KeyInfo('attr A', 1, 'metadata', 'keywrap', 0)
    ks.insert('test', info)
    self.assertEqual(ks.retrieve('test', 'attr A', 1, 'metadata'),
                     'keywrap')
def test_acc_store_many(self):
    """
    Test that the AccumuloAttrKeyStore correctly returns attributes and
    users on randomly-generated data.
    """
    for _ in xrange(self.num_iters):
        conn = FakeConnection()
        store = AccumuloAttrKeyStore(conn)
        # Expected reverse indices, built alongside the inserts
        users_by_attr = {}
        attrs_by_user = {}
        # Generate some random user IDs
        names = ('user'+str(random.randint(0,1000000000))
                 for _ in xrange(self.names))
        for name in names:
            if name not in attrs_by_user:
                attrs_by_user[name] = set([])
            name_batch = []
            # Generate some random metadata
            metadatas = ('meta'+str(random.randint(0,1000000000))
                         for _ in xrange(self.metadatas))
            for metadata in metadatas:
                # Generate some random attributes
                attrs = ('attr'+str(random.randint(0,1000000000))
                         for _ in xrange(self.attrs))
                for attr in attrs:
                    if attr not in users_by_attr:
                        users_by_attr[attr] = set([])
                    users_by_attr[attr].add(name)
                    attrs_by_user[name].add(attr)
                    # Generate some random versions
                    vers = (random.randint(0,1000000000)
                            for _ in xrange(self.vers))
                    for vrs in vers:
                        # Generate a random keywrap
                        keywrap = 'key'+str(random.randint(0,1000000000))
                        info = KeyInfo(attr, vrs, metadata, keywrap, 0)
                        name_batch.append(info)
            # All of this user's keys are inserted in one batch
            store.batch_insert(name, name_batch)
        # The store's attribute/user indices must match what we built
        for attr, users in users_by_attr.iteritems():
            self.assertEqual(users, set(store.users_by_attribute(attr)))
        for user, attrs in attrs_by_user.iteritems():
            self.assertEqual(attrs, set(store.attributes_by_user(user)))
def test_acc_store(self):
    """
    Test that the AccumuloAttrKeyStore correctly returns attributes and
    users on a small, hard-coded test case.
    """
    store = AccumuloAttrKeyStore(FakeConnection())
    store.batch_insert('user1', [
        KeyInfo('attr A', 1, 'metadata', 'keywrap', 0),
        KeyInfo('attr A', 2, 'metadata', 'keywarp', 0),
        KeyInfo('attr B', 23, 'meatdata', 'wheycap', 0),
    ])
    store.batch_insert('user2', [
        KeyInfo('attr B', 23, 'meatdata', 'wheycap', 0),
        KeyInfo('attr C', 12, 'metadata', 'otherwrap', 0),
        KeyInfo('attr D', 10, 'meatdata', 'newwrap', 0),
    ])
    # Per-attribute user lists
    self.assertEqual(store.users_by_attribute('attr A'), ['user1'])
    self.assertEqual(set(store.users_by_attribute('attr B')),
                     set(['user1', 'user2']))
    self.assertEqual(store.users_by_attribute('attr C'), ['user2'])
    self.assertEqual(store.users_by_attribute('attr D'), ['user2'])
    # Per-user attribute lists
    self.assertEqual(set(store.attributes_by_user('user1')),
                     set(['attr A', 'attr B']))
    self.assertEqual(set(store.attributes_by_user('user2')),
                     set(['attr B', 'attr C', 'attr D']))
def test_del_acc_store(self):
    """
    Make sure AccumuloAttrKeyStores correctly delete users and
    attributes.
    """
    store = AccumuloAttrKeyStore(FakeConnection())
    store.batch_insert('user1', [
        KeyInfo('attr A', 1, 'metadata', 'keywrap', 0),
        KeyInfo('attr A', 2, 'metadata', 'keywarp', 0),
        KeyInfo('attr B', 23, 'meatdata', 'wheycap', 0),
    ])
    store.batch_insert('user2', [
        KeyInfo('attr B', 23, 'meatdata', 'wheycap', 0),
        KeyInfo('attr C', 12, 'metadata', 'otherwrap', 0),
        KeyInfo('attr D', 10, 'meatdata', 'newwrap', 0),
    ])
    # Remove user1's membership in attr B from both directions
    store.delete_user('attr B', 'user1')
    store.delete_attr('user1', 'attr B')
    self.assertEqual(store.users_by_attribute('attr A'), ['user1'])
    self.assertEqual(store.users_by_attribute('attr B'), ['user2'])
    self.assertEqual(store.users_by_attribute('attr C'), ['user2'])
    self.assertEqual(store.users_by_attribute('attr D'), ['user2'])
    self.assertEqual(store.attributes_by_user('user1'), ['attr A'])
    self.assertEqual(set(store.attributes_by_user('user2')),
                     set(['attr B', 'attr C', 'attr D']))
def test_aliasing_acc(self):
    """
    Make sure aliasing isn't a problem (mostly relevant for local maps,
    but testing it on AccumuloAttrKeyStore for completeness)
    """
    # Problematic sequence:
    #   1) x = users_by_attribute()
    #   2) delete_user()
    #   3) use x
    store = AccumuloAttrKeyStore(FakeConnection())
    store.batch_insert('user1', [
        KeyInfo('attr A', 1, 'metadata', 'keywrap', 0),
        KeyInfo('attr A', 2, 'metadata', 'keywarp', 0),
        KeyInfo('attr B', 23, 'meatdata', 'wheycap', 0),
    ])
    store.batch_insert('user2', [
        KeyInfo('attr B', 23, 'meatdata', 'wheycap', 0),
        KeyInfo('attr C', 12, 'metadata', 'otherwrap', 0),
        KeyInfo('attr D', 10, 'meatdata', 'newwrap', 0),
    ])
    users = store.users_by_attribute('attr B')
    expected = set(['user1', 'user2'])
    self.assertEqual(expected, set(users))
    store.delete_user('attr B', 'user1')
    store.delete_attr('user1', 'attr B')
    # The previously fetched list must be unaffected by the deletions
    self.assertEqual(expected, set(users))
def _check_batch_insert_many(self, ks_gen):
    """
    Check that batch adding attribute keys works for randomly generated
    data.

    Arguments:
    self - a simulator of a test object (see DummyTest above)
    ks_gen - a function of no arguments that, when called, generates a
             fresh object matching the AbstractKeyStore interface
    """
    for _ in xrange(ITERS):
        values = []
        ks = ks_gen()
        # Generate some random user IDs
        names = ('user' + str(random.randint(0, 1000000000))
                 for _ in xrange(NAMES))
        for name in names:
            name_batch = []
            # Generate some random metadata
            metadatas = ('meta' + str(random.randint(0, 1000000000))
                         for _ in xrange(METADATAS))
            for metadata in metadatas:
                # Generate some random attributes
                attrs = ('attr' + str(random.randint(0, 1000000000))
                         for _ in xrange(ATTRS))
                for attr in attrs:
                    # Generate some random versions
                    vers = (random.randint(0, 1000000000)
                            for _ in xrange(VERS))
                    for vrs in vers:
                        # Generate a random keywrap
                        keywrap = 'key' + str(random.randint(0, 1000000000))
                        info = KeyInfo(attr, vrs, metadata, keywrap, 0)
                        # Track expectations alongside the batch
                        values.append((name, info))
                        name_batch.append(info)
            # One batch insert per user
            ks.batch_insert(name, name_batch)
        # Every batched-in key must be retrievable afterwards
        for (usr, keyinfo) in values:
            self.assertEqual(
                ks.retrieve(usr, keyinfo.attr, keyinfo.vers,
                            keyinfo.metadata),
                keyinfo.keywrap)
def retrieve_info(self, userid, attr, vers, metadata):
    """
    Attempt to retrieve a wrapped key from the key store.

    Arguments:
    self - the KeyStore object being retrieved from
    userid : string - the ID of the user for whom the key is wrapped
    attr : string - the attribute for the key being retrieved, if any.
           If this field is the empty string '', it denotes that this
           is not an attribute key.
    vers : integer - the version identifier for the key
    metadata : string - metadata about the key (e.g. the mode of
           operation with which it is intended to be used)

    Returns:
    keyinfo - the KeyInfo object corresponding to the provided values

    Raises:
    PKILookupError - when no key is found
    """
    # BUGFIX: use isinstance() rather than an exact type() check.
    # Version numbers elsewhere in this module may exceed sys.maxint
    # (becoming long in Python 2), and the old `type(vers) is IntType`
    # test wrongly rejected them.
    if not isinstance(vers, (int, long)):
        raise PKILookupError('version to search for must be an integer')
    # One table per metadata value
    tabname = metadata
    if not self.conn.table_exists(tabname):
        raise PKILookupError('No such table %s' % tabname)
    # Key cells are addressed by (row=user, cf=attr, cq=version)
    cell = get_single_entry(self.conn, tabname, row=userid, cf=attr,
                            cq=str(vers))
    if cell is None:
        raise PKILookupError('No keywrap found')
    # Cell value is '<keywrap>,<keylen>'; rsplit tolerates commas
    # inside the keywrap itself
    keywrap, raw_keylen = cell.val.rsplit(',', 1)
    try:
        keylen = int(raw_keylen)
    except ValueError:
        raise PKILookupError('Error: found non-integer key length')
    return KeyInfo(attr, vers, metadata, keywrap, keylen)
def _check_overlap_values_attr(self, ks_gen):
    """
    Make sure elements with some of the same identifying information
    (user ID, metadata, attribute, etc) are still correctly stored.
    Includes attribute keys.

    Arguments:
    self - a simulator of a test object (see DummyTest above)
    ks_gen - a function of no arguments that, when called, generates a
             fresh object matching the AbstractKeyStore interface
    """
    for _ in xrange(ITERS):
        values = []
        ks = ks_gen()
        # Generate some random user IDs
        names = [
            'user' + str(random.randint(0, 1000000000))
            for _ in xrange(NAMES)
        ]
        # Generate some random metadata
        metadatas = [
            'meta' + str(random.randint(0, 1000000000))
            for _ in xrange(METADATAS)
        ]
        # Generate some random attrs
        attrs = [
            'attr' + str(random.randint(0, 1000000000))
            for _ in xrange(ATTRS)
        ]
        # Generate some random versions
        verss = [random.randint(0, 1000000000) for _ in xrange(VERS)]
        # Insert the full cross product, so every pair of entries
        # shares at least one identifying field
        for name in names:
            for metadata in metadatas:
                for attr in attrs:
                    for vrs in verss:
                        keywrap = 'key_' + name + metadata + attr + str(vrs)
                        values.append((name, attr, vrs, metadata, keywrap))
                        ks.insert(name,
                                  KeyInfo(attr, vrs, metadata, keywrap, 0))
        # Every inserted combination must come back intact
        for (name, attr, vrs, metadata, keywrap) in values:
            self.assertEqual(ks.retrieve(name, attr, vrs, metadata),
                             keywrap)
def _check_most_recent_num_many(self, ks_gen):
    """
    Check retrieve_latest_version_number against many random (possibly
    very large) version numbers.
    """
    user = '******'
    metadata = 'metadata'
    attr = 'A'
    for _ in xrange(ITERS):
        ks = ks_gen()
        versions = []
        for _ in xrange(50):
            vnum = random.randint(0, 10000000000000)
            versions.append(vnum)
            ks.insert(user,
                      KeyInfo(attr, vnum, metadata, 'wrap' + str(vnum), 0))
        # The reported latest version must be the maximum inserted
        self.assertEqual(ks.retrieve_latest_version_number(metadata, attr),
                         max(versions))
def initialize_users(self, users, keystore):
    """
    Generates users' keys, wraps them with their public keys, and stores
    the keywraps and associated info in the key store.

    Arguments:
    users ({string: (_RSAobj, [(string, string, string, integer)])}) -
        a dictionary mapping user IDs to (RSA_pk, info) tuples, where
        RSA_pk is the user's RSA public key, and info is a list of
        (attr, vers, metadata, keylen) tuples describing the attribute,
        version, metadata, and key length (in bytes) of the keys to
        generate, wrap, and store. Note: attr, vers, and metadata
        strings must not contain the '|' character.
    keystore (AbstractKeyStore) - the key store to be written to
    """
    for userid, (RSA_pk, info) in users.iteritems():
        batch = []
        for attr, vers, metadata, keylen in info:
            # Generate the secret key, then wrap it for this user
            secret = self._generate_key(attr, vers, metadata, keylen)
            wrapped = utils.wrap_key(secret, RSA_pk)
            batch.append(KeyInfo(attr, vers, metadata, wrapped, keylen))
        # One batch insert per user
        keystore.batch_insert(userid, batch)
def _check_overlap_values(self, ks_gen):
    """
    Make sure elements with some of the same identifying information
    (user ID, metadata, etc) are still correctly stored.

    Arguments:
    self - a simulator of a test object (see DummyTest above)
    ks_gen - a function of no arguments that, when called, generates a
             fresh object matching the AbstractKeyStore interface
    """
    for _ in xrange(ITERS):
        ks = ks_gen()
        attr = ''
        vers = 1
        # Generate some random user IDs and metadata up front so they
        # get reused across inserts (the overlap under test)
        names = ['key' + str(random.randint(0, 1000000000))
                 for _ in xrange(NAMES)]
        metadatas = ['meta' + str(random.randint(0, 1000000000))
                     for _ in xrange(METADATAS)]
        expected = []
        for name in names:
            for metadata in metadatas:
                # Generate a random keywrap per (name, metadata) pair
                keywrap = 'key' + str(random.randint(0, 1000000000))
                expected.append((name, metadata, keywrap))
                ks.insert(name, KeyInfo(attr, vers, metadata, keywrap, 0))
        for name, metadata, keywrap in expected:
            self.assertEqual(ks.retrieve(name, attr, vers, metadata),
                             keywrap)
def _check_batch_insert_several(self, ks_gen):
    """
    Check that batch adding of attribute keys works with several
    related elements with a few edge cases.

    Arguments:
    self - a simulator of a test object (see DummyTest above)
    ks_gen - a function of no arguments that, when called, generates a
             fresh object matching the AbstractKeyStore interface
    """
    def check(ks, attr, vers, metadata, expected):
        # Assert a single retrieval result for user 'test'
        self.assertEqual(ks.retrieve('test', attr, vers, metadata),
                         expected)

    # Case 1: two attr A keys, one attr B key
    ks = ks_gen()
    ks.batch_insert('test', [
        KeyInfo('attr A', 1, 'metadata', 'keywrap', 0),
        KeyInfo('attr A', 2, 'metadata', 'keywarp', 0),
        KeyInfo('attr B', 23, 'meatdata', 'wheycap', 0),
    ])
    check(ks, 'attr A', 1, 'metadata', 'keywrap')
    check(ks, 'attr A', 2, 'metadata', 'keywarp')
    check(ks, 'attr B', 23, 'meatdata', 'wheycap')

    # Case 2: attr B key has the same version string as an attr A key
    ks = ks_gen()
    ks.batch_insert('test', [
        KeyInfo('attr A', 1, 'metadata', 'keywrap', 0),
        KeyInfo('attr A', 2, 'metadata', 'keywarp', 0),
        KeyInfo('attr B', 1, 'metadata', 'newwrap', 0),
    ])
    check(ks, 'attr A', 1, 'metadata', 'keywrap')
    check(ks, 'attr A', 2, 'metadata', 'keywarp')
    check(ks, 'attr B', 1, 'metadata', 'newwrap')
def _check_get_metas_remove(self, ks):
    """Test interaction of get_metadatas and remove_revoked_keys"""
    keys = [
        KeyInfo('A', 1, 'meta1', 'wrap1', 0),
        KeyInfo('A', 1, 'meta1', 'wrap2', 0),
        KeyInfo('A', 1, 'meta2', 'wrap3', 0),
        KeyInfo('A', 1, 'meta2', 'wrap4', 0),
        KeyInfo('B', 1, 'meta1', 'wrap5', 0),
        KeyInfo('B', 1, 'meta1', 'wrap6', 0)
    ]
    ks.batch_insert('user1', keys)
    ks.batch_insert('user2', keys)
    both = set(['meta1', 'meta2'])
    only_meta1 = set(['meta1'])
    self.assertEqual(ks.get_metadatas('user1', 'A'), both)
    self.assertEqual(ks.get_metadatas('user2', 'A'), both)
    self.assertEqual(ks.get_metadatas('user1', 'B'), only_meta1)
    self.assertEqual(ks.get_metadatas('user2', 'B'), only_meta1)
    # Remove a set of revoked keys and make sure the metadata correctly
    # reflects this change -- for the affected user/attr pair only
    ks.remove_revoked_keys('user1', 'meta1', 'A')
    self.assertEqual(ks.get_metadatas('user1', 'A'), set(['meta2']))
    self.assertEqual(ks.get_metadatas('user2', 'A'), both)
    self.assertEqual(ks.get_metadatas('user1', 'B'), only_meta1)
    self.assertEqual(ks.get_metadatas('user2', 'B'), only_meta1)
def _check_get_metas(self, ks):
    """Test basic get_metadatas functionality"""
    # Test 1: one user, several metadatas under one attribute
    four_metas = [KeyInfo('A', 1, 'meta' + str(i), 'wrap' + str(i), 0)
                  for i in xrange(1, 5)]
    expected_four = set(['meta' + str(i) for i in xrange(1, 5)])
    ks.batch_insert('user1', four_metas)
    self.assertEqual(ks.get_metadatas('user1', 'A'), expected_four)
    # Test 2: add another user
    ks.batch_insert('user2', four_metas)
    # Make sure old metadata info is unchanged
    self.assertEqual(ks.get_metadatas('user1', 'A'), expected_four)
    self.assertEqual(ks.get_metadatas('user2', 'A'), expected_four)
    # Test 3: add another attribute
    two_metas = [KeyInfo('B', 1, 'meta' + str(i), 'wrap' + str(i), 0)
                 for i in xrange(1, 3)]
    ks.batch_insert('user1', two_metas)
    self.assertEqual(ks.get_metadatas('user1', 'B'),
                     set(['meta' + str(i) for i in xrange(1, 3)]))
def batch_retrieve(self, userid, metadata, attr=None):
    """
    Fetch all of a user's keys at once. Optionally, fetch only their
    keys either for a specified attribute or with no attribute at all.

    Arguments:
    self - the KeyStore object being read from
    userid : string - the ID of the user whose keys to fetch
    metadata : string - the metadata of the keys to search for
    attr : optional string - the attribute to search for. Default
           value: None. If this argument is None, this method should
           return all of the given user's keys. If this argument is the
           empty string, it should return that user's non-attribute
           keys. If this argument is a non-empty string, it should
           return all of that user's keys for that attribute, including
           all versions and metadata options.

    Returns:
    [KeyInfo] - a non-empty list of KeyInfo objects (that is,
        (attr, vers, metadata, keywrap, keylen) tuples). If the attr
        argument was None, the attr field of each tuple will be the
        attribute corresponding to the returned version, metadata, and
        keywrap; otherwise, the attr field of each tuple will be equal
        to the attr argument.

    Raises:
    PKILookupError - if there is no information to be returned
    """
    tabname = metadata
    if not self.conn.table_exists(tabname):
        raise PKILookupError('Error: no such table %s' % tabname)
    if attr is None:
        # Get everything!
        scan_range = Range(srow=userid, erow=userid)
    else:
        # Get only things from the corresponding column family
        scan_range = Range(srow=userid, erow=userid, scf=attr, ecf=attr)
    ret = []
    for c in self.conn.scan(tabname, scan_range):
        try:
            vers = int(c.cq)
        except ValueError:
            # BUGFIX: previously raised PKILookupException, a name that
            # does not exist; reaching this branch crashed with a
            # NameError instead of the intended lookup error.
            raise PKILookupError('Retrieved version must be int')
        # Cell value is '<keywrap>,<keylen>'; rsplit tolerates commas
        # inside the keywrap itself
        keywrap, raw_keylen = c.val.rsplit(',', 1)
        try:
            keylen = int(raw_keylen)
        except ValueError:
            raise PKILookupError('Error: found non-integer key length')
        ret.append(KeyInfo(metadata=metadata, attr=c.cf, vers=vers,
                           keywrap=keywrap, keylen=keylen))
    if not ret:
        # If we found no elements, that's an error
        raise PKILookupError(
            'Error: no results found for batch key retrieval')
    else:
        return ret
def test_revocation_all_attrs(self):
    """
    Tests that revoking all of a user's attributes removes the
    appropriate entries from the keystore and the user/attr maps and
    generates wraps of new keys for the correct set of users.
    """
    keygen = KeyGen('Sixteen byte key')
    ks = DummyKeyStore()
    # Forward (attr -> users) and reverse (user -> attrs) maps
    attr_user_dict = {
        'A': ['user1', 'user2', 'user3'],
        'B': ['user1', 'user2'],
        'C': ['user1', 'user3']
    }
    attr_user_map = LocalAttrUserMap(attr_user_dict)
    user_attr_dict = {
        'user1': ['A', 'B', 'C'],
        'user2': ['A', 'B'],
        'user3': ['A', 'C']
    }
    user_attr_map = LocalUserAttrMap(user_attr_dict)
    # Generate an RSA keypair for each of the three users
    user_sks = {}
    user_pks = {}
    for i in range(1, 4):
        userid = 'user' + str(i)
        RSA_key = RSA.generate(3072)
        user_sks[userid] = RSA_key
        RSA_pk = RSA_key.publickey()
        user_pks[userid] = RSA_pk
    # Initial keystore contents; all keys are 16 bytes, version 1
    key_infos = {
        'user1': [
            KeyInfo('A', 1, 'meta1', 'keywrap1', 16),
            KeyInfo('A', 1, 'meta2', 'keywrap2', 16),
            KeyInfo('B', 1, 'meta1', 'keywrap4', 16),
            KeyInfo('B', 1, 'meta2', 'keywrap5', 16),
            KeyInfo('C', 1, 'meta1', 'keywrap6', 16),
            KeyInfo('C', 1, 'meta2', 'keywrap7', 16)
        ],
        'user2': [
            KeyInfo('A', 1, 'meta2', 'keywrap8', 16),
            KeyInfo('A', 1, 'meta3', 'keywrap9', 16),
            KeyInfo('B', 1, 'meta1', 'keywrap10', 16),
            KeyInfo('B', 1, 'meta2', 'keywrap11', 16)
        ],
        'user3': [
            KeyInfo('A', 1, 'meta1', 'keywrap12', 16),
            KeyInfo('A', 1, 'meta2', 'keywrap13', 16),
            KeyInfo('A', 1, 'meta3', 'keywrap14', 16),
            KeyInfo('C', 1, 'meta1', 'keywrap15', 16),
            KeyInfo('C', 1, 'meta2', 'keywrap16', 16)
        ]
    }
    for user in key_infos:
        ks.batch_insert(user, key_infos[user])
    #revoke all attributes from a user
    keygen.revoke_all_attrs('user1', ks, attr_user_map, user_attr_map,
                            user_pks)
    #check that keywraps for revoked user were removed from keystore
    for i in range(1, 3):
        self.assertRaises(PKILookupError, ks.retrieve, 'user1', 'A', 1,
                          'meta' + str(i))
        self.assertRaises(PKILookupError, ks.retrieve, 'user1', 'B', 1,
                          'meta' + str(i))
        self.assertRaises(PKILookupError, ks.retrieve, 'user1', 'C', 1,
                          'meta' + str(i))
    #check that updated user/attr maps are correct
    self.assertEqual(set(attr_user_map.users_by_attribute('A')),
                     set(['user2', 'user3']))
    self.assertEqual(attr_user_map.users_by_attribute('B'), ['user2'])
    self.assertEqual(attr_user_map.users_by_attribute('C'), ['user3'])
    self.assertEqual(user_attr_map.attributes_by_user('user1'), [])
    self.assertEqual(set(user_attr_map.attributes_by_user('user2')),
                     set(['A', 'B']))
    self.assertEqual(set(user_attr_map.attributes_by_user('user3')),
                     set(['A', 'C']))
    #check that metadatas for revoked attrs have correct version numbers
    # (bumped to 2 where another user shares the attr/meta, untouched
    # where only unaffected users hold the key)
    self.assertEqual(ks.retrieve_latest_version_number('meta1', 'A'), 2)
    self.assertEqual(ks.retrieve_latest_version_number('meta2', 'A'), 2)
    self.assertEqual(ks.retrieve_latest_version_number('meta3', 'A'), 1)
    self.assertEqual(ks.retrieve_latest_version_number('meta1', 'B'), 2)
    self.assertEqual(ks.retrieve_latest_version_number('meta2', 'B'), 2)
    #check that other users with revoked attrs got new keywraps that decrypt
    #to keys of the correct length
    kw_2_2_A = ks.retrieve_latest_version('user2', 'meta2', 'A').keywrap
    kw_3_1_A = ks.retrieve_latest_version('user3', 'meta1', 'A').keywrap
    kw_3_2_A = ks.retrieve_latest_version('user3', 'meta2', 'A').keywrap
    kw_2_1_B = ks.retrieve_latest_version('user2', 'meta1', 'B').keywrap
    kw_2_2_B = ks.retrieve_latest_version('user2', 'meta2', 'B').keywrap
    kw_3_1_C = ks.retrieve_latest_version('user3', 'meta1', 'C').keywrap
    kw_3_2_C = ks.retrieve_latest_version('user3', 'meta2', 'C').keywrap
    self.assertEqual(len(utils.unwrap_key(kw_2_2_A, user_sks['user2'])), 16)
    self.assertEqual(len(utils.unwrap_key(kw_3_1_A, user_sks['user3'])), 16)
    self.assertEqual(len(utils.unwrap_key(kw_3_2_A, user_sks['user3'])), 16)
    self.assertEqual(len(utils.unwrap_key(kw_2_1_B, user_sks['user2'])), 16)
    self.assertEqual(len(utils.unwrap_key(kw_2_2_B, user_sks['user2'])), 16)
    self.assertEqual(len(utils.unwrap_key(kw_3_1_C, user_sks['user3'])), 16)
    self.assertEqual(len(utils.unwrap_key(kw_3_2_C, user_sks['user3'])), 16)
    #revoke all attributes from a user and update some key lengths
    keylens = {'meta2': 24}
    keygen.revoke_all_attrs('user2', ks, attr_user_map, user_attr_map,
                            user_pks, keylens)
    #check that new keys for other users have correct key lengths
    # (meta2 resized to 24 bytes; meta3 keeps its old 16-byte length)
    kw_3_2_A = ks.retrieve_latest_version('user3', 'meta2', 'A').keywrap
    kw_3_3_A = ks.retrieve_latest_version('user3', 'meta3', 'A').keywrap
    self.assertEqual(len(utils.unwrap_key(kw_3_2_A, user_sks['user3'])), 24)
    self.assertEqual(len(utils.unwrap_key(kw_3_3_A, user_sks['user3'])), 16)
def _check_remkeys_random(self, ks_gen):
    """
    Make sure basic key removal functionality works when the dataset is
    heterogenous and there are multiple remove operations.
    """
    for _ in xrange(ITERS):
        ks = ks_gen()
        values = []
        # Generate some random user IDs
        names = ('user' + str(random.randint(0, 1000000000))
                 for _ in xrange(NAMES))
        for name in names:
            to_insert = []
            name_batch = []
            # Generate some random metadata
            metadatas = ('meta' + str(random.randint(0, 1000000000))
                         for _ in xrange(METADATAS))
            for metadata in metadatas:
                meta_batch = []
                # Generate some random attributes
                attrs = ('attr' + str(random.randint(0, 1000000000))
                         for _ in xrange(ATTRS))
                for attr in attrs:
                    attr_batch = []
                    # Generate some random versions
                    vers = (random.randint(0, 1000000000)
                            for _ in xrange(VERS))
                    for vrs in vers:
                        # Generate a random keywrap
                        keywrap = 'key' + str(random.randint(0, 1000000000))
                        info = KeyInfo(attr, vrs, metadata, keywrap, 0)
                        to_insert.append(info)
                        attr_batch.append(info)
                    # Remember the grouping so each (user, metadata,
                    # attr) batch can be removed/verified independently
                    meta_batch.append((attr, attr_batch))
                name_batch.append((metadata, meta_batch))
            values.append((name, name_batch))
            ks.batch_insert(name, to_insert)
        # Randomly remove about half of the groups; removed groups must
        # raise on retrieval, surviving groups must be intact
        for name, name_batch in values:
            for metadata, meta_batch in name_batch:
                for attr, infos in meta_batch:
                    # Flip a coin to determine whether we remove this batch
                    if random.randint(0, 1) == 0:
                        ks.remove_revoked_keys(name, metadata, attr)
                        try:
                            res = ks.batch_retrieve(name, metadata, attr)
                            self.assertTrue(
                                False,
                                'Should fail to retrieve deleted keys')
                        except PKILookupError:
                            pass
                    else:
                        res = ks.batch_retrieve(name, metadata, attr)
                        self.assertEqual(set(res), set(infos))
def _check_remkeys_all(self, ks):
    """
    Make sure basic key removal functionality works when the dataset is
    heterogenous and there are multiple remove operations.
    """
    # keys[:4]  -> ('A', 'metadata'), keys[4:7] -> ('A', 'betadata'),
    # keys[7:]  -> ('B', 'metadata')
    keys = [
        KeyInfo('A', 1, 'metadata', 'wrap1', 0),
        KeyInfo('A', 2, 'metadata', 'wrap2', 0),
        KeyInfo('A', 3, 'metadata', 'wrap3', 0),
        KeyInfo('A', 4, 'metadata', 'wrap4', 0),
        KeyInfo('A', 1, 'betadata', 'wrap5', 0),
        KeyInfo('A', 2, 'betadata', 'wrap6', 0),
        KeyInfo('A', 3, 'betadata', 'wrap7', 0),
        KeyInfo('B', 1, 'metadata', 'wrap8', 0),
        KeyInfo('B', 2, 'metadata', 'wrap9', 0),
        KeyInfo('B', 3, 'metadata', 'wrap0', 0)
    ]
    ks.batch_insert('user1', keys)
    ks.batch_insert('user2', keys)
    ks.batch_insert('user3', keys)
    # Remove user1's ('metadata', 'A') group only
    ks.remove_revoked_keys('user1', 'metadata', 'A')
    try:
        res = ks.batch_retrieve('user1', 'metadata', 'A')
        self.assertTrue(False, 'Should fail to retrieve deleted keys')
    except PKILookupError:
        pass
    # user1's other groups and all of user2's keys must be untouched
    res = ks.batch_retrieve('user1', 'betadata', 'A')
    self.assertEqual(set(res), set(keys[4:7]))
    res = ks.batch_retrieve('user1', 'metadata', 'B')
    self.assertEqual(set(res), set(keys[7:]))
    res = ks.batch_retrieve('user2', 'metadata', 'A')
    self.assertEqual(set(res), set(keys[:4]))
    res = ks.batch_retrieve('user2', 'betadata', 'A')
    self.assertEqual(set(res), set(keys[4:7]))
    res = ks.batch_retrieve('user2', 'metadata', 'B')
    self.assertEqual(set(res), set(keys[7:]))
    # Now also remove user2's ('metadata', 'A') group
    ks.remove_revoked_keys('user2', 'metadata', 'A')
    try:
        res = ks.batch_retrieve('user1', 'metadata', 'A')
        self.assertTrue(False, 'Should fail to retrieve deleted keys')
    except PKILookupError:
        pass
    try:
        res = ks.batch_retrieve('user2', 'metadata', 'A')
        self.assertTrue(False, 'Should fail to retrieve deleted keys')
    except PKILookupError:
        pass
    # Remaining groups for both users are still intact
    res = ks.batch_retrieve('user1', 'betadata', 'A')
    self.assertEqual(set(res), set(keys[4:7]))
    res = ks.batch_retrieve('user1', 'metadata', 'B')
    self.assertEqual(set(res), set(keys[7:]))
    res = ks.batch_retrieve('user2', 'betadata', 'A')
    self.assertEqual(set(res), set(keys[4:7]))
    res = ks.batch_retrieve('user2', 'metadata', 'B')
    self.assertEqual(set(res), set(keys[7:]))
def revoke(self, userid, attr, keystore, attr_user_map, user_attr_map,
           user_pks, metas_keylens=None):
    """
    Revoke an attribute from a user.

    Specifically, delete all of the user's keys for that attribute for
    all supported metadatas, generate new keys for all supported
    metadatas, wrap them for all other users with that attribute, and
    insert them into the keystore. Optionally, new keys can have new
    key lengths. If the user to revoke does not have the specified
    attribute, this function does nothing.

    Arguments:
    userid (string) - the ID of the user whose keys to revoke
    attr (string) - the attribute of the keys to revoke
    keystore (AbstractKeyStore) - the key store to which to write the
        new keywraps for all other users with the given attribute
    attr_user_map (AbstractAttrUserMap) - an attribute-to-user map that
        can return a list of all users with the given attribute
    user_attr_map (AbstractUserAttrMap) - a user-to-attribute map that
        can return a list of all attributes of a given user
    user_pks ({string: RSA._RSA_obj}) - a dictionary that maps user IDs
        to users' RSA public keys
    metas_keylens (optional {string: int}) - an optional dictionary that
        maps metadatas to new key lengths. For any metadata not in the
        dictionary, the new key will have the same length as the
        current key.
    """
    # Fixed: the default used to be a mutable {} shared across calls;
    # use the None sentinel idiom instead.
    if metas_keylens is None:
        metas_keylens = {}

    # If revoked user does not have the specified attribute, do nothing
    cur_users = attr_user_map.users_by_attribute(attr)
    if userid not in cur_users:
        return

    # Delete revoked user/attribute from both maps
    attr_user_map.delete_user(attr, userid)
    user_attr_map.delete_attr(userid, attr)

    metas = keystore.get_metadatas(userid, attr)
    # meta -> (new key, new version, new key length). Recording the
    # version and length per metadata fixes a bug where the values left
    # over from the LAST iteration of the loop below were reused for
    # EVERY metadata during re-insertion.
    new_keys = {}

    # For each supported metadata, generate a new attribute key and
    # delete the revoked user's keys
    for meta in metas:
        cur_info = keystore.retrieve_latest_version(userid, meta, attr)
        new_vers = cur_info.vers + 1
        # Use the caller-supplied length if given, else keep the current one
        new_keylen = metas_keylens.get(meta, cur_info.keylen)
        new_key = self._generate_key(attr, new_vers, meta, new_keylen)
        new_keys[meta] = (new_key, new_vers, new_keylen)
        keystore.remove_revoked_keys(userid, meta, attr)

    # Wrap newly generated keys for the remaining users and add them to
    # the keystore, using each metadata's own version and key length
    cur_users.remove(userid)
    for user in cur_users:
        for meta in keystore.get_metadatas(user, attr):
            if meta in new_keys:
                sk, vers, keylen = new_keys[meta]
                RSA_pk = user_pks[user]
                keywrap = utils.wrap_key(sk, RSA_pk)
                keystore.insert(
                    user, KeyInfo(attr, vers, meta, keywrap, keylen))
def __init__(self, conn=None, terms=None):
    """
    Populate a dummy PKI key store with a fixed set of demo keys.

    Arguments:
    conn - connection to AccumuloKeyStore. Can be connection to a
        live Accumulo instance. Defaults to a FakeConnection()
    terms - (list) attributes to insert keys for - can be 'a'-'e'.
        Allows for the creation of a 'limited' PKI for demos and
        testing. Defaults to ['a', 'b', 'c', 'd', 'e']

    Note: For key names we recommend to use a combination of the
    algorithm being used and the specific table name, along the lines
    of AES_CBC__table__, not the names listed below.
    """
    # Initialize the new FakeConnection here so Python doesn't
    # create a new hidden global variable for the default argument
    # (mutable defaults are evaluated once and shared across calls)
    if conn is None:
        conn = FakeConnection()
    if terms is None:
        terms = ['a', 'b', 'c', 'd', 'e']

    # Symmetric (non-attribute) keys: metadata/table name ->
    # list of (version, raw key bytes). All keys are 16 bytes except
    # the SIV entry, which is 32 bytes (two 16-byte halves).
    SYM_KEYS_TO_INSERT = {
        "table1": [(1, b'Sixteen by1e key')],
        "Pycrypto_AES_CFB": [(1, b'Sixteen by1e key'),
                             (2, b'Sixteen by2e key'),
                             (3, b'Sixteen by3e key')],
        "Pycrypto_AES_CBC": [(1, b'Sixteen bb1e key')],
        "Pycrypto_AES_OFB": [(1, b'Sixteen bc1e key'),
                             (2, b'Sixteen bc2e key'),
                             (3, b'Sixteen bc3e key')],
        "Pycrypto_AES_CTR": [(1, b'Sixteen bd1e key')],
        "Pycrypto_AES_GCM": [(1, b'Sixteen be1e key'),
                             (2, b'Sixteen be2e key')],
        "Pycrypto_AES_SIV": [(1, b'Sixteen byte keySixteen byte key')]
    }
    # Attribute keys: metadata -> list of (attribute, version,
    # raw key bytes). Versions are deliberately sparse/irregular so
    # tests can exercise gaps and latest-version lookups.
    ATTR_KEYS_TO_INSERT = {
        "VIS_Identity": [('a', 1, b'Sixteen bate k1y'),
                         ('a', 2, b'Sixteen bate k2y'),
                         ('a', 3, b'Sixteen bate k3y'),
                         ('b', 1, b'Sixteen bbte k1y'),
                         ('b', 2, b'Sixteen bbte k2y'),
                         ('c', 1, b'Sixteen bcte key'),
                         ('d', 1, b'Sixteen bdte k1y'),
                         ('d', 2, b'Sixteen bdte k2y'),
                         ('d', 3, b'Sixteen bdte k3y'),
                         ('d', 4, b'Sixteen bdte k4y'),
                         ('e', 1, b'Sixteen bete key')],
        "VIS_AES_CFB": [('a', 1, b'Sixteen bate key'),
                        ('b', 2, b'Sixteen bbte k2y'),
                        ('b', 3, b'Sixteen bbte k3y'),
                        ('c', 1, b'Sixteen bcte key'),
                        ('d', 2, b'Sixteen bdte key'),
                        ('d', 3, b'Sixteen bdte key'),
                        ('d', 4, b'Sixteen bdte key'),
                        ('d', 5, b'Sixteen bdte key'),
                        ('e', 1, b'Sixteen bete key')],
        "VIS_AES_CBC": [('a', 1, b'Sixteen bate k1y'),
                        ('a', 2, b'Sixteen bate k2y'),
                        ('a', 3, b'Sixteen bate k3y'),
                        ('a', 5, b'Sixteen bate k5y'),
                        ('b', 1, b'Sixteen bbte k1y'),
                        ('b', 2, b'Sixteen bbte k2y'),
                        ('b', 3, b'Sixteen bbte k3y'),
                        ('c', 3, b'Sixteen bcte k3y'),
                        ('c', 4, b'Sixteen bcte k4y'),
                        ('c', 5, b'Sixteen bcte k5y'),
                        ('d', 1, b'Sixteen bdte k1y'),
                        ('d', 2, b'Sixteen bdte k2y'),
                        ('e', 1, b'Sixteen bete key')],
        "VIS_AES_OFB": [('a', 2, b'Sixteen bate k2y'),
                        ('a', 3, b'Sixteen bate k3y'),
                        ('a', 4, b'Sixteen bate k4y'),
                        ('a', 5, b'Sixteen bate k5y'),
                        ('b', 1, b'Sixteen bbte k1y'),
                        ('b', 2, b'Sixteen bbte k2y'),
                        ('b', 3, b'Sixteen bbte k3y'),
                        ('b', 4, b'Sixteen bbte k4y'),
                        ('c', 2, b'Sixteen bcte k2y'),
                        ('c', 3, b'Sixteen bcte k3y'),
                        ('d', 2, b'Sixteen bdte key'),
                        ('e', 1, b'Sixteen bete key')],
        "VIS_AES_CTR": [('a', 1, b'Sixteen bate k1y'),
                        ('a', 3, b'Sixteen bate k3y'),
                        ('a', 4, b'Sixteen bate k4y'),
                        ('b', 1, b'Sixteen bbte k1y'),
                        # NOTE(review): wrap text says 'k3y' for vers 2
                        # below — possible data typo; left as-is
                        ('b', 2, b'Sixteen bbte k3y'),
                        ('b', 3, b'Sixteen bbte k3y'),
                        ('c', 2, b'Sixteen bcte key'),
                        ('d', 1, b'Sixteen bdte key'),
                        ('e', 3, b'Sixteen bete k3y'),
                        ('e', 5, b'Sixteen bete k5y')],
        "VIS_AES_GCM": [('a', 1, b'Sixteen bate key'),
                        ('b', 2, b'Sixteen bbte key'),
                        ('c', 1, b'Sixteen bcte k1y'),
                        ('c', 2, b'Sixteen bcte k2y'),
                        ('c', 3, b'Sixteen bcte k3y'),
                        ('d', 1, b'Sixteen bdte k1y'),
                        ('d', 2, b'Sixteen bdte k2y'),
                        ('d', 4, b'Sixteen bdte k4y'),
                        ('e', 5, b'Sixteen bete key')]
    }
    # Remove existing symmetric key tables so the demo starts fresh
    for metadata in SYM_KEYS_TO_INSERT.keys():
        if conn.table_exists(metadata):
            conn.delete_table(metadata)
    # Remove existing attribute key tables as well
    for metadata in ATTR_KEYS_TO_INSERT.keys():
        if conn.table_exists(metadata):
            conn.delete_table(metadata)
    # Generate an RSA key used to wrap every key inserted below
    RSA_key = RSA.generate(3072)
    # NOTE(review): the parent __init__ presumably sets
    # self._acc_keystore, self._user_id, and self._rsa_key used below,
    # registering this instance as user 'one' — confirm in base class
    super(DummyEncryptionPKI, self).__init__(conn, 'one', RSA_key)
    # Add symmetric keys one at a time (attr='' marks a key as a
    # non-attribute key)
    for (algorithm, keys) in SYM_KEYS_TO_INSERT.iteritems():
        for ver, key in keys:
            self._acc_keystore.insert(
                str(self._user_id),
                KeyInfo(attr='',
                        vers=ver,
                        metadata=algorithm,
                        keywrap=key_utils.wrap_key(key, self._rsa_key),
                        keylen=len(key)))
    # Add attribute keys in one batch, skipping any attribute not in
    # the requested 'terms' subset
    keys_to_insert = []
    for (algorithm, keys) in ATTR_KEYS_TO_INSERT.iteritems():
        for attr, vers, key in keys:
            if attr in terms:
                keys_to_insert.append(
                    KeyInfo(attr=attr,
                            vers=vers,
                            metadata=algorithm,
                            keywrap=key_utils.wrap_key(key, self._rsa_key),
                            keylen=len(key)))
    self._acc_keystore.batch_insert(str(self._user_id), keys_to_insert)
def _check_batch_retrieve(self, ks):
    """
    Make sure batch retrieval works: fetching by metadata alone returns
    every key with that metadata, fetching with an attribute narrows to
    that attribute (the empty attribute selects non-attribute keys),
    and fetching a never-inserted attribute raises PKILookupError.

    Arguments:
    self - a simuluator of a test object (see DummyTest above)
    ks - a fresh object matching the AbstractKeyStore interface
    """
    keys = [
        KeyInfo('attr A', 1, 'metadata', 'keywrap', 0),
        KeyInfo('attr A', 2, 'metadata', 'keywarp', 0),
        KeyInfo('attr B', 1, 'metadata', 'newwrap', 0),
        KeyInfo('attr B', 2, 'metadata', 'dewwrap', 0),
        KeyInfo('', 12, 'metadata', 'gluwrap', 0),
        KeyInfo('', 1, 'betadata', 'foowrap', 0),
        KeyInfo('', 1, 'gammadata', 'barwrap', 0)
    ]
    ks.batch_insert('user', keys)

    def same_members(actual, expected):
        # Order-insensitive equality: same size, mutual containment
        self.assertTrue(len(actual) == len(expected))
        self.assertTrue(all(x in expected for x in actual))
        self.assertTrue(all(x in actual for x in expected))

    # Case 1: test getting everything for each metadata
    same_members(ks.batch_retrieve('user', 'metadata'), keys[:5])
    same_members(ks.batch_retrieve('user', 'betadata'), keys[5:6])
    same_members(ks.batch_retrieve('user', 'gammadata'), keys[6:])

    # Case 2: the empty attribute matches only the non-attribute key
    res = ks.batch_retrieve('user', 'metadata', '')
    self.assertTrue(len(res) == 1)
    self.assertTrue(all(x.attr == '' for x in res))
    self.assertTrue(keys[4] in res)

    # Cases 3 & 4: each named attribute returns exactly its own pair
    for attr, pair in (('attr A', keys[0:2]), ('attr B', keys[2:4])):
        res = ks.batch_retrieve('user', 'metadata', attr)
        self.assertTrue(len(res) == 2)
        self.assertTrue(all(x.attr == attr for x in res))
        self.assertTrue(pair[0] in res)
        self.assertTrue(pair[1] in res)

    # Case 5: test non-existent attr C
    try:
        ks.batch_retrieve('user', 'metadata', 'attr C')
        self.assertTrue(False, 'Should fail to retrieve non-existent attr')
    except PKILookupError:
        pass