def test_read_private_key_file():
    """The bundled ``test_id_rsa`` fixture must parse into an :class:`RSAKey`
    whose public part matches the known base64 encoding.
    """
    expected_base64 = (
        'AAAAB3NzaC1yc2EAAAADAQABAAABAQC7+fDpQ9sQKIdzXvqT3TzrPp2OpUCOJtUW3k0oi'
        'trqqHe1XiCke++DSpAv56poCppTj9qo3N1HyhZhSv/jH7/ejZ8NZdtvLIZGOCQZVdKNy0'
        'cg7jlimrWA2s8X201Yn3hYpUrYJYbhAAuQM5flvbyBtn5/miONQ8NVimgjG6UVANVqX4W'
        'H9kqdr4SBf45/+BAdenf2j5DC3xceOOW8wZfe2rOJpQ0msVxMeXExGqF9DS2E3bqOwE1C'
        'MPEGYr5KZCx7IeJ/4udBuKc/gOXb8tPiTTNxtYXEBcqhBdCa/M6pEdW5LiHxxoF5b6xY9'
        'q0nmi7Rn0weXK0SufhGgKrpSH+B'
    )
    fixture_path = os.path.join(os.path.dirname(__file__), 'test_id_rsa')
    with open(fixture_path) as key_file:
        parsed = read_private_key_file(key_file)
    assert isinstance(parsed, RSAKey)
    assert parsed.get_base64() == expected_base64
def test_read_private_key_file():
    """Parse the ``test_id_rsa`` fixture and verify its public base64 form.

    NOTE(review): this is an exact duplicate of the identically-named test
    defined earlier in this file; under pytest collection this later
    definition shadows the earlier one, so only one copy ever runs —
    consider deleting one of them.
    """
    fixture_dir = os.path.dirname(__file__)
    with open(os.path.join(fixture_dir, 'test_id_rsa')) as stream:
        loaded_key = read_private_key_file(stream)
    assert isinstance(loaded_key, RSAKey)
    actual = loaded_key.get_base64()
    assert actual == (
        'AAAAB3NzaC1yc2EAAAADAQABAAABAQC7+fDpQ9sQKIdzXvqT3TzrPp2OpUCOJtUW3k0oi'
        'trqqHe1XiCke++DSpAv56poCppTj9qo3N1HyhZhSv/jH7/ejZ8NZdtvLIZGOCQZVdKNy0'
        'cg7jlimrWA2s8X201Yn3hYpUrYJYbhAAuQM5flvbyBtn5/miONQ8NVimgjG6UVANVqX4W'
        'H9kqdr4SBf45/+BAdenf2j5DC3xceOOW8wZfe2rOJpQ0msVxMeXExGqF9DS2E3bqOwE1C'
        'MPEGYr5KZCx7IeJ/4udBuKc/gOXb8tPiTTNxtYXEBcqhBdCa/M6pEdW5LiHxxoF5b6xY9'
        'q0nmi7Rn0weXK0SufhGgKrpSH+B'
    )
def test_cloud_master_key_store_s3(request, tmpdir):
    """Integration test: ``CloudMasterKeyStore`` round-trips an RSA key
    through a real S3 bucket (save/load), and ``load()`` on an empty store
    raises :class:`EmptyStoreError`.

    Requires the ``--aws-access-key``, ``--aws-secret-key`` and
    ``--aws-s3-bucket`` pytest options; skipped when any is missing.

    Fix: the downloaded key file is now opened with a ``with`` block —
    the original passed ``dest.open()`` straight to
    ``read_private_key_file`` and never closed the handle (file leak).
    """
    try:
        access_key = request.config.getoption('--aws-access-key')
        secret_key = request.config.getoption('--aws-secret-key')
        bucket_name = request.config.getoption('--aws-s3-bucket')
    except ValueError:
        access_key = secret_key = bucket_name = None
    if access_key is None or secret_key is None or bucket_name is None:
        skip(
            '--aws-access-key/--aws-secret-key/--aws-s3-bucket are not '
            'provided; skipped'
        )
    driver_cls = get_driver(Provider.S3)
    driver = driver_cls(access_key, secret_key)
    container = driver.get_container(container_name=bucket_name)
    # Random hex object name so concurrent test runs don't collide.
    tmpname = ''.join(map('{:02x}'.format, os.urandom(16)))
    s = CloudMasterKeyStore(driver, container, tmpname)
    key = RSAKey.generate(1024)
    # load() -- when not exists
    with raises(EmptyStoreError):
        s.load()
    try:
        # save()
        s.save(key)
        obj = driver.get_object(container.name, tmpname)
        dest = tmpdir / tmpname
        obj.download(str(dest))
        # Close the downloaded file deterministically (was leaked before).
        with dest.open() as downloaded:
            saved = read_private_key_file(downloaded)
        assert isinstance(saved, RSAKey)
        assert saved.get_base64() == key.get_base64()
        # load() -- when exists
        loaded = s.load()
        assert isinstance(loaded, RSAKey)
        assert loaded.get_base64() == key.get_base64()
    finally:
        # Best-effort cleanup: remove the temporary S3 object if it exists.
        try:
            o = driver.get_object(container.name, tmpname)
        except ObjectDoesNotExistError:
            pass
        else:
            o.delete()
def test_cloud_master_key_store_s3(request, tmpdir):
    """S3 round-trip test for ``CloudMasterKeyStore``: empty store raises
    :class:`EmptyStoreError`; ``save()`` then ``load()`` preserves the key.

    Skipped unless ``--aws-access-key``, ``--aws-secret-key`` and
    ``--aws-s3-bucket`` pytest options are all supplied.

    Fix: the downloaded key file is read inside a ``with`` block; the
    original leaked the handle from ``dest.open()``.

    NOTE(review): this duplicates the identically-named test earlier in
    the file; pytest only runs this later definition — consider removing
    one copy.
    """
    try:
        access_key = request.config.getoption('--aws-access-key')
        secret_key = request.config.getoption('--aws-secret-key')
        bucket_name = request.config.getoption('--aws-s3-bucket')
    except ValueError:
        access_key = secret_key = bucket_name = None
    if access_key is None or secret_key is None or bucket_name is None:
        skip('--aws-access-key/--aws-secret-key/--aws-s3-bucket are not '
             'provided; skipped')
    driver_cls = get_driver(Provider.S3)
    driver = driver_cls(access_key, secret_key)
    container = driver.get_container(container_name=bucket_name)
    # Random hex name to avoid collisions between concurrent runs.
    tmpname = ''.join(map('{:02x}'.format, os.urandom(16)))
    s = CloudMasterKeyStore(driver, container, tmpname)
    key = RSAKey.generate(1024)
    # load() -- when not exists
    with raises(EmptyStoreError):
        s.load()
    try:
        # save()
        s.save(key)
        obj = driver.get_object(container.name, tmpname)
        dest = tmpdir / tmpname
        obj.download(str(dest))
        # Close the downloaded file deterministically (was leaked before).
        with dest.open() as downloaded:
            saved = read_private_key_file(downloaded)
        assert isinstance(saved, RSAKey)
        assert saved.get_base64() == key.get_base64()
        # load() -- when exists
        loaded = s.load()
        assert isinstance(loaded, RSAKey)
        assert loaded.get_base64() == key.get_base64()
    finally:
        # Best-effort cleanup: delete the temporary object if it was created.
        try:
            o = driver.get_object(container.name, tmpname)
        except ObjectDoesNotExistError:
            pass
        else:
            o.delete()