Example #1
import io

from libcloud.storage.drivers import dummy
from libcloud.storage.drivers.dummy import DummyStorageDriver
from paramiko.rsakey import RSAKey
from pytest import raises

from geofront.backends.cloud import CloudMasterKeyStore
from geofront.masterkey import EmptyStoreError


def test_cloud_master_key_store():
    driver = DummyStorageDriver('', '')
    container = driver.create_container('geofront-test')
    s = CloudMasterKeyStore(driver, container, 'test_id_rsa')
    with raises(EmptyStoreError):
        s.load()
    key = RSAKey.generate(1024)
    s.save(key)
    driver.get_object(container.name, 'test_id_rsa')  # assert object exists
    # The dummy driver streams fake data on download, so monkey-patch its
    # file object to return the serialized key instead.
    with io.StringIO() as mock:
        key.write_private_key(mock)
        mock.seek(0)
        dummy.DummyFileObject = lambda *a, **k: mock
        stored_key = s.load()
        assert isinstance(stored_key, RSAKey)
        assert stored_key.get_base64() == key.get_base64()
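
The test above drives CloudMasterKeyStore only through libcloud's in-memory dummy driver. For a feel of the same save()/load() round trip outside the test suite, here is a minimal usage sketch against libcloud's LocalStorageDriver; the storage path and container name are made-up illustrations, and the sketch assumes only the behaviour the test exercises (the local driver may additionally require the lockfile package).

import os

from libcloud.storage.drivers.local import LocalStorageDriver
from paramiko.rsakey import RSAKey

from geofront.backends.cloud import CloudMasterKeyStore

os.makedirs('/tmp/geofront-store', exist_ok=True)  # hypothetical path
driver = LocalStorageDriver('/tmp/geofront-store')
container = driver.create_container('master-key')  # errors if it already exists
store = CloudMasterKeyStore(driver, container, 'id_rsa')
key = RSAKey.generate(1024)
store.save(key)                                 # serialize the key into the store
loaded = store.load()                           # read it back out
assert loaded.get_base64() == key.get_base64()  # the key round-trips intact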
Example #2
import os

from libcloud.storage.providers import get_driver
from libcloud.storage.types import ObjectDoesNotExistError, Provider
from paramiko.rsakey import RSAKey
from pytest import raises, skip

from geofront.backends.cloud import CloudMasterKeyStore
from geofront.masterkey import EmptyStoreError, read_private_key_file


def test_cloud_master_key_store_s3(request, tmpdir):
    try:
        access_key = request.config.getoption('--aws-access-key')
        secret_key = request.config.getoption('--aws-secret-key')
        bucket_name = request.config.getoption('--aws-s3-bucket')
    except ValueError:
        access_key = secret_key = bucket_name = None
    if access_key is None or secret_key is None or bucket_name is None:
        skip('--aws-access-key/--aws-secret-key/--aws-s3-bucket are not '
             'provided; skipped')
    driver_cls = get_driver(Provider.S3)
    driver = driver_cls(access_key, secret_key)
    container = driver.get_container(container_name=bucket_name)
    tmpname = ''.join(map('{:02x}'.format, os.urandom(16)))  # random object name
    s = CloudMasterKeyStore(driver, container, tmpname)
    key = RSAKey.generate(1024)
    # load() -- when not exists
    with raises(EmptyStoreError):
        s.load()
    try:
        # save()
        s.save(key)
        obj = driver.get_object(container.name, tmpname)
        dest = tmpdir / tmpname
        obj.download(str(dest))
        saved = read_private_key_file(dest.open())
        assert isinstance(saved, RSAKey)
        assert saved.get_base64() == key.get_base64()
        # load() -- when exists
        loaded = s.load()
        assert isinstance(loaded, RSAKey)
        assert loaded.get_base64() == key.get_base64()
    finally:
        # Clean up the uploaded object regardless of the test outcome.
        try:
            o = driver.get_object(container.name, tmpname)
        except ObjectDoesNotExistError:
            pass
        else:
            o.delete()
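
The three --aws-* flags are read through request.config.getoption(), which raises the ValueError the test catches whenever an option was never registered. Registration happens in a conftest.py via pytest's pytest_addoption hook; the sketch below shows how such a hook could look (the option names match the test, but the defaults and help strings are assumptions, not geofront's actual conftest).

# conftest.py -- a minimal sketch, not geofront's actual test configuration
def pytest_addoption(parser):
    # Default to None so the test above skips when the flags are omitted.
    parser.addoption('--aws-access-key', default=None,
                     help='AWS access key for the S3 integration test')
    parser.addoption('--aws-secret-key', default=None,
                     help='AWS secret key for the S3 integration test')
    parser.addoption('--aws-s3-bucket', default=None,
                     help='S3 bucket the integration test may write to')

With the hook in place, the test runs only when invoked like py.test --aws-access-key=... --aws-secret-key=... --aws-s3-bucket=some-bucket.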
Example #3
# Use GitHub-registered public keys as your public key store.
from geofront.backends.github import GitHubKeyStore

KEY_STORE = GitHubKeyStore()

# Unlike public keys, the master key ideally ought to be accessible only
# by Geofront.  Assume you use Amazon Web Services, so you'll store the
# master key in your private S3 bucket named your_team_master_key.
from geofront.backends.cloud import CloudMasterKeyStore
from libcloud.storage.types import Provider
from libcloud.storage.providers import get_driver

driver_cls = get_driver(Provider.S3)
driver = driver_cls('aws access key', 'aws secret key')
container = driver.get_container(container_name='your_team_master_key')
MASTER_KEY_STORE = CloudMasterKeyStore(driver, container, 'id_rsa')

# You have to let Geofront know which remote servers to manage.
# Although the list could be hard-coded in the configuration file,
# here you'll fetch it dynamically from the EC2 API.  Assume all our
# AMIs are Amazon Linux, so the username is always ec2-user.
# If you're using Ubuntu AMIs, it should be ubuntu instead.
from geofront.backends.cloud import CloudRemoteSet
from libcloud.compute.types import Provider
from libcloud.compute.providers import get_driver

driver_cls = get_driver(Provider.EC2)
driver = driver_cls('aws access id', 'aws secret key', region='us-east-1')
REMOTE_SET = CloudRemoteSet(driver, user='ec2-user')

# Suppose your team is divided into several subgroups, and these subgroups are