Code example #1
def user_certs_load_cached( config, user_names_or_ids ):
    """
    Load and return the list of valid cached user certificates from the cert bundle.
    """
    ret = []
    for uid in user_names_or_ids:
        user_cert = object_stub.load_user_cert( config, uid )
        if user_cert is None:
            continue

        ret.append( user_cert )

    return ret
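
Below is a minimal usage sketch for user_certs_load_cached, assuming the surrounding certs.py module context (object_stub, log) and borrowing the config loader seen in code example #10; the user names are placeholders.

import sys

config = conf.get_config_from_argv(sys.argv)   # assumed: config loader as used in code example #10
cached_certs = user_certs_load_cached(config, ["alice@example.com", "bob@example.com"])
for user_cert in cached_certs:
    log.debug("Found cached cert for '%s'" % user_cert.email)   # users with no cached cert are skipped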
Code example #2
File: certs.py  Project: iychoi/syndicate-core
def user_certs_load_cached(config, user_names_or_ids):
    """
    Load and return the list of valid cached user certificates from the cert bundle.
    """
    ret = []
    for uid in user_names_or_ids:
        user_cert = object_stub.load_user_cert(config, uid)
        if user_cert is None:
            continue

        ret.append(user_cert)

    return ret
Code example #3
File: certs.py  Project: iychoi/syndicate-core
def get_user_cert(config, user_name_or_id, check_cache=True):
    """
    Load a user cert from local disk.
    If not local, then go fetch it.
    Return the user cert on success.
    Raise an exception on error.
    """

    user_cert = None

    if check_cache:
        user_cert = object_stub.load_user_cert(config, user_name_or_id)

    if user_cert is None:
        downloader = config['helpers']['fetch_user_cert']
        ms_url = config['MS_url']
        user_cert = user_cert_fetch(ms_url, user_name_or_id, downloader)

        if user_cert is None:
            raise Exception("Failed to obtain user certificate")

    return user_cert
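
A hedged usage sketch for get_user_cert under the same assumed module context; the user name is a placeholder. The first call consults the local cache before fetching from the MS at config['MS_url']; the second bypasses the cache and always fetches.

try:
    user_cert = get_user_cert(config, "alice@example.com")                      # cached copy if present
    fresh_cert = get_user_cert(config, "alice@example.com", check_cache=False)  # force a fetch
except Exception as e:
    log.error("Could not obtain user cert: %s" % e)
    raise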
Code example #4
def get_user_cert( config, user_name_or_id, check_cache=True ):
    """
    Load a user cert from local disk.
    If not local, then go fetch it.
    Return the user cert on success.
    Raise an exception on error.
    """

    user_cert = None 

    if check_cache:
        user_cert = object_stub.load_user_cert( config, user_name_or_id )

    if user_cert is None:

        downloader = config['helpers']['fetch_user_cert']
        ms_url = config['MS_url']
        user_cert = user_cert_fetch( ms_url, user_name_or_id, downloader )

        if user_cert is None:
            raise Exception("Failed to obtain user certificate")

    return user_cert
Code example #5
def broadcast_reload( config, user_id, volume_id, cert_bundle_version=None, volume_version=None, gateway_names=None ):
    """
    Generate and broadcast a set of requests to all gateways that:
    * are write-capable
    * can receive writes
    * can coordinate writes.
    
    The message will have them synchronously reload their configurations.
    If gateway_names is given, then send to those gateways instead.
    Send it off and wait for their acknowledgements (or timeouts).

    This method is used when adding/removing gateways, and updating volume capability information.

    We'll need the volume private key.

    Return {"gateway_name": True|False|None} on success
        None indicates "unknown"
    """

    import grequests
    logging.getLogger("requests").setLevel(logging.CRITICAL)
    logging.getLogger("grequests").setLevel(logging.CRITICAL)

    gateway_certs = None 
    gateway_status = {}

    # sanity check--volume key is on file 
    volume_cert = object_stub.load_volume_cert( config, str(volume_id) )
    if volume_cert is None:
        raise MissingCertException("No volume cert for '%s'" % str(volume_id))

    owner_cert = object_stub.load_user_cert( config, str(volume_cert.owner_id))
    if owner_cert is None:
        raise MissingCertException("Missing user cert for %s, owner of volume '%s'" % (volume_cert.owner_id, volume_cert.name))

    volume_pkey = storage.load_private_key( config, "user", owner_cert.email )
    if volume_pkey is None:
        raise MissingKeyException("No volume key for owner '%s' of '%s'" % (owner_cert.email, volume_cert.name ))

    if gateway_names is None:
        writer_certs = list_volume_writers( config, volume_id )
        coord_certs = list_volume_coordinators( config, volume_id )
        recver_certs = list_gateways_by_type( config, volume_id, "RG" ) 
        gateway_certs = writer_certs + coord_certs + recver_certs

    else:
        gateway_certs = []
        for gateway_name in gateway_names:
            gateway_cert = object_stub.load_gateway_cert( config, gateway_name )
            if gateway_cert is None:
                raise MissingCertException("No gateway cert for '%s'" % gateway_name )

            gateway_certs.append( gateway_cert )

    for gateway_cert in gateway_certs:
        gateway_status[gateway_cert.name] = None

    gateway_url_names = dict( [('http://%s:%s' % (cert.host, cert.port), cert.name) for cert in gateway_certs] )
    urls = gateway_url_names.keys()

    msg = make_reload_request( config, user_id, volume_id, cert_bundle_version=cert_bundle_version, volume_version=volume_version )
    if msg is None:
        raise Exception("BUG: failed to generate config-reload request")

    def req_exception(request, exception):
        log.info("Caught exception on broadcast to '%s'" % request.url)
        log.info( traceback.format_exception(type(exception), exception, None) )
        gateway_name = gateway_url_names[request.url]
        gateway_status[gateway_name] = False

    msg_txt = msg.SerializeToString()
    reqs = [grequests.post(url, data={"control-plane": msg_txt}) for url in urls]

    # send all!
    iresps = grequests.imap( reqs, exception_handler=req_exception ) 
    for resp in iresps:
        url = resp.url
        purl = urlparse.urlparse(url)
        hostname = purl.hostname
        port = purl.port

        gateway_name = gateway_url_names.get('http://%s:%s' % (hostname, port), None)
        if gateway_name is None:
            log.warn("Unknown URL '%s'" % url)
            continue
            
        if resp.status_code == 200:
            gateway_status[gateway_name] = True
        else:
            gateway_status[gateway_name] = False
            log.warn("HTTP %s on broadcast to '%s'" % (resp.status_code, gateway_name))

    return gateway_status
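
A hedged sketch of how a caller might act on the per-gateway status dict that broadcast_reload returns (True = acknowledged, False = failed, None = unknown); the user and volume IDs are placeholders.

gateway_status = broadcast_reload(config, 1234, 5678)
for gateway_name, status in gateway_status.items():
    if status is True:
        log.debug("Gateway '%s' reloaded its configuration" % gateway_name)
    elif status is False:
        log.warn("Gateway '%s' failed to reload" % gateway_name)
    else:
        log.warn("No acknowledgement from gateway '%s'" % gateway_name)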
Code example #6
def make_reload_request( config, user_id, volume_id, gateway_id=None, gateway_name=None, cert_bundle_version=None, volume_version=None ):
    """
    Make a signed, serialized gateway-reload request.
    If gateway_id or gateway_name is not None, then the request will be destined to a particular gateway, and will be signed with the owner's private key.
    Otherwise, the request will be destined to all write/coordinate gateways in the volume, and will be signed with the volume owner's private key.
    Return the signed request.
    Raise on error.
    """

    signing_key = None
    gateway_cert_version = None

    # need either volume key or gateway key 
    if gateway_id is None and gateway_name is not None:
        gateway_id = object_stub.load_gateway_id( config, gateway_name )

    if gateway_name is None and gateway_id is not None:
        gateway_name = object_stub.load_gateway_name( config, gateway_id )

    if gateway_name is not None:
        # look up the gateway's cert--its version must match gateway_cert_version
        gateway_cert = object_stub.load_gateway_cert( config, gateway_name )
        if gateway_cert is None:
            raise MissingCertException("Missing gateway certificate for %s" % gateway_name )

        assert volume_id == gateway_cert.volume_id, "Gateway '%s' is not in volume %s (but %s)" % (gateway_cert.name, volume_id, gateway_cert.volume_id)
        gateway_cert_version = gateway_cert.version
    
        # look up the owner's user 
        user_cert = object_stub.load_user_cert( config, str(gateway_cert.owner_id) )
        if user_cert is None:
            raise MissingCertException("Missing user certificate for %s, owner of '%s'" % (gateway_cert.owner_id, gateway_cert.name))

        # look up the user's private key, to sign with that 
        user_pkey = storage.load_private_key( config, "user", user_cert.email )
        if user_pkey is None:
            raise MissingCertException("Missing user private key for '%s'" % user_cert.email)

        log.debug("Sign reload request with private key of user '%s' for gateway '%s' in volume %s" % (user_cert.email, gateway_cert.name, volume_id))
        signing_key = user_pkey 

    else:
        # send to volume
        volume_cert = object_stub.load_volume_cert( config, str(volume_id) )
        if volume_cert is None:
            raise MissingCertException("Missing cert for volume %s" % (volume_id))

        owner_cert = object_stub.load_user_cert( config, str(volume_cert.owner_id) )
        if owner_cert is None:
            raise MissingCertException("Missing cert for user %s" % volume_cert.owner_id)

        volume_pkey = storage.load_private_key( config, "user", owner_cert.email )
        if volume_pkey is None:
            raise MissingKeyException("Missing both gateway and volume private keys")

        log.debug("Sign reload request with private key of volume owner '%s' in volume %s" % (owner_cert.email, volume_cert.name))
        signing_key = volume_pkey 

    if volume_version is None:
        # look up volume cert version 
        volume_cert = object_stub.load_volume_cert( config, str(volume_id) )
        if volume_cert is None:
            raise MissingCertException("Missing volume cert, and volume cert version is not given")

        volume_version = volume_cert.volume_version 

    if cert_bundle_version is None:
        # look up version vector; cross-check with volume version
        version_vector_txt = object_stub.load_object_file( config, "volume", str(volume_id) + ".bundle.version" )
        if version_vector_txt is None:
            raise MissingCertException("No cert bundle version information for volume '%s'" % volume_name)

        try:
            version_vector = json.loads(version_vector_txt)
        except:
            raise MissingCertException("Invalid version vector JSON")

        cert_bundle_version = version_vector.get('bundle_version', None)
        onfile_volume_version = version_vector.get('volume_version', None)

        assert cert_bundle_version is not None, "Missing bundle version in cert bundle version vector"
        assert onfile_volume_version is not None, "Missing volume version in cert bundle version vector"
        
        try:
            cert_bundle_version = int(cert_bundle_version)
            onfile_volume_version = int(onfile_volume_version)
        except:
            raise MissingCertException("Missing valid version information for cert bundle")

        assert onfile_volume_version == volume_version, "BUG: On-file cert bundle volume version (%s) does not match given volume version (%s)" % (onfile_volume_version, volume_version)
        

    req = sg_pb2.Request()
    
    req.request_type = sg_pb2.Request.RELOAD
    req.user_id = user_id
    req.volume_id = volume_id

    if gateway_id is not None:
        req.coordinator_id = gateway_id
    else:
        req.coordinator_id = 0

    req.src_gateway_id = libsyndicate.Syndicate.GATEWAY_TOOL
    req.message_nonce = random.randint(0, 2**64-1)

    req.volume_version = volume_version
    req.cert_version = cert_bundle_version
    req.file_id = 0             # ignored
    req.file_version = 0        # ignored
    req.fs_path = ""            # ignored

    if gateway_cert_version is not None:
        req.gateway_cert_version = gateway_cert_version

    # sign 
    req.signature = ""
    reqstr = req.SerializeToString()
    sig = crypto.sign_data( signing_key, reqstr )
    req.signature = base64.b64encode( sig )
    return req
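
A hedged sketch that aims the signed request at a single gateway and posts it the same way broadcast_reload does, as a "control-plane" form field; the gateway name, host, and port are placeholders, and requests is assumed to be importable as in code example #10.

import requests

req = make_reload_request(config, 1234, 5678, gateway_name="example-gateway")
payload = req.SerializeToString()
resp = requests.post("http://gateway.example.com:31111", data={"control-plane": payload})
if resp.status_code != 200:
    log.warn("Gateway did not acknowledge the reload (HTTP %s)" % resp.status_code)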
Code example #7
File: provisioning.py  Project: iychoi/syndicate-core
def make_host_provision_plan(
        config,
        sender_privkey_pem,
        host_pubkey_pem,
        hostname,
        volumes,
        gateway_pkey_generator=default_gateway_pkey_generator):
    """
    Generate a signed host-specific volume and gateway listing.

    @config: client configuration
    @sender_privkey_pem:  sender's private key, to sign the listing
    @host_pubkey_pem:  recipient's public key, to encrypt initial gateway keys and user private keys
    @hostname:  name of host on which the gateways run
    @volumes: list of volume names
    @gateway_pkey_generator: a callback that takes (config, volume name) and returns the gateway's initial private key (PEM)

    Return a dict with the structure:
    {
        "volume_name": {
            "gateways": {
                "__pkey__":  "encrypted pkey",
                "gateway_name": "gateway_cert_b64",
                "gateway_name": "gateway_cert_b64",
                ...
            },
            "users": {
                user_id: {
                    "pkey": "encrypted pkey",
                    "cert": "user_cert_b64"
                }
                ...
            }
        }
        ...
    }
    where each listed volume includes only the gateway
    certificates on this particular host.

    Because we create one initial gateway private key
    per volume, we only need to give that singular private
    key back.  The automounter will change the public key
    once it gets the private key.
    """

    ret = {}
    for volume_name in volumes:

        # get the volume ID
        volume_cert = object_stub.load_volume_cert(config, volume_name)
        if volume_cert is None:
            log.error("No such volume '%s'" % volume_name)
            return {}

        volume_id = volume_cert.volume_id

        # find all gateways in this volume, on this host
        gateway_certs = list_volume_gateways_by_host(config, volume_id,
                                                     hostname)
        if gateway_certs is None:
            log.error("Failed to load gateway certs for '%s'" % volume_name)
            return {}

        if len(gateway_certs) == 0:
            # no relevant gateways
            continue

        # find all associated user certs and their private keys, and serialize them
        user_certs = {}
        serialized_gateway_certs = {}
        for gateway_cert in gateway_certs:

            # user cert
            user_cert = object_stub.load_user_cert(config,
                                                   gateway_cert.owner_id)
            if user_cert is None:
                log.error("No such user '%s'" % gateway_cert.owner_id)
                return {}

            log.debug("User '%s' owns gateway '%s'" %
                      (user_cert.email, gateway_cert.name))

            # user private key
            user_pkey = storage.load_private_key(config, "user",
                                                 user_cert.email)
            if user_pkey is None:
                log.error("No such user private key '%s'" % user_cert.email)
                return {}

            user_pkey_pem = user_pkey.exportKey()
            rc, user_pkey_pem_enc = libsyndicate.encrypt_data(
                sender_privkey_pem, host_pubkey_pem, user_pkey_pem)
            if rc != 0:
                log.error("Failed to encrypt key for '%s', rc = %s" %
                          (user_cert.email, rc))
                return {}

            user_certs[str(gateway_cert.owner_id)] = {
                "cert": base64.b64encode(user_cert.SerializeToString()),
                "pkey": base64.b64encode(user_pkey_pem_enc)
            }

            serialized_gateway_certs[gateway_cert.name] = base64.b64encode(
                gateway_cert.SerializeToString())

        # encrypt the private key for this volume's gateways...
        pkey_pem = gateway_pkey_generator(config, volume_name)
        if pkey_pem is None:
            log.debug("Failed to generate gateway key for '%s'" % volume_name)
            continue

        rc, pkey_pem_enc = libsyndicate.encrypt_data(sender_privkey_pem,
                                                     host_pubkey_pem, pkey_pem)
        if rc != 0:
            log.error("Failed to encrypt response; rc = %d\n", rc)
            return {}

        serialized_gateway_certs["__pkey__"] = base64.b64encode(pkey_pem_enc)

        # done with this volume
        ret[volume_name] = {
            "gateways": serialized_gateway_certs,
            "users": user_certs
        }

    return ret
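
A hedged sketch of walking the plan returned by make_host_provision_plan; sender_privkey_pem, host_pubkey_pem, the hostname, and the volume name are placeholder values, and base64 is assumed to be imported as in the module above.

plan = make_host_provision_plan(config, sender_privkey_pem, host_pubkey_pem,
                                "node1.example.com", ["volume-alice"])
for volume_name, entry in plan.items():
    # "__pkey__" carries the encrypted initial gateway private key for this volume
    encrypted_gateway_pkey_b64 = entry["gateways"]["__pkey__"]
    for gateway_name, gateway_cert_b64 in entry["gateways"].items():
        if gateway_name == "__pkey__":
            continue
        gateway_cert_bin = base64.b64decode(gateway_cert_b64)       # serialized protobuf gateway cert
    for owner_id, user_entry in entry["users"].items():
        user_cert_bin = base64.b64decode(user_entry["cert"])        # serialized protobuf user cert
        encrypted_user_pkey = base64.b64decode(user_entry["pkey"])  # decryptable only by the host key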
Code example #8
def broadcast_reload(config, user_id, volume_id, cert_bundle_version=None, volume_version=None, gateway_names=None):
    """
    Generate and broadcast a set of requests to all gateways that:
    * are write-capable
    * can receive writes
    * can coordinate writes.

    The message will have them synchronously reload their configurations.
    If gateway_names is given, then send to those gateways instead.
    Send it off and wait for their acknowledgements (or timeouts).

    This method is used when adding/removing gateways, and updating volume
    capability information.

    We'll need the volume private key.

    Return {"gateway_name": True|False|None} on success
        None indicates "unknown"
    """

    import grequests
    logging.getLogger("requests").setLevel(logging.CRITICAL)
    logging.getLogger("grequests").setLevel(logging.CRITICAL)

    gateway_certs = None
    gateway_status = {}

    # sanity check--volume key is on file
    volume_cert = object_stub.load_volume_cert(config, str(volume_id))
    if volume_cert is None:
        raise MissingCertException("No volume cert for '%s'" % str(volume_id))

    owner_cert = object_stub.load_user_cert(config, str(volume_cert.owner_id))
    if owner_cert is None:
        raise MissingCertException("Missing user cert for %s, owner of volume '%s'" % (volume_cert.owner_id, volume_cert.name))

    volume_pkey = storage.load_private_key(config, "user", owner_cert.email)
    if volume_pkey is None:
        raise MissingKeyException("No volume key for owner '%s' of '%s'" % (owner_cert.email, volume_cert.name))

    if gateway_names is None:
        writer_certs = list_volume_writers(config, volume_id)
        coord_certs = list_volume_coordinators(config, volume_id)
        recver_certs = list_gateways_by_type(config, volume_id, "RG")
        gateway_certs = writer_certs + coord_certs + recver_certs

    else:
        gateway_certs = []
        for gateway_name in gateway_names:
            gateway_cert = object_stub.load_gateway_cert(config, gateway_name)
            if gateway_cert is None:
                raise MissingCertException("No gateway cert for '%s'" % gateway_name)

            gateway_certs.append(gateway_cert)

    for gateway_cert in gateway_certs:
        gateway_status[gateway_cert.name] = None

    gateway_url_names = dict([('http://%s:%s' % (cert.host, cert.port), cert.name) for cert in gateway_certs])
    urls = gateway_url_names.keys()

    msg = make_reload_request(config, user_id, volume_id, cert_bundle_version=cert_bundle_version, volume_version=volume_version)
    if msg is None:
        raise Exception("BUG: failed to generate config-reload request")

    def req_exception(request, exception):
        log.info("Caught exception on broadcast to '%s'" % request.url)
        log.info(traceback.format_exception(type(exception), exception, None))
        gateway_name = gateway_url_names[request.url]
        gateway_status[gateway_name] = False

    msg_txt = msg.SerializeToString()
    reqs = [grequests.post(url, data={"control-plane": msg_txt}) for url in urls]

    # send all!
    iresps = grequests.imap(reqs, exception_handler=req_exception)
    for resp in iresps:
        url = resp.url
        purl = urlparse.urlparse(url)
        hostname = purl.hostname
        port = purl.port

        gateway_name = gateway_url_names.get('http://%s:%s' % (hostname, port), None)
        if gateway_name is None:
            log.warn("Unknown URL '%s'" % url)
            continue

        if resp.status_code == 200:
            gateway_status[gateway_name] = True
        else:
            gateway_status[gateway_name] = False
            log.warn("HTTP %s on broadcast to '%s'" % (resp.status_code, gateway_name))

    return gateway_status
Code example #9
def make_reload_request(config, user_id, volume_id, gateway_id=None, gateway_name=None, cert_bundle_version=None, volume_version=None):
    """
    Make a signed, serialized gateway-reload request.
    If gateway_id or gateway_name is not None, then the request will be destined to a particular gateway, and will be signed with the owner's private key.
    Otherwise, the request will be destined to all write/coordinate gateways in the volume, and will be signed with the volume owner's private key.
    Return the signed request.
    Raise on error.
    """

    signing_key = None
    gateway_cert_version = None

    # need either volume key or gateway key
    if gateway_id is None and gateway_name is not None:
        gateway_id = object_stub.load_gateway_id(config, gateway_name)

    if gateway_name is None and gateway_id is not None:
        gateway_name = object_stub.load_gateway_name(config, gateway_id)

    if gateway_name is not None:
        # look up the gateway's cert--its version must match gateway_cert_version
        gateway_cert = object_stub.load_gateway_cert(config, gateway_name)
        if gateway_cert is None:
            raise MissingCertException("Missing gateway certificate for %s" % gateway_name)

        assert volume_id == gateway_cert.volume_id, "Gateway '%s' is not in volume %s (but %s)" % (gateway_cert.name, volume_id, gateway_cert.volume_id)
        gateway_cert_version = gateway_cert.version

        # look up the owner's user
        user_cert = object_stub.load_user_cert(config, str(gateway_cert.owner_id))
        if user_cert is None:
            raise MissingCertException("Missing user certificate for %s, owner of '%s'" % (gateway_cert.owner_id, gateway_cert.name))

        # look up the user's private key, to sign with that
        user_pkey = storage.load_private_key(config, "user", user_cert.email)
        if user_pkey is None:
            raise MissingCertException("Missing user private key for '%s'" % user_cert.email)

        log.debug("Sign reload request with private key of user '%s' for gateway '%s' in volume %s" % (user_cert.email, gateway_cert.name, volume_id))
        signing_key = user_pkey

    else:
        # send to volume
        volume_cert = object_stub.load_volume_cert(config, str(volume_id))
        if volume_cert is None:
            raise MissingCertException("Missing cert for volume %s" % (volume_id))

        owner_cert = object_stub.load_user_cert(config, str(volume_cert.owner_id))
        if owner_cert is None:
            raise MissingCertException("Missing cert for user %s" % volume_cert.owner_id)

        volume_pkey = storage.load_private_key(config, "user", owner_cert.email)
        if volume_pkey is None:
            raise MissingKeyException("Missing both gateway and volume private keys")

        log.debug("Sign reload request with private key of volume owner '%s' in volume %s" % (owner_cert.email, volume_cert.name))
        signing_key = volume_pkey

    if volume_version is None:
        # look up volume cert version
        volume_cert = object_stub.load_volume_cert(config, str(volume_id))
        if volume_cert is None:
            raise MissingCertException("Missing volume cert, and volume cert version is not given")

        volume_version = volume_cert.volume_version

    if cert_bundle_version is None:
        # look up version vector; cross-check with volume version
        version_vector_txt = object_stub.load_object_file(config, "volume", str(volume_id) + ".bundle.version")
        if version_vector_txt is None:
            raise MissingCertException("No cert bundle version information for volume '%s'" % volume_cert.name)

        try:
            version_vector = json.loads(version_vector_txt)
        except:
            raise MissingCertException("Invalid version vector JSON")

        cert_bundle_version = version_vector.get('bundle_version', None)
        onfile_volume_version = version_vector.get('volume_version', None)

        assert cert_bundle_version is not None, "Missing bundle version in cert bundle version vector"
        assert onfile_volume_version is not None, "Missing volume version in cert bundle version vector"

        try:
            cert_bundle_version = int(cert_bundle_version)
            onfile_volume_version = int(onfile_volume_version)
        except:
            raise MissingCertException("Missing valid version information for cert bundle")

        assert onfile_volume_version == volume_version, "BUG: On-file cert bundle volume version (%s) does not match given volume version (%s)" % (onfile_volume_version, volume_version)


    req = sg_pb2.Request()

    req.request_type = sg_pb2.Request.RELOAD
    req.user_id = user_id
    req.volume_id = volume_id

    if gateway_id is not None:
        req.coordinator_id = gateway_id
    else:
        req.coordinator_id = 0

    req.src_gateway_id = libsyndicate.Syndicate.GATEWAY_TOOL
    req.message_nonce = random.randint(0, 2**64-1)

    req.volume_version = volume_version
    req.cert_version = cert_bundle_version
    req.file_id = 0             # ignored
    req.file_version = 0        # ignored
    req.fs_path = ""            # ignored

    if gateway_cert_version is not None:
        req.gateway_cert_version = gateway_cert_version

    # sign
    req.signature = ""
    reqstr = req.SerializeToString()
    sig = crypto.sign_data(signing_key, reqstr)
    req.signature = base64.b64encode(sig)
    return req
Code example #10
def upload_keys(new_emails, user_infos):
    """
    Save all private keys for the users we just provisioned
    (user and volume keys)

    Return True if they all succeed
    Return False if at least one fails
    """
    syndicate_config = conf.get_config_from_argv(sys.argv)
    user_bundles = {}

    # make initial user bundles
    for user_name in new_emails:
        user_pkey = storage.load_private_key( syndicate_config, "user", user_name )
        if user_pkey is None:
            log.error("Automount daemon failed to produce key for {}".format(user_name))
            return False

        user_cert = object_stub.load_user_cert(syndicate_config, user_name)
        if user_cert is None:
            log.error("Automount daemon failed to produce cert for {}".format(user_name))
            return False
       
        ug_name = provisioning.make_gateway_name('demo', 'UG', sanitize_name('volume-{}'.format(user_name)), 'localhost')
        rg_name = provisioning.make_gateway_name('demo', 'RG', sanitize_name('volume-{}'.format(user_name)), 'localhost')

        ug_pkey = storage.load_private_key( syndicate_config, "gateway", ug_name)
        if ug_pkey is None:
            log.error("Automount daemon failed to produce key for {}".format(ug_name))
            return False

        rg_pkey = storage.load_private_key( syndicate_config, "gateway", rg_name)
        if rg_pkey is None:
            log.error("Automount daemon failed to produce key for {}".format(rg_name))
            return False

        ug_cert = object_stub.load_gateway_cert(syndicate_config, ug_name)
        if ug_cert is None:
            log.error("Automount daemon failed to produce cert for {}".format(ug_name))
            return False

        rg_cert = object_stub.load_gateway_cert(syndicate_config, rg_name)
        if rg_cert is None:
            log.error("Automount daemon failed to produce cert for {}".format(rg_name))
            return False

        # gateway keys for the same volume must be the same, initially
        if ug_pkey.exportKey() != rg_pkey.exportKey():
            log.error("Automount daemon did not produce the same initial key for {} and {}".format(ug_name, rg_name))
            return False

        user_bundles[user_name] = {
            'user_pkey': user_pkey.exportKey(),
            'user_cert': base64.b64encode(user_cert.SerializeToString()),
            'ug_cert': base64.b64encode(ug_cert.SerializeToString()),
            'rg_cert': base64.b64encode(rg_cert.SerializeToString()),
            'gateway_pkey': ug_pkey.exportKey()
        }

    # encrypt private keys
    for user_info in user_infos:
        user_name = user_info['email']
        user_password = user_info['password']
        if user_name not in new_emails:
            continue
       
        if len(user_password) == 0:
            # skip this user 
            log.debug("Skipping already-processed user {}".format(user_name))
            del user_bundles[user_name]
            continue

        # the password arrives as standard base64; re-encode it as the urlsafe base64 key Fernet expects
        user_password = base64.urlsafe_b64encode(base64.b64decode(user_password))

        for keyname in ['user_pkey', 'gateway_pkey']:
            f = Fernet(user_password)
            user_bundles[user_name][keyname] = f.encrypt(user_bundles[user_name][keyname])
            # Fernet tokens are urlsafe base64; re-encode as standard base64 for upload
            user_bundles[user_name][keyname] = base64.b64encode(base64.urlsafe_b64decode(user_bundles[user_name][keyname]))

    log.debug("Upload key bundles for {} users".format(len(user_bundles.keys())))

    for user_name in user_bundles.keys():
        # send encrypted keys 
        try:
            log.debug("Upload keys for {}".format(user_name))
            
            data = {
                'demo_payload': json.dumps(user_bundles[user_name])
            }

            req = requests.post(SIGNUP_URL + '/provision/{}'.format(urllib.quote(user_name)), headers=make_auth_headers(), data=data)

            if req.status_code not in (200, 202):
                log.error("Failed to provision {}: HTTP {} ({})".format(user_name, req.status_code, req.text))
                
        except Exception as e:
            if DEBUG:
                log.exception(e)

            return False

    return True
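
A hedged sketch of the inverse of the key wrapping above, i.e. what a recipient of the uploaded bundle could do with the same password: turn the standard-base64 password into the urlsafe-base64 key Fernet expects, turn the standard-base64 ciphertext back into a Fernet token, and decrypt. decrypt_bundle_key is a hypothetical helper, not part of the project.

from cryptography.fernet import Fernet
import base64

def decrypt_bundle_key(user_password_b64, ciphertext_b64):
    # mirror of the re-encoding done in upload_keys() before encryption and upload
    fernet_key = base64.urlsafe_b64encode(base64.b64decode(user_password_b64))
    token = base64.urlsafe_b64encode(base64.b64decode(ciphertext_b64))
    return Fernet(fernet_key).decrypt(token)    # the original PEM-encoded private key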
Code example #11
File: provisioning.py  Project: iychoi/syndicate-core
def make_host_provision_plan(config, sender_privkey_pem, host_pubkey_pem, hostname, volumes, gateway_pkey_generator=default_gateway_pkey_generator):
    """
    Generate a signed host-specific volume and gateway listing.

    @config: client configuration
    @sender_privkey_pem:  sender's private key, to sign the listing
    @host_pubkey_pem:  recipient's public key, to encrypt initial gateway keys and user private keys
    @hostname:  name of host on which the gateways run
    @volumes: list of volume names
    @gateway_pkey_generator: a callback that takes (config, volume name) and returns the gateway's initial private key (PEM)

    Return a dict with the structure:
    {
        "volume_name": {
            "gateways": {
                "__pkey__":  "encrypted pkey",
                "gateway_name": "gateway_cert_b64",
                "gateway_name": "gateway_cert_b64",
                ...
            },
            "users": {
                user_id: {
                    "pkey": "encrypted pkey",
                    "cert": "user_cert_b64"
                }
                ...
            }
        }
        ...
    }
    where each listed volume includes only the gateway
    certificates on this particular host.

    Because we create one initial gateway private key
    per volume, we only need to give that singular private
    key back.  The automounter will change the public key
    once it gets the private key.
    """

    ret = {}
    for volume_name in volumes:

        # get the volume ID
        volume_cert = object_stub.load_volume_cert(config, volume_name)
        if volume_cert is None:
            log.error("No such volume '%s'" % volume_name)
            return {}

        volume_id = volume_cert.volume_id

        # find all gateways in this volume, on this host
        gateway_certs = list_volume_gateways_by_host(config, volume_id, hostname)
        if gateway_certs is None:
            log.error("Failed to load gateway certs for '%s'" % volume_name)
            return {}

        if len(gateway_certs) == 0:
            # no relevant gateways
            continue

        # find all associated user certs and their private keys, and serialize them
        user_certs = {}
        serialized_gateway_certs = {}
        for gateway_cert in gateway_certs:

            # user cert
            user_cert = object_stub.load_user_cert(config, gateway_cert.owner_id)
            if user_cert is None:
                log.error("No such user '%s'" % gateway_cert.owner_id)
                return {}

            log.debug("User '%s' owns gateway '%s'" % (user_cert.email, gateway_cert.name))

            # user private key
            user_pkey = storage.load_private_key(config, "user", user_cert.email)
            if user_pkey is None:
                log.error("No such user private key '%s'" % user_cert.email)
                return {}

            user_pkey_pem = user_pkey.exportKey()
            rc, user_pkey_pem_enc = libsyndicate.encrypt_data(sender_privkey_pem, host_pubkey_pem, user_pkey_pem)
            if rc != 0:
                log.error("Failed to encrypt key for '%s', rc = %s" % (user_cert.email, rc))
                return {}

            user_certs[ str(gateway_cert.owner_id) ] = {
                "cert": base64.b64encode(user_cert.SerializeToString()),
                "pkey": base64.b64encode(user_pkey_pem_enc)
            }

            serialized_gateway_certs[gateway_cert.name] = base64.b64encode(gateway_cert.SerializeToString())

        # encrypt the private key for this volume's gateways...
        pkey_pem = gateway_pkey_generator(config, volume_name)
        if pkey_pem is None:
            log.debug("Failed to generate gateway key for '%s'" % volume_name)
            continue

        rc, pkey_pem_enc = libsyndicate.encrypt_data(sender_privkey_pem, host_pubkey_pem, pkey_pem)
        if rc != 0:
            log.error("Failed to encrypt response; rc = %d\n", rc)
            return {}

        serialized_gateway_certs["__pkey__"] = base64.b64encode(pkey_pem_enc)

        # done with this volume
        ret[volume_name] = {
            "gateways": serialized_gateway_certs,
            "users": user_certs
        }

    return ret