Example 1
    def __init__(self,
                 address,
                 nickserver_uri,
                 soledad,
                 token=None,
                 ca_cert_path=None,
                 api_uri=None,
                 api_version=None,
                 uid=None,
                 gpgbinary=None,
                 combined_ca_bundle=None):
        """
        Initialize a Key Manager for user's C{address} with provider's
        nickserver reachable in C{nickserver_uri}.

        :param address: The email address of the user of this Key Manager.
        :type address: str
        :param nickserver_uri: The URI of the nickserver.
        :type nickserver_uri: str
        :param soledad: A Soledad instance for local storage of keys.
        :type soledad: leap.soledad.Soledad
        :param token: The token for interacting with the webapp API.
        :type token: str
        :param ca_cert_path: The path to the CA certificate.
        :type ca_cert_path: str
        :param api_uri: The URI of the webapp API.
        :type api_uri: str
        :param api_version: The version of the webapp API.
        :type api_version: str
        :param uid: The user's UID.
        :type uid: str
        :param gpgbinary: Name for GnuPG binary executable.
        :type gpgbinary: C{str}
        """
        self._address = address
        self._nickserver_uri = nickserver_uri
        self._soledad = soledad
        self._token = token
        self.ca_cert_path = ca_cert_path
        self.api_uri = api_uri
        self.api_version = api_version
        self.uid = uid
        create = self._create_combined_bundle_file
        try:
            self._combined_ca_bundle = combined_ca_bundle or create()
        except Exception:
            self.log.warn('Error while creating combined ca bundle')
            self._combined_ca_bundle = ''

        self._async_client = HTTPClient(self._combined_ca_bundle)
        self._nicknym = Nicknym(self._nickserver_uri, self._ca_cert_path,
                                self._token)
        self.refresher = None
        self._init_gpg(soledad, gpgbinary)
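A minimal instantiation sketch for the constructor above, assuming an already-initialized Soledad instance; every literal value below is a placeholder rather than a value taken from the examples:

# Placeholder values for illustration only.
km = KeyManager(
    address='alice@example.org',
    nickserver_uri='https://nicknym.example.org:6425',
    soledad=soledad,                    # an already-initialized Soledad instance
    token='<session token>',
    ca_cert_path='/path/to/provider-ca.crt',
    api_uri='https://api.example.org:4430',
    api_version='1',
    uid='<user uuid>')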
Example 2
    def __init__(self,
                 url,
                 source_replica_uid,
                 creds,
                 crypto,
                 cert_file,
                 sync_db=None,
                 sync_enc_pool=None):
        """
        Initialize the sync target.

        :param url: The server sync url.
        :type url: str
        :param source_replica_uid: The source replica uid which we use when
                                   deferring decryption.
        :type source_replica_uid: str
        :param creds: A dictionary containing the uuid and token.
        :type creds: dict
        :param crypto: An instance of SoledadCrypto so we can encrypt/decrypt
                        document contents when syncing.
        :type crypto: soledad.crypto.SoledadCrypto
        :param cert_file: Path to the certificate of the ca used to validate
                          the SSL certificate used by the remote soledad
                          server.
        :type cert_file: str
        :param sync_db: Optional handler for the db with the symmetric
                        encryption of the syncing documents. If
                        None, encryption will be done in-place
                        instead of retrieving it from the dedicated
                        database.
        :type sync_db: Sqlite handler
        :param sync_enc_pool: The encryption pool to use to defer encryption.
                              If None is passed the encryption will not be
                              deferred.
        :type sync_enc_pool: leap.soledad.client.encdecpool.SyncEncrypterPool
        """
        if url.endswith("/"):
            url = url[:-1]
        self._url = str(url) + "/sync-from/" + str(source_replica_uid)
        self.source_replica_uid = source_replica_uid
        self._auth_header = None
        self.set_creds(creds)
        self._crypto = crypto
        self._sync_db = sync_db
        self._sync_enc_pool = sync_enc_pool
        self._insert_doc_cb = None
        # asynchronous encryption/decryption attributes
        self._decryption_callback = None
        self._sync_decr_pool = None
        self._http = HTTPClient(cert_file)
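The creds dictionary is nested under a 'token' key (as read by set_creds in Example 8 below). A hedged construction sketch with placeholder values:

creds = {'token': {'uuid': '<user uuid>', 'token': '<auth token>'}}
target = SoledadHTTPSyncTarget(
    'https://soledad.example.org:2323/user-db/',   # trailing slash is stripped
    source_replica_uid='<local replica uid>',
    creds=creds,
    crypto=soledad_crypto,     # a SoledadCrypto instance
    cert_file='/path/to/provider-ca.crt')
# target._url is now '.../user-db/sync-from/<local replica uid>'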
Example 3
    def __init__(self, domain, autoconf=False, basedir=None, cert_path=None):
        if not basedir:
            basedir = os.path.join(_preffix, 'leap')
        self._basedir = os.path.expanduser(basedir)
        self._domain = domain
        self._disco = Discovery('https://%s' % domain)
        self._provider_config = None

        self.first_bootstrap = defer.Deferred()
        self.stuck_bootstrap = None

        is_configured = self.is_configured()
        if not cert_path and is_configured:
            cert_path = self._get_ca_cert_path()
        self._http = HTTPClient(cert_path)

        self._load_provider_json()

        if not is_configured:
            if autoconf:
                self.log.debug('BOOTSTRAP: provider %s not initialized, '
                               'downloading files...' % domain)
                self.bootstrap()
            else:
                raise NotConfiguredError("Provider %s is not configured" %
                                         (domain, ))
        else:
            self.log.debug(
                'BOOTSTRAP: Provider is already initialized, checking if '
                'there are newer config files...')
            self.bootstrap(replace_if_newer=True)
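A hedged usage sketch for the bootstrap constructor above; the enclosing class name is not shown in the snippet, so Provider is an assumption:

# 'Provider' is an assumed class name for the constructor shown above.
try:
    provider = Provider('example.org', autoconf=False)
except NotConfiguredError:
    # no local provider configuration found under the basedir; retry with
    # autoconf so the provider files get downloaded
    provider = Provider('example.org', autoconf=True)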
Example 4
    def __init__(self,
                 address,
                 nickserver_uri,
                 soledad,
                 token=None,
                 ca_cert_path=None,
                 api_uri=None,
                 api_version=None,
                 uid=None,
                 gpgbinary=None):
        """
        Initialize a Key Manager for user's C{address} with provider's
        nickserver reachable in C{nickserver_uri}.

        :param address: The email address of the user of this Key Manager.
        :type address: str
        :param nickserver_uri: The URI of the nickserver.
        :type nickserver_uri: str
        :param soledad: A Soledad instance for local storage of keys.
        :type soledad: leap.soledad.Soledad
        :param token: The token for interacting with the webapp API.
        :type token: str
        :param ca_cert_path: The path to the CA certificate.
        :type ca_cert_path: str
        :param api_uri: The URI of the webapp API.
        :type api_uri: str
        :param api_version: The version of the webapp API.
        :type api_version: str
        :param uid: The user's UID.
        :type uid: str
        :param gpgbinary: Name for GnuPG binary executable.
        :type gpgbinary: C{str}
        """
        self._address = address
        self._nickserver_uri = nickserver_uri
        self._soledad = soledad
        self._token = token
        self.ca_cert_path = ca_cert_path
        self.api_uri = api_uri
        self.api_version = api_version
        self.uid = uid
        self._openpgp = OpenPGPScheme(soledad, gpgbinary=gpgbinary)
        self._combined_ca_bundle = self._create_combined_bundle_file()
        self._async_client = HTTPClient(self._combined_ca_bundle)
        self._async_client_pinned = HTTPClient(self._ca_cert_path)
Example 5
    def __init__(self, address, nickserver_uri, soledad, token=None,
                 ca_cert_path=None, api_uri=None, api_version=None, uid=None,
                 gpgbinary=None):
        """
        Initialize a Key Manager for user's C{address} with provider's
        nickserver reachable in C{nickserver_uri}.

        :param address: The email address of the user of this Key Manager.
        :type address: str
        :param nickserver_uri: The URI of the nickserver.
        :type nickserver_uri: str
        :param soledad: A Soledad instance for local storage of keys.
        :type soledad: leap.soledad.Soledad
        :param token: The token for interacting with the webapp API.
        :type token: str
        :param ca_cert_path: The path to the CA certificate.
        :type ca_cert_path: str
        :param api_uri: The URI of the webapp API.
        :type api_uri: str
        :param api_version: The version of the webapp API.
        :type api_version: str
        :param uid: The user's UID.
        :type uid: str
        :param gpgbinary: Name for GnuPG binary executable.
        :type gpgbinary: C{str}
        """
        self._address = address
        self._nickserver_uri = nickserver_uri
        self._soledad = soledad
        self._token = token
        self.ca_cert_path = ca_cert_path
        self.api_uri = api_uri
        self.api_version = api_version
        self.uid = uid
        # a dict to map key types to their handlers
        self._wrapper_map = {
            OpenPGPKey: OpenPGPScheme(soledad, gpgbinary=gpgbinary),
            # other types of key will be added to this mapper.
        }
        # the following are used to perform https requests
        self._fetcher = requests
        self._combined_ca_bundle = self._create_combined_bundle_file()
        self._async_client = HTTPClient(self._combined_ca_bundle)
        self._async_client_pinned = HTTPClient(self._ca_cert_path)
Example 6
    def __init__(self, url, source_replica_uid, creds, crypto, cert_file,
                 sync_db=None, sync_enc_pool=None):
        """
        Initialize the sync target.

        :param url: The server sync url.
        :type url: str
        :param source_replica_uid: The source replica uid which we use when
                                   deferring decryption.
        :type source_replica_uid: str
        :param creds: A dictionary containing the uuid and token.
        :type creds: dict
        :param crypto: An instance of SoledadCrypto so we can encrypt/decrypt
                        document contents when syncing.
        :type crypto: soledad.crypto.SoledadCrypto
        :param cert_file: Path to the certificate of the ca used to validate
                          the SSL certificate used by the remote soledad
                          server.
        :type cert_file: str
        :param sync_db: Optional handler for the db with the symmetric
                        encryption of the syncing documents. If
                        None, encryption will be done in-place
                        instead of retrieving it from the dedicated
                        database.
        :type sync_db: Sqlite handler
        :param sync_enc_pool: The encryption pool to use to defer encryption.
                              If None is passed the encryption will not be
                              deferred.
        :type sync_enc_pool: leap.soledad.client.encdecpool.SyncEncrypterPool
        """
        if url.endswith("/"):
            url = url[:-1]
        self._url = str(url) + "/sync-from/" + str(source_replica_uid)
        self.source_replica_uid = source_replica_uid
        self._auth_header = None
        self.set_creds(creds)
        self._crypto = crypto
        self._sync_db = sync_db
        self._sync_enc_pool = sync_enc_pool
        self._insert_doc_cb = None
        # asynchronous encryption/decryption attributes
        self._decryption_callback = None
        self._sync_decr_pool = None
        self._http = HTTPClient(cert_file)
Example 7
    def _reload_http_client(self, ret):
        self._http = HTTPClient(self._get_ca_cert_path())
        return ret
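This helper is shaped like a deferred callback: it rebuilds the HTTP client pinned to the current CA certificate path and passes the callback result through unchanged. A hedged wiring sketch; _download_ca_cert is an assumed name, not part of the snippets above:

# Hypothetical caller; _download_ca_cert is an assumption.
d = self._download_ca_cert()
d.addCallback(self._reload_http_client)   # result flows through unchanged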
Example 8
class SoledadHTTPSyncTarget(SyncTarget):
    """
    A SyncTarget that encrypts data before sending and decrypts data after
    receiving.

    Normally encryption will have been written to the sync database upon
    document modification. The sync database is also used to temporarily
    store the parsed documents that the remote sends us, before they are
    decrypted and written to the main database.
    """
    def __init__(self,
                 url,
                 source_replica_uid,
                 creds,
                 crypto,
                 cert_file,
                 sync_db=None,
                 sync_enc_pool=None):
        """
        Initialize the sync target.

        :param url: The server sync url.
        :type url: str
        :param source_replica_uid: The source replica uid which we use when
                                   deferring decryption.
        :type source_replica_uid: str
        :param creds: A dictionary containing the uuid and token.
        :type creds: dict
        :param crypto: An instance of SoledadCrypto so we can encrypt/decrypt
                        document contents when syncing.
        :type crypto: soledad.crypto.SoledadCrypto
        :param cert_file: Path to the certificate of the ca used to validate
                          the SSL certificate used by the remote soledad
                          server.
        :type cert_file: str
        :param sync_db: Optional handler for the db with the symmetric
                        encryption of the syncing documents. If
                        None, encryption will be done in-place
                        instead of retrieving it from the dedicated
                        database.
        :type sync_db: Sqlite handler
        :param sync_enc_pool: The encryption pool to use to defer encryption.
                              If None is passed the encryption will not be
                              deferred.
        :type sync_enc_pool: leap.soledad.client.encdecpool.SyncEncrypterPool
        """
        if url.endswith("/"):
            url = url[:-1]
        self._url = str(url) + "/sync-from/" + str(source_replica_uid)
        self.source_replica_uid = source_replica_uid
        self._auth_header = None
        self.set_creds(creds)
        self._crypto = crypto
        self._sync_db = sync_db
        self._sync_enc_pool = sync_enc_pool
        self._insert_doc_cb = None
        # asynchronous encryption/decryption attributes
        self._decryption_callback = None
        self._sync_decr_pool = None
        self._http = HTTPClient(cert_file)

    def close(self):
        self._http.close()

    def set_creds(self, creds):
        """
        Update credentials.

        :param creds: A dictionary containing the uuid and token.
        :type creds: dict
        """
        uuid = creds['token']['uuid']
        token = creds['token']['token']
        auth = '%s:%s' % (uuid, token)
        b64_token = base64.b64encode(auth)
        self._auth_header = {'Authorization': ['Token %s' % b64_token]}

    @property
    def _defer_encryption(self):
        return self._sync_enc_pool is not None

    #
    # SyncTarget API
    #

    @defer.inlineCallbacks
    def get_sync_info(self, source_replica_uid):
        """
        Return information about known state of remote database.

        Return the replica_uid and the current database generation of the
        remote database, and its last-seen database generation for the client
        replica.

        :param source_replica_uid: The client-side replica uid.
        :type source_replica_uid: str

        :return: A deferred which fires with (target_replica_uid,
                 target_replica_generation, target_trans_id,
                 source_replica_last_known_generation,
                 source_replica_last_known_transaction_id)
        :rtype: twisted.internet.defer.Deferred
        """
        raw = yield self._http_request(self._url, headers=self._auth_header)
        res = json.loads(raw)
        defer.returnValue(
            (res['target_replica_uid'], res['target_replica_generation'],
             res['target_replica_transaction_id'],
             res['source_replica_generation'], res['source_transaction_id']))

    def record_sync_info(self, source_replica_uid, source_replica_generation,
                         source_replica_transaction_id):
        """
        Record tip information for another replica.

        After sync_exchange has been processed, the caller will have
        received new content from this replica. This call allows the
        source replica instigating the sync to inform us what their
        generation became after applying the documents we returned.

        This is used to allow future sync operations to not need to repeat data
        that we just talked about. It also means that if this is called at the
        wrong time, there can be database records that will never be
        synchronized.

        :param source_replica_uid: The identifier for the source replica.
        :type source_replica_uid: str
        :param source_replica_generation: The database generation for the
                                          source replica.
        :type source_replica_generation: int
        :param source_replica_transaction_id: The transaction id associated
                                              with the source replica
                                              generation.
        :type source_replica_transaction_id: str

        :return: A deferred which fires with the result of the query.
        :rtype: twisted.internet.defer.Deferred
        """
        data = json.dumps({
            'generation': source_replica_generation,
            'transaction_id': source_replica_transaction_id
        })
        headers = self._auth_header.copy()
        headers.update({'content-type': ['application/json']})
        return self._http_request(self._url,
                                  method='PUT',
                                  headers=headers,
                                  body=data)

    @defer.inlineCallbacks
    def sync_exchange(self,
                      docs_by_generation,
                      source_replica_uid,
                      last_known_generation,
                      last_known_trans_id,
                      insert_doc_cb,
                      ensure_callback=None,
                      defer_decryption=True,
                      sync_id=None):
        """
        Find out which documents the remote database does not know about,
        encrypt and send them. After that, receive documents from the remote
        database.

        :param docs_by_generation: A list of (doc_id, generation, trans_id)
                                   of local documents that were changed since
                                   the last local generation the remote
                                   replica knows about.
        :type docs_by_generation: list of tuples

        :param source_replica_uid: The uid of the source replica.
        :type source_replica_uid: str

        :param last_known_generation: Target's last known generation.
        :type last_known_generation: int

        :param last_known_trans_id: Target's last known transaction id.
        :type last_known_trans_id: str

        :param insert_doc_cb: A callback for inserting received documents from
                              target. If not overridden, this will call u1db
                              insert_doc_from_target in synchronizer, which
                              implements the TAKE OTHER semantics.
        :type insert_doc_cb: function

        :param ensure_callback: A callback that ensures we know the target
                                replica uid if the target replica was just
                                created.
        :type ensure_callback: function

        :param defer_decryption: Whether to defer the decryption process using
                                 the intermediate database. If False,
                                 decryption will be done inline.
        :type defer_decryption: bool

        :return: A deferred which fires with the new generation and
                 transaction id of the target replica.
        :rtype: twisted.internet.defer.Deferred
        """

        self._ensure_callback = ensure_callback

        if sync_id is None:
            sync_id = str(uuid4())
        self.source_replica_uid = source_replica_uid

        # save a reference to the callback so we can use it after decrypting
        self._insert_doc_cb = insert_doc_cb

        gen_after_send, trans_id_after_send = yield self._send_docs(
            docs_by_generation, last_known_generation, last_known_trans_id,
            sync_id)

        cur_target_gen, cur_target_trans_id = yield self._receive_docs(
            last_known_generation,
            last_known_trans_id,
            ensure_callback,
            sync_id,
            defer_decryption=defer_decryption)

        # update gen and trans id info in case we just sent and did not
        # receive docs.
        if gen_after_send is not None and gen_after_send > cur_target_gen:
            cur_target_gen = gen_after_send
            cur_target_trans_id = trans_id_after_send

        defer.returnValue([cur_target_gen, cur_target_trans_id])

    #
    # methods to send docs
    #

    def _prepare(self, comma, entries, **dic):
        entry = comma + '\r\n' + json.dumps(dic)
        entries.append(entry)
        return len(entry)

    @defer.inlineCallbacks
    def _send_docs(self, docs_by_generation, last_known_generation,
                   last_known_trans_id, sync_id):

        if not docs_by_generation:
            defer.returnValue([None, None])

        headers = self._auth_header.copy()
        headers.update({'content-type': ['application/x-soledad-sync-put']})
        # add remote replica metadata to the request
        first_entries = ['[']
        self._prepare('',
                      first_entries,
                      last_known_generation=last_known_generation,
                      last_known_trans_id=last_known_trans_id,
                      sync_id=sync_id,
                      ensure=self._ensure_callback is not None)
        idx = 0
        total = len(docs_by_generation)
        for doc, gen, trans_id in docs_by_generation:
            idx += 1
            result = yield self._send_one_doc(headers, first_entries, doc, gen,
                                              trans_id, total, idx)
            if self._defer_encryption:
                self._sync_enc_pool.delete_encrypted_doc(doc.doc_id, doc.rev)

            msg = "%d/%d" % (idx, total)
            content = {'sent': idx, 'total': total}
            emit(SOLEDAD_SYNC_SEND_STATUS, content)
            logger.debug("Sync send status: %s" % msg)

        response_dict = json.loads(result)[0]
        gen_after_send = response_dict['new_generation']
        trans_id_after_send = response_dict['new_transaction_id']
        defer.returnValue([gen_after_send, trans_id_after_send])

    @defer.inlineCallbacks
    def _send_one_doc(self, headers, first_entries, doc, gen, trans_id,
                      number_of_docs, doc_idx):
        entries = first_entries[:]
        # add the document to the request
        content = yield self._encrypt_doc(doc)
        self._prepare(',',
                      entries,
                      id=doc.doc_id,
                      rev=doc.rev,
                      content=content,
                      gen=gen,
                      trans_id=trans_id,
                      number_of_docs=number_of_docs,
                      doc_idx=doc_idx)
        entries.append('\r\n]')
        data = ''.join(entries)
        result = yield self._http_request(self._url,
                                          method='POST',
                                          headers=headers,
                                          body=data)
        defer.returnValue(result)

    def _encrypt_doc(self, doc):
        d = None
        if doc.is_tombstone():
            d = defer.succeed(None)
        elif not self._defer_encryption:
            # fallback case, for tests
            d = defer.succeed(self._crypto.encrypt_doc(doc))
        else:

            def _maybe_encrypt_doc_inline(doc_json):
                if doc_json is None:
                    # the document is not marked as tombstone, but we got
                    # nothing from the sync db. As it is not encrypted
                    # yet, we force inline encryption.
                    return self._crypto.encrypt_doc(doc)
                return doc_json

            d = self._sync_enc_pool.get_encrypted_doc(doc.doc_id, doc.rev)
            d.addCallback(_maybe_encrypt_doc_inline)
        return d

    #
    # methods to receive doc
    #

    @defer.inlineCallbacks
    def _receive_docs(self, last_known_generation, last_known_trans_id,
                      ensure_callback, sync_id, defer_decryption):

        self._queue_for_decrypt = defer_decryption \
            and self._sync_db is not None

        new_generation = last_known_generation
        new_transaction_id = last_known_trans_id

        if self._queue_for_decrypt:
            logger.debug(
                "Soledad sync: will queue received docs for decrypting.")

        if defer_decryption:
            self._setup_sync_decr_pool()

        headers = self._auth_header.copy()
        headers.update({'content-type': ['application/x-soledad-sync-get']})

        # ---------------------------------------------------------------------
        # maybe receive the first document
        # ---------------------------------------------------------------------

        # we fetch the first document before fetching the rest because we need
        # to know the total number of documents to be received, and this
        # information comes as metadata to each request.

        doc = yield self._receive_one_doc(headers, last_known_generation,
                                          last_known_trans_id, sync_id, 0)
        self._received_docs = 0
        number_of_changes, ngen, ntrans = self._insert_received_doc(doc, 1, 1)

        # update the target gen and trans_id in case a document was received
        if ngen:
            new_generation = ngen
            new_transaction_id = ntrans

        if defer_decryption:
            self._sync_decr_pool.start(number_of_changes)

        # ---------------------------------------------------------------------
        # maybe receive the rest of the documents
        # ---------------------------------------------------------------------

        # launch many asynchronous fetches and inserts of received documents
        # in the temporary sync db. Will wait for all results before
        # continuing.

        received = 1
        deferreds = []
        while received < number_of_changes:
            d = self._receive_one_doc(headers, last_known_generation,
                                      last_known_trans_id, sync_id, received)
            d.addCallback(
                self._insert_received_doc,
                received + 1,  # the index of the current received doc
                number_of_changes)
            deferreds.append(d)
            received += 1
        results = yield defer.gatherResults(deferreds)

        # get generation and transaction id of target after insertions
        if deferreds:
            _, new_generation, new_transaction_id = results.pop()

        # ---------------------------------------------------------------------
        # wait for async decryption to finish
        # ---------------------------------------------------------------------

        if defer_decryption:
            yield self._sync_decr_pool.deferred
            self._sync_decr_pool.stop()

        defer.returnValue([new_generation, new_transaction_id])

    def _receive_one_doc(self, headers, last_known_generation,
                         last_known_trans_id, sync_id, received):
        entries = ['[']
        # add remote replica metadata to the request
        self._prepare('',
                      entries,
                      last_known_generation=last_known_generation,
                      last_known_trans_id=last_known_trans_id,
                      sync_id=sync_id,
                      ensure=self._ensure_callback is not None)
        # inform server of how many documents have already been received
        self._prepare(',', entries, received=received)
        entries.append('\r\n]')
        # send headers
        return self._http_request(self._url,
                                  method='POST',
                                  headers=headers,
                                  body=''.join(entries))

    def _insert_received_doc(self, response, idx, total):
        """
        Insert a received document into the local replica.

        :param response: The body and headers of the response.
        :type response: tuple(str, dict)
        :param idx: The index count of the current operation.
        :type idx: int
        :param total: The total number of operations.
        :type total: int
        """
        new_generation, new_transaction_id, number_of_changes, doc_id, \
            rev, content, gen, trans_id = \
            self._parse_received_doc_response(response)
        if doc_id is not None:
            # decrypt incoming document and insert into local database
            # -------------------------------------------------------------
            # symmetric decryption of document's contents
            # -------------------------------------------------------------
            # If arriving content was symmetrically encrypted, we decrypt it.
            # We do it inline if defer_decryption flag is False or no sync_db
            # was defined, otherwise we defer it writing it to the received
            # docs table.
            doc = SoledadDocument(doc_id, rev, content)
            if is_symmetrically_encrypted(doc):
                if self._queue_for_decrypt:
                    self._sync_decr_pool.insert_encrypted_received_doc(
                        doc.doc_id, doc.rev, doc.content, gen, trans_id, idx)
                else:
                    # defer_decryption is False or no-sync-db fallback
                    doc.set_json(self._crypto.decrypt_doc(doc))
                    self._insert_doc_cb(doc, gen, trans_id)
            else:
                # not symmetrically encrypted doc, insert it directly
                # or save it in the decrypted stage.
                if self._queue_for_decrypt:
                    self._sync_decr_pool.insert_received_doc(
                        doc.doc_id, doc.rev, doc.content, gen, trans_id, idx)
                else:
                    self._insert_doc_cb(doc, gen, trans_id)
            # -------------------------------------------------------------
            # end of symmetric decryption
            # -------------------------------------------------------------
        self._received_docs += 1
        msg = "%d/%d" % (self._received_docs, total)
        content = {'received': self._received_docs, 'total': total}
        emit(SOLEDAD_SYNC_RECEIVE_STATUS, content)
        logger.debug("Sync receive status: %s" % msg)
        return number_of_changes, new_generation, new_transaction_id

    def _parse_received_doc_response(self, response):
        """
        Parse the response from the server containing the received document.

        :param response: The body and headers of the response.
        :type response: tuple(str, dict)

        :return: (new_gen, new_trans_id, number_of_changes, doc_id, rev,
                 content, gen, trans_id)
        :rtype: tuple
        """
        # decode incoming stream
        parts = response.splitlines()
        if not parts or parts[0] != '[' or parts[-1] != ']':
            raise errors.BrokenSyncStream
        data = parts[1:-1]
        # decode metadata
        try:
            line, comma = utils.check_and_strip_comma(data[0])
            metadata = None
        except IndexError:
            raise errors.BrokenSyncStream
        try:
            metadata = json.loads(line)
            new_generation = metadata['new_generation']
            new_transaction_id = metadata['new_transaction_id']
            number_of_changes = metadata['number_of_changes']
        except (ValueError, KeyError):
            raise errors.BrokenSyncStream
        # make sure we have replica_uid from fresh new dbs
        if self._ensure_callback and 'replica_uid' in metadata:
            self._ensure_callback(metadata['replica_uid'])
        # parse incoming document info
        doc_id = None
        rev = None
        content = None
        gen = None
        trans_id = None
        if number_of_changes > 0:
            try:
                entry = json.loads(data[1])
                doc_id = entry['id']
                rev = entry['rev']
                content = entry['content']
                gen = entry['gen']
                trans_id = entry['trans_id']
            except (IndexError, KeyError):
                raise errors.BrokenSyncStream
        return new_generation, new_transaction_id, number_of_changes, \
            doc_id, rev, content, gen, trans_id

    def _setup_sync_decr_pool(self):
        """
        Set up the SyncDecrypterPool for deferred decryption.
        """
        if self._sync_decr_pool is None and self._sync_db is not None:
            # initialize syncing queue decryption pool
            self._sync_decr_pool = SyncDecrypterPool(
                self._crypto,
                self._sync_db,
                insert_doc_cb=self._insert_doc_cb,
                source_replica_uid=self.source_replica_uid)

    def _http_request(self, url, method='GET', body=None, headers={}):
        d = self._http.request(url, method, body, headers, readBody)
        d.addErrback(_unauth_to_invalid_token_error)
        return d
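A hedged sketch of how a synchronizer-like caller might drive the class above; target is an already-constructed SoledadHTTPSyncTarget (see Example 2), the local-state variables are placeholders, and the flow is simplified with respect to the real u1db synchronizer:

def insert_doc_cb(doc, gen, trans_id):
    # called once per received (and, when decryption is inline, already
    # decrypted) document
    pass

def on_done(result):
    new_gen, new_trans_id = result   # sync_exchange fires with [gen, trans_id]
    return result

d = target.sync_exchange(
    docs_by_generation,        # [(doc, gen, trans_id), ...] of local changes
    source_replica_uid,
    last_known_generation,     # target generation known from the previous sync
    last_known_trans_id,
    insert_doc_cb,
    defer_decryption=False)    # decrypt inline instead of using the sync db
d.addCallback(on_done)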
Example 9
    def __init__(self, nickserver_uri, ca_cert_path, token):
        self._nickserver_uri = nickserver_uri
        self._async_client_pinned = HTTPClient(ca_cert_path)
        self.token = token
Example 10
class Nicknym(object):
    """
    Responsible for communication to the nicknym server.
    """

    log = Logger()

    PUBKEY_KEY = "user[public_key]"

    def __init__(self, nickserver_uri, ca_cert_path, token):
        self._nickserver_uri = nickserver_uri
        self._async_client_pinned = HTTPClient(ca_cert_path)
        self.token = token

    @defer.inlineCallbacks
    def put_key(self, uid, key_data, api_uri, api_version):
        """
        Send a PUT request uploading C{key_data} for C{uid} to the webapp API.

        The request will be sent using the configured CA certificate path to
        verify the server certificate and the configured token for
        authentication.

        :param uid: The UID of the user whose key is being uploaded.
        :type uid: str
        :param key_data: The public key data to upload.
        :type key_data: dict, str or file

        :return: A deferred that will be fired when PUT request finishes
        :rtype: Deferred
        """
        data = {self.PUBKEY_KEY: key_data}

        uri = "%s/%s/users/%s.json" % (api_uri, api_version, uid)

        leap_assert(self.token is not None,
                    'We need a token to interact with webapp!')
        if type(data) == dict:
            data = urllib.urlencode(data)
        headers = {'Authorization': [str('Token token=%s' % self.token)]}
        headers['Content-Type'] = ['application/x-www-form-urlencoded']
        try:
            res = yield self._async_client_pinned.request(str(uri),
                                                          'PUT',
                                                          body=str(data),
                                                          headers=headers)
        except Exception as e:
            self.log.warn('Error uploading key: %r' % (e, ))
            raise e
        if 'error' in res:
            # FIXME: That's a workaround for 500,
            # we need to implement a readBody to assert response code
            self.log.warn('Error uploading key: %r' % (res, ))
            raise Exception(res)

    @defer.inlineCallbacks
    def _get_key_from_nicknym(self, uri):
        """
        Send a GET request to C{uri}.

        :param uri: The URI of the request.
        :type uri: str

        :return: A deferred that will be fired with GET content as json (dict)
        :rtype: Deferred
        """
        try:
            content = yield self._fetch_and_handle_404_from_nicknym(uri)
            json_content = json.loads(content)
        except KeyNotFound:
            raise
        except IOError as e:
            self.log.warn('HTTP error retrieving key: %r' % (e, ))
            self.log.warn("%s" % (content, ))
            raise KeyNotFound(e.message), None, sys.exc_info()[2]
        except ValueError as v:
            self.log.warn('Invalid JSON data from key: %s' % (uri, ))
            raise KeyNotFound(v.message + ' - ' + uri), None, sys.exc_info()[2]
        except Exception as e:
            self.log.warn('Error retrieving key: %r' % (e, ))
            raise KeyNotFound(e.message), None, sys.exc_info()[2]
        # Responses are now text/plain, although it's json anyway, but
        # this will fail when it shouldn't
        # leap_assert(
        #     res.headers['content-type'].startswith('application/json'),
        #     'Content-type is not JSON.')
        defer.returnValue(json_content)

    def _fetch_and_handle_404_from_nicknym(self, uri):
        """
        Send a GET request to C{uri}.

        :param uri: The URI of the request.
        :type uri: str

        :return: A deferred that will be fired with GET content as json (dict)
        :rtype: Deferred
        """
        def check_404(response):
            if response.code == NOT_FOUND:
                message = ' %s: Key not found. Request: %s' \
                          % (response.code, uri)
                self.log.warn(message)
                raise KeyNotFound(message), None, sys.exc_info()[2]
            return response

        d = self._async_client_pinned.request(str(uri),
                                              'GET',
                                              callback=check_404)
        d.addCallback(client.readBody)
        return d

    @memoized_method(invalidation=300)
    def fetch_key_with_address(self, address):
        """
        Fetch keys bound to address from nickserver.

        :param address: The address bound to the keys.
        :type address: str

        :return: A Deferred which fires when the key is in the storage,
                 or which fails with KeyNotFound if the key was not found on
                 nickserver.
        :rtype: Deferred

        """
        return self._get_key_from_nicknym(self._nickserver_uri + '?address=' +
                                          address)

    @memoized_method(invalidation=300)
    def fetch_key_with_fingerprint(self, fingerprint):
        """
        Fetch keys bound to fingerprint from nickserver.

        :param fingerprint: The fingerprint bound to the keys.
        :type fingerprint: str

        :return: A Deferred which fires when the key is in the storage,
                 or which fails with KeyNotFound if the key was not found on
                 nickserver.
        :rtype: Deferred

        """
        return self._get_key_from_nicknym(self._nickserver_uri +
                                          '?fingerprint=' + fingerprint)
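A hedged usage sketch for the Nicknym client above; endpoints and the key payload are placeholders:

nicknym = Nicknym('https://nicknym.example.org:6425',
                  '/path/to/provider-ca.crt', '<session token>')

# look up someone's public key; the result is the parsed JSON dict
# (memoized for ~300 seconds by @memoized_method)
d = nicknym.fetch_key_with_address('bob@example.org')
d.addCallback(lambda json_content: json_content.get('openpgp'))

# upload our own public key through the provider's webapp API
d2 = nicknym.put_key('<user uuid>', pubkey_data,   # pubkey_data is a placeholder
                     'https://api.example.org:4430', '1')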
Example 11
class KeyManager(object):

    #
    # server's key storage constants
    #

    log = Logger()

    OPENPGP_KEY = 'openpgp'
    PUBKEY_KEY = "user[public_key]"

    def __init__(self,
                 address,
                 nickserver_uri,
                 soledad,
                 token=None,
                 ca_cert_path=None,
                 api_uri=None,
                 api_version=None,
                 uid=None,
                 gpgbinary=None,
                 combined_ca_bundle=None):
        """
        Initialize a Key Manager for user's C{address} with provider's
        nickserver reachable in C{nickserver_uri}.

        :param address: The email address of the user of this Key Manager.
        :type address: str
        :param nickserver_uri: The URI of the nickserver.
        :type nickserver_uri: str
        :param soledad: A Soledad instance for local storage of keys.
        :type soledad: leap.soledad.Soledad
        :param token: The token for interacting with the webapp API.
        :type token: str
        :param ca_cert_path: The path to the CA certificate.
        :type ca_cert_path: str
        :param api_uri: The URI of the webapp API.
        :type api_uri: str
        :param api_version: The version of the webapp API.
        :type api_version: str
        :param uid: The user's UID.
        :type uid: str
        :param gpgbinary: Name for GnuPG binary executable.
        :type gpgbinary: C{str}
        """
        self._address = address
        self._nickserver_uri = nickserver_uri
        self._soledad = soledad
        self._token = token
        self.ca_cert_path = ca_cert_path
        self.api_uri = api_uri
        self.api_version = api_version
        self.uid = uid
        create = self._create_combined_bundle_file
        try:
            self._combined_ca_bundle = combined_ca_bundle or create()
        except Exception:
            self.log.warn('Error while creating combined ca bundle')
            self._combined_ca_bundle = ''

        self._async_client = HTTPClient(self._combined_ca_bundle)
        self._nicknym = Nicknym(self._nickserver_uri, self._ca_cert_path,
                                self._token)
        self.refresher = None
        self._init_gpg(soledad, gpgbinary)

    #
    # utilities
    #

    def _init_gpg(self, soledad, gpgbinary):
        self._openpgp = OpenPGPScheme(soledad, gpgbinary=gpgbinary)

    def start_refresher(self):
        self.refresher = RandomRefreshPublicKey(self._openpgp, self)
        self.refresher.start()

    def stop_refresher(self):
        self.refresher.stop()

    def _create_combined_bundle_file(self):
        leap_ca_bundle = ca_bundle.where()

        if self._ca_cert_path == leap_ca_bundle:
            return self._ca_cert_path  # don't merge file with itself
        elif not self._ca_cert_path:
            return leap_ca_bundle

        tmp_file = tempfile.NamedTemporaryFile(delete=False)

        with open(tmp_file.name, 'w') as fout:
            fin = fileinput.input(files=(leap_ca_bundle, self._ca_cert_path))
            for line in fin:
                fout.write(line)
            fin.close()

        return tmp_file.name

    @defer.inlineCallbacks
    def _get_key_from_nicknym(self, address):
        """
        Send a GET request to the nickserver for the key bound to C{address}.

        :param address: The address bound to the key.
        :type address: str

        :return: A deferred that will be fired with GET content as json (dict)
        :rtype: Deferred
        """
        try:
            uri = self._nickserver_uri + '?address=' + address
            content = yield self._fetch_and_handle_404_from_nicknym(
                uri, address)
            json_content = json.loads(content)

        except keymanager_errors.KeyNotFound:
            raise
        except IOError as e:
            self.log.warn("HTTP error retrieving key: %r" % (e, ))
            self.log.warn("%s" % (content, ))
            raise keymanager_errors.KeyNotFound(e.message), \
                None, sys.exc_info()[2]
        except ValueError as v:
            self.log.warn("Invalid JSON data from key: %s" % (uri, ))
            raise keymanager_errors.KeyNotFound(v.message + ' - ' + uri), \
                None, sys.exc_info()[2]

        except Exception as e:
            self.log.warn("Error retrieving key: %r" % (e, ))
            raise keymanager_errors.KeyNotFound(e.message), \
                None, sys.exc_info()[2]
        # Responses are now text/plain, although it's json anyway, but
        # this will fail when it shouldn't
        # leap_assert(
        #     res.headers['content-type'].startswith('application/json'),
        #     'Content-type is not JSON.')
        defer.returnValue(json_content)

    def _fetch_and_handle_404_from_nicknym(self, uri, address):
        """
        Send a GET request to C{uri}.

        :param uri: The URI of the request.
        :type uri: str
        :param address: The email corresponding to the key.
        :type address: str

        :return: A deferred that will be fired with GET content as json (dict)
        :rtype: Deferred
        """
        def check_404(response):
            if response.code == NOT_FOUND:
                message = '%s: %s key not found.' % (response.code, address)
                self.log.warn(message)
                raise KeyNotFound(message), None, sys.exc_info()[2]
            return response

        d = self._nicknym._async_client_pinned.request(str(uri),
                                                       'GET',
                                                       callback=check_404)
        d.addCallback(client.readBody)
        return d

    @defer.inlineCallbacks
    def _get_with_combined_ca_bundle(self, uri, data=None):
        """
        Send a GET request to C{uri} containing C{data}.

        Instead of using the ca_cert provided on construction time, this
        version also uses the default certificates shipped with leap.common

        :param uri: The URI of the request.
        :type uri: str
        :param data: The body of the request.
        :type data: dict, str or file

        :return: A deferred that will be fired with the GET response
        :rtype: Deferred
        """
        try:
            content = yield self._async_client.request(str(uri), 'GET')
        except Exception as e:
            self.log.warn("There was a problem fetching key: %s" % (e, ))
            raise keymanager_errors.KeyNotFound(uri)
        if not content:
            raise keymanager_errors.KeyNotFound(uri)
        defer.returnValue(content)

    #
    # key management
    #

    def send_key(self):
        """
        Send user's key to provider.

        The public key bound to the user's address is sent to the provider,
        which will sign it and replace any prior keys for the same address in
        its database.

        :return: A Deferred which fires when the key is sent, or which fails
                 with KeyNotFound if the key was not found in local database.
        :rtype: Deferred

        :raise UnsupportedKeyTypeError: if invalid key type
        """
        def send(pubkey):
            d = self._nicknym.put_key(self.uid, pubkey.key_data, self._api_uri,
                                      self._api_version)
            d.addCallback(lambda _: emit_async(
                catalog.KEYMANAGER_DONE_UPLOADING_KEYS, self._address))
            return d

        d = self.get_key(self._address, private=False, fetch_remote=False)
        d.addCallback(send)
        return d

    @defer.inlineCallbacks
    def _fetch_keys_from_server_and_store_local(self, address):
        """
        Fetch keys from nickserver and insert them into the local database.

        :param address: The address bound to the keys.
        :type address: str

        :return: A Deferred which fires when the key is in the storage,
                     or which fails with KeyNotFound if the key was not
                     found on nickserver.
        :rtype: Deferred

        """
        server_keys = yield self._nicknym.fetch_key_with_address(address)

        # insert keys in local database
        if self.OPENPGP_KEY in server_keys:
            # nicknym server is authoritative for its own domain,
            # for other domains the key might come from key servers.
            validation_level = ValidationLevels.Weak_Chain
            _, domain = _split_email(address)
            if (domain == _get_domain(self._nickserver_uri)):
                validation_level = ValidationLevels.Provider_Trust

            yield self.put_raw_key(server_keys['openpgp'],
                                   address=address,
                                   validation=validation_level)

    def get_key(self, address, private=False, fetch_remote=True):
        """
        Return a key bound to address.

        First, search for the key in local storage. If it is not available,
        then try to fetch from nickserver.

        :param address: The address bound to the key.
        :type address: str
        :param private: Look for a private key instead of a public one?
        :type private: bool
        :param fetch_remote: If key not found in local storage try to fetch
                             from nickserver
        :type fetch_remote: bool

        :return: A Deferred which fires with an EncryptionKey bound to address,
                 fails with KeyNotFound if no key was found either locally or
                 on the keyserver, or fails with KeyVersionError if the key
                 has a format not supported by this version of KeyManager
        :rtype: Deferred

        :raise UnsupportedKeyTypeError: if invalid key type
        """
        self.log.debug('Getting key for %s' % (address, ))
        emit_async(catalog.KEYMANAGER_LOOKING_FOR_KEY, address)

        def key_found(key):
            emit_async(catalog.KEYMANAGER_KEY_FOUND, address)
            return key

        def key_not_found(failure):
            if not failure.check(keymanager_errors.KeyNotFound):
                return failure

            emit_async(catalog.KEYMANAGER_KEY_NOT_FOUND, address)

            # we will only try to fetch a key from nickserver if fetch_remote
            # is True and the key is not private.
            if fetch_remote is False or private is True:
                return failure

            emit_async(catalog.KEYMANAGER_LOOKING_FOR_KEY, address)
            d = self._fetch_keys_from_server_and_store_local(address)
            d.addCallback(
                lambda _: self._openpgp.get_key(address, private=False))
            d.addCallback(key_found)
            return d

        # return key if it exists in local database
        d = self._openpgp.get_key(address, private=private)
        d.addCallbacks(key_found, key_not_found)
        return d

    def get_all_keys(self, private=False):
        """
        Return all keys stored in local database.

        :param private: Include private keys
        :type private: bool

        :return: A Deferred which fires with a list of all keys in local db.
        :rtype: Deferred
        """
        return self._openpgp.get_all_keys(private)

    def gen_key(self):
        """
        Generate a key bound to the user's address.

        :return: A Deferred which fires with the generated EncryptionKey.
        :rtype: Deferred

        :raise UnsupportedKeyTypeError: if invalid key type
        """
        def signal_finished(key):
            emit_async(catalog.KEYMANAGER_FINISHED_KEY_GENERATION,
                       self._address)
            return key

        emit_async(catalog.KEYMANAGER_STARTED_KEY_GENERATION, self._address)

        d = self._openpgp.gen_key(self._address)
        d.addCallback(signal_finished)
        return d

    #
    # Setters/getters
    #

    def _get_token(self):
        return self._token

    def _set_token(self, token):
        self._token = token
        self._nicknym.token = token

    token = property(_get_token, _set_token, doc='The session token.')

    def _get_ca_cert_path(self):
        return self._ca_cert_path

    def _set_ca_cert_path(self, ca_cert_path):
        self._ca_cert_path = ca_cert_path

    ca_cert_path = property(_get_ca_cert_path,
                            _set_ca_cert_path,
                            doc='The path to the CA certificate.')

    def _get_api_uri(self):
        return self._api_uri

    def _set_api_uri(self, api_uri):
        self._api_uri = api_uri

    api_uri = property(_get_api_uri, _set_api_uri, doc='The webapp API URI.')

    def _get_api_version(self):
        return self._api_version

    def _set_api_version(self, api_version):
        self._api_version = api_version

    api_version = property(_get_api_version,
                           _set_api_version,
                           doc='The webapp API version.')

    def _get_uid(self):
        return self._uid

    def _set_uid(self, uid):
        self._uid = uid

    uid = property(_get_uid, _set_uid, doc='The uid of the user.')

    #
    # encrypt/decrypt and sign/verify API
    #

    def encrypt(self,
                data,
                address,
                passphrase=None,
                sign=None,
                cipher_algo='AES256',
                fetch_remote=True):
        """
        Encrypt data with the public key bound to address and sign with the
        private key bound to the sign address.

        :param data: The data to be encrypted.
        :type data: str
        :param address: The address to encrypt it for.
        :type address: str
        :param passphrase: The passphrase for the secret key used for the
                           signature.
        :type passphrase: str
        :param sign: The address to be used for signature.
        :type sign: str
        :param cipher_algo: The cipher algorithm to use.
        :type cipher_algo: str
        :param fetch_remote: If key is not found in local storage try to fetch
                             from nickserver
        :type fetch_remote: bool

        :return: A Deferred which fires with the encrypted data as str, fails
                 with KeyNotFound if no keys were found either locally or on
                 the keyserver, fails with KeyVersionError if the key format
                 is not supported, or fails with EncryptError if encryption
                 failed for some reason.
        :rtype: Deferred

        :raise UnsupportedKeyTypeError: if invalid key type
        """
        @defer.inlineCallbacks
        def encrypt(keys):
            pubkey, signkey = keys
            encrypted = yield self._openpgp.encrypt(data,
                                                    pubkey,
                                                    passphrase,
                                                    sign=signkey,
                                                    cipher_algo=cipher_algo)
            if not pubkey.encr_used:
                pubkey.encr_used = True
                yield self._openpgp.put_key(pubkey)
            defer.returnValue(encrypted)

        dpub = self.get_key(address, private=False, fetch_remote=fetch_remote)
        dpriv = defer.succeed(None)
        if sign is not None:
            dpriv = self.get_key(sign, private=True)
        d = defer.gatherResults([dpub, dpriv], consumeErrors=True)
        d.addCallbacks(encrypt, self._extract_first_error)
        return d

    def decrypt(self,
                data,
                address,
                passphrase=None,
                verify=None,
                fetch_remote=True):
        """
        Decrypt data using private key from address and verify with public key
        bound to verify address.

        :param data: The data to be decrypted.
        :type data: str
        :param address: The address to whom data was encrypted.
        :type address: str
        :param passphrase: The passphrase for the secret key used for
                           decryption.
        :type passphrase: str
        :param verify: The address to be used for signature.
        :type verify: str
        :param fetch_remote: If key for verify not found in local storage try
                             to fetch from nickserver
        :type fetch_remote: bool

        :return: A Deferred which fires with:
            * (decrypted str, signing key) if validation works
            * (decrypted str, KeyNotFound) if signing key not found
            * (decrypted str, InvalidSignature) if signature is invalid
            * KeyNotFound failure if private key not found
            * DecryptError failure if decryption failed
        :rtype: Deferred

        :raise UnsupportedKeyTypeError: if invalid key type
        """
        @defer.inlineCallbacks
        def decrypt(keys):
            pubkey, privkey = keys
            decrypted, signed = yield self._openpgp.decrypt(
                data, privkey, passphrase=passphrase, verify=pubkey)
            if pubkey is None:
                signature = keymanager_errors.KeyNotFound(verify)
            elif signed:
                signature = pubkey
                if not pubkey.sign_used:
                    pubkey.sign_used = True
                    yield self._openpgp.put_key(pubkey)
                    defer.returnValue((decrypted, signature))
            else:
                signature = keymanager_errors.InvalidSignature(
                    'Failed to verify signature with key %s' %
                    (pubkey.fingerprint, ))
            defer.returnValue((decrypted, signature))

        dpriv = self.get_key(address, private=True)
        dpub = defer.succeed(None)
        if verify is not None:
            dpub = self.get_key(verify,
                                private=False,
                                fetch_remote=fetch_remote)
            dpub.addErrback(lambda f: None
                            if f.check(keymanager_errors.KeyNotFound) else f)
        d = defer.gatherResults([dpub, dpriv], consumeErrors=True)
        d.addCallbacks(decrypt, self._extract_first_error)
        return d

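    # gatherResults(consumeErrors=True) wraps the first failure in a
    # FirstError; unwrap it so callers see the original failure.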
    def _extract_first_error(self, failure):
        return failure.value.subFailure

    def sign(self,
             data,
             address,
             digest_algo='SHA512',
             clearsign=False,
             detach=True,
             binary=False):
        """
        Sign data with private key bound to address.

        :param data: The data to be signed.
        :type data: str
        :param address: The address to be used to sign.
        :type address: str
        :param digest_algo: The hash digest to use.
        :type digest_algo: str
        :param clearsign: If True, create a cleartext signature.
        :type clearsign: bool
        :param detach: If True, create a detached signature.
        :type detach: bool
        :param binary: If True, do not ascii armour the output.
        :type binary: bool

        :return: A Deferred which fires with the signed data as str or fails
                 with KeyNotFound if no key was found either locally or in
                 the keyserver, or with SignFailed if there was any error
                 signing.
        :rtype: Deferred

        :raise UnsupportedKeyTypeError: if invalid key type
        """
        def sign(privkey):
            return self._openpgp.sign(data,
                                      privkey,
                                      digest_algo=digest_algo,
                                      clearsign=clearsign,
                                      detach=detach,
                                      binary=binary)

        d = self.get_key(address, private=True)
        d.addCallback(sign)
        return d

    def verify(self, data, address, detached_sig=None, fetch_remote=True):
        """
        Verify signed data with the public key bound to address, optionally
        using detached_sig.

        :param data: The data to be verified.
        :type data: str
        :param address: The address to be used to verify.
        :type address: str
        :param detached_sig: A detached signature. If given, C{data} is
                             verified using this detached signature.
        :type detached_sig: str
        :param fetch_remote: If key for verify not found in local storage try
                             to fetch from nickserver
        :type fetch_remote: bool

        :return: A Deferred which fires with the signing EncryptionKey if
                 the signature verifies, or which fails with InvalidSignature
                 if the signature does not verify, or with KeyNotFound if no
                 key was found either locally or in the keyserver.
        :rtype: Deferred

        :raise UnsupportedKeyTypeError: if invalid key type
        """
        def verify(pubkey):
            signed = self._openpgp.verify(data,
                                          pubkey,
                                          detached_sig=detached_sig)
            if signed:
                if not pubkey.sign_used:
                    pubkey.sign_used = True
                    d = self._openpgp.put_key(pubkey)
                    d.addCallback(lambda _: pubkey)
                    return d
                return pubkey
            else:
                raise keymanager_errors.InvalidSignature(
                    'Failed to verify signature with key %s' %
                    (pubkey.fingerprint, ))

        d = self.get_key(address, private=False, fetch_remote=fetch_remote)
        d.addCallback(verify)
        return d

    def delete_key(self, key):
        """
        Remove key from storage.

        :param key: The key to be removed.
        :type key: EncryptionKey

        :return: A Deferred which fires when the key is deleted, or which fails
                 with KeyNotFound if the key was not found in local storage.
        :rtype: Deferred

        :raise UnsupportedKeyTypeError: if invalid key type
        """
        return self._openpgp.delete_key(key)

    def put_key(self, key):
        """
        Put key bound to address in local storage.

        :param key: The key to be stored
        :type key: EncryptionKey

        :return: A Deferred which fires when the key is in the storage, or
                 which fails with KeyNotValidUpgrade if a key with the same
                 uid exists and the new one is not a valid update for it.
        :rtype: Deferred

        :raise UnsupportedKeyTypeError: if invalid key type
        """
        def old_key_not_found(failure):
            if failure.check(keymanager_errors.KeyNotFound):
                return None
            else:
                return failure

        def check_upgrade(old_key):
            if key.private or can_upgrade(key, old_key):
                return self._openpgp.put_key(key)
            else:
                raise keymanager_errors.KeyNotValidUpgrade(
                    "Key %s can not be upgraded by new key %s" %
                    (old_key.fingerprint, key.fingerprint))

        d = self._openpgp.get_key(key.address, private=key.private)
        d.addErrback(old_key_not_found)
        d.addCallback(check_upgrade)
        return d

    def put_raw_key(self,
                    key,
                    address,
                    validation=ValidationLevels.Weak_Chain):
        """
        Put raw key bound to address in local storage.

        :param key: The key to be stored
        :type key: str
        :param address: address for which this key will be active
        :type address: str
        :param validation: validation level for this key
                           (default: 'Weak_Chain')
        :type validation: ValidationLevels

        :return: A Deferred which fires when the key is in the storage, or
                 which fails with KeyAddressMismatch if the address doesn't
                 match any uid on the key, with KeyNotFound if no OpenPGP
                 material was found in the key, or with KeyNotValidUpgrade if
                 a key with the same uid exists and the new one is not a valid
                 update for it.
        :rtype: Deferred

        :raise UnsupportedKeyTypeError: if invalid key type
        """
        pubkey, privkey = self._openpgp.parse_key(key, address)

        if pubkey is None:
            return defer.fail(keymanager_errors.KeyNotFound(key))

        pubkey.validation = validation
        d = self.put_key(pubkey)
        if privkey is not None:
            d.addCallback(lambda _: self.put_key(privkey))
        return d

    @defer.inlineCallbacks
    def fetch_key(self, address, uri, validation=ValidationLevels.Weak_Chain):
        """
        Fetch a public key bound to address from the network and put it in
        local storage.

        :param address: The email address of the key.
        :type address: str
        :param uri: The URI of the key.
        :type uri: str
        :param validation: validation level for this key
                           (default: 'Weak_Chain')
        :type validation: ValidationLevels

        :return: A Deferred which fires when the key is in the storage, or
                 which fails with KeyNotFound if there is no valid key at uri,
                 with KeyAddressMismatch if the address doesn't match any uid
                 on the key, or with KeyNotValidUpgrade if a key with the same
                 uid exists and the new one is not a valid update for it.
        :rtype: Deferred

        :raise UnsupportedKeyTypeError: if invalid key type
        """

        self.log.info('Fetch key for %s from %s' % (address, uri))
        key_content = yield self._get_with_combined_ca_bundle(uri)

        # XXX parse binary keys
        pubkey, _ = self._openpgp.parse_key(key_content, address)
        if pubkey is None:
            raise keymanager_errors.KeyNotFound(uri)

        pubkey.validation = validation
        yield self.put_key(pubkey)

    def ever_synced(self):
        # TODO: provide this method in soledad api, avoid using a private
        # attribute here
        d = self._soledad._dbpool.runQuery('SELECT * FROM sync_log')
        d.addCallback(lambda result: bool(result))
        return d
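A minimal usage sketch of the encrypt/decrypt API shown above. Here km stands
for an already-initialized KeyManager and the addresses are illustrative; in
practice the two calls would run on the sender's and the recipient's own
KeyManager instances, a single km is used only to keep the sketch short:

from twisted.internet import defer

@defer.inlineCallbacks
def encrypt_and_decrypt(km, cleartext):
    # Encrypt to the recipient's public key and sign with the sender's
    # private key.
    ciphertext = yield km.encrypt(cleartext,
                                  'bob@example.org',
                                  sign='alice@example.org')
    # On the recipient's side: decrypt with the private key bound to the
    # recipient's address and verify the sender's signature. The result is
    # a (decrypted, signature) tuple; signature is the signing EncryptionKey
    # on success, or a KeyNotFound / InvalidSignature instance otherwise.
    decrypted, signature = yield km.decrypt(ciphertext,
                                            'bob@example.org',
                                            verify='alice@example.org')
    defer.returnValue((decrypted, signature))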
Example 12
class KeyManager(object):
    #
    # server's key storage constants
    #

    log = Logger()

    def __init__(self,
                 address,
                 nickserver_uri,
                 soledad,
                 token=None,
                 ca_cert_path=None,
                 api_uri=None,
                 api_version=None,
                 uid=None,
                 gpgbinary=None,
                 combined_ca_bundle=None):
        """
        Initialize a Key Manager for user's C{address} with provider's
        nickserver reachable in C{nickserver_uri}.

        :param address: The email address of the user of this Key Manager.
        :type address: str
        :param nickserver_uri: The URI of the nickserver.
        :type nickserver_uri: str
        :param soledad: A Soledad instance for local storage of keys.
        :type soledad: leap.soledad.Soledad
        :param token: The token for interacting with the webapp API.
        :type token: str
        :param ca_cert_path: The path to the CA certificate.
        :type ca_cert_path: str
        :param api_uri: The URI of the webapp API.
        :type api_uri: str
        :param api_version: The version of the webapp API.
        :type api_version: str
        :param uid: The user's UID.
        :type uid: str
        :param gpgbinary: Name for GnuPG binary executable.
        :type gpgbinary: C{str}
        """
        self._address = address
        self._nickserver_uri = nickserver_uri
        self._soledad = soledad
        self._token = token
        self.ca_cert_path = ca_cert_path
        self.api_uri = api_uri
        self.api_version = api_version
        self.uid = uid
        create = self._create_combined_bundle_file
        try:
            self._combined_ca_bundle = combined_ca_bundle or create()
        except Exception:
            self.log.warn('Error while creating combined ca bundle')
            self._combined_ca_bundle = ''

        self._async_client = HTTPClient(self._combined_ca_bundle)
        self._nicknym = Nicknym(self._nickserver_uri, self._ca_cert_path,
                                self._token)
        self.refresher = None
        self._init_gpg(soledad, gpgbinary)

    #
    # utilities
    #

    def _init_gpg(self, soledad, gpgbinary):
        self._openpgp = OpenPGPScheme(soledad, gpgbinary=gpgbinary)

    def start_refresher(self):
        self.refresher = RandomRefreshPublicKey(self._openpgp, self)
        self.refresher.start()

    def stop_refresher(self):
        self.refresher.stop()

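    # Concatenate the stock leap.common CA bundle and the provider's
    # ca_cert_path into one temporary file, so requests can be verified
    # against either certificate chain.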
    def _create_combined_bundle_file(self):
        leap_ca_bundle = ca_bundle.where()

        if self._ca_cert_path == leap_ca_bundle:
            return self._ca_cert_path  # don't merge file with itself
        elif not self._ca_cert_path:
            return leap_ca_bundle

        tmp_file = tempfile.NamedTemporaryFile(delete=False)

        with open(tmp_file.name, 'w') as fout:
            fin = fileinput.input(files=(leap_ca_bundle, self._ca_cert_path))
            for line in fin:
                fout.write(line)
            fin.close()

        return tmp_file.name

    @defer.inlineCallbacks
    def _get_with_combined_ca_bundle(self, uri, data=None):
        """
        Send a GET request to C{uri} containing C{data}.

        Instead of using the ca_cert provided on construction time, this
        version also uses the default certificates shipped with leap.common

        :param uri: The URI of the request.
        :type uri: str
        :param data: The body of the request.
        :type data: dict, str or file

        :return: A deferred that will be fired with the GET response
        :rtype: Deferred
        """
        try:
            content = yield self._async_client.request(str(uri), 'GET')
        except Exception as e:
            self.log.warn("There was a problem fetching key: %s" % (e, ))
            raise keymanager_errors.KeyNotFound(uri)
        if not content:
            raise keymanager_errors.KeyNotFound(uri)
        defer.returnValue(content)

    #
    # key management
    #

    @defer.inlineCallbacks
    def send_key(self):
        """
        Send user's key to provider.

        The public key bound to the user's address is sent to the provider,
        which will replace any prior keys for the same address in its
        database.

        :return: A Deferred which fires when the key is sent, or which fails
                 with KeyNotFound if the key was not found in local database.
        :rtype: Deferred

        :raise UnsupportedKeyTypeError: if invalid key type
        """
        if not self.token:
            self.log.debug('Token not available, scheduling '
                           'a new key sending attempt...')
            yield task.deferLater(reactor, 5, self.send_key)

        self.log.info('Sending public key to server')
        key = yield self.get_key(self._address, fetch_remote=False)
        yield self._nicknym.put_key(self.uid, key.key_data, self._api_uri,
                                    self._api_version)
        emit_async(catalog.KEYMANAGER_DONE_UPLOADING_KEYS, self._address)
        self.log.info('Key sent to server')
        defer.returnValue(key)

    @defer.inlineCallbacks
    def _fetch_keys_from_server_and_store_local(self, address):
        """
        Fetch keys from nickserver and insert them in the local database.

        :param address: The address bound to the keys.
        :type address: str

        :return: A Deferred which fires when the key is in the storage,
                 or which fails with KeyNotFound if the key was not
                 found on nickserver.
        :rtype: Deferred

        """
        raw_key = yield self._nicknym.fetch_key_with_address(address)

        # nicknym server is authoritative for its own domain,
        # for other domains the key might come from key servers.
        validation_level = ValidationLevels.Weak_Chain
        _, domain = _split_email(address)
        if (domain == _get_domain(self._nickserver_uri)):
            validation_level = ValidationLevels.Provider_Trust

        yield self.put_raw_key(raw_key,
                               address=address,
                               validation=validation_level)

    @defer.inlineCallbacks
    def _get_inactive_private_keys(self):
        """
        Return all inactive private keys that are stored locally.
        These can be used to attempt decryption with multiple keys.

        :return: A Deferred which fires with the list of inactive keys sorted
                 by expiry date.
        :rtype: Deferred
        """
        all_keys = yield self.get_all_keys(private=True)
        inactive_keys = filter(lambda _key: not _key.is_active(), all_keys)

        inactive_keys = \
            sorted(inactive_keys, key=lambda _key: _key.expiry_date)
        defer.returnValue(inactive_keys)

    def get_key(self, address, private=False, fetch_remote=True):
        """
        Return a key bound to address.

        First, search for the key in local storage. If it is not available
        locally, or it is available locally but expired, a fetch from the
        nickserver is tried.

        :param address: The address bound to the key.
        :type address: str
        :param private: Look for a private key instead of a public one?
        :type private: bool
        :param fetch_remote: If key not found in local storage try to fetch
                             from nickserver
        :type fetch_remote: bool

        :return: A Deferred which fires with an EncryptionKey bound to address,
                 or which fails with KeyNotFound if no key was found either
                 locally or in the keyserver, with KeyVersionError if the key
                 has a format not supported by this version of KeyManager, or
                 with KeyNotValidUpgrade if the key is renewed remotely but
                 fails the validation rule.
        :rtype: Deferred

        :raise UnsupportedKeyTypeError: if invalid key type
        """
        self.log.debug('Getting key for %s' % (address, ))
        emit_async(catalog.KEYMANAGER_LOOKING_FOR_KEY, address)

        @defer.inlineCallbacks
        def maybe_extend_expiration(key):
            if key.needs_renewal():
                key = yield self._openpgp.expire(key, expiration_time='1y')
                yield self.send_key()
            defer.returnValue(key)

        def key_found(key):
            emit_async(catalog.KEYMANAGER_KEY_FOUND, address)
            return key

        def ensure_valid(key):
            if key.is_expired():
                self.log.info('Found expired key for %s.' % address)
                return _fetch_remotely(key)
            key_found(key)
            return key

        def key_not_found(failure):
            if not failure.check(keymanager_errors.KeyNotFound):
                return failure

            emit_async(catalog.KEYMANAGER_KEY_NOT_FOUND, address)
            return _fetch_remotely(failure)

        def _fetch_remotely(passthru):
            # we will only try to fetch a key from nickserver if fetch_remote
            # is True and the key is not private.
            if fetch_remote is False or private is True:
                return passthru

            self.log.debug('Fetching remotely key for %s.' % address)
            emit_async(catalog.KEYMANAGER_LOOKING_FOR_KEY, address)
            d = self._fetch_keys_from_server_and_store_local(address)
            d.addCallback(
                lambda _: self._openpgp.get_key(address, private=False))
            d.addCallback(key_found)
            return d

        # return key if it exists in local database
        d = self._openpgp.get_key(address, private=private)
        if private:
            d.addCallback(maybe_extend_expiration)
        d.addCallbacks(ensure_valid, key_not_found)
        return d

    @defer.inlineCallbacks
    def fetch_key_fingerprint(self, address, fingerprint):
        """
        Fetch a key from the key servers by fingerprint.

        It will replace any key assigned to the address in the keyring; the
        fetched key gets validation level Fingerprint.

        :param address: The address bound to the key.
        :type address: str
        :param fingerprint: The fingerprint of the key to fetch.
        :type fingerprint: str

        :return: A Deferred which fires with an EncryptionKey fetched,
                 or which fails with KeyNotFound if no key was found in the
                 keyserver for this fingerprint.
        :rtype: Deferred
        """
        key_data = yield self._nicknym.fetch_key_with_fingerprint(fingerprint)
        key, _ = self._openpgp.parse_key(key_data, address)
        key.validation = ValidationLevels.Fingerprint

        if key.fingerprint != fingerprint:
            raise keymanager_errors.KeyNotFound("Got wrong fingerprint")

        try:
            old_key = yield self._openpgp.get_key(address)
            if old_key.fingerprint == key.fingerprint:
                key.last_audited_at = old_key.last_audited_at
                key.encr_used = old_key.encr_used
                key.sign_used = old_key.sign_used
        except keymanager_errors.KeyNotFound:
            pass

        yield self._openpgp.put_key(key)
        defer.returnValue(key)

    def get_all_keys(self, private=False):
        """
        Return all keys stored in local database.

        :param private: Include private keys
        :type private: bool

        :return: A Deferred which fires with a list of all keys in local db.
        :rtype: Deferred
        """
        return self._openpgp.get_all_keys(private)

    def gen_key(self):
        """
        Generate a key bound to the user's address.

        :return: A Deferred which fires with the generated EncryptionKey.
        :rtype: Deferred

        :raise UnsupportedKeyTypeError: if invalid key type
        """
        def signal_finished(key):
            self.log.info('Key generated for %s' % self._address)
            emit_async(catalog.KEYMANAGER_FINISHED_KEY_GENERATION,
                       self._address)
            return key

        emit_async(catalog.KEYMANAGER_STARTED_KEY_GENERATION, self._address)

        d = self._openpgp.gen_key(self._address)
        d.addCallback(signal_finished)
        return d

    #
    # Setters/getters
    #

    def _get_token(self):
        return self._token

    def _set_token(self, token):
        self._token = token
        self._nicknym.token = token

    token = property(_get_token, _set_token, doc='The session token.')

    def _get_ca_cert_path(self):
        return self._ca_cert_path

    def _set_ca_cert_path(self, ca_cert_path):
        self._ca_cert_path = ca_cert_path

    ca_cert_path = property(_get_ca_cert_path,
                            _set_ca_cert_path,
                            doc='The path to the CA certificate.')

    def _get_api_uri(self):
        return self._api_uri

    def _set_api_uri(self, api_uri):
        self._api_uri = api_uri

    api_uri = property(_get_api_uri, _set_api_uri, doc='The webapp API URI.')

    def _get_api_version(self):
        return self._api_version

    def _set_api_version(self, api_version):
        self._api_version = api_version

    api_version = property(_get_api_version,
                           _set_api_version,
                           doc='The webapp API version.')

    def _get_uid(self):
        return self._uid

    def _set_uid(self, uid):
        self._uid = uid

    uid = property(_get_uid, _set_uid, doc='The uid of the user.')

    #
    # encrypt/decrypt and sign/verify API
    #

    def encrypt(self,
                data,
                address,
                passphrase=None,
                sign=None,
                cipher_algo='AES256',
                fetch_remote=True):
        """
        Encrypt data with the public key bound to address and sign with
        the private key bound to sign address.

        :param data: The data to be encrypted.
        :type data: str
        :param address: The address to encrypt it for.
        :type address: str
        :param passphrase: The passphrase for the secret key used for the
                           signature.
        :type passphrase: str
        :param sign: The address to be used for signature.
        :type sign: str
        :param cipher_algo: The cipher algorithm to use.
        :type cipher_algo: str
        :param fetch_remote: If key is not found in local storage try to fetch
                             from nickserver
        :type fetch_remote: bool

        :return: A Deferred which fires with the encrypted data as str, or
                 which fails with KeyNotFound if no keys were found either
                 locally or in the keyserver, with KeyVersionError if the key
                 format is not supported, or with EncryptError if encryption
                 failed for some reason.
        :rtype: Deferred

        :raise UnsupportedKeyTypeError: if invalid key type
        """
        @defer.inlineCallbacks
        def encrypt(keys):
            pubkey, signkey = keys
            encrypted = yield self._openpgp.encrypt(data,
                                                    pubkey,
                                                    passphrase,
                                                    sign=signkey,
                                                    cipher_algo=cipher_algo)
            if not pubkey.encr_used:
                pubkey.encr_used = True
                yield self._openpgp.put_key(pubkey)
            defer.returnValue(encrypted)

        dpub = self.get_key(address, private=False, fetch_remote=fetch_remote)
        dpriv = defer.succeed(None)
        if sign is not None:
            dpriv = self.get_key(sign, private=True)
        d = defer.gatherResults([dpub, dpriv], consumeErrors=True)
        d.addCallbacks(encrypt, self._extract_first_error)
        return d

    def decrypt(self,
                data,
                address,
                passphrase=None,
                verify=None,
                fetch_remote=True):
        """
        Decrypt data using private key from address and verify with public key
        bound to verify address. If decryption with the active private key
        fails, decryption is retried with any inactive private keys.

        :param data: The data to be decrypted.
        :type data: str
        :param address: The address to whom data was encrypted.
        :type address: str
        :param passphrase: The passphrase for the secret key used for
                           decryption.
        :type passphrase: str
        :param verify: The address to be used for signature.
        :type verify: str
        :param fetch_remote: If key for verify not found in local storage try
                             to fetch from nickserver
        :type fetch_remote: bool

        :return: A Deferred which fires with:
            * (decrypted str, signing key) if validation works
            * (decrypted str, KeyNotFound) if the signing key is not found
            * (decrypted str, InvalidSignature) if the signature is invalid
            * KeyNotFound failure if the private key is not found
            * DecryptError failure if decryption failed
        :rtype: Deferred

        :raise UnsupportedKeyTypeError: if invalid key type
        """
        @defer.inlineCallbacks
        def _decrypt(keys):
            pubkey, privkey = keys
            decrypted, signed = yield self._openpgp.decrypt(
                data, privkey, passphrase=passphrase, verify=pubkey)
            if pubkey is None:
                signature = keymanager_errors.KeyNotFound(verify)
            elif signed:
                signature = pubkey
                if not pubkey.sign_used:
                    pubkey.sign_used = True
                    yield self._openpgp.put_key(pubkey)
                    defer.returnValue((decrypted, signature))
            else:
                signature = keymanager_errors.InvalidSignature(
                    'Failed to verify signature with key %s' %
                    (pubkey.fingerprint, ))
            defer.returnValue((decrypted, signature))

        @defer.inlineCallbacks
        def decrypt_with_inactive_keys(inactive_keys, verify_key,
                                       original_decrypt_err):
            if not inactive_keys:
                # when there are no more keys to go through
                raise original_decrypt_err

            try:
                inactive_key = inactive_keys.pop()
                result = yield _decrypt([verify_key, inactive_key])
            except keymanager_errors.DecryptError:
                result = yield decrypt_with_inactive_keys(
                    inactive_keys, verify_key, original_decrypt_err)
            defer.returnValue(result)

        @defer.inlineCallbacks
        def decrypt(keys):
            try:
                result = yield _decrypt(keys)
            except keymanager_errors.DecryptError as e:
                verify_key, active_key = keys
                inactive_keys = yield self._get_inactive_private_keys()
                result = yield decrypt_with_inactive_keys(
                    inactive_keys, verify_key, e)
            defer.returnValue(result)

        dpriv = self.get_key(address, private=True)
        dpub = defer.succeed(None)
        if verify is not None:
            dpub = self.get_key(verify,
                                private=False,
                                fetch_remote=fetch_remote)
            dpub.addErrback(lambda f: None
                            if f.check(keymanager_errors.KeyNotFound) else f)
        d = defer.gatherResults([dpub, dpriv], consumeErrors=True)
        d.addCallbacks(decrypt, self._extract_first_error)
        return d

    def _extract_first_error(self, failure):
        return failure.value.subFailure

    def sign(self,
             data,
             address,
             digest_algo='SHA512',
             clearsign=False,
             detach=True,
             binary=False):
        """
        Sign data with private key bound to address.

        :param data: The data to be signed.
        :type data: str
        :param address: The address to be used to sign.
        :type address: str
        :param digest_algo: The hash digest to use.
        :type digest_algo: str
        :param clearsign: If True, create a cleartext signature.
        :type clearsign: bool
        :param detach: If True, create a detached signature.
        :type detach: bool
        :param binary: If True, do not ascii armour the output.
        :type binary: bool

        :return: A Deferred which fires with the signed data as str or fails
                 with KeyNotFound if no key was found either locally or in
                 the keyserver, or with SignFailed if there was any error
                 signing.
        :rtype: Deferred

        :raise UnsupportedKeyTypeError: if invalid key type
        """
        def sign(privkey):
            return self._openpgp.sign(data,
                                      privkey,
                                      digest_algo=digest_algo,
                                      clearsign=clearsign,
                                      detach=detach,
                                      binary=binary)

        d = self.get_key(address, private=True)
        d.addCallback(sign)
        return d

    def verify(self, data, address, detached_sig=None, fetch_remote=True):
        """
        Verify signed data with the public key bound to address, optionally
        using detached_sig.

        :param data: The data to be verified.
        :type data: str
        :param address: The address to be used to verify.
        :type address: str
        :param detached_sig: A detached signature. If given, C{data} is
                             verified using this detached signature.
        :type detached_sig: str
        :param fetch_remote: If key for verify not found in local storage try
                             to fetch from nickserver
        :type fetch_remote: bool

        :return: A Deferred which fires with the signing EncryptionKey if
                 the signature verifies, or which fails with InvalidSignature
                 if the signature does not verify, or with KeyNotFound if no
                 key was found either locally or in the keyserver.
        :rtype: Deferred

        :raise UnsupportedKeyTypeError: if invalid key type
        """
        def verify(pubkey):
            signed = self._openpgp.verify(data,
                                          pubkey,
                                          detached_sig=detached_sig)
            if signed:
                if not pubkey.sign_used:
                    pubkey.sign_used = True
                    d = self._openpgp.put_key(pubkey)
                    d.addCallback(lambda _: pubkey)
                    return d
                return pubkey
            else:
                raise keymanager_errors.InvalidSignature(
                    'Failed to verify signature with key %s' %
                    (pubkey.fingerprint, ))

        d = self.get_key(address, private=False, fetch_remote=fetch_remote)
        d.addCallback(verify)
        return d

    def delete_key(self, key):
        """
        Remove key from storage.

        :param key: The key to be removed.
        :type key: EncryptionKey

        :return: A Deferred which fires when the key is deleted, or which fails
                 with KeyNotFound if the key was not found in local storage.
        :rtype: Deferred

        :raise UnsupportedKeyTypeError: if invalid key type
        """
        return self._openpgp.delete_key(key)

    def put_key(self, key):
        """
        Put key bound to address in local storage.

        :param key: The key to be stored
        :type key: EncryptionKey

        :return: A Deferred which fires when the key is in the storage, or
                 which fails with KeyNotValidUpgrade if a key with the same
                 uid exists and the new one is not a valid update for it.
        :rtype: Deferred

        :raise UnsupportedKeyTypeError: if invalid key type
        """
        def old_key_not_found(failure):
            if failure.check(keymanager_errors.KeyNotFound):
                return None
            else:
                return failure

        def check_upgrade(old_key):
            if key.private or can_upgrade(key, old_key):
                return self._openpgp.put_key(key)
            else:
                raise keymanager_errors.KeyNotValidUpgrade(
                    "Key %s can not be upgraded by new key %s" %
                    (old_key.fingerprint, key.fingerprint))

        d = self._openpgp.get_key(key.address, private=key.private)
        d.addErrback(old_key_not_found)
        d.addCallback(check_upgrade)
        return d

    @defer.inlineCallbacks
    def put_raw_key(self,
                    key,
                    address,
                    validation=ValidationLevels.Weak_Chain):
        """
        Put raw key bound to address in local storage.

        :param key: The key to be stored
        :type key: str
        :param address: address for which this key will be active
        :type address: str
        :param validation: validation level for this key
                           (default: 'Weak_Chain')
        :type validation: ValidationLevels

        :return: A Deferred which fires when the key is in the storage, or
                 which fails with KeyAddressMismatch if the address doesn't
                 match any uid on the key, with KeyNotFound if no OpenPGP
                 material was found in the key, or with KeyNotValidUpgrade if
                 a key with the same uid exists and the new one is not a valid
                 update for it.
        :rtype: Deferred

        :raise UnsupportedKeyTypeError: if invalid key type
        """
        pubkey, privkey = self._openpgp.parse_key(key, address)

        if pubkey is None:
            raise keymanager_errors.KeyNotFound(key)

        if address == self._address and not privkey:
            try:
                existing = yield self.get_key(address, fetch_remote=False)
            except keymanager_errors.KeyNotFound:
                existing = None
            if (existing is None
                    or pubkey.fingerprint != existing.fingerprint):
                raise keymanager_errors.KeyNotValidUpgrade(
                    "Cannot update your %s key without the private part" %
                    (address, ))

        pubkey.validation = validation
        yield self.put_key(pubkey)
        if privkey is not None:
            yield self.put_key(privkey)

        if address == self._address:
            yield self.send_key()

    @defer.inlineCallbacks
    def fetch_key(self, address, uri, validation=ValidationLevels.Weak_Chain):
        """
        Fetch a public key bound to address from the network and put it in
        local storage.

        :param address: The email address of the key.
        :type address: str
        :param uri: The URI of the key.
        :type uri: str
        :param validation: validation level for this key
                           (default: 'Weak_Chain')
        :type validation: ValidationLevels

        :return: A Deferred which fires when the key is in the storage, or
                 which fails with KeyNotFound if there is no valid key at uri,
                 with KeyAddressMismatch if the address doesn't match any uid
                 on the key, or with KeyNotValidUpgrade if a key with the same
                 uid exists and the new one is not a valid update for it.
        :rtype: Deferred

        :raise UnsupportedKeyTypeError: if invalid key type
        """

        self.log.info('Fetch key for %s from %s' % (address, uri))
        key_content = yield self._get_with_combined_ca_bundle(uri)

        # XXX parse binary keys
        pubkey, _ = self._openpgp.parse_key(key_content, address)
        if pubkey is None:
            raise keymanager_errors.KeyNotFound(uri)

        pubkey.validation = validation
        yield self.put_key(pubkey)
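A short sketch of how a caller might use get_key() from the example above to
look up a correspondent's public key, tolerating a missing key in the same
way decrypt() does; km and the keymanager_errors module are assumed to be the
same objects used in the snippet:

def lookup_public_key(km, address):
    # Local keyring first; get_key() falls back to the nickserver because
    # fetch_remote is True and the key is public.
    d = km.get_key(address, private=False, fetch_remote=True)
    # Turn a missing key into None instead of a failure, so the caller can
    # decide whether to abort or to send in cleartext.
    d.addErrback(lambda f: None
                 if f.check(keymanager_errors.KeyNotFound) else f)
    return d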
Example 13
class KeyManager(object):

    #
    # server's key storage constants
    #

    OPENPGP_KEY = 'openpgp'
    PUBKEY_KEY = "user[public_key]"

    def __init__(self, address, nickserver_uri, soledad, token=None,
                 ca_cert_path=None, api_uri=None, api_version=None, uid=None,
                 gpgbinary=None):
        """
        Initialize a Key Manager for user's C{address} with provider's
        nickserver reachable in C{nickserver_uri}.

        :param address: The email address of the user of this Key Manager.
        :type address: str
        :param nickserver_uri: The URI of the nickserver.
        :type nickserver_uri: str
        :param soledad: A Soledad instance for local storage of keys.
        :type soledad: leap.soledad.Soledad
        :param token: The token for interacting with the webapp API.
        :type token: str
        :param ca_cert_path: The path to the CA certificate.
        :type ca_cert_path: str
        :param api_uri: The URI of the webapp API.
        :type api_uri: str
        :param api_version: The version of the webapp API.
        :type api_version: str
        :param uid: The user's UID.
        :type uid: str
        :param gpgbinary: Name for GnuPG binary executable.
        :type gpgbinary: C{str}
        """
        self._address = address
        self._nickserver_uri = nickserver_uri
        self._soledad = soledad
        self._token = token
        self.ca_cert_path = ca_cert_path
        self.api_uri = api_uri
        self.api_version = api_version
        self.uid = uid
        # a dict to map key types to their handlers
        self._wrapper_map = {
            OpenPGPKey: OpenPGPScheme(soledad, gpgbinary=gpgbinary),
            # other types of key will be added to this mapper.
        }
        # the following are used to perform https requests
        self._fetcher = requests
        self._combined_ca_bundle = self._create_combined_bundle_file()
        self._async_client = HTTPClient(self._combined_ca_bundle)
        self._async_client_pinned = HTTPClient(self._ca_cert_path)

    #
    # destructor
    #

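    # Remove the temporary combined CA bundle created at construction time,
    # unless it is the stock bundle or the user-provided ca_cert_path.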
    def __del__(self):
        try:
            created_tmp_combined_ca_bundle = self._combined_ca_bundle not in \
                [ca_bundle.where(), self._ca_cert_path]
            if created_tmp_combined_ca_bundle:
                os.remove(self._combined_ca_bundle)
        except OSError:
            pass

    #
    # utilities
    #

    def _create_combined_bundle_file(self):
        leap_ca_bundle = ca_bundle.where()

        if self._ca_cert_path == leap_ca_bundle:
            return self._ca_cert_path   # don't merge file with itself
        elif not self._ca_cert_path:
            return leap_ca_bundle

        tmp_file = tempfile.NamedTemporaryFile(delete=False)

        with open(tmp_file.name, 'w') as fout:
            fin = fileinput.input(files=(leap_ca_bundle, self._ca_cert_path))
            for line in fin:
                fout.write(line)
            fin.close()

        return tmp_file.name

    def _key_class_from_type(self, ktype):
        """
        Given a class type, return a class

        :param ktype: string representation of a class name
        :type ktype: str

        :return: A class with the matching name
        :rtype: classobj or type
        """
        return filter(
            lambda klass: klass.__name__ == ktype,
            self._wrapper_map).pop()

    @defer.inlineCallbacks
    def _get_key_from_nicknym(self, address):
        """
        Request the keys bound to C{address} from the nickserver.

        :param address: The email address bound to the keys.
        :type address: str

        :return: A deferred that will be fired with GET content as json (dict)
        :rtype: Deferred
        """
        try:
            uri = self._nickserver_uri + '?address=' + address
            content = yield self._async_client_pinned.request(str(uri), 'GET')
            json_content = json.loads(content)
        except IOError as e:
            # FIXME: 404 doesn't raise today, but it won't produce json anyway
            # if e.response.status_code == 404:
                # raise KeyNotFound(address)
            logger.warning("HTTP error retrieving key: %r" % (e,))
            logger.warning("%s" % (content,))
            raise KeyNotFound(e.message), None, sys.exc_info()[2]
        except ValueError as v:
            logger.warning("Invalid JSON data from key: %s" % (uri,))
            raise KeyNotFound(v.message + ' - ' + uri), None, sys.exc_info()[2]

        except Exception as e:
            logger.warning("Error retrieving key: %r" % (e,))
            raise KeyNotFound(e.message), None, sys.exc_info()[2]
        # Responses are now text/plain, although it's json anyway, but
        # this will fail when it shouldn't
        # leap_assert(
        #     res.headers['content-type'].startswith('application/json'),
        #     'Content-type is not JSON.')
        defer.returnValue(json_content)

    @defer.inlineCallbacks
    def _get_with_combined_ca_bundle(self, uri, data=None):
        """
        Send a GET request to C{uri} containing C{data}.

        Instead of using the ca_cert provided on construction time, this
        version also uses the default certificates shipped with leap.common

        :param uri: The URI of the request.
        :type uri: str
        :param data: The body of the request.
        :type data: dict, str or file

        :return: A deferred that will be fired with the GET response
        :rtype: Deferred
        """
        try:
            content = yield self._async_client.request(str(uri), 'GET')
        except Exception as e:
            logger.warning("There was a problem fetching key: %s" % (e,))
            raise KeyNotFound(uri)
        if not content:
            raise KeyNotFound(uri)
        defer.returnValue(content)

    @defer.inlineCallbacks
    def _put(self, uri, data=None):
        """
        Send a PUT request to C{uri} containing C{data}.

        The request will be sent using the configured CA certificate path to
        verify the server certificate and the configured session id for
        authentication.

        :param uri: The URI of the request.
        :type uri: str
        :param data: The body of the request.
        :type data: dict, str or file

        :return: A deferred that will be fired when PUT request finishes
        :rtype: Deferred
        """
        leap_assert(
            self._token is not None,
            'We need a token to interact with webapp!')
        if type(data) == dict:
            data = urllib.urlencode(data)
        headers = {'Authorization': [str('Token token=%s' % self._token)]}
        headers['Content-Type'] = ['application/x-www-form-urlencoded']
        try:
            res = yield self._async_client_pinned.request(str(uri), 'PUT',
                                                          body=str(data),
                                                          headers=headers)
        except Exception as e:
            logger.warning("Error uploading key: %r" % (e,))
            raise e
        if 'error' in res:
            # FIXME: That's a workaround for 500,
            # we need to implement a readBody to assert response code
            logger.warning("Error uploading key: %r" % (res,))
            raise Exception(res)

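    # Cache lookups per address so repeated get_key() calls for the same
    # address do not hammer the nickserver; invalidation is presumably in
    # seconds, i.e. roughly five minutes here.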
    @memoized_method(invalidation=300)
    @defer.inlineCallbacks
    def _fetch_keys_from_server(self, address):
        """
        Fetch keys bound to address from nickserver and insert them in
        local database.

        :param address: The address bound to the keys.
        :type address: str

        :return: A Deferred which fires when the key is in the storage,
                 or which fails with KeyNotFound if the key was not found on
                 nickserver.
        :rtype: Deferred

        """
        # request keys from the nickserver
        server_keys = yield self._get_key_from_nicknym(address)

        # insert keys in local database
        if self.OPENPGP_KEY in server_keys:
            # nicknym server is authoritative for its own domain,
            # for other domains the key might come from key servers.
            validation_level = ValidationLevels.Weak_Chain
            _, domain = _split_email(address)
            if (domain == _get_domain(self._nickserver_uri)):
                validation_level = ValidationLevels.Provider_Trust

            yield self.put_raw_key(
                server_keys['openpgp'],
                OpenPGPKey,
                address=address,
                validation=validation_level)

    #
    # key management
    #

    def send_key(self, ktype):
        """
        Send user's key of type ktype to provider.

        The public key bound to the user's address is sent to the provider,
        which will sign it and replace any prior keys for the same address in
        its database.

        :param ktype: The type of the key.
        :type ktype: subclass of EncryptionKey

        :return: A Deferred which fires when the key is sent, or which fails
                 with KeyNotFound if the key was not found in local database.
        :rtype: Deferred

        :raise UnsupportedKeyTypeError: if invalid key type
        """
        self._assert_supported_key_type(ktype)

        def send(pubkey):
            data = {
                self.PUBKEY_KEY: pubkey.key_data
            }
            uri = "%s/%s/users/%s.json" % (
                self._api_uri,
                self._api_version,
                self._uid)
            d = self._put(uri, data)
            d.addCallback(lambda _:
                          emit_async(catalog.KEYMANAGER_DONE_UPLOADING_KEYS,
                                     self._address))
            return d

        d = self.get_key(
            self._address, ktype, private=False, fetch_remote=False)
        d.addCallback(send)
        return d

    def get_key(self, address, ktype, private=False, fetch_remote=True):
        """
        Return a key of type ktype bound to address.

        First, search for the key in local storage. If it is not available,
        then try to fetch from nickserver.

        :param address: The address bound to the key.
        :type address: str
        :param ktype: The type of the key.
        :type ktype: subclass of EncryptionKey
        :param private: Look for a private key instead of a public one?
        :type private: bool
        :param fetch_remote: If key not found in local storage try to fetch
                             from nickserver
        :type fetch_remote: bool

        :return: A Deferred which fires with an EncryptionKey of type ktype
                 bound to address, or which fails with KeyNotFound if no key
                 was found either locally or in the keyserver.
        :rtype: Deferred

        :raise UnsupportedKeyTypeError: if invalid key type
        """
        self._assert_supported_key_type(ktype)
        logger.debug("getting key for %s" % (address,))
        leap_assert(
            ktype in self._wrapper_map,
            'Unknown key type: %s.' % str(ktype))
        _keys = self._wrapper_map[ktype]

        emit_async(catalog.KEYMANAGER_LOOKING_FOR_KEY, address)

        def key_found(key):
            emit_async(catalog.KEYMANAGER_KEY_FOUND, address)
            return key

        def key_not_found(failure):
            if not failure.check(KeyNotFound):
                return failure

            emit_async(catalog.KEYMANAGER_KEY_NOT_FOUND, address)

            # we will only try to fetch a key from nickserver if fetch_remote
            # is True and the key is not private.
            if fetch_remote is False or private is True:
                return failure

            emit_async(catalog.KEYMANAGER_LOOKING_FOR_KEY, address)
            d = self._fetch_keys_from_server(address)
            d.addCallback(
                lambda _: _keys.get_key(address, private=False))
            d.addCallback(key_found)
            return d

        # return key if it exists in local database
        d = _keys.get_key(address, private=private)
        d.addCallbacks(key_found, key_not_found)
        return d

    def get_all_keys(self, private=False):
        """
        Return all keys stored in local database.

        :param private: Include private keys
        :type private: bool

        :return: A Deferred which fires with a list of all keys in local db.
        :rtype: Deferred
        """
        # TODO: should it be based on activedocs?
        def build_keys(docs):
            return map(
                lambda doc: build_key_from_dict(
                    self._key_class_from_type(doc.content['type']),
                    doc.content),
                docs)

        # XXX: there is no check that the soledad indexes are ready, as it
        #      happens with EncryptionScheme.
        #      The use cases right now are not problematic. This could be
        #      solved by adding a keytype to this function and moving the
        #      soledad request
        #      to the EncryptionScheme.
        d = self._soledad.get_from_index(
            TAGS_PRIVATE_INDEX,
            KEYMANAGER_KEY_TAG,
            '1' if private else '0')
        d.addCallback(build_keys)
        return d

    def gen_key(self, ktype):
        """
        Generate a key of type ktype bound to the user's address.

        :param ktype: The type of the key.
        :type ktype: subclass of EncryptionKey

        :return: A Deferred which fires with the generated EncryptionKey.
        :rtype: Deferred

        :raise UnsupportedKeyTypeError: if invalid key type
        """
        self._assert_supported_key_type(ktype)
        _keys = self._wrapper_map[ktype]

        def signal_finished(key):
            emit_async(
                catalog.KEYMANAGER_FINISHED_KEY_GENERATION, self._address)
            return key

        emit_async(catalog.KEYMANAGER_STARTED_KEY_GENERATION, self._address)

        d = _keys.gen_key(self._address)
        d.addCallback(signal_finished)
        return d

    #
    # Setters/getters
    #

    def _get_token(self):
        return self._token

    def _set_token(self, token):
        self._token = token

    token = property(
        _get_token, _set_token, doc='The session token.')

    def _get_ca_cert_path(self):
        return self._ca_cert_path

    def _set_ca_cert_path(self, ca_cert_path):
        self._ca_cert_path = ca_cert_path

    ca_cert_path = property(
        _get_ca_cert_path, _set_ca_cert_path,
        doc='The path to the CA certificate.')

    def _get_api_uri(self):
        return self._api_uri

    def _set_api_uri(self, api_uri):
        self._api_uri = api_uri

    api_uri = property(
        _get_api_uri, _set_api_uri, doc='The webapp API URI.')

    def _get_api_version(self):
        return self._api_version

    def _set_api_version(self, api_version):
        self._api_version = api_version

    api_version = property(
        _get_api_version, _set_api_version, doc='The webapp API version.')

    def _get_uid(self):
        return self._uid

    def _set_uid(self, uid):
        self._uid = uid

    uid = property(
        _get_uid, _set_uid, doc='The uid of the user.')

    #
    # encrypt/decrypt and sign/verify API
    #

    def encrypt(self, data, address, ktype, passphrase=None, sign=None,
                cipher_algo='AES256', fetch_remote=True):
        """
        Encrypt data with the public key bound to address and sign with
        the private key bound to sign address.

        :param data: The data to be encrypted.
        :type data: str
        :param address: The address to encrypt it for.
        :type address: str
        :param ktype: The type of the key.
        :type ktype: subclass of EncryptionKey
        :param passphrase: The passphrase for the secret key used for the
                           signature.
        :type passphrase: str
        :param sign: The address to be used for signature.
        :type sign: str
        :param cipher_algo: The cipher algorithm to use.
        :type cipher_algo: str
        :param fetch_remote: If key is not found in local storage try to fetch
                             from nickserver
        :type fetch_remote: bool

        :return: A Deferred which fires with the encrypted data as str, or
                 which fails with KeyNotFound if no keys were found either
                 locally or in the keyserver, or with EncryptError if
                 encryption failed for some reason.
        :rtype: Deferred

        :raise UnsupportedKeyTypeError: if invalid key type
        """
        self._assert_supported_key_type(ktype)
        _keys = self._wrapper_map[ktype]

        @defer.inlineCallbacks
        def encrypt(keys):
            pubkey, signkey = keys
            encrypted = yield _keys.encrypt(
                data, pubkey, passphrase, sign=signkey,
                cipher_algo=cipher_algo)
            if not pubkey.encr_used:
                pubkey.encr_used = True
                yield _keys.put_key(pubkey)
            defer.returnValue(encrypted)

        dpub = self.get_key(address, ktype, private=False,
                            fetch_remote=fetch_remote)
        dpriv = defer.succeed(None)
        if sign is not None:
            dpriv = self.get_key(sign, ktype, private=True)
        d = defer.gatherResults([dpub, dpriv], consumeErrors=True)
        d.addCallbacks(encrypt, self._extract_first_error)
        return d

    def decrypt(self, data, address, ktype, passphrase=None, verify=None,
                fetch_remote=True):
        """
        Decrypt data using private key from address and verify with public key
        bound to verify address.

        :param data: The data to be decrypted.
        :type data: str
        :param address: The address to whom data was encrypted.
        :type address: str
        :param ktype: The type of the key.
        :type ktype: subclass of EncryptionKey
        :param passphrase: The passphrase for the secret key used for
                           decryption.
        :type passphrase: str
        :param verify: The address to be used for signature.
        :type verify: str
        :param fetch_remote: If key for verify not found in local storage try
                             to fetch from nickserver
        :type fetch_remote: bool

        :return: A Deferred which fires with:
            * (decrypted str, signing key) if validation works
            * (decrypted str, KeyNotFound) if the signing key is not found
            * (decrypted str, InvalidSignature) if the signature is invalid
            * KeyNotFound failure if the private key is not found
            * DecryptError failure if decryption failed
        :rtype: Deferred

        :raise UnsupportedKeyTypeError: if invalid key type
        """
        self._assert_supported_key_type(ktype)
        _keys = self._wrapper_map[ktype]

        @defer.inlineCallbacks
        def decrypt(keys):
            pubkey, privkey = keys
            decrypted, signed = yield _keys.decrypt(
                data, privkey, passphrase=passphrase, verify=pubkey)
            if pubkey is None:
                signature = KeyNotFound(verify)
            elif signed:
                signature = pubkey
                if not pubkey.sign_used:
                    pubkey.sign_used = True
                    yield _keys.put_key(pubkey)
                    defer.returnValue((decrypted, signature))
            else:
                signature = InvalidSignature(
                    'Failed to verify signature with key %s' %
                    (pubkey.fingerprint,))
            defer.returnValue((decrypted, signature))

        dpriv = self.get_key(address, ktype, private=True)
        dpub = defer.succeed(None)
        if verify is not None:
            dpub = self.get_key(verify, ktype, private=False,
                                fetch_remote=fetch_remote)
            dpub.addErrback(lambda f: None if f.check(KeyNotFound) else f)
        d = defer.gatherResults([dpub, dpriv], consumeErrors=True)
        d.addCallbacks(decrypt, self._extract_first_error)
        return d

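# Usage sketch (not part of the original listing): a minimal round trip
# through encrypt() and decrypt(), assuming `km` is an already-initialized
# KeyManager and `ktype` is a key class registered in its wrapper map (both
# supplied by the caller), and assuming encrypt() takes (data, address,
# ktype, sign=...) as the surrounding code suggests.
from twisted.internet import defer

@defer.inlineCallbacks
def encrypt_then_decrypt(km, ktype, sender, recipient, plaintext):
    # encrypt for the recipient and sign with the sender's private key
    ciphertext = yield km.encrypt(plaintext, recipient, ktype, sign=sender)
    # decrypt with the recipient's private key and verify the sender's
    # signature; `signature` is the signing key, KeyNotFound or
    # InvalidSignature, as documented in decrypt() above
    decrypted, signature = yield km.decrypt(
        ciphertext, recipient, ktype, verify=sender)
    defer.returnValue((decrypted, signature))
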
    def _extract_first_error(self, failure):
        return failure.value.subFailure

    def sign(self, data, address, ktype, digest_algo='SHA512', clearsign=False,
             detach=True, binary=False):
        """
        Sign data with private key bound to address.

        :param data: The data to be signed.
        :type data: str
        :param address: The address whose private key will be used to sign.
        :type address: str
        :param ktype: The type of the key.
        :type ktype: subclass of EncryptionKey
        :param digest_algo: The hash digest to use.
        :type digest_algo: str
        :param clearsign: If True, create a cleartext signature.
        :type clearsign: bool
        :param detach: If True, create a detached signature.
        :type detach: bool
        :param binary: If True, do not ascii armour the output.
        :type binary: bool

        :return: A Deferred which fires with the signed data as str, fails
                 with KeyNotFound if no key was found either locally or in
                 the keyserver, or fails with SignFailed if there was any
                 error while signing.
        :rtype: Deferred

        :raise UnsupportedKeyTypeError: if invalid key type
        """
        self._assert_supported_key_type(ktype)
        _keys = self._wrapper_map[ktype]

        def sign(privkey):
            return _keys.sign(
                data, privkey, digest_algo=digest_algo, clearsign=clearsign,
                detach=detach, binary=binary)

        d = self.get_key(address, ktype, private=True)
        d.addCallback(sign)
        return d

    def verify(self, data, address, ktype, detached_sig=None,
               fetch_remote=True):
        """
        Verify signed data with the public key bound to address, optionally
        using detached_sig.

        :param data: The data to be verified.
        :type data: str
        :param address: The address whose public key will be used to verify.
        :type address: str
        :param ktype: The type of the key.
        :type ktype: subclass of EncryptionKey
        :param detached_sig: A detached signature. If given, C{data} is
                             verified using this detached signature.
        :type detached_sig: str
        :param fetch_remote: If the key for address is not found in local
                             storage, try to fetch it from the nickserver.
        :type fetch_remote: bool

        :return: A Deferred which fires with the signing EncryptionKey if the
                 signature verifies, fails with InvalidSignature if it does
                 not verify, or fails with KeyNotFound if no key was found
                 either locally or in the keyserver.
        :rtype: Deferred

        :raise UnsupportedKeyTypeError: if invalid key type
        """
        self._assert_supported_key_type(ktype)
        _keys = self._wrapper_map[ktype]

        def verify(pubkey):
            signed = _keys.verify(
                data, pubkey, detached_sig=detached_sig)
            if signed:
                if not pubkey.sign_used:
                    pubkey.sign_used = True
                    d = _keys.put_key(pubkey)
                    d.addCallback(lambda _: pubkey)
                    return d
                return pubkey
            else:
                raise InvalidSignature(
                    'Failed to verify signature with key %s' %
                    (pubkey.fingerprint,))

        d = self.get_key(address, ktype, private=False,
                         fetch_remote=fetch_remote)
        d.addCallback(verify)
        return d

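# Usage sketch (illustrative, not from the original source): producing a
# detached signature with sign() and checking it with verify(), again
# assuming `km` and `ktype` are supplied by the caller as above.
from twisted.internet import defer

@defer.inlineCallbacks
def sign_then_verify(km, ktype, signer, data):
    # create a detached signature with the signer's private key
    detached = yield km.sign(data, signer, ktype, detach=True)
    # verify() fires with the signing key on success and fails with
    # InvalidSignature otherwise
    key = yield km.verify(data, signer, ktype, detached_sig=detached)
    defer.returnValue(key.fingerprint)
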
    def delete_key(self, key):
        """
        Remove key from storage.

        :param key: The key to be removed.
        :type key: EncryptionKey

        :return: A Deferred which fires when the key is deleted, or which
                 fails with KeyNotFound if the key was not found in local
                 storage.
        :rtype: Deferred

        :raise UnsupportedKeyTypeError: if invalid key type
        """
        self._assert_supported_key_type(type(key))
        _keys = self._wrapper_map[type(key)]
        return _keys.delete_key(key)

    def put_key(self, key):
        """
        Put key bound to address in local storage.

        :param key: The key to be stored
        :type key: EncryptionKey

        :return: A Deferred which fires when the key is in the storage, or
                 which fails with KeyNotValidUpgrade if a key with the same
                 uid exists and the new one is not a valid upgrade for it.
        :rtype: Deferred

        :raise UnsupportedKeyTypeError: if invalid key type
        """
        ktype = type(key)
        self._assert_supported_key_type(ktype)
        _keys = self._wrapper_map[ktype]

        def old_key_not_found(failure):
            if failure.check(KeyNotFound):
                return None
            else:
                return failure

        def check_upgrade(old_key):
            if key.private or can_upgrade(key, old_key):
                return _keys.put_key(key)
            else:
                raise KeyNotValidUpgrade(
                    "Key %s can not be upgraded by new key %s"
                    % (old_key.fingerprint, key.fingerprint))

        d = _keys.get_key(key.address, private=key.private)
        d.addErrback(old_key_not_found)
        d.addCallback(check_upgrade)
        return d

    def put_raw_key(self, key, ktype, address,
                    validation=ValidationLevels.Weak_Chain):
        """
        Put raw key bound to address in local storage.

        :param key: The ascii key to be stored
        :type key: str
        :param ktype: the type of the key.
        :type ktype: subclass of EncryptionKey
        :param address: address for which this key will be active
        :type address: str
        :param validation: validation level for this key
                           (default: 'Weak_Chain')
        :type validation: ValidationLevels

        :return: A Deferred which fires when the key is in the storage, or
                 which fails with KeyAddressMismatch if address doesn't match
                 any uid on the key, fails with KeyNotFound if no OpenPGP
                 material was found in the key, or fails with
                 KeyNotValidUpgrade if a key with the same uid exists and the
                 new one is not a valid upgrade for it.
        :rtype: Deferred

        :raise UnsupportedKeyTypeError: if invalid key type
        """
        self._assert_supported_key_type(ktype)
        _keys = self._wrapper_map[ktype]

        pubkey, privkey = _keys.parse_ascii_key(key, address)

        if pubkey is None:
            return defer.fail(KeyNotFound(key))

        pubkey.validation = validation
        d = self.put_key(pubkey)
        if privkey is not None:
            d.addCallback(lambda _: self.put_key(privkey))
        return d

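# Usage sketch (illustrative): importing an ASCII-armoured key received out
# of band and reading it back from local storage. `km`, `ktype` and the
# ValidationLevels member are assumed to be provided by the caller.
from twisted.internet import defer

@defer.inlineCallbacks
def import_ascii_key(km, ktype, address, ascii_key, validation):
    # store the key; fails with KeyAddressMismatch, KeyNotFound or
    # KeyNotValidUpgrade as documented in put_raw_key() above
    yield km.put_raw_key(ascii_key, ktype, address, validation=validation)
    # read the freshly stored public key back without hitting the network
    key = yield km.get_key(address, ktype, private=False, fetch_remote=False)
    defer.returnValue(key)
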
    @defer.inlineCallbacks
    def fetch_key(self, address, uri, ktype,
                  validation=ValidationLevels.Weak_Chain):
        """
        Fetch a public key bound to address from the network and put it in
        local storage.

        :param address: The email address of the key.
        :type address: str
        :param uri: The URI of the key.
        :type uri: str
        :param ktype: the type of the key.
        :type ktype: subclass of EncryptionKey
        :param validation: validation level for this key
                           (default: 'Weak_Chain')
        :type validation: ValidationLevels

        :return: A Deferred which fires when the key is in the storage, or
                 which fails with KeyNotFound if no valid key was found at
                 uri, fails with KeyAddressMismatch if address doesn't match
                 any uid on the key, or fails with KeyNotValidUpgrade if a key
                 with the same uid exists and the new one is not a valid
                 upgrade for it.
        :rtype: Deferred

        :raise UnsupportedKeyTypeError: if invalid key type
        """
        self._assert_supported_key_type(ktype)
        _keys = self._wrapper_map[ktype]

        logger.info("Fetch key for %s from %s" % (address, uri))
        ascii_content = yield self._get_with_combined_ca_bundle(uri)

        # XXX parse binary keys
        pubkey, _ = _keys.parse_ascii_key(ascii_content, address)
        if pubkey is None:
            raise KeyNotFound(uri)

        pubkey.validation = validation
        yield self.put_key(pubkey)

    def _assert_supported_key_type(self, ktype):
        """
        Check if ktype is one of the supported key types

        :param ktype: the type of the key.
        :type ktype: subclass of EncryptionKey

        :raise UnsupportedKeyTypeError: if invalid key type
        """
        if ktype not in self._wrapper_map:
            raise UnsupportedKeyTypeError(str(ktype))
Example no. 14
0
class SoledadHTTPSyncTarget(SyncTarget):
    """
    A SyncTarget that encrypts data before sending and decrypts data after
    receiving.

    Normally encryption will have been written to the sync database upon
    document modification. The sync database is also used to temporarily
    store the parsed documents that the remote sends us, before they are
    decrypted and written to the main database.
    """

    def __init__(self, url, source_replica_uid, creds, crypto, cert_file,
                 sync_db=None, sync_enc_pool=None):
        """
        Initialize the sync target.

        :param url: The server sync url.
        :type url: str
        :param source_replica_uid: The source replica uid which we use when
                                   deferring decryption.
        :type source_replica_uid: str
        :param creds: A dictionary containing the uuid and token.
        :type creds: creds
        :param crypto: An instance of SoledadCrypto so we can encrypt/decrypt
                        document contents when syncing.
        :type crypto: soledad.crypto.SoledadCrypto
        :param cert_file: Path to the certificate of the ca used to validate
                          the SSL certificate used by the remote soledad
                          server.
        :type cert_file: str
        :param sync_db: Optional. Handler for the db with the symmetric
                        encryption of the syncing documents. If
                        None, encryption will be done in-place,
                        instead of retrieving it from the dedicated
                        database.
        :type sync_db: Sqlite handler
        :param sync_enc_pool: The encryption pool to use to defer encryption.
        :type sync_enc_pool: SyncEncrypterPool
        """
        if url.endswith("/"):
            url = url[:-1]
        self._url = str(url) + "/sync-from/" + str(source_replica_uid)
        self.source_replica_uid = source_replica_uid
        self._auth_header = None
        self.set_creds(creds)
        self._crypto = crypto
        self._sync_db = sync_db
        self._sync_enc_pool = sync_enc_pool
        self._insert_doc_cb = None
        # asynchronous encryption/decryption attributes
        self._decryption_callback = None
        self._sync_decr_pool = None
        self._http = HTTPClient(cert_file)

    def close(self):
        self._http.close()

    def set_creds(self, creds):
        """
        Update credentials.

        :param creds: A dictionary containing the uuid and token.
        :type creds: dict
        """
        uuid = creds['token']['uuid']
        token = creds['token']['token']
        auth = '%s:%s' % (uuid, token)
        b64_token = base64.b64encode(auth)
        self._auth_header = {'Authorization': ['Token %s' % b64_token]}

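# For clarity, the 'Authorization' header built by set_creds() above boils
# down to base64("<uuid>:<token>") under the 'Token' scheme. A standalone
# sketch of the same derivation (Python 2 style, matching the code above):
import base64

def make_token_auth_header(uuid, token):
    # join uuid and token with ':' and base64-encode the pair
    b64_token = base64.b64encode('%s:%s' % (uuid, token))
    return {'Authorization': ['Token %s' % b64_token]}
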
    @property
    def _defer_encryption(self):
        return self._sync_enc_pool is not None

    #
    # SyncTarget API
    #

    @defer.inlineCallbacks
    def get_sync_info(self, source_replica_uid):
        """
        Return information about known state of remote database.

        Return the replica_uid and the current database generation of the
        remote database, and its last-seen database generation for the client
        replica.

        :param source_replica_uid: The client-side replica uid.
        :type source_replica_uid: str

        :return: A deferred which fires with (target_replica_uid,
                 target_replica_generation, target_trans_id,
                 source_replica_last_known_generation,
                 source_replica_last_known_transaction_id)
        :rtype: twisted.internet.defer.Deferred
        """
        raw = yield self._http_request(self._url, headers=self._auth_header)
        res = json.loads(raw)
        defer.returnValue([
            res['target_replica_uid'],
            res['target_replica_generation'],
            res['target_replica_transaction_id'],
            res['source_replica_generation'],
            res['source_transaction_id']
        ])

    def record_sync_info(
            self, source_replica_uid, source_replica_generation,
            source_replica_transaction_id):
        """
        Record tip information for another replica.

        After sync_exchange has been processed, the caller will have
        received new content from this replica. This call allows the
        source replica instigating the sync to inform us what their
        generation became after applying the documents we returned.

        This is used to allow future sync operations to not need to repeat data
        that we just talked about. It also means that if this is called at the
        wrong time, there can be database records that will never be
        synchronized.

        :param source_replica_uid: The identifier for the source replica.
        :type source_replica_uid: str
        :param source_replica_generation: The database generation for the
                                          source replica.
        :type source_replica_generation: int
        :param source_replica_transaction_id: The transaction id associated
                                              with the source replica
                                              generation.
        :type source_replica_transaction_id: str

        :return: A deferred which fires with the result of the query.
        :rtype: twisted.internet.defer.Deferred
        """
        data = json.dumps({
            'generation': source_replica_generation,
            'transaction_id': source_replica_transaction_id
        })
        headers = self._auth_header.copy()
        headers.update({'content-type': ['application/json']})
        return self._http_request(
            self._url,
            method='PUT',
            headers=headers,
            body=data)

    @defer.inlineCallbacks
    def sync_exchange(self, docs_by_generation, source_replica_uid,
                      last_known_generation, last_known_trans_id,
                      insert_doc_cb, ensure_callback=None,
                      defer_decryption=True, sync_id=None):
        """
        Find out which documents the remote database does not know about,
        encrypt and send them. After that, receive documents from the remote
        database.

        :param docs_by_generation: A list of (doc, gen, trans_id) tuples of
                                   local documents that were changed since
                                   the last local generation the remote
                                   replica knows about.
        :type docs_by_generation: list of tuples

        :param source_replica_uid: The uid of the source replica.
        :type source_replica_uid: str

        :param last_known_generation: Target's last known generation.
        :type last_known_generation: int

        :param last_known_trans_id: Target's last known transaction id.
        :type last_known_trans_id: str

        :param insert_doc_cb: A callback for inserting received documents from
                              target. If not overridden, this will call u1db
                              insert_doc_from_target in synchronizer, which
                              implements the TAKE OTHER semantics.
        :type insert_doc_cb: function

        :param ensure_callback: A callback that ensures we know the target
                                replica uid if the target replica was just
                                created.
        :type ensure_callback: function

        :param defer_decryption: Whether to defer the decryption process using
                                 the intermediate database. If False,
                                 decryption will be done inline.
        :type defer_decryption: bool

        :return: A deferred which fires with the new generation and
                 transaction id of the target replica.
        :rtype: twisted.internet.defer.Deferred
        """

        self._ensure_callback = ensure_callback

        if sync_id is None:
            sync_id = str(uuid4())
        self.source_replica_uid = source_replica_uid

        # save a reference to the callback so we can use it after decrypting
        self._insert_doc_cb = insert_doc_cb

        gen_after_send, trans_id_after_send = yield self._send_docs(
            docs_by_generation,
            last_known_generation,
            last_known_trans_id,
            sync_id)

        cur_target_gen, cur_target_trans_id = yield self._receive_docs(
            last_known_generation, last_known_trans_id,
            ensure_callback, sync_id,
            defer_decryption=defer_decryption)

        # update gen and trans id info in case we just sent and did not
        # receive docs.
        if gen_after_send is not None and gen_after_send > cur_target_gen:
            cur_target_gen = gen_after_send
            cur_target_trans_id = trans_id_after_send

        defer.returnValue([cur_target_gen, cur_target_trans_id])

    #
    # methods to send docs
    #

    def _prepare(self, comma, entries, **dic):
        entry = comma + '\r\n' + json.dumps(dic)
        entries.append(entry)
        return len(entry)

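# Illustrative helper (not part of the original code): _send_docs() and
# _receive_one_doc() below use _prepare() to assemble request bodies as an
# opening '[', one JSON object per line separated by ',\r\n', and a closing
# '\r\n]'. A self-contained sketch of the same layout:
import json

def build_sync_stream(metadata, docs):
    # the first entry (the metadata) carries no leading comma; every
    # following document entry does, mirroring how _prepare() is called
    entries = ['[', '\r\n' + json.dumps(metadata)]
    for doc in docs:
        entries.append(',\r\n' + json.dumps(doc))
    entries.append('\r\n]')
    return ''.join(entries)
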
    @defer.inlineCallbacks
    def _send_docs(self, docs_by_generation, last_known_generation,
                   last_known_trans_id, sync_id):

        if not docs_by_generation:
            defer.returnValue([None, None])

        headers = self._auth_header.copy()
        headers.update({'content-type': ['application/x-soledad-sync-put']})
        # add remote replica metadata to the request
        first_entries = ['[']
        self._prepare(
            '', first_entries,
            last_known_generation=last_known_generation,
            last_known_trans_id=last_known_trans_id,
            sync_id=sync_id,
            ensure=self._ensure_callback is not None)
        idx = 0
        total = len(docs_by_generation)
        for doc, gen, trans_id in docs_by_generation:
            idx += 1
            result = yield self._send_one_doc(
                headers, first_entries, doc,
                gen, trans_id, total, idx)
            if self._defer_encryption:
                self._sync_enc_pool.delete_encrypted_doc(
                    doc.doc_id, doc.rev)
            emit(SOLEDAD_SYNC_SEND_STATUS,
                 "Soledad sync send status: %d/%d"
                 % (idx, total))
        response_dict = json.loads(result)[0]
        gen_after_send = response_dict['new_generation']
        trans_id_after_send = response_dict['new_transaction_id']
        defer.returnValue([gen_after_send, trans_id_after_send])

    @defer.inlineCallbacks
    def _send_one_doc(self, headers, first_entries, doc, gen, trans_id,
                      number_of_docs, doc_idx):
        entries = first_entries[:]
        # add the document to the request
        content = yield self._encrypt_doc(doc)
        self._prepare(
            ',', entries,
            id=doc.doc_id, rev=doc.rev, content=content, gen=gen,
            trans_id=trans_id, number_of_docs=number_of_docs,
            doc_idx=doc_idx)
        entries.append('\r\n]')
        data = ''.join(entries)
        result = yield self._http_request(
            self._url,
            method='POST',
            headers=headers,
            body=data)
        defer.returnValue(result)

    def _encrypt_doc(self, doc):
        d = None
        if doc.is_tombstone():
            d = defer.succeed(None)
        elif not self._defer_encryption:
            # fallback case, for tests
            d = defer.succeed(encrypt_doc(self._crypto, doc))
        else:

            def _maybe_encrypt_doc_inline(doc_json):
                if doc_json is None:
                    # the document is not marked as tombstone, but we got
                    # nothing from the sync db. As it is not encrypted
                    # yet, we force inline encryption.
                    return encrypt_doc(self._crypto, doc)
                return doc_json

            d = self._sync_enc_pool.get_encrypted_doc(doc.doc_id, doc.rev)
            d.addCallback(_maybe_encrypt_doc_inline)
        return d

    #
    # methods to receive doc
    #

    @defer.inlineCallbacks
    def _receive_docs(self, last_known_generation, last_known_trans_id,
                      ensure_callback, sync_id, defer_decryption):

        self._queue_for_decrypt = defer_decryption \
            and self._sync_db is not None

        new_generation = last_known_generation
        new_transaction_id = last_known_trans_id

        if self._queue_for_decrypt:
            logger.debug(
                "Soledad sync: will queue received docs for decrypting.")

        if defer_decryption:
            self._setup_sync_decr_pool()

        headers = self._auth_header.copy()
        headers.update({'content-type': ['application/x-soledad-sync-get']})

        #---------------------------------------------------------------------
        # maybe receive the first document
        #---------------------------------------------------------------------

        # we fetch the first document before fetching the rest because we need
        # to know the total number of documents to be received, and this
        # information comes as metadata to each request.

        d = self._receive_one_doc(
            headers, last_known_generation, last_known_trans_id,
            sync_id, 0)
        d.addCallback(partial(self._insert_received_doc, 1, 1))
        number_of_changes, ngen, ntrans = yield d

        if defer_decryption:
            self._sync_decr_pool.start(number_of_changes)

        #---------------------------------------------------------------------
        # maybe receive the rest of the documents
        #---------------------------------------------------------------------

        # launch many asynchronous fetches and inserts of received documents
        # in the temporary sync db. Will wait for all results before
        # continuing.

        received = 1
        deferreds = []
        while received < number_of_changes:
            d = self._receive_one_doc(
                headers, last_known_generation,
                last_known_trans_id, sync_id, received)
            d.addCallback(
                partial(
                    self._insert_received_doc,
                    received + 1,  # the index of the current received doc
                    number_of_changes))
            deferreds.append(d)
            received += 1
        results = yield defer.gatherResults(deferreds)

        # get generation and transaction id of target after insertions
        if deferreds:
            _, new_generation, new_transaction_id = results.pop()

        #---------------------------------------------------------------------
        # wait for async decryption to finish
        #---------------------------------------------------------------------

        # below we do a trick so we can wait for the SyncDecrypterPool to
        # finish its work before finally returning the new generation and
        # transaction id of the remote replica. To achieve that, we create a
        # Deferred that will return the results of the sync and, if we are
        # decrypting asynchronously, we use reactor.callLater() to
        # periodically poll the decrypter and check if it has finished its
        # work. When it has finished, we either call the callback or errback
        # of that deferred. In case we are not asynchronously decrypting, we
        # just fire the deferred.

        def _shutdown_and_finish(res):
            # the decrypter pool is only set up when decryption is deferred
            if self._sync_decr_pool is not None:
                self._sync_decr_pool.close()
            return new_generation, new_transaction_id

        d = defer.Deferred()
        d.addCallback(_shutdown_and_finish)

        def _wait_or_finish():
            if not self._sync_decr_pool.has_finished():
                reactor.callLater(
                    SyncDecrypterPool.DECRYPT_LOOP_PERIOD,
                    _wait_or_finish)
            else:
                if not self._sync_decr_pool.failed():
                    d.callback(None)
                else:
                    d.errback(self._sync_decr_pool.failure)

        if defer_decryption:
            _wait_or_finish()
        else:
            d.callback(None)

        new_generation, new_transaction_id = yield d
        defer.returnValue([new_generation, new_transaction_id])

    def _receive_one_doc(self, headers, last_known_generation,
                         last_known_trans_id, sync_id, received):
        entries = ['[']
        # add remote replica metadata to the request
        self._prepare(
            '', entries,
            last_known_generation=last_known_generation,
            last_known_trans_id=last_known_trans_id,
            sync_id=sync_id,
            ensure=self._ensure_callback is not None)
        # inform server of how many documents have already been received
        self._prepare(
            ',', entries, received=received)
        entries.append('\r\n]')
        # send headers
        return self._http_request(
            self._url,
            method='POST',
            headers=headers,
            body=''.join(entries))

    def _insert_received_doc(self, idx, total, response):
        """
        Insert a received document into the local replica.

        :param idx: The index count of the current operation.
        :type idx: int
        :param total: The total number of operations.
        :type total: int
        :param response: The body and headers of the response.
        :type response: tuple(str, dict)
        """
        new_generation, new_transaction_id, number_of_changes, doc_id, \
            rev, content, gen, trans_id = \
            self._parse_received_doc_response(response)
        if doc_id is not None:
            # decrypt incoming document and insert into local database
            # -------------------------------------------------------------
            # symmetric decryption of document's contents
            # -------------------------------------------------------------
            # If arriving content was symmetrically encrypted, we decrypt it.
            # We do it inline if the defer_decryption flag is False or no
            # sync_db was defined; otherwise we defer it, writing the doc to
            # the received docs table.
            doc = SoledadDocument(doc_id, rev, content)
            if is_symmetrically_encrypted(doc):
                if self._queue_for_decrypt:
                    self._sync_decr_pool.insert_encrypted_received_doc(
                        doc.doc_id, doc.rev, doc.content, gen, trans_id,
                        idx)
                else:
                    # defer_decryption is False or no-sync-db fallback
                    doc.set_json(decrypt_doc(self._crypto, doc))
                    self._insert_doc_cb(doc, gen, trans_id)
            else:
                # not symmetrically encrypted doc, insert it directly
                # or save it in the decrypted stage.
                if self._queue_for_decrypt:
                    self._sync_decr_pool.insert_received_doc(
                        doc.doc_id, doc.rev, doc.content, gen, trans_id,
                        idx)
                else:
                    self._insert_doc_cb(doc, gen, trans_id)
            # -------------------------------------------------------------
            # end of symmetric decryption
            # -------------------------------------------------------------
        msg = "%d/%d" % (idx, total)
        emit(SOLEDAD_SYNC_RECEIVE_STATUS, msg)
        logger.debug("Soledad sync receive status: %s" % msg)
        return number_of_changes, new_generation, new_transaction_id

    def _parse_received_doc_response(self, response):
        """
        Parse the response from the server containing the received document.

        :param response: The body and headers of the response.
        :type response: tuple(str, dict)

        :return: (new_gen, new_trans_id, number_of_changes, doc_id, rev,
                 content, gen, trans_id)
        :rtype: tuple
        """
        # decode incoming stream
        parts = response.splitlines()
        if not parts or parts[0] != '[' or parts[-1] != ']':
            raise errors.BrokenSyncStream
        data = parts[1:-1]
        # decode metadata
        line, comma = utils.check_and_strip_comma(data[0])
        metadata = None
        try:
            metadata = json.loads(line)
            new_generation = metadata['new_generation']
            new_transaction_id = metadata['new_transaction_id']
            number_of_changes = metadata['number_of_changes']
        except (json.JSONDecodeError, KeyError):
            raise errors.BrokenSyncStream
        # make sure we have replica_uid from fresh new dbs
        if self._ensure_callback and 'replica_uid' in metadata:
            self._ensure_callback(metadata['replica_uid'])
        # parse incoming document info
        doc_id = None
        rev = None
        content = None
        gen = None
        trans_id = None
        if number_of_changes > 0:
            try:
                entry = json.loads(data[1])
                doc_id = entry['id']
                rev = entry['rev']
                content = entry['content']
                gen = entry['gen']
                trans_id = entry['trans_id']
            except (IndexError, KeyError):
                raise errors.BrokenSyncStream
        return new_generation, new_transaction_id, number_of_changes, \
            doc_id, rev, content, gen, trans_id

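# For reference, _parse_received_doc_response() expects a body shaped like
# the following (values are illustrative): line 0 is '[', line 1 carries the
# metadata (any trailing comma is stripped), an optional next line carries
# one document entry, and the last line is ']':
#
#   [
#   {"new_generation": 12, "new_transaction_id": "T-abc", "number_of_changes": 1},
#   {"id": "doc-1", "rev": "1-x", "content": "{...}", "gen": 12, "trans_id": "T-abc"}
#   ]
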
    def _setup_sync_decr_pool(self):
        """
        Set up the SyncDecrypterPool for deferred decryption.
        """
        if self._sync_decr_pool is None and self._sync_db is not None:
            # initialize syncing queue decryption pool
            self._sync_decr_pool = SyncDecrypterPool(
                self._crypto,
                self._sync_db,
                insert_doc_cb=self._insert_doc_cb,
                source_replica_uid=self.source_replica_uid)

    def _http_request(self, url, method='GET', body=None, headers={}):
        d = self._http.request(url, method, body, headers)
        d.addErrback(_unauth_to_invalid_token_error)
        return d
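
# Usage sketch (illustrative values only): wiring up a sync target and asking
# the remote server for its current sync state. The URL, replica uid, uuid,
# token and CA path below are placeholders, and `crypto` is assumed to be an
# already-initialized SoledadCrypto instance supplied by the caller.
def make_sync_target(crypto):
    creds = {'token': {'uuid': 'some-user-uuid',
                       'token': 'some-session-token'}}
    return SoledadHTTPSyncTarget(
        'https://soledad.example.org:2323/user-some-user-uuid',
        'my-replica-uid', creds, crypto, '/path/to/cacert.pem')

# A caller would then do something like:
#   target = make_sync_target(crypto)
#   d = target.get_sync_info('my-replica-uid')
# where the Deferred fires with the remote replica's uid, generation and
# transaction id, plus the generation it last saw from this client.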