Example No. 1
class BlobEncryptor(object):
    """
    Produces encrypted data from the cleartext data associated with a given
    SoledadDocument using AES-256 cipher in GCM mode.
    The production happens using Twisted's FileBodyProducer, which uses a
    Cooperator to schedule calls and can be paused/resumed. Each call takes at
    most 65536 bytes from the input.
    Both the production input and output are file-like objects, so they can be
    applied to a stream of data.
    """
    def __init__(self, doc_info, content_fd, secret=None):
        if not secret:
            raise EncryptionDecryptionError('no secret given')

        self.doc_id = doc_info.doc_id
        self.rev = doc_info.rev
        self._content_fd = content_fd
        content_fd.seek(0, os.SEEK_END)
        self._content_size = content_fd.tell()
        content_fd.seek(0)
        self._producer = FileBodyProducer(content_fd, readSize=2**16)

        self.sym_key = _get_sym_key_for_doc(doc_info.doc_id, secret)
        self._aes = AESWriter(self.sym_key)
        self._aes.authenticate(self._encode_preamble())

    @property
    def iv(self):
        return self._aes.iv

    @property
    def tag(self):
        return self._aes.tag

    def encrypt(self):
        """
        Starts producing encrypted data from the cleartext data.

        :return: A deferred which will be fired when encryption ends and whose
            callback will be invoked with the resulting ciphertext.
        :rtype: twisted.internet.defer.Deferred
        """
        d = self._producer.startProducing(self._aes)
        d.addCallback(lambda _: self._end_crypto_stream())
        return d

    def _encode_preamble(self):
        current_time = int(time.time())

        return PACMAN.pack(BLOB_SIGNATURE_MAGIC, ENC_SCHEME.symkey,
                           ENC_METHOD.aes_256_gcm, current_time, self.iv,
                           str(self.doc_id), str(self.rev), self._content_size)

    def _end_crypto_stream(self):
        preamble, encrypted = self._aes.end()
        result = BytesIO()
        result.write(base64.urlsafe_b64encode(preamble))
        result.write(' ')
        result.write(base64.urlsafe_b64encode(encrypted + self.tag))
        return defer.succeed(result)
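A minimal sketch of the FileBodyProducer pattern the docstring above describes, not taken from any of the collected examples: any object with a write() method can serve as the consumer, so a BytesIO collects the produced bytes, and the reactor must be running because the producer schedules its reads through a Cooperator.

from io import BytesIO

from twisted.internet import defer, task
from twisted.web.client import FileBodyProducer


@defer.inlineCallbacks
def copy_stream(reactor):
    source = BytesIO(b"cleartext data" * 1000)
    sink = BytesIO()
    # readSize=2**16 mirrors the 65536-byte reads mentioned in the docstring
    producer = FileBodyProducer(source, readSize=2 ** 16)
    yield producer.startProducing(sink)  # fires with None once the source is exhausted
    print(len(sink.getvalue()))


if __name__ == '__main__':
    task.react(copy_stream)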
Example No. 2
@defer.inlineCallbacks  # implied by the yield-on-Deferred pattern; the decorator is not shown in this fragment
def decrypt_bytes(self):
    # read_handle, write_bytes and finish_decrypt come from the surrounding (elided) context
    producer = FileBodyProducer(read_handle)
    buff = BytesIO()
    yield producer.startProducing(buff)
    self.buff = buff.getvalue()
    self.len_read += len(self.buff)
    write_bytes()
    finish_decrypt()
Example No. 5
@defer.inlineCallbacks  # implied: the body yields Deferreds from setup(), startProducing() and stop()
def encrypt_blob(filename, key, iv):
    dummy_announcer = DummyHashAnnouncer()
    manager = DiskBlobManager(dummy_announcer, '.', '.')
    yield manager.setup()
    creator = CryptStreamCreator(manager, filename, key, iv_generator(iv))
    with open(filename, 'r') as infile:
        producer = FileBodyProducer(infile, readSize=2**22)
        yield producer.startProducing(creator)
    yield creator.stop()
Example No. 6
@defer.inlineCallbacks  # implied by the use of yield and defer.returnValue
def _save_verified_blob(self, writer):
    if self.saved_verified_blob is False:
        writer.write_handle.seek(0)
        out_path = os.path.join(self.blob_dir, self.blob_hash)
        producer = FileBodyProducer(writer.write_handle)
        yield producer.startProducing(open(out_path, 'wb'))
        self.saved_verified_blob = True
        defer.returnValue(True)
    else:
        raise DownloadCanceledError()
Example No. 8
@defer.inlineCallbacks  # implied by the use of yield and defer.returnValue
def put(self, blob_id, blob_fd, size=None):
    logger.info("Saving blob in local database...")
    insert = 'INSERT INTO blobs (blob_id, payload) VALUES (?, zeroblob(?))'
    irow = yield self.dbpool.insertAndGetLastRowid(insert, (blob_id, size))
    handle = yield self.dbpool.blob('blobs', 'payload', irow, 1)
    blob_fd.seek(0)
    # XXX I have to copy the buffer here so that I'm able to
    # return a non-closed file to the caller (blobmanager.get)
    # FIXME should remove this duplication!
    # have a look at how treq does cope with closing the handle
    # for uploading a file
    producer = FileBodyProducer(blob_fd)
    done = yield producer.startProducing(handle)
    logger.info("Finished saving blob in local database.")
    defer.returnValue(done)
Example No. 9
@defer.inlineCallbacks  # implied by the use of yield and defer.returnValue
def close(self):
    self.length = self.len_so_far
    self.blob_hash = self._hashsum.hexdigest()
    if self.blob_hash and self._is_open and self.length > 0:
        # do not save 0 length files (empty tail blob in streams)
        # or if it has been closed already
        self.buffer.seek(0)
        out_path = os.path.join(self.blob_dir, self.blob_hash)
        producer = FileBodyProducer(self.buffer)
        yield producer.startProducing(open(out_path, 'wb'))
        self._is_open = False
    if self.length > 0:
        defer.returnValue(self.blob_hash)
    else:
        # 0 length files (empty tail blob in streams)
        # must return None as their blob_hash for
        # it to be saved properly by EncryptedFileMetadataManagers
        defer.returnValue(None)
Example No. 11
@defer.inlineCallbacks  # implied by the use of yield and defer.returnValue
def write_blob(self, user, blob_id, request, namespace=''):
    path = self._get_path(user, blob_id, namespace)
    try:
        mkdir_p(os.path.split(path)[0])
    except OSError:
        pass
    if os.path.isfile(path):
        # 409 - Conflict
        request.setResponseCode(409)
        request.write("Blob already exists: %s" % blob_id)
        defer.returnValue(None)
    used = yield self.get_total_storage(user)
    if used > self.quota:
        logger.error("Error 507: Quota exceeded for user: %s" % user)
        request.setResponseCode(507)
        request.write('Quota Exceeded!')
        defer.returnValue(None)
    logger.info('writing blob: %s - %s' % (user, blob_id))
    fbp = FileBodyProducer(request.content)
    yield fbp.startProducing(open(path, 'wb'))
Example No. 12
def registerWithConsumer(consumer):
    # fobj, kw and log come from the enclosing (elided) scope
    producer = FileBodyProducer(fobj, **kw)
    d = producer.startProducing(consumer)
    d.addCallback(lambda ign: consumer.unregisterProducer())
    d.addErrback(log.err, 'error producing file body')
    consumer.registerProducer(producer, True)
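A companion sketch of the consumer side of that contract; the name SinkConsumer and writing to a plain file object are assumptions, not taken from the collected examples. It shows the minimal IConsumer surface the snippet above drives: write() receives the produced bytes, while registerProducer()/unregisterProducer() bracket the transfer.

from zope.interface import implementer
from twisted.internet.interfaces import IConsumer


@implementer(IConsumer)
class SinkConsumer(object):
    """Writes everything it receives to a file-like object."""

    def __init__(self, fileobj):
        self._file = fileobj
        self._producer = None

    def registerProducer(self, producer, streaming):
        # FileBodyProducer registers itself as a streaming (push) producer.
        self._producer = producer

    def unregisterProducer(self):
        # Called from the startProducing callback in the snippet above.
        self._producer = None
        self._file.close()

    def write(self, data):
        self._file.write(data)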
Example No. 13
class BlobDecryptor(object):
    """
    Decrypts an encrypted blob associated with a given Document.

    Will raise an exception if the blob doesn't have the expected structure, or
    if the GCM tag doesn't verify.
    """
    def __init__(self, doc_info, ciphertext_fd, result=None,
                 secret=None, armor=True, start_stream=True, tag=None):
        if not secret:
            raise EncryptionDecryptionError('no secret given')

        self.doc_id = doc_info.doc_id
        self.rev = doc_info.rev
        self.fd = ciphertext_fd
        self.armor = armor
        self._producer = None
        self.result = result or BytesIO()
        sym_key = _get_sym_key_for_doc(doc_info.doc_id, secret)
        self.size = None
        self.tag = None

        preamble, iv = self._consume_preamble()
        soledad_assert(preamble)
        soledad_assert(iv)

        self._aes = AESWriter(sym_key, iv, self.result, tag=tag or self.tag)
        self._aes.authenticate(preamble)
        if start_stream:
            self._start_stream()

    @property
    def decrypted_content_size(self):
        return self._aes.written

    def _start_stream(self):
        self._producer = FileBodyProducer(self.fd, readSize=2**16)

    def _consume_preamble(self):
        """
        Consume the preamble and write the remaining bytes back as ciphertext.
        This function is called while streaming and the file descriptor may
        hold both the preamble and the ciphertext, so we consume only the
        preamble and keep the rest.
        """
        self.fd.seek(0)
        try:
            parts = self.fd.getvalue().split(SEPARATOR, 1)
            encoded_preamble = base64.urlsafe_b64decode(parts[0])
            if len(parts) == 2:
                ciphertext = parts[1]
                if self.armor:
                    ciphertext = base64.urlsafe_b64decode(ciphertext)
                self.tag, ciphertext = ciphertext[-16:], ciphertext[:-16]
                self.fd.seek(0)
                self.fd.write(ciphertext)
                self.fd.seek(len(ciphertext))
                self.fd.truncate()
                self.fd.seek(0)

        except (TypeError, ValueError):
            raise InvalidBlob

        try:
            preamble = decode_preamble(encoded_preamble)
        except InvalidPreambleException as e:
            raise InvalidBlob(e)

        if preamble.magic != MAGIC:
            raise InvalidBlob
        # TODO check timestamp. Just as a sanity check, but for instance
        # we can refuse to process something that is in the future or
        # too far in the past (1984 would be nice, hehe)
        if preamble.scheme != ENC_SCHEME.symkey:
            raise EncryptionSchemeNotImplementedException(preamble.scheme)
        if preamble.method != ENC_METHOD.aes_256_gcm:
            method = preamble.method
            raise InvalidBlob('Invalid encryption scheme: %s' % method)
        if preamble.rev != self.rev:
            rev = preamble.rev
            msg = 'Invalid revision. Expected: %s, was: %s' % (self.rev, rev)
            raise InvalidBlob(msg)
        if preamble.doc_id != self.doc_id:
            msg = 'Invalid doc_id. '
            msg += 'Expected: %s, was: %s' % (self.doc_id, preamble.doc_id)
            raise InvalidBlob(msg)

        return encoded_preamble, preamble.iv

    def _end_stream(self):
        try:
            self._aes.end()
        except InvalidTag:
            raise InvalidBlob('Invalid Tag. Blob authentication failed.')
        fd = self.result
        fd.seek(0)
        return self.result

    def decrypt(self):
        """
        Starts producing cleartext data from the ciphertext data.

        :return: A deferred which will be fired when decryption ends and whose
            callback will be invoked with the resulting cleartext.
        :rtype: twisted.internet.defer.Deferred
        """
        d = self.startProducing()
        d.addCallback(lambda _: self._end_stream())
        return d

    def startProducing(self):
        if not self._producer:
            self._start_stream()
        return self._producer.startProducing(self._aes)

    def endStream(self):
        return self._end_stream()

    def write(self, data):
        self._aes.write(data)

    def close(self):
        result = self._aes.end()
        return result
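The encryptor in Example No. 1 writes base64(preamble), a separator, then base64(ciphertext + tag), and _consume_preamble above undoes exactly that. A small sketch of that layout, for illustration only; SEPARATOR being a single space is an assumption based on the ' ' written in Example No. 1:

import base64

SEPARATOR = ' '  # assumed value, matching result.write(' ') in Example No. 1
TAG_SIZE = 16    # AES-GCM tag length, matching the ciphertext[-16:] split above


def split_armored_blob(blob):
    """Split an armored blob back into (preamble, ciphertext, tag)."""
    encoded_preamble, armored = blob.split(SEPARATOR, 1)
    preamble = base64.urlsafe_b64decode(encoded_preamble)
    payload = base64.urlsafe_b64decode(armored)
    ciphertext, tag = payload[:-TAG_SIZE], payload[-TAG_SIZE:]
    return preamble, ciphertext, tag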
Example No. 14
class Protocol(CommonProtocol, Fysom):
    message_dispatcher = MessageDispatcher().register(Connect, Quit, SendChat,
                                                      RequestFileTransfer)

    async_transitions = {'connect', 'send_file'}

    def __init__(self,
                 client_server_proto,
                 user_name,
                 is_initiator=False,
                 file_receive_path=None):
        CommonProtocol.__init__(self)
        Fysom.__init__(
            self,
            initial='not_connected',
            events=[
                # event / from / to
                ('connect', 'not_connected', 'connected'),
                ('disconnect', '*', 'done'),
                ('accept_connection', 'not_connected', 'connected'),
                ('send_chat', 'connected', 'connected'),
                ('receive_chat', 'connected', 'connected'),
                ('send_file', 'connected', 'sending_file'),
                ('receive_file', 'connected', 'receiving_file'),
                ('send_file_success', 'sending_file', 'connected'),
                ('send_file_failure', 'sending_file', 'connected'),
                ('receive_file_success', 'receiving_file', 'connected'),
                ('receive_file_failure', 'receiving_file', 'connected')
            ])

        self.client_server_proto = client_server_proto
        self.other_user_name = user_name
        self.transfer_file = None
        self.file_producer = None
        self.file_consumer = None
        self.is_initiator = is_initiator

        self.receive_path = file_receive_path
        if not self.receive_path:
            self.receive_path = os.path.abspath(os.getcwd())

    def on_message_received(self, message):
        for msg_cls, action in {
                Connect:
                lambda m: self.accept_connection(),
                Quit:
                lambda m: self.disconnect(),
                RequestFileTransfer:
                lambda m: self.receive_file(TransferFile.from_message(m)),
                SendChat:
                lambda m: self.receive_chat(m.message)
        }.items():
            if isinstance(message, msg_cls):
                action(message)
                return

    def rawDataReceived(self, data):
        assert self.file_consumer is not None
        self.file_consumer.write(data)

    def on_before_connect(self, _):
        def on_response(response):
            if self.check_response_error(response):
                self.log("Connection rejected")
                self.cancel_transition()
                self.disconnect()
            else:
                self.log("Connected")
                self.transition()

        self.send_message(Connect(), on_response)

    def on_accept_connection(self, _):
        self.log("Accepting incoming connection")
        self.send_response({})

    def on_before_send_chat(self, event):
        message = event.args[0]

        def on_response(response):
            if self.check_response_error(response):
                self.log("Received negative chat ack")
            else:
                self.log("Received positive chat ack")

        self.send_message(SendChat(message), on_response)

    def on_receive_chat(self, event):
        self.log("Received chat message: '{message}'", message=event.args[0])

        self.send_response({})

    def open_transfer_file_read(self, transfer):
        try:
            fp = open(transfer.path, 'rb')
            self.file_producer = FileBodyProducer(fp,
                                                  readSize=transfer.block_size)
            self.transfer_file = transfer
        except IOError:
            return False

        return True

    def on_before_send_file(self, event):
        transfer = event.args[0]

        def on_response(response):
            if self.check_response_error(response):
                self.log("Received error after file transfer request")
            elif response.get('result') != 'confirmed':
                self.log("File transfer request denied")
            else:
                self.log("File transfer request accepted, starting")
                if self.open_transfer_file_read(transfer):
                    self.transition()
                    return

            self.cancel_transition()

        self.send_message(
            transfer.to_message(),
            on_response).addErrback(lambda _: self.cancel_transition())

    def on_enter_sending_file(self, _):
        assert self.transfer_file is not None
        assert self.file_producer is not None

        self.setRawMode()
        d = self.file_producer.startProducing(self.transport)

        def on_success(_):
            self.log("File sent successfully")
            self.send_file_success()

        def on_failure(failure):
            failure.trap(Exception)
            self.log("File send failed: {e}", e=failure)
            self.send_file_failure()

        d.addCallbacks(on_success, on_failure)

    def on_leave_sending_file(self, _):
        self.transfer_file = None
        self.file_producer.stopProducing()
        self.file_producer = None

        self.setLineMode()

    def open_transfer_file_write(self, transfer):
        try:
            fp = open(transfer.path, 'wb')
        except (OSError, IOError) as e:
            self.log("Failed to open file for writing: {e}", e=e)
            return False

        try:
            self.file_consumer = FileConsumer(fp, transfer.size)
            self.transfer_file = transfer
        except (IOError, OSError) as e:
            self.log("Failed allocating {size} byte file for transfer: {e}",
                     size=transfer.size,
                     e=e)
            fp.close()
            return False

        return True

    def on_before_receive_file(self, event):
        transfer = event.args[0]
        path = os.path.join(self.receive_path, transfer.name)

        def generate_unique_path(initial_path):
            filename, ext = os.path.splitext(initial_path)
            for n in itertools.count():
                yield "{0}-{1}{2}".format(filename, n, ext)

        if os.path.exists(path):
            for path in generate_unique_path(path):
                if not os.path.exists(path):
                    break

        transfer = transfer._replace(path=path)

        if not self.open_transfer_file_write(transfer):
            self.write_response({'result': 'rejected'})
            return False

        self.log("Receiving file as {path}", path=path)
        self.send_response({'result': 'confirmed'})

    def on_enter_receiving_file(self, _):
        assert self.transfer_file is not None
        assert self.file_consumer is not None

        self.setRawMode()
        d = self.file_consumer.registerProducer(self, streaming=True)

        def on_success(_):
            self.log("File received successfully")
            self.receive_file_success()

        def on_failure(failure):
            failure.trap(Exception)
            self.log("File receive failed: {e}", e=failure)
            self.receive_file_failure()

        d.addCallbacks(on_success, on_failure)

    def on_leave_receiving_file(self, _):
        self.transfer_file = None
        self.file_consumer.finish()
        self.file_consumer = None

        self.setLineMode()

    def connectionMade(self):
        if self.is_initiator:
            self.connect()

    def connectionLost(self, reason=connectionDone):
        self.cancel_transition()
        self.disconnect()

    def on_enter_done(self, _):
        if self.current == 'receiving_file':
            self.receive_file_failure()
        elif self.current == 'sending_file':
            self.send_file_failure()

        if not self.transport_connected:
            return

        self.send_message(Quit()).addBoth(self.transport.loseConnection)
Example No. 15
class BlobDecryptor(object):
    """
    Decrypts an encrypted blob associated with a given Document.

    Will raise an exception if the blob doesn't have the expected structure, or
    if the GCM tag doesn't verify.
    """
    def __init__(self,
                 doc_info,
                 ciphertext_fd,
                 result=None,
                 secret=None,
                 armor=True,
                 start_stream=True,
                 tag=None):
        if not secret:
            raise EncryptionDecryptionError('no secret given')

        self.doc_id = doc_info.doc_id
        self.rev = doc_info.rev
        self.fd = ciphertext_fd
        self.armor = armor
        self._producer = None
        self.result = result or BytesIO()
        sym_key = _get_sym_key_for_doc(doc_info.doc_id, secret)
        self.size = None
        self.tag = None

        preamble, iv = self._consume_preamble()
        soledad_assert(preamble)
        soledad_assert(iv)

        self._aes = AESWriter(sym_key, iv, self.result, tag=tag or self.tag)
        self._aes.authenticate(preamble)
        if start_stream:
            self._start_stream()

    @property
    def decrypted_content_size(self):
        return self._aes.written

    def _start_stream(self):
        self._producer = FileBodyProducer(self.fd, readSize=2**16)

    def _consume_preamble(self):
        """
        Consume the preamble and write the remaining bytes back as ciphertext.
        This function is called while streaming and the file descriptor may
        hold both the preamble and the ciphertext, so we consume only the
        preamble and keep the rest.
        """
        self.fd.seek(0)
        try:
            parts = self.fd.getvalue().split(SEPARATOR, 1)
            encoded_preamble = base64.urlsafe_b64decode(parts[0])
            if len(parts) == 2:
                ciphertext = parts[1]
                if self.armor:
                    ciphertext = base64.urlsafe_b64decode(ciphertext)
                self.tag, ciphertext = ciphertext[-16:], ciphertext[:-16]
                self.fd.seek(0)
                self.fd.write(ciphertext)
                self.fd.seek(len(ciphertext))
                self.fd.truncate()
                self.fd.seek(0)

        except (TypeError, ValueError):
            raise InvalidBlob

        try:
            preamble = decode_preamble(encoded_preamble)
        except InvalidPreambleException as e:
            raise InvalidBlob(e)

        if preamble.magic != MAGIC:
            raise InvalidBlob
        # TODO check timestamp. Just as a sanity check, but for instance
        # we can refuse to process something that is in the future or
        # too far in the past (1984 would be nice, hehe)
        if preamble.scheme != ENC_SCHEME.symkey:
            raise EncryptionSchemeNotImplementedException(preamble.scheme)
        if preamble.method != ENC_METHOD.aes_256_gcm:
            method = preamble.method
            raise InvalidBlob('Invalid encryption scheme: %s' % method)
        if preamble.rev != self.rev:
            rev = preamble.rev
            msg = 'Invalid revision. Expected: %s, was: %s' % (self.rev, rev)
            raise InvalidBlob(msg)
        if preamble.doc_id != self.doc_id:
            msg = 'Invalid doc_id. '
            msg += 'Expected: %s, was: %s' % (self.doc_id, preamble.doc_id)
            raise InvalidBlob(msg)

        return encoded_preamble, preamble.iv

    def _end_stream(self):
        try:
            self._aes.end()
        except InvalidTag:
            raise InvalidBlob('Invalid Tag. Blob authentication failed.')
        fd = self.result
        fd.seek(0)
        return self.result

    def decrypt(self):
        """
        Starts producing cleartext data from the ciphertext data.

        :return: A deferred which will be fired when decryption ends and whose
            callback will be invoked with the resulting cleartext.
        :rtype: twisted.internet.defer.Deferred
        """
        d = self.startProducing()
        d.addCallback(lambda _: self._end_stream())
        return d

    def startProducing(self):
        if not self._producer:
            self._start_stream()
        return self._producer.startProducing(self._aes)

    def endStream(self):
        return self._end_stream()

    def write(self, data):
        self._aes.write(data)

    def close(self):
        result = self._aes.end()
        return result
Example No. 16
class BlobEncryptor(object):
    """
    Produces encrypted data from the cleartext data associated with a given
    Document using AES-256 cipher in GCM mode.

    The production happens using Twisted's FileBodyProducer, which uses a
    Cooperator to schedule calls and can be paused/resumed. Each call takes at
    most 65536 bytes from the input.

    Both the production input and output are file-like objects, so they can be
    applied to a stream of data.
    """

    # TODO
    # This class needs further work to allow for proper streaming.
    # Right now we HAVE TO WAIT until the end of the stream before encoding the
    # result. It should be possible to do that just encoding the chunks and
    # passing them to a sink, but for that we have to encode the chunks at
    # proper alignment (3 bytes?) with b64 if armor is defined.

    def __init__(self,
                 doc_info,
                 content_fd,
                 secret=None,
                 armor=True,
                 sink=None):
        if not secret:
            raise EncryptionDecryptionError('no secret given')

        self.doc_id = doc_info.doc_id
        self.rev = doc_info.rev
        self.armor = armor

        self._content_fd = content_fd
        self._content_size = self._get_rounded_size(content_fd)
        self._producer = FileBodyProducer(content_fd, readSize=2**16)

        self.sym_key = _get_sym_key_for_doc(doc_info.doc_id, secret)
        self._aes = AESWriter(self.sym_key, _buffer=sink)
        self._aes.authenticate(self._encode_preamble())

    def _get_rounded_size(self, fd):
        """
        Returns a rounded value in order to minimize information leaks due to
        the original size being exposed.
        """
        fd.seek(0, os.SEEK_END)
        size = _ceiling(fd.tell())
        fd.seek(0)
        return size

    @property
    def iv(self):
        return self._aes.iv

    @property
    def tag(self):
        return self._aes.tag

    def encrypt(self):
        """
        Starts producing encrypted data from the cleartext data.

        :return: A deferred which will be fired when encryption ends and whose
                 callback will be invoked with the resulting ciphertext.
        :rtype: twisted.internet.defer.Deferred
        """
        # XXX pass a sink to aes?
        d = self._producer.startProducing(self._aes)
        d.addCallback(lambda _: self._end_crypto_stream_and_encode_result())
        return d

    def _encode_preamble(self):
        scheme = ENC_SCHEME.symkey
        method = ENC_METHOD.aes_256_gcm
        content_size = self._content_size

        return Preamble(self.doc_id,
                        self.rev,
                        scheme,
                        method,
                        iv=self.iv,
                        content_size=content_size).encode()

    def _end_crypto_stream_and_encode_result(self):

        # TODO ---- this needs to be refactored to allow PROPER streaming
        # We should write the preamble as soon as possible,
        # Is it possible to write the AES stream as soon as it is encrypted by
        # chunks?
        # FIXME also, it needs to be able to encode chunks with base64 if armor

        raw_preamble, encrypted = self._aes.end()
        result = BytesIO()
        result.write(base64.urlsafe_b64encode(raw_preamble))
        result.write(SEPARATOR)

        if self.armor:
            result.write(base64.urlsafe_b64encode(encrypted + self.tag))
        else:
            result.write(encrypted + self.tag)

        result.seek(0)
        return defer.succeed(result)
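The TODO above hinges on a base64 detail: chunked encodings only concatenate cleanly when every chunk except the last has a length that is a multiple of 3 bytes. A small sketch of that alignment trick, an illustration only; the helper name b64_stream and the 3 KiB flush size are not from the class:

import base64


def b64_stream(chunks, align=3 * 1024):
    """Yield base64 pieces whose concatenation equals b64 of the concatenated input."""
    pending = b''
    for chunk in chunks:
        pending += chunk
        cut = len(pending) - (len(pending) % align)  # largest 3-byte-aligned prefix
        if cut:
            yield base64.urlsafe_b64encode(pending[:cut])
            pending = pending[cut:]
    yield base64.urlsafe_b64encode(pending)  # only the final piece may carry '=' padding


data = [b'x' * 70000, b'y' * 123, b'z' * 4096]
assert b''.join(b64_stream(data)) == base64.urlsafe_b64encode(b''.join(data))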
Example No. 17
class BlobEncryptor(object):
    """
    Produces encrypted data from the cleartext data associated with a given
    Document using AES-256 cipher in GCM mode.

    The production happens using Twisted's FileBodyProducer, which uses a
    Cooperator to schedule calls and can be paused/resumed. Each call takes at
    most 65536 bytes from the input.

    Both the production input and output are file-like objects, so they can be
    applied to a stream of data.
    """
    # TODO
    # This class needs further work to allow for proper streaming.
    # Right now we HAVE TO WAIT until the end of the stream before encoding the
    # result. It should be possible to do that just encoding the chunks and
    # passing them to a sink, but for that we have to encode the chunks at
    # proper alignment (3 bytes?) with b64 if armor is defined.

    def __init__(self, doc_info, content_fd, secret=None, armor=True,
                 sink=None):
        if not secret:
            raise EncryptionDecryptionError('no secret given')

        self.doc_id = doc_info.doc_id
        self.rev = doc_info.rev
        self.armor = armor

        self._content_fd = content_fd
        self._content_size = self._get_rounded_size(content_fd)
        self._producer = FileBodyProducer(content_fd, readSize=2**16)

        self.sym_key = _get_sym_key_for_doc(doc_info.doc_id, secret)
        self._aes = AESWriter(self.sym_key, _buffer=sink)
        self._aes.authenticate(self._encode_preamble())

    def _get_rounded_size(self, fd):
        """
        Returns a rounded value in order to minimize information leaks due to
        the original size being exposed.
        """
        fd.seek(0, os.SEEK_END)
        size = _ceiling(fd.tell())
        fd.seek(0)
        return size

    @property
    def iv(self):
        return self._aes.iv

    @property
    def tag(self):
        return self._aes.tag

    def encrypt(self):
        """
        Starts producing encrypted data from the cleartext data.

        :return: A deferred which will be fired when encryption ends and whose
                 callback will be invoked with the resulting ciphertext.
        :rtype: twisted.internet.defer.Deferred
        """
        # XXX pass a sink to aes?
        d = self._producer.startProducing(self._aes)
        d.addCallback(lambda _: self._end_crypto_stream_and_encode_result())
        return d

    def _encode_preamble(self):
        scheme = ENC_SCHEME.symkey
        method = ENC_METHOD.aes_256_gcm
        content_size = self._content_size

        return Preamble(self.doc_id, self.rev, scheme, method, iv=self.iv,
                        content_size=content_size).encode()

    def _end_crypto_stream_and_encode_result(self):

        # TODO ---- this needs to be refactored to allow PROPER streaming
        # We should write the preamble as soon as possible,
        # Is it possible to write the AES stream as soon as it is encrypted by
        # chunks?
        # FIXME also, it needs to be able to encode chunks with base64 if armor

        raw_preamble, encrypted = self._aes.end()
        result = BytesIO()
        result.write(
            base64.urlsafe_b64encode(raw_preamble))
        result.write(SEPARATOR)

        if self.armor:
            result.write(
                base64.urlsafe_b64encode(encrypted + self.tag))
        else:
            result.write(encrypted + self.tag)

        result.seek(0)
        return defer.succeed(result)
Example No. 18
class Protocol(CommonProtocol, Fysom):
    message_dispatcher = MessageDispatcher().register(
        Connect, Quit,
        SendChat, RequestFileTransfer
    )

    async_transitions = {'connect', 'send_file'}

    def __init__(self, client_server_proto, user_name, is_initiator=False,
                 file_receive_path=None):
        CommonProtocol.__init__(self)
        Fysom.__init__(self, initial='not_connected', events=[
            # event / from / to
            ('connect',
                'not_connected', 'connected'),
            ('disconnect',
             '*', 'done'),
            ('accept_connection',
                'not_connected', 'connected'),
            ('send_chat',
                'connected', 'connected'),
            ('receive_chat',
                'connected', 'connected'),
            ('send_file',
                'connected', 'sending_file'),
            ('receive_file',
                'connected', 'receiving_file'),
            ('send_file_success',
                'sending_file', 'connected'),
            ('send_file_failure',
                'sending_file', 'connected'),
            ('receive_file_success',
                'receiving_file', 'connected'),
            ('receive_file_failure',
                'receiving_file', 'connected')
        ])

        self.client_server_proto = client_server_proto
        self.other_user_name = user_name
        self.transfer_file = None
        self.file_producer = None
        self.file_consumer = None
        self.is_initiator = is_initiator

        self.receive_path = file_receive_path
        if not self.receive_path:
            self.receive_path = os.path.abspath(os.getcwd())

    def on_message_received(self, message):
        for msg_cls, action in {
            Connect:
                lambda m: self.accept_connection(),
            Quit:
                lambda m: self.disconnect(),
            RequestFileTransfer:
                lambda m: self.receive_file(TransferFile.from_message(m)),
            SendChat:
                lambda m: self.receive_chat(m.message)
        }.items():
            if isinstance(message, msg_cls):
                action(message)
                return

    def rawDataReceived(self, data):
        assert self.file_consumer is not None
        self.file_consumer.write(data)

    def on_before_connect(self, _):
        def on_response(response):
            if self.check_response_error(response):
                self.log("Connection rejected")
                self.cancel_transition()
                self.disconnect()
            else:
                self.log("Connected")
                self.transition()

        self.send_message(Connect(), on_response)

    def on_accept_connection(self, _):
        self.log("Accepting incoming connection")
        self.send_response({})

    def on_before_send_chat(self, event):
        message = event.args[0]

        def on_response(response):
            if self.check_response_error(response):
                self.log("Received negative chat ack")
            else:
                self.log("Received positive chat ack")

        self.send_message(SendChat(message), on_response)

    def on_receive_chat(self, event):
        self.log("Received chat message: '{message}'", message=event.args[0])

        self.send_response({})

    def open_transfer_file_read(self, transfer):
        try:
            fp = open(transfer.path, 'rb')
            self.file_producer = FileBodyProducer(fp,
                readSize=transfer.block_size)
            self.transfer_file = transfer
        except IOError:
            return False

        return True

    def on_before_send_file(self, event):
        transfer = event.args[0]

        def on_response(response):
            if self.check_response_error(response):
                self.log("Received error after file transfer request")
            elif response.get('result') != 'confirmed':
                self.log("File transfer request denied")
            else:
                self.log("File transfer request accepted, starting")
                if self.open_transfer_file_read(transfer):
                    self.transition()
                    return

            self.cancel_transition()

        self.send_message(transfer.to_message(), on_response).addErrback(
            lambda _: self.cancel_transition())

    def on_enter_sending_file(self, _):
        assert self.transfer_file is not None
        assert self.file_producer is not None

        self.setRawMode()
        d = self.file_producer.startProducing(self.transport)

        def on_success(_):
            self.log("File sent successfully")
            self.send_file_success()

        def on_failure(failure):
            failure.trap(Exception)
            self.log("File send failed: {e}", e=failure)
            self.send_file_failure()

        d.addCallbacks(on_success, on_failure)

    def on_leave_sending_file(self, _):
        self.transfer_file = None
        self.file_producer.stopProducing()
        self.file_producer = None

        self.setLineMode()

    def open_transfer_file_write(self, transfer):
        try:
            fp = open(transfer.path, 'wb')
        except (OSError, IOError) as e:
            self.log("Failed to open file for writing: {e}", e=e)
            return False

        try:
            self.file_consumer = FileConsumer(fp, transfer.size)
            self.transfer_file = transfer
        except (IOError, OSError) as e:
            self.log("Failed allocating {size} byte file for transfer: {e}",
                     size=transfer.size, e=e)
            fp.close()
            return False

        return True

    def on_before_receive_file(self, event):
        transfer = event.args[0]
        path = os.path.join(self.receive_path, transfer.name)

        def generate_unique_path(initial_path):
            filename, ext = os.path.splitext(initial_path)
            for n in itertools.count():
                yield "{0}-{1}{2}".format(filename, n, ext)

        if os.path.exists(path):
            for path in generate_unique_path(path):
                if not os.path.exists(path):
                    break

        transfer = transfer._replace(path=path)

        if not self.open_transfer_file_write(transfer):
            self.write_response({'result': 'rejected'})
            return False

        self.log("Receiving file as {path}", path=path)
        self.send_response({'result': 'confirmed'})

    def on_enter_receiving_file(self, _):
        assert self.transfer_file is not None
        assert self.file_consumer is not None

        self.setRawMode()
        d = self.file_consumer.registerProducer(self, streaming=True)

        def on_success(_):
            self.log("File received successfully")
            self.receive_file_success()

        def on_failure(failure):
            failure.trap(Exception)
            self.log("File receive failed: {e}", e=failure)
            self.receive_file_failure()

        d.addCallbacks(on_success, on_failure)

    def on_leave_receiving_file(self, _):
        self.transfer_file = None
        self.file_consumer.finish()
        self.file_consumer = None

        self.setLineMode()

    def connectionMade(self):
        if self.is_initiator:
            self.connect()

    def connectionLost(self, reason=connectionDone):
        self.cancel_transition()
        self.disconnect()

    def on_enter_done(self, _):
        if self.current == 'receiving_file':
            self.receive_file_failure()
        elif self.current == 'sending_file':
            self.send_file_failure()

        if not self.transport_connected:
            return

        self.send_message(Quit()).addBoth(
            self.transport.loseConnection)