Example #1
    def connectionMade(self):
        # Add these callbacks, otherwise FileSender throws some exceptions
        # when it completes
        def f1(lastChunk):
            print "finished"
            self.transport.loseConnection()

        def f2(reason):
            print "failed"
            print reason
            self.transport.loseConnection()

        fs = FileSender()
        fs.beginFileTransfer(self.factory.fp, self.transport, None).addCallbacks(f1, f2)
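The examples on this page all follow the same basic pattern, so here is a minimal, self-contained sketch of it (not taken from any of the listed projects; the class name and file path are placeholders): open a file, hand it to twisted.protocols.basic.FileSender, and clean up when the Deferred returned by beginFileTransfer fires with the last chunk sent.

from twisted.internet import protocol
from twisted.protocols.basic import FileSender


class SendOneFile(protocol.Protocol):
    """Placeholder protocol: streams one local file to the peer, then disconnects."""

    def connectionMade(self):
        self.infile = open('example.bin', 'rb')  # placeholder path
        sender = FileSender()
        # beginFileTransfer(file, consumer, transform=None) returns a Deferred
        # that fires with the last chunk written once the file is exhausted.
        d = sender.beginFileTransfer(self.infile, self.transport)
        d.addCallbacks(self._done, self._failed)

    def _done(self, last_sent):
        self.infile.close()
        self.transport.loseConnection()

    def _failed(self, reason):
        self.infile.close()
        self.transport.loseConnection()
        return reason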
Example #2
 def start_stream():
     file_sender = FileSender()
     d = file_sender.beginFileTransfer(file_handle, lbry_file_creator)
     d.addCallback(lambda _: stop_file(lbry_file_creator))
     d.addCallback(lambda _: make_stream_desc_file(lbry_file_creator.stream_hash))
     d.addCallback(lambda _: lbry_file_creator.stream_hash)
     return d
Example #3
    def transfer_file(self, file_path):
        """
        Send a file via the transit. Assume that the latter has already been
        established. If the other end provides a hash when done, check it.

        Helper for the send_file method above.
        """
        record_pipe = yield self.transit.connect()
        hasher = hashlib.sha256()

        def func(data):
            hasher.update(data)
            return data

        with open(file_path, 'rb') as f:
            file_sender = FileSender()
            yield file_sender.beginFileTransfer(f, record_pipe, func)

        ack_record = yield record_pipe.receive_record()
        ack_record = json.loads(str(ack_record, 'utf-8'))

        yield record_pipe.close()

        try:
            assert ack_record['ack'] == 'ok'
            if ack_record['sha256']:
                assert ack_record['sha256'] == hasher.hexdigest()
        except (AssertionError, KeyError):
            raise TransferError('The file transfer failed.')

        returnValue(hasher.hexdigest())
Example #4
 def connectionMade(self):
     self.transport.write('%s\r\n' % (self.insize))
     sender = FileSender()
     sender.CHUNK_SIZE = 2 ** 16
     d = sender.beginFileTransfer(self.infile, self.transport,
                                  self._monitor)
     d.addCallback(self.cbTransferCompleted)
Example #5
        def getshared_cb(data):
            if len(data) == 0:
                error = { 'status': {'error': "Invalid Request",
                                     'message': "File does not exist."} }
                request.write(json.dumps(error, sort_keys=True, encoding="utf-8"))
                request.finish()
                return

            file_path = str(data[0][0]) + "/" + fileid
            if not os.path.exists(file_path):
                error = { 'status': {'error': "Invalid Request",
                                     'message': "File does not exist."} }
                request.write(json.dumps(error, sort_keys=True, encoding="utf-8"))
                request.finish()
                return

            request.write(str(data[0][1])) # writing key
            iv_plain = self.sid.decryptData(data[0][2]) # writing IV
            print iv_plain
            iv = self.sid.encryptData(iv_plain, pubkey)
            request.write(iv)
            file = open(file_path ,"r")
            sender = FileSender()
            sender.CHUNK_SIZE = 200
            df = sender.beginFileTransfer(file, request)

            df.addErrback(err)
            df.addCallback(finishTrnf_cb, file)
Example #6
 def start_stream():
     file_sender = FileSender()
     d = file_sender.beginFileTransfer(file_handle, lbry_file_creator)
     d.addCallback(lambda _: stop_file(lbry_file_creator))
     d.addCallback(lambda _: make_stream_desc_file(lbry_file_creator.stream_hash))
     d.addCallback(lambda _: lbry_file_creator.stream_hash)
     return d
Example #7
 def lineReceived(self, line):
     line = line.strip()
     if line == OK:
         sender = FileSender()
         sender.CHUNK_SIZE = 2 ** 16
         deffered = sender.beginFileTransfer(self.fileObj, self.transport, None)
         deffered.addCallback(self.success).addErrback(self.error)     
Example #8
 def on_write_status(self, consumer):
     content = self.generate_content()
     buffer = StringIO(content)
     sender = FileSender()
     d = sender.beginFileTransfer(buffer, consumer)
     d.addCallbacks(lambda _: self.on_write_completed(consumer),
                    self.on_error)
     return d
Example #9
 def lineReceived(self, line):
     print 'sender %s' % line
     line = line.strip()
     if line == OK:
         sender = FileSender()
         sender.CHUNK_SIZE = 2 ** 16
         deffered = sender.beginFileTransfer(self.fileObj, self.transport, None)
         deffered.addCallback(self.success).addErrback(self.error)
Example #10
	def beginFileTransfer(self, file, consumer, rangeBegin, rangeEnd, transform = None):
		if not rangeBegin < rangeEnd:
			raise ValueError('rangeBegin >= rangeEnd')
		self.rangeBegin = rangeBegin
		self.rangeEnd = rangeEnd
		if file:
			file.seek(rangeBegin)
		return FileSender.beginFileTransfer(self, file, consumer, transform)
Example #11
 def on_write_status(self, consumer):
     content = self.generate_content()
     buffer = StringIO(content)
     sender = FileSender()
     d = sender.beginFileTransfer(buffer, consumer)
     d.addCallbacks(lambda _: self.on_write_completed(consumer),
                    self.on_error)
     return d
Example #12
    def _sendMail_data(self, code, resp):
        transfer = FileSender()

        deferred = transfer.beginFileTransfer(self._requestParams['file'], self.transport, self.transformChunk)
        deferred.addCallbacks(self.finishedFileTransfer, self.sendError)

        self._expected = SUCCESS
        self._okresponse = self._sendMail_sent
        self._failresponse = self._sendMail_fail
Example #13
    def start_streaming(self):
        file_sender = FileSender()
        d = file_sender.beginFileTransfer(self.file_handle, self)

        def stop_stream():
            d = self.stop()
            return d

        d.addCallback(lambda _: stop_stream())
        return d
Example #14
    def start_streaming(self):
        file_sender = FileSender()
        d = file_sender.beginFileTransfer(self.file_handle, self)

        def stop_stream():
            d = self.stop()
            return d

        d.addCallback(lambda _: stop_stream())
        return d
Example #15
 def start_stream():
     # TODO: Using FileSender isn't necessary, we can just read
     #       straight from the disk. The stream creation process
     #       should be in its own thread anyway so we don't need to
     #       worry about interacting with the twisted reactor
     file_sender = FileSender()
     d = file_sender.beginFileTransfer(file_handle, lbry_file_creator)
     d.addCallback(lambda _: stop_file(lbry_file_creator))
     d.addCallback(lambda _: make_stream_desc_file(lbry_file_creator.stream_hash))
     d.addCallback(lambda _: lbry_file_creator.stream_hash)
     return d
Example #16
 def connectionMade(self):
     """ """
     instruction = dict(file_size=self.insize,
                        original_file_path=self.path)
     instruction = json.dumps(instruction)
     self.transport.write(instruction+'\r\n')
     sender = FileSender()
     sender.CHUNK_SIZE = 2 ** 16
     d = sender.beginFileTransfer(self.infile, self.transport,
                                  self._monitor)
     d.addCallback(self.cbTransferCompleted)
Example #17
 def _start_transfer(self, _discard):
     if self.fileObj is None:
         self._failure((2, "NO_FILE_LOADED"))
     else:
         logging.info("Started file transfer")
         self.busy = True
         fileProducer = FileSender()
         fileProducer.CHUNK_SIZE = 65536
         def_obj = fileProducer.beginFileTransfer(file=self.fileObj, consumer=self.transport)
         def_obj.addCallback(self._done_transfer, True)
         def_obj.addErrback(self._done_transfer, False)
Example #18
 def _start_transfer(self, _discard):
     if self.fileObj is None:
         self._failure((2, 'NO_FILE_LOADED'))
     else:
         logging.info('Started file transfer')
         self.busy = True
         fileProducer = FileSender()
         fileProducer.CHUNK_SIZE = 65536
         def_obj = fileProducer.beginFileTransfer(file=self.fileObj,
                                                  consumer=self.transport)
         def_obj.addCallback(self._done_transfer, True)
         def_obj.addErrback(self._done_transfer, False)
Example #19
 def connectionMade(self):
     """ """
     fileHeader = session.Message(session.fileMsg)
     fileHeader.fileSize = self.insize
     fileHeader.fileName = self.relPath
     fileHeader.sessionID = self.sessionID
     self.transport.write(fileHeader.serialize() + '\r\n')
     sender = FileSender()
     sender.CHUNK_SIZE = 2 ** 16
     d = sender.beginFileTransfer(self.infile, self.transport,
                                  self._monitor)
     d.addCallback(self.cbTransferCompleted)
Example #20
    def connectionMade(self):
        """ """
        # try:
        instruction = dict(file_size=self.insize, original_file_path=self.path)
        instruction = json.dumps(instruction)
        # except ValueError:
        # "Accepting as delete command instead..."

        self.transport.write(instruction + "\r\n")
        sender = FileSender()
        sender.CHUNK_SIZE = 2 ** 16
        d = sender.beginFileTransfer(self.infile, self.transport, self._monitor)
        d.addCallback(self.cbTransferCompleted)
Example #21
 def beginFileTransfer(self,
                       file,
                       consumer,
                       rangeBegin,
                       rangeEnd,
                       transform=None):
     if not rangeBegin < rangeEnd:
         raise ValueError('rangeBegin >= rangeEnd')
     self.rangeBegin = rangeBegin
     self.rangeEnd = rangeEnd
     if file:
         file.seek(rangeBegin)
     return FileSender.beginFileTransfer(self, file, consumer, transform)
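Examples #10 and #21 seek to rangeBegin but leave enforcing rangeEnd to other overrides not shown here. As a hedged alternative sketch (the LimitedReader and send_range names are illustrative, not from those projects), the same effect can be obtained with a stock FileSender by wrapping the file object so that reads stop at the end of the range:

from twisted.protocols.basic import FileSender


class LimitedReader(object):
    """File-like wrapper that only exposes the bytes in [range_begin, range_end)."""

    def __init__(self, fileobj, range_begin, range_end):
        if not range_begin < range_end:
            raise ValueError('rangeBegin >= rangeEnd')
        fileobj.seek(range_begin)
        self._file = fileobj
        self._remaining = range_end - range_begin

    def read(self, size=-1):
        # FileSender stops as soon as read() returns an empty string.
        if self._remaining <= 0:
            return b''
        if size < 0 or size > self._remaining:
            size = self._remaining
        data = self._file.read(size)
        self._remaining -= len(data)
        return data


def send_range(fileobj, consumer, range_begin, range_end, transform=None):
    sender = FileSender()
    return sender.beginFileTransfer(LimitedReader(fileobj, range_begin, range_end),
                                    consumer, transform)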
Example #22
    def connectionMade(self):
        """ """
        self.logger = MessageLogger(open(r'log\client_logger.txt', "a"))
        self.logger.log("[connected at %s]" %
                        time.asctime(time.localtime(time.time())))

        instruction = dict(file_size=self.insize, original_file_path=self.path)
        instruction = json.dumps(instruction)
        self.transport.write(instruction + '\r\n')
        sender = FileSender()
        sender.CHUNK_SIZE = 2**16
        d = sender.beginFileTransfer(self.infile, self.transport,
                                     self._monitor)
        d.addCallback(self.cbTransferCompleted)
Example #23
    def read(self, write_func):
        def close_self(*args):
            self.close_read_handle(file_handle)
            return args[0]

        file_sender = FileSender()
        reader = HashBlobReader(write_func)
        file_handle = self.open_for_reading()
        if file_handle is not None:
            d = file_sender.beginFileTransfer(file_handle, reader)
            d.addCallback(close_self)
        else:
            d = defer.fail(ValueError("Could not read the blob"))
        return d
Example #24
    def read(self, write_func):
        def close_self(*args):
            self.close_read_handle(file_handle)
            return args[0]

        file_sender = FileSender()
        reader = HashBlobReader(write_func)
        file_handle = self.open_for_reading()
        if file_handle is not None:
            d = file_sender.beginFileTransfer(file_handle, reader)
            d.addCallback(close_self)
        else:
            d = defer.fail(ValueError("Could not read the blob"))
        return d
Example #25
    def read(self, write_func):
        """
        This function is only used in StreamBlobDecryptor
        and should be deprecated in favor of open_for_reading()
        """
        def close_self(*args):
            self.close_read_handle(file_handle)
            return args[0]

        file_sender = FileSender()
        reader = HashBlobReader_v0(write_func)
        file_handle = self.open_for_reading()
        if file_handle is not None:
            d = file_sender.beginFileTransfer(file_handle, reader)
            d.addCallback(close_self)
        else:
            d = defer.fail(IOError("Could not read the blob"))
        return d
Example #26
    def send_file_data(self):
        fh = open(self.file_name, 'rb')

        def file_transferred(_):
            self.gui.write_own_file(self.username,
                                    os.path.basename(self.file_name))
            fh.close()

        def finish(_):
            if not fh.closed:
                fh.close()

        def error(e):
            self.gui.print_debug_info('Error sending the file')

        sender = FileSender()
        d = sender.beginFileTransfer(fh, self.protocol.transport)
        d.addCallbacks(file_transferred, finish)
        d.addErrback(error)
Example #27
File: io.py Project: Fuzzwah/riko
class FileReader(AccumulatingProtocol):
    def __init__(self, filename, transform=None, delay=0, verbose=False):
        self.f = open(filename, 'rb')
        self.transform = transform
        self.delay = delay
        self.producer = FileSender()
        self.logger = gogo.Gogo(__name__, verbose=verbose).logger

    def cleanup(self, *args):
        self.f.close()
        self.producer.stopProducing()

    def resumeProducing(self):
        chunk = self.file.read(self.CHUNK_SIZE) if self.file else ''

        if not chunk:
            self.file = None
            self.consumer.unregisterProducer()

            if self.deferred and self.delay:
                callLater(self.delay, self.deferred.callback, self.lastSent)
            elif self.deferred:
                self.deferred.callback(self.lastSent)

            self.deferred = None
            return

    def connectionLost(self, reason):
        self.logger.debug('connectionLost: %s', reason)
        self.cleanup()

    def connectionMade(self):
        self.logger.debug('Connection made from %s', self.transport.getPeer())
        args = (self.f, self.transport, self.transform)
        self.d = self.closedDeferred = self.producer.beginFileTransfer(*args)

        while not self.d.called:
            self.producer.resumeProducing()

        self.d.addErrback(self.logger.error)
        self.d.addBoth(self.cleanup)
Example #28
File: io.py Project: zmyer/riko
class FileReader(AccumulatingProtocol):
    def __init__(self, filename, transform=None, delay=0, verbose=False):
        self.f = open(filename, 'rb')
        self.transform = transform
        self.delay = delay
        self.producer = FileSender()
        self.logger = gogo.Gogo(__name__, verbose=verbose).logger

    def cleanup(self, *args):
        self.f.close()
        self.producer.stopProducing()

    def resumeProducing(self):
        chunk = self.file.read(self.CHUNK_SIZE) if self.file else ''

        if not chunk:
            self.file = None
            self.consumer.unregisterProducer()

            if self.deferred and self.delay:
                callLater(self.delay, self.deferred.callback, self.lastSent)
            elif self.deferred:
                self.deferred.callback(self.lastSent)

            self.deferred = None
            return

    def connectionLost(self, reason):
        self.logger.debug('connectionLost: %s', reason)
        self.cleanup()

    def connectionMade(self):
        self.logger.debug('Connection made from %s', self.transport.getPeer())
        args = (self.f, self.transport, self.transform)
        self.d = self.closedDeferred = self.producer.beginFileTransfer(*args)

        while not self.d.called:
            self.producer.resumeProducing()

        self.d.addErrback(self.logger.error)
        self.d.addBoth(self.cleanup)
Example #29
    def sendFile(self, filename, callback = lambda x,y: (x,y)):
        d = self._mkHeaders(filename)

        print d['size']

        transport = self.transport
        end_callback = self.end_callback

        class ProgressMeter(object):
            def __init__(self, filename, callback):
                self.transferred = 0
                self.full = d['size']
                self.callback = callback
                self.cancelled = False
            def monitor(self, data):
                if self.cancelled:
                    print 'progressmeter: cancelled!'
                    transport.unregisterProducer()
                    transport.loseConnection()
                    end_callback()

                self.transferred += len(data)
                self.callback(self.transferred, self.full)
                return data

        self.fp = urllib.urlopen(filename)
        self.sentBytes = 0


        self.transport.write(base64.encodestring(json.dumps(d)))
        self.transport.write('\r\n')

        sender = FileSender()
        sender.CHUNK_SIZE = 2 ** 16

        pm = ProgressMeter(filename, callback)

        d = sender.beginFileTransfer(self.fp, self.transport, pm.monitor)

        d.addCallback(self.done)
        return pm
Example #30
        def getshared_cb(data):
            if len(data) == 0:
                error = {
                    'status': {
                        'error': "Invalid Request",
                        'message': "File does not exist."
                    }
                }
                request.write(
                    json.dumps(error, sort_keys=True, encoding="utf-8"))
                request.finish()
                return

            file_path = str(data[0][0]) + "/" + fileid
            if not os.path.exists(file_path):
                error = {
                    'status': {
                        'error': "Invalid Request",
                        'message': "File does not exist."
                    }
                }
                request.write(
                    json.dumps(error, sort_keys=True, encoding="utf-8"))
                request.finish()
                return

            request.write(str(data[0][1]))  # writing key
            iv_plain = self.sid.decryptData(data[0][2])  # writing IV
            #print iv_plain
            iv = self.sid.encryptData(iv_plain, pubkey)
            request.write(iv)
            file = open(file_path, "r")
            sender = FileSender()
            sender.CHUNK_SIZE = 200
            df = sender.beginFileTransfer(file, request)

            df.addErrback(err)
            df.addCallback(finishTrnf_cb, file)
Example #31
            def send_file(result):
                filename = result[0][0].strip()
                filepath = os.path.join(common.file_dir, filename)
                print "send file ", filepath
                try:
                    outfile = open(filepath, 'rb')
                except IOError:
                    print "no such file"
                    return
                else:
                    s = outfile.read()
                    outfile.seek(0)
                    filesize = len(s)
                    crc = crc32(s)
                    line = '2#%s|%s|%s' % (filename, filesize, crc)
                    self.sendLine(line)

                def transfer_completed(lastsent):
                    outfile.close()

                sender = FileSender()
                sender.CHUNK_SIZE = common.chunk_size
                d = sender.beginFileTransfer(outfile, self.transport)
                d.addCallback(transfer_completed)
Example #32
            def send_file(result):
                filename = result[0][0].strip()
                filepath = os.path.join(common.file_dir, filename)
                print "send file ", filepath
                try:
                    outfile = open(filepath, 'rb')
                except IOError:
                    print "no such file"
                    return
                else:
                    s = outfile.read()
                    outfile.seek(0)
                    filesize = len(s)
                    crc = crc32(s)
                    line = '2#%s|%s|%s' % (filename, filesize, crc)
                    self.sendLine(line)

                def transfer_completed(lastsent):
                    outfile.close()

                sender = FileSender()
                sender.CHUNK_SIZE = common.chunk_size
                d = sender.beginFileTransfer(outfile, self.transport)
                d.addCallback(transfer_completed)
Example #33
class BlobRequestHandler(object):
    implements(IQueryHandler, IBlobSender)
    PAYMENT_RATE_QUERY = 'blob_data_payment_rate'
    BLOB_QUERY = 'requested_blob'
    AVAILABILITY_QUERY = 'requested_blobs'

    def __init__(self, blob_manager, wallet, payment_rate_manager,
                 analytics_manager):
        self.blob_manager = blob_manager
        self.payment_rate_manager = payment_rate_manager
        self.wallet = wallet
        self.query_identifiers = [
            self.PAYMENT_RATE_QUERY, self.BLOB_QUERY, self.AVAILABILITY_QUERY
        ]
        self.analytics_manager = analytics_manager
        self.peer = None
        self.blob_data_payment_rate = None
        self.read_handle = None
        self.currently_uploading = None
        self.file_sender = None
        self.blob_bytes_uploaded = 0
        self._blobs_requested = []

    ######### IQueryHandler #########

    def register_with_request_handler(self, request_handler, peer):
        self.peer = peer
        request_handler.register_query_handler(self, self.query_identifiers)
        request_handler.register_blob_sender(self)

    def handle_queries(self, queries):
        response = defer.succeed({})
        log.debug("Handle query: %s", str(queries))

        if self.AVAILABILITY_QUERY in queries:
            self._blobs_requested = queries[self.AVAILABILITY_QUERY]
            response.addCallback(lambda r: self._reply_to_availability(
                r, self._blobs_requested))
        if self.PAYMENT_RATE_QUERY in queries:
            offered_rate = queries[self.PAYMENT_RATE_QUERY]
            offer = Offer(offered_rate)
            if offer.rate is None:
                log.warning("Empty rate offer")
            response.addCallback(
                lambda r: self._handle_payment_rate_query(offer, r))
        if self.BLOB_QUERY in queries:
            incoming = queries[self.BLOB_QUERY]
            response.addCallback(
                lambda r: self._reply_to_send_request(r, incoming))
        return response

    ######### IBlobSender #########

    def send_blob_if_requested(self, consumer):
        if self.currently_uploading is not None:
            return self.send_file(consumer)
        return defer.succeed(True)

    def cancel_send(self, err):
        if self.currently_uploading is not None:
            self.currently_uploading.close_read_handle(self.read_handle)
        self.read_handle = None
        self.currently_uploading = None
        return err

    ######### internal #########

    def _reply_to_availability(self, request, blobs):
        d = self._get_available_blobs(blobs)

        def set_available(available_blobs):
            log.debug("available blobs: %s", str(available_blobs))
            request.update({'available_blobs': available_blobs})
            return request

        d.addCallback(set_available)
        return d

    def _handle_payment_rate_query(self, offer, request):
        blobs = self._blobs_requested
        log.debug("Offered rate %f LBC/mb for %i blobs", offer.rate,
                  len(blobs))
        reply = self.payment_rate_manager.reply_to_offer(
            self.peer, blobs, offer)
        if reply.is_accepted:
            self.blob_data_payment_rate = offer.rate
            request[self.PAYMENT_RATE_QUERY] = "RATE_ACCEPTED"
            log.debug("Accepted rate: %f", offer.rate)
        elif reply.is_too_low:
            request[self.PAYMENT_RATE_QUERY] = "RATE_TOO_LOW"
            log.debug("Reject rate: %f", offer.rate)
        elif reply.is_unset:
            log.warning("Rate unset")
            request['incoming_blob'] = {'error': 'RATE_UNSET'}
        log.debug("Returning rate query result: %s", str(request))

        return request

    def _handle_blob_query(self, response, query):
        log.debug("Received the client's request to send a blob")
        response['incoming_blob'] = {}

        if self.blob_data_payment_rate is None:
            response['incoming_blob'] = {'error': "RATE_UNSET"}
            return response
        else:
            return self._send_blob(response, query)

    def _send_blob(self, response, query):
        d = self.blob_manager.get_blob(query)
        d.addCallback(self.open_blob_for_reading, response)
        return d

    def open_blob_for_reading(self, blob, response):
        response_fields = {}
        d = defer.succeed(None)
        if blob.is_validated():
            read_handle = blob.open_for_reading()
            if read_handle is not None:
                self.currently_uploading = blob
                self.read_handle = read_handle
                log.info("Sending %s to %s", str(blob), self.peer)
                response_fields['blob_hash'] = blob.blob_hash
                response_fields['length'] = blob.length
                response['incoming_blob'] = response_fields
                d.addCallback(lambda _: self.record_transaction(blob))
                d.addCallback(lambda _: response)
                return d
        log.debug("We can not send %s", str(blob))
        response['incoming_blob'] = {'error': 'BLOB_UNAVAILABLE'}
        d.addCallback(lambda _: response)
        return d

    def record_transaction(self, blob):
        d = self.blob_manager.add_blob_to_upload_history(
            str(blob), self.peer.host, self.blob_data_payment_rate)
        return d

    def _reply_to_send_request(self, response, incoming):
        response_fields = {}
        response['incoming_blob'] = response_fields

        if self.blob_data_payment_rate is None:
            log.debug("Rate not set yet")
            response['incoming_blob'] = {'error': 'RATE_UNSET'}
            return defer.succeed(response)
        else:
            log.debug("Requested blob: %s", str(incoming))
            d = self.blob_manager.get_blob(incoming)
            d.addCallback(
                lambda blob: self.open_blob_for_reading(blob, response))
            return d

    def _get_available_blobs(self, requested_blobs):
        d = self.blob_manager.completed_blobs(requested_blobs)
        return d

    def send_file(self, consumer):
        def _send_file():
            inner_d = start_transfer()
            # TODO: if the transfer fails, check if it's because the connection was cut off.
            # TODO: if so, perhaps bill the client
            inner_d.addCallback(lambda _: set_expected_payment())
            inner_d.addBoth(set_not_uploading)
            return inner_d

        def count_bytes(data):
            uploaded = len(data)
            self.blob_bytes_uploaded += uploaded
            self.peer.update_stats('blob_bytes_uploaded', uploaded)
            if self.analytics_manager is not None:
                self.analytics_manager.add_observation(
                    analytics.BLOB_BYTES_UPLOADED, uploaded)
            return data

        def start_transfer():
            self.file_sender = FileSender()
            log.debug("Starting the file upload")
            assert self.read_handle is not None, \
                "self.read_handle was None when trying to start the transfer"
            d = self.file_sender.beginFileTransfer(self.read_handle, consumer,
                                                   count_bytes)
            return d

        def set_expected_payment():
            log.debug("Setting expected payment")
            if (self.blob_bytes_uploaded != 0
                    and self.blob_data_payment_rate is not None
                    and self.blob_data_payment_rate > 0):
                # TODO: explain why 2**20
                self.wallet.add_expected_payment(
                    self.peer, self.currently_uploading.length * 1.0 *
                    self.blob_data_payment_rate / 2**20)
                self.blob_bytes_uploaded = 0
            self.peer.update_stats('blobs_uploaded', 1)
            return None

        def set_not_uploading(reason=None):
            if self.currently_uploading is not None:
                self.currently_uploading.close_read_handle(self.read_handle)
                self.read_handle = None
                self.currently_uploading = None
            self.file_sender = None
            if reason is not None and isinstance(reason, Failure):
                log.warning("Upload has failed. Reason: %s",
                            reason.getErrorMessage())

        return _send_file()
Example #34
def create_lbry_file(session,
                     lbry_file_manager,
                     file_name,
                     file_handle,
                     key=None,
                     iv_generator=None):
    """Turn a plain file into an LBRY File.

    An LBRY File is a collection of encrypted blobs of data and the metadata that binds them
    together which, when decrypted and put back together according to the metadata, results
    in the original file.

    The stream parameters that aren't specified are generated, the file is read and broken
    into chunks and encrypted, and then a stream descriptor file with the stream parameters
    and other metadata is written to disk.

    @param session: An Session object.
    @type session: Session

    @param lbry_file_manager: The EncryptedFileManager object this LBRY File will be added to.
    @type lbry_file_manager: EncryptedFileManager

    @param file_name: The path to the plain file.
    @type file_name: string

    @param file_handle: The file-like object to read
    @type file_handle: any file-like object which can be read by twisted.protocols.basic.FileSender

    @param key: the raw AES key which will be used to encrypt the blobs. If None, a random key will
        be generated.
    @type key: string

    @param iv_generator: a generator which yields initialization
        vectors for the blobs. Will be called once for each blob.
    @type iv_generator: a generator function which yields strings

    @return: a Deferred which fires with the stream_hash of the LBRY File
    @rtype: Deferred which fires with hex-encoded string
    """

    base_file_name = os.path.basename(file_name)
    file_directory = os.path.dirname(file_handle.name)

    lbry_file_creator = EncryptedFileStreamCreator(session.blob_manager,
                                                   lbry_file_manager,
                                                   base_file_name, key,
                                                   iv_generator)

    yield lbry_file_creator.setup()
    # TODO: Using FileSender isn't necessary, we can just read
    #       straight from the disk. The stream creation process
    #       should be in its own thread anyway so we don't need to
    #       worry about interacting with the twisted reactor
    file_sender = FileSender()
    yield file_sender.beginFileTransfer(file_handle, lbry_file_creator)

    log.debug(
        "the file sender has triggered its deferred. stopping the stream writer"
    )
    yield lbry_file_creator.stop()

    log.debug("making the sd blob")
    sd_info = lbry_file_creator.sd_info
    descriptor_writer = BlobStreamDescriptorWriter(session.blob_manager)
    sd_hash = yield descriptor_writer.create_descriptor(sd_info)

    log.debug("saving the stream")
    yield session.storage.store_stream(sd_info['stream_hash'], sd_hash,
                                       sd_info['stream_name'], sd_info['key'],
                                       sd_info['suggested_file_name'],
                                       sd_info['blobs'])
    log.debug("adding to the file manager")
    lbry_file = yield lbry_file_manager.add_published_file(
        sd_info['stream_hash'], sd_hash, binascii.hexlify(file_directory),
        session.payment_rate_manager,
        session.payment_rate_manager.min_blob_data_payment_rate)
    defer.returnValue(lbry_file)
Example #35
class EchoClient(LineReceiver):
    def __init__(self):
        pub.subscribe(self.filenameListener, "file")
        pub.subscribe(self.folderListener, "folder")
        pub.subscribe(self.cancelData, "stop")
        pub.subscribe(self.deleteQueue, "deletequeues")
        pub.subscribe(self.closed, "closewindow")
        self.filestate = 1
        self.folderstate = 1
        self.folderpath = ""
        self.filequeue = []
        self.folderqueue = []
        self.total = 0
        self.done = 0
        self.which = 0
        self.filecount = 0
        self.num = 0
        self.cancel = 0
        self.foldertab = 0
        self.count = 0
        self.overal = 0

    def connectionMade(self):
        pass

    def closed(self, msg):
        self.transport.loseConnection()

    def cancelData(self, msg):
        if not self.filestate: self.cancelOtherData()
        self.sendLine('{"canceldone":"1"}')

    def deleteQueue(self, msg, msg2):
        if msg2 == "1":
            try:
                del self.folderqueue[int(msg)]
            except:
                pass
        elif msg2 == "2":
            try:
                del self.filequeue[int(msg)]
            except:
                pass

    def deleteFolderQueue(self, msg):
        ind = int(msg)
        del self.folderqueue[ind]

    def cancelOtherData(self):
        self.transport.unregisterProducer()
        self.sendLine("cancel")
        if self.filequeue:
            pub.sendMessage("deletefile", msg="0")
            del self.filequeue[0:self.count]

    def filenameListener(self, real, path, size):
        if self.filestate == 1 and self.folderstate == 1:
            self.filestate = 0
            if self.count:
                self.count -= 1
            data = {"filename": path, "size": size}
            jdata = js.dumps(data)
            self.sendLine(jdata)
            self.f = open(os.path.abspath(real), 'rb')
            self.sender = FileSender()
            self.sender.beginFileTransfer(self.f, self.transport).addCallback(
                self.otan)
        else:
            if path.find("\\") == -1:
                pub.sendMessage("filequeue", msg=path)
            self.filequeue.append([real, path, size])

    def monitor(self, data):
        pass

    def otan(self, lastsent):
        self.f.close()
        self.sendLine("done")

    def folderListener(self, path):
        if self.filestate == 1 and self.folderstate == 1:
            self.folderstate = 0
            self.folderpath = path
            get = folderHandle(path)
            buff = get.folderDetails()
            data = {"folder": buff[0], "files": buff[1], "filesize": buff[2]}
            self.count = buff[1]
            self.overal = buff[1]
            jdata = js.dumps(data)
            self.sendLine(jdata)
            data = get.searchFolder()
            self.transport.write(data)
            self.sendLine("folderDone")
        else:
            pub.sendMessage("folderqueue", msg=path)
            self.folderqueue.append(path)

    def lineReceived(self, line):
        jdata = js.loads(line)
        self.decideData(jdata)

    def decideData(self, data):
        if "name" in data.keys():
            p = self.transport.getPeer()
            jdata = js.dumps(data)
            self.sendLine(jdata)
            pub.sendMessage("peers", name=data['name'], port=p.port)

        elif "canceldone" in data.keys():
            self.cancelOtherData()

        elif "disconnect" in data.keys():
            pub.sendMessage("close", msg="")

        elif "filename" in data.keys():
            self.which = 0
            self.proc = HandleFiles(data['filename'])
            self.proc.openFile()
            if self.num:
                self.filecount += 1
                self.num -= 1
            else:
                self.filecount = 0
            pub.sendMessage("process",
                            name=data['filename'],
                            size=data['size'],
                            num=str(self.filecount))
            self.setRawMode()

        elif "folder" in data.keys():
            self.which = 1
            self.num = int(data['files'])
            pub.sendMessage("updatefolder", msg=str(data['files']))
            self.proc = HandleFiles("None")
            self.proc.openFolder()
            self.setRawMode()

        elif "done" in data.keys():
            pub.sendMessage("donereceived", msg="")
            self.filestate = 1
            if self.filequeue:
                self.continueFileQueue()
            else:
                if self.folderqueue:
                    self.continueFolderQueue()
                else:
                    pass
        elif "doneFolder" in data.keys():
            self.folderstate = 1
            self.processFiles()

        elif "kepp" in data.keys():
            self.cancel = 0

    def createFolder(self):
        pro = folderHandle("None")
        pro.createFolder()

    def processFiles(self):
        path = self.folderpath + r"\\"
        get = folderHandle(path)
        files = get.getFiles()
        for i in files:
            self.filenameListener(i[1], i[0], i[2])

    def continueFolderQueue(self):
        if self.folderqueue:
            self.folderListener(self.folderqueue[0])
            del self.folderqueue[0]
        elif self.folderstate == 1:
            self.continueFileQueue()

    def continueFileQueue(self):
        if self.filequeue:
            self.filenameListener(self.filequeue[0][0], self.filequeue[0][1],
                                  self.filequeue[0][2])
            del self.filequeue[0]
            pub.sendMessage("deletefile", msg="0")
        elif self.filestate == 1:
            self.continueFolderQueue()

    def rawDataReceived(self, data):
        if "cancel\r\n" in data:
            self.sendLine('{"done":"1"}')
            self.setLineMode()
        if self.which == 1:
            get = self.proc.writeFolder(data)
            if get == 1:
                self.total = 0
                self.createFolder()
                self.sendLine('{"doneFolder":"1"}')
                self.setLineMode()
        elif self.which == 0:
            get = self.proc.writeFile(data)
            self.total += len(data)
            pub.sendMessage("update", current=str(self.total), speed=len(data))
            if get == 1:
                self.total = 0
                pub.sendMessage("done", msg="")
                self.sendLine('{"done":"1"}')
                self.setLineMode()
            else:
                pass
        elif self.which == 3:
            pass

    def sendFileName(self, name):
        data = {"filename": name}
        jdata = js.dumps(data)
        self.sendLine(jdata)
Example #36
def cbStore(consumer, filename):
    fs = FileSender()
    d = fs.beginFileTransfer(open(filename, 'r'), consumer)
    d.addCallback(lambda _: consumer.finish()).addErrback(fail, "cbStore")
    return d
Example #37
def sendfile(consumer, fileObj):
	s = FileSender()
	d = s.beginFileTransfer(fileObj, consumer)
	d.addCallback(lambda _: consumer.finish())
	return d
Example #38
 def beginFileTransfer(self, fd, length, *args, **kwargs):
     self.length = length
     return FileSender.beginFileTransfer(self, fd, *args, **kwargs)
Example #39
def upload_file(file, transport):
  sender = FileSender()
  sender.CHUNK_SIZE = 2 ** 16
  return sender.beginFileTransfer(file, transport)
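Several of the examples on this page (e.g. #4, #16, #19, #40) pass a _monitor callback as the third argument; FileSender calls that transform on every chunk before writing it to the consumer, so it can be used for progress reporting as long as it returns the data unchanged. Below is a small illustrative variant of upload_file built on that idea (the helper and report names are hypothetical, not from any project above):

from twisted.protocols.basic import FileSender


def upload_file_with_progress(file, transport, total_size, report):
    sender = FileSender()
    sender.CHUNK_SIZE = 2 ** 16
    sent = [0]  # mutable cell so the closure can accumulate a byte count

    def monitor(data):
        sent[0] += len(data)
        report(sent[0], total_size)  # e.g. update a progress bar or log a percentage
        return data                  # the transform must hand each chunk back

    return sender.beginFileTransfer(file, transport, monitor)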
Example #40
class KinectClient(LineReceiver):

    """ Twisted Protocol that receives, handles, and sends commands through the network.

    Upon connecting, this client broadcasts a "get" message to every Kinect at our location. If we were for some reason
    disconnected, this lets the client sync up with current settings.

    Once sync'd, the client will listen for commands over the network and respond accordingly. For example, if this
    client receives "set location ID recording 1", and if the location matches this client's location, then this
    client will set the KinectController's recording setting to 1.

    Meanwhile, this client will also send messages if it's asked to do so by its factory. For example, if the
    factory recognizes that an external button is pushed, then it will set the KinectController's state and ask this
    client to inform the other Kinects in the room of the change. Also, if this client is running in interactive mode,
    then the user can issue commands manually. In that case the factory will again send the message through this client.
    """

    def __init__(self, factory):
        self.factory = factory
        self.kinectController = self.factory.kinectController
        self.clientLocation = self.factory.clientLocation
        self.clientID = self.factory.clientID
        self.logger = self.factory.logger
        self.kinectController.file_sent = 0
        self.pendingFile = None
        self.lastUploadTime = 0
        # self.sender.CHUCK_SIZE = 2 ** 17



    def uptime(self):
        """ Return how long we've been connected to the server. """

        if self.connected:
            return time.time() - self.timeAtConnect
        else:
            return 0

    def synced(self):
        """ True if we're sync'd up, False otherwise. """

        # Delay in seconds until a new client is considered synchronized
        SYNC_DELAY = 5
        # If we're in interactive mode, there's nothing to sync.
        if self.factory.interactiveMode:
            return True
        else:
            return self.uptime() > SYNC_DELAY

    def matchesLocation(self, location):
        """ True if the provided location matches this client's location. """
        return location in ("*", self.clientLocation)

    def matchesID(self, ID):
        """ True if the provided ID matches this client's ID. """
        return ID in ("*", self.clientID)

    def connectionMade(self):
        """ Callback inherited from Twisted. """

        self.logger.info("connected")
        self.factory.client = self
        self.timeAtConnect = time.time()
        if not self.factory.interactiveMode:
            # When we first connect we should sync our settings with the
            # other clients in the room.
            for setting in self.kinectController.settings.keys():
                message = "get %s * %s" % (self.clientLocation, setting)
                self.sendLine(message)

    def connectionLost(self, reason):
        """ Callback inherited from Twisted. """

        self.logger.info("disconnected.")
        reason.printTraceback()



    def upload(self):
        """ Upload tar files in DATA_DIR"""
        now = time.time()
        # TODO test strict condition
        if self.lastUploadTime != 0:
        #if now - self.lastUploadTime < 60:
            self.logger.info("Skip this upload task because the last one is running")
            return
        # get the file list to upload in this turn
        self.uploadFileList = self.__getUploadFileList()
        if len(self.uploadFileList) > 0:
            self.__uploadFile(self.uploadFileList.pop(0))
        """ Manage backup files """
        self.__clearOldBackupFiles()

    def __clearOldBackupFiles(self):
        """ Remove all outofdate backup files """
        for f in os.listdir(BACKUP_DIR):
            if f.endswith(".tar") and self.__outofdate(f):
                filepath = os.path.join(BACKUP_DIR, f)
                os.remove(filepath)
                self.logger.debug("remove outofdate file: " + f)

    def __outofdate(self, filename):
        """ Return True if the file is out of date """
        currDatetime = datetime.fromtimestamp(time.time())
        try:
            fileDatetime = datetime.strptime(filename[:-4], "%Y%m%d%H%M")
        except ValueError:
            return True
        diffInSecs = (currDatetime - fileDatetime).total_seconds()
        if diffInSecs > BACKUP_SIZE_IN_MINS * 60:
            return True
        return False

    def __getUploadFileList(self):
        filelist = []
        for f in os.listdir(DATA_DIR):
            if f.endswith(".tar") and (self.pendingFile == None or f != self.pendingFile):
                filelist.append(f)
        return filelist

    def __uploadFile(self, filename):
        """ upload file in DATA_DIR """
        if filename.endswith(".tar") and len(filename) == 16:
            self.pendingFile = filename
            filepath = os.path.join(DATA_DIR, filename)
            uploadfile = open(filepath, 'rb')
            uploadsize = os.stat(filepath).st_size

            self.logger.info("upload file: " + filename)
            self.sendLine('%s %s %s %s %s' % ("upload",
                                                   self.clientLocation,
                                                   self.clientID,
                                                   filename,
                                                   uploadsize))
            try:
                self.sender = FileSender()
                self.lastUploadTime = time.time()
                d = self.sender.beginFileTransfer(uploadfile, self.transport, self.__monitor)
                d.addCallback(self.uploadCompleted, filename)
            except RuntimeError as e:
                self.logger.warn("__uploadFile|Unexcepted error:" + str(e))
                traceback.print_exc(file=sys.stdout)

        else:
            self.logger.info("invalid upload file: " + filename)

    def uploadCompleted(self, lastsent, filename):
        self.logger.info("upload done: " + filename)
        self.pendingFile = None
        # move currentUploadFile to backup folder
        srcPath = os.path.join(DATA_DIR, filename)
        dstPath = os.path.join(BACKUP_DIR, filename)
        shutil.move(srcPath, dstPath)
        if len(self.uploadFileList) > 0:
            self.__uploadFile(self.uploadFileList.pop(0))
        else:
            self.lastUploadTime = 0

    def __monitor(self, data):
        """ """
        self.kinectController.file_sent += len(data)
        self.kinectController.total_sent += len(data)

        # Check with controller to see if we've been cancelled and abort
        # if so.
        if self.kinectController.cancel:
            self.logger.warn('FileIOClient.__monitor Cancelling')

            # Need to unregister the producer with the transport or it will
            # wait for it to finish before breaking the connection
            self.transport.unregisterProducer()
            self.transport.loseConnection()

            # Indicate a user cancelled result
            self.result = TransferCancelled('User cancelled transfer')

        return data

    def lineReceived(self, line):
        """ Callback inherited from Twisted. """

        self.logger.info("received '%s'" % line)
        arguments = line.split(" ")

        while len(arguments) < 3:
            arguments.append("*")

        # Respond to pings that were sent to us, whether we're in interactive
        # mode or not.

        if self.matchesLocation(arguments[1]) and self.matchesID(arguments[2]):
            if arguments[0] == "ping":
                message = "pong %s %s uptime %d" % \
                    (self.clientLocation, self.clientID, self.uptime())
                self.sendLine(message)

        # Otherwise, only respond if we're not in interactive mode.

        if not self.factory.interactiveMode:

            # When the connection was first established, we queried every client
            # in the room. Copy them if we're not yet considered sync'd.

            if arguments[0] == "response" and len(arguments) == 5:
                sourceLocation = arguments[1]
                if self.matchesLocation(sourceLocation) and not self.synced():
                    setting = arguments[3]
                    value = arguments[4]
                    self.kinectController.set(setting, value)

            # At this point assume we're receiving a command. (Invalid commands
            # are ignored anyway.)

            command = arguments[0].lower()

            if self.matchesLocation(arguments[1]) and self.matchesID(arguments[2]):

                if command == "get":

                    if len(arguments) < 4 or arguments[3] == "*":
                        settings = self.kinectController.settings.keys()
                    else:
                        settings = [arguments[3].lower()]

                    for setting in settings:
                        value = self.kinectController.get(setting)
                        if value is not None and self.synced():
                            message = "response %s %s %s %d" % (self.clientLocation, self.clientID, setting, value)
                            self.sendLine(message)

                elif command == "set" and len(arguments) == 5:
                    setting = arguments[3].lower()
                    value = arguments[4]
                    self.kinectController.set(setting, value)

        # reactor.iterate takes care of pending socket receives and sends
        reactor.iterate()
Example #41
def cbStore(consumer, filename):
    fs = FileSender()
    d = fs.beginFileTransfer(open(filename, 'r'), consumer)
    d.addCallback(lambda _: consumer.finish()).addErrback(fail, "cbStore")
    return d
Example #42
 def registerWithConsumer(consumer):
     producer = FileSender()
     producer.beginFileTransfer(fileToSend, consumer)
Example #43
 def lineReceived(self, data):
         print "server: ", data
         
         if self.state == "SESSION":
                 if g_data.get_sessions().count(data):
                         self.session_name = data
                         self.state = "IDLE"
                         self.sendLine("OK")
                 else:
                         self.error(data + " doesn't exist on server")
         elif self.state == "SNAPSHOT":
                 if g_data.get_snaps(self.session_name, g_data.get_user()).count(data):
                         filename = g_data.get_session_path(self.session_name)+data+'.ardour'
                         print filename
                         self.sendLine(str(os.stat(filename).st_size))
                         self.sendLine("OK")
                         self.file = open(filename, 'r')
                         file_sender = FileSender()
                         cb = file_sender.beginFileTransfer(self.file, self.transport)
                         cb.addCallback(self.file_done)
                 else:
                         self.error("snapshot: " + data + " doesn't exist on server")
         elif self.state == "SOUNDFILE" or self.state == "SOUNDFILE_HEADER":
                 if g_data.get_sounds(self.session_name, g_data.get_user()).count(data):
                         filename = g_data.get_session_path(self.session_name)+"/interchange/"+self.session_name+"/audiofiles/"+data
                         print filename
                         if self.state == "SOUNDFILE":
                                 self.sendLine(str(os.stat(filename).st_size))
                         else:        #SOUNDFILE_HEADER
                                 header_size = get_header_size(filename)
                                 if header_size:
                                         self.sendLine(str(header_size))
                                 else:
                                         self.error('soundfile: ' + data + 'doesn\'t have "data" chunk')
                         self.sendLine("OK")
                         self.file = open(filename, 'r')
                         if self.state == "SOUNDFILE":
                                 file_sender = FileSender()
                                 cb = file_sender.beginFileTransfer(self.file, self.transport)
                         else:        # SOUNDFILE_HEADER
                                 file_sender = FileSenderLimited()
                                 cb = file_sender.beginFileTransfer(self.file, self.transport, header_size)
                         cb.addCallback(self.file_done)
                 else:
                         self.error("soundfile: " + data + "doesn't exist on server")
         elif self.state == "SOUNDFILE_SIZE":
                 if g_data.get_sounds(self.session_name, g_data.get_user()).count(data):
                         filename = g_data.get_session_path(self.session_name)+"/sounds/"+data
                         print filename
                         self.sendLine(str(os.stat(filename).st_size))
                         self.state = "IDLE"
         elif data == "SESSION":
                 self.state = "SESSION"
         elif data == "SNAPS":
                 self.state = "SNAPS"
                 for snap in g_data.get_snaps(self.session_name, g_data.get_user()):
                         self.sendLine(snap)
                 self.sendLine("OK")
                 self.state = "IDLE"
         elif data == "SNAPSHOT":
                 self.state = "SNAPSHOT"
         elif data == "SOUNDFILE":
                 self.state = "SOUNDFILE"
         elif data == "SOUNDFILE_HEADER":
                 self.state = "SOUNDFILE_HEADER"
         elif data == "SOUNDFILE_SIZE":
                 self.state = "SOUNDFILE_SIZE"
Example #44
class BlobRequestHandler(object):
    implements(IQueryHandler, IBlobSender)

    def __init__(self, blob_manager, wallet, payment_rate_manager):
        self.blob_manager = blob_manager
        self.payment_rate_manager = payment_rate_manager
        self.wallet = wallet
        self.query_identifiers = ['blob_data_payment_rate', 'requested_blob']
        self.peer = None
        self.blob_data_payment_rate = None
        self.read_handle = None
        self.currently_uploading = None
        self.file_sender = None
        self.blob_bytes_uploaded = 0

    ######### IQueryHandler #########

    def register_with_request_handler(self, request_handler, peer):
        self.peer = peer
        request_handler.register_query_handler(self, self.query_identifiers)
        request_handler.register_blob_sender(self)

    def handle_queries(self, queries):
        response = {}
        if self.query_identifiers[0] in queries:
            if not self.handle_blob_data_payment_rate(queries[self.query_identifiers[0]]):
                    response['blob_data_payment_rate'] = "RATE_TOO_LOW"
            else:
                response['blob_data_payment_rate'] = 'RATE_ACCEPTED'

        if self.query_identifiers[1] in queries:
            log.debug("Received the client's request to send a blob")
            response_fields = {}
            response['incoming_blob'] = response_fields

            if self.blob_data_payment_rate is None:
                response_fields['error'] = "RATE_UNSET"
                return defer.succeed(response)
            else:

                d = self.blob_manager.get_blob(queries[self.query_identifiers[1]], True)

                def open_blob_for_reading(blob):
                    if blob.is_validated():
                        read_handle = blob.open_for_reading()
                        if read_handle is not None:
                            self.currently_uploading = blob
                            self.read_handle = read_handle
                            log.debug("Sending %s to client", str(blob))
                            response_fields['blob_hash'] = blob.blob_hash
                            response_fields['length'] = blob.length
                            return response
                    log.debug("We can not send %s", str(blob))
                    response_fields['error'] = "BLOB_UNAVAILABLE"
                    return response

                d.addCallback(open_blob_for_reading)

                return d
        else:
            return defer.succeed(response)

    ######### IBlobSender #########

    def send_blob_if_requested(self, consumer):
        if self.currently_uploading is not None:
            return self.send_file(consumer)
        return defer.succeed(True)

    def cancel_send(self, err):
        if self.currently_uploading is not None:
            self.currently_uploading.close_read_handle(self.read_handle)
        self.read_handle = None
        self.currently_uploading = None
        return err

    ######### internal #########

    def handle_blob_data_payment_rate(self, requested_payment_rate):
        if not self.payment_rate_manager.accept_rate_blob_data(self.peer, requested_payment_rate):
            return False
        else:
            self.blob_data_payment_rate = requested_payment_rate
            return True

    def send_file(self, consumer):

        def _send_file():
            inner_d = start_transfer()
            # TODO: if the transfer fails, check if it's because the connection was cut off.
            # TODO: if so, perhaps bill the client
            inner_d.addCallback(lambda _: set_expected_payment())
            inner_d.addBoth(set_not_uploading)
            return inner_d

        def count_bytes(data):
            self.blob_bytes_uploaded += len(data)
            self.peer.update_stats('blob_bytes_uploaded', len(data))
            return data

        def start_transfer():
            self.file_sender = FileSender()
            log.info("Starting the file upload")
            assert self.read_handle is not None, "self.read_handle was None when trying to start the transfer"
            d = self.file_sender.beginFileTransfer(self.read_handle, consumer, count_bytes)
            return d

        def set_expected_payment():
            log.info("Setting expected payment")
            if self.blob_bytes_uploaded != 0 and self.blob_data_payment_rate is not None:
                # TODO: explain why 2**20
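                # Presumably 2**20 bytes = 1 MiB here: the payment rate appears to
                # be quoted per megabyte, so the blob length (in bytes) is scaled
                # to MiB before multiplying by the rate.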
                self.wallet.add_expected_payment(self.peer,
                                                 self.currently_uploading.length * 1.0 *
                                                 self.blob_data_payment_rate / 2**20)
                self.blob_bytes_uploaded = 0
            self.peer.update_stats('blobs_uploaded', 1)
            return None

        def set_not_uploading(reason=None):
            if self.currently_uploading is not None:
                self.currently_uploading.close_read_handle(self.read_handle)
                self.read_handle = None
                self.currently_uploading = None
            self.file_sender = None
            if reason is not None and isinstance(reason, Failure):
                log.info("Upload has failed. Reason: %s", reason.getErrorMessage())

        return _send_file()
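
# Illustrative note, not part of the original example: a handler like the one
# above is wired into a peer connection roughly as follows (assuming the class
# is named BlobRequestHandler, as in the next example, and that request_handler
# and peer provide the interfaces used above):
#
#     handler = BlobRequestHandler(blob_manager, wallet, payment_rate_manager)
#     handler.register_with_request_handler(request_handler, peer)
#     # queries arriving on the connection are routed to handle_queries(), and
#     # send_blob_if_requested(consumer) streams the blob once one has been
#     # opened for reading.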
Example #45
class BlobRequestHandler(object):
    implements(IQueryHandler, IBlobSender)
    PAYMENT_RATE_QUERY = 'blob_data_payment_rate'
    BLOB_QUERY = 'requested_blob'
    AVAILABILITY_QUERY = 'requested_blobs'

    def __init__(self, blob_manager, wallet, payment_rate_manager, track):
        self.blob_manager = blob_manager
        self.payment_rate_manager = payment_rate_manager
        self.wallet = wallet
        self.query_identifiers = [self.PAYMENT_RATE_QUERY, self.BLOB_QUERY, self.AVAILABILITY_QUERY]
        self.track = track
        self.peer = None
        self.blob_data_payment_rate = None
        self.read_handle = None
        self.currently_uploading = None
        self.file_sender = None
        self.blob_bytes_uploaded = 0
        self._blobs_requested = []

    ######### IQueryHandler #########

    def register_with_request_handler(self, request_handler, peer):
        self.peer = peer
        request_handler.register_query_handler(self, self.query_identifiers)
        request_handler.register_blob_sender(self)

    def handle_queries(self, queries):
        response = defer.succeed({})
        log.debug("Handle query: %s", str(queries))

        if self.AVAILABILITY_QUERY in queries:
            self._blobs_requested = queries[self.AVAILABILITY_QUERY]
            response.addCallback(lambda r: self._reply_to_availability(r, self._blobs_requested))
        if self.PAYMENT_RATE_QUERY in queries:
            offered_rate = queries[self.PAYMENT_RATE_QUERY]
            offer = Offer(offered_rate)
            if offer.rate is None:
                log.warning("Empty rate offer")
            response.addCallback(lambda r: self._handle_payment_rate_query(offer, r))
        if self.BLOB_QUERY in queries:
            incoming = queries[self.BLOB_QUERY]
            response.addCallback(lambda r: self._reply_to_send_request(r, incoming))
        return response

    ######### IBlobSender #########

    def send_blob_if_requested(self, consumer):
        if self.currently_uploading is not None:
            return self.send_file(consumer)
        return defer.succeed(True)

    def cancel_send(self, err):
        if self.currently_uploading is not None:
            self.currently_uploading.close_read_handle(self.read_handle)
        self.read_handle = None
        self.currently_uploading = None
        return err

    ######### internal #########

    def _reply_to_availability(self, request, blobs):
        d = self._get_available_blobs(blobs)

        def set_available(available_blobs):
            log.debug("available blobs: %s", str(available_blobs))
            request.update({'available_blobs': available_blobs})
            return request

        d.addCallback(set_available)
        return d

    def _handle_payment_rate_query(self, offer, request):
        blobs = self._blobs_requested
        log.debug("Offered rate %f LBC/mb for %i blobs", offer.rate, len(blobs))
        reply = self.payment_rate_manager.reply_to_offer(self.peer, blobs, offer)
        if reply.is_accepted:
            self.blob_data_payment_rate = offer.rate
            request[self.PAYMENT_RATE_QUERY] = "RATE_ACCEPTED"
            log.debug("Accepted rate: %f", offer.rate)
        elif reply.is_too_low:
            request[self.PAYMENT_RATE_QUERY] = "RATE_TOO_LOW"
            log.debug("Reject rate: %f", offer.rate)
        elif reply.is_unset:
            log.warning("Rate unset")
            request['incoming_blob'] = {'error': 'RATE_UNSET'}
        log.debug("Returning rate query result: %s", str(request))

        return request

    def _handle_blob_query(self, response, query):
        log.debug("Received the client's request to send a blob")
        response['incoming_blob'] = {}

        if self.blob_data_payment_rate is None:
            response['incoming_blob'] = {'error': "RATE_UNSET"}
            return response
        else:
            return self._send_blob(response, query)

    def _send_blob(self, response, query):
        d = self.blob_manager.get_blob(query, True)
        d.addCallback(self.open_blob_for_reading, response)
        return d

    def open_blob_for_reading(self, blob, response):
        response_fields = {}
        d = defer.succeed(None)
        if blob.is_validated():
            read_handle = blob.open_for_reading()
            if read_handle is not None:
                self.currently_uploading = blob
                self.read_handle = read_handle
                log.info("Sending %s to client", str(blob))
                response_fields['blob_hash'] = blob.blob_hash
                response_fields['length'] = blob.length
                response['incoming_blob'] = response_fields
                d.addCallback(lambda _: self.record_transaction(blob))
                d.addCallback(lambda _: response)
                return d
        log.debug("We can not send %s", str(blob))
        response['incoming_blob'] = {'error': 'BLOB_UNAVAILABLE'}
        d.addCallback(lambda _: response)
        return d

    def record_transaction(self, blob):
        d = self.blob_manager.add_blob_to_upload_history(
            str(blob), self.peer.host, self.blob_data_payment_rate)
        return d

    def _reply_to_send_request(self, response, incoming):
        response_fields = {}
        response['incoming_blob'] = response_fields

        if self.blob_data_payment_rate is None:
            log.debug("Rate not set yet")
            response['incoming_blob'] = {'error': 'RATE_UNSET'}
            return defer.succeed(response)
        else:
            log.debug("Requested blob: %s", str(incoming))
            d = self.blob_manager.get_blob(incoming, True)
            d.addCallback(lambda blob: self.open_blob_for_reading(blob, response))
            return d

    def _get_available_blobs(self, requested_blobs):
        d = self.blob_manager.completed_blobs(requested_blobs)
        return d

    def send_file(self, consumer):

        def _send_file():
            inner_d = start_transfer()
            # TODO: if the transfer fails, check if it's because the connection was cut off.
            # TODO: if so, perhaps bill the client
            inner_d.addCallback(lambda _: set_expected_payment())
            inner_d.addBoth(set_not_uploading)
            return inner_d

        def count_bytes(data):
            uploaded = len(data)
            self.blob_bytes_uploaded += uploaded
            self.peer.update_stats('blob_bytes_uploaded', uploaded)
            self.track.add_observation(analytics.BLOB_BYTES_UPLOADED, uploaded)
            return data

        def start_transfer():
            self.file_sender = FileSender()
            log.debug("Starting the file upload")
            assert self.read_handle is not None, \
                "self.read_handle was None when trying to start the transfer"
            d = self.file_sender.beginFileTransfer(self.read_handle, consumer, count_bytes)
            return d

        def set_expected_payment():
            log.debug("Setting expected payment")
            if self.blob_bytes_uploaded != 0 and self.blob_data_payment_rate is not None:
                # TODO: explain why 2**20
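                # 2**20 bytes = 1 MiB: the offered rate is quoted per megabyte
                # (see the "LBC/mb" log above), so the blob length in bytes is
                # converted to MiB before multiplying by the rate.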
                self.wallet.add_expected_payment(self.peer,
                                                 self.currently_uploading.length * 1.0 *
                                                 self.blob_data_payment_rate / 2**20)
                self.blob_bytes_uploaded = 0
            self.peer.update_stats('blobs_uploaded', 1)
            return None

        def set_not_uploading(reason=None):
            if self.currently_uploading is not None:
                self.currently_uploading.close_read_handle(self.read_handle)
                self.read_handle = None
                self.currently_uploading = None
            self.file_sender = None
            if reason is not None and isinstance(reason, Failure):
                log.warning("Upload has failed. Reason: %s", reason.getErrorMessage())

        return _send_file()
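
# Illustrative usage sketch, not from the original source: handle_queries()
# chains every reply onto a single Deferred that resolves to the response dict.
# The rate value below is a made-up example:
#
#     handler = BlobRequestHandler(blob_manager, wallet, payment_rate_manager, track)
#     handler.register_with_request_handler(request_handler, peer)
#     d = handler.handle_queries({'blob_data_payment_rate': 0.005})
#     d.addCallback(lambda response: ...)  # e.g. {'blob_data_payment_rate': 'RATE_ACCEPTED'}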
Example #46
class TwistedWebDAVHandler:

    _producer = None
    _requestFinished = False

    def __init__(self, reactor, threadpool, publish_module, request):
        self.reactor = reactor
        self.threadpool = threadpool
        self.publish_module = publish_module
        self.request = request
        self.environ = make_webdav_environ(request)
        self.status = None
        self.headers = None

        self.request.setHeader("server",
                               self.environ["SERVER_SOFTWARE"].encode("utf-8"))
        self.request.notifyFinish().addBoth(self._finished)

    def _finished(self, ignored):
        """
        Record the end of the response generation for the request being
        serviced.
        """
        self._requestFinished = True
        if IProducer.providedBy(self._producer):
            if self._producer.file is not None:
                self._producer.file.close()
            self._producer.stopProducing()

    def start(self):
        d = threads.deferToThread(
            self.publish_module,
            self.environ,
            lambda *args: self.reactor.callFromThread(self.start_response, *args),
            _request_factory=HTTPRequest,
        )
        d.addCallback(self.finish_response)

    def start_response(self, status, headers, excInfo=None):
        self.status = status
        self.headers = headers

    def finish_response(self, app_iter):
        code, message = self.status.split(None, 1)
        code = int(code)
        self.request.setResponseCode(code, _wsgiStringToBytes(message))

        for name, value in self.headers:
            # Don't allow the application to control these required headers.
            if name.lower() not in ("server", "date"):
                self.request.responseHeaders.addRawHeader(
                    _wsgiStringToBytes(name), _wsgiStringToBytes(value))

        if (isinstance(app_iter, _FILE_TYPES)
                or IUnboundStreamIterator.providedBy(app_iter)):
            if not self._requestFinished:
                self._producer = FileSender()
                d = self._producer.beginFileTransfer(app_iter, self.request)
                d.addBoth(lambda *args: self.stop())
        else:
            for elem in app_iter:
                if not self._requestFinished:
                    self.request.write(elem)
            self.stop()

    def stop(self):
        if IProducer.providedBy(self._producer):
            if self._producer.file is not None:
                self._producer.file.close()
            self._producer = None
        if not self._requestFinished:
            self.request.finish()
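
# Background note on the Twisted API used throughout these examples:
# twisted.protocols.basic.FileSender is a simple pull producer.
# beginFileTransfer(file, consumer, transform=None) registers the producer with
# the consumer and returns a Deferred that fires after the whole file has been
# written to the consumer (or errbacks on failure), which is why the handlers
# above chain their cleanup with addCallback/addBoth on that Deferred.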