def create_lbry_file(blob_manager, storage, payment_rate_manager, lbry_file_manager,
                     file_name, file_handle, key=None, iv_generator=None):
    """Turn a plain file into an LBRY File.

    An LBRY File is a collection of encrypted blobs of data and the metadata
    that binds them together which, when decrypted and put back together
    according to the metadata, results in the original file.

    The stream parameters that aren't specified are generated, the file is
    read and broken into chunks and encrypted, and then a stream descriptor
    file with the stream parameters and other metadata is written to disk.

    @param blob_manager: manages the encrypted blobs created for the stream

    @param storage: persistence layer; the finished stream is saved through
        its store_stream() method

    @param payment_rate_manager: supplies min_blob_data_payment_rate for the
        published file

    @param lbry_file_manager: The EncryptedFileManager object this LBRY File
        will be added to.
    @type lbry_file_manager: EncryptedFileManager

    @param file_name: The path to the plain file.
    @type file_name: string

    @param file_handle: The file-like object to read
    @type file_handle: any file-like object which can be read by
        twisted.protocols.basic.FileSender

    @param key: the raw AES key which will be used to encrypt the blobs. If
        None, a random key will be generated.
    @type key: string

    @param iv_generator: a generator which yields initialization vectors for
        the blobs. Will be called once for each blob.
    @type iv_generator: a generator function which yields strings

    @return: a Deferred which fires with the stream_hash of the LBRY File
    @rtype: Deferred which fires with hex-encoded string
    """
    # NOTE(review): this is a generator coroutine (yields Deferreds, ends with
    # defer.returnValue) — presumably decorated with @defer.inlineCallbacks at
    # the definition site; confirm in the full file.
    base_file_name = os.path.basename(file_name)
    # Directory comes from the open handle, not file_name; it is hex-encoded
    # below when registering the published file.
    file_directory = os.path.dirname(file_handle.name)
    lbry_file_creator = EncryptedFileStreamCreator(
        blob_manager, lbry_file_manager, base_file_name, key, iv_generator)
    yield lbry_file_creator.setup()
    # TODO: Using FileSender isn't necessary, we can just read
    # straight from the disk. The stream creation process
    # should be in its own thread anyway so we don't need to
    # worry about interacting with the twisted reactor
    file_sender = FileSender()
    yield file_sender.beginFileTransfer(file_handle, lbry_file_creator)
    log.debug(
        "the file sender has triggered its deferred. stopping the stream writer"
    )
    yield lbry_file_creator.stop()
    log.debug("making the sd blob")
    sd_info = lbry_file_creator.sd_info
    descriptor_writer = BlobStreamDescriptorWriter(blob_manager)
    sd_hash = yield descriptor_writer.create_descriptor(sd_info)
    log.debug("saving the stream")
    yield storage.store_stream(sd_info['stream_hash'], sd_hash,
                               sd_info['stream_name'], sd_info['key'],
                               sd_info['suggested_file_name'], sd_info['blobs'])
    log.debug("adding to the file manager")
    lbry_file = yield lbry_file_manager.add_published_file(
        sd_info['stream_hash'], sd_hash, hexlify(file_directory.encode()),
        payment_rate_manager, payment_rate_manager.min_blob_data_payment_rate)
    defer.returnValue(lbry_file)
def write_to_consumer(self, consumer):
    """Stream ``self.open_file`` into *consumer* and return an awaitable Deferred."""
    sender = FileSender()
    transfer_d = sender.beginFileTransfer(self.open_file, consumer)
    return make_deferred_yieldable(transfer_d)
def upload_file(file, transport):
    """Push *file* over *transport* in 64 KiB chunks.

    Returns the Deferred from FileSender.beginFileTransfer, which fires when
    the whole file has been produced.
    """
    producer = FileSender()
    # 64 KiB per chunk (== 2 ** 16)
    producer.CHUNK_SIZE = 65536
    return producer.beginFileTransfer(file, transport)
def sendfile(consumer, fileObj):
    # Stream fileObj into consumer, reporting per-chunk progress and final
    # success/failure through the put* callbacks.
    # NOTE(review): this function references ``self`` (putProgress, putComplete,
    # putFailed) but is declared without a ``self`` parameter — it is presumably
    # a method whose definition line lost its receiver when pasted; confirm
    # against the original class before use.
    FileSender().beginFileTransfer(
        fileObj, consumer, transform=self.putProgress).addCallback(
        lambda _: consumer.finish()).addCallback(
        self.putComplete).addErrback(self.putFailed)
def cbStore(consumer, filename):
    """Stream the contents of *filename* into *consumer* via a FileSender.

    On completion the consumer's ``finish()`` is called; on error the
    module-level ``fail`` errback is invoked with the tag "cbStore".

    @param consumer: an IConsumer the file bytes are written to
    @param filename: path of the file to send
    @return: the transfer Deferred
    """
    fs = FileSender()
    # Open in binary mode: FileSender forwards raw bytes, and text mode would
    # corrupt the payload (newline translation / decoding).
    f = open(filename, 'rb')
    d = fs.beginFileTransfer(f, consumer)

    def _close(passthrough):
        # Always release the file handle, success or failure, without
        # disturbing the callback chain's result.
        f.close()
        return passthrough

    d.addBoth(_close)
    d.addCallback(lambda _: consumer.finish()).addErrback(fail, "cbStore")
    return d
def write_to_consumer(self, consumer):
    """Begin producing ``self.open_file`` into *consumer*; returns the transfer Deferred."""
    producer = FileSender()
    return producer.beginFileTransfer(self.open_file, consumer)
def __init__(self, filename, transform=None, delay=0, verbose=False):
    """Prepare a file transfer: open *filename* (binary) and set up the producer.

    @param filename: path of the file to send
    @param transform: optional per-chunk transform callable
    @param delay: delay setting used by the transfer logic
    @param verbose: controls logger verbosity
    """
    # Open eagerly so a bad path fails here rather than mid-transfer.
    self.f = open(filename, 'rb')
    self.delay = delay
    self.transform = transform
    self.producer = FileSender()
    self.logger = gogo.Gogo(__name__, verbose=verbose).logger
def startCopying(self, msg):
    # Copy msg['src'] to msg['dst'], publishing a "file-transfer-ack" AMQP
    # message with state 0 (ok) or 1 (error) when done.
    # NOTE(review): this block appears truncated — ``src`` (the FileSender)
    # and ``f`` are created but the actual beginFileTransfer call is not
    # visible here; confirm against the full original.

    def _done(ign, src, dst, f):
        # Success callback: log and ack with state 0.
        log.msg("File copied successfully %(src)s -> %(dst)s" % {
            'src': src,
            'dst': dst
        })
        # self.publish('file-transfer-ack', fileTransferReq, id=msg.id.getValue(), state=0, table=msg.table.getValue())
        self.amqp.send_message(exchange="rb",
                               routing_key="file-transfer-ack",
                               msg=json.dumps({
                                   'id': msg['id'],
                                   'state': 0,
                                   'table': msg['table'],
                               }))

    def _err_done(reason, src, dst, sendreq=None):
        # Error callback: log, optionally ack with state 1, and re-fail so
        # downstream errbacks still see the failure.
        log.msg(
            "File transfer error %(src)s -> %(dst)s. Reason: %(reason)s" % {
                'src': src,
                'dst': dst,
                'reason': reason.getErrorMessage()
            },
            logLevel=logging.ERROR)
        if sendreq:
            #self.publish('file-transfer-ack', fileTransferReq, id=msg.id.getValue(), state=1, table=msg.table.getValue())
            self.amqp.send_message(exchange="rb",
                                   routing_key="file-transfer-ack",
                                   msg=json.dumps({
                                       'id': msg['id'],
                                       'state': 1,
                                       'table': msg['table'],
                                   }))
        return defer.fail(reason)

    # Back-pressure: defer the request when too many copies are in flight.
    if self.working > self.QUEUESIZE:
        self.queue.put(msg)
        return
    self.incpendingqueue(None, msg=msg)
    srcfile = msg['src']
    dstfile = msg['dst']
    s = FilePath(srcfile)
    d = FilePath(dstfile)
    if s.exists() and d.exists():
        # Skip when destination already matches source size (ack state 0).
        if s.getsize() == d.getsize():
            log.msg("Ignore coping, files have the same size: %s -> %s" %
                    (srcfile, dstfile))
            self.decpendingqueue(None, msg=msg)
            #self.publish('file-transfer-ack', fileTransferReq, id=msg.id.getValue(), state=0, table=msg.table.getValue())
            self.amqp.send_message(exchange="rb",
                                   routing_key="file-transfer-ack",
                                   msg=json.dumps({
                                       'id': msg['id'],
                                       'state': 0,
                                       'table': msg['table'],
                                   }))
            return
    src = FileSender()
    try:
        f = open(srcfile, 'r')
    except Exception, e:  # Python 2 syntax; file uses legacy except form
        log.msg(str(e), logLevel=logging.ERROR)
        # f=None
        return _err_done(Failure(e), srcfile, dstfile,
                         'err').addBoth(self.decpendingqueue, msg=msg)
def startCopying(self, msg):
    # Copy msg.src to msg.dst, publishing a "file-transfer-ack" with state 0
    # (ok) or 1 (error). Variant of the AMQP version above that acks via
    # self.publish and reads fields through .getValue().
    # NOTE(review): this block appears truncated — ``src`` (the FileSender)
    # and ``f`` are created but the actual beginFileTransfer call is not
    # visible here; confirm against the full original.

    def _done(ign, src, dst, f):
        # Success callback: log and ack with state 0.
        log.msg("File copied successfully %(src)s -> %(dst)s" % {
            'src': src,
            'dst': dst
        })
        self.publish('file-transfer-ack', fileTransferReq,
                     id=msg.id.getValue(), state=0, table=msg.table.getValue())

    def _err_done(reason, src, dst, sendreq=None):
        # Error callback: log, optionally ack with state 1, and re-fail.
        log.msg(
            "File transfer error %(src)s -> %(dst)s. Reason: %(reason)s" % {
                'src': src,
                'dst': dst,
                'reason': reason.getErrorMessage()
            },
            logLevel=logging.ERROR)
        if sendreq:
            self.publish('file-transfer-ack', fileTransferReq,
                         id=msg.id.getValue(), state=1,
                         table=msg.table.getValue())
        return defer.fail(reason)

    # Back-pressure: defer the request when too many copies are in flight.
    if self.working > self.QUEUESIZE:
        self.queue.put(msg)
        return
    self.incpendingqueue(None, msg=msg)
    srcfile = msg.src.getValue()
    dstfile = msg.dst.getValue()
    s = FilePath(srcfile)
    d = FilePath(dstfile)
    if s.exists() and d.exists():
        # Skip when destination already matches source size (ack state 0).
        if s.getsize() == d.getsize():
            log.msg("Ignore coping, files have the same size: %s -> %s" %
                    (srcfile, dstfile))
            self.decpendingqueue("SAME_SIZE", msg=msg)
            self.publish('file-transfer-ack', fileTransferReq,
                         id=msg.id.getValue(), state=0,
                         table=msg.table.getValue())
            return
    src = FileSender()
    # Chunk size is configurable as a power of two via the transport config.
    if self.conf.get('transport', 'CHUNK_SIZE_POWER'):
        src.CHUNK_SIZE = 2**int(
            self.conf.get('transport', 'CHUNK_SIZE_POWER'))
    try:
        f = open(srcfile, 'r')
    except Exception, e:  # Python 2 syntax; file uses legacy except form
        log.msg(str(e), logLevel=logging.ERROR)
        # f=None
        return _err_done(Failure(e), srcfile, dstfile,
                         'err').addBoth(self.decpendingqueue, msg=msg)
def lineReceived(self, data):
    # Line-based protocol state machine (Python 2: note the print statements).
    # In "IDLE"-like states a command line selects the next state; in the
    # other states the line is interpreted as a session/snapshot/sound name.
    # NOTE(review): indentation was reconstructed from a flattened paste —
    # branch nesting (especially where state is reset to "IDLE") should be
    # confirmed against the original file.
    print "server: ", data
    if self.state == "SESSION":
        # Expecting a session name; switch to IDLE on success.
        if g_data.get_sessions().count(data):
            self.session_name = data
            self.state = "IDLE"
            self.sendLine("OK")
        else:
            self.error(data + " doesn't exist on server")
    elif self.state == "SNAPSHOT":
        # Expecting a snapshot name; send its size then stream the .ardour file.
        if g_data.get_snaps(self.session_name, g_data.get_user()).count(data):
            filename = g_data.get_session_path(self.session_name)+data+'.ardour'
            print filename
            self.sendLine(str(os.stat(filename).st_size))
            self.sendLine("OK")
            self.file = open(filename, 'r')
            file_sender = FileSender()
            cb = file_sender.beginFileTransfer(self.file, self.transport)
            cb.addCallback(self.file_done)
        else:
            self.error("snapshot: " + data + " doesn't exist on server")
    elif self.state == "SOUNDFILE" or self.state == "SOUNDFILE_HEADER":
        # Expecting a sound file name; SOUNDFILE streams the whole file,
        # SOUNDFILE_HEADER streams only up to the header size.
        if g_data.get_sounds(self.session_name, g_data.get_user()).count(data):
            filename = g_data.get_session_path(self.session_name)+"/sounds/"+data
            print filename
            if self.state == "SOUNDFILE":
                self.sendLine(str(os.stat(filename).st_size))
            else:
                #SOUNDFILE_HEADER
                header_size = get_header_size(filename)
                if header_size:
                    self.sendLine(str(header_size))
                else:
                    self.error('soundfile: ' + data + 'doesn\'t have "data" chunk')
            self.sendLine("OK")
            self.file = open(filename, 'r')
            if self.state == "SOUNDFILE":
                file_sender = FileSender()
                cb = file_sender.beginFileTransfer(self.file, self.transport)
            else:
                # SOUNDFILE_HEADER: length-limited sender stops after the header.
                file_sender = FileSenderLimited()
                cb = file_sender.beginFileTransfer(self.file, self.transport,
                                                   header_size)
            cb.addCallback(self.file_done)
        else:
            self.error("soundfile: " + data + "doesn't exist on server")
    elif self.state == "SOUNDFILE_SIZE":
        # Report the size only; no transfer.
        if g_data.get_sounds(self.session_name, g_data.get_user()).count(data):
            filename = g_data.get_session_path(self.session_name)+"/sounds/"+data
            print filename
            self.sendLine(str(os.stat(filename).st_size))
        self.state = "IDLE"
    elif data == "SESSION":
        self.state = "SESSION"
    elif data == "SNAPS":
        # List all snapshots immediately, then return to IDLE.
        self.state = "SNAPS"
        for snap in g_data.get_snaps(self.session_name,
                                     g_data.get_user()):
            self.sendLine(snap)
        self.sendLine("OK")
        self.state = "IDLE"
    elif data == "SNAPSHOT":
        self.state = "SNAPSHOT"
    elif data == "SOUNDFILE":
        self.state = "SOUNDFILE"
    elif data == "SOUNDFILE_HEADER":
        self.state = "SOUNDFILE_HEADER"
    elif data == "SOUNDFILE_SIZE":
        self.state = "SOUNDFILE_SIZE"