def test_transfer(self):
    """
    L{basic.FileSender} writes the content of its input file to the
    given C{IConsumer} via C{beginFileTransfer}, and the returned
    L{Deferred} fires with the last byte that was sent.
    """
    data = BytesIO(b"Test content")
    transport = proto_helpers.StringTransport()
    producer = basic.FileSender()
    transferred = producer.beginFileTransfer(data, transport)
    producer.resumeProducing()
    # Completion is only detected by a read that hits EOF, so one more
    # resumeProducing call is needed to finish the transfer.
    producer.resumeProducing()
    self.assertIsNone(transport.producer)
    self.assertEqual(self.successResultOf(transferred), b"t")
    self.assertEqual(transport.value(), b"Test content")
def test_abortedTransfer(self):
    """
    The C{Deferred} returned by L{basic.FileSender.beginFileTransfer}
    fails with an C{Exception} if C{stopProducing} is called before the
    transfer is complete.
    """
    data = BytesIO(b"Test content")
    transport = proto_helpers.StringTransport()
    producer = basic.FileSender()
    transferred = producer.beginFileTransfer(data, transport)
    # Cancel immediately, before any chunk has been produced.
    producer.stopProducing()
    failure = self.failureResultOf(transferred)
    failure.trap(Exception)
    self.assertEqual(
        str(failure.value), "Consumer asked us to stop producing")
def _send_file(self):
    """
    Stream the open file C{self._fd_to_send} to the peer over a transit
    connection, then wait for and validate the receiver's ack record.

    Runs as an inlineCallbacks-style generator (each ``yield`` awaits a
    Deferred).  Raises L{TransferError} if the receiver reports failure
    or returns a mismatching sha256 hash.
    """
    ts = self._transit_sender
    # Measure the file size by seeking to the end, then rewind so the
    # transfer starts from the first byte.
    self._fd_to_send.seek(0,2)
    filesize = self._fd_to_send.tell()
    self._fd_to_send.seek(0,0)
    record_pipe = yield ts.connect()
    self._timing.add("transit connected")
    # record_pipe should implement IConsumer, chunks are just records
    stderr = self._args.stderr
    print(u"Sending (%s).." % record_pipe.describe(), file=stderr)
    hasher = hashlib.sha256()
    progress = tqdm(file=stderr, disable=self._args.hide_progress,
                    unit="B", unit_scale=True, total=filesize)
    def _count_and_hash(data):
        # FileSender "transform": hash and count each chunk on the way
        # through, returning it unchanged.
        hasher.update(data)
        progress.update(len(data))
        return data
    fs = basic.FileSender()
    with self._timing.add("tx file"):
        with progress:
            yield fs.beginFileTransfer(self._fd_to_send, record_pipe,
                                       transform=_count_and_hash)
    expected_hash = hasher.digest()
    expected_hex = bytes_to_hexstr(expected_hash)
    print(u"File sent.. waiting for confirmation", file=stderr)
    with self._timing.add("get ack") as t:
        ack_bytes = yield record_pipe.receive_record()
        record_pipe.close()
        ack = bytes_to_dict(ack_bytes)
        ok = ack.get(u"ack", u"")
        if ok != u"ok":
            t.detail(ack="failed")
            raise TransferError("Transfer failed (remote says: %r)" % ack)
        # The hash check is optional: older receivers may not send one.
        if u"sha256" in ack:
            if ack[u"sha256"] != expected_hex:
                t.detail(datahash="failed")
                raise TransferError("Transfer failed (bad remote hash)")
        print(u"Confirmation received. Transfer complete.", file=stderr)
        t.detail(ack="ok")
def read(self, consumer, offset=0, size=None):
    """
    Push a slice of the cached upload data into C{consumer}.

    @param offset: byte position to start reading from.
    @param size: number of bytes to deliver, or C{None} for "to the end".
    @return: a Deferred that fires with C{consumer} once everything has
        been written.
    """
    # A slice end of None means "through the last byte".
    end = None if size is None else offset + size
    data = self.u.data[offset:end]
    # We use twisted.protocols.basic.FileSender, which only does
    # non-streaming, i.e. PullProducer, where the receiver/consumer must
    # ask explicitly for each chunk of data. There are only two places in
    # the Twisted codebase that can't handle streaming=False, both of
    # which are in the upload path for an FTP/SFTP server
    # (protocols.ftp.FileConsumer and
    # vfs.adapters.ftp._FileToConsumerAdapter), neither of which is
    # likely to be used as the target for a Tahoe download.
    deferred = basic.FileSender().beginFileTransfer(StringIO(data), consumer)
    deferred.addCallback(lambda lastSent: consumer)
    return deferred
def test_transferWithTransform(self):
    """
    The C{transform} argument to L{basic.FileSender.beginFileTransfer}
    is applied to every chunk of data before it reaches the consumer.
    """
    def swap(chunk):
        return chunk.swapcase()

    data = BytesIO(b"Test content")
    transport = proto_helpers.StringTransport()
    producer = basic.FileSender()
    transferred = producer.beginFileTransfer(data, transport, swap)
    producer.resumeProducing()
    # Completion is only detected by a read that hits EOF, so one more
    # resumeProducing call is needed to finish the transfer.
    producer.resumeProducing()
    self.assertEqual(self.successResultOf(transferred), b"T")
    self.assertEqual(transport.value(), b"tEST CONTENT")
def reseed_file(input_file, sd_blob): sd_blob = SdBlob.new_instance(sd_blob) db_dir = conf.settings['data_dir'] blobfile_dir = os.path.join(db_dir, "blobfiles") announcer = HashAnnouncer.DummyHashAnnouncer() blob_manager = BlobManager.DiskBlobManager(announcer, blobfile_dir, db_dir) yield blob_manager.setup() creator = CryptStreamCreator.CryptStreamCreator(blob_manager, None, sd_blob.key(), sd_blob.iv_generator()) file_sender = basic.FileSender() with open(input_file) as f: yield file_sender.beginFileTransfer(f, creator) yield creator.stop() for blob_info in sd_blob.blob_infos(): if 'blob_hash' not in blob_info: # the last blob is always empty and without a hash continue blob = yield blob_manager.get_blob(blob_info['blob_hash'], True) if not blob.verified: print "Blob {} is not verified".format(blob)
def test_transferMultipleChunks(self):
    """
    L{basic.FileSender} writes at most C{CHUNK_SIZE} bytes to its
    consumer each time it is asked to resume producing.
    """
    data = BytesIO(b"Test content")
    transport = proto_helpers.StringTransport()
    producer = basic.FileSender()
    producer.CHUNK_SIZE = 4
    transferred = producer.beginFileTransfer(data, transport)
    # Ideally we would assertNoResult(transferred) here, but
    # <http://tm.tl/6291>
    for expected in (b"Test", b"Test con", b"Test content"):
        producer.resumeProducing()
        self.assertEqual(transport.value(), expected)
    # Completion is only detected by a read that hits EOF, so one more
    # resumeProducing call is needed to finish the transfer.
    producer.resumeProducing()
    self.assertEqual(self.successResultOf(transferred), b"t")
    self.assertEqual(transport.value(), b"Test content")
# NOTE(review): this chunk begins mid-method -- the try/except below is the
# tail of articleWork(); its "def" line is outside this view.  Python 2
# syntax throughout (tuple-unpacking parameters in _gotArticle).
        try:
            # Parse the article number and hand it to the backend lookup.
            article = int(article)
            return func(self.currentGroup, article)
        except ValueError:
            self.sendLine('501 command syntax error')

    def do_ARTICLE(self, article=None):
        """
        Handle the NNTP ARTICLE command: look the article up through the
        backend and deliver it via _gotArticle / _errArticle callbacks.
        """
        defer = self.articleWork(article, 'ARTICLE', self.factory.backend.articleRequest)
        if defer:
            defer.addCallbacks(self._gotArticle, self._errArticle)

    def _gotArticle(self, (index, id, article)):
        # Successful lookup: announce the article (NNTP 220) and stream
        # its file object to the client with a FileSender.
        self.currentIndex = index
        self.sendLine('220 %d %s article' % (index, id))
        s = basic.FileSender()
        d = s.beginFileTransfer(article, self.transport)
        d.addCallback(self.finishedFileTransfer)

    ##
    ## Helper for FileSender
    ##
    def finishedFileTransfer(self, lastsent):
        """
        Terminate the article body with the NNTP end-of-text marker,
        adding a CRLF first if the article did not end with a newline.
        """
        if lastsent != '\n':
            line = '\r\n.'
        else:
            line = '.'
        self.sendLine(line)
    ##
def connectionMade(self):
    """
    As soon as the connection is up, stream the junk file to the peer,
    then drop the connection.  Errors are routed to C{failed}.
    """
    junk = open(self.junkPath, 'rb')
    d = basic.FileSender().beginFileTransfer(junk, self.transport)
    d.addErrback(failed)
    d.addCallback(lambda ign: self.transport.loseConnection())

    # Fix: the original discarded the file object without closing it,
    # leaking a descriptor per connection.  Close it once the transfer
    # settles, passing the result through unchanged.
    def _closeFile(result):
        junk.close()
        return result
    d.addBoth(_closeFile)
def connectionMade(self):
    """
    Stream C{self.f} over the transport as soon as the connection is
    established, applying an identity transform to each chunk, and close
    the connection once everything has been sent.
    """
    finished = basic.FileSender().beginFileTransfer(
        self.f, self.transport, lambda x: x)
    finished.addCallback(lambda r: self.transport.loseConnection())
def startProducing(self, fd):
    """
    Begin transferring C{fd} into this pager, and stop paging once the
    transfer finishes -- whether it succeeds or fails.
    """
    sender = basic.FileSender()
    self.deferred = sender.beginFileTransfer(fd, self)
    self.deferred.addBoth(lambda ignored: self.stopPaging())
def test_interface(self):
    """
    L{basic.FileSender} implements the L{IPullProducer} interface.
    """
    self.assertTrue(verifyObject(IProducer, basic.FileSender()))
def startUp(self):
    """
    Create the temporary spool file and, if that succeeded, start
    feeding C{self.msg} into it through a L{basic.FileSender}.
    """
    self.createTempFile()
    # Guard clause: self.fh is -1 when the temp file could not be
    # created, in which case there is nothing to transfer into.
    if self.fh == -1:
        return
    self.filesender = basic.FileSender()
    self.filesender.beginFileTransfer(self.msg, self)
def connectionMade(self):
    """
    Start streaming C{self.fobj} over the transport as soon as the
    connection opens.
    """
    sender = basic.FileSender()
    # NOTE(review): the Deferred returned here is discarded, so transfer
    # errors go unreported -- presumably acceptable for this caller;
    # confirm before relying on error handling.
    sender.beginFileTransfer(self.fobj, self.transport)