def start_stream():
    """Feed the file into the lbry file creator and finish the stream.

    Returns a deferred that fires with the stream hash once the file has
    been fully transferred, the creator stopped, and the stream descriptor
    file written.
    """
    sender = FileSender()
    deferred = sender.beginFileTransfer(file_handle, lbry_file_creator)

    def _stop(_):
        return stop_file(lbry_file_creator)

    def _write_descriptor(_):
        return make_stream_desc_file(lbry_file_creator.stream_hash)

    deferred.addCallback(_stop)
    deferred.addCallback(_write_descriptor)
    deferred.addCallback(lambda _: lbry_file_creator.stream_hash)
    return deferred
def render_GET(self, request):
    """Serve a previously stored file whose name encodes its content type.

    Responds 404 (JSON error body) when the file does not exist.
    """
    # no auth here on purpose, to allow anyone to view, even across home
    # servers.
    # TODO: A little crude here, we could do this better.
    filename = request.path.split('/')[-1]
    # Be paranoid: keep only alphanumerics, dot, underscore and dash.
    # BUG FIX: the original pattern "[^0-9A-z.-_]" was wrong twice over:
    # "A-z" also matches "[\]^_`" and ".-_" is a character RANGE spanning
    # '.' through '_', not three literal characters.
    filename = re.sub(r"[^0-9A-Za-z._-]", "", filename)
    file_path = self.directory + "/" + filename
    logger.debug("Searching for %s", file_path)
    if os.path.isfile(file_path):
        # filename has the content type (base64-encoded, after the first dot)
        base64_contentype = filename.split(".")[1]
        content_type = base64.urlsafe_b64decode(base64_contentype)
        logger.info("Sending file %s", file_path)
        f = open(file_path, 'rb')
        request.setHeader('Content-Type', content_type)
        d = FileSender().beginFileTransfer(f, request)

        # after the file has been sent, clean up and finish the request.
        # BUG FIX: run on both success and failure (addBoth) so the file
        # handle is never leaked and the request is never left hanging.
        def cbFinished(ignored):
            f.close()
            request.finish()
        d.addBoth(cbFinished)
    else:
        respond_with_json_bytes(
            request, 404,
            json.dumps(cs_error("Not found", code=Codes.NOT_FOUND)),
            send_cors=True)
    return server.NOT_DONE_YET
def render_GET(self, request):
    """Serve a stored file identified by the 'f' request argument.

    Responds 400 when the argument is missing and 404 when the id is not
    found in storage.
    """
    log.debug("request from %s: %s" % (self.userid, request.args))
    if 'f' in request.args:
        fn = request.args['f'][0]
        info = self.fileserver.storage.get(fn, False)
        if info:
            filename, mime, md5sum = info
            log.debug("sending file type %s, path %s, md5sum %s" % (mime, filename, md5sum))
            # Hand the client a generated name rather than the stored path.
            genfilename = util.generate_filename(mime)
            request.setHeader('content-type', mime)
            request.setHeader('content-length', os.path.getsize(filename))
            request.setHeader('content-disposition', 'attachment; filename="%s"' % (genfilename))
            # Checksum header lets the client verify the download.
            request.setHeader('x-md5sum', md5sum)
            # stream file to the client
            fp = open(filename, 'rb')
            d = FileSender().beginFileTransfer(fp, request)
            # Close the file and finish the request when the transfer ends.
            # On failure log.error runs first, then this still runs as the
            # following callback, so fp is closed either way.
            def finished(ignored):
                fp.close()
                request.finish()
            d.addErrback(log.error).addCallback(finished)
            return server.NOT_DONE_YET
        # file not found in extra storage
        else:
            return self._quick_response(request, 404, 'not found')
    return self._quick_response(request, 400, 'bad request')
def render_GET(self, request):
    """Serve a stored file identified by the 'f' request argument.

    Responds 400 when the argument is missing and 404 when the id is not
    found in storage.
    """
    log.debug("request from %s: %s" % (self.userid, request.args))
    if "f" in request.args:
        fn = request.args["f"][0]
        info = self.fileserver.storage.get(fn, False)
        if info:
            filename, mime, md5sum = info
            log.debug("sending file type %s, path %s, md5sum %s" % (mime, filename, md5sum))
            # Hand the client a generated name rather than the stored path.
            genfilename = util.generate_filename(mime)
            request.setHeader("content-type", mime)
            request.setHeader("content-length", os.path.getsize(filename))
            request.setHeader("content-disposition", 'attachment; filename="%s"' % (genfilename))
            # Checksum header lets the client verify the download.
            request.setHeader("x-md5sum", md5sum)
            # stream file to the client
            fp = open(filename, "rb")
            d = FileSender().beginFileTransfer(fp, request)
            # Close the file and finish the request when the transfer ends.
            # On failure log.error runs first, then this still runs as the
            # following callback, so fp is closed either way.
            def finished(ignored):
                fp.close()
                request.finish()
            d.addErrback(log.error).addCallback(finished)
            return server.NOT_DONE_YET
        # file not found in extra storage
        else:
            return self._quick_response(request, 404, "not found")
    return self._quick_response(request, 400, "bad request")
def connectionMade(self): self.transport.write('%s\r\n' % (self.insize)) sender = FileSender() sender.CHUNK_SIZE = 2 ** 16 d = sender.beginFileTransfer(self.infile, self.transport, self._monitor) d.addCallback(self.cbTransferCompleted)
def getshared_cb(data): if len(data) == 0: error = { 'status': {'error': "Invalid Request", 'message': "File does not exist."} } request.write(json.dumps(error, sort_keys=True, encoding="utf-8")) request.finish() return file_path = str(data[0][0]) + "/" + fileid if not os.path.exists(file_path): error = { 'status': {'error': "Invalid Request", 'message': "File does not exist."} } request.write(json.dumps(error, sort_keys=True, encoding="utf-8")) request.finish() request.write(str(data[0][1])) # writing key iv_plain = self.sid.decryptData(data[0][2]) # writing IV print iv_plain iv = self.sid.encryptData(iv_plain, pubkey) request.write(iv) file = open(file_path ,"r") sender = FileSender() sender.CHUNK_SIZE = 200 df = sender.beginFileTransfer(file, request) df.addErrback(err) df.addCallback(finishTrnf_cb, file)
def sendFile(self, dataObj):
    """Stream a stored object to the client with an ETag header.

    Falls back through md5 sources: the object's own md5, the cached
    self.etag, and finally a freshly computed checksum. (Python 2 code.)
    """
    try:
        etag = dataObj.get_md5()
        if etag == None:
            etag = self.etag
        if etag == None:
            # Compute and cache the checksum on the object itself so
            # later requests are cheap.
            etag = self.calcMd5Sum(dataObj)
            dataObj.set_md5(etag)
        self.setHeader(self.request, 'ETag', '"%s"' % (etag))
        self.setResponseCode(self.request, 200, 'OK')
        fp = dataObj
        d = FileSender().beginFileTransfer(fp, self.request)
        # Close the object and finish the request once streaming is done;
        # err runs first on failure, then this callback still closes fp.
        def cbFinished(ignored):
            fp.close()
            self.request.finish()
        d.addErrback(err).addCallback(cbFinished)
    except cbException, (ex):
        # Translate a known service error into an HTTP error response.
        ex.sendErrorResponse(self.request, self.requestId)
        traceback.print_exc(file=sys.stdout)
        pycb.log(logging.ERROR, "Error sending file %s" % (str(ex)), traceback)
def sendFile(self, dataObj):
    """Stream a stored object to the client with an ETag header.

    Variant with extra tracing; the md5 is read from the configured
    bucket rather than the object itself. (Python 2 code.)
    """
    pycb.log(logging.INFO, "===== def sendFile of cbRequest.py")
    try:
        #etag = dataObj.get_md5()
        etag = pycb.config.bucket.get_md5()
        pycb.log(logging.INFO, "=====## md5(etag) is %s"%etag)
        if etag == None:
            etag = self.etag
        if etag == None:
            # Compute and cache the checksum on the object itself.
            etag = self.calcMd5Sum(dataObj)
            dataObj.set_md5(etag)
        self.setHeader(self.request, 'ETag', '"%s"' % (etag))
        self.setResponseCode(self.request, 200, 'OK')
        fp = dataObj
        d = FileSender().beginFileTransfer(fp, self.request)
        # Close the object and finish the request once streaming is done.
        def cbFinished(ignored):
            pycb.log(logging.INFO, "===== def cbFinished of cbRequest.py")
            fp.close()
            self.request.finish()
        d.addErrback(err).addCallback(cbFinished)
    except cbException, (ex):
        # Translate a known service error into an HTTP error response.
        ex.sendErrorResponse(self.request, self.requestId)
        traceback.print_exc(file=sys.stdout)
        pycb.log(logging.ERROR, "Error sending file %s" % (str(ex)), traceback)
def sendFile(request, filename, tb_path, ctype):
    """Send file to user.

    Streams *filename* from directory *tb_path* as an attachment using
    the producer/consumer system.

    :param: filename (string)
    :param: filepath (string)
    :param: ctype (string) the value for content-type HTTP header
    """
    request.setHeader(b'content-type', ctype)
    request.setHeader(
        b'content-disposition',
        'attachment; filename=%s' % filename
    )
    fp = FilePath(tb_path).child(filename).open()
    transfer = FileSender().beginFileTransfer(fp, request)

    def _done(ignored):
        # Release the file handle and complete the HTTP response.
        fp.close()
        request.finish()

    transfer.addErrback(err).addCallback(_done)
def render_GET(self, request):
    """Serve the cached WebVTT subtitle file for the requested camera id.

    Returns NOT_DONE_YET while streaming, or None on error / missing
    'camid' argument.
    """
    if 'camid' in request.args:
        videoID = request.args.get('camid')[0]
        myCachedFileName = os.path.join(self._CacheLocation, str(videoID) + ".webvtt")
        request.responseHeaders.setRawHeaders(
            "server", ["Bliknet Streamer HTTP Server"])
        request.responseHeaders.setRawHeaders("Content-Type", ["text/vtt;charset=utf-8"])
        request.responseHeaders.setRawHeaders(
            "Access-Control-Allow-Origin", ["*"])
        try:
            # BUG FIX: getsize was outside the try, so a missing cache file
            # raised an unhandled OSError instead of being logged below.
            request.responseHeaders.setRawHeaders(
                "Content-Length", [os.path.getsize(myCachedFileName)])
            fp = open(myCachedFileName, 'rb')
            d = FileSender().beginFileTransfer(fp, request)

            def cbFinished(ignored):
                fp.close()
                request.finish()
            d.addErrback(err).addCallback(cbFinished)
            return NOT_DONE_YET
        except Exception:
            # BUG FIX: was a bare "except:", which also swallowed
            # KeyboardInterrupt/SystemExit; narrowed to Exception.
            exception = str(traceback.format_exc())
            self.NodeControl.log.error("Error opening: % s, error: %s ." %
                                       (myCachedFileName, exception))
            return None
def start_transfer():
    """Begin uploading via FileSender; returns the transfer deferred."""
    self.file_sender = FileSender()
    log.debug("Starting the file upload")
    assert self.read_handle is not None, \
        "self.read_handle was None when trying to start the transfer"
    # count_bytes is the per-chunk transform, used here to tally progress.
    return self.file_sender.beginFileTransfer(self.read_handle, consumer, count_bytes)
def transfer_file(self, file_path):
    """
    Send a file via the transit. Assume that the latter has been
    already established.
    If the other end provides a hash when done, check it.
    Helper for the send_file method above.
    """
    record_pipe = yield self.transit.connect()
    hasher = hashlib.sha256()

    def _tap(chunk):
        # Hash every chunk on its way out; pass the data through unchanged.
        hasher.update(chunk)
        return chunk

    with open(file_path, 'rb') as source:
        yield FileSender().beginFileTransfer(source, record_pipe, _tap)
    ack_record = yield record_pipe.receive_record()
    ack_record = json.loads(str(ack_record, 'utf-8'))
    yield record_pipe.close()
    try:
        assert ack_record['ack'] == 'ok'
        if ack_record['sha256']:
            assert ack_record['sha256'] == hasher.hexdigest()
    except (AssertionError, KeyError):
        raise TransferError('The file transfer failed.')
    return returnValue(hasher.hexdigest())
def render_GET(self, request):
    """Serve a file from this user's extra storage, keyed by the 'f' arg.

    Responds 400 when the argument is missing and 404 when the id is not
    found in extra storage.
    """
    #log.debug("request from %s: %s" % (self.userid, request.args))
    if 'f' in request.args:
        fn = request.args['f'][0]
        info = self.fileserver.storage.get_extra(fn, self.userid)
        if info:
            (filename, mime, md5sum) = info
            log.debug("sending file type %s, path %s, md5sum %s" % (mime, filename, md5sum))
            # Hand the client a generated name rather than the stored path.
            genfilename = utils.generate_filename(mime)
            request.setHeader('content-type', mime)
            request.setHeader('content-length', os.path.getsize(filename))
            request.setHeader('content-disposition', 'attachment; filename="%s"' % (genfilename))
            # Checksum header lets the client verify the download.
            request.setHeader('x-md5sum', md5sum)
            # stream file to the client
            fp = open(filename, 'rb')
            d = FileSender().beginFileTransfer(fp, request)
            # Close the file and finish the request when the transfer ends;
            # err runs first on failure, then this callback still runs.
            def finished(ignored):
                fp.close()
                request.finish()
            d.addErrback(err).addCallback(finished)
            return server.NOT_DONE_YET
        # file not found in extra storage
        else:
            return self.not_found(request)
    return self.bad_request(request)
def lineReceived(self, line):
    """Start streaming the file once the peer acknowledges with OK."""
    if line.strip() == OK:
        sender = FileSender()
        sender.CHUNK_SIZE = 2 ** 16  # 64 KiB chunks
        transfer = sender.beginFileTransfer(self.fileObj, self.transport, None)
        transfer.addCallback(self.success).addErrback(self.error)
def on_write_status(self, consumer):
    """Render the status document and stream it into *consumer*.

    Returns the transfer deferred; completion/error is routed to
    on_write_completed / on_error.
    """
    payload = StringIO(self.generate_content())
    d = FileSender().beginFileTransfer(payload, consumer)
    d.addCallbacks(lambda _: self.on_write_completed(consumer), self.on_error)
    return d
def lineReceived(self, line):
    """Start streaming the file once the peer acknowledges with OK.

    (Python 2 code — note the print statement.)
    """
    print 'sender %s' % line
    line = line.strip()
    if line == OK:
        sender = FileSender()
        # 64 KiB chunks instead of the small default.
        sender.CHUNK_SIZE = 2 ** 16
        deffered = sender.beginFileTransfer(self.fileObj, self.transport, None)
        deffered.addCallback(self.success).addErrback(self.error)
def _sendMail_data(self, code, resp):
    """Stream the message body after the server accepted the DATA command.

    Each chunk passes through self.transformChunk on its way to the
    transport (presumably CRLF/dot escaping for SMTP — confirm).
    """
    transfer = FileSender()
    deferred = transfer.beginFileTransfer(self._requestParams['file'], self.transport, self.transformChunk)
    deferred.addCallbacks(self.finishedFileTransfer, self.sendError)
    # The next server reply should be the final success code for the message.
    self._expected = SUCCESS
    self._okresponse = self._sendMail_sent
    self._failresponse = self._sendMail_fail
def render_POST(self, request):
    """Decrypt an uploaded request body and dispatch on its JSON 'op' field.

    Known ops: ping, get channel list, get file list, download file.
    Replies are encrypted to the sender's key; unknown senders get a 511.
    """
    # Read POST into a temporary file
    tmpfile = tempfile.TemporaryFile()
    while True:
        data = request.content.read(512 * 1024)
        if not data:
            break
        tmpfile.write(data)
    tmpfile.seek(0)
    # Decrypt it
    decrypted = self.server.key_manager.decrypt_file(tmpfile)
    request.setHeader("content-type", "text/plain")
    # Make sure we know the client
    if not self.server.key_manager.is_gpg_key_in_ring(decrypted.key_id):
        print("WARNING: Unknown client (Key ID: %s)" % (decrypted.key_id))
        request.setResponseCode(511)
        # NOTE(review): tmpfile is not closed on this early return — it is
        # only reclaimed by GC; consider closing before returning.
        return "ERROR: I'm sorry sir, I don't know any Mr. %s" % (str(
            decrypted.key_id))
    tmpfile.close()
    # JSON parse
    op = json.loads(decrypted.data)
    # Operations
    if op['op'] == 'ping':
        return self.server.key_manager.crypt("PONG", decrypted.key_id)
    elif op['op'] == 'get channel list':
        channels = self.server.channel_manager.get_local_channels()
        channels_json = json.dumps(channels)
        channels_crypted = self.server.key_manager.crypt(
            channels_json, decrypted.key_id)
        return channels_crypted
    elif op['op'] == 'get file list':
        files = self.server.channel_manager.get_local_files(op['channels'])
        files_json = json.dumps(files)
        return self.server.key_manager.crypt(files_json, decrypted.key_id)
    elif op['op'] == 'download file':
        # Stream the encrypted file straight from the gpg subprocess stdout.
        f = self.server.channel_manager.get_filestream(
            op['channel'], op['file'])
        p = self.server.key_manager.crypt_file_popen(f, decrypted.key_id)
        d = FileSender().beginFileTransfer(p.stdout, request)
        # Kill and reap the subprocess once the transfer completes.
        def cbFinished(ignored):
            p.kill()
            p.wait()
            request.finish()
        d.addErrback(err).addCallback(cbFinished)
        return server.NOT_DONE_YET
def start_streaming(self):
    """Stream our file into ourselves as the consumer, then stop.

    Returns a deferred that fires when stop() completes.
    """
    d = FileSender().beginFileTransfer(self.file_handle, self)
    d.addCallback(lambda _: self.stop())
    return d
def test_pullFileConsumer(self):
    """FileSender drives a pull consumer to completion under a manual clock."""
    source = StringIO(fileData)
    clock = Clock()
    consumer = FileConsumer(clock)
    done = []
    FileSender().beginFileTransfer(source, consumer).addCallback(done.append)
    # Advance the fake clock until the transfer deferred fires.
    while not done:
        clock.advance(1)
    self.assertEqual(consumer.value(), fileData)
def serve_file(request, fo):
    """Stream the open file *fo* into *request*; returns the deferred.

    The file is closed and the request finished on success and failure
    alike.
    """
    def _cleanup(result):
        fo.close()
        request.finish()

    d = FileSender().beginFileTransfer(fo, request)
    d.addBoth(_cleanup)
    return d
def render_POST(self, request):
    """Decrypt an uploaded request body and dispatch on its JSON 'op' field.

    Known ops: ping, get channel list, get file list, download file.
    Replies are encrypted to the sender's key; unknown senders get a 511.
    """
    # Read POST into a temporary file
    tmpfile = tempfile.TemporaryFile()
    while True:
        data = request.content.read(512 * 1024)
        if not data:
            break
        tmpfile.write(data)
    tmpfile.seek(0)
    # Decrypt it
    decrypted = self.server.key_manager.decrypt_file(tmpfile)
    request.setHeader("content-type", "text/plain")
    # Make sure we know the client
    if not self.server.key_manager.is_gpg_key_in_ring(decrypted.key_id):
        print("WARNING: Unknown client (Key ID: %s)" % (decrypted.key_id))
        request.setResponseCode(511)
        # NOTE(review): tmpfile is not closed on this early return — it is
        # only reclaimed by GC; consider closing before returning.
        return "ERROR: I'm sorry sir, I don't know any Mr. %s" % (str(decrypted.key_id))
    tmpfile.close()
    # JSON parse
    op = json.loads(decrypted.data)
    # Operations
    if op["op"] == "ping":
        return self.server.key_manager.crypt("PONG", decrypted.key_id)
    elif op["op"] == "get channel list":
        channels = self.server.channel_manager.get_local_channels()
        channels_json = json.dumps(channels)
        channels_crypted = self.server.key_manager.crypt(channels_json, decrypted.key_id)
        return channels_crypted
    elif op["op"] == "get file list":
        files = self.server.channel_manager.get_local_files(op["channels"])
        files_json = json.dumps(files)
        return self.server.key_manager.crypt(files_json, decrypted.key_id)
    elif op["op"] == "download file":
        # Stream the encrypted file straight from the gpg subprocess stdout.
        f = self.server.channel_manager.get_filestream(op["channel"], op["file"])
        p = self.server.key_manager.crypt_file_popen(f, decrypted.key_id)
        d = FileSender().beginFileTransfer(p.stdout, request)
        # Kill and reap the subprocess once the transfer completes.
        def cbFinished(ignored):
            p.kill()
            p.wait()
            request.finish()
        d.addErrback(err).addCallback(cbFinished)
        return server.NOT_DONE_YET
def _start_transfer(self, _discard):
    """Begin streaming the loaded file, or report failure if none is loaded."""
    if self.fileObj is None:
        # Guard clause: nothing to send.
        self._failure((2, "NO_FILE_LOADED"))
        return
    logging.info("Started file transfer")
    self.busy = True
    producer = FileSender()
    producer.CHUNK_SIZE = 65536
    transfer = producer.beginFileTransfer(file=self.fileObj, consumer=self.transport)
    transfer.addCallback(self._done_transfer, True)
    transfer.addErrback(self._done_transfer, False)
def send_open_file(openFile, request):
    """Use FileSender to asynchronously send an open file.

    [JBY] From: http://stackoverflow.com/questions/1538617/http-download-very-big-file
    """
    def _finish(ignored):
        # Release the handle and complete the HTTP response.
        openFile.close()
        request.finish()

    transfer = FileSender().beginFileTransfer(openFile, request)
    transfer.addCallback(_finish).addErrback(err)
def connectionMade(self):
    """Send a JSON header describing the file, then stream its contents."""
    header = json.dumps(dict(file_size=self.insize, original_file_path=self.path))
    self.transport.write(header + '\r\n')
    sender = FileSender()
    sender.CHUNK_SIZE = 2 ** 16  # 64 KiB chunks
    transfer = sender.beginFileTransfer(self.infile, self.transport, self._monitor)
    transfer.addCallback(self.cbTransferCompleted)
def start_stream():
    """Feed the file through the lbry file creator; fires with the stream hash.

    TODO: Using FileSender isn't necessary, we can just read straight
    from the disk. The stream creation process should be in its own
    thread anyway so we don't need to worry about interacting with the
    twisted reactor.
    """
    d = FileSender().beginFileTransfer(file_handle, lbry_file_creator)
    d.addCallback(lambda _: stop_file(lbry_file_creator))
    d.addCallback(lambda _: make_stream_desc_file(lbry_file_creator.stream_hash))
    d.addCallback(lambda _: lbry_file_creator.stream_hash)
    return d
def connectionMade(self):
    """Write the serialized file-message header, then stream the file body."""
    header = session.Message(session.fileMsg)
    header.fileSize = self.insize
    header.fileName = self.relPath
    header.sessionID = self.sessionID
    self.transport.write(header.serialize() + '\r\n')
    sender = FileSender()
    sender.CHUNK_SIZE = 2 ** 16  # 64 KiB chunks
    transfer = sender.beginFileTransfer(self.infile, self.transport, self._monitor)
    transfer.addCallback(self.cbTransferCompleted)
def _start_transfer(self, _discard):
    """Kick off the file transfer, or report failure if nothing is loaded."""
    if self.fileObj is not None:
        logging.info('Started file transfer')
        self.busy = True
        sender = FileSender()
        sender.CHUNK_SIZE = 65536
        d = sender.beginFileTransfer(file=self.fileObj, consumer=self.transport)
        d.addCallback(self._done_transfer, True)
        d.addErrback(self._done_transfer, False)
    else:
        self._failure((2, 'NO_FILE_LOADED'))
def render(self,request): self.isLeaf = False #print request #print dir(request) print request.path if (request.path.split('.')[-1] == 'mkv'): print request.path.split('.')[-1] request.setHeader('Content-Type',"application/x-mpegurl") if (self.converter.checkStatus(request.path)): self.converter.start('../../Torrents',request.path) #f = open('playlist.m3u8','rb') #print f.read() #f.close() #f = open('playlist.m3u8','rb') #def cbFinished(ignored): #f.close() #request.finish() playlist = self.converter.getPlaylist() print playlist return playlist #d = FileSender().beginFileTransfer(f,request) #d.addErrback(err).addCallback(cbFinished) #return NOT_DONE_YET elif(request.path.split('.')[-1] == 'ts'): request.setHeader('Content-Type','video/MP2T') print (request.path + '<--------') self.converter.updateRecentSeg(request.path) f = open(request.path[1:],'rb') def cbFinished(ignored): f.close() request.finish() d = FileSender().beginFileTransfer(f,request) d.addErrback(err).addCallback(cbFinished) return NOT_DONE_YET else: print ('travelled well') print self.path request.setHeader('Content-Type','video/octet-stream') self.isLeaf = False return static.File.render(self,request) """f = open(self.path+request.path,'rb')
def render_GET(self, request):
    """Stream a media file identified by a base64-encoded path in ?key=."""
    # key - path to media file in base64
    # NOTE(review): the decoded path is joined unchecked under /root/media,
    # so a crafted key containing "../" could escape it — confirm callers.
    relpath = base64.b64decode(request.args[b'key'][0]).decode('utf-8')
    fp = open('/root/media/{}'.format(relpath), 'rb')
    transfer = FileSender().beginFileTransfer(fp, request)

    def _done(ignored):
        fp.close()
        request.finish()

    transfer.addErrback(err).addCallback(_done)
    return NOT_DONE_YET
def connectionMade(self):
    """Announce the incoming file as a JSON instruction line, then stream it."""
    instruction = json.dumps(dict(file_size=self.insize, original_file_path=self.path))
    self.transport.write(instruction + "\r\n")
    sender = FileSender()
    sender.CHUNK_SIZE = 2 ** 16  # 64 KiB chunks
    transfer = sender.beginFileTransfer(self.infile, self.transport, self._monitor)
    transfer.addCallback(self.cbTransferCompleted)
def read(self, write_func):
    """Stream this blob's contents into *write_func* via a HashBlobReader.

    Returns a deferred that fires with the transfer result, or fails with
    ValueError when the blob cannot be opened for reading.
    """
    file_handle = self.open_for_reading()
    if file_handle is None:
        return defer.fail(ValueError("Could not read the blob"))

    d = FileSender().beginFileTransfer(file_handle, HashBlobReader(write_func))

    def _release(result):
        # Always give back the read handle, passing the result through.
        self.close_read_handle(file_handle)
        return result

    d.addCallback(_release)
    return d
def connectionMade(self):
    """Start the transfer as soon as the connection is up. (Python 2 code.)"""
    # Add these callbacks, otherwise FileSender throws some exceptions
    # when it completes
    def f1(lastChunk):
        # Success: report and drop the connection.
        print "finished"
        self.transport.loseConnection()
    def f2(reason):
        # Failure: report the reason and drop the connection.
        print "failed"
        print reason
        self.transport.loseConnection()
    fs = FileSender()
    fs.beginFileTransfer(self.factory.fp, self.transport, None).addCallbacks(f1, f2)
def connectionMade(self):
    """Open the client log, announce the file as JSON, then stream it."""
    # BUG FIX: raw string — in a plain literal "\c" is an invalid escape
    # sequence (DeprecationWarning on Python 3.6+, SyntaxError eventually).
    # The resulting Windows-style relative path is byte-identical.
    self.logger = MessageLogger(open(r'log\client_logger.txt', "a"))
    self.logger.log("[connected at %s]" % time.asctime(time.localtime(time.time())))
    instruction = dict(file_size=self.insize, original_file_path=self.path)
    instruction = json.dumps(instruction)
    self.transport.write(instruction + '\r\n')
    sender = FileSender()
    sender.CHUNK_SIZE = 2**16  # 64 KiB chunks
    d = sender.beginFileTransfer(self.infile, self.transport, self._monitor)
    d.addCallback(self.cbTransferCompleted)
def download_file(request, name, path, file_is_temp=False):
    """Stream the file at *path* to the client as attachment *name*.

    When *file_is_temp* is true the file is deleted once the response
    completes. Returns NOT_DONE_YET.
    """
    # BUG FIX: open in binary mode — FileSender hands chunks straight to
    # the transport; text mode would decode them (and break on Python 3).
    file = open(path, 'rb')

    def finalize(*args, **kwargs):
        request.finish()
        file.close()
        if file_is_temp:
            os.remove(path)

    request.setHeader('Content-Disposition', 'attachment; filename="{}"'.format(name))
    transfer = FileSender().beginFileTransfer(file, request)
    # BUG FIX: run cleanup on failure too (addBoth), so the request is
    # finished and the temp file removed even if the transfer errors out.
    transfer.addBoth(finalize)
    return NOT_DONE_YET
def render_POST(self, request):
    """Decrypt an uploaded request body and dispatch on its JSON 'op' field.

    Known ops: ping, get channel list, get file list, download file.
    Replies are encrypted to the sender's key. (Python 2 code — `print op`.)
    """
    # Read POST into a temporary file
    tmpfile = tempfile.TemporaryFile()
    while True:
        data = request.content.read(512 * 1024)
        if not data:
            break
        tmpfile.write(data)
    tmpfile.seek(0)
    # Decrypt it
    decrypted = self.server.key_manager.decrypt_file(tmpfile)
    request.setHeader("content-type", "text/plain")
    # JSON parse
    # NOTE(review): tmpfile is never explicitly closed here — GC reclaims it.
    op = json.loads(decrypted.data)
    print op
    # Operations
    if op['op'] == 'ping':
        return self.server.key_manager.crypt("PONG", decrypted.key_id)
    elif op['op'] == 'get channel list':
        channels = self.server.channel_manager.get_local_channels()
        channels_json = json.dumps(channels)
        return self.server.key_manager.crypt(channels_json, decrypted.key_id)
    elif op['op'] == 'get file list':
        files = self.server.channel_manager.get_local_files(op['channels'])
        files_json = json.dumps(files)
        return self.server.key_manager.crypt(files_json, decrypted.key_id)
    elif op['op'] == 'download file':
        # Stream the encrypted file straight from the gpg subprocess stdout.
        f = self.server.channel_manager.get_filestream(
            op['channel'], op['file'])
        p = self.server.key_manager.crypt_file_popen(f, decrypted.key_id)
        d = FileSender().beginFileTransfer(p.stdout, request)
        # Kill and reap the subprocess once the transfer completes.
        def cbFinished(ignored):
            p.kill()
            p.wait()
            request.finish()
        d.addErrback(err).addCallback(cbFinished)
        return server.NOT_DONE_YET
def handle_normal_response(self, response_dict):
    """Handle one reflector server response for a regular blob.

    Two-phase protocol, distinguished by whether a FileSender exists yet:
      1. file_sender is None: expecting a server-info response whose
         'send_blob' flag says whether the server wants the blob.
      2. otherwise: expecting a 'received_blob' acknowledgement for the
         blob just sent.
    Raises ValueError when the expected key is missing.
    """
    if self.file_sender is None:  # Expecting Server Info Response
        if 'send_blob' not in response_dict:
            raise ValueError(
                "I don't know whether to send the blob or not!")
        if response_dict['send_blob'] is True:
            # Server wants the blob; arm the sender for the upload phase.
            self.file_sender = FileSender()
            return defer.succeed(True)
        else:
            log.info("Reflector already has %s", self.next_blob_to_send)
            return self.set_not_uploading()
    else:  # Expecting Server Blob Response
        if 'received_blob' not in response_dict:
            raise ValueError(
                "I don't know if the blob made it to the intended destination!"
            )
        else:
            if response_dict['received_blob']:
                # Record the successful reflection of this blob.
                self.reflected_blobs.append(
                    self.next_blob_to_send.blob_hash)
                log.debug("Sent reflector blob %s", self.next_blob_to_send)
            else:
                log.warning("Reflector failed to receive blob %s",
                            self.next_blob_to_send)
            return self.set_not_uploading()
def sendOpenFile(request, openFile):
    """Asynchronously stream an already-open file into *request*.

    Guesses the Content-Type from the request path, defaulting to
    text/plain. Returns NOT_DONE_YET.

    [JBY] From: http://stackoverflow.com/questions/1538617/http-download-very-big-file
    """
    guessed, _encoding = mimetypes.guess_type(request.path)
    request.setHeader('Content-Type', guessed if guessed else 'text/plain')

    def _finish(ignored):
        openFile.close()
        request.finish()

    transfer = FileSender().beginFileTransfer(openFile, request)
    transfer.addErrback(err)
    transfer.addCallback(_finish)
    return server.NOT_DONE_YET
def filenameListener(self, real, path, size):
    """Announce a file to the peer and stream it, or queue it for later.

    Only proceeds when both state flags are set (no transfer currently in
    flight); otherwise the file goes onto the pending queue.
    """
    if self.filestate == 1 and self.folderstate == 1:
        # Mark a transfer as in progress.
        self.filestate = 0
        if self.count:
            self.count -= 1
        # Announce the file as a JSON line before streaming the bytes.
        data = {"filename": path, "size": size}
        jdata = js.dumps(data)
        self.sendLine(jdata)
        self.f = open(os.path.abspath(real), 'rb')
        self.sender = FileSender()
        # self.otan presumably re-arms state for the next queued file — confirm.
        self.sender.beginFileTransfer(self.f, self.transport).addCallback(
            self.otan)
    else:
        # Paths without a backslash are reported to the UI file queue.
        if path.find("\\") == -1:
            pub.sendMessage("filequeue", msg=path)
        self.filequeue.append([real, path, size])
def __uploadFile(self, filename):
    """ upload file in DATA_DIR

    Only .tar files whose name is exactly 16 characters are accepted;
    anything else is logged and ignored.
    """
    if filename.endswith(".tar") and len(filename) == 16:
        self.pendingFile = filename
        filepath = os.path.join(DATA_DIR, filename)
        uploadfile = open(filepath, 'rb')
        uploadsize = os.stat(filepath).st_size
        self.logger.info("upload file: " + filename)
        # Announce the upload (location, id, name, size) before streaming.
        self.sendLine('%s %s %s %s %s' % ("upload", self.clientLocation, self.clientID, filename, uploadsize))
        try:
            self.sender = FileSender()
            self.lastUploadTime = time.time()
            # self.__monitor is the per-chunk transform (progress tracking).
            d = self.sender.beginFileTransfer(uploadfile, self.transport, self.__monitor)
            d.addCallback(self.uploadCompleted, filename)
        except RuntimeError as e:
            self.logger.warn("__uploadFile|Unexcepted error:" + str(e))
            traceback.print_exc(file=sys.stdout)
    else:
        self.logger.info("invalid upload file: " + filename)
def _respond_with_file(self, request, media_type, file_path, file_size=None):
    """Stream *file_path* with type/caching headers, or respond 404."""
    logger.debug("Responding with %r", file_path)
    if not os.path.isfile(file_path):
        self._respond_404(request)
        return
    request.setHeader(b"Content-Type", media_type.encode("UTF-8"))
    # cache for at least a day.
    # XXX: we might want to turn this off for data we don't want to
    # recommend caching as it's sensitive or private - or at least
    # select private. don't bother setting Expires as all our
    # clients are smart enough to be happy with Cache-Control
    request.setHeader(b"Cache-Control",
                      b"public,max-age=86400,s-maxage=86400")
    if file_size is None:
        file_size = os.stat(file_path).st_size
    request.setHeader(b"Content-Length", b"%d" % (file_size, ))
    with open(file_path, "rb") as f:
        yield FileSender().beginFileTransfer(f, request)
    request.finish()
def handle_descriptor_response(self, response_dict):
    """Handle a reflector server response for the stream descriptor (sd) blob.

    Two-phase protocol, distinguished by whether a FileSender exists yet:
      1. file_sender is None: expecting a server-info response whose
         'send_sd_blob' flag says whether to upload the descriptor (and,
         if not, which blobs the server still needs).
      2. otherwise: expecting a 'received_sd_blob' acknowledgement; on
         failure we disconnect after resetting the upload state.
    Raises ReflectorRequestError / ValueError when the expected key is
    missing.
    """
    if self.file_sender is None:  # Expecting Server Info Response
        if 'send_sd_blob' not in response_dict:
            raise ReflectorRequestError(
                "I don't know whether to send the sd blob or not!")
        if response_dict['send_sd_blob'] is True:
            # Server wants the descriptor; arm the sender for upload.
            self.file_sender = FileSender()
        else:
            self.received_descriptor_response = True
        self.descriptor_needed = response_dict['send_sd_blob']
        # Blobs the server reports as missing (empty list if omitted).
        self.needed_blobs = response_dict.get('needed_blobs', [])
        return self.get_blobs_to_send()
    else:  # Expecting Server Blob Response
        if 'received_sd_blob' not in response_dict:
            raise ValueError(
                "I don't know if the sd blob made it to the intended destination!"
            )
        else:
            self.received_descriptor_response = True
            disconnect = False
            if response_dict['received_sd_blob']:
                self.reflected_blobs.append(
                    self.next_blob_to_send.blob_hash)
                log.info("Sent reflector descriptor %s",
                         self.next_blob_to_send)
            else:
                log.warning("Reflector failed to receive descriptor %s",
                            self.next_blob_to_send)
                disconnect = True
            d = self.set_not_uploading()
            if disconnect:
                # Tear the connection down once state is reset.
                d.addCallback(lambda _: self.transport.loseConnection())
            return d
def render_POST (self, request):
    """Decrypt an uploaded request body and dispatch on its JSON 'op' field.

    Known ops: ping, get channel list, get file list, download file.
    Replies are encrypted to the sender's key. (Python 2 code — `print op`.)
    """
    # Read POST into a temporary file
    tmpfile = tempfile.TemporaryFile()
    while True:
        data = request.content.read(512 * 1024)
        if not data:
            break
        tmpfile.write (data)
    tmpfile.seek(0)
    # Decrypt it
    decrypted = self.server.key_manager.decrypt_file (tmpfile)
    request.setHeader("content-type", "text/plain")
    # JSON parse
    # NOTE(review): tmpfile is never explicitly closed here — GC reclaims it.
    op = json.loads (decrypted.data)
    print op
    # Operations
    if op['op'] == 'ping':
        return self.server.key_manager.crypt ("PONG", decrypted.key_id)
    elif op['op'] == 'get channel list':
        channels = self.server.channel_manager.get_local_channels()
        channels_json = json.dumps(channels)
        return self.server.key_manager.crypt (channels_json, decrypted.key_id)
    elif op['op'] == 'get file list':
        files = self.server.channel_manager.get_local_files(op['channels'])
        files_json = json.dumps(files)
        return self.server.key_manager.crypt (files_json, decrypted.key_id)
    elif op['op'] == 'download file':
        # Stream the encrypted file straight from the gpg subprocess stdout.
        f = self.server.channel_manager.get_filestream (op['channel'], op['file'])
        p = self.server.key_manager.crypt_file_popen (f, decrypted.key_id)
        d = FileSender().beginFileTransfer(p.stdout, request)
        # Kill and reap the subprocess once the transfer completes.
        def cbFinished(ignored):
            p.kill()
            p.wait()
            request.finish()
        d.addErrback(err).addCallback(cbFinished)
        return server.NOT_DONE_YET
def render_GET(self, request):
    """Serve this resource's mail attachment as a forced download.

    Optional request args: 'encoding' (transfer decoding) and 'filename'
    (download name, defaults to the attachment id).
    """
    encoding = request.args.get('encoding', [None])[0]
    filename = request.args.get('filename', [self.attachment_id])[0]
    attachment = self.mail_service.attachment(self.attachment_id, encoding)
    request.setHeader(b'Content-Type', b'application/force-download')
    # NOTE(review): bytes(str) without an encoding only works on Python 2;
    # on Python 3 this raises TypeError — confirm target interpreter.
    request.setHeader(b'Content-Disposition', bytes('attachment; filename=' + filename))
    bytes_io = io.BytesIO(attachment['content'])
    d = FileSender().beginFileTransfer(bytes_io, request)
    # Release the buffer and complete the request when streaming ends.
    def cb_finished(_):
        bytes_io.close()
        request.finish()
    d.addErrback(err).addCallback(cb_finished)
    return server.NOT_DONE_YET
def _setContentDispositionAndSend(file_path, extension, content_type):
    """Set download headers and stream *file_path* to the request.

    Fires with 0 when the transfer completes.
    """
    request.setHeader('content-disposition', 'filename="%s.%s"' % (file_path, extension))
    request.setHeader('content-type', content_type)
    f = open(file_path, "rb")
    try:
        yield FileSender().beginFileTransfer(f, request)
    finally:
        # BUG FIX: close the handle even when the transfer errs out; the
        # original only closed it on the success path.
        f.close()
    defer.returnValue(0)
def attachment(self, request, attachment_id):
    """Serve one attachment as a forced download; returns the transfer deferred.

    Optional request args: 'encoding' (transfer decoding) and 'filename'
    (download name, defaults to the attachment id).
    """
    encoding = request.args.get('encoding', [None])[0]
    filename = request.args.get('filename', [attachment_id])[0]
    attachment = self.querier.attachment(attachment_id, encoding)
    request.setHeader(b'Content-Type', b'application/force-download')
    # NOTE(review): bytes(str) without an encoding only works on Python 2;
    # on Python 3 this raises TypeError — confirm target interpreter.
    request.setHeader(b'Content-Disposition', bytes('attachment; filename=' + filename))
    bytes_io = io.BytesIO(attachment['content'])
    d = FileSender().beginFileTransfer(bytes_io, request)
    # Release the buffer and complete the request when streaming ends.
    def cbFinished(ignored):
        bytes_io.close()
        request.finish()
    d.addErrback(err).addCallback(cbFinished)
    return d
def beginFileTransfer(self, file, consumer, rangeBegin, rangeEnd, transform=None):
    """Send only the byte range [rangeBegin, rangeEnd) of *file*.

    Seeks to rangeBegin before delegating to the base FileSender.
    Raises ValueError when the range is empty or inverted.
    """
    if rangeBegin >= rangeEnd:
        raise ValueError('rangeBegin >= rangeEnd')
    self.rangeBegin = rangeBegin
    self.rangeEnd = rangeEnd
    if file:
        file.seek(rangeBegin)
    return FileSender.beginFileTransfer(self, file, consumer, transform)
def _setContentDispositionAndSend(file_path):
    """Stream *file_path* to the request as a PNG download.

    Fires with 0 when the transfer completes.
    """
    filename = os.path.basename(file_path)
    request.setHeader('content-disposition', 'filename="%s"' % filename)
    request.setHeader('content-type', "image/png")
    f = open(file_path, "rb")
    try:
        yield FileSender().beginFileTransfer(f, request)
    finally:
        # BUG FIX: close the handle even when the transfer errs out; the
        # original only closed it on the success path.
        f.close()
    defer.returnValue(0)
def render_GET(self, request):
    """Serve a previously stored file whose name encodes its content type.

    Responds 404 (JSON error body) when the file does not exist.
    """
    # no auth here on purpose, to allow anyone to view, even across home
    # servers.
    # TODO: A little crude here, we could do this better.
    filename = request.path.split('/')[-1]
    # Be paranoid: keep only alphanumerics, dot, underscore and dash.
    # BUG FIX: the original pattern "[^0-9A-z.-_]" was wrong twice over:
    # "A-z" also matches "[\]^_`" and ".-_" is a character RANGE spanning
    # '.' through '_', not three literal characters.
    filename = re.sub(r"[^0-9A-Za-z._-]", "", filename)
    file_path = self.directory + "/" + filename
    logger.debug("Searching for %s", file_path)
    if os.path.isfile(file_path):
        # filename has the content type (base64-encoded, after the first dot)
        base64_contentype = filename.split(".")[1]
        content_type = base64.urlsafe_b64decode(base64_contentype)
        logger.info("Sending file %s", file_path)
        f = open(file_path, 'rb')
        request.setHeader('Content-Type', content_type)
        # cache for at least a day.
        # XXX: we might want to turn this off for data we don't want to
        # recommend caching as it's sensitive or private - or at least
        # select private. don't bother setting Expires as all our matrix
        # clients are smart enough to be happy with Cache-Control (right?)
        request.setHeader("Cache-Control", "public,max-age=86400,s-maxage=86400")
        d = FileSender().beginFileTransfer(f, request)

        # after the file has been sent, clean up and finish the request.
        # BUG FIX: run on both success and failure (addBoth) so the file
        # handle is never leaked and the request is never left hanging.
        def cbFinished(ignored):
            f.close()
            request.finish()
        d.addBoth(cbFinished)
    else:
        respond_with_json_bytes(request, 404, json.dumps(
            cs_error("Not found", code=Codes.NOT_FOUND)),
            send_cors=True)
    return server.NOT_DONE_YET
def read(self, write_func):
    """
    This function is only used in StreamBlobDecryptor and should be
    deprecated in favor of open_for_reading()
    """
    file_handle = self.open_for_reading()
    if file_handle is None:
        return defer.fail(IOError("Could not read the blob"))

    d = FileSender().beginFileTransfer(file_handle, HashBlobReader_v0(write_func))

    def _release(result):
        # Always give back the read handle, passing the result through.
        self.close_read_handle(file_handle)
        return result

    d.addCallback(_release)
    return d
def _send_attachment(self, encoding, filename, request):
    """Fetch the attachment's bytes and stream them into *request*.

    The in-memory buffer is always closed and the request always finished,
    even when the transfer fails.
    """
    attachment = yield self.mail_service.attachment(self.attachment_id, encoding)
    payload = io.BytesIO(attachment['content'])
    try:
        yield FileSender().beginFileTransfer(payload, request)
    finally:
        payload.close()
        request.finish()
def send_file_data(self):
    """Stream self.file_name over the protocol's transport and notify the GUI."""
    fh = open(self.file_name, 'rb')
    def file_transferred(_):
        # Success: echo the sent file into our own chat view, then close.
        self.gui.write_own_file(self.username, os.path.basename(self.file_name))
        fh.close()
    def finish(_):
        # NOTE(review): registered as the ERRback of addCallbacks below, so
        # this runs on transfer failure and only closes the handle.
        if not fh.closed:
            fh.close()
    def error(e):
        # Catches failures raised by either handler above.
        self.gui.print_debug_info('Error sending the file')
    sender = FileSender()
    d = sender.beginFileTransfer(fh, self.protocol.transport)
    d.addCallbacks(file_transferred, finish)
    d.addErrback(error)
class FileReader(AccumulatingProtocol):
    """Protocol that pushes a local file over its transport via FileSender."""

    def __init__(self, filename, transform=None, delay=0, verbose=False):
        # transform: optional per-chunk function; delay: seconds to defer
        # the completion callback.
        self.f = open(filename, 'rb')
        self.transform = transform
        self.delay = delay
        self.producer = FileSender()
        self.logger = gogo.Gogo(__name__, verbose=verbose).logger

    def cleanup(self, *args):
        # Close the source file and halt the producer.
        self.f.close()
        self.producer.stopProducing()

    def resumeProducing(self):
        # NOTE(review): this reads self.file, self.CHUNK_SIZE, self.consumer,
        # self.deferred and self.lastSent, none of which are set in __init__
        # (which sets self.f) — presumably inherited/overridden FileSender
        # producer state; confirm this method is ever dispatched with it.
        chunk = self.file.read(self.CHUNK_SIZE) if self.file else ''
        if not chunk:
            # Source exhausted: detach from the consumer and fire the
            # completion deferred (optionally after self.delay seconds).
            self.file = None
            self.consumer.unregisterProducer()
            if self.deferred and self.delay:
                callLater(self.delay, self.deferred.callback, self.lastSent)
            elif self.deferred:
                self.deferred.callback(self.lastSent)
            self.deferred = None
            return

    def connectionLost(self, reason):
        self.logger.debug('connectionLost: %s', reason)
        self.cleanup()

    def connectionMade(self):
        self.logger.debug('Connection made from %s', self.transport.getPeer())
        args = (self.f, self.transport, self.transform)
        self.d = self.closedDeferred = self.producer.beginFileTransfer(*args)
        # Drive the producer synchronously until the transfer deferred fires.
        while not self.d.called:
            self.producer.resumeProducing()
        self.d.addErrback(self.logger.error)
        self.d.addBoth(self.cleanup)