import random
import string
from datetime import datetime

from core.file_system import FileSystem
from core.streams import Pump

def request_handler(req):
    req.pause()
    filename = ''
    for i in range(10):
        filename += string.uppercase[random.randrange(26)]
    filename += '.uploaded'
    print "Got request storing in %s" % filename

    def file_open(err, file):
        pump = Pump(req, file.write_stream)
        start_time = datetime.now()

        def end_handler(stream):
            def file_close(err, file):
                end_time = datetime.now()
                print "Uploaded %d bytes to %s in %s" % (pump.bytes_pumped, filename, end_time - start_time)
                req.response.end()
            file.close(file_close)

        req.end_handler(end_handler)
        pump.start()
        req.resume()

    FileSystem.open(filename, handler=file_open)
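# Sketch only: the upload handler above is not wired to anything by itself.
# Assuming the standard vertx HTTP server API (create_http_server /
# request_handler / listen), it could be attached like this; the port value
# is illustrative (it matches the client example later in this file).
import vertx

server = vertx.create_http_server()
server.request_handler(request_handler)
server.listen(8080)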
def setup(setup_func):
    def mkdir_handler(err, result):
        setup_func()

    def exists_handler(err, exists):
        if exists:
            def delete_handler(err, result):
                FileSystem.mkdir(FILEDIR, handler=mkdir_handler)
            FileSystem.delete_recursive(FILEDIR, delete_handler)
        else:
            FileSystem.mkdir(FILEDIR, handler=mkdir_handler)

    FileSystem.exists(FILEDIR, exists_handler)
def test_copy(self):
    filename = FILEDIR + "/test-file.txt"
    tofile = FILEDIR + "/to-file.txt"

    def create_file_handler(err, res):
        tu.check_context()

        def copy_handler(err, res):
            tu.check_context()
            tu.azzert(err == None)
            tu.test_complete()

        FileSystem.copy(filename, tofile, copy_handler)

    FileSystem.create_file(filename, handler=create_file_handler)
def create_file_handler(err, stats):
    tu.check_context()

    def props_handler(err, stats):
        tu.check_context()
        tu.azzert(err == None)
        print "creation time %s" % stats.creation_time
        print "last access time %s" % stats.last_access_time
        print "last modification time %s" % stats.last_modified_time
        print "directory? %s" % stats.directory
        print "regular file? %s" % stats.regular_file
        print "symbolic link? %s" % stats.symbolic_link
        print "other? %s" % stats.other
        print "size %s" % stats.size
        tu.azzert(stats.regular_file)
        tu.test_complete()

    FileSystem.props(filename, props_handler)
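# Sketch only: the props handler above is shown in isolation; filename and
# the call that triggers it belong to the enclosing test. Mirroring the
# test_copy pattern, the wiring might look like the following (the method
# name test_file_props is illustrative, not from the source):
def test_file_props(self):
    filename = FILEDIR + "/test-file.txt"
    # create_file_handler / props_handler as defined above, nested here so
    # that they close over filename
    FileSystem.create_file(filename, handler=create_file_handler)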
def test_async_file(self):
    def open_handler(err, file):
        tu.check_context()
        tu.azzert(err == None)
        num_chunks = 100
        chunk_size = 1000
        tot_buff = Buffer.create()
        self.written = 0
        for i in range(0, num_chunks):
            buff = TestUtils.gen_buffer(chunk_size)
            tot_buff.append_buffer(buff)

            def write_handler(err, res):
                tu.check_context()
                self.written += 1
                if self.written == num_chunks:
                    # all written
                    tot_read = Buffer.create()
                    self.read = 0
                    for j in range(0, num_chunks):
                        pos = j * chunk_size

                        def read_handler(err, buff):
                            tu.check_context()
                            tu.azzert(err == None)
                            self.read += 1
                            if self.read == num_chunks:
                                # all read
                                tu.azzert(TestUtils.buffers_equal(tot_buff, tot_read))

                                def close_handler(err, res):
                                    tu.check_context()
                                    tu.test_complete()

                                file.close(close_handler)

                        file.read(tot_read, pos, pos, chunk_size, read_handler)

            file.write(buff, i * chunk_size, write_handler)

    FileSystem.open(FILEDIR + "/somefile.txt", handler=open_handler)
def test_async_file_streams(self):
    filename = FILEDIR + "/somefile.txt"

    def open_handler(err, file):
        tu.check_context()
        tu.azzert(err == None)
        num_chunks = 100
        chunk_size = 1000
        tot_buff = Buffer.create()
        write_stream = file.write_stream
        for i in range(0, num_chunks):
            buff = TestUtils.gen_buffer(chunk_size)
            tot_buff.append_buffer(buff)
            write_stream.write_buffer(buff)

        def close_handler(err, file):
            def open_handler2(err, file):
                tu.check_context()
                tu.azzert(err == None)
                read_stream = file.read_stream
                tot_read = Buffer.create()

                def data_handler(data):
                    tot_read.append_buffer(data)

                read_stream.data_handler(data_handler)

                def end_handler(stream):
                    tu.azzert(TestUtils.buffers_equal(tot_buff, tot_read))
                    tu.check_context()

                    def close_handler2(err, result):
                        tu.check_context()
                        tu.test_complete()

                    file.close(close_handler2)

                read_stream.end_handler(end_handler)

            FileSystem.open(filename, handler=open_handler2)

        file.close(close_handler)

    FileSystem.open(filename, handler=open_handler)
def file_system():
    """ Return the filesystem """
    return FileSystem()
def teardown(teardown_func):
    def delete_handler(err, result):
        teardown_func()
    FileSystem.delete_recursive(FILEDIR, delete_handler)
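# Sketch only: setup and teardown bracket a test run. setup recreates FILEDIR
# and then invokes its callback; teardown deletes FILEDIR and then invokes
# its callback. run_tests and done below are hypothetical callbacks, not part
# of the source.
def done():
    print "tests finished, FILEDIR removed"

def run_tests():
    # ... run the individual test methods here, then clean up ...
    teardown(done)

setup(run_tests)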
import vertx
from core.file_system import FileSystem
from core.streams import Pump

client = vertx.create_http_client()
client.port = 8080
client.host = "localhost"

def response_handler(resp):
    print "Response %d" % resp.status_code

req = client.put("/someurl", response_handler)

filename = "upload/upload.txt"

def file_props(err, props):
    req.put_header("Content-Length", props.size)

    def open_handler(err, file):
        rs = file.read_stream
        pump = Pump(rs, req)

        def end_handler(stream):
            req.end()

        rs.end_handler(end_handler)
        pump.start()

    FileSystem.open(filename, handler=open_handler)

FileSystem.props(filename, file_props)