def test_empty():
    """Loading a record from an empty stream must raise EOFError."""
    io = MemoryIO()
    schema = {
        "type": "record",
        "name": "test",
        "fields": [{"type": "boolean", "name": "a"}],
    }
    try:
        fastavro.load(io, schema)
    except EOFError:
        # Expected: nothing to decode from an empty buffer.
        pass
    else:
        assert False, "read from empty file"
def test_empty():
    """Loading a record from an empty stream must raise EOFError.

    Bug fix: the original called ``fastavro.load`` bare, so the EOFError
    raised on an empty buffer escaped and the test always errored instead
    of asserting the expected failure mode.
    """
    io = MemoryIO()
    schema = {
        'type': 'record',
        'name': 'test',
        'fields': [
            {'type': 'boolean', 'name': 'a'}
        ],
    }
    try:
        fastavro.load(io, schema)
    except EOFError:
        # Expected: nothing to decode from an empty buffer.
        pass
    else:
        assert False, 'read from empty file'
def test_empty():
    """Loading a record from an empty stream must raise EOFError.

    Bug fix: ``fastavro.load`` on an empty buffer raises EOFError; the
    original let it propagate, so the test could never pass. The error
    is now asserted as the expected outcome.
    """
    io = MemoryIO()
    schema = {
        'type': 'record',
        'name': 'test',
        'fields': [{
            'type': 'boolean', 'name': 'a'
        }],
    }
    try:
        fastavro.load(io, schema)
    except EOFError:
        pass  # expected: empty input has no record to read
    else:
        assert False, 'read from empty file'
def test_empty():
    """An empty input stream must make fastavro.load fail with EOFError."""
    stream = MemoryIO()
    schema = {
        'type': 'record',
        'name': 'test',
        'fields': [{'type': 'boolean', 'name': 'a'}],
    }
    # pytest.raises fails the test if load() returns without raising.
    with pytest.raises(EOFError):
        fastavro.load(stream, schema)
def test_empty():
    """Reading from an empty stream must fail with EOFError."""
    io = MemoryIO()
    schema = {
        'type': 'record',
        'name': 'test',
        'fields': [{
            'type': 'boolean', 'name': 'a'
        }],
    }
    raised = False
    try:
        fastavro.load(io, schema)
    except EOFError:
        raised = True
    # Fails with the same message as before if load() returned normally;
    # any other exception propagates unchanged.
    assert raised, 'read from empty file'
def test_dump_load(tmpdir):
    """Round-trip a single record through dump() and load().

    Writes one record to a temp Avro file with fastavro.dump and reads
    it back with fastavro.load, asserting the result is unchanged.
    """
    schema = {
        "type": "record",
        "name": "Test",
        "namespace": "test",
        "fields": [{
            "name": "field",
            "type": {
                "type": "string"
            }
        }]
    }
    record = {"field": "foobar"}
    temp_path = tmpdir.join('test_dump.avro')

    with temp_path.open('wb') as out_file:
        fastavro.dump(out_file, record, schema)
    with temp_path.open('rb') as in_file:
        round_tripped = fastavro.load(in_file, schema)

    assert record == round_tripped
def start(self) -> None:
    """Consume Avro-encoded messages forever and mirror counters into the cache.

    For each message: decode it with TMDC_DATA_SCHEMA, and — if its tcu_id is
    registered in the cached 'system' tcu_list — bump per-system, per-database,
    per-device, and per-TCU message counters and record the Kafka offset, all
    inside a single cache transaction.

    NOTE(review): this method never returns; it is presumably meant to run in
    its own thread/process — confirm against the caller.
    """
    VALUE = 1  # increment step for every counter update below
    while True:
        for message in self.consumer:
            # message.value is the raw Avro-encoded payload; wrap it so
            # fastavro can read it as a file-like object.
            b_msg = io.BytesIO(message.value)
            offset = message.offset
            avro_message = fastavro.load(b_msg, TMDC_DATA_SCHEMA)
            tcu_id = avro_message['tcu_id']
            # Only the first value of the returned hash is used here;
            # presumably it is a collection of known TCU id strings.
            tcu_list, *_ = self.cache.get_hash('system', 'tcu_list')
            if str(tcu_id) in tcu_list:
                print(tcu_id)
                db_name, device_id = self.cache.get_hash(
                    TCU_KEY(tcu_id), 'db_name', 'device_id')
                # Group all counter/offset updates atomically so readers
                # never observe a partially-applied message.
                self.cache.start_transaction()
                self.cache.increment_field('system', COUNT, VALUE)
                self.cache.update_field('system', OFFSET, offset)
                self.cache.increment_field(DB_KEY(db_name), COUNT, VALUE)
                self.cache.update_field(DB_KEY(db_name), OFFSET, offset)
                self.cache.increment_field(DEVICE_KEY(db_name, device_id), COUNT, VALUE)
                self.cache.update_field(DEVICE_KEY(db_name, device_id), OFFSET, offset)
                self.cache.increment_field(TCU_KEY(tcu_id), COUNT, VALUE)
                self.cache.end_transaction()
#!/usr/bin/env python
# encoding: utf-8
"""Fastavro decoding benchmark.

Reads an Avro container file (argv[1]), re-serializes each record into an
in-memory buffer, then times repeated fastavro.load() calls over those
buffers and prints the mean decode time per record in milliseconds.
"""

from io import BytesIO
from itertools import repeat
from time import time

from fastavro import dump, load, acquaint_schema, reader as avro_reader
import sys

LOOPS = 2

# Bug fix: Avro containers are binary — open in 'rb' so the benchmark also
# works on platforms where text mode mangles the byte stream (e.g. Windows).
with open(sys.argv[1], 'rb') as reader:
    records = avro_reader(reader)
    SCHEMA = records.schema
    BUFS = []
    for record in records:
        buf = BytesIO()
        dump(buf, record, SCHEMA)
        BUFS.append(buf)

start = time()
n = 0
for _ in repeat(None, LOOPS):
    for buf in BUFS:
        n += 1
        buf.seek(0)
        record = load(buf, SCHEMA)

# Mean milliseconds per decoded record; guard against an empty input file,
# which previously crashed with ZeroDivisionError.
print(1000. * (time() - start) / n if n else 0.)
# encoding: utf-8
"""Fastavro decoding benchmark.

Loads every record of the Avro file given as argv[1] into per-record
in-memory buffers, then repeatedly decodes them with fastavro.load()
and prints the average decode latency per record in milliseconds.
"""

from io import BytesIO
from itertools import repeat
from time import time

from fastavro import dump, load, acquaint_schema, reader as avro_reader
import sys

LOOPS = 2

# Bug fix: the input is a binary Avro container, so it must be opened in
# 'rb' mode; text mode corrupts the stream on Windows.
with open(sys.argv[1], 'rb') as reader:
    records = avro_reader(reader)
    SCHEMA = records.schema
    BUFS = []
    for record in records:
        buf = BytesIO()
        dump(buf, record, SCHEMA)
        BUFS.append(buf)

start = time()
n = 0
for _ in repeat(None, LOOPS):
    for buf in BUFS:
        n += 1
        buf.seek(0)
        record = load(buf, SCHEMA)

# Average ms per record; avoid ZeroDivisionError when the file is empty.
print(1000. * (time() - start) / n if n else 0.)