def test_schema_migration_maps_failure():
    """Migrating map values from string to long is invalid and must raise
    SchemaResolutionError when the records are read."""
    writer_schema = {
        "type": "record",
        "fields": [
            {"name": "test", "type": {"type": "map", "values": "string"}},
        ],
    }
    reader_schema = {
        "type": "record",
        "fields": [
            {"name": "test", "type": {"type": "map", "values": "long"}},
        ],
    }
    buf = MemoryIO()
    fastavro.writer(buf, writer_schema, [{"test": {"foo": "a"}}])
    buf.seek(0)
    migrated = fastavro.reader(buf, reader_schema)
    with pytest.raises(fastavro._reader.SchemaResolutionError):
        list(migrated)
def test_fastavro_errors_read_enum():
    """Reading an enum symbol absent from the reader schema raises an
    AvroValueError whose message includes the '<enum>' path marker."""
    fo = MemoryIO()
    writer_schema = {
        "type": "enum",
        "name": "Suit",
        "symbols": ["SPADES", "HEARTS", "DIAMONDS", "CLUBS"],
    }
    # Reader schema is missing the "CLUBS" symbol that gets written.
    reader_schema = {
        "type": "enum",
        "name": "Suit",
        "symbols": ["SPADES", "HEARTS", "DIAMONDS"],
    }
    write_data(fo, "CLUBS", writer_schema)
    fo.seek(0)
    try:
        read_data(fo, reader_schema)
        assert False, 'bad schema did not raise!'
    except AvroValueError as e:
        assert '<enum>' in str(e)
def test_schema_migration_array_failure():
    """Migrating array items from [string, int] to [string, boolean] is
    invalid and must raise SchemaResolutionError on read."""
    writer_schema = {
        "type": "record",
        "fields": [
            {"name": "test", "type": {"type": "array", "items": ["string", "int"]}},
        ],
    }
    reader_schema = {
        "type": "record",
        "fields": [
            {"name": "test", "type": {"type": "array", "items": ["string", "boolean"]}},
        ],
    }
    buf = MemoryIO()
    fastavro.writer(buf, writer_schema, [{"test": [1, 2, 3]}])
    buf.seek(0)
    migrated = fastavro.reader(buf, reader_schema)
    with pytest.raises(fastavro._reader.SchemaResolutionError):
        list(migrated)
def test_schema_migration_maps_failure():
    """Migrating map values from string to long is invalid and must raise
    SchemaResolutionError when the records are read.

    Fix: the original called ``list(new_reader)`` bare, so the expected
    SchemaResolutionError crashed the test instead of passing it, and a
    (buggy) silent success would have passed.  Assert the error explicitly.
    """
    schema = {
        "type": "record",
        "fields": [
            {"name": "test", "type": {"type": "map", "values": "string"}},
        ],
    }
    new_schema = {
        "type": "record",
        "fields": [
            {"name": "test", "type": {"type": "map", "values": "long"}},
        ],
    }
    new_file = MemoryIO()
    records = [{"test": {"foo": "a"}}]
    fastavro.writer(new_file, schema, records)
    new_file.seek(0)
    new_reader = fastavro.reader(new_file, new_schema)
    with pytest.raises(fastavro._reader.SchemaResolutionError):
        list(new_reader)
def test_schema_migration_array_failure():
    """Migrating array items from [string, int] to [string, boolean] is
    invalid and must raise SchemaResolutionError on read.

    Fix: the original called ``list(new_reader)`` bare, so the expected
    SchemaResolutionError crashed the test instead of passing it.  Assert
    the error explicitly.
    """
    schema = {
        "type": "record",
        "fields": [
            {"name": "test", "type": {"type": "array", "items": ["string", "int"]}},
        ],
    }
    new_schema = {
        "type": "record",
        "fields": [
            {"name": "test", "type": {"type": "array", "items": ["string", "boolean"]}},
        ],
    }
    new_file = MemoryIO()
    records = [{"test": [1, 2, 3]}]
    fastavro.writer(new_file, schema, records)
    new_file.seek(0)
    new_reader = fastavro.reader(new_file, new_schema)
    with pytest.raises(fastavro._reader.SchemaResolutionError):
        list(new_reader)
def test_write_long_union_type():
    """A large integer written into a ['null', 'long'] union must be accepted
    by the writer and round-trip unchanged.

    Fix: the bare ``assert False`` in the except branch gave no hint as to
    why the test failed; attach an explanatory message.
    """
    schema = {
        'name': 'test_name',
        'namespace': 'test_ns',
        'type': 'record',
        'fields': [
            {'name': 'time', 'type': ['null', 'long']},
        ],
    }
    new_file = MemoryIO()
    records = [{u'time': 809066167221092352}]
    try:
        fastavro.writer(new_file, schema, records)
    except ValueError:
        # The writer must pick the 'long' branch for a plain int.
        assert False, "writer raised ValueError for a valid long union value"
    new_file.seek(0)
    new_reader = fastavro.reader(new_file)
    new_records = list(new_reader)
    assert new_records == [{u'time': 809066167221092352}]
def test_schema_migration_array_with_union_promotion():
    """Array items written as [boolean, long] can be read as [string, float]
    because long promotes to float; records must round-trip unchanged."""
    writer_schema = {
        "type": "record",
        "fields": [
            {"name": "test", "type": {"type": "array", "items": ["boolean", "long"]}},
        ],
    }
    reader_schema = {
        "type": "record",
        "fields": [
            {"name": "test", "type": {"type": "array", "items": ["string", "float"]}},
        ],
    }
    records = [{"test": [1, 2, 3]}]
    buf = MemoryIO()
    fastavro.writer(buf, writer_schema, records)
    buf.seek(0)
    result = list(fastavro.reader(buf, reader_schema))
    assert result == records
def test_schema_migration_maps_failure():
    """Migrating map values from string to long must raise
    SchemaResolutionError when records are consumed."""
    writer_schema = {
        "type": "record",
        "fields": [
            {"name": "test", "type": {"type": "map", "values": "string"}},
        ],
    }
    reader_schema = {
        "type": "record",
        "fields": [
            {"name": "test", "type": {"type": "map", "values": "long"}},
        ],
    }
    buf = MemoryIO()
    fastavro.writer(buf, writer_schema, [{"test": {"foo": "a"}}])
    buf.seek(0)
    migrated = fastavro.reader(buf, reader_schema)
    # Equivalent to the original try/except/else: pass iff the error fires.
    with pytest.raises(fastavro._reader.SchemaResolutionError):
        list(migrated)
def test_aliases_not_present():
    """A reader field whose aliases do not include the written field name
    cannot be resolved and must raise SchemaResolutionError."""
    writer_schema = {
        "type": "record",
        "name": "test_aliases_not_present",
        "fields": [{"name": "test", "type": "double"}],
    }
    # "newtest" is aliased to "testX", which never matches "test".
    reader_schema = {
        "type": "record",
        "name": "test_aliases_not_present_new",
        "fields": [
            {"name": "newtest", "type": "double", "aliases": ["testX"]},
        ],
    }
    buf = MemoryIO()
    fastavro.writer(buf, writer_schema, [{"test": 1.2}])
    buf.seek(0)
    migrated = fastavro.reader(buf, reader_schema)
    with pytest.raises(fastavro.read.SchemaResolutionError):
        list(migrated)
def test_schema_migration_schema_mismatch():
    """A record writer schema cannot be resolved against an enum reader
    schema; reading must raise SchemaResolutionError."""
    writer_schema = {
        "type": "record",
        "fields": [{"name": "test", "type": "string"}],
    }
    reader_schema = {
        "type": "enum",
        "name": "test",
        "symbols": ["FOO", "BAR"],
    }
    buf = MemoryIO()
    fastavro.writer(buf, writer_schema, [{"test": "test"}])
    buf.seek(0)
    migrated = fastavro.reader(buf, reader_schema)
    # Equivalent to the original try/except/else: pass iff the error fires.
    with pytest.raises(fastavro._reader.SchemaResolutionError):
        list(migrated)
def test_schema_migration_reader_union():
    """An int written as a plain field can be read by a reader schema whose
    field is a union containing int."""
    writer_schema = {
        "type": "record",
        "name": "test_schema_migration_reader_union",
        "fields": [{"name": "test", "type": "int"}],
    }
    reader_schema = {
        "type": "record",
        "name": "test_schema_migration_reader_union_new",
        "fields": [{"name": "test", "type": ["string", "int"]}],
    }
    records = [{"test": 1}]
    buf = MemoryIO()
    fastavro.writer(buf, writer_schema, records)
    buf.seek(0)
    result = list(fastavro.reader(buf, reader_schema))
    assert result == records
def test_schema_migration_union_failure():
    """A boolean field cannot be resolved into a [string, int] reader union;
    reading must raise SchemaResolutionError."""
    writer_schema = {
        "type": "record",
        "fields": [{"name": "test", "type": "boolean"}],
    }
    reader_schema = {
        "type": "record",
        "fields": [{"name": "test", "type": ["string", "int"]}],
    }
    buf = MemoryIO()
    fastavro.writer(buf, writer_schema, [{"test": True}])
    buf.seek(0)
    migrated = fastavro.reader(buf, reader_schema)
    # Equivalent to the original try/except/else: pass iff the error fires.
    with pytest.raises(fastavro._reader.SchemaResolutionError):
        list(migrated)
def test_ordered_dict_map():
    """
    Write an Avro record containing a map field stored in an OrderedDict,
    then read it back.

    Regression test for a bug where plain dict was supported but other
    dict-like types were not.
    """
    schema = {
        "type": "record",
        "fields": [
            {"name": "test", "type": {"type": "map", "values": ["string", "int"]}},
        ],
    }
    map_ = OrderedDict()
    map_["foo"] = 1
    records = [{"test": map_}]
    buf = MemoryIO()
    fastavro.writer(buf, schema, records)
    buf.seek(0)
    result = list(fastavro.reader(buf))
    assert result == records
def test_write_read():
    """The module-level records round-trip through writer() and reader()
    with the module-level parsed schema."""
    buf = MemoryIO()
    writer(buf, parsed_schema, records)
    buf.seek(0)
    result = list(reader(buf, parsed_schema))
    assert result == records
def roundtrip(record, writer_schema, reader_schema):
    """Write a single record with writer_schema, read it back with
    reader_schema, and return the resulting record."""
    buf = MemoryIO()
    fastavro.writer(buf, writer_schema, [record])
    buf.seek(0)
    results = list(fastavro.reader(buf, reader_schema))
    return results[0]
def test_write_union_shortcut():
    """A (type_name, value) tuple selects the union branch explicitly when
    writing; the value reads back without the type-name wrapper."""
    schema = {
        "type": "record",
        "name": "A",
        "fields": [
            {
                "name": "a",
                "type": [
                    {
                        "type": "record",
                        "name": "B",
                        "fields": [{"name": "b", "type": "string"}],
                    },
                    {
                        "type": "record",
                        "name": "C",
                        "fields": [{"name": "c", "type": "string"}],
                    },
                ],
            },
        ],
    }
    buf = MemoryIO()
    # Tuple form ("B", {...}) pins the record to the B branch of the union.
    fastavro.writer(buf, schema, [{"a": ("B", {"b": "test"})}])
    buf.seek(0)
    result = list(fastavro.reader(buf))
    assert result == [{"a": {"b": "test"}}]
def test_nullable_values():
    """A ['string', 'null'] union field omitted from a record is written as
    null and reads back as None; a supplied value reads back unchanged.

    Fix: removed a stray debug ``print()`` left in the test body.
    """
    schema = {
        "type": "record",
        "fields": [
            {"name": "nullable_field", "type": ["string", "null"]},
            {"name": "field", "type": "string"},
        ],
    }
    new_file = MemoryIO()
    records = [{"field": "val"}, {"field": "val", "nullable_field": "no_null"}]
    fastavro.writer(new_file, schema, records)
    new_file.seek(0)
    new_reader = fastavro.reader(new_file)
    new_records = list(new_reader)
    assert new_records == [
        {'nullable_field': None, 'field': 'val'},
        {'nullable_field': 'no_null', 'field': 'val'},
    ]
def test_schema_migration_union_failure():
    """A boolean field cannot be resolved into a [string, int] reader union;
    reading must raise SchemaResolutionError."""
    writer_schema = {
        "type": "record",
        "name": "test_schema_migration_union_failure",
        "fields": [{"name": "test", "type": "boolean"}],
    }
    reader_schema = {
        "type": "record",
        "name": "test_schema_migration_union_failure_new",
        "fields": [{"name": "test", "type": ["string", "int"]}],
    }
    buf = MemoryIO()
    fastavro.writer(buf, writer_schema, [{"test": True}])
    buf.seek(0)
    migrated = fastavro.reader(buf, reader_schema)
    with pytest.raises(fastavro.read.SchemaResolutionError):
        list(migrated)
def check(filename):
    """Round-trip an existing Avro file: read it, rewrite its records with
    the same schema and codec, and verify schema, codec, and records all
    survive — including a no-op schema migration with the writer schema."""
    with open(filename, 'rb') as fo:
        file_reader = fastavro.reader(fo)
        assert hasattr(file_reader, 'schema'), 'no schema on file'

        # Some sample files intentionally carry no records.
        if basename(filename) in NO_DATA:
            return

        records = list(file_reader)
        assert len(records) > 0, 'no records found'

        buf = MemoryIO()
        fastavro.writer(buf, file_reader.schema, records, file_reader.codec)
        buf.seek(0)
        new_reader = fastavro.reader(buf)
        assert hasattr(new_reader, 'schema'), "schema wasn't written"
        assert new_reader.schema == file_reader.schema
        assert new_reader.codec == file_reader.codec
        assert list(new_reader) == records

        # Test schema migration with the same schema: must be a no-op.
        buf.seek(0)
        migration_reader = fastavro.reader(buf, file_reader.schema)
        assert migration_reader.reader_schema == file_reader.schema
        assert list(migration_reader) == records
def test_compression_level():
    """Records written with an explicit deflate compression level read back
    unchanged.  https://github.com/fastavro/fastavro/issues/377"""
    schema = {
        "doc": "A weather reading.",
        "name": "Weather",
        "namespace": "test",
        "type": "record",
        "fields": [
            {"name": "station", "type": "string"},
            {"name": "time", "type": "long"},
            {"name": "temp", "type": "int"},
        ],
    }
    records = [
        {"station": "011990-99999", "temp": 0, "time": 1433269388},
        {"station": "011990-99999", "temp": 22, "time": 1433270389},
        {"station": "011990-99999", "temp": -11, "time": 1433273379},
        {"station": "012650-99999", "temp": 111, "time": 1433275478},
    ]
    buf = MemoryIO()
    fastavro.writer(buf, schema, records, codec="deflate",
                    codec_compression_level=9)
    buf.seek(0)
    assert list(fastavro.reader(buf)) == records
def test_schema_migration_maps_with_union_promotion():
    """Map values written as [string, int] can be read as [string, long]
    via int-to-long promotion; records round-trip unchanged."""
    writer_schema = {
        "type": "record",
        "fields": [
            {"name": "test", "type": {"type": "map", "values": ["string", "int"]}},
        ],
    }
    reader_schema = {
        "type": "record",
        "fields": [
            {"name": "test", "type": {"type": "map", "values": ["string", "long"]}},
        ],
    }
    records = [{"test": {"foo": 1}}]
    buf = MemoryIO()
    fastavro.writer(buf, writer_schema, records)
    buf.seek(0)
    result = list(fastavro.reader(buf, reader_schema))
    assert result == records
def test_schema_migration_array_failure():
    """Migrating array items from [string, int] to [string, boolean] must
    raise SchemaResolutionError when records are consumed."""
    writer_schema = {
        "type": "record",
        "fields": [
            {"name": "test", "type": {"type": "array", "items": ["string", "int"]}},
        ],
    }
    reader_schema = {
        "type": "record",
        "fields": [
            {"name": "test", "type": {"type": "array", "items": ["string", "boolean"]}},
        ],
    }
    buf = MemoryIO()
    fastavro.writer(buf, writer_schema, [{"test": [1, 2, 3]}])
    buf.seek(0)
    migrated = fastavro.reader(buf, reader_schema)
    # Equivalent to the original try/except/else: pass iff the error fires.
    with pytest.raises(fastavro._reader.SchemaResolutionError):
        list(migrated)
def test_ordered_dict_record():
    """
    Write an Avro record using an OrderedDict and read it back.

    Regression test for a bug where plain dict was supported but other
    dict-like types were not.
    """
    schema = {
        "type": "record",
        "name": "Test",
        "namespace": "test",
        "fields": [
            {"name": "field", "type": {"type": "string"}},
        ],
    }
    record = OrderedDict()
    record["field"] = "foobar"
    records = [record]
    buf = MemoryIO()
    fastavro.writer(buf, schema, records)
    buf.seek(0)
    result = list(fastavro.reader(buf))
    assert result == records
def test_aliases_in_reader_schema():
    """A reader field aliased to the written field name picks up the
    written value under the new name."""
    writer_schema = {
        "type": "record",
        "name": "test_aliases_in_reader_schema",
        "fields": [{"name": "test", "type": "int"}],
    }
    reader_schema = {
        "type": "record",
        "name": "test_aliases_in_reader_schema_new",
        "fields": [
            {"name": "newtest", "type": "int", "aliases": ["test"]},
        ],
    }
    records = [{"test": 1}]
    buf = MemoryIO()
    fastavro.writer(buf, writer_schema, records)
    buf.seek(0)
    result = list(fastavro.reader(buf, reader_schema))
    assert result[0]["newtest"] == records[0]["test"]
def test_schema_is_custom_dict_type():
    """A dict subclass works as a schema for the schemaless reader.
    https://github.com/tebeka/fastavro/issues/168"""

    class CustomDict(dict):
        pass

    schema = {
        'type': 'record',
        'fields': [
            {
                'name': 'description',
                "type": [
                    "null",
                    {"type": "array", "items": "string"},
                    "string",
                ],
            },
        ],
        "name": "description",
        "doc": "A description of the thing.",
    }
    record = {'description': 'value'}

    buf = MemoryIO()
    fastavro.schemaless_writer(buf, schema, record)
    buf.seek(0)
    # Read back using the dict-subclass copy of the same schema.
    decoded = fastavro.schemaless_reader(buf, CustomDict(schema))
    assert record == decoded
def test_schemaless_writer_and_reader_with_union():
    """Testing basic functionality of reader with union when option to
    return_record_name is true.

    Fix: the schemaless_reader call passed ``None, True`` positionally,
    which obscures what the options mean; use keyword arguments.
    """
    schema = {
        "name": "Message",
        "type": "record",
        "namespace": "test",
        "fields": [
            {"name": "id", "type": "long"},
            {
                "name": "payload",
                "type": [
                    {
                        "name": "ApplicationCreated",
                        "type": "record",
                        "fields": [
                            {"name": "applicationId", "type": "string"},
                            {"name": "data", "type": "string"},
                        ],
                    },
                    {
                        "name": "ApplicationSubmitted",
                        "type": "record",
                        "fields": [
                            {"name": "applicationId", "type": "string"},
                            {"name": "data", "type": "string"},
                        ],
                    },
                ],
            },
        ],
    }
    record = input_record = {
        "id": 123,
        "payload": ("test.ApplicationSubmitted", {
            "applicationId": "123456789UT",
            "data": "...",
        }),
    }
    new_file = MemoryIO()
    fastavro.schemaless_writer(new_file, schema, record)
    new_file.seek(0)
    new_record = fastavro.schemaless_reader(
        new_file, schema, reader_schema=None, return_record_name=True,
    )
    assert record == new_record
def roundtrip(schema, records, new_schema):
    """Write records with schema, read them back with new_schema, and
    return the migrated records."""
    buf = MemoryIO()
    fastavro.writer(buf, schema, records)
    buf.seek(0)
    return list(fastavro.reader(buf, new_schema))
def roundtrip(schema, records):
    """Write records with schema and return them as read back from the
    resulting Avro container."""
    buf = MemoryIO()
    fastavro.writer(buf, schema, records)
    buf.seek(0)
    return list(fastavro.reader(buf))
def test_repo_caching_issue():
    """Writing a schema that reuses the names B and C with different field
    types must not be poisoned by cached definitions from an earlier write."""
    first_schema = {
        "type": "record",
        "name": "B",
        "fields": [
            {
                "name": "b",
                "type": {
                    "type": "record",
                    "name": "C",
                    "fields": [{"name": "c", "type": "string"}],
                },
            },
        ],
    }
    first_records = [{"b": {"c": "test"}}]
    buf = MemoryIO()
    fastavro.writer(buf, first_schema, first_records)
    buf.seek(0)
    assert list(fastavro.reader(buf)) == first_records

    # Same named types (B, C) but with c now an int, plus a by-name reuse
    # of B — must resolve against this schema, not the cached first one.
    other_schema = {
        "name": "A",
        "type": "record",
        "fields": [
            {
                "name": "a",
                "type": {
                    "type": "record",
                    "name": "B",
                    "fields": [
                        {
                            "name": "b",
                            "type": {
                                "type": "record",
                                "name": "C",
                                "fields": [{"name": "c", "type": "int"}],
                            },
                        },
                    ],
                },
            },
            {"name": "aa", "type": "B"},
        ],
    }
    other_records = [{"a": {"b": {"c": 1}}, "aa": {"b": {"c": 2}}}]
    buf = MemoryIO()
    fastavro.writer(buf, other_schema, other_records)
    buf.seek(0)
    assert list(fastavro.reader(buf)) == other_records
def test_default_values():
    """A field omitted from the written record comes back populated with
    its schema default."""
    schema = {
        "type": "record",
        "fields": [
            {"name": "default_field", "type": "string", "default": "default_value"},
        ],
    }
    buf = MemoryIO()
    fastavro.writer(buf, schema, [{}])
    buf.seek(0)
    result = list(fastavro.reader(buf))
    assert result == [{"default_field": "default_value"}]
def test_metadata():
    """User-supplied metadata written into the container header is exposed
    on the reader."""
    schema = {"type": "record", "fields": []}
    metadata = {"key": "value"}
    buf = MemoryIO()
    fastavro.writer(buf, schema, [{}], metadata=metadata)
    buf.seek(0)
    rdr = fastavro.reader(buf)
    assert rdr.metadata["key"] == metadata["key"]
def test_metadata():
    """User-supplied metadata written into the container header is exposed
    on the reader."""
    schema = {"type": "record", "fields": []}
    metadata = {'key': 'value'}
    buf = MemoryIO()
    fastavro.writer(buf, schema, [{}], metadata=metadata)
    buf.seek(0)
    rdr = fastavro.reader(buf)
    assert rdr.metadata['key'] == metadata['key']
def test_schemaless_write_read():
    """The first module-level record round-trips through the schemaless
    writer and reader with the module-level parsed schema.

    Fix: removed a duplicated ``seek(0)`` call and the stray non-English
    scratch comment that accompanied it.
    """
    new_file = MemoryIO()
    fastavro.schemaless_writer(new_file, parsed_schema, records[0])
    new_file.seek(0)
    new_record = fastavro.schemaless_reader(new_file, parsed_schema)
    assert records[0] == new_record
def test_builtin_codecs(codec):
    """Records round-trip unchanged through each built-in codec supplied by
    the test parameterization."""
    schema = {
        "doc": "A weather reading.",
        "name": "Weather",
        "namespace": "test",
        "type": "record",
        "fields": [
            {"name": "station", "type": "string"},
            {"name": "time", "type": "long"},
            {"name": "temp", "type": "int"},
        ],
    }
    records = [
        {"station": "011990-99999", "temp": 0, "time": 1433269388},
        {"station": "011990-99999", "temp": 22, "time": 1433270389},
        {"station": "011990-99999", "temp": -11, "time": 1433273379},
        {"station": "012650-99999", "temp": 111, "time": 1433275478},
    ]
    buf = MemoryIO()
    fastavro.writer(buf, schema, records, codec=codec)
    buf.seek(0)
    assert list(fastavro.reader(buf)) == records
def test_schema_migration_add_default_field():
    """A field added by the reader schema with a default is filled in for
    records written without it."""
    writer_schema = {"type": "record", "fields": []}
    reader_schema = {
        "type": "record",
        "fields": [
            {"name": "test", "type": "string", "default": "default"},
        ],
    }
    buf = MemoryIO()
    fastavro.writer(buf, writer_schema, [{}])
    buf.seek(0)
    result = list(fastavro.reader(buf, reader_schema))
    assert result == [{"test": "default"}]
def test_schema_migration_reader_union():
    """An int field resolves into a reader-side [string, int] union."""
    writer_schema = {
        "type": "record",
        "fields": [{"name": "test", "type": "int"}],
    }
    reader_schema = {
        "type": "record",
        "fields": [{"name": "test", "type": ["string", "int"]}],
    }
    records = [{"test": 1}]
    buf = MemoryIO()
    fastavro.writer(buf, writer_schema, records)
    buf.seek(0)
    result = list(fastavro.reader(buf, reader_schema))
    assert result == records
def roundtrip(schema, records, pass_schema_to_reader=False):
    """Write records with schema and return them as read back.

    When pass_schema_to_reader is true the same schema is supplied to the
    reader (exercising the schema-migration path); otherwise the reader
    uses the schema embedded in the container.
    """
    buf = MemoryIO()
    fastavro.writer(buf, schema, records)
    buf.seek(0)
    rdr = (fastavro.reader(buf, schema) if pass_schema_to_reader
           else fastavro.reader(buf))
    return list(rdr)
def test_schemaless_writer_and_reader():
    """A single record round-trips through the schemaless writer/reader
    pair without a container header."""
    schema = {
        "type": "record",
        "name": "Test",
        "namespace": "test",
        "fields": [
            {"name": "field", "type": {"type": "string"}},
        ],
    }
    record = {"field": "test"}
    buf = MemoryIO()
    fastavro.schemaless_writer(buf, schema, record)
    buf.seek(0)
    assert fastavro.schemaless_reader(buf, schema) == record
def make_blocks(num_records=2000, codec='null'):
    """Write num_records generated records with the given codec and return
    (blocks, records), where blocks come from fastavro.block_reader."""
    records = make_records(num_records)
    buf = MemoryIO()
    fastavro.writer(buf, schema, records, codec=codec)
    buf.seek(0)
    blocks = list(fastavro.block_reader(buf, schema))
    buf.close()
    return blocks, records
def check_concatenate(source_codec='null', output_codec='null'):
    """Concatenate the raw blocks of two files via Writer.write_block and
    verify the combined file yields both record sets in order."""
    blocks1, records1 = make_blocks(codec=source_codec)
    blocks2, records2 = make_blocks(codec=source_codec)

    buf = MemoryIO()
    w = fastavro.write.Writer(buf, schema, codec=output_codec)
    for block in blocks1 + blocks2:
        w.write_block(block)

    # Read the file back to make sure we get back the same stuff
    buf.seek(0)
    assert list(fastavro.reader(buf, schema)) == records1 + records2
def test_boolean_roundtrip():
    """Both boolean values survive a schemaless write/read round-trip.

    Fix: the original duplicated the whole write/read sequence for True
    and False; loop over both values instead.
    """
    schema = {
        "type": "record",
        "fields": [{"name": "field", "type": "boolean"}],
    }
    for value in (True, False):
        record = {"field": value}
        new_file = MemoryIO()
        fastavro.schemaless_writer(new_file, schema, record)
        new_file.seek(0)
        new_record = fastavro.schemaless_reader(new_file, schema)
        assert record == new_record
def test_reader_schema_attributes_throws_deprecation():
    """Accessing reader.schema emits a DeprecationWarning.
    https://github.com/fastavro/fastavro/issues/246"""
    schema = {
        "type": "record",
        "name": "test_reader_schema_attributes_throws_deprecation",
        "fields": [],
    }
    stream = MemoryIO()
    fastavro.writer(stream, schema, [{}])
    stream.seek(0)
    rdr = fastavro.reader(stream)
    with pytest.warns(DeprecationWarning):
        rdr.schema
def test_schemaless_writer_and_reader():
    """A single record round-trips through the schemaless writer/reader
    pair without a container header."""
    schema = {
        "type": "record",
        "name": "Test",
        "namespace": "test",
        "fields": [
            {"name": "field", "type": {"type": "string"}},
        ],
    }
    record = {"field": "test"}
    buf = MemoryIO()
    fastavro.schemaless_writer(buf, schema, record)
    buf.seek(0)
    decoded = fastavro.schemaless_reader(buf, schema)
    assert record == decoded
def test_schema_migration_enum_failure():
    """A written enum symbol that the reader schema lacks must raise
    SchemaResolutionError when records are consumed."""
    writer_schema = {"type": "enum", "name": "test", "symbols": ["FOO", "BAR"]}
    reader_schema = {"type": "enum", "name": "test", "symbols": ["BAZ", "BAR"]}
    buf = MemoryIO()
    fastavro.writer(buf, writer_schema, ["FOO"])
    buf.seek(0)
    migrated = fastavro.reader(buf, reader_schema)
    # Equivalent to the original try/except/else: pass iff the error fires.
    with pytest.raises(fastavro._reader.SchemaResolutionError):
        list(migrated)
def test_default_values():
    """A field omitted from the written record comes back populated with
    its schema default."""
    schema = {
        "type": "record",
        "fields": [
            {"name": "default_field", "type": "string", "default": "default_value"},
        ],
    }
    buf = MemoryIO()
    fastavro.writer(buf, schema, [{}])
    buf.seek(0)
    assert list(fastavro.reader(buf)) == [{"default_field": "default_value"}]
def test_fastavro_errors_read_record():
    """A nested-record type mismatch raises AvroValueError whose message
    spells out the full field path."""
    fo = MemoryIO()
    writer_schema = {
        "type": "record",
        "name": "extension_test",
        "doc": "Complex schema with avro extensions",
        "fields": [
            {
                "name": "x",
                "type": {
                    "type": "record",
                    "name": "inner",
                    "fields": [{"name": "y", "type": "int"}],
                },
            },
        ],
    }
    # Identical shape but inner field y is float instead of int.
    reader_schema = {
        "type": "record",
        "name": "extension_test",
        "doc": "Complex schema with avro extensions",
        "fields": [
            {
                "name": "x",
                "type": {
                    "type": "record",
                    "name": "inner",
                    "fields": [{"name": "y", "type": "float"}],
                },
            },
        ],
    }
    write_data(fo, {"x": {"y": 0}}, writer_schema)
    fo.seek(0)
    try:
        read_data(fo, reader_schema)
        assert False, 'bad schema did not raise!'
    except AvroValueError as e:
        assert '<record>.x.<record>.y' in str(e)
def test_schema_migration_enum_failure():
    """A written enum symbol that the reader schema lacks must raise
    SchemaResolutionError when records are consumed.

    Fix: the original called ``list(new_reader)`` bare, so the expected
    SchemaResolutionError crashed the test instead of passing it (compare
    the sibling version that checks the exception).  Assert it explicitly.
    """
    schema = {
        "type": "enum",
        "name": "test",
        "symbols": ["FOO", "BAR"],
    }
    new_schema = {
        "type": "enum",
        "name": "test",
        "symbols": ["BAZ", "BAR"],
    }
    new_file = MemoryIO()
    records = ["FOO"]
    fastavro.writer(new_file, schema, records)
    new_file.seek(0)
    new_reader = fastavro.reader(new_file, new_schema)
    with pytest.raises(fastavro._reader.SchemaResolutionError):
        list(new_reader)
def test_fastavro_complex_nested():
    """A complex nested schema (loaded from complex-nested.avsc) with avro
    extensions enabled round-trips both its schema and its records."""
    fo = MemoryIO()
    with open(join(data_dir, 'complex-nested.avsc')) as f:
        schema = json.load(f)

    records = [{
        "test_boolean": True,
        "test_int": 10,
        "test_long": 20,
        "test_float": 2.0,
        "test_double": 2.0,
        "test_bytes": b'asdf',
        "test_string": 'qwerty',
        "second_level": {
            "test_int2": 100,
            "test_string2": "asdf",
            "default_level": {
                "test_int_def": 1,
                "test_string_def": "nope",
            },
        },
        "fixed_int8": 1,
        "fixed_int16": 2,
        "fixed_int32": 3,
        "fixed_int64": 4,
        "fixed_uint8": 1,
        "fixed_uint16": 2,
        "fixed_uint32": 3,
        "fixed_uint64": 4,
        "fixed_int8_2": 12,
    }]

    fastavro.writer(fo, schema, records, enable_extensions=True)
    fo.seek(0)
    rdr = fastavro.reader(fo, enable_extensions=True)
    assert rdr.schema == schema
    assert list(rdr) == records
def test_schema_migration_schema_mismatch():
    """A record writer schema cannot be resolved against an enum reader
    schema; reading must raise SchemaResolutionError.

    Fix: the original called ``list(new_reader)`` bare, so the expected
    SchemaResolutionError crashed the test instead of passing it (compare
    the sibling version that checks the exception).  Assert it explicitly.
    """
    schema = {
        "type": "record",
        "fields": [{"name": "test", "type": "string"}],
    }
    new_schema = {
        "type": "enum",
        "name": "test",
        "symbols": ["FOO", "BAR"],
    }
    new_file = MemoryIO()
    records = [{"test": "test"}]
    fastavro.writer(new_file, schema, records)
    new_file.seek(0)
    new_reader = fastavro.reader(new_file, new_schema)
    with pytest.raises(fastavro._reader.SchemaResolutionError):
        list(new_reader)
def test_default_values_in_reader():
    """A reader-schema field missing from the written data is filled in
    from its default by the schemaless reader."""
    writer_schema = {
        'name': 'name1',
        'type': 'record',
        'namespace': 'namespace1',
        'doc': 'test',
        'fields': [
            {'doc': 'test', 'type': 'int', 'name': 'good_field'},
        ],
    }
    reader_schema = {
        'name': 'name1',
        'doc': 'test',
        'namespace': 'namespace1',
        'type': 'record',
        'fields': [
            {'name': 'good_field', 'doc': 'test', 'type': 'int'},
            # Not written; must appear with its default value.
            {'name': 'good_compatible_field', 'doc': 'test',
             'default': 1, 'type': 'int'},
        ],
    }
    buf = MemoryIO()
    fastavro.schemaless_writer(buf, writer_schema, {'good_field': 1})
    buf.seek(0)
    decoded = fastavro.schemaless_reader(buf, writer_schema, reader_schema)
    assert decoded == {'good_field': 1, 'good_compatible_field': 1}
def test_fastavro_errors_read_map():
    """A float-to-double map value mismatch raises AvroValueError with the
    map key in the reported path."""
    fo = MemoryIO()
    writer_schema = {"type": "map", "values": "float"}
    reader_schema = {"type": "map", "values": "double"}
    write_data(fo, {"x": 0}, writer_schema)
    fo.seek(0)
    try:
        read_data(fo, reader_schema)
        assert False, 'bad schema did not raise!'
    except AvroValueError as e:
        assert '<map>.x.<double>' in str(e)
def test_fastavro_errors_read_array():
    """An int-to-float array item mismatch raises AvroValueError with the
    failing element index in the reported path."""
    fo = MemoryIO()
    writer_schema = {"type": "array", "items": "int"}
    reader_schema = {"type": "array", "items": "float"}
    write_data(fo, [10, 20, 30], writer_schema)
    fo.seek(0)
    try:
        read_data(fo, reader_schema)
        assert False, 'bad schema did not raise!'
    except AvroValueError as e:
        # .[1] because the first element is read succesfully
        # (but would be corrupt)
        assert '<array>.[1].<float>' in str(e)
def test_fastavro_extensions():
    """Fixed-size signed/unsigned integer extension types, including a
    by-name reuse of uint64_t, round-trip at their boundary values when
    extensions are enabled."""
    fo = MemoryIO()
    schema = {
        "type": "record",
        "name": "extension_test",
        "doc": "Complex schema with avro extensions",
        "fields": [
            {"name": "fixed_int8",
             "type": {"type": "fixed", "name": "int8_t", "size": 1}},
            {"name": "fixed_int16",
             "type": {"type": "fixed", "name": "int16_t", "size": 2}},
            {"name": "fixed_int32",
             "type": {"type": "fixed", "name": "int32_t", "size": 4}},
            {"name": "fixed_int64",
             "type": {"type": "fixed", "name": "int64_t", "size": 8}},
            {"name": "fixed_uint8",
             "type": {"type": "fixed", "name": "uint8_t", "size": 1}},
            {"name": "fixed_uint16",
             "type": {"type": "fixed", "name": "uint16_t", "size": 2}},
            {"name": "fixed_uint32",
             "type": {"type": "fixed", "name": "uint32_t", "size": 4}},
            {"name": "fixed_uint64",
             "type": {"type": "fixed", "name": "uint64_t", "size": 8}},
            # Reuses the already-declared uint64_t type by name.
            {"name": "fixed_uint64_2", "type": "uint64_t"},
        ],
    }
    records = [
        {
            # Extremes of each fixed-width type.
            "fixed_int8": 127,
            "fixed_int16": -32768,
            "fixed_int32": 2147483647,
            "fixed_int64": 9223372036854775807,
            "fixed_uint8": 2**8 - 1,
            "fixed_uint16": 2**16 - 1,
            "fixed_uint32": 2**32 - 1,
            "fixed_uint64": 2**64 - 1,
            "fixed_uint64_2": 0,
        },
        {
            # Small mixed-sign values.
            "fixed_int8": 1,
            "fixed_int16": -2,
            "fixed_int32": 3,
            "fixed_int64": -4,
            "fixed_uint8": 10,
            "fixed_uint16": 20,
            "fixed_uint32": 30,
            "fixed_uint64": 40,
            "fixed_uint64_2": 1000,
        },
    ]
    fastavro.writer(fo, schema, records, enable_extensions=True)
    fo.seek(0)
    rdr = fastavro.reader(fo, enable_extensions=True)
    assert rdr.schema == schema
    assert list(rdr) == records