def test_spss_storage_constraints(tmpdir):
    """Round-trip a constraints package through SPSS storage.

    SPSS has no notion of Table Schema constraints, so on export every
    constraint is expected to be dropped while field names, types, and
    the row values themselves survive the round trip.
    """
    # Export the source package to SPSS files, then read it back
    source = Package("data/storage/constraints.json")
    storage = source.to_spss(basepath=tmpdir, force=True)
    target = Package.from_spss(basepath=tmpdir)
    resource = target.get_resource("constraints")

    # Metadata: each field keeps name/type but loses its constraint
    assert resource.schema == {
        "fields": [
            {"name": "required", "type": "string"},  # constraint removal
            {"name": "minLength", "type": "string"},  # constraint removal
            {"name": "maxLength", "type": "string"},  # constraint removal
            {"name": "pattern", "type": "string"},  # constraint removal
            {"name": "enum", "type": "string"},  # constraint removal
            {"name": "minimum", "type": "integer"},  # constraint removal
            {"name": "maximum", "type": "integer"},  # constraint removal
        ],
    }

    # Data: values round-trip unchanged
    assert resource.read_rows() == [
        {
            "required": "passing",
            "minLength": "passing",
            "maxLength": "passing",
            "pattern": "passing",
            "enum": "passing",
            "minimum": 5,
            "maximum": 5,
        },
    ]

    # Cleanup storage
    storage.delete_package(target.resource_names)
def test_spss_storage_types(tmpdir):
    """Round-trip a package covering every Table Schema type through SPSS.

    SPSS supports only a narrow set of types, so complex types
    (array/object/geojson/...) are expected to fall back to string,
    `year` to downgrade to integer, and per-field formats to be dropped;
    natively supported types (date, datetime, time, integer, number,
    string) round-trip intact.
    """
    # Export the source package to SPSS files, then read it back
    source = Package("data/storage/types.json")
    storage = source.to_spss(basepath=tmpdir, force=True)
    target = Package.from_spss(basepath=tmpdir)
    resource = target.get_resource("types")

    # Metadata: supported types preserved, everything else degraded
    assert resource.schema == {
        "fields": [
            {"name": "any", "type": "string"},  # type fallback
            {"name": "array", "type": "string"},  # type fallback
            {"name": "boolean", "type": "string"},  # type fallback
            {"name": "date", "type": "date"},
            {"name": "date_year", "type": "date"},  # format removal
            {"name": "datetime", "type": "datetime"},
            {"name": "duration", "type": "string"},  # type fallback
            {"name": "geojson", "type": "string"},  # type fallback
            {"name": "geopoint", "type": "string"},  # type fallback
            {"name": "integer", "type": "integer"},
            {"name": "number", "type": "number"},
            {"name": "object", "type": "string"},  # type fallback
            {"name": "string", "type": "string"},
            {"name": "time", "type": "time"},
            {"name": "year", "type": "integer"},  # type downgrade
            {"name": "yearmonth", "type": "string"},  # type fallback
        ],
    }

    # Data: degraded fields come back as their string serializations
    assert resource.read_rows() == [
        {
            "any": "中国人",
            "array": '["Mike", "John"]',
            "boolean": "true",
            "date": datetime.date(2015, 1, 1),
            "date_year": datetime.date(2015, 1, 1),
            "datetime": datetime.datetime(2015, 1, 1, 3, 0),
            "duration": "P1Y1M",
            "geojson": '{"type": "Point", "coordinates": [33, 33.33]}',
            "geopoint": "30,70",
            "integer": 1,
            "number": 7.0,
            "object": '{"chars": 560}',
            "string": "english",
            "time": datetime.time(3, 0),
            "year": 2015,
            "yearmonth": "2015-01",
        },
    ]

    # Cleanup storage
    storage.delete_package(target.resource_names)
def test_spss_storage_integrity(tmpdir):
    """Round-trip a two-resource package with integrity metadata through SPSS.

    SPSS cannot represent primary keys, foreign keys, or `required`
    constraints, so all integrity metadata is expected to be dropped on
    export while field names, types, and row data are preserved for both
    the main and link resources.
    """
    # Export the source package to SPSS files, then read it back
    source = Package("data/storage/integrity.json")
    storage = source.to_spss(basepath=tmpdir, force=True)
    target = Package.from_spss(basepath=tmpdir)
    main = target.get_resource("integrity_main")
    link = target.get_resource("integrity_link")

    # Metadata (main): fields survive; pk/fk declarations are gone
    assert main.schema == {
        "fields": [
            # added required
            {"name": "id", "type": "integer"},
            {"name": "parent", "type": "integer"},
            {"name": "description", "type": "string"},
        ],
        # primary key removal
        # foreign keys removal
    }

    # Metadata (link): same degradation for the linked resource
    assert link.schema == {
        "fields": [
            {"name": "main_id", "type": "integer"},
            {"name": "some_id", "type": "integer"},  # constraint removal
            {"name": "description", "type": "string"},  # constraint removal
        ],
        # primary key removal
        # foreign keys removal
    }

    # Data (main): rows round-trip, including the NULL parent
    assert main.read_rows() == [
        {"id": 1, "parent": None, "description": "english"},
        {"id": 2, "parent": 1, "description": "中国人"},
    ]

    # Data (link): rows round-trip unchanged
    assert link.read_rows() == [
        {"main_id": 1, "some_id": 1, "description": "note1"},
        {"main_id": 2, "some_id": 2, "description": "note2"},
    ]

    # Cleanup storage
    storage.delete_package(target.resource_names)