def test_integration():
    """Round-trip check: rows written from a TableGroup into the DB read back unchanged."""
    tg = TableGroup.from_file(FIXTURES / 'csv.txt-metadata.json')
    expected = tg.read()
    db = Database(tg)
    db.write_from_tg()
    # Every table read back from the database must match what the
    # TableGroup originally yielded.
    loaded = db.read()
    for table_name in loaded:
        assert loaded[table_name] == expected[table_name]
def test_extra_columns(tmpdir):
    """Data columns absent from the schema raise ValueError unless explicitly skipped."""
    metadata = """{
"@context": ["http://www.w3.org/ns/csvw",{"@language": "en"}],
"dialect": {"header": true,"encoding": "utf-8-sig"},
"tables": [
{"url": "csv.txt","tableSchema": {"columns": [{"name": "ID", "datatype": "string"}]}}
]
}
"""
    tmpdir.join('md.json').write_text(metadata, encoding='utf8')
    # The data file carries an "extra" column that the schema does not declare.
    tmpdir.join('csv.txt').write_text('ID,extra\n1,ex', encoding='utf8')
    tg = TableGroup.from_file(str(tmpdir.join('md.json')))
    with warnings.catch_warnings():
        # Silence the "extra column" warning so only the error path is exercised.
        warnings.simplefilter("ignore")
        db = Database(tg, fname=str(tmpdir.join('test.sqlite')))
        with pytest.raises(ValueError):
            db.write_from_tg()
        # With both escape hatches set, the write must succeed.
        db.write_from_tg(_force=True, _skip_extra=True)
"""Ad-hoc driver: load a CSVW TableGroup, validate it, and dump it to SQLite."""
import csvw
from csvw.db import Database

tg = csvw.TableGroup.from_file(
    'tests/testModules/myTestModule1/metadata_inkl_schemas.json')
# Validate foreign-key references up front so schema problems surface
# before any database work happens.
tg.check_referential_integrity()
db = Database(tg, fname='test.sqlite')
db.write_from_tg()