Example #1
import pathlib

from pycldf import Generic
from pycldf.db import Database


def test_db_write_extra_tables(tmpdir):
    md = pathlib.Path(str(tmpdir)) / 'metadata.json'
    ds = Generic.in_dir(md.parent)
    ds.add_table('extra.csv', 'ID', 'Name')
    ds.write(md, **{'extra.csv': [dict(ID=1, Name='Name')]})

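    # write_from_tg() loads the dataset into SQLite; custom tables keep their file name.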
    db = Database(ds, fname=md.parent / 'db.sqlite')
    db.write_from_tg()
    assert len(db.query("""select * from "extra.csv" """)) == 1
Example #2
from clldutils.clilib import ParserError

from pycldf.db import Database


def createdb(args):
    """
    cldf createdb <DATASET> <SQLITE_DB_PATH>

    Load CLDF dataset <DATASET> into a SQLite DB, where <DATASET> may be the path to
    - a CLDF metadata file
    - a CLDF core data file
    """
    if len(args.args) < 2:
        raise ParserError('not enough arguments')
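    # _get_dataset (a helper defined alongside this command, not shown here)
    # resolves the first argument to a pycldf Dataset.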
    ds = _get_dataset(args)
    db = Database(ds, fname=args.args[1])
    db.write_from_tg()
    args.log.info('{0} loaded in {1}'.format(ds, db.fname))
Example #3
import pathlib
import warnings

from pycldf import Generic
from pycldf.db import Database


def test_db_write_extra_columns(tmpdir):
    with warnings.catch_warnings():
        warnings.simplefilter('ignore')
        md = pathlib.Path(str(tmpdir)) / 'metadata.json'
        ds = Generic.in_dir(md.parent)
        t = ds.add_table('extra.csv', 'ID', 'Name')
        ds.write(md, **{'extra.csv': [dict(ID=1, Name='Name')]})
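        # Drop the 'Name' column from the schema *after* the data (including
        # Name) has been written, so the CSV carries an undeclared extra column.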
        t.tableSchema.columns = [
            c for c in t.tableSchema.columns if c.name != 'Name'
        ]
        ds.write_metadata(md)

        db = Database(ds, fname=md.parent / 'db.sqlite')
        assert len(db.dataset['extra.csv'].tableSchema.columns) == 1
        db.write_from_tg()
        assert len(db.query("""select * from "extra.csv" """)[0]) == 1
Example #4
from pycldf import Dataset
from pycldf.db import Database


def test_db_write(tmpdir, data):
    ds = Dataset.from_metadata(data / 'ds1.csv-metadata.json')
    db = Database(ds, fname=str(tmpdir.join('db.sqlite')))
    db.write_from_tg()
    assert len(db.query("select * from ValueTable where cldf_parameterReference = 'fid1'")) == 1
    assert len(db.query('select * from SourceTable')) == 2
    assert len(db.query("select * from ValueTable_SourceTable where context = '2-5'")) == 1

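    # Multiple sources of a value are read back as a list.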
    assert db.read()['ValueTable'][0]['cldf_source'] == ['80086', 'meier2015[2-5]']
    db.to_cldf(str(tmpdir.join('cldf')))
    assert tmpdir.join('cldf', 'ds1.bib').check()
    assert '80086;meier2015[2-5]' in tmpdir.join('cldf', 'ds1.csv').read_text('utf8')
Example #5
from clldutils.clilib import ParserError

from pycldf.db import Database


def createdb(args):
    """
    cldf createdb <DATASET> <SQLITE_DB_PATH>

    Load CLDF dataset <DATASET> into a SQLite DB, where <DATASET> may be the path to
    - a CLDF metadata file
    - a CLDF core data file
    """
    if len(args.args) < 2:
        raise ParserError('not enough arguments')
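    # Unlike the variant in Example #2, this API creates the empty DB file
    # first and then loads the dataset into it.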
    db = Database(args.args[1])
    db.create()
    ds = _get_dataset(args)
    db.load(ds)
    args.log.info('{0} loaded in {1}'.format(ds, db.fname))
Example #6
import pytest


@pytest.fixture
def db(tmpdir):
    from pycldf.db import Database

    return Database(str(tmpdir / 'db.sqlite'))
Example #7
import pathlib
import sqlite3

import pytest

from pycldf import Generic
from pycldf.db import Database


def test_db_write_tables_with_fks(tmpdir, mocker):
    md = pathlib.Path(str(tmpdir)) / 'metadata.json'

    ds = Generic.in_dir(md.parent)
    t1 = ds.add_table('t1.csv', 'ID', 'Name')
    t2 = ds.add_table('t2.csv', 'ID', {'name': 'T1_ID', 'separator': ' '})
    t2.add_foreign_key('T1_ID', 't1.csv', 'ID')
    ds.write(
        md, **{
            't1.csv': [dict(ID='1', Name='Name')],
            't2.csv': [dict(ID='1', T1_ID=['1'])],
        })
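    # A foreign key pointing to a table without a primary key is rejected: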
    with pytest.raises(AssertionError):
        _ = Database(ds, fname=md.parent / 'db.sqlite')

    # Primary keys must be inferred ...
    db = Database(ds, fname=md.parent / 'db.sqlite', infer_primary_keys=True)
    db.write_from_tg()

    # ... or declared explicitly:
    t2.tableSchema.primaryKey = ['ID']
    db = Database(ds, fname=md.parent / 'db.sqlite')
    with pytest.raises(sqlite3.OperationalError):
        db.write_from_tg(_force=True)

    t1.tableSchema.primaryKey = ['ID']
    db = Database(ds, fname=md.parent / 'db.sqlite')
    db.write_from_tg(_force=True)

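    # Alternatively, declare the primary keys in the schema from the start: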
    ds = Generic.in_dir(md.parent)
    ds.add_table('t1.csv', 'ID', 'Name', primaryKey='ID')
    table = ds.add_table('t2.csv',
                         'ID', {
                             'name': 'T1_ID',
                             'separator': ' '
                         },
                         primaryKey='ID')
    table.add_foreign_key('T1_ID', 't1.csv', 'ID')
    ds.write(
        md, **{
            't1.csv': [dict(ID='1', Name='Name')],
            't2.csv': [dict(ID=1, T1_ID=['1'])],
        })
    db = Database(ds, fname=md.parent / 'db.sqlite')
    db.write_from_tg(_force=True)
Example #8
import pytest

from pycldf import Dataset
from pycldf.db import Database


def test_db_write(tmpdir, data):
    ds = Dataset.from_metadata(data / 'ds1.csv-metadata.json')
    db = Database(ds, fname=str(tmpdir.join('db.sqlite')))
    db.write_from_tg()
    assert len(
        db.query(
            "select * from ValueTable where cldf_parameterReference = 'fid1'")
    ) == 1
    assert len(db.query('select * from SourceTable')) == 3
    assert len(
        db.query(
            "select valuetable_cldf_id from ValueTable_SourceTable where context = '2-5'"
        )) == 1

    assert db.read()['ValueTable'][0]['cldf_source'] == [
        '80086', 'meier2015[2-5]'
    ]
    db.to_cldf(str(tmpdir.join('cldf')))
    assert tmpdir.join('cldf', 'ds1.bib').check()
    assert '80086;meier2015[2-5]' in tmpdir.join('cldf',
                                                 'ds1.csv').read_text('utf8')

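    # Rewriting an existing database must be forced explicitly: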
    with pytest.raises(ValueError):
        db.write_from_tg()

    with pytest.raises(NotImplementedError):
        db.write_from_tg(_exists_ok=True)

    db.write_from_tg(_force=True)
Example #9
import decimal

import pytest

from pycldf.db import Database


def test_db_geocoords():
    item = dict(cldf_latitude=decimal.Decimal(3.123456))
    # Compare against the rounded value; a bare `assert pytest.approx(...)`
    # would always pass, since an approx object is truthy.
    assert Database.round_geocoordinates(item)['cldf_latitude'] == pytest.approx(
        decimal.Decimal('3.1235'))