def test_timestamp(session):
    """Writing rows must auto-populate the created_at timestamp column."""
    member_view = View(
        'member',
        ['name', 'team.country.name', 'team.name', 'registration_code'])
    member_view.write(members)
    # Every row written above should carry a non-null created_at
    rows = View('member', ['created_at']).read()
    assert all(rows)
def test_update_relation_filter(session):
    """Write filters expressed on a related (m2o) column are honored."""
    # Add all members to table
    inject('member', 'acl-write', [])
    member_view = View('member', ['registration_code', 'name'])
    View('member', member_cols).write(members)

    # Test update with filter on relation: only "Blue" team is writable
    inject('member', 'acl-write', ['(= team.name "Blue")'])
    member_view = View('member', ['registration_code', 'name'])
    member_view.write([
        ('001', 'BOB'),
        ('002', 'ALICE'),
    ])
    rows = member_view.read('(in registration_code "001" "002")').all()
    # Only 001 (Blue team) was updated; 002 keeps its original name
    assert sorted(rows) == [('001', 'BOB'), ('002', 'Alice')]

    # Nasty test, when we change the value of the column supporting
    # the filter itself
    member_view = View(
        'member', ['registration_code', 'team.name', 'team.country.name'])
    member_view.write([
        ('001', 'Red', 'Belgium'),   # Blue to Red transition
        ('002', 'Blue', 'Belgium'),  # Red to Blue transition
    ])
    rows = member_view.read('(in registration_code "001" "002")').all()
    assert sorted(rows) == [('001', 'Blue', 'Belgium'),
                            ('002', 'Red', 'Belgium')]
def test_filter_args(session):
    """Write filters may contain `{}` placeholders fed through `args`."""
    # Seed the member table (the index is registration_code)
    full_view = View(
        'member',
        ['name', 'team.country.name', 'team.name', 'registration_code'])
    full_view.write(members)

    # Only rows matching the arg-bound filter may be written
    fltr = '(= registration_code {})'
    member_view = View('member', ['registration_code', 'name'])
    new_rows = [
        ('004', 'Carol'),
        ('005', 'Dan'),
    ]
    member_view.write(new_rows, filters=fltr, args=['004'])

    # 005/Dan was filtered out, 004/Carol went through
    expected = [
        ('001', 'Bob'),
        ('002', 'Alice'),
        ('003', 'Trudy'),
        ('004', 'Carol'),
    ]
    check(expected, member_view.read())
def test_partial_write(session):
    """Updating a subset of columns must leave the other columns intact."""
    # member table is empty by default
    full_view = View(
        'member',
        ['name', 'team.country.name', 'team.name', 'registration_code'])
    full_view.write(members)

    # Snapshot id -> name before the partial update
    name_view = View('member', ['id', 'name'])
    id2name = dict(name_view.read().all())

    partial_view = View('member', ['name', 'registration_code'])
    partial_view.write([['Bob', '001']])

    # Makes sure no other column was reset to null
    row = full_view.read('(= name "Bob")').one()
    assert all(row)

    # Ids and names are unchanged by the partial write
    for member_id, name in name_view.read():
        assert id2name[member_id] == name
def test_mixed(session):
    """Read from a TankerThread while the main context also reads/writes.

    The worker thread (the module-level `read` target) emits one row per
    'tic' pushed on `in_q`; results coming back on `out_q` must match what
    the main connection sees.
    """
    view = View('country', ['name'])
    view.write([('Italy', )])
    countries = [c for c, in view.read()]
    in_q = Queue()
    out_q = Queue()
    # Needed because table creation and content is not committed yet
    ctx.connection.commit()
    # We re-use the current config to create a nested context
    with connect(ctx.cfg):
        t = TankerThread(target=read, args=(in_q, out_q))
        t.start()
        res = []
        for _ in countries:
            in_q.put('tic')
            # out_q items are (something, value) pairs; keep the value
            res.append(out_q.get()[1])
        # Release thread loop & wait for it
        in_q.put('tic')
        t.join()
        assert 'Italy' in res
        assert res == countries
def test_array(session):
    """Round-trip array columns, then exercise postgres array operators."""
    samples = {
        'int': [(1, [1, 2])],
        'float': [(1, [1.0, 2.0])],
        'bool': [(1, [True, False])],
        # TODO add timestamp & date
    }
    for kind, datum in samples.items():
        view = View('kitchensink', ['index', '%s_array' % kind])
        view.write(datum)
        assert view.read().all() == datum

    if ctx.flavor == 'sqlite':
        # The operators below are postgres-specific
        return

    flrt = '(= 1 (any int_array))'
    assert len(view.read(flrt).all()) == 1

    flrt = '(!= 3 (all int_array))'
    assert len(view.read(flrt).all()) == 1

    rows = View('kitchensink', ['index', '(unnest int_array)']).read().all()
    assert len(rows) == 2
def test_update_filters(session):
    """A write filter rejects update rows that do not match it."""
    # Seed the member table (the index is registration_code)
    full_view = View(
        'member',
        ['name', 'team.country.name', 'team.name', 'registration_code'])
    full_view.write(members)

    # Only registration_code 001 may be updated
    fltr = '(= registration_code "001")'
    member_view = View('member', ['registration_code', 'name'])
    updates = [
        ('001', 'BOB'),
        ('003', 'TRUDY'),
    ]
    cnt = member_view.write(updates, filters=fltr)
    # The 003 row was rejected by the filter
    assert cnt['filtered'] == 1

    expected = [
        ('001', 'BOB'),
        ('002', 'Alice'),
        ('003', 'Trudy'),
    ]
    check(expected, member_view.read())
def test_lru(session):
    """FK resolution stays correct when the LRU cache overflows (3x)."""
    tanker.LRU_SIZE = 10
    tanker.LRU_PAGE_SIZE = 5
    factor = 3
    nb_record = tanker.LRU_SIZE * factor

    country_view = View('country', ['name'])
    team_view = View('team', ['name', 'country.name'])

    # Fill country table, clean team table
    country_view.write([('c%s' % i, ) for i in range(nb_record)])
    team_view.delete()

    # Fill team table to trigger lru on country fk
    rows = [('t%s' % i, 'c%s' % i) for i in range(nb_record)]
    seed(1)  # Reset seed to get determinism
    shuffle(rows)
    team_view.write(rows)

    teams = team_view.read().all()
    assert len(teams) == nb_record
    # Each team t<i> must have been linked to country c<i>
    for team_name, country_name in teams:
        assert team_name[0] == 't'
        assert country_name[0] == 'c'
        assert team_name[1:] == country_name[1:]
def test_purge_filters(session):
    """purge=True only deletes rows inside the write filter's scope."""
    teams = [
        ['Red', 'Belgium'],
    ]
    # Blue-Belgium is missing
    fltr = '(= country.name "Belgium")'
    # We restrict writes to belgium
    team_view = View('team', ['name', 'country.name'])
    team_view.write(teams, purge=True, filters=fltr)
    # Blue-Belgium was purged; Blue-France is outside the filter's scope
    expected = [
        ('Red', 'Belgium'),
        ('Blue', 'France'),
    ]
    check(expected, team_view.read())

    # Opposite filter
    fltr = '(!= country.name "Belgium")'
    # We don't purge belgium
    team_view.write(teams, purge=True, filters=fltr)
    expected = [
        ('Red', 'Belgium'),
    ]
    check(expected, team_view.read())
def test_function(session):
    """SQL functions (floor, extract, arithmetic) can be read fields."""
    # TODO add support for sqlite on epoch and floor
    if ctx.flavor == 'sqlite':
        return
    input_record = {
        'index': 1,
        'floor': 1.1,
        'epoch': datetime(1970, 1, 1),
        'year': datetime(1970, 1, 1),
    }
    # Map of s-expression field -> expected value
    output_record = {
        '(floor floor)': 1,
        '(extract (epoch) epoch)': 0,
        '(extract (year) year)': 1970,
        'floor': 1.1,
        '(* floor 2)': 2.2,
    }
    View('kitchensink').write([input_record])
    fields, expected = zip(*output_record.items())
    rows = View('kitchensink', fields).read().all()
    assert rows[0] == expected
def test_bytea(session):
    """Binary payloads survive a write/read round-trip."""
    payload = b'\x1d\xea\xdb\xee\xff'
    sink_view = View('kitchensink', ['index', 'bytea'])
    sink_view.write([(1, payload)])
    rows = sink_view.read().all()
    # Driver may return a buffer/memoryview, hence the bytes() call
    assert bytes(rows[0][1]) == payload
def test_read_write(session):
    """DataFrame round-trip: a written frame shows up in subsequent reads."""
    import pandas as pd

    view = View('country', {'Name': 'name'})
    db_df = view.read().df()
    new_df = DataFrame({'Name': ['Italy']})
    view.write(new_df)
    updated_df = view.read().df()
    # DataFrame.append was deprecated in pandas 1.4 and removed in 2.0;
    # concat with ignore_index=True is the supported equivalent of
    # append(...).reset_index(drop=True).
    expected = pd.concat([db_df, new_df], ignore_index=True)
    assert all(expected == updated_df)
def test_no_fields(session):
    # No fields are provided, should fallback to table definition
    country_view = View('country')
    country_view.write([
        ('Italy', ),
    ])
    expected = [('Belgium', ), ('Italy', ), ('France', ), ('Holland', )]
    check(expected, country_view.read())
def test_delete_data_extra_col(session):
    """delete(data=...) accepts rows carrying more columns than the key."""
    full_view = View(
        'member',
        ['name', 'team.country.name', 'team.name', 'registration_code'])
    full_view.write(members)
    assert len(full_view.read().all()) == len(members)

    # Deleting with the very rows we inserted empties the table
    full_view.delete(data=members)
    assert full_view.read().all() == []
def test_str(session):
    """Non-ascii values work in writes, dict filters and s-expr filters."""
    japan = '日本'
    country_view = View('country', ['name'])
    country_view.write([(japan, )])

    # Dict-style filter
    row = country_view.read(filters={'name': japan}).one()
    assert row[0] == japan

    # S-expression filter
    row = country_view.read('(= name "%s")' % japan).one()
    assert row[0] == japan
def test_reserved_words(session): record = { 'index': 1, 'true': True, 'false': False, 'null': None, 'integer': 1, 'bigint': 10000000000, 'float': 1.0, 'bool': True, 'timestamp': datetime(1970, 1, 1), 'date': date(1970, 1, 1), 'varchar': 'varchar', 'bytea': b'\x00', 'int_array': [1, 2], 'bool_array': [True, False], 'ts_array': [datetime(1970, 1, 1), datetime(1970, 1, 2)], 'char_array': ['ham', 'spam'], 'jsonb': '{"ham": "spam"}', } # Write actual values ks_view = View('kitchensink', list(record.keys())) ks_view.write([record]) res = list(ks_view.read().dict())[0] for k, v in record.items(): if ctx.flavor == 'sqlite' and k.endswith('array'): # Array support with sqlite is incomplete continue if k == 'bytea': assert bytes(res[k]) == v elif k == 'jsonb': assert res[k] == json.loads(v) else: assert res[k] == v # Filters for k, v in record.items(): if isinstance(v, list): continue cond = '(is %s {})' if k == 'null' else '(= %s {})' res = ks_view.read(cond % k, args=[v]).all() assert len(res) == 1 # Write nulls for k in record: if k == 'index': continue record[k] = None ks_view.write([record]) res = list(ks_view.read().dict())[0] for k, v in record.items(): assert res[k] == v
def test_time_basic(session):
    # Write the module-level `record` and check every column reads back
    # equal to the original value.
    # NOTE(review): ts_fmt / ts_tz_fmt are never used in the loop body, so
    # each iteration writes the same record — looks copy-pasted from
    # test_time_formats; confirm the loop is intentional.
    view = View('timeseries')
    for ts_fmt, ts_tz_fmt in zip(ts_formats, ts_tz_formats):
        view.write([record])
        keys = list(record)
        if ctx.flavor == 'sqlite':
            # sqlite dbapi does not adapt tz
            keys.remove('timestamptz')
        for col in keys:
            value, = View('timeseries', [col]).read().one()
            assert value == record[col]
def test_null_key(session, bogus_value, fk_field):
    '''
    Insertion should fail if any value part of the key is null
    (because null != null in sql).
    '''
    team_view = View('team', ['name', fk_field])
    bogus_row = ['Pink', bogus_value]
    # Depending on flavor and field, failure surfaces as different errors
    expected = (psycopg2.IntegrityError, sqlite3.IntegrityError, ValueError,
                TypeError)
    with pytest.raises(Exception) as exc:
        team_view.write([bogus_row])
    assert isinstance(exc.value, expected)
def test_unicode(session):
    """Same round-trip as test_str, with py2 utf-8 encoding handling."""
    korea = u'Corée'
    if PY2:
        korea = korea.encode('utf-8')
    country_view = View('country', ['name'])
    country_view.write([(korea, )])

    # Dict-style filter
    row = country_view.read(filters={'name': korea}).one()
    assert row[0] == korea

    # S-expression filter
    row = country_view.read('(= name "%s")' % korea).one()
    assert row[0] == korea
def test_jsonb(session):
    """JSON values round-trip and can be filtered with `->>`."""
    sink_view = View('kitchensink', ['index', 'jsonb'])
    sink_view.write([(1, {'ham': 'spam'})])
    rows = sink_view.read().all()
    assert rows[0][1]['ham'] == 'spam'

    if ctx.flavor == 'sqlite':
        # The ->> operator below is postgres-specific
        return
    flrt = '(= "spam" (->> jsonb "ham"))'
    rows = sink_view.read(flrt).all()
    assert len(rows) == 1
    assert rows[0][1]['ham'] == 'spam'
def test_time_formats(session):
    """Date/time values given as formatted strings are parsed on write."""
    view = View('timeseries')
    for ts_fmt, ts_tz_fmt in zip(ts_formats, ts_tz_formats):
        fmt_record = {
            'timestamp': record['timestamp'].strftime(ts_fmt),
            'timestamptz': record['timestamptz'].strftime(ts_tz_fmt),
            'date': record['date'].strftime(date_fmt),
        }
        if ctx.flavor == 'sqlite':
            # strftime doesn't know %s in py2
            fmt_record.pop('timestamptz')
        view.write([fmt_record])
        # Reading back must yield the original (non-string) values
        for col in fmt_record:
            value, = View('timeseries', [col]).read().one()
            assert value == record[col]
def test_manual_conn(session):
    """Exercise manual connection management via connect(cfg, 'enter'/'leave').

    A write done between the manual enter and leave must still be visible
    from a fresh context afterwards.
    """
    country_view = View('country', ['name'])
    res = country_view.read({'name': 'Prussia'}).one()
    assert res is None
    # Needed to not lock other connections
    ctx.connection.commit()
    # Manually start and stop of the connection
    cfg = {'db_uri': session, 'schema': SCHEMA}
    connect(cfg, 'enter')
    country_view.write([['Prussia']])
    connect(cfg, 'leave')
    # Makes sure result is not lost
    with connect(cfg):
        assert country_view.read({'name': 'Prussia'}).one()[0] == 'Prussia'
def test_kitchensink(session):
    """A DataFrame covering many column types round-trips unchanged."""
    df = DataFrame({
        'index': arange(10),
        'bigint': arange(10),
        'float': arange(10).astype('f8'),
        'true': asarray([True] * 10),
        'false': asarray([False] * 10),
        'varchar': ['spam'] * 10,
        'timestamp': asarray(range(10), dtype="M8[s]"),
        'date': date_range('1970-01-01', '1970-01-10', freq='D'),
    })
    cols = list(df.columns)
    sink_view = View('kitchensink', cols)
    sink_view.write(df)
    read_df = sink_view.read().df()
    # Column-wise equality against the original frame
    for col in cols:
        assert all(read_df[col] == df[col])
def test_write(session):
    """A plain insert adds a row next to the fixture ones."""
    team_view = View('team', ['name', 'country.name'])
    team_view.write([('Orange', 'Holland')])
    expected = [
        ('Red', 'Belgium'),
        ('Blue', 'Belgium'),
        ('Blue', 'France'),
        ('Orange', 'Holland'),
    ]
    check(expected, team_view.read())
def test_write_by_id(session):
    """Writing with an explicit id updates that row in place."""
    country_view = View('country', ['id', 'name'])
    record_id = country_view.read('(= name "Belgium")').one()[0]
    country_view.write([(record_id, 'BELGIUM')])
    # The old name is gone ...
    assert country_view.read('(= name "Belgium")').one() is None
    # ... and the renamed row kept its id
    row = country_view.read('(= name "BELGIUM")').one()
    assert row[0] == record_id
def test_no_insert(session):
    """insert=False updates existing rows but never creates new ones."""
    team_view = View('team', ['name', 'country.name'])
    team_view.write(
        [
            ('Orange', 'Holland'),  # This is an insert
            ('Blue', 'Belgium'),
        ],
        insert=False)
    # Orange-Holland was dropped; the fixture rows are intact
    expected = [
        ('Red', 'Belgium'),
        ('Blue', 'Belgium'),
        ('Blue', 'France'),
    ]
    check(expected, team_view.read())
def test_time_pandas(session):
    """Pandas datetime64 columns (with and without tz) round-trip."""
    df = DataFrame({
        'timestamp': date_range(
            '2018-01-01 00:00:00', '2018-01-05 00:00:00', freq='D'),
        'timestamptz': date_range(
            '2018-01-01 00:00:00+01', '2018-01-05 00:00:00+01', freq='D'),
    })
    if ctx.flavor == 'sqlite':
        # sqlite dbapi does not adapt tz-aware timestamps
        df = df[['timestamp']]
    ts_view = View('timeseries', list(df.columns))
    ts_view.write(df)
    res = ts_view.read().df()
    for col in df.columns:
        assert (res[col].values == df[col].values).all()
def test_sneaky_update_filters(session):
    """A filter on the very column being updated blocks the update."""
    full_view = View(
        'member',
        ['name', 'team.country.name', 'team.name', 'registration_code'])
    full_view.write(members)

    # Same but we express the filter on the updated column
    fltr = '(= name "Bob")'
    member_view = View('member', ['registration_code', 'name'])
    updates = [
        ('001', 'Trudy'),  # Try to update 001 from Bob to Trudy
    ]
    cnt = member_view.write(updates, filters=fltr)
    assert cnt['filtered'] == 1

    # Nothing changed
    expected = [
        ('001', 'Bob'),
        ('002', 'Alice'),
        ('003', 'Trudy'),
    ]
    check(expected, member_view.read())
def test_nullable_fk(session): ''' If we pass None value in m2o field(s), we should put null in the fk col ''' member_view = View('member', [ 'registration_code', 'team.name', 'team.country.name', ]) res = member_view.write([('test', None, None)]) member_view = View('member', ['team']) res = member_view.read('(= registration_code "test")').one() assert res == (None, )
def test_update_simple_filter(session):
    """An acl-write rule on the main table filters inserts and updates."""
    # Add all members to table
    inject('member', 'acl-write', [])
    view = View('member', ['registration_code', 'name'])
    View('member', member_cols).write(members)

    # Test on update
    inject('member', 'acl-write', ['(= registration_code "001")'])
    # Test that main table is filtered on insertion
    view = View('member', ['registration_code', 'name'])
    cnt = view.write([
        ('001', 'BOB'),
        ('002', 'ALICE'),
    ])
    # The 002 row did not pass the acl rule
    assert cnt['filtered'] == 1
    names = View('member', ['name']).read().all()
    assert sorted(name for name, in names) == ['Alice', 'BOB', 'Trudy']