def test_timestamp(session):
    view = View(
        'member',
        ['name', 'team.country.name', 'team.name', 'registration_code'])
    view.write(members)
    assert all(View('member', ['created_at']).read())

def test_one(session):
    expected = ('Belgium', )
    assert expected == View('country', ['name']).read().one()

    expected = None
    fltr = '(= name "Prussia")'
    assert expected == View('country', ['name']).read(fltr).one()

def test_function(session):
    # TODO add support for sqlite on epoch and floor
    if ctx.flavor == 'sqlite':
        return

    input_record = {
        'index': 1,
        'floor': 1.1,
        'epoch': datetime(1970, 1, 1),
        'year': datetime(1970, 1, 1),
    }
    output_record = {
        '(floor floor)': 1,
        '(extract (epoch) epoch)': 0,
        '(extract (year) year)': 1970,
        'floor': 1.1,
        '(* floor 2)': 2.2,
    }
    ks_view = View('kitchensink')
    ks_view.write([input_record])

    keys, values = zip(*output_record.items())
    res = View('kitchensink', keys).read().all()
    assert res[0] == values

def test_mixed(session):
    view = View('country', ['name'])
    view.write([('Italy', )])
    countries = [c for c, in view.read()]

    in_q = Queue()
    out_q = Queue()
    # Needed because the table creation and its content are not committed yet
    ctx.connection.commit()

    # We re-use the current config to create a nested context
    with connect(ctx.cfg):
        t = TankerThread(target=read, args=(in_q, out_q))
        t.start()
        res = []
        for _ in countries:
            in_q.put('tic')
            res.append(out_q.get()[1])
        # Release the thread loop & wait for it
        in_q.put('tic')
        t.join()

    assert 'Italy' in res
    assert res == countries

def test_bytea(session):
    payload = b'\x1d\xea\xdb\xee\xff'
    data = [(1, payload)]
    view = View('kitchensink', ['index', 'bytea'])
    view.write(data)
    res = view.read().all()
    assert bytes(res[0][1]) == payload

def test_env(session):
    view = View('member', {
        'created_date': '(cast created_at (date))',
    })
    exp = Expression(view.table, base_env=view.base_env())
    expected = 'CAST ("member"."created_at" AS date)'
    assert exp.parse('(cast created_at (date))').eval() == expected
    assert exp.parse('created_date').eval() == expected

def test_field_eval(session):
    view = View('country', ['(= name "Belgium")'])
    res = view.read(order='name').all()
    assert res == [
        (True, ),
        (False, ),
        (False, ),
    ]

def test_chain(session):
    expected = ['Belgium', 'France', 'Holland']
    res = sorted(View('country', ['name']).read().chain())
    assert expected == res

    expected = ['Blue', 'Belgium', 'Blue', 'France', 'Red', 'Belgium']
    view = View('team', ['name', 'country.name'])
    res = view.read(order=['name', 'country.name']).chain()
    assert expected == list(res)

def test_delete_filter_dict(session):
    # Not sure why sqlite fails on this one
    if ctx.flavor == 'sqlite':
        return

    view = View('country', ['name'])
    view.delete(filters={'name': 'France'})
    res = view.read().all()
    assert res == [('Belgium', ), ('Holland', )]

def test_no_fields(session):
    # No fields are provided, should fall back to the table definition
    team_view = View('country')
    team_view.write([
        ('Italy', ),
    ])
    expected = [('Belgium', ), ('Italy', ), ('France', ), ('Holland', )]
    res = team_view.read()
    check(expected, res)

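# Worker used by the threading tests below: each 'tic' received on in_q
# advances the shared cursor by one row and pushes (thread id, country name)
# onto out_q; the loop exits once the cursor is exhausted.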
def read(in_q, out_q):
    t_id = current_thread().ident
    countries = View('country').read()
    while True:
        in_q.get()
        in_q.task_done()
        c = countries.one()
        if c is None:
            break
        out_q.put((t_id, c[0]))

def test_filter_args(session):
    # init members
    full_view = View(
        'member',
        ['name', 'team.country.name', 'team.name', 'registration_code'])
    full_view.write(members)

    # Let's insert some names (the index is registration_code)
    fltr = '(= registration_code {})'
    member_view = View('member', ['registration_code', 'name'])
    data = [
        ('004', 'Carol'),
        ('005', 'Dan'),
    ]
    member_view.write(data, filters=fltr, args=['004'])

    expected = [
        ('001', 'Bob'),
        ('002', 'Alice'),
        ('003', 'Trudy'),
        ('004', 'Carol'),
    ]
    res = member_view.read()
    check(expected, res)

def test_read(session):
    inject('country', 'acl-read', ['(= name "Belgium")'])
    # Test that main table is filtered
    res = View('country', ['name']).read().all()
    assert res == [('Belgium', )]

    # Test with a relation
    inject('team', 'acl-read', ['(= country.name "Belgium")'])
    res = View('team', ['name']).read().all()
    assert res == [('Blue', ), ('Red', )]

def test_time_basic(session):
    view = View('timeseries')
    for ts_fmt, ts_tz_fmt in zip(ts_formats, ts_tz_formats):
        view.write([record])
        keys = list(record)
        if ctx.flavor == 'sqlite':
            # sqlite dbapi does not adapt tz
            keys.remove('timestamptz')
        for col in keys:
            value, = View('timeseries', [col]).read().one()
            assert value == record[col]

def test_null_key(session, bogus_value, fk_field):
    '''
    Insertion should fail if any value that is part of the key is null
    (because null != null in sql).
    '''
    view = View('team', ['name', fk_field])
    row = ['Pink', bogus_value]
    expected = (psycopg2.IntegrityError, sqlite3.IntegrityError, ValueError,
                TypeError)
    with pytest.raises(Exception) as exc:
        view.write([row])
    assert isinstance(exc.value, expected)

def test_like_ilike(session):
    view = View('country', ['name'])
    fltr = '(like name "%e%")'
    res = view.read(fltr).all()
    assert res == [('Belgium', ), ('France', )]

    fltr = '(ilike name "H%")'
    res = view.read(fltr).all()
    assert res == [('Holland', )]

    fltr = '(ilike name {prefix})'
    res = view.read(fltr, args={'prefix': 'H%'}).all()
    assert res == [('Holland', )]

def test_array(session):
    data = {
        'int': [(1, [1, 2])],
        'float': [(1, [1.0, 2.0])],
        'bool': [(1, [True, False])],
        # TODO add timestamp & date
    }
    for kind in data:
        datum = data[kind]
        view = View('kitchensink', ['index', '%s_array' % kind])
        view.write(datum)
        res = view.read().all()
        assert res == datum

    if ctx.flavor == 'sqlite':
        return

    # postgres-specific operations
    fltr = '(= 1 (any int_array))'
    res = view.read(fltr).all()
    assert len(res) == 1

    fltr = '(!= 3 (all int_array))'
    res = view.read(fltr).all()
    assert len(res) == 1

    res = View('kitchensink', ['index', '(unnest int_array)']).read().all()
    assert len(res) == 2

def test_update_filters(session):
    # init members
    full_view = View(
        'member',
        ['name', 'team.country.name', 'team.name', 'registration_code'])
    full_view.write(members)

    # Let's update some names (the index is registration_code)
    fltr = '(= registration_code "001")'
    member_view = View('member', ['registration_code', 'name'])
    data = [
        ('001', 'BOB'),
        ('003', 'TRUDY'),
    ]
    cnt = member_view.write(data, filters=fltr)
    assert cnt['filtered'] == 1

    expected = [
        ('001', 'BOB'),
        ('002', 'Alice'),
        ('003', 'Trudy'),
    ]
    res = member_view.read()
    check(expected, res)

def test_purge_filters(session):
    teams = [
        ['Red', 'Belgium'],
    ]  # Blue-Belgium is missing
    fltr = '(= country.name "Belgium")'  # We restrict writes to belgium
    team_view = View('team', ['name', 'country.name'])
    team_view.write(teams, purge=True, filters=fltr)
    expected = [
        ('Red', 'Belgium'),
        ('Blue', 'France'),
    ]
    res = team_view.read()
    check(expected, res)

    # Opposite filter
    fltr = '(!= country.name "Belgium")'  # We don't purge belgium
    team_view.write(teams, purge=True, filters=fltr)
    expected = [
        ('Red', 'Belgium'),
    ]
    res = team_view.read()
    check(expected, res)

def test_nested_read(session):
    # Needed because the table creation and its content are not committed yet
    ctx.connection.commit()

    # Start read from parent ctx
    cursor = View('country').read()
    first = cursor.one()

    # We re-use the current config to create a nested context
    with connect(ctx.cfg):
        nested_res = View('country').read().all()

    res = [first] + list(cursor)
    assert res == nested_res

def test_reserved_words(session):
    record = {
        'index': 1,
        'true': True,
        'false': False,
        'null': None,
        'integer': 1,
        'bigint': 10000000000,
        'float': 1.0,
        'bool': True,
        'timestamp': datetime(1970, 1, 1),
        'date': date(1970, 1, 1),
        'varchar': 'varchar',
        'bytea': b'\x00',
        'int_array': [1, 2],
        'bool_array': [True, False],
        'ts_array': [datetime(1970, 1, 1), datetime(1970, 1, 2)],
        'char_array': ['ham', 'spam'],
        'jsonb': '{"ham": "spam"}',
    }

    # Write actual values
    ks_view = View('kitchensink', list(record.keys()))
    ks_view.write([record])
    res = list(ks_view.read().dict())[0]
    for k, v in record.items():
        if ctx.flavor == 'sqlite' and k.endswith('array'):
            # Array support with sqlite is incomplete
            continue
        if k == 'bytea':
            assert bytes(res[k]) == v
        elif k == 'jsonb':
            assert res[k] == json.loads(v)
        else:
            assert res[k] == v

    # Filters
    for k, v in record.items():
        if isinstance(v, list):
            continue
        cond = '(is %s {})' if k == 'null' else '(= %s {})'
        res = ks_view.read(cond % k, args=[v]).all()
        assert len(res) == 1

    # Write nulls
    for k in record:
        if k == 'index':
            continue
        record[k] = None
    ks_view.write([record])
    res = list(ks_view.read().dict())[0]
    for k, v in record.items():
        assert res[k] == v

def test_time_formats(session):
    view = View('timeseries')
    for ts_fmt, ts_tz_fmt in zip(ts_formats, ts_tz_formats):
        fmt_record = {
            'timestamp': record['timestamp'].strftime(ts_fmt),
            'timestamptz': record['timestamptz'].strftime(ts_tz_fmt),
            'date': record['date'].strftime(date_fmt),
        }
        if ctx.flavor == 'sqlite':
            # strftime doesn't know %s in py2
            fmt_record.pop('timestamptz')
        view.write([fmt_record])

        for col in fmt_record.keys():
            value, = View('timeseries', [col]).read().one()
            assert value == record[col]

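# Thread test: NB_THREADS workers (driven by the read() helper above) share one
# output queue while a metronome thread (presumably defined elsewhere in this
# module) feeds their per-thread input queues; results are grouped by thread id
# until each responding thread has produced nb_cty values.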
def test_read_thread(session):
    countries = View('country').read().all()
    nb_cty = len(countries)
    assert nb_cty > 2

    read_threads = []
    out_q = Queue()
    in_queues = []
    for i in range(NB_THREADS):
        in_q = Queue(maxsize=1)
        in_queues.append(in_q)
        t = TankerThread(target=read, args=(in_q, out_q))
        t.start()
        read_threads.append(t)

    # Launch metronome to feed input queues
    metro_thread = Thread(target=metronome, args=(in_queues, nb_cty))
    metro_thread.start()

    # Loop on results
    is_full = lambda x: len(x) == nb_cty
    per_thread = defaultdict(list)
    while True:
        t_id, c = out_q.get()
        out_q.task_done()
        per_thread[t_id].append(c)
        if all(map(is_full, per_thread.values())):
            break

    # Join everything
    metro_thread.join()
    for t in read_threads:
        t.join()

def session(request):
    cfg = {
        'db_uri': request.param['uri'],
        'schema': SCHEMA,
    }
    is_sqlite = request.param['uri'].startswith('sqlite')
    use_schema = '#' in request.param['uri']

    # DB cleanup
    if is_sqlite and os.path.isfile('test.db'):
        os.unlink('test.db')
    else:
        with connect(cfg):
            to_clean = [t['table'] for t in SCHEMA] + ['tmp', 'sponsor']
            for table in to_clean:
                if use_schema:
                    table = 'test_schema.' + table
                qr = 'DROP TABLE IF EXISTS %s' % table
                if not is_sqlite:
                    qr += ' CASCADE'
                execute(qr)

    # Create tables
    with connect(cfg):
        create_tables()

    if request.param['auto']:
        cfg.pop('schema')

    with connect(cfg, _auto_rollback=True):
        View('team', ['name', 'country.name']).write(teams)
        yield request.param['uri']

def test_delete_filter(session):
    # Not sure why sqlite fails on this one
    if ctx.flavor == 'sqlite':
        return

    # Use a list of filters
    view = View('country', ['name'])
    view.delete(['(> id 0 )', '(< id 0)'])
    res = view.read(order='name').all()
    assert res == [('Belgium', ), ('France', ), ('Holland', )]

    # Filter with args
    view = View('country', ['name'])
    view.delete('(in name {names})', args={'names': ['France', 'Holland']})
    res = view.read().all()
    assert res == [('Belgium', )]

def test_simple_purge(session):
    team_view = View('team', ['name', 'country.name'])
    cnt = team_view.write(
        [
            ('Orange', 'Holland'),  # this is an insert
            ('Blue', 'France'),  # belgium is missing
        ],
        purge=True, insert=False, update=False)
    assert cnt['deleted'] == 2

    expected = [
        ('Blue', 'France'),
    ]
    res = team_view.read()
    check(expected, res)

def test_kitchensink(session):
    df = DataFrame({
        'index': arange(10),
        'bigint': arange(10),
        'float': arange(10).astype('f8'),
        'true': asarray([True] * 10),
        'false': asarray([False] * 10),
        'varchar': ['spam'] * 10,
        'timestamp': asarray(range(10), dtype="M8[s]"),
        'date': date_range('1970-01-01', '1970-01-10', freq='D'),
    })
    cols = list(df.columns)
    view = View('kitchensink', cols)
    view.write(df)
    read_df = view.read().df()
    for col in cols:
        assert all(read_df[col] == df[col])

def test_delete_data_extra_col(session):
    full_view = View(
        'member',
        ['name', 'team.country.name', 'team.name', 'registration_code'])
    full_view.write(members)
    assert len(full_view.read().all()) == len(members)

    full_view.delete(data=members)
    res = full_view.read().all()
    assert res == []

def test_filter_purge(session):
    team_view = View('team', ['name', 'country.name'])
    fltr = "(= country.name 'Belgium')"  # Restrict purge to belgium
    cnt = team_view.write(
        [
            ('Red', 'Belgium'),  # ('Blue', 'Belgium') is removed
            ('Blue', 'France'),  # already in db
            ('Purple', 'France'),  # new row (but must be ignored)
        ],
        purge=True, filters=fltr)
    assert cnt['deleted'] == 1

    expected = [
        ('Red', 'Belgium'),
        ('Blue', 'France'),
    ]
    res = team_view.read()
    check(expected, res)

def test_write(session):
    team_view = View('team', ['name', 'country.name'])
    team_view.write([('Orange', 'Holland')])
    expected = [
        ('Red', 'Belgium'),
        ('Blue', 'Belgium'),
        ('Blue', 'France'),
        ('Orange', 'Holland'),
    ]
    res = team_view.read()
    check(expected, res)