def test_delete(engine, cli, datadir, tsh):
    serie = genserie(datetime(2020, 1, 1), 'D', 3)
    tsh.update(engine, serie, 'bfoo', 'Babar')
    tsh.update(engine, serie, 'bbar', 'Babar')
    tsh.update(engine, serie, 'bquux', 'Babar')

    r = cli('delete', engine.url,
            deletefile=datadir / 'delete.csv',
            namespace=tsh.namespace)

    tsh = timeseries(tsh.namespace)
    tsh._testing = True
    assert not tsh.exists(engine, 'bfoo')
    assert tsh.get(engine, 'bfoo') is None
    assert tsh.get(engine, 'bbar') is None
    assert tsh.get(engine, 'bquux') is not None

    tsh.update(engine, serie, 'bbq', 'Babar')
    tsh = timeseries(tsh.namespace)
    tsh._testing = True
    r = cli('delete', engine.url,
            series='bbq',
            namespace=tsh.namespace)
    assert not tsh.exists(engine, 'bbq')

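# Hedged sketch: a minimal stand-in for the `genserie` test helper used in
# these tests. The assumption (consistent with the values asserted in the
# http test below: 0.0, 1.0, 2.0) is that it builds a pandas Series of
# increasing floats over a date range; this is illustrative, not the
# project's actual helper.
import pandas as pd
from datetime import datetime

def genserie_sketch(start, freq, repeat):
    # one float per period, indexed by a date_range starting at `start`
    return pd.Series(
        [float(i) for i in range(repeat)],
        index=pd.date_range(start=start, freq=freq, periods=repeat)
    )

# genserie_sketch(datetime(2020, 1, 1), 'D', 3) -> values 0.0, 1.0, 2.0
# on 2020-01-01, 2020-01-02, 2020-01-03
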
def test_multisources(client, engine):
    series = genserie(utcdt(2020, 1, 1), 'D', 3)
    tsh = tsio.timeseries('other')
    tsh.update(engine, series, 'test-other', 'Babar')

    client.update('test-mainsource', series, 'Babar')
    with pytest.raises(ValueError) as err:
        client.update('test-other', series, 'Babar')
    assert err.value.args[0] == 'not allowed to update to a secondary source'
    with pytest.raises(ValueError) as err:
        client.replace('test-other', series, 'Babar')
    assert err.value.args[0] == 'not allowed to replace to a secondary source'

    cat = client.catalog()
    assert cat == {
        ('db://localhost:5433/postgres', 'other'): [
            ['test-other', 'primary']
        ],
        ('db://localhost:5433/postgres', 'tsh'): [
            ['test-naive', 'primary'],
            ['test', 'primary'],
            ['staircase', 'primary'],
            ['staircase-naive', 'primary'],
            ['in-a-formula', 'primary'],
            ['test-mainsource', 'primary'],
            ['new-formula', 'formula']
        ]
    }
    cat = client.catalog(allsources=False)
    assert ('db://localhost:5433/postgres', 'tsh') in cat
    assert ('db://localhost:5433/postgres', 'other') not in cat

def history(db_uri, seriename,
            from_insertion_date, to_insertion_date,
            from_value_date, to_value_date,
            diff, json,
            namespace='tsh'):
    """show a serie full history """
    engine = create_engine(find_dburi(db_uri))
    tsh = timeseries(namespace)
    with engine.begin() as cn:
        hist = tsh.history(
            cn, seriename,
            from_insertion_date, to_insertion_date,
            from_value_date, to_value_date,
            diffmode=diff
        )
    if json:
        out = {
            str(idate): {
                str(vdate): val
                for vdate, val in ts.to_dict().items()
            }
            for idate, ts in hist.items()
        }
        print(dumps(out))
    else:
        for idate in hist:
            print(hist[idate])

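# Illustrative shape of the json output assembled above: each insertion date
# maps to a {value date: value} dict. The dates and values below are made up
# for the example, not taken from a real repository.
example_history_out = {
    '2020-01-01 10:00:00+00:00': {
        '2020-01-01 00:00:00+00:00': 0.0,
        '2020-01-02 00:00:00+00:00': 1.0
    }
}
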
def info(db_uri, namespace='tsh'):
    """show global statistics of the repository"""
    engine = create_engine(find_dburi(db_uri))
    info = timeseries(namespace).info(engine)
    info['serie names'] = ', '.join(info['serie names'])
    print(INFOFMT.format(**info))

def check(db_uri, series=None, namespace='tsh'):
    "coherence checks of the db"
    e = create_engine(find_dburi(db_uri))
    tsh = timeseries(namespace)
    if series is None:
        series = tsh.list_series(e)
    else:
        series = [series]

    for idx, s in enumerate(series):
        t0 = time()
        with e.begin() as cn:
            hist = tsh.history(cn, s)
        start, end = None, None
        mon = True
        for ts in hist.values():
            cmin = ts.index.min()
            cmax = ts.index.max()
            start = min(start or cmin, cmin)
            end = max(end or cmax, cmax)
            mon = ts.index.is_monotonic_increasing
        ival = tsh.interval(e, s)
        if ival.left != start:
            print(' start:', s, f'{ival.left} != {start}')
        if ival.right != end:
            print(' end:', s, f'{ival.right} != {end}')
        monmsg = '' if mon else 'non-monotonic'
        print(
            idx, s,
            'inserts={}, read-time={} {}'.format(
                len(hist), time() - t0, monmsg
            )
        )

def tsh(request, engine):
    namespace = request.param
    sch = schema.tsschema(namespace)
    sch.create(engine)

    if namespace == 'z-z':
        Snapshot._max_bucket_size = 5
    yield tsio.timeseries(namespace)

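# Hedged sketch of how `request.param` gets its values in a fixture like the
# one above: a parametrized pytest fixture. The fixture name and the exact
# parameter list are assumptions for illustration ('z-z' comes from the
# bucket-size check in the fixture body); this is not the project's conftest.
import pytest

@pytest.fixture(params=['tsh', 'z-z'])
def namespace_param(request):
    # each test using this fixture runs once per listed namespace
    return request.param
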
def delete_series(engine, series, namespace='tsh'):
    from tshistory.tsio import timeseries
    tsh = timeseries(namespace=namespace)

    for name in series:
        with engine.begin() as cn:
            if not tsh.exists(cn, name):
                print('skipping unknown', name)
                continue
            print('delete', name)
            tsh.delete(cn, name)

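# Hedged usage sketch for delete_series: a sqlalchemy engine plus a list of
# names. The dburi and series names below are placeholders, not values from
# any real database.
from sqlalchemy import create_engine

def _delete_series_example():
    engine = create_engine('postgresql://localhost:5433/postgres')
    delete_series(
        engine,
        ['obsolete-series-1', 'obsolete-series-2'],
        namespace='tsh'
    )
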
def get(db_uri, seriename, json, namespace='tsh'):
    """show a serie in its current state """
    engine = create_engine(find_dburi(db_uri))
    tsh = timeseries(namespace)
    ts = tsh.get(engine, seriename)

    if json:
        print(ts.to_json())
    else:
        with pd.option_context('display.max_rows', None,
                               'display.max_columns', 3):
            print(ts)

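# Quick illustration of the pandas option_context used above: it temporarily
# lifts the row display limit so the whole series prints instead of the
# truncated default. The series built here is throwaway example data.
import pandas as pd

def _print_full_series_example():
    ts = pd.Series(
        range(100),
        index=pd.date_range('2020-01-01', periods=100, freq='D')
    )
    with pd.option_context('display.max_rows', None, 'display.max_columns', 3):
        print(ts)  # all 100 rows
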
def rename(db_uri, mapfile, namespace='tsh'):
    """rename series by providing a map file (csv format)

    map file header must be `old,new`
    """
    seriesmap = {
        p.old: p.new
        for p in pd.read_csv(mapfile).itertuples()
    }
    engine = create_engine(find_dburi(db_uri))
    tsh = timeseries(namespace)

    for old, new in seriesmap.items():
        with engine.begin() as cn:
            print('rename', old, '->', new)
            tsh.rename(cn, old, new)

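# Hedged example of a rename map file as described in the docstring above
# (`old,new` header). The series names are invented for illustration; only
# the csv layout and the dict comprehension mirror the command.
import io
import pandas as pd

def _rename_mapfile_example():
    mapfile = io.StringIO(
        'old,new\n'
        'banana.spot.price,fruit.banana.spot.price\n'
    )
    seriesmap = {p.old: p.new for p in pd.read_csv(mapfile).itertuples()}
    assert seriesmap == {'banana.spot.price': 'fruit.banana.spot.price'}
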
def log(db_uri, limit, series,
        from_insertion_date=None, to_insertion_date=None,
        namespace='tsh'):
    """show revision history of entire repository or series"""
    engine = create_engine(find_dburi(db_uri))
    tsh = timeseries(namespace)

    for rev in tsh.log(engine, series, limit=limit,
                       fromdate=from_insertion_date,
                       todate=to_insertion_date):
        print(format_rev(rev))
        print()

def shell(db_uri, namespace='tsh'):
    e = create_engine(find_dburi(db_uri))
    tsh = timeseries(namespace)
    import pdb
    pdb.set_trace()

def ptsh(engine):
    sch = schema.tsschema()
    sch.create(engine)
    return tsio.timeseries()

def test_multisource(client, engine):
    series = genserie(utcdt(2020, 1, 1), 'D', 3)
    res = client.patch('/series/state', params={
        'name': 'test-multi',
        'series': util.tojson(series),
        'author': 'Babar',
        'insertion_date': utcdt(2018, 1, 1, 10),
        'tzaware': util.tzaware_serie(series)
    })
    assert res.status_code == 201

    tsh = tsio.timeseries('other')
    tsh.update(engine, series, 'test-other-source', 'Babar')

    out = client.get('/series/state', params={
        'name': 'test-multi',
    })
    assert out.json == {
        '2020-01-01T00:00:00.000Z': 0.0,
        '2020-01-02T00:00:00.000Z': 1.0,
        '2020-01-03T00:00:00.000Z': 2.0
    }

    out = client.get('/series/state', params={
        'name': 'test-other-source',
    })
    assert out.json == {
        '2020-01-01T00:00:00.000Z': 0.0,
        '2020-01-02T00:00:00.000Z': 1.0,
        '2020-01-03T00:00:00.000Z': 2.0
    }

    res = client.patch('/series/state', params={
        'name': 'test-multi',
        'series': util.tojson(series),
        'author': 'Babar',
        'insertion_date': utcdt(2018, 1, 1, 10),
        'tzaware': util.tzaware_serie(series)
    })
    assert res.status_code == 200

    res = client.patch('/series/state', params={
        'name': 'test-other-source',
        'series': util.tojson(series),
        'author': 'Babar',
        'insertion_date': utcdt(2018, 1, 1, 10),
        'tzaware': util.tzaware_serie(series)
    })
    assert res.status_code == 405
    assert res.json == {
        'message': 'not allowed to update to a secondary source'
    }

    res = client.get('/series/metadata?name=test-other-source',
                     params={'all': True})
    meta = res.json
    assert meta == {
        'tzaware': True,
        'index_type': 'datetime64[ns, UTC]',
        'value_type': 'float64',
        'index_dtype': '|M8[ns]',
        'value_dtype': '<f8'
    }

    res = client.put('/series/metadata', params={
        'metadata': json.dumps({'description': 'banana spot price'}),
        'name': 'test-other-source'
    })
    assert res.status_code == 405
    assert res.json == {
        'message': 'not allowed to update metadata to a secondary source'
    }

    res = client.delete('/series/state', params={'name': 'test-other-source'})
    assert res.status_code == 405
    assert res.json == {
        'message': 'not allowed to delete to a secondary source'
    }

    res = client.delete('/series/state', params={'name': 'test-other-source'})
    assert res.status_code == 405
    assert res.json == {
        'message': 'not allowed to delete to a secondary source'
    }

    res = client.put('/series/state', params={
        'name': 'test-other-source',
        'newname': 'test2-other-source'
    })
    assert res.status_code == 405
    assert res.json == {
        'message': 'not allowed to rename to a secondary source'
    }

    # catalog
    res = client.get('/series/catalog')
    assert res.status_code == 200
    assert res.json == {
        'db://localhost:5433/postgres!other': [
            ['test-other-source', 'primary']
        ],
        'db://localhost:5433/postgres!tsh': [
            ['test-naive', 'primary'],
            ['test2', 'primary'],
            ['test3', 'primary'],
            ['staircase', 'primary'],
            ['test_fast', 'primary'],
            ['test-multi', 'primary']
        ]
    }

    res = client.get('/series/catalog', params={'allsources': False})
    assert res.status_code == 200
    assert 'db://localhost:5433/postgres!tsh' in res.json
    assert 'db://localhost:5433/postgres!other' not in res.json