def test_unique_ids(tmpdir):
    """Ids must remain unique across reopen cycles and after removals.

    :type tmpdir: py._path.local.LocalPath
    """
    path = str(tmpdir.join('test.db.json'))

    # One insert per DB session: the second session must not reuse id 1.
    with Flata(path) as _db:
        _db.table('t').insert({'x': 1})

    with Flata(path) as _db:
        _db.table('t').insert({'x': 1})

    with Flata(path) as _db:
        records = _db.table('t').all()
        assert records[0].id != records[1].id

    # Removing a record must not cause its id to be handed out again.
    with Flata(path) as _db:
        _db.purge_tables()
        _db.table('t').insert_multiple({'x': i} for i in range(5))
        _db.table('t').remove(where('x') == 2)

        remaining = _db.table('t').all()
        assert len(remaining) == 4

        seen_ids = [record.id for record in _db.table('t').all()]
        assert len(seen_ids) == len(set(seen_ids))
def test_unicode_json(tmpdir):
    """Byte and unicode strings that compare equal are interchangeable in queries.

    Fix: the original used ``'a'.decode('utf-8')``, which is Python-2-only
    (``str`` has no ``decode`` on Python 3).  ``u''`` literals are valid on
    both Python 2 and Python 3 (3.3+) and produce the same values: on py2 a
    ``unicode`` object vs. a byte string, on py3 both are ``str``.
    """
    unic_str1 = u'a'
    byte_str1 = 'a'
    unic_str2 = u'ß'
    byte_str2 = 'ß'

    path = str(tmpdir.join('test.db.json'))

    # Records stored as byte strings must match both byte and unicode queries.
    with Flata(path) as _db:
        _db.purge_tables()
        _db.table('t').insert({'value': byte_str1})
        _db.table('t').insert({'value': byte_str2})
        assert _db.table('t').contains(where('value') == byte_str1)
        assert _db.table('t').contains(where('value') == unic_str1)
        assert _db.table('t').contains(where('value') == byte_str2)
        assert _db.table('t').contains(where('value') == unic_str2)

    # And vice versa for records stored as unicode strings.
    with Flata(path) as _db:
        _db.purge_tables()
        _db.table('t').insert({'value': unic_str1})
        _db.table('t').insert({'value': unic_str2})
        assert _db.table('t').contains(where('value') == byte_str1)
        assert _db.table('t').contains(where('value') == unic_str1)
        assert _db.table('t').contains(where('value') == byte_str2)
        assert _db.table('t').contains(where('value') == unic_str2)
def test_lastid_after_open(tmpdir):
    """The highest assigned id is recovered when the DB file is reopened."""
    record_count = 100
    path = str(tmpdir.join('test.db.json'))

    with Flata(path) as _db:
        _db.table('t').insert_multiple({'i': i} for i in range(record_count))

    with Flata(path) as _db:
        assert _db.table('t')._last_id == record_count
def test_empty_write(tmpdir):
    """Opening and closing an untouched DB must not trigger a storage write."""
    path = str(tmpdir.join('test.db.json'))

    class ReadOnlyMiddleware(Middleware):
        # Any write attempt on an unchanged DB is a test failure.
        def write(self, data):
            raise AssertionError('No write for unchanged db')

    # First create the file normally, then reopen through the guard.
    Flata(path).close()
    Flata(path, storage=ReadOnlyMiddleware()).close()
def test_multiple_dbs():
    """Two in-memory databases must not share any state."""
    first = Flata(storage=MemoryStorage)
    second = Flata(storage=MemoryStorage)

    for record in ({'int': 1, 'char': 'a'},
                   {'int': 1, 'char': 'b'},
                   {'int': 1, 'value': 5.0}):
        first.table('t').insert(record)

    second.table('t').insert({'color': 'blue', 'animal': 'turtle'})

    assert len(first.table('t').all()) == 3
    assert len(second.table('t').all()) == 1
def test_update_with_external_cache(db):
    """Updates made through one DB instance are visible via a shared cache."""
    shared_cache = CachingMiddleware(MemoryStorage)()

    # NOTE(review): the `db` fixture argument is immediately shadowed below —
    # presumably only kept for the fixture signature; confirm with conftest.
    db = Flata(cache=shared_cache)
    db.table('t').insert({'int': 1, 'char': 'a'})
    assert db.table('t').count(where('int') == 1) == 1

    # A second instance on the same cache sees and can modify the data.
    other = Flata(cache=shared_cache)
    other.table('t')
    other.table('t').update({'int': 2}, where('char') == 'a')
    assert other.table('t').count(where('int') == 2) == 1
def test_json_readwrite(tmpdir):
    """Regression test for issue #1: insert, fetch and remove round-trip."""
    path = str(tmpdir.join('test.db.json'))

    db = Flata(path, storage=JSONStorage)
    tb = db.table('test_table')

    def fetch(value):
        return tb.get(where('data') == value)

    tb.insert({'data': 'data exists'})
    assert dict(fetch('data exists'))['data'] == 'data exists'
    assert fetch('data not exists') is None

    tb.remove(where('data') == 'data exists')
    assert fetch('data exists') is None

    db.close()
class SendData:
    """Posts match payloads to the Calindra API and records failures locally.

    Fix: the original used Python-2-only syntax (``except urllib2.HTTPError, e:``
    and ``print e.code``), which is a SyntaxError on Python 3 and inconsistent
    with the ``print(...)`` calls already used in the same method.  Switched to
    ``except ... as e`` and ``print(e.code)`` (valid on py2.6+ and py3).
    Note: ``urllib2`` itself remains Python-2-only.
    """

    url = "http://10.210.0.17:5000/api/events/27/activations/{0}/matches"
    username = "******"
    password = "******"
    db = Flata('db.json', storage=JSONStorage)

    def send_to_calindra(self, data, activationCode):
        """POST ``data`` to the activation endpoint.

        On any non-201 response or HTTP error, the payload is stored in the
        local ``errors`` table for later replay, and a status code is pushed
        to Zabbix in a background thread (9 = bad response, 1 = OK, 99 = HTTP
        error).
        """
        try:
            print(data)
            req = urllib2.Request(self.url.format(activationCode))
            credentials = '{username}:{password}'.format(
                username=self.username, password=self.password).encode()
            req.add_header('Authorization',
                           'Basic ' + base64.b64encode(credentials))
            req.add_header('Content-Type', 'application/json;charset=UTF-8')
            req.add_header('Content-Length', str(len(data)))
            response = urllib2.urlopen(req, data)
            if response.getcode() != 201:
                # Keep the failed payload so it can be re-sent later.
                d = json.loads(data)
                self.db.table('errors').insert(d)
                threading.Thread(target=SendDataZabbix().send_zabbix,
                                 args=("gxp-pm01", "http", 9),
                                 kwargs={}).start()
            else:
                threading.Thread(target=SendDataZabbix().send_zabbix,
                                 args=("gxp-pm01", "http", 1),
                                 kwargs={}).start()
            print(response.getcode())
        except urllib2.HTTPError as e:
            d = json.loads(data)
            self.db.table('errors').insert(d)
            threading.Thread(target=SendDataZabbix().send_zabbix,
                             args=("gxp-pm01", "http", 99),
                             kwargs={}).start()
            print(e.code)
def test_purge_table():
    """purge_table() must remove the table from the DB's table registry."""
    name = 'some-other-table'
    db = Flata(storage=MemoryStorage)

    db.table(name)
    assert {name} == db.tables()

    db.purge_table(name)
    assert not {name} == db.tables()
def saveTemplate(path_db, item, dic):
    """Store ``dic`` in table ``item``, clearing any pre-existing content."""
    db = Flata(path_db, storage=JSONStorage)
    # Check existence BEFORE table() creates the table as a side effect.
    existed = db.get(item)
    tb = db.table(item)
    if existed:
        tb.purge()
    tb.insert(dic)
def saveInJson(db_path, dic, mg):
    """Serialize each mu-list in ``dic`` and insert the result into table ``mg``."""
    db = Flata(db_path, storage=JSONStorage)
    tb = db.table(mg)
    tb.insert({name: listIR2dict(mu_list) for name, mu_list in dic.items()})
    return tb
def testids_json(tmpdir):
    """Inserts assign sequential ids; id-based contains/update/get/remove work."""
    path = str(tmpdir.join('test.db.json'))

    with Flata(path) as _db:
        _db.purge_tables()
        tb = _db.table('t')

        # Single inserts return the stored record including its new id.
        assert tb.insert({'int': 1, 'char': 'a'}) == \
            {'id': 1, 'char': 'a', 'int': 1}
        assert tb.insert({'int': 1, 'char': 'a'}) == \
            {'id': 2, 'char': 'a', 'int': 1}

        _db.purge_tables()
        tb = _db.table('t')  # re-fetch: the old table was just purged

        assert tb.insert_multiple([{'int': 1, 'char': 'a'},
                                   {'int': 1, 'char': 'b'},
                                   {'int': 1, 'char': 'c'}]) == \
            [{'id': 1, 'char': 'a', 'int': 1},
             {'id': 2, 'char': 'b', 'int': 1},
             {'id': 3, 'char': 'c', 'int': 1}]

        assert tb.contains(ids=[1, 2])
        assert not tb.contains(ids=[88])

        tb.update({'int': 2}, ids=[1, 2])
        assert tb.count(where('int') == 2) == 2

        first = tb.all()[0]
        assert tb.get(id=first.id) == first
        assert tb.get(id=float('NaN')) is None

        tb.remove(ids=[1, 2])
        assert len(tb.all()) == 1
def test_caching_json_write(tmpdir):
    """A caching-wrapped JSON DB flushes on close and reloads its data."""
    path = str(tmpdir.join('test.db'))

    with Flata(path, storage=CachingMiddleware(JSONStorage)) as db:
        db.table('t').insert({'key': 'value'})

    # The close must have flushed real bytes to disk...
    assert os.stat(path).st_size != 0
    # ...and closed the underlying file handle.
    assert db._storage._handle.closed
    del db

    # Reopening must yield the persisted record with its assigned id.
    with Flata(path, storage=CachingMiddleware(JSONStorage)) as db:
        assert db.table('t').all() == [{'id': 1, 'key': 'value'}]
def init_db(db_path, table_name):
    """Open (or create) the database file and return the named table."""
    db_init = Flata(db_path, storage=JSONStorage)
    # Ensure the table exists, keyed on the default 'id' field.
    db_init.table(table_name, id_field='id')
    return db_init.get(table_name)
def create(**kwargs):
    """Insert the decoded request payload into the requested table.

    Returns the inserted record (including its assigned id).
    """
    dbf = kwargs[CONFIG_DB] or DEFAULT_DB
    use_memory = kwargs[CONFIG_STORAGE] == MEMORY_STORAGE
    storage = MemoryStorage if use_memory else JSONStorage
    cache = kwargs.pop(CONFIG_CACHE, None)
    table_name = kwargs[RESOURCE_DOCUMENT]
    raw = kwargs[RESOURCE_DATA]

    with Flata(dbf, storage=storage, cache=cache) as db:
        return db.table(table_name).insert(loads(raw.decode()))
def test_remove_with_not_default_id(tmpdir):
    """remove() works on tables configured with a custom id field."""
    path = str(tmpdir.join('test.db.json'))
    db = Flata(path)

    db.table('foo', id_field='_not_default_id').insert({'something': 'else'})
    assert db.table('foo').count(where('something') == 'else') == 1

    db.table('foo').remove(where('something') == 'else')
    assert db.table('foo').all() == []
def test_insert_invalid_dict(tmpdir):
    """A non-serializable value raises TypeError and leaves the table intact."""
    path = str(tmpdir.join('test.db.json'))
    with Flata(path) as _db:
        _db.purge_tables()
        _db.table('t').insert_multiple([{'int': 1}, {'int': 2}])

        with pytest.raises(TypeError):
            _db.table('t').insert({'int': set(['bark'])})  # sets aren't JSON

        # The failed insert must not have added a record.
        assert len(_db.table('t').all()) == 2
def json_update(path_db, mg, key, val, new_dict):
    """Patch matching rows inside the single stored report and rewrite the table.

    Rows of ``out_Report`` whose ``key`` equals ``val`` get the entries of
    ``new_dict`` merged in; the whole document is then re-stored.
    """
    db = Flata(path_db, storage=JSONStorage)
    table = db.table(mg)

    report = table.all()[0]
    for row in report['out_Report']:
        if row[key] == val and new_dict:
            row.update(new_dict)

    # Replace the stored document wholesale with the patched version.
    table.purge()
    table.insert(report)
    return table
def edit(**kwargs):
    """Apply the decoded payload as an update to the record with the given id.

    Returns the updated record objects.
    """
    dbf = kwargs[CONFIG_DB] or DEFAULT_DB
    use_memory = kwargs[CONFIG_STORAGE] == MEMORY_STORAGE
    storage = MemoryStorage if use_memory else JSONStorage
    cache = kwargs.pop(CONFIG_CACHE, None)
    table_name = kwargs[RESOURCE_DOCUMENT]
    payload = kwargs[RESOURCE_DATA]
    record_id = kwargs[RESOURCE_QUERY][RESOURCE_ID]

    with Flata(dbf, storage=storage, cache=cache) as db:
        _, updated = db.table(table_name).update(loads(payload.decode()),
                                                 ids=[record_id])
        return updated
def query(**kwargs):
    """Query a table: by id (with optional embed/expand of related tables),
    by filter expression, or returning everything.

    ``embed`` attaches child records referencing this object via
    ``<singular>Id``; ``expand`` attaches the parent record this object
    references via its own ``<singular>Id`` foreign key.

    Fix: the expanded parent was fetched with ``get(id=_id)`` — the child's
    own id — instead of the foreign-key value ``expand_id``, which was
    computed but never used.  It is now looked up by ``expand_id``.
    """
    _dbf = kwargs[CONFIG_DB] or DEFAULT_DB
    _storage = MemoryStorage if kwargs[
        CONFIG_STORAGE] == MEMORY_STORAGE else JSONStorage
    _cache = kwargs.pop(CONFIG_CACHE, None)
    _tb = kwargs[RESOURCE_DOCUMENT]

    _id = kwargs[RESOURCE_QUERY].pop(RESOURCE_ID) \
        if RESOURCE_QUERY in kwargs and RESOURCE_ID in kwargs[RESOURCE_QUERY] else None
    _embed = kwargs[RESOURCE_QUERY].pop(RESOURCE_EMBED) \
        if RESOURCE_QUERY in kwargs and RESOURCE_EMBED in kwargs[RESOURCE_QUERY] else None
    _expand = kwargs[RESOURCE_QUERY].pop(RESOURCE_EXPAND) \
        if RESOURCE_QUERY in kwargs and RESOURCE_EXPAND in kwargs[RESOURCE_QUERY] else None
    _query = extract_query(
        **kwargs[RESOURCE_QUERY]) if RESOURCE_QUERY in kwargs else None

    with Flata(_dbf, storage=_storage, cache=_cache) as db:
        tb = db.table(_tb)
        if _id:
            obj = tb.get(id=_id)

            if _embed and obj:
                # Children reference this object through '<singular>Id'.
                embed_id = _tb[:-1] + 'Id'
                obj[_embed] = db.table(_embed).search(
                    where(embed_id) == _id)

            if _expand and obj:
                expand_id_field = _expand[:-1] + 'Id'
                if expand_id_field in obj:
                    expand_id = obj[expand_id_field]
                    # BUG FIX: look the parent up by the foreign key,
                    # not by this object's own id.
                    obj[_expand[:-1]] = db.table(_expand).get(id=expand_id)
        elif _query:
            obj = tb.search(_query)
        else:
            obj = tb.all()

        return obj
def test_insert_invalid_array_string(tmpdir):
    """insert() rejects lists and bare strings without touching the table."""
    path = str(tmpdir.join('test.db.json'))
    with Flata(path) as _db:
        _db.table('t').insert_multiple([{'int': 1}, {'int': 2}])

        for bad_value in ([1, 2, 3], 'fails'):
            with pytest.raises(ValueError):
                _db.table('t').insert(bad_value)

        # Neither failed insert may have added a record.
        assert len(_db.table('t').all()) == 2
def test_insert_object(tmpdir):
    """Nested dicts survive insertion unchanged."""
    path = str(tmpdir.join('test.db.json'))
    with Flata(path) as _db:
        _db.purge_tables()
        _db.table('t').insert_multiple([{'int': 1, 'object': {'object_id': 2}}])

        expected = [{'id': 1, 'int': 1, 'object': {'object_id': 2}}]
        assert _db.table('t').all() == expected
def test_gc(tmpdir):
    """Records stay queryable and keep insertion order and ids."""
    path = str(tmpdir.join('test.db.json'))
    db = Flata(path)
    tb = db.table('foo')

    tb.insert({'something': 'else'})
    tb.insert({'int': 13})

    assert len(tb.search(where('int') == 13)) == 1
    assert tb.all() == [{'id': 1, 'something': 'else'},
                        {'id': 2, 'int': 13}]
    db.close()
def remove(**kwargs):
    """Delete one record by id, or purge the whole table when no id is given."""
    dbf = kwargs[CONFIG_DB] or DEFAULT_DB
    use_memory = kwargs[CONFIG_STORAGE] == MEMORY_STORAGE
    storage = MemoryStorage if use_memory else JSONStorage
    cache = kwargs.pop(CONFIG_CACHE, None)
    table_name = kwargs[RESOURCE_DOCUMENT]

    params = kwargs.get(RESOURCE_QUERY) or {}
    record_id = params.get(RESOURCE_ID)

    with Flata(dbf, storage=storage, cache=cache) as db:
        tb = db.table(table_name)
        if record_id:
            return tb.remove(ids=[record_id])
        return tb.purge()
def test_storage_closed_once():
    """An explicit close() plus the context-manager exit closes storage once."""
    class OneShotStorage(object):
        def __init__(self):
            self.closed = False

        def read(self):
            return {}

        def write(self, data):
            pass

        def close(self):
            # A second close() call would trip this assertion.
            assert not self.closed
            self.closed = True

    with Flata(storage=OneShotStorage) as db:
        db.close()

    del db
def test_json_kwargs(tmpdir):
    """Extra JSON kwargs (sort_keys/indent/separators) shape the file output."""
    db_file = tmpdir.join('test.db.json')
    db = Flata(str(db_file),
               sort_keys=True,
               indent=4,
               separators=(',', ': '))

    # A single record drives one pretty-printed write.
    tb = db.table('test_table')
    tb.insert({'b': 1})

    print(db_file.read())
    assert db_file.read() == '''{
    "test_table": [
        {
            "b": 1,
            "id": 1
        }
    ]
}'''
    db.close()
def test_query_memory_storage():
    """Mutating a search() result list must not affect subsequent queries."""
    db = Flata(storage=MemoryStorage)
    db.table('t').insert_multiple([{'name': 'foo', 'value': 42},
                                   {'name': 'bar', 'value': -1337}])

    positive = where('value') > 0
    hits = db.table('t').search(positive)
    assert len(hits) == 1

    # Mutate the returned list; the stored data must be unaffected.
    hits.extend([1])
    assert db.table('t').search(positive) == [{'id': 1,
                                               'name': 'foo',
                                               'value': 42}]
# Module-level setup for a Discord bot that tracks voice-channel presence.
import platform
from flata import Flata, where, Query
from flata.storages import JSONStorage
import json
import threading
import time

# Shared timer/thread handle, populated later.
t = None

# NOTE(review): `logging`, `Bot`, `discord` and the decorator target `client`
# rely on imports not visible in this chunk — presumably imported earlier in
# the file; confirm before reusing this snippet standalone.
logger = logging.getLogger('discord')
logger.setLevel(logging.DEBUG)
handler = logging.FileHandler(filename='discord.log', encoding='utf-8', mode='w')
handler.setFormatter(logging.Formatter('%(asctime)s:%(levelname)s:%(name)s: %(message)s'))
logger.addHandler(handler)

# Persistent stats store; table is created up-front so `get` succeeds.
db = Flata('db.json', storage=JSONStorage)
db.table('stats')
tb = db.get('stats')

# Start of script: build the bot client.
client = Bot(description="Discord BOT that collects info about people being in voice channel", command_prefix="$", pm_help = False)


@client.event
async def on_ready():
    # Startup banner: connection summary and an invite link for the bot.
    print('Logged in as '+client.user.name+' (ID:'+client.user.id+') | Connected to '+str(len(client.servers))+' servers | Connected to '+str(len(set(client.get_all_members())))+' users')
    print('--------')
    print('Current Discord.py Version: {} | Current Python Version: {}'.format(discord.__version__, platform.python_version()))
    print('--------')
    print('Use this link to invite {}:'.format(client.user.name))
    print('https://discordapp.com/oauth2/authorize?client_id={}&scope=bot&permissions=8'.format(client.user.id))
    print('--------')
def test_not_defaultid(tmpdir):
    """A custom id field name appears in stored records instead of 'id'."""
    path = str(tmpdir.join('test.db.json'))
    db = Flata(path)

    db.table('foo', id_field='_not_default_id').insert({'something': 'else'})
    assert db.table('foo').all() == [{'_not_default_id': 1,
                                      'something': 'else'}]
def test_caching_read():
    """A freshly created cached in-memory DB contains no records."""
    cached_db = Flata(storage=CachingMiddleware(MemoryStorage))
    assert not cached_db.all()