def test_upsert(db: TinyDB):
    """upsert() updates a matching document, or inserts when nothing matches."""
    assert len(db) == 3

    # An existing document matches: it is updated in place.
    db.upsert({'int': 5}, where('char') == 'a')
    assert db.count(where('int') == 5) == 1

    # Nothing matches: a new document is inserted and its id returned.
    assert db.upsert({'int': 9, 'char': 'x'}, where('char') == 'x') == [4]
    assert db.count(where('int') == 9) == 1
def test_serializer(tmpdir):
    """A datetime survives a round-trip through the serialization middleware."""
    db_path = str(tmpdir.join('db.json'))
    middleware = SerializationMiddleware(JSONStorage)
    middleware.register_serializer(DateTimeSerializer(), 'TinyDate')
    db = TinyDB(db_path, storage=middleware)

    stamp = datetime(2000, 1, 1, 12, 0, 0)
    db.insert({'date': stamp})
    db.insert({'int': 2})

    assert db.count(where('date') == stamp) == 1
    assert db.count(where('int') == 2) == 1
def test_update_multiple_operation(db: TinyDB):
    """update_multiple() applies a callable operation per condition."""
    def increment(field):
        # Build a transform that bumps `field` by one on each document.
        def transform(doc):
            doc[field] += 1
        return transform

    assert db.count(where('int') == 1) == 3

    db.update_multiple([
        (increment('int'), where('char') == 'a'),
        (increment('int'), where('char') == 'b'),
    ])

    assert db.count(where('int') == 2) == 2
def test_insert(db: TinyDB):
    """insert() adds exactly one document per call."""
    db.drop_tables()

    db.insert({'int': 1, 'char': 'a'})
    assert db.count(where('int') == 1) == 1

    # Three inserts with distinct chars all share int == 1.
    db.drop_tables()
    for char in ('a', 'b', 'c'):
        db.insert({'int': 1, 'char': char})

    assert db.count(where('int') == 1) == 3
    assert db.count(where('char') == 'a') == 1
def test_update_multiple(db: TinyDB):
    """update_multiple() applies each (fields, condition) pair in one call."""
    assert len(db) == 3

    db.update_multiple([
        ({'int': 2}, where('char') == 'a'),
        ({'int': 4}, where('char') == 'b'),
    ])

    # One original left untouched, one updated to 2, one to 4.
    assert db.count(where('int') == 1) == 1
    assert db.count(where('int') == 2) == 1
    assert db.count(where('int') == 4) == 1
def test_serializer_recursive(tmpdir):
    """Datetimes nested inside lists and dicts are serialized too."""
    db_path = str(tmpdir.join('db.json'))
    middleware = SerializationMiddleware(JSONStorage)
    middleware.register_serializer(DateTimeSerializer(), 'TinyDate')
    db = TinyDB(db_path, storage=middleware)

    stamp = datetime(2000, 1, 1, 12, 0, 0)
    now = datetime.utcnow()
    payload = {
        'dates': [
            {'date': stamp, 'hp': 100},
            {'date': now, 'hp': 1},
        ],
        'int': 10,
    }
    db.insert(payload)
    db.insert({'int': 2})

    assert db.count(where('dates').any(where('date') == stamp)) == 1
    assert db.count(where('int') == 2) == 1
def print_stats():
    """Print summary statistics about the recordings TinyDB database:
    totals, per-state counts, per-type counts (by path pattern), and a
    per-show tally sorted by title.
    """
    # Path of the recordings database comes from the global config dict.
    path = built_ins['db']['recordings']
    rec_db = TinyDB(path)
    shows = Query()
    shows_qry = shows.data
    field_title = '{:17}'

    print("Overview")
    print("-" * 50)
    # file_time_str: presumably formats the db file's mtime -- defined elsewhere.
    print(f"Built: {file_time_str(path)}")
    cnt = len(rec_db.all())
    print('{:10}'.format("Total Recordings") + ": " + f'{cnt}')
    cnt = rec_db.count(shows_qry.user_info.watched == True)  # noqa: E712
    print('{:10}'.format("Total Watched") + ": " + f'{cnt}')
    print()

    print("By Current Recording State")
    print("-" * 50)
    cnt = rec_db.count(shows_qry.video_details.state == 'finished')
    print(field_title.format("Finished") + ": " + f'{cnt}')
    cnt = rec_db.count(shows_qry.video_details.state == 'failed')
    print(field_title.format("Failed") + ": " + f'{cnt}')
    cnt = rec_db.count(shows_qry.video_details.state == 'recording')
    print(field_title.format("Recording") + ": " + f'{cnt}')
    print()

    print("By Recording Type")
    print("-" * 50)
    # Recording type is inferred from the stored path, case-insensitively.
    cnt = rec_db.count(shows.path.matches(f'.*episode.*', flags=re.IGNORECASE))
    print(field_title.format("Episodes/Series") + ": " + f'{cnt}')
    cnt = rec_db.count(shows.path.matches(f'.*movie.*', flags=re.IGNORECASE))
    print(field_title.format("Movies") + ": " + f'{cnt}')
    cnt = rec_db.count(shows.path.matches(f'.*sports.*', flags=re.IGNORECASE))
    print(field_title.format("Sports/Events") + ": " + f'{cnt}')
    cnt = rec_db.count(shows.path.matches(f'.*programs.*', flags=re.IGNORECASE))
    print(field_title.format("Programs") + ": " + f'{cnt}')
    print()

    print("By Show")
    print("-" * 50)
    # NOTE: `shows` is rebound here from the Query object to a plain dict
    # keyed by a sortable form of each show title.
    shows = {}
    max_width = 0
    for item in rec_db.all():
        title = item['data']['airing_details']['show_title']
        max_width = max(max_width, len(title))
        # _sortable_title: presumably strips leading articles etc. -- defined elsewhere.
        key = _sortable_title(title)
        if key not in shows.keys():
            shows[key] = {'cnt': 1, 'title': title}
        else:
            shows[key]['cnt'] += 1

    for key in sorted(shows.keys()):
        # print(f"{shows[key]['title']} - {shows[key]['cnt']}")
        # Left-pad every title to the widest one so the counts line up.
        print(('{:' + str(max_width) + '}').format(shows[key]['title'])
              + ' - {:>2}'.format(shows[key]['cnt']))
def test_update(db: TinyDB):
    """update() rewrites fields on matching documents only."""
    assert len(db) == 3

    db.update({'int': 2}, where('char') == 'a')

    # Exactly one document changed; the other two keep int == 1.
    assert db.count(where('int') == 2) == 1
    assert db.count(where('int') == 1) == 2
def insert_ordebook(booking_all, My_traid_symbol):
    """Store one snapshot of per-level order-book notionals for a symbol.

    booking_all: dict with "ask" and "bid" lists of {'size', 'price'} levels.
    My_traid_symbol: trading-pair symbol used as the record key.

    Each side is stored as a comma-separated string of size*price notionals,
    together with the current wall-clock time.
    """
    konto_ask = booking_all["ask"]  # sell side
    konto_ask.reverse()
    ask_str = ""
    for konto in konto_ask:
        notional = Decimal(konto['size']) * Decimal(konto['price'])
        ask_str += str(notional) + ","

    konto_bid = booking_all["bid"]  # buy side
    konto_bid.reverse()
    bid_str = ""
    # BUG FIX: the original iterated konto_ask here as well, so the stored
    # bid column was a duplicate of the ask data.
    for konto in konto_bid:
        notional = Decimal(konto['size']) * Decimal(konto['price'])
        bid_str += str(notional) + ","

    db_coins = TinyDB('datenbank/coins.json')
    # (Removed dead code: an unused `User = Query` binding and an unused
    # db_coins.count(...) query result.)
    db_coins.insert({
        'symbol': My_traid_symbol,
        'asking_ar': ask_str,
        'bidding_ar': bid_str,
        'time': str(datetime.datetime.now().time()),
        'leer3': str(0)
    })
def count_status():
    """Print how many people fall into each status bucket 0..4."""
    db = TinyDB("data.json")
    person = Query()
    for status in range(5):
        matches = db.count(person.status == status)
        print('status', status, 'have', matches, 'people')
    db.close()
class User_Repository:
    """TinyDB-backed repository of user records (create/read/delete helpers)."""

    def __init__(self, file_path, password_salt=""):
        # Path to the TinyDB JSON file backing this repository.
        self.file_path = file_path
        self.db = TinyDB(self.file_path)
        self.password_salt = password_salt
        # Password manager used to hash passwords with the configured salt.
        self.pw_manager = PM(salt=self.password_salt)

    #takes a job request object and inserts it into our database
    def create_user(self, user):
        """Build a full user record from the request dict and insert it.

        Adds a random UUID, creation timestamp, hashed password and the
        integer user-type code; returns the stored record.
        """
        user_id = uuid.uuid4()
        user_id_string = str(user_id)
        create_date = datetime.datetime.now()
        create_date_string = str(create_date)
        # Map the symbolic user type (e.g. 'admin') to its integer code.
        user_type_int = UserTypeMapping[user['type']]
        hash = self.pw_manager.sha512_hash(user['password'])
        user = {
            'first': user['first'],
            'last': user['last'],
            'name': user['name'],
            'id': user_id_string,
            'create_date': create_date_string,
            'password': hash,
            'type': user_type_int
        }
        self.db.insert(user)
        return user

    def delete_user(self, user_id):
        # NOTE(review): tinydb.operations.delete('email') removes only the
        # 'email' field from the matching document -- the user record itself
        # is NOT deleted. Confirm this soft-delete behavior is intended.
        user = Query()
        self.db.update(delete('email'), user.id == user_id)
        return True

    def update_user_password(self, user_id):
        # NOTE(review): tinydb.operations.set normally takes (field, value);
        # set('password') here is missing the new value argument -- verify
        # against the tinydb version in use.
        user = Query()
        self.db.update(set('password'), user.id == user_id)

    def get_user(self, user_id):
        """Return the list of documents whose 'id' equals user_id."""
        user = Query()
        #query tinyDB for a user matching user_id
        result = self.db.search(user.id == user_id)
        return result

    def get_user_from_name(self, name):
        """Return the first user document whose 'name' equals name.

        Raises IndexError when no user matches.
        """
        user = Query()
        #query tinyDB for a user matching user name
        result = self.db.search(user.name == name)
        #return first matching name
        return result[0]

    def get_all_users(self):
        """Return every document in the database."""
        #query all users
        result = self.db.all()
        return result

    def get_count_all_users(self):
        """Count users by matching each document's id against itself
        (i.e. every document that has an 'id' field)."""
        user = Query()
        #get all entries
        result = self.db.count(user.id == user.id)
        return result
def test_upgrade(tmpdir):
    """Migrating a v1.0 database file succeeds and preserves its data."""
    db_file = tmpdir.join('db.json')
    db_file.write(v1_0)

    # The migration must report success.
    assert migrate(str(db_file)) is True

    db = TinyDB(str(db_file))
    assert db.count(where('key') == 'value') == 1
class CredentialRepo():
    '''
    CredentialRepo
    '''

    def __init__(self):
        '''
        Open (or create) credentials_db.json stored next to this module.
        '''
        module_path = os.path.abspath(__file__)
        module_dir = os.path.dirname(module_path)
        self.db = TinyDB('{}/credentials_db.json'.format(module_dir))
        self.query = Query()

    def addCredential(self, userName, password, title):
        '''Insert a credential; True when exactly one entry with this title exists afterwards.'''
        self.db.insert({
            'userName': userName,
            'password': password,
            'title': title
        })
        return self.db.count(self.query.title == title) == 1

    def deleteCredential(self, title):
        '''Remove all credentials with this title; True when none remain.'''
        self.db.remove(self.query.title == title)
        return self.db.count(self.query.title == title) == 0

    def updateCredential(self, userName, password, title):
        '''Update the credential with this title; True when the stored values match.'''
        self.db.update({
            'userName': userName,
            'password': password
        }, self.query.title == title)
        updated = self.db.search(self.query.title == title)[0]
        return updated['userName'] == userName and updated['password'] == password

    def getCredential(self, title):
        '''Return all credentials matching this title.'''
        return self.db.search(self.query.title == title)

    def getAllCredentials(self):
        '''Return every stored credential.'''
        return self.db.all()
def test_cutom_mapping_type_with_json(tmpdir):
    """Custom Mapping implementations behave like dicts for insert/write_back."""
    from tinydb.database import Mapping

    class CustomDocument(Mapping):
        """Minimal Mapping wrapper around a plain dict."""

        def __init__(self, data):
            self.data = data

        def __getitem__(self, key):
            return self.data[key]

        def __iter__(self):
            return iter(self.data)

        def __len__(self):
            return len(self.data)

    # Single insert of a custom mapping works like a dict insert.
    db = TinyDB(str(tmpdir.join('test.db')))
    db.purge()
    db.insert(CustomDocument({'int': 1, 'char': 'a'}))
    assert db.count(where('int') == 1) == 1

    # Bulk insert of custom mappings.
    db.insert_multiple([
        CustomDocument({'int': 2, 'char': 'a'}),
        CustomDocument({'int': 3, 'char': 'a'}),
    ])
    assert db.count(where('int') == 1) == 1
    assert db.count(where('int') == 2) == 1
    assert db.count(where('int') == 3) == 1

    # write_back replaces the document stored under the given doc_id.
    doc_id = db.get(where('int') == 3).doc_id
    db.write_back([CustomDocument({'int': 4, 'char': 'a'})], [doc_id])
    assert db.count(where('int') == 3) == 0
    assert db.count(where('int') == 4) == 1
def Function_Dbinsert(Insert_List):
    """Insert [name, age, gender] into the save-list database unless an
    identical row already exists.

    Insert_List: sequence of [name, age, gender].
    Side effect: binds the open database to the module-global `db`.
    """
    from tinydb import TinyDB, Query
    global db
    # BUG FIX: the original opened 'Save_List.jon' (typo), silently creating
    # a second database file instead of the intended 'Save_List.json'.
    db = TinyDB('Save_List.json')

    record = Query()
    # A row is a duplicate only when name, age AND gender all match.
    match = ((record.name == Insert_List[0])
             & (record.age == Insert_List[1])
             & (record.gender == Insert_List[2]))

    if db.count(match) > 0:
        print("The entry is existing already")
    else:
        db.insert({'name': Insert_List[0],
                   'age': Insert_List[1],
                   'gender': Insert_List[2]})
        print("The Following Row has been added")
        print(db.search(match))
def test_update_transform(db: TinyDB):
    """update() accepts a callable that mutates each matching document."""
    def increment(field):
        # Transform: bump `field` by one.
        def transform(doc):
            doc[field] += 1
        return transform

    def remove_field(field):
        # Transform: drop `field` from the document.
        def transform(doc):
            del doc[field]
        return transform

    assert db.count(where('int') == 1) == 3

    db.update(increment('int'), where('char') == 'a')
    db.update(remove_field('char'), where('char') == 'a')

    assert db.count(where('int') == 2) == 1
    assert db.count(where('char') == 'a') == 0
    assert db.count(where('int') == 1) == 2
def addZone():
    """Flask endpoint: add a new zone unless the name exists or inputs are blank."""
    payload = request.get_json()
    new_zone = payload["newZone"]
    role = payload["role"]

    zones = TinyDB("data.json").table("Zone")
    query = Query()

    # Reject duplicates and blank zone name / role.
    if zones.count(query.zoneName == new_zone) > 0 or new_zone == '' or role == '':
        return "the Zone is exist", 500

    counter = len(zones) + 1
    zones.insert({
        "zoneId": counter,
        "zoneName": new_zone,
        "accessLevel": role,
        "items": []
    })
    return "The zone successfully Added", 200
def test_insert_multiple(db: TinyDB):
    """insert_multiple() accepts lists, generator functions and inline generators."""
    db.drop_tables()
    assert not db.contains(where('int') == 1)

    # From a list of documents.
    docs = [{'int': 1, 'char': char} for char in 'abc']
    db.insert_multiple(docs)
    assert db.count(where('int') == 1) == 3
    assert db.count(where('char') == 'a') == 1

    # From a generator function.
    def make_docs():
        for value in range(10):
            yield {'int': value}

    db.drop_tables()
    db.insert_multiple(make_docs())
    for value in range(10):
        assert db.count(where('int') == value) == 1
    assert db.count(where('int').exists()) == 10

    # From an inline generator expression.
    db.drop_tables()
    db.insert_multiple({'int': value} for value in range(10))
    for value in range(10):
        assert db.count(where('int') == value) == 1
def ord_book_bid_ask(booking_all, My_traid_symbol):
    """Persist the latest ask/bid notionals for a symbol and, when a previous
    snapshot exists, return its parsed values.

    booking_all: dict with "ask" and "bid" lists of {'size', 'price'} levels.
    Returns (previous_ask_notionals, previous_bid_notionals) as Decimal lists
    when the symbol was already stored, otherwise None.
    """
    konto_ask = booking_all["ask"]  # sell side
    konto_ask.reverse()
    ask_str = ""
    for konto in konto_ask:
        notional = Decimal(konto['size']) * Decimal(konto['price'])
        ask_str += str(notional) + ","

    konto_bid = booking_all["bid"]  # buy side
    konto_bid.reverse()
    bid_str = ""
    # BUG FIX: the original iterated konto_ask here as well, so the stored
    # bid column was a duplicate of the ask data.
    for konto in konto_bid:
        notional = Decimal(konto['size']) * Decimal(konto['price'])
        bid_str += str(notional) + ","

    db_coins = TinyDB('datenbank/coins.json')
    set_count = db_coins.count(where('symbol') == My_traid_symbol)

    if set_count == 0:
        # First sighting of this symbol: create its record.
        db_coins.insert({
            'symbol': My_traid_symbol,
            'asking_ar': ask_str,
            'bidding_ar': bid_str,
            'leer2': str(0),
            'leer3': str(0)
        })

    if set_count == 1:
        # Read the previous snapshot before overwriting it.
        db_set = db_coins.search(where('symbol') == My_traid_symbol)
        asking_ar_db = db_set[0]['asking_ar']
        bidding_ar_db = db_set[0]['bidding_ar']
        db_coins.update({'asking_ar': ask_str, 'bidding_ar': bid_str},
                        where('symbol') == My_traid_symbol)

        asking_vals = asking_ar_db.split(',')
        bidding_vals = bidding_ar_db.split(',')
        previous_asks = []
        previous_bids = []
        # Skip the first two columns (offset rows from the result layout);
        # the trailing "" comes from the final comma.
        for counter, value in enumerate(asking_vals):
            if counter >= 2 and value != "":
                previous_asks.append(Decimal(value))
                previous_bids.append(Decimal(bidding_vals[counter]))
        return previous_asks, previous_bids
def test_cutom_mapping_type_with_json(tmpdir):
    """Non-dict Mapping objects are accepted by insert, insert_multiple and write_back."""
    from tinydb.database import Mapping

    class CustomDocument(Mapping):
        """Dict-backed Mapping used to exercise non-dict inserts."""

        def __init__(self, data):
            self.data = data

        def __getitem__(self, key):
            return self.data[key]

        def __iter__(self):
            return iter(self.data)

        def __len__(self):
            return len(self.data)

    storage_path = str(tmpdir.join('test.db'))
    db = TinyDB(storage_path)
    db.purge()

    # Insert one custom mapping.
    db.insert(CustomDocument({'int': 1, 'char': 'a'}))
    assert db.count(where('int') == 1) == 1

    # Insert several at once.
    documents = [CustomDocument({'int': n, 'char': 'a'}) for n in (2, 3)]
    db.insert_multiple(documents)
    for n in (1, 2, 3):
        assert db.count(where('int') == n) == 1

    # write_back swaps the stored document under the given id.
    target_id = db.get(where('int') == 3).doc_id
    db.write_back([CustomDocument({'int': 4, 'char': 'a'})], [target_id])
    assert db.count(where('int') == 3) == 0
    assert db.count(where('int') == 4) == 1
def is_duplicate(db: TinyDB, todo: Dict) -> bool:
    """Return True when an identical todo already exists in the database.

    Habitica may re-use ids after a todo is deleted server-side, so a
    duplicate is defined as matching id AND title AND completion date.
    """
    query = Query()
    duplicates_count = db.count(
        (query.id == todo.get('id'))
        & (query.title == todo.get('title'))
        & (query.date_completed == todo.get('date_completed')))

    if duplicates_count == 0:
        return False

    print(
        f'Duplicate encoutered! A todo in input file matches {duplicates_count} todo(s) in database (matches id, name and completed date)'
    )
    print(
        'Skipping insertion. Please verify that this behavior is correct')
    print(f'{todo}')
    return True
def add_table(Coin_symbol, Tag, table_name, kuerzel, quelle, close_array, volume_array, volume_avrage, close_avrage):
    """Rebuild the per-coin day table and register the coin's averages.

    Coin_symbol: trading symbol used as key and display name.
    Tag: per-row day labels (German "Tag" = day), parallel to close_array.
    close_array / volume_array: parallel arrays of closes and volumes.
    volume_avrage / close_avrage: averages stored once per symbol.
    kuerzel, quelle: accepted but unused here -- presumably kept for the
    caller's signature; verify.
    """
    if len(close_array)==len(volume_array):
        db_kalender = TinyDB('aa_Tabellen/'+table_name+'.json')
        db_administartor= TinyDB('aa_Tabellen/administartor.json')
        # Register the averages only on first sight of this symbol.
        set_count = db_administartor.count(where('symbol') == Coin_symbol)
        if set_count == 0:
            db_administartor.insert({'symbol': Coin_symbol ,'volume_avrage': volume_avrage, 'close_avrage': close_avrage})
        # Rebuild the day table from scratch.
        db_kalender.purge()
        for i in np.arange(len(close_array)):
            db_kalender.insert({"aa_Tag": Tag[i], "kuerzel": Coin_symbol, "fullName": Coin_symbol, "close": close_array[i], "volume": volume_array[i]})
    else:
        # Array lengths disagree: show an error dialog and write nothing.
        # (Message text is German: "array lengths are unequal".)
        win32api.MessageBox(0,"fehler g69j. array len sind ungleich" + str(len(close_array))+ " "+ str(len(volume_array)), 'Insert Fehler')
    # NOTE(review): db_kalender is only bound inside the `if` branch above;
    # when the lengths differ, this check raises NameError -- confirm intent.
    if len(close_array)==len(db_kalender):
        #win32api.MessageBox(0,"Table Neueintrag erfolgreich", '!')
        # Success path: row count in the table matches the input length.
        print(str(len(close_array))+" "+Coin_symbol+" Table Neueintrag erfolgreich "+ str(len(db_administartor)) )
    else:
        # Row count mismatch after rebuild: report via dialog.
        win32api.MessageBox(0,Coin_symbol+ " close_array(" + str(len(close_array))+ ") table_sets("+ str(len(db_kalender))+") passen nicht", '!')
class Vanguard(object):
    """Round-robin picker: selects the next member who has not yet been
    'vanguarded' from a TinyDB roster, resetting the rotation when everyone
    has had a turn.
    """

    def __init__(self, db_filename):
        # db_filename: path to the TinyDB roster file.
        self.db = TinyDB(db_filename)
        member = self.get_member()
        self.name = member['name']
        self.slack_id = member['slackid']
        # .eid is the legacy TinyDB (<v3 'doc_id') element id of the record.
        self.element_id = member.eid

    def get_member(self):
        """Return the first member with vanguarded == False, resetting the
        whole roster first when no such member remains."""
        members = Query()
        if self.db.count(members.vanguarded == False) == 0:
            # update() with no condition touches every document: start a
            # fresh rotation.
            self.db.update({'vanguarded': False})
        member = self.db.search(members.vanguarded == False)[0]
        return member

    def update_member(self):
        """Mark the chosen member as used for this rotation.
        Uses the legacy `eids=` keyword (tinydb < 4)."""
        self.db.update({'vanguarded': True}, eids=[self.element_id])
def test_insert_valid_mapping_type(db: TinyDB):
    """Any object implementing the Mapping protocol is accepted by insert()."""
    class CustomDocument(Mapping):
        """Thin Mapping facade over a dict."""

        def __init__(self, data):
            self.data = data

        def __getitem__(self, key):
            return self.data[key]

        def __iter__(self):
            return iter(self.data)

        def __len__(self):
            return len(self.data)

    db.drop_tables()
    db.insert(CustomDocument({'int': 1, 'char': 'a'}))
    assert db.count(where('int') == 1) == 1
class FileStorage(object):
    """Leave records stored in ~/.leavemanager/data.json via TinyDB.

    Records are keyed by their 'rawdate' field; 'year' and 'approved'
    fields support reporting queries.
    """

    def __init__(self):
        filepath = Path.home().joinpath(".leavemanager")
        conf = filepath.joinpath("data.json")
        self.db = TinyDB(str(conf))
        self.leave = Query()

    def get(self, date):
        """Return the record for *date*, or None when absent."""
        # BUG FIX: the original referenced the bare name `leave` (NameError
        # at call time); the query object lives on self.leave.
        return self.db.get(self.leave.rawdate == date)

    def put(self, date, data):
        """Insert *data* unless a record for *date* exists; True on insert."""
        existing = self.db.get(self.leave.rawdate == date)
        if existing:
            return False
        self.db.insert(data)
        return True

    def delete(self, date):
        """Remove the record for *date*; True when something was removed."""
        existing = self.db.get(self.leave.rawdate == date)
        if existing:
            self.db.remove(doc_ids=[existing.doc_id])
            return True
        return False

    def update(self, date, update):
        """Apply the *update* mapping to the record for *date*."""
        self.db.update(update, self.leave.rawdate == date)

    def countdays(self, year):
        """Number of leave records in *year*."""
        return self.db.count(self.leave.year == year)

    def all(self):
        """Every stored record."""
        return self.db.all()

    def all_pending(self, year):
        """Unapproved leave records in *year*.

        BUG FIX: the original combined the two queries with Python `and`,
        which evaluates to the second operand only, silently dropping the
        year filter; TinyDB queries must be combined with `&`.
        """
        return self.db.search((self.leave.year == year)
                              & (self.leave.approved == False))  # noqa: E712

    def search(self, year):
        """All leave records in *year*."""
        return self.db.search(self.leave.year == year)
def insert_ordebook(booking_all, My_traid_symbol):
    """Append one raw order-book snapshot (sizes and prices per level) for a
    symbol to datenbank/orderbook.json.

    booking_all: dict with "timestamp" plus "ask" and "bid" lists of
    {'size', 'price'} levels. Each column is stored as a comma-separated
    string, stamped with the book's own timestamp.
    """
    time_this = booking_all["timestamp"]

    konto_ask = booking_all["ask"]  # sell side
    konto_ask.reverse()
    ask_count = ""
    ask_price = ""
    for konto in konto_ask:
        ask_count += str(konto['size']) + ","
        ask_price += str(konto['price']) + ","

    konto_bid = booking_all["bid"]  # buy side
    konto_bid.reverse()
    bid_count = ""
    bid_price = ""
    for konto in konto_bid:
        bid_count += str(konto['size']) + ","
        bid_price += str(konto['price']) + ","

    # (Removed dead code: `User = Query` -- a no-op binding missing its call
    # parentheses -- and an unused db_coins.count(...) query.)
    db_coins = TinyDB('datenbank/orderbook.json')
    db_coins.insert({
        'symbol': My_traid_symbol,
        'ask_count': ask_count,
        'ask_price': ask_price,
        'bid_count': bid_count,
        'bid_price': bid_price,
        'time': time_this,
        'leer3': str(0)
    })
class Tiny(object):
    """TinyDB Tools"""

    def __init__(self, pth):
        """Open (or create) the TinyDB file at `pth`.

        pth (str): TinyDB filename
        """
        self.db = TinyDB(pth)

    def purge(self):
        """Remove every entry from the database."""
        self.db.purge()

    def close(self):
        """Release the underlying db file."""
        self.db.close()

    def insert(self, _obj):
        """Insert `_obj`, first blanking any non-ascii MongoDB "_id" field.

        _obj (dict): new object to insert
        """
        if "_id" in _obj:
            _obj["_id"] = "N/A"
        self.db.insert(_obj)

    def upsert(self, _obj, _query):
        """Update the single document matching `_query`, or insert `_obj`.

        _obj (dict): new object to update/insert
        _query (dict): TinyDB query syntax

        Raises ValueError when `_query` matches more than one document.
        """
        if not (_query and self.db.contains(_query)):
            # No query given or nothing matches: store as a new entry.
            self.db.insert(_obj)
            return
        if self.db.count(_query) > 1:
            raise ValueError("Multiple matches with `{}`".format(_query))
        self.db.update(_obj, _query)
#rows = db.get(('task'), where('priority') == 10) #rows = db.search(where('id')) start = datetime.datetime.now() for x in range(0, 1000): db.insert({'id': 1, 'task': 'Read A-byte-of-python to get a good introduction into Python', 'status': '0'}) db.insert({'id': 1, 'task': 'Read A-byte-of-python to get a good introduction into Python', 'status': '0', 'priority': 10}) end = datetime.datetime.now() exc_time = end - start print ("Execution time : ", exc_time) stime = datetime.datetime.now() rows = db.search(where('id')) etime = datetime.datetime.now() total_time = etime - stime print ("Fecthing time : ", total_time) count = db.count(where('id')) print (count) #import sqlite3 #import datetime #con = sqlite3.connect('homework_sqlite.db') #cursor = con.cursor() #start = datetime.datetime.now() #for x in range(0, 10000): #con.execute("CREATE TABLE Homework (id INTEGER PRIMARY KEY, task char(100) NOT NULL, status bool NOT NULL)") # con.execute("INSERT INTO Homework (task,status) VALUES ('Finish software testing Homework',0)") # con.execute("INSERT INTO Homework (task,status) VALUES ('Finished SQlite part',1)") #con.commit() #end = datetime.datetime.now() #exc_time = end - start
# Each row is one WIDTH x HEIGHT RGB image flattened to DIM bytes.
# NOTE(review): WIDTH/HEIGHT and the process() helper are defined outside
# this chunk; this TinyDB is a row-store/map-reduce helper (arg_parser /
# groups / chunks API), not the tinydb document database.
CHANNELS = 3
DIM = WIDTH * HEIGHT * CHANNELS

tdb = TinyDB(dimensions = DIM, parse_args = None)
tdb.arg_parser().add_argument("--mean", required = True)
tdb.arg_parser().add_argument("--std", required = True)
tdb.arg_parser().add_argument("--rows", type = int, default = 20000)
tdb.arg_parser().add_argument("-o", required = True)
args = tdb.parse_args()

# read the mean for each dimension
mean = np.array([float(i) for i in open(args.mean).readline().strip().split(" ")], np.float64)
assert(len(mean) == DIM)

# read the standard deviation for each dimension
std = np.array([float(i) for i in open(args.std).readline().strip().split(" ")], np.float64)
assert(len(std) == DIM)

def compute(m):
    # Standardize each row ((x - mean) / std) and return this group's
    # Gram-matrix contribution k^T k.
    k = np.matrix([np.fromstring(i, np.uint8) for i in m]) - mean
    k = k / std
    return k.transpose() * k

# Map compute() over groups of rows and sum the partial matrices.
jobs = process(tdb.groups(args.rows), compute)
m = reduce(lambda acc, x: acc + x, jobs, np.zeros((DIM, DIM), np.float64))

# Python 2 print-to-stderr syntax: this script targets Python 2.
print >> sys.stderr, "processed rows:", tdb.count()
sio.savemat(args.o, {"c": m, "n": tdb.count()}, do_compression = True)
from tinydb import TinyDB, Query # https://tinydb.readthedocs.io/en/latest/usage.html db = TinyDB("score.json") db.purge() db.insert({"name": "math", "score": 100}) db.insert({"name": "english", "score": 90}) db.insert({"name": "japanese", "score": 80}) S = Query() print(db.count(S.score >= 90)) print(db.search(S.name.matches(".*h$"))) print(db.search(S.name.search("h$"))) print(db.search(S.name.test(lambda v: v in ["math", "japanese"]))) print(db.search((S.name == "math") | (S.name == "japanese"))) print(db.search((S.name == "math") & (S.score >= 80))) # for collection, using all,any
def test_update_ids(db: TinyDB):
    """Updating by explicit doc ids touches exactly those documents."""
    db.update({'int': 2}, doc_ids=[1, 2])

    assert db.count(where('int') == 2) == 2
def test_remove(db: TinyDB):
    """remove() deletes only the matching document."""
    db.remove(where('char') == 'b')

    assert len(db) == 2
    assert db.count(where('int') == 1) == 2
#!/usr/bin/env python
"""Compute the per-dimension standard deviation of all rows, given a
precomputed per-dimension mean, and write it to the -o output file."""
from tinydb import TinyDB
import numpy as np

tdb = TinyDB(parse_args=False)
tdb.arg_parser().add_argument("-o", required=True)
tdb.arg_parser().add_argument("--mean", required=True)
args = tdb.parse_args()

# Read the per-dimension mean (space-separated floats on one line).
mean = np.array([float(i) for i in open(args.mean).readline().strip().split(" ")], np.float64)
# NOTE(review): hard-coded dimensionality; presumably tdb.dim() == 3072.
assert len(mean) == 3072

# Accumulate squared deviations chunk by chunk.
z = np.zeros(tdb.dim(), np.float64)
for i in tdb.chunks():
    # np.fromstring on raw bytes is deprecated; np.frombuffer is the
    # supported zero-copy equivalent.
    z += np.power(np.float64(np.frombuffer(i, np.uint8)) - mean, 2)
z = np.sqrt(z / tdb.count())

open(args.o, "w").write(" ".join([repr(i) for i in z.flatten()]))
class ThoughtCache(object):
    """
    Quickly and simply cache python objects (inc. functions and classes)
    into a NoSQL database for later tag-based retrieval
    """
    def __init__(self, path='tc_db.json', encoder=jsonpickle.encode,
                 decoder=jsonpickle.decode):
        """
        Initialize. If specifying encoder and decoder, it must accept a
        signature of f(o). If not, use `partial`, lambda, etc. to wrap it.
        :param path: path to TinyDB json database
        :param encoder: function to encode objects
        :param decoder: function to decode objects
        :return: None
        """
        self.db = TinyDB(path)
        self.encode, self.decode = encoder, decoder
        # Running tally of tag usage; rebuilt by update_tags().
        self._tags = collections.Counter()

    def store(self, o, metadata):
        """
        Store object `o` in the database as `metadata['name']`.
        `metadata['name']` must be provided, but need not be unique. If no
        UUID is provided in the metadata, a random one is created and used.
        UUIDs are the authoritative source of identity, so they are the only
        basis on which a record will be overwritten without regard to any
        other fields. If an existing UUID is specified, *ALL* fields will be
        overwritten, not only the ones specified.

        UUID may be a valid UUID string or None. If you don't intend to
        overwrite an existing record, simply remove the UUID key from the
        dict entirely, or leave as None.

        :param o: The actual object to be stored.
        :param metadata: dict of metadata with the following format:
            {
            'time':time.time(),
            'location':'work',
            'tag':['general networking'],
            'name':'ExtendedPingResult'
            # 'name' is mandatory because it cannot be reliably inferred
            # at runtime.
            'uuid':'85280d8e-66bf-4e65-814f-507a65c0375c'
            }
        :return:
        """
        # Default path inserts; switch to update when the UUID already exists.
        store = self._store
        if 'uuid' not in metadata:
            metadata['uuid'] = str(uuid.uuid4())
        elif self._test_uuid(metadata['uuid']):
            store = self._update
        # The object itself rides along as an encoded 'json' field.
        metadata['json'] = self.encode(o)
        store(metadata)

    def retrieve_json(self, metadata_filter):
        """
        Retrieve list of JSON objects that match the given filter. If uuid
        is specified, all other criteria are ignored.

        All non-uuid fields that are specified are, unless otherwise noted,
        treated as a series of 'OR' clauses that are 'AND'ed together.
        `if (location in locations) and (tag in tags): ` etc...

        :param metadata_filter: dict of values to filter against.
            Currently supported:
            {
            'time': day, month, year or range #NOT IMPLEMENTED
            'location': 'work', 'home', etc...
            'name': where('name').contains Value. Case Sensitive
            'tag': single entry or list of entries
            'uuid': single entry or list of entries
            }
        :return: list of JSON strings representing the objects
        """
        # metadata_filter_to_query / sequenceify are module-level helpers
        # defined outside this class.
        query = metadata_filter_to_query(metadata_filter)
        return [x['json'] for x in sequenceify(self.db.search(query))]

    def retrieve(self, metadata_filter):
        """
        Returns reconstructed objects that match filter.
        :param metadata_filter: See ThoughtCache.retrieve_json.__doc__
        :return:
        """
        json_list = self.retrieve_json(metadata_filter)
        return list(map(self.decode, json_list))

    def tag_search(self, *tags):
        """
        Given a string or list of strings, return entries that with one or
        more matching tags (if A in B)
        :param tags: string or list of strings
        :return: list of json strings
        """
        metadata_filter = {'tag':sequenceify(tags)}
        return self.retrieve(metadata_filter)

    def update_tags(self):
        """
        updates internal tag Counter.
        :return: None
        """
        self._tags.clear()
        # Every document that has a 'tag' field contributes its tags.
        elements = self.db.search(where('tag'))
        for element in elements:
            self._tags.update(element['tag'])

    @property
    def tags(self):
        """
        Returns list of all tags currently in use in the database
        :return: list of tags (str).
        """
        self.update_tags()
        return list(self._tags.keys())

    def matching_tags(self, substr):
        """
        Returns list of all tags currently in use in the database that
        contain substr.
        :param substr: substring to search for. Case insensitive.
        :return: list of tags (str).

        NOTE(review): only the stored tag is lowercased; an uppercase
        `substr` will never match, so this is not fully case-insensitive
        as documented -- verify.
        """
        return [x for x in self.tags if (x.lower().find(substr) > -1)]

    def _test_uuid(self, o_uuid):
        """
        Tests for the existence of a provided object UUID string in the
        database.
        :param o_uuid: object UUID string
        :return: True if UUID exists, else False
        """
        i = self.db.count(where('uuid') == o_uuid)  # number of matching queries
        # NOTE(review): message lacks the f-prefix, so {o_uuid} is printed
        # literally rather than interpolated -- verify.
        assert i < 2, "uuid {o_uuid} is not unique in database!"
        return bool(i)

    def _store(self, metadata):
        """
        Store new entry into db using provided metadata
        :param json: JSON Object
        :param metadata: Dict of metadata. Must include 'uuid', 'name',
            and 'json'.
        :return:
        """
        self.db.insert(metadata)

    def _update(self, metadata):
        """
        update existing db record. Preserves TinyDB behavior: overwrites
        existing fields, adds new fields, does not delete fields.
        :param metadata: Dict of metadata, must include 'name', and 'uuid',
            and 'json'
        :return:
        """
        # Look up the legacy element id (tinydb < 3 'eid') for the UUID.
        element = self.db.get(where('uuid') == metadata['uuid'])
        self.db.update(metadata, eids=[element.eid])
def test_count(db: TinyDB):
    """count() reports matches, returning 0 when nothing matches."""
    assert db.count(where('int') == 1) == 3
    assert db.count(where('char') == 'd') == 0
def test_update_all(db: TinyDB):
    """update() with no condition touches every document."""
    assert db.count(where('int') == 1) == 3

    db.update({'newField': True})

    assert db.count(where('newField') == True) == 3  # noqa
#!/usr/bin/env python
"""Compute the per-dimension mean over all rows and write it to -o."""
from tinydb import TinyDB
import numpy as np

tdb = TinyDB(parse_args=False)
tdb.arg_parser().add_argument("-o", required=True)
args = tdb.parse_args()

# Sum rows in int64 (avoids uint8 overflow), then divide by the row count.
z = np.zeros(tdb.dim(), np.int64)
for i in tdb.chunks():
    # np.fromstring on raw bytes is deprecated; np.frombuffer is the
    # supported zero-copy equivalent.
    z += np.frombuffer(i, np.uint8)
z = np.float64(z) / tdb.count()

open(args.o, "w").write(" ".join([repr(i) for i in z.flatten()]))