def download(name, force=False):
    """Download a PyThaiNLP corpus by name and track it in the local TinyDB.

    name:  corpus key in the remote db.json index.
    force: when True, skip the interactive y/n confirmation prompts.

    Fix: the original combined Query clauses with Python `and`, which
    evaluates to only the second clause; use `&` so both name AND version
    must match.  Also `force == False` -> `not force`.
    """
    db = TinyDB(path_db_)
    temp = Query()
    data = requests.get("https://raw.githubusercontent.com/PyThaiNLP/pythainlp-corpus/master/db.json")
    data_json = data.json()
    if name in list(data_json.keys()):
        temp_name = data_json[name]
        print("Download : " + name)
        if len(db.search(temp.name == name)) == 0:
            # Not installed yet: fetch and record name/version/file.
            print(name + " " + temp_name['version'])
            download_(temp_name['download'], temp_name['file_name'])
            db.insert({'name': name,
                       'version': temp_name['version'],
                       'file': temp_name['file_name']})
        else:
            if len(db.search((temp.name == name)
                             & (temp.version == temp_name['version']))) == 0:
                # Installed but version differs: offer an update.
                print("have update")
                print("from " + name + " "
                      + db.search(temp.name == name)[0]['version']
                      + " update to " + name + " " + temp_name['version'])
                yes_no = "y"
                if not force:
                    yes_no = str(input("y or n : ")).lower()
                if "y" == yes_no:
                    download_(temp_name['download'], temp_name['file_name'])
                    db.update({'version': temp_name['version']}, temp.name == name)
            else:
                # Same version already recorded: offer a re-download.
                print("re-download")
                print("from " + name + " "
                      + db.search(temp.name == name)[0]['version']
                      + " update to " + name + " " + temp_name['version'])
                yes_no = "y"
                if not force:
                    yes_no = str(input("y or n : ")).lower()
                if "y" == yes_no:
                    download_(temp_name['download'], temp_name['file_name'])
                    db.update({'version': temp_name['version']}, temp.name == name)
    db.close()
def log_output(filename, source):
    """Record *filename* as an output of the current run in the recipy DB."""
    abs_path = os.path.abspath(filename)
    if option(CONFIG, 'general', 'debug'):
        print("Output to %s using %s" % (abs_path, source))
    # Append the absolute path to this run's "outputs" list.
    store = TinyDB(DBFILE)
    store.update(append("outputs", abs_path), eids=[RUN_ID])
    store.close()
def hide_all():
    """Mark every record that is neither hidden nor starred as hidden."""
    store = TinyDB(DB_FILENAME)
    query = Query()
    # Collect the ids of all currently visible, unstarred records.
    visible = store.search((query.hidden == False) & (query.starred == False))
    store.update({'hidden': True}, eids=[record.eid for record in visible])
    store.close()
    return 'OK'
def annotate():
    """Persist the submitted notes onto a run, then redirect to its detail page."""
    store = TinyDB(recipyGui.config.get('tinydb'))
    notes = request.form['notes']
    run_id = int(request.form['run_id'])
    query = request.args.get('query', '')
    store.update({'notes': notes}, eids=[run_id])
    store.close()
    return redirect(url_for('run_details', id=run_id, query=query))
def RemovePokemon(self, username, chatid, pokemon):
    """Decrement *pokemon* in the stored roster for (username, chatid).

    Fixes: removed the unused `my_pokemon_cur` lookup, the commented-out
    debug prints and the dead trailing `pass`; behavior is unchanged.
    """
    db = TinyDB('users.json')
    Username = Query()
    user = db.search((Username.username == username)
                     & (Username.chatid == chatid))
    # Build the decremented roster and persist it back onto this user.
    my_pokemon_new = self.DecrementPokeArr(user[0]['pokemon'], pokemon)
    db.update({'pokemon': my_pokemon_new},
              ((Username.username == username)
               & (Username.chatid == chatid)))
    # TODO(review): original comment says "RETURN: check bool" — a success
    # flag is still not returned; confirm whether callers expect one.
class TinyDBStore(object):
    """Persistence for in-progress event drafts and finished events,
    backed by two separate TinyDB files."""

    def __init__(self):
        self.drafts_db = TinyDB('event_drafts.json')
        self.events_db = TinyDB('events.json')

    # ---- drafts ----

    def contains_draft(self, user_id):
        """True when the user already has a draft in progress."""
        User = Query()
        return self.drafts_db.contains(User.user_id == user_id)

    def new_draft(self, user_id):
        """Start a fresh empty draft, discarding any existing one."""
        User = Query()
        if self.contains_draft(user_id):
            self.drafts_db.remove(User.user_id == user_id)
        self.drafts_db.insert({
            'user_id': user_id,
            'current_field': 0,
            'event': {}
        })

    def update_draft(self, user_id, event, current_field):
        """Overwrite the user's draft content and cursor position."""
        User = Query()
        self.drafts_db.update({
            'user_id': user_id,
            'current_field': current_field,
            'event': event
        }, User.user_id == user_id)

    def get_draft(self, user_id):
        User = Query()
        return self.drafts_db.get(User.user_id == user_id)

    def remove_draft(self, user_id):
        User = Query()
        self.drafts_db.remove(User.user_id == user_id)

    # ---- events ----

    def insert_event(self, event):
        """Store an event and mirror the new DB id into the dict."""
        event['id'] = self.events_db.insert(event)
        return event

    def update_event(self, event):
        self.events_db.update(event, eids=[event.eid])

    def remove_event(self, event):
        self.events_db.remove(eids=[event['id']])

    def get_events(self, user_id, name=None):
        """All of a user's events, optionally filtered by name substring."""
        User = Query()
        if name:
            return self.events_db.search(
                (User.user_id == user_id)
                & (User.name.test(lambda v: name in v)))
        return self.events_db.search(User.user_id == user_id)

    def get_event(self, event_id):
        return self.events_db.get(eid=int(event_id))
class Test_003_Modify_existing_data_by_valid_query_Function(unittest.TestCase):
    """Case 3: update() on an existing record found by a valid query."""

    def setUp(self):
        self.db = TinyDB('db.json')

    def tearDown(self):
        # Empty the table so cases stay independent.
        self.db.purge()
        self.db.all()

    def test_simple_modify_valid_exist(self):
        print("case 3 modify existing data by valid query")
        original = {'Name': 'Greg', 'Email': '*****@*****.**', 'int': 1, 'char': 1}
        self.db.insert(original)
        self.db.update({'int': 10}, where('Name') == 'Greg')
        outcome = self.db.search(where('Name') == 'Greg')
        expected = [{'Name': 'Greg', 'Email': '*****@*****.**', 'int': 10, 'char': 1}]
        self.assertEqual(outcome, expected)
class RequestDB(object):
    """TinyDB wrapper storing request objects keyed by request_id."""

    def __init__(self):
        self.db = TinyDB(os.path.join(dirname, 'requestdb.json'))

    def add(self, request):
        """Upsert the request's attribute dict under its request_id."""
        matcher = where('request_id') == request.request_id
        if self.db.contains(matcher):
            self.db.update(request.__dict__, matcher)
        else:
            self.db.insert(request.__dict__)

    def remove(self, request_id):
        self.db.remove(where('request_id') == request_id)

    def get_request_by_id(self, request_id):
        return self.db.search(where('request_id') == request_id)
def AddPokemon(self, username, chatid, pokemon):
    """Increment *pokemon* in the stored roster for (username, chatid).

    Fixes: removed the unused `my_pokemon_cur` lookup, the commented-out
    debug prints and the dead trailing `pass`; behavior is unchanged.
    """
    db = TinyDB('users.json')
    Username = Query()
    user = db.search((Username.username == username)
                     & (Username.chatid == chatid))
    # Build the incremented roster and persist it back onto this user.
    my_pokemon_new = self.IncrementPokeArr(user[0]['pokemon'], pokemon)
    db.update({'pokemon': my_pokemon_new},
              ((Username.username == username)
               & (Username.chatid == chatid)))
    # TODO(review): original comment says "RETURN: check bool" — a success
    # flag is still not returned; confirm whether callers expect one.
class ConnectModel:
    """CRUD helper for server connection records in server_config.json."""

    query = Query()

    def __init__(self, db_path):
        dbpath = os.path.join(db_path, 'server_config.json')
        self._db = TinyDB(dbpath)

    def req_fields_json(self, dict_result):
        """Project a dict down to the required connection fields."""
        req_items = ['serverIP', 'username', 'password']
        return {
            key: value
            for key, value in dict_result.items() if key in req_items
        }

    def search_op(self, req_dict):
        """True when a record with this serverIP exists."""
        # Fix: `True if x else False` is redundant — bool() says it directly.
        return bool(self.search_res(req_dict['serverIP']))

    def search_res(self, servIP):
        return self._db.search(ConnectModel.query.serverIP == servIP)

    def insert_op(self, req_dict):
        """Insert when the serverIP is unseen; returns all rows or a sentinel."""
        if not self.search_op(req_dict):
            self._db.insert(req_dict)
            return self._db.all()
        return "None_Insert"

    def delete_op(self, req_dict):
        """Delete the matching serverIP; returns all rows or a sentinel."""
        if self.search_op(req_dict):
            self._db.remove(where('serverIP') == req_dict['serverIP'])
            return self._db.all()
        return "None_Delete"

    def update_op(self, old_srv_IP, req_dict):
        """Replace the record at old_srv_IP; False when it does not exist."""
        if not self.search_res(old_srv_IP):
            return False
        self._db.update(
            {
                'serverIP': req_dict['serverIP'],
                'username': req_dict['username'],
                'password': req_dict['password']
            }, ConnectModel.query.serverIP == old_srv_IP)
        return self._db.all()

    def stop_db(self):
        self._db.close()
class NumberStore():
    """TinyDB-backed store of phone-number records with a free-text info field.

    Fix: deleted the large blocks of commented-out dead code; logic unchanged.
    """

    def __init__(self, filename):
        self.db = TinyDB(filename)

    def initNumber(self, number):
        """Create a default record for *number* unless one already exists."""
        if not self.getNumberDict(number):
            self.db.insert({'number': number, 'accesses': [], 'info': '#yolo'})

    def touchNumber(self, number):
        """Ensure the record exists; access tracking is not implemented yet."""
        self.initNumber(number)

    def getNumberDict(self, number):
        return self.db.get(where('number') == number)

    def getNumberList(self):
        """Generator over all stored numbers."""
        return (entry['number'] for entry in self.db.all())

    def getAccesses(self, number):
        # Placeholder until access history is actually recorded.
        return []

    def getInfo(self, number):
        # NOTE(review): raises TypeError when the number was never stored
        # (getNumberDict returns None); confirm callers initialize first.
        return self.getNumberDict(number)['info']

    def setInfo(self, number, info):
        self.initNumber(number)
        self.db.update({'info': info}, where('number') == number)
        print(self.db.all())

    def deleteNumber(self, number):
        self.db.remove(where('number') == number)
        print(self.db.all())

    def close(self):
        self.db.close()
def update(model_path, db_path):
    """ Update data for all parking spots in the db.

    model_path: saved Keras model used to classify each spot crop.
    db_path:    TinyDB file whose records each carry 'url' and 'spots'.

    Fix: the original declared `global model_path` while `model_path` is a
    parameter, which is a SyntaxError; the parameter is now used directly
    (the `global test_dataset, weather_sunny` declaration was also dropped —
    module globals are readable without it).
    """
    db = TinyDB(db_path)
    parkings = db.all()
    all_images_proc_time = 0
    for parking in parkings:
        camera_image = Image.open(test_dataset + weather_sunny + parking['url'])
        print('processing image ' + parking['url'])
        # Process each parking spot
        parking_spots = parking['spots']
        updated_parking_spots = []
        single_img_proc_time = 0
        # NOTE(review): the model is reloaded per image and the session is
        # cleared afterwards — presumably to bound memory; confirm before
        # hoisting the load out of the loop.
        model = tf.keras.models.load_model(model_path)
        for spot in parking_spots:
            spot_image = crop_img(camera_image, spot['crop'])
            spot_image = img_to_array(spot_image, path=False)
            start = time.time()
            prediction = model.predict(np.array([spot_image]))
            end = time.time()
            # Output index 0 = free, index 1 = occupied.
            if prediction[0][0] > prediction[0][1]:
                spot['occupied'] = False
            else:
                spot['occupied'] = True
            updated_parking_spots.append(spot)
            print(str(end - start))
            single_img_proc_time += end - start
        tf.keras.backend.clear_session()
        all_images_proc_time += single_img_proc_time
        print("Total processing time: " + str(single_img_proc_time))
        db.update({'spots': updated_parking_spots}, eids=[parking.eid])
    print("Average processing time per image: " + str(all_images_proc_time / len(parkings)))
def update_rank_tournament(cls, players, tournament):
    """Update player's rank in the database and in the current tournament.

    Fix: the original combined the two Query clauses with Python `and`,
    which evaluates to only the last-name clause — any player with that
    last name would have been updated.  `&` requires both to match.
    """
    db = TinyDB("USERS.json")
    query = Query()
    first_name = Input.for_string("First name ? ").capitalize()
    last_name = Input.for_string("Last name ? ").capitalize()
    new_rank = Input.for_integer("Please enter player's new rank : ")
    db.update(
        {"rank": new_rank},
        (query["first_name"] == first_name) & (query["last_name"] == last_name),
    )
    # Mirror the new rank onto the in-memory tournament roster.
    for player in players:
        if first_name == player.first_name and last_name == player.last_name:
            player.rank = new_rank
    tournament.update_players(players)
    print("Rank updated !\n")
class LocalDataStorage(DataStorage):
    """DataStorage backed by the 'thoughts' table of a TinyDB at <path>/brain.json."""

    def __init__(self, path: str, create: bool = False):
        # sort_keys/indent keep the JSON file diff-friendly; create_dirs
        # optionally builds the directory tree on first use.
        self.__db = TinyDB(join(expanduser(path), "brain.json"),
                           sort_keys=True,
                           indent=4,
                           create_dirs=create,
                           ensure_ascii=False).table("thoughts")

    def get(self, key, options=None):
        # `options` is accepted but currently unused.
        q = Query()
        data = self.__db.get(q.identity.key == key)
        return data

    def add(self, thought):
        self.__db.insert(thought)

    def find(self, query):
        # Translate the abstract query dict into a TinyDB Query, then search.
        db_query = self.__tiny_db_query(query)
        return self.__db.search(db_query)

    def update(self, key, data):
        self.__db.update(_update_operation(data), Query().identity.key == key)

    def remove(self, thought):
        raise NotImplementedError()

    @staticmethod
    def __tiny_db_query(query):
        """Build a TinyDB Query from {'field': 'a.b.c', 'operator': op, 'value': v}.

        NOTE(review): when several operator names would match, each matching
        branch reassigns `result`, so only the last one takes effect.
        """
        result = Query()
        field = query["field"]
        value = query["value"]
        # Walk the dotted field path, e.g. "identity.key" -> q.identity.key.
        for p in field.split("."):
            result = result[p]
        if query["operator"] in ["eq", "="]:
            result = result == value
        if query["operator"] in ["~eq", "~="]:
            # Case-insensitive equality.
            result = result.test(lambda x: value.lower() == x.lower())
        if query["operator"] in ["not-none"]:
            result = result.exists()
        if query["operator"] in ["matches"]:
            result = result.matches(value)
        if query["operator"] in ["contains", "~"]:
            result = result.test(lambda x: value in x)
        if query["operator"] in ["contains_case_insensitive", "~~"]:
            result = result.test(lambda x: value.lower() in x.lower())
        return result
def test_yaml(tmpdir):
    """
    :type tmpdir: py._path.local.LocalPath
    """
    # Skip cleanly when the optional PyYAML dependency is absent.
    try:
        import yaml
    except ImportError:
        return pytest.skip('PyYAML not installed')

    def represent_doc(dumper, data):
        # Represent `Document` objects as their dict's string representation
        # which PyYAML understands
        return dumper.represent_data(dict(data))

    yaml.add_representer(Document, represent_doc)

    class YAMLStorage(Storage):
        # Minimal TinyDB Storage implementation that round-trips through YAML.
        def __init__(self, filename):
            self.filename = filename
            touch(filename, False)

        def read(self):
            with open(self.filename) as handle:
                data = yaml.safe_load(handle.read())
                return data

        def write(self, data):
            with open(self.filename, 'w') as handle:
                yaml.dump(data, handle)

        def close(self):
            pass

    # Write contents
    path = str(tmpdir.join('test.db'))
    db = TinyDB(path, storage=YAMLStorage)
    db.insert(doc)
    assert db.all() == [doc]
    db.update({'name': 'foo'})
    # A '!' in the file would mean a YAML type tag leaked in, i.e. the
    # Document representer was not used.
    assert '!' not in tmpdir.join('test.db').read()
    assert db.contains(where('name') == 'foo')
    assert len(db) == 1
def first_chat(data):
    """Run the triage model for a patient and persist the result on their record."""
    query = Query()
    db = TinyDB('tinyDB/patient_login.json')
    search_result = db.search(query.email == data['email'])
    prediction = model.predict(data)
    # Store the enriched payload back onto the patient's document.
    data['testRequired'] = prediction
    db.update(data, query.email == data['email'])
    return {'status': 200, 'testRequired': prediction}
class PlayerStatsDao:
    """Singleton DAO accumulating per-player dart counts and score totals."""

    __instance = None
    # Fix: the original `with threading.Lock():` created a brand-new lock on
    # every get_instance() call, so it synchronized nothing; all callers must
    # contend on one shared class-level lock.
    __singleton_lock = threading.Lock()

    @staticmethod
    def get_instance():
        if PlayerStatsDao.__instance is None:
            with PlayerStatsDao.__singleton_lock:
                if PlayerStatsDao.__instance is None:  # double-checked locking
                    PlayerStatsDao()
        return PlayerStatsDao.__instance

    def __init__(self):
        if PlayerStatsDao.__instance is not None:
            raise Exception("This is a singleton!")
        else:
            PlayerStatsDao.__instance = self
            self.db = TinyDB('player_stats.json')
            self.lock = threading.Lock()

    def add(self, stat):
        """Fold one (player, _, darts) tuple into the player's running totals.

        Fix: the lock is now held via `with`, so it is released even when an
        exception escapes (the bare acquire()/release() pair leaked it).
        """
        with self.lock:
            player_stats = Query()
            dart_count = 0
            dart_total = 0
            for d in stat[2]:
                dart_count = dart_count + 1
                dart_total = dart_total + Dart(d[0], d[1]).get_score()
            if not self.db.contains(player_stats.player == stat[0]):
                # First time we see this player: seed their record.
                self.db.insert({
                    'player': stat[0],
                    'numDarts': dart_count,
                    'totalScore': dart_total
                })
            else:
                my_stat = self.db.get(player_stats.player == stat[0])
                print(my_stat)
                self.db.update(
                    {
                        'player': stat[0],
                        'numDarts': my_stat["numDarts"] + dart_count,
                        'totalScore': my_stat["totalScore"] + dart_total
                    }, player_stats.player == stat[0])
class Vanguard(object):
    """Selects the next member who has not yet served as vanguard."""

    def __init__(self, db_filename):
        self.db = TinyDB(db_filename)
        member = self.get_member()
        self.name = member['name']
        self.slack_id = member['slackid']
        self.element_id = member.eid

    def get_member(self):
        """Return the first un-vanguarded member, resetting flags when all served."""
        members = Query()
        # When every member has been vanguarded, clear the flag table-wide.
        if self.db.count(members.vanguarded == False) == 0:
            self.db.update({'vanguarded': False})
        return self.db.search(members.vanguarded == False)[0]

    def update_member(self):
        """Mark the selected member as having served."""
        self.db.update({'vanguarded': True}, eids=[self.element_id])
def add_hashtag(self, hashtag_object):
    """Insert or refresh a hashtag record keyed by its 'tag' field.

    Returns 201 on insert, (200, 'up to date') otherwise.

    Fixes: (1) search() returns a *list* of documents, so the original
    `has_register == hashtag_object` (list vs dict) could never be true;
    compare the matched document instead.  (2) update() was called without
    a condition, which rewrites every record in the table; restrict it to
    the matching tag.
    """
    Hashtag = Query()
    database = TinyDB('database/db.json')
    has_register = database.search(Hashtag.tag == hashtag_object['tag'])
    if not has_register:
        database.insert(hashtag_object)
        return 201
    if has_register[0] == hashtag_object:
        # Stored document already identical — nothing to write.
        return 200, 'up to date'
    database.update(hashtag_object, Hashtag.tag == hashtag_object['tag'])
    return 200, 'up to date'
def download(name, force=False):
    """Download a PyThaiNLP corpus by name and track it in the local TinyDB.

    name:  corpus key in the remote db.json index.
    force: when True, skip the interactive y/n confirmation prompts.

    Fix: the version-check combined Query clauses with Python `and`, which
    evaluates to only the version clause; `&` requires both name AND
    version to match.  Also `force == False` -> `not force`.
    """
    db = TinyDB(path_db_)
    temp = Query()
    data = requests.get(
        "https://raw.githubusercontent.com/PyThaiNLP/pythainlp-corpus/master/db.json"
    )
    data_json = data.json()
    if name in list(data_json.keys()):
        temp_name = data_json[name]
        print("Download : " + name)
        if len(db.search(temp.name == name)) == 0:
            # Not installed yet: fetch and record name/version/file.
            print(name + " " + temp_name['version'])
            download_(temp_name['download'], temp_name['file_name'])
            db.insert({
                'name': name,
                'version': temp_name['version'],
                'file': temp_name['file_name']
            })
        else:
            if len(
                    db.search((temp.name == name)
                              & (temp.version == temp_name['version']))) == 0:
                # Installed but version differs: offer an update.
                print("have update")
                print("from " + name + " " +
                      db.search(temp.name == name)[0]['version'] +
                      " update to " + name + " " + temp_name['version'])
                yes_no = "y"
                if not force:
                    yes_no = str(input("y or n : ")).lower()
                if "y" == yes_no:
                    download_(temp_name['download'], temp_name['file_name'])
                    db.update({'version': temp_name['version']},
                              temp.name == name)
            else:
                # Same version already recorded: offer a re-download.
                print("re-download")
                print("from " + name + " " +
                      db.search(temp.name == name)[0]['version'] +
                      " update to " + name + " " + temp_name['version'])
                yes_no = "y"
                if not force:
                    yes_no = str(input("y or n : ")).lower()
                if "y" == yes_no:
                    download_(temp_name['download'], temp_name['file_name'])
                    db.update({'version': temp_name['version']},
                              temp.name == name)
    db.close()
def addIthems():
    """Replace a zone's item list in the DB and regenerate its Excel index.

    Fixes: the bare `except:` (which also swallowed KeyboardInterrupt /
    SystemExit) is narrowed to `except Exception`, and the TinyDB handle no
    longer shadows the `data` payload name.
    """
    request_data = request.get_json()
    zoneId = request_data["zoneId"]
    newitems = request_data["eachZone.items"]
    zoneName = request_data["zoneName"]
    zone_table = TinyDB("data.json").table("Zone")
    query = Query()
    try:
        zone_table.update({"items": newitems}, query.zoneId == zoneId)
        # add to excell for make index
        makeExcellFile.makeFile(zoneId, zoneName, newitems)
        return "ok", 200
    except Exception:
        return "not ok", 500
def saveActualStates(self):
    """Persist every buffered state pair from self.statesList into the TinyDB
    at self.dbPath, stamping each record with an update or creation time."""
    db = TinyDB(self.dbPath)
    for states in self.statesList:
        # Changing np.arrays to lists, so we can serialize them
        states['prev_state']['image'] = states['prev_state'][
            'image'].tolist()
        states['curr_state']['image'] = states['curr_state'][
            'image'].tolist()
        date = datetime.datetime.now().strftime("%Y-%m-%d %H:%M:%S")
        try:
            # If `states` is a TinyDB Document it carries a doc_id: stamp the
            # update time and rewrite it in place.
            states['update_date'] = date
            db.update(Document(states, doc_id=states.doc_id))
        except AttributeError:
            # No .doc_id attribute -> never stored before: stamp creation
            # time and insert as a new record.
            states['create_date'] = date
            db.insert(states)
async def on_member_join(self, member):
    """Greet a new member and apply a crude join-rate raid protection.

    NOTE(review): structure reconstructed from a collapsed source line —
    confirm nesting against the original file.  The raidprotect.json record
    is read back by stringifying db.all() and splitting on commas, which is
    extremely fragile; presumably out[1] lands on the 'joins' value.
    """
    if os.path.isfile(f'atari/data/guilds/{member.guild.id}/automod.json'):  # automod
        channel = member.guild.system_channel
        guild = member.guild
        if channel is not None:
            await channel.send('Welcome {0.mention}'.format(member))
        if not os.path.isfile(f'atari/data/guilds/{member.guild.id}/raidmembers.json'):
            # First joiner since the last reset: start the raid-member list.
            db = TinyDB(f'atari/data/guilds/{member.guild.id}/raidmembers.json')
            db.insert({'m_id': str(member.id)})
        if not os.path.isfile(f'atari/data/guilds/{member.guild.id}/raidprotect.json'):
            # Initialize the join counter at zero.
            db = TinyDB(f'atari/data/guilds/{member.guild.id}/raidprotect.json')
            db.insert({'joins': '0'})
        else:
            db = TinyDB(f'atari/data/guilds/{member.guild.id}/raidprotect.json')
            out = db.all()
            # Mangle the records into a flat comma-separated token list.
            out = str(out).replace("{","").replace("}","").replace(":",",").replace("'",'"').replace("]","").replace("[","").replace('"',"")
            out = list(out.split(","))
            print(str(out[1]))
            # Bump the join counter.
            update = str(int(out[1])+1)
            db.update({'joins': str(update)})
            out = db.all()
            out = str(out).replace("{","").replace("}","").replace(":",",").replace("'",'"').replace("]","").replace("[","").replace('"',"")
            out = list(out.split(","))
            print("int:"+str(out[1]))
            if int(out[1]) == 1:
                # First join of a burst: reset counter and member list in 30s.
                print("T-30s")
                await asyncio.sleep(30)
                db = TinyDB(f'atari/data/guilds/{member.guild.id}/raidprotect.json')
                db.update({'joins': '0'})
                os.remove(f"atari/data/guilds/{member.guild.id}/raidmembers.json")
                print("List clear.")
            if int(out[1]) > 2:
                # More than two joins inside the window: kick the recorded ids.
                db = TinyDB(f'atari/data/guilds/{member.guild.id}/raidmembers.json')
                out = db.all()
                out = str(out).replace("{","").replace("}","").replace(":",",").replace("'",'"').replace("]","").replace("[","").replace('"',"")
                out = list(out.split(","))
                i=1
                for x in out:
                    try:
                        # Odd indices hold the stored member ids after mangling.
                        await guild.kick(discord.Object(id=int(out[i])))
                        i=i+2
                    except:
                        print("members kicked.")
    return
def db_insert(c_posts, check=True):
    """Upsert DNN-feed posts into db.json, keyed by asset_id.

    c_posts: a single post dict or a list of them.
    check:   when True, look each post up first and seed a defaults record
             for unseen asset_ids before applying the feed fields; when
             False, assume the record exists and just update.

    Fix: removed the commented-out debug prints; logic is unchanged.
    """
    db = TinyDB('db.json')
    Record = Query()
    if not isinstance(c_posts, list):
        c_posts = [c_posts]
    for post in c_posts:
        if check is True:
            result = db.search(Record.asset_id == post['asset_id'])
            if not result:
                # Unseen asset: seed a record from the configured defaults,
                # mirroring each *_api field into its *_user counterpart so
                # user edits start from the feed values.
                new_post = cfg.config['db_fields_dflt'].copy()
                new_post['asset_id'] = post['asset_id']
                new_post['desc_user'] = post['desc_api']
                new_post['label_user'] = post['label_api']
                new_post['title_user'] = post['title_api']
                new_post['sections_user'] = post['sections_api']
                new_post['categories_user'] = post['categories_api']
                new_post['topics_user'] = post['topics_api']
                new_post['tags_user'] = post['tags_api']
                if post['draft_api'] is True:
                    # Munge flagged this as a draft: record "draft by algorithm".
                    new_post['draft_user'] = '******'
                db.insert(new_post)
        # Apply every field from the DNN feed onto the record.
        db.update(post, Record.asset_id == post['asset_id'])
    db.close()
    return
def modifyGroup(self, group, groupName):
    """Replace the first stored group whose text contains *groupName*,
    appending *group* when no match exists, then persist to the per-person DB.

    Fix: the manual while-loop with a found-flag and the `x = len(...)`
    loop-exit sentinel is replaced by enumerate + for/else; same behavior.
    """
    fileLocation = "files/person_" + self.phone[0] + ".json"
    db = TinyDB(fileLocation)
    Person = Query()
    for i, existing in enumerate(self.groups):
        if groupName in existing:
            self.groups[i] = group
            break
    else:
        # No existing group matched: add it as a new one.
        self.groups.append(group)
    db.update({'groups': self.groups}, Person.phone == self.phone)
    db.close()
def remove_item_for_user(user_key, item_key):
    """Ask the mediator to delete an auction item, then detach it from the user.

    Fix: the final failure branch built the 404 response but never returned
    it, so the view fell off the end returning None; the missing `return`
    is added.
    """
    users_db = TinyDB('users.json', indent=4, separators=(',', ': '))
    r = requests.put(endpoints['mediator'].get_prefix() +
                     "remove_auction_item/" + str(item_key),
                     data=json.dumps(request.json),
                     headers=headers)
    if r.json()['success'] == False:
        return jsonify(Acknowledgement_base(False).serialize()), 404
    record = users_db.get(doc_id=user_key)
    new_items = record['items']
    if (r.json()['item_key'] in new_items):
        # Drop the item from the user's list and persist.
        new_items.remove(r.json()['item_key'])
        users_db.update({'items': new_items}, doc_ids=[user_key])
        return jsonify(Acknowledgement_base(True).serialize()), 200
    else:
        return jsonify(Acknowledgement_base(False).serialize()), 404
def insert_ratings_db(stock_symbol, data_dict):
    """Replace the ratings rows for a stock in its per-letter shard DB."""
    # Shards are bucketed by lowercase first letter; non-alpha symbols share
    # the unnamed bucket.
    first = stock_symbol[0]
    first = first.lower() if first.isalpha() else ''
    dbFilePath = str(dbPath) + '/db/ratingsDB_' + first + '.json'
    print('dbFilePath: ', dbFilePath)
    ratings_db = TinyDB(dbFilePath)
    print("record to be inserted: ", data_dict)
    print("first remove... ")
    # Strip the stale stockSymbol field from matching rows, then add new rows.
    ratings_db.update(delete('stockSymbol'), where('stockSymbol') == stock_symbol)
    print("... then insert ")
    ratings_db.insert_multiple(data_dict)
    return
def download(name, force=False):
    """Download a PyThaiNLP corpus by name and track it in the local TinyDB.

    name:  corpus key in the remote index at CORPUS_DB_URL.
    force: when True, skip the interactive y/n confirmation prompts.

    Fix: the version check combined Query clauses with Python `and`, which
    evaluates to only the version clause — any corpus at that version would
    match.  `&` requires both name AND version to match.
    """
    db = TinyDB(path_db_)
    temp = Query()
    data = requests.get(CORPUS_DB_URL)
    data_json = data.json()
    if name in list(data_json.keys()):
        temp_name = data_json[name]
        print("Download : " + name)
        if not db.search(temp.name == name):
            # Not installed yet: fetch and record name/version/file.
            print(name + " " + temp_name["version"])
            download_(temp_name["download"], temp_name["file_name"])
            db.insert({
                "name": name,
                "version": temp_name["version"],
                "file": temp_name["file_name"],
            })
        else:
            if not db.search((temp.name == name)
                             & (temp.version == temp_name["version"])):
                # Installed but version differs: offer an update.
                print("Alert: New version is ready to be updated.")
                print("from " + name + " " +
                      db.search(temp.name == name)[0]["version"] +
                      " update to " + name + " " + temp_name["version"])
                yes_no = "y"
                if not force:
                    yes_no = str(input("y or n : ")).lower()
                if "y" == yes_no:
                    download_(temp_name["download"], temp_name["file_name"])
                    db.update({"version": temp_name["version"]},
                              temp.name == name)
            else:
                # Same version already recorded: offer a re-download.
                print("Redownload")
                print("from " + name + " " +
                      db.search(temp.name == name)[0]["version"] +
                      " update to " + name + " " + temp_name["version"])
                yes_no = "y"
                if not force:
                    yes_no = str(input("y or n : ")).lower()
                if "y" == yes_no:
                    download_(temp_name["download"], temp_name["file_name"])
                    db.update({"version": temp_name["version"]},
                              temp.name == name)
    db.close()
class KVStoreAgent(Agent):
    """Agent exposing a TinyDB-backed key/value store via events and messages."""

    def __init__(self, name=None, *, path=None):
        super().__init__(name=name)
        self.db = TinyDB(path or STORE_PATH)

    def _set(self, key, value):
        """Upsert key->value, collapsing duplicate records; returns the eid."""
        Record = Query()
        records = self.db.search(Record.key == key)
        eids = [r.eid for r in records]
        element_data = {'key': key, 'value': value}
        if len(eids) > 1:
            # Drop all but the newest duplicate before updating it.
            self.db.remove(eids=eids[:-1])
        if eids:
            return self.db.update(element_data, eids=[eids[-1]])[0]
        return self.db.insert(element_data)

    def _get(self, key):
        """Return the stored value for *key*, or None when absent."""
        Record = Query()
        values = self.db.search(Record.key == key)
        return values[0]['value'] if values else None

    @on_event('kvstore-set')
    def event_kvstore_set(self, event):
        key = event.data.key
        value = event.data.value
        eid = self._set(key, value)
        self.emit('kvstore-updated', data={
            'id': eid,
            'key': key,
            'value': value
        })

    @on_event('kvstore-get')
    def event_kvstore_get(self, event):
        # Fix: this handler was also named `event_kvstore_set`, silently
        # shadowing the setter above in the class namespace; renamed so both
        # handlers exist as distinct attributes.
        key = event.data.key
        value = self._get(key)
        # NOTE(review): _get returns the stored *value*, so value['eid'] /
        # value['value'] only work when values are dicts carrying those keys,
        # and raise TypeError when the key is missing — confirm intent.
        self.emit('kvstore-get-result', data={
            'id': value['eid'],
            'key': key,
            'value': value['value']
        })

    @on_message('kvstore {key}={value}', parse=True)
    def message_kvstore_set(self, message):
        key = message.data.key
        value = message.data.value
        eid = self._set(key, value)
        return 'Set {}: {!r} = {!r}'.format(eid, key, value)

    @on_message('kvstore {key}', parse=True)
    def message_kvstore_get(self, message):
        key = message.data.key
        value = self._get(key)
        return '{!r} = {!r}'.format(key, value)
class CredentialRepo():
    '''
    CredentialRepo: TinyDB-backed storage for (userName, password, title)
    credential records, kept next to this module.
    '''

    def __init__(self):
        '''
        Credential Repo constructor
        '''
        path = os.path.abspath(__file__)
        dir_path = os.path.dirname(path)
        self.db = TinyDB('{}/credentials_db.json'.format(dir_path))
        self.query = Query()

    def addCredential(self, userName, password, title):
        """Insert a credential; True when exactly one record now has *title*."""
        insertObj = {
            'userName': userName,
            'password': password,
            'title': title
        }
        self.db.insert(insertObj)
        # Fix: `True if cond else False` simplified to the condition itself.
        return self.db.count(self.query.title == title) == 1

    def deleteCredential(self, title):
        """Remove all records with *title*; True when none remain."""
        self.db.remove(self.query.title == title)
        return self.db.count(self.query.title == title) == 0

    def updateCredential(self, userName, password, title):
        """Overwrite the credential for *title*; True when the write stuck."""
        self.db.update({
            'userName': userName,
            'password': password
        }, self.query.title == title)
        credentialAfterUpdate = self.db.search(self.query.title == title)
        return (credentialAfterUpdate[0]['userName'] == userName
                and credentialAfterUpdate[0]['password'] == password)

    def getCredential(self, title):
        return self.db.search(self.query.title == title)

    def getAllCredentials(self):
        return self.db.all()
async def hdb(ctx, arg1="", arg2="", arg3=""):
    """Owner-only debug command: say-echo, purge litcoin tables, or set a balance.

    arg1 selects the sub-command ("say", "lc_p", "lc_s"); arg2/arg3 are its
    arguments (user and balance for "lc_s").
    """
    litcoinlist = TinyDB("litcoin/LITCOIN" + ctx.message.server.id + ".json")
    levellist = TinyDB("level/LEVEL" + ctx.message.server.id + ".json")
    # Hard-coded owner id gate.
    if ctx.message.author != Server.get_member(ctx.message.server, "166953638961479681"):
        return await hydroBot.say("Debugging is not for plebs. This is serious business.")
    elif arg1 == "say":
        return await hydroBot.say(arg2)
    elif arg1 == "lc_p":
        # Wipe every litcoin table for this server.
        litcoinlist.purge_tables()
    elif arg1 == "lc_s":
        user = arg2
        balance = arg3
        # NOTE(review): `query` is not defined in this function — presumably a
        # module-level Query(); verify, otherwise this raises NameError.
        litcoinlist.update({"balance": int(balance)}, query["user"] == user)
        return await hydroBot.say(
            "Set the balance of " + Server.get_member(ctx.message.server, user).name + "'s account to `" + str(
                balance) + "`")
    return
class FileStorage(object):
    """Leave records persisted in ~/.leavemanager/data.json via TinyDB."""

    def __init__(self):
        filepath = Path.home().joinpath(".leavemanager")
        conf = filepath.joinpath("data.json")
        datafile = str(conf)
        self.db = TinyDB(datafile)
        self.leave = Query()

    def get(self, date):
        """Record for *date*, or None when absent.

        Fix: referenced the undefined bare name `leave` (NameError); use the
        instance's query object like every other method does.
        """
        return self.db.get(self.leave.rawdate == date)

    def put(self, date, data):
        """Insert *data* unless a record for *date* exists; True on insert."""
        s = self.db.get(self.leave.rawdate == date)
        if s:
            return False
        else:
            self.db.insert(data)
            return True

    def delete(self, date):
        """Remove the record for *date*; True when something was removed."""
        s = self.db.get(self.leave.rawdate == date)
        if s:
            self.db.remove(doc_ids=[s.doc_id])
            return True
        else:
            return False

    def update(self, date, update):
        self.db.update(update, self.leave.rawdate == date)

    def countdays(self, year):
        return self.db.count(self.leave.year == year)

    def all(self):
        return self.db.all()

    def all_pending(self, year):
        """Unapproved leave records for *year*.

        Fix: `q1 and q2` evaluates to only the second Query, so the year
        filter was silently dropped; combine with `&`.
        """
        return self.db.search((self.leave.year == year)
                              & (self.leave.approved == False))

    def search(self, year):
        return self.db.search(self.leave.year == year)
def setVariance():
    """Set the Variance field on the record with the posted Id; echo it as JSON."""
    record_id = int(request.form["Id"])
    variance = request.form["Variance"]
    db = TinyDB("/mnt/scripts/RasPiBrew2/db.json")
    Record = Query()
    existing = db.search(Record.ID == record_id)
    # Exactly one match is required; anything else is a 404.
    if len(existing) != 1:
        return "Not Found for Id %s Found %s records." % (record_id, len(existing)), 404
    record = existing[0]
    record["Variance"] = int(variance)
    db.update(record, Record.ID == record["ID"])
    return jsonify(db.search(Record.ID == record_id)[0])
class Student:
    """Per-user records stored in ./db.json, keyed by user_id."""

    def __init__(self):
        self.db = TinyDB('./db.json')

    def get_data(self, user_id):
        """Return the user's record dict, or False when absent."""
        query = Query()
        result = self.db.search(query.user_id == user_id)
        if len(result) > 0:
            return result[0]
        else:
            return False

    def insert_or_update_data(self, data):
        """Upsert *data* keyed on its user_id.

        Fix: the original indexed `search(...)[0]` before checking for
        emptiness, so new users raised IndexError and the insert branch was
        unreachable (and `len(result) > 0` was then applied to a dict);
        check the result list first.
        """
        query = Query()
        result = self.db.search(query.user_id == data["user_id"])
        if result:
            self.db.update(data, query.user_id == data["user_id"])
        else:
            self.db.insert(data)
def UpdateGameroomPlayerInfo(self, low_id):
    """Write this player's lobby state into their slot of the gameroom record.

    Fix: removed the unused `player = Query()` local; the repeated
    per-field indexing is hoisted into one `info` reference.
    """
    db = TinyDB('Database/Gameroom/gameroom.db')
    query = Query()
    data = db.search(query.room_id == self.player.room_id)
    gameroom_data = data[0]
    # Mirror the in-memory player fields into this player's info dict.
    info = gameroom_data["info"][str(low_id)]
    info["Team"] = self.player.team
    info["Ready"] = self.player.isReady
    info["brawlerID"] = self.player.brawler_id
    info["starpower"] = self.player.starpower
    info["gadget"] = self.player.gadget
    info["profileIcon"] = self.player.profile_icon
    info["namecolor"] = self.player.name_color
    db.update(gameroom_data, query.room_id == self.player.room_id)
def warnUser(bot, update):
    """Warn the author of the replied-to message; ban them on the second strike.

    Fixes: `None == userInDb` replaced with the idiomatic `is None`; stray
    trailing semicolon removed.  Runtime strings are unchanged.
    """
    userId = update.message.reply_to_message.from_user.id
    db = TinyDB('user_warns.json')
    User = Query()
    userInDb = db.get(User.user_id == userId)
    if userInDb is None:
        # First offence: record the warning and notify.
        db.insert({'user_id': userId,
                   'username': update.message.reply_to_message.from_user.username,
                   'count': 1})
        bot.sendMessage(chatId, u'Предупрежден: 1/2',
                        reply_to_message_id=update.message.reply_to_message.message_id)
    else:
        # NOTE(review): count is hard-set to 2 rather than incremented —
        # presumably a two-strike policy; confirm.
        db.update({'count': 2}, User.user_id == userId)
        bot.restrictChatMember(chatId, userId)
        bot.kickChatMember(chat_id=chatId, user_id=userId)
        bot.sendMessage(chatId, u'Пользователь заблокирован',
                        reply_to_message_id=update.message.reply_to_message.message_id)
def create_item_for_user(user_key):
    """Create an auction item via the mediator, attach it to the user, email them."""
    users_db = TinyDB('users.json', indent=4, separators=(',', ': '))
    r = requests.put(endpoints['mediator'].get_prefix() + "create_auction_item",
                     data=json.dumps(request.json),
                     headers=headers)
    # Append the freshly created item key to the user's item list.
    record = users_db.get(doc_id=user_key)
    new_items = record['items']
    new_items.append(r.json()['item_key'])
    users_db.update({'items': new_items}, doc_ids=[user_key])
    # Notify the owner by email through the mediator.
    em = {
        'to': record['email'],
        'subject': 'New item has been created',
        'body': 'hope you get rich !'
    }
    r = requests.post(endpoints['mediator'].get_prefix() + "send_email",
                      data=json.dumps(em),
                      headers=headers)
    return jsonify(Acknowledgement_base(True).serialize()), 200
def save(self, filename=None):
    '''
    save the list into the database. doing like a blob save. eg, saving
    the list contents under a single key, rather than saving the lines
    of the list
    '''
    fn = self.resolve_filename(filename)
    if not fn:
        # A filename was never set, so there is nowhere to persist to.
        warnings.warn(
            'The list not saved as a filename has never been set')
        return
    db = TinyDB(fn)
    existing = db.get(self.query.key == (fn))
    if existing:
        # Record already present: overwrite the blob in place.
        db.update({'data': self[:]}, eids=[existing.eid])
    else:
        db.insert({'key': self.filename, 'data': self[:]})
    db.close()
class Test_006_Modify_Not_existing_data_by_valid_query_Function(unittest.TestCase):
    """Case 6: update() against a record that does not exist."""

    def setUp(self):
        self.db = TinyDB('db.json')

    def tearDown(self):
        # Empty the table so cases stay independent.
        self.db.purge()
        self.db.all()

    def test_simple_modify_not_exist(self):
        print("case 6 modify Non-existing data by valid query")
        outcome = self.db.update({'int': 10}, where('Name') == 'Wendy')
        self.assertEqual(outcome, None)
def batch_update(info): db = TinyDB('db.json') rows = {(i['contestId'], i['index']): i for i in db.all()} insert = {} update = {} print "Calculating insert row & update row" for k, v in info.items(): if rows.get(k, None) is None: insert[k] = v elif rows[k]['isSolved'] != v['isSolved']: update[k] = {'isSolved': v['isSolved'], 'name': v['name']} print "Calculation finish, %d rows going to be inserted, %d rows going to be updated" % \ (len(insert.keys()), len(update.keys())) raw_input("Press any key to continue.") problem = Query() for v in insert.values(): print "insert row: %d, %s, %s" % (v['contestId'], v['index'], v['name']) db.insert(v) for k, v in update.items(): print "update row: %d, %s, %s, solved: %r" % (k[0], k[1], v['name'], v['isSolved']) db.update(v, (problem.contestId == k[0]) & (problem.index == k[1])) print 'Done!'
def finish(dirname): fnames = [] for f in ls(dirname): if f.split('.')[-1] == 'cpp': fnames.append(f) if not path.exists('./Archive/%s' % dirname): pathname = './Archive/%s' % dirname makedirs(pathname) print '>>>> create directory:[%s]' % pathname db = TinyDB("data/db.json") for f in fnames: oldpath = '%s/%s' % (dirname, f) newpath = './Archive/%s/%s' % (dirname, f) move(oldpath, newpath) print '>>>> move [%s] to [%s]' % (oldpath, newpath) q = Query() pname = path.splitext(f)[0] if db.search(q.name == pname): db.update({'solved': True}, (q.name == pname)) print '>>>> db update, problem: `%s` solved' % pname rmtree(dirname)
def update():
    """Scan ./Archive and flag every archived problem as solved in the db.

    Every file under a round directory inside ./Archive is taken to be a
    solved problem; any matching record still marked unsolved is updated.
    Prints the number of rows changed.
    """
    db = TinyDB("data/db.json")
    cur = './Archive'
    problems = []
    for f in ls(cur):
        if f in ignore:
            continue
        if f.startswith('.'):
            continue
        if not path.isdir(cur + '/' + f):
            continue
        # print 'find round: %s' % f
        for problem_name in ls(cur + '/' + f):
            problems.append(problem_name.split('.')[0])
    counter = 0
    q = Query()  # hoisted out of the loop: no need to rebuild the query object per problem
    for p in problems:
        dp = db.search(q.name == p)
        if (len(dp) > 0 and dp[0]['solved'] is False):
            print('db update, problem :`%s` solved' % p)
            db.update({'solved': True}, (q.name == p))
            counter += 1
    print('>>>> %d rows have been updated.' % counter)
class Model(object):
    """Lightweight active-record wrapper over a per-class TinyDB table.

    Each subclass maps to '<table>.json' under the app's DB_PATH and stores
    its public attributes as Field objects.  Python 2 code (uses iteritems).
    """
    # TinyDB table (file) name; subclasses override this.
    table = 'default'
    # Attribute names that are infrastructure, never persisted as data.
    _exclude_fields = [
        'db', 'table', 'submit', '_exclude_fields', 'exclude_fields',
        '_deleted_args'
    ]
    # Field names queued for removal from the stored record on update().
    # NOTE(review): class-level mutable — shared across instances unless
    # reassigned in __init__ via kwargs; confirm that is intended.
    _deleted_args = list()

    def __init__(self, **kwargs):
        # Local `table` deliberately shadows the class attribute: it is the
        # full path to this model's json file.
        table = os.path.join(current_app.config.get('DB_PATH', 'gallery_db'),
                             '%s.json' % self.table)
        self.db = TinyDB(table, storage = S3Storage)
        self.eid = Field(type = int, required = False, primary = False)
        # Subclasses may extend the exclusion list via `exclude_fields`.
        exclude_fields = getattr(self, 'exclude_fields', None)
        if exclude_fields:
            self._exclude_fields += exclude_fields
        for key, value in kwargs.items():
            if key == '_deleted_args':
                self._deleted_args = value
            if key not in self._exclude_fields:
                self.setattr(key, value)

    def all(self):
        """Return every stored record wrapped via as_obj."""
        rows = list()
        for row in self.db.all():
            rows.append( self.as_obj(row) )
        return rows

    def filter(self, **kwargs):
        """Return records matching any of the given field==value pairs,
        de-duplicated by eid."""
        rows = list()
        eids = list()
        for field, value in kwargs.iteritems():
            # Coerce raw values into Field objects before validating.
            if type(value) != Field:
                value = self.setattr(field, value)
            if value.validate():
                founds = self.db.search(where(field) == value.value)
                for found in founds if founds else []:
                    if found.eid not in eids:
                        eids.append(found.eid)
                        rows.append( self.as_obj(found) )
        return rows

    def get(self, eid):
        """Fetch one record by TinyDB eid; False when absent."""
        row = self.db.get(eid = eid)
        if row:
            return self.as_obj(row)
        return False

    def search(self, **kwargs):
        """Return the first record matching any given field==value pair,
        or False when nothing matches."""
        for field, value in kwargs.iteritems():
            if type(value) != Field:
                value = self.setattr(field, value)
            if value.validate():
                row = self.db.search(where(field) == value.value)
                if row:
                    # search() returns a list; keep only the first hit.
                    if type(row) == list:
                        row = row[0]
                    return self.as_obj(row)
        return False

    def create(self):
        """Insert this instance's data; returns the new eid."""
        insert = self.as_dict()
        return self.db.insert(insert)

    def update(self):
        """Write current data back to the stored record, first dropping any
        fields queued in _deleted_args.  `delete(...)` here is the tinydb
        operation (module-level), not this class's delete() method."""
        update = self.as_dict()
        for arg in self._deleted_args:
            try:
                self.db.update(delete(arg), eids = [ self.eid.value ])
            except:
                # Best effort: the field may already be absent.
                pass
        return self.db.update(update, eids = [ self.eid.value ])

    def save(self):
        """Upsert: update when an eid is set, otherwise create and record
        the new eid."""
        if self.eid.value:
            self.eid.validate()
            return self.update()
        else:
            create = self.create()
            self.eid.value = create
            return self

    def delete(self):
        """Remove the backing record."""
        self.db.remove( eids = [ self.eid.value ] )

    def as_dict(self):
        """Collect validated public Field values into a plain dict."""
        args = dict()
        for key in self.__dict__.keys():
            if key not in self._exclude_fields:
                attr = getattr(self, key, None)
                if attr:
                    if attr.validate():
                        args[key] = attr.value
        return args

    def clean(self):
        """Strip all public data attributes from the instance.
        NOTE: relies on Python 2's keys() returning a list (safe to delete
        while iterating)."""
        for key in self.__dict__.keys():
            if key not in self._exclude_fields:
                delattr(self, key)

    def as_obj(self, row):
        """Re-hydrate this instance from a TinyDB row and return a shallow
        copy of it (so multiple rows don't alias the same object)."""
        self.clean()
        if not getattr(self, 'eid', None):
            self.eid = Field(value = row.eid, type = int, required = False, primary = False)
        for key, value in row.items():
            self.setattr(key, value)
        return copy.copy( self )

    def setattr(self, key, value):
        """Set `key` as a Field carrying `value`; returns the Field, or
        False when the key is excluded from persistence."""
        attr = getattr(self, key, Field())
        if type(attr) != Field:
            attr = Field()
        attr.value = value
        if key not in self._exclude_fields:
            setattr(self, key, attr)
            return attr
        if key == '_deleted_args':
            self._deleted_args.append(value)
        return False

    def from_form(self, form):
        """Populate fields from a form-like mapping; returns self."""
        for key, value in form.items():
            self.setattr(key, value)
        return self

    def as_form(self):
        """Expose the public Field objects keyed by name (for form rendering)."""
        fields = dict()
        for key in self.__dict__.keys():
            if key not in self._exclude_fields:
                attr = getattr(self, key, None)
                if attr and type(attr) == Field:
                    fields[key] = attr
        return fields

    def __repr__(self):
        if self.eid:
            return '<%s: %s>' % (self.__class__.__name__, self.eid.value)
        else:
            return '<%s>' % (self.__class__.__name__)
class Database(object):
    """Word/translation cache plus per-day word counters, backed by TinyDB."""

    def __init__(self, db_path):
        self.db = TinyDB(db_path, storage=UnicodeJSONStorage)
        # Words that were already translated.
        self.already_translated = self.db.search(
            where('translation') != None)
        # Words that were attempted to translate but failed.
        self.non_translated = self.db.search(
            where('translation') == None)

    def _word_already_translated(self, word):
        """True if the word was seen before (translated or failed); None otherwise."""
        for cached_word in self.already_translated:
            if word == cached_word['original']:
                return True
        for cached_word in self.non_translated:
            if word == cached_word['original']:
                return True

    def insert_translation(self, original, translation_func):
        """Translate `original` via `translation_func` and store the result
        (or None on failure) unless the word was already attempted."""
        if not self._word_already_translated(original):
            log.info('Inserting translation for %s into db', original)
            translation = translation_func(original)
            self.db.insert({
                'original': original,
                'translation': translation.to_dict() if translation else None})

    def insert_or_update_counter(self, count, original, date_str):
        """Upsert the (word, day) occurrence counter."""
        counter = {'count': count, 'original': original, 'date': date_str}
        if self.db.search(
                (where('date') == date_str) & (where('original') == original)):
            log.info('Updating counter for %s into db', original)
            # BUG FIX: the update previously matched on date alone, which
            # overwrote the counter of *every* word seen that day. The
            # condition must mirror the search above: date AND original.
            self.db.update(
                counter,
                (where('date') == date_str) & (where('original') == original))
        else:
            log.info('Inserting counter for %s into db', original)
            self.db.insert(counter)

    def get_top_words_of_the_day(self, date_str, type=None, n=10):
        """Yield up to `n` counters for `date_str`, highest count first,
        keeping only words that have a translation; optionally filter by
        the translation's 'type'.  (`type` name kept for API compatibility.)"""
        query = Query()
        counters = self.db.search(query.date == date_str)
        for counter in counters:
            query = Query()
            result = self.db.search(
                (query.original == counter['original']) &
                query.translation.exists())
            if result:
                # There will be only one translation in database
                counter['translation'] = result[0]['translation']
            else:
                # This is unlikely to happen
                counter['translation'] = None
        sorted_counters = \
            sorted(counters, key=lambda x: x['count'], reverse=True)
        sorted_counters = \
            filter(lambda x: x['translation'], sorted_counters)
        if type:
            sorted_counters = \
                filter(
                    lambda x: x['translation']['type'] == type,
                    sorted_counters)
        return islice(sorted_counters, n)
class IconObjectDB(object):
    """The Class for inserting objects in the icon_object.json db using tinydb"""

    def __init__(self):
        super(IconObjectDB, self).__init__()
        try:
            self.db = TinyDB(
                os.path.abspath(
                    os.path.join(
                        os.path.dirname(__file__),
                        "..",
                        "..",
                        "..",
                        "EI",
                        "icons",
                        "icon_objects.json",
                    )
                )
            )
        except Exception as err:
            # FIX: chain the original exception instead of discarding it, so
            # the real cause (path, permissions, lock, ...) stays visible.
            raise ValueError("Database is locked") from err

    def insert_object(self, obj, override=False):
        """Insert `obj` keyed by its 'name'; overwrite an existing record
        only when warn_user(override) allows it. Returns False when skipped."""
        if not self.db.contains(where("name") == obj["name"]):
            print("Inserting: " + str(obj['name']))
            self.db.insert(obj)
        else:
            if self.warn_user(override):
                print("Overwriting: " + str(obj['name']))
                self.db.update(obj, where("name") == obj["name"])
            else:
                return False

    def get_values(self, value):
        """Collect the given key's value from every stored object that has it."""
        return [obj[value] for obj in self.get_all() if value in obj]

    def get_obj_by_mtl(self, mtl):
        """First object whose 'mtl' matches, or None."""
        if self.db.contains(where("mtl") == str(mtl)):
            return self.db.search(where("mtl") == str(mtl))[0]

    def get_obj_by_brush(self, brush):
        """First object whose 'brush' matches, or None."""
        if self.db.contains(where("brush") == str(brush)):
            return self.db.search(where("brush") == str(brush))[0]

    def get_obj_by_brush_and_mtl(self, brush, mtl):
        """First object matching both 'brush' and 'mtl', or None."""
        cond = (where("brush") == str(brush)) & (where("mtl") == str(mtl))
        if self.db.contains(cond):
            return self.db.search(cond)[0]

    def get_objs_by_brush(self, brush):
        """All objects whose 'brush' matches, or None when there are none."""
        if self.db.contains(where("brush") == str(brush)):
            return self.db.search(where("brush") == str(brush))

    def get_obj_by_name(self, name):
        """First object whose 'name' matches, or None."""
        if self.db.contains(where("name") == name):
            return self.db.search(where("name") == name)[0]

    def get_all(self):
        """Return every object in the database."""
        return self.db.search(lambda x: True)

    @staticmethod
    def warn_user(override):
        # Confirmation hook: currently just echoes the override flag.
        return override
# Control script: switch heating and cooling off, initialise the db records,
# then loop forever mirroring the ambient temperature sensor reading into the
# TinyDB record with ID == AMBIENT_ID and into the metrics sink `g`.
CoolOff()
HeatOff()
db = TinyDB("/mnt/scripts/RasPiBrew2/db.json")
State = OFF
print("Initializing Records")
InitializeRecord(db)
InitializeAmbient(db)
print("Starting cycle")
while True:
    cycle(db)
    # read_temp appears to return (celsius, fahrenheit) — see indexing below;
    # TODO(review): confirm against read_temp's definition.
    temp = read_temp(abient_sensor)
    Record = Query()
    dbRecords = db.search(Record.ID == AMBIENT_ID)
    dbRecord = dbRecords[0]
    dbRecord["LastUpdated"] = datetime.datetime.now().isoformat()
    dbRecord["Temp_F"] = temp[1]
    dbRecord["Temp_C"] = temp[0]
    # Push both unit readings to the metrics client.
    g.send("ambient.temp.fahrenheit", temp[1])
    g.send("ambient.temp.celsius", temp[0])
    # Write the refreshed record back, then pause before the next poll.
    db.update(dbRecord, Record.ID == AMBIENT_ID)
    time.sleep(2)
class ThoughtCache(object):
    """
    Quickly and simply cache python objects (inc. functions and classes)
    into a NoSQL database for later tag-based retrieval
    """

    def __init__(self, path='tc_db.json', encoder=jsonpickle.encode,
                 decoder=jsonpickle.decode):
        """
        Initialize. If specifying encoder and decoder, it must accept a
        signature of f(o). If not, use `partial`, lambda, etc. to wrap it.

        :param path: path to TinyDB json database
        :param encoder: function to encode objects
        :param decoder: function to decode objects
        :return: None
        """
        self.db = TinyDB(path)
        self.encode, self.decode = encoder, decoder
        self._tags = collections.Counter()

    def store(self, o, metadata):
        """
        Store object `o` in the database as `metadata['name']`.

        `metadata['name']` must be provided, but need not be unique. UUIDs
        are the authoritative source of identity: if an existing UUID is
        specified, *ALL* fields are overwritten. UUID may be a valid UUID
        string or None; None (or a missing key) means "create a new record".

        :param o: The actual object to be stored.
        :param metadata: dict of metadata, e.g.
            {
                'time': time.time(),
                'location': 'work',
                'tag': ['general networking'],
                'name': 'ExtendedPingResult',  # mandatory
                'uuid': '85280d8e-66bf-4e65-814f-507a65c0375c'
            }
        """
        store = self._store
        # FIX: treat an explicit None uuid the same as a missing key, as the
        # contract above promises; previously a None uuid was stored as-is.
        if metadata.get('uuid') is None:
            metadata['uuid'] = str(uuid.uuid4())
        elif self._test_uuid(metadata['uuid']):
            store = self._update
        metadata['json'] = self.encode(o)
        store(metadata)

    def retrieve_json(self, metadata_filter):
        """
        Retrieve list of JSON objects that match the given filter. If uuid
        is specified, all other criteria are ignored. All non-uuid fields
        are ORed within a field and ANDed across fields.

        :param metadata_filter: dict of values to filter against:
            {
                'time': day, month, year or range  # NOT IMPLEMENTED
                'location': 'work', 'home', etc...
                'name': where('name') contains value (case sensitive)
                'tag': single entry or list of entries
                'uuid': single entry or list of entries
            }
        :return: list of JSON strings representing the objects
        """
        query = metadata_filter_to_query(metadata_filter)
        return [x['json'] for x in sequenceify(self.db.search(query))]

    def retrieve(self, metadata_filter):
        """
        Returns reconstructed objects that match filter.

        :param metadata_filter: See ThoughtCache.retrieve_json.__doc__
        """
        json_list = self.retrieve_json(metadata_filter)
        return list(map(self.decode, json_list))

    def tag_search(self, *tags):
        """
        Given a string or list of strings, return entries with one or more
        matching tags.

        :param tags: string or list of strings
        :return: list of decoded objects
        """
        metadata_filter = {'tag': sequenceify(tags)}
        return self.retrieve(metadata_filter)

    def update_tags(self):
        """
        Rebuild the internal tag Counter from the database.

        :return: None
        """
        self._tags.clear()
        elements = self.db.search(where('tag'))
        for element in elements:
            self._tags.update(element['tag'])

    @property
    def tags(self):
        """
        List of all tags currently in use in the database.
        """
        self.update_tags()
        return list(self._tags.keys())

    def matching_tags(self, substr):
        """
        List of all in-use tags that contain `substr` (case insensitive).
        """
        return [x for x in self.tags if (x.lower().find(substr) > -1)]

    def _test_uuid(self, o_uuid):
        """
        Tests for the existence of a provided object UUID string in the
        database.

        :param o_uuid: object UUID string
        :return: True if UUID exists, else False
        """
        i = self.db.count(where('uuid') == o_uuid)  # number of matching queries
        # FIX: the message was a plain literal with an unformatted
        # placeholder; interpolate the offending uuid for the assert text.
        assert i < 2, "uuid {} is not unique in database!".format(o_uuid)
        return bool(i)

    def _store(self, metadata):
        """
        Store a new entry. `metadata` must include 'uuid', 'name' and 'json'.
        """
        self.db.insert(metadata)

    def _update(self, metadata):
        """
        Update an existing record, preserving TinyDB behavior: overwrites
        existing fields, adds new fields, does not delete fields.
        `metadata` must include 'uuid', 'name' and 'json'.
        """
        element = self.db.get(where('uuid') == metadata['uuid'])
        self.db.update(metadata, eids=[element.eid])
return shorten_chars return bytes_to_string(shorten_bytes) # scan share directory. flist = filter(lambda file: os.path.isfile(os.path.join(config['share_dir'], file)), os.listdir(config['share_dir'])) files = {byte_to_hex(md5((os.path.join(config['share_dir'], fname)))): fname for fname in flist if fname != os.path.basename(config['database'])} # remove records of which the file does not exist in share directory for file in db.all(): if file['md5'] in files: db.update({'name': files[file['md5']]}, File.md5==file['md5']) else: db.remove(File.md5==file['md5']) # move files in share directory to upload directory if not recorded for file in files: if not db.contains(File.md5==file): os.rename(os.path.join(config['share_dir'], files[file]), os.path.join(config['upload_dir'], files[file])) # record files in upload directory and move to share direcory def upload(fname=None): if fname: if os.path.isfile(os.path.join(config['upload_dir'], fname)): file = {} fmd5 = md5(os.path.join(config['upload_dir'], fname))
class DocumentManager:
    """Tracks documents and their sync state in a TinyDB file under CONF_DIR."""

    def __init__(self, path):
        self.db_file = os.path.join(path, CONF_DIR, DB_FN)
        self._db = TinyDB(self.db_file)

    def open_db(self):
        """(Re-)open the backing database file."""
        self._db = TinyDB(self.db_file)

    def close_db(self):
        """Close the backing database file."""
        self._db.close()

    def doc_exists(self, file_name, title):
        """True when a record carries both this file name and this title."""
        matches = self._db.search(
            (where('file_name') == file_name) & (where('name') == title))
        return bool(matches)

    def is_doc_new(self, file_name):
        """True when no record carries this file name yet."""
        return not self._db.search(where('file_name') == file_name)

    def is_doc_modified(self, file_name, path):
        """True when the on-disk file is newer than both recorded timestamps."""
        entry = self._db.get(where('file_name') == file_name)
        mtime = os.stat(os.path.join(path, file_name)).st_mtime
        return bool(entry and entry['added'] < mtime and entry['last_mod'] < mtime)

    def add_document(self, title, create_date, doc_id, sys_mtime, last_mod, file_name):
        """Insert a fresh document record with an empty download list."""
        self._db.insert({
            'name': title,
            'added': create_date,
            'id': doc_id,
            'sys_last_mod': sys_mtime,
            'last_mod': last_mod,
            'file_name': file_name,
            'downloaded': [],
        })

    def update_document(self, field, new_val, doc_id):
        """Update one field of the record with the given document id.

        Lists go through _update_entry_list; sets are stored as lists.
        """
        if type(new_val) is list:
            self._db.update(_update_entry_list(field, new_val),
                            where('id') == doc_id)
            return
        if type(new_val) is set:
            new_val = list(new_val)
        self._db.update({field: new_val}, where('id') == doc_id)

    def get_doc_by_prop(self, prop, expected_value):
        """ get documents by the specified property """
        return self._db.get(where(prop) == expected_value)

    def get_all_entries(self):
        """Every stored record."""
        return self._db.all()

    def get_doc_ids(self):
        """ returns all the ids of documents that user has added """
        return [entry['id'] for entry in self._db.all()]

    def remove_element(self, doc_id):
        """Delete the record with the given document id."""
        self._db.remove(where('id') == doc_id)

    def clear_all(self):
        """Wipe every record."""
        self._db.purge()
class Experiment:
    """
    A class to encapsulate information about a experiment and store
    together different experiment runs.

    The recommended way to identify an experiment is overwriting
    Experiment.get_id in a child class in a way that is dependent from the
    source file. Example:

        class MyExperiment(Experiment):
            def get_id(self):
                return inspect.getsourcefile(self)  # return __file__

    Alternatively, one can pass an id as a constructor parameter. If no id
    is passed, the default behaviour is to generate a random id (meaning a
    new experiment is created for each run).

    Experiments are instantiated with the classmethod Experiment.new, which
    additionally stores the experiment in the database with useful metadata,
    or Experiment.use, which additionally makes sure that the experiment is
    stored only the first time.

    Actual experiments are run on models. To add a model to the current
    experiment, instantiate the inner class Model with Experiment.model.

        model = Experiment.use(path).model("modelId", {"type": "SVM"})

    Model instantiates and encapsulates model information, providing
    database persistency. A model_id is required to identify the model.
    It also provides convenience methods to store experiment results for
    both single-result and epoch-based training experiments.
    See Model.session.

    Parameters:
    -----------
    path : str
        Path to the database file backend. A path in a remote machine can
        be specified with syntax: username@host:/path/to/remote/file.
    """

    def __init__(self, path, exp_id=None, verbose=False):
        assert path, "Path cannot be the empty string"
        self.level = logging.WARN if verbose else logging.NOTSET
        try:
            from sftp_storage import SFTPStorage, WrongPathException
            try:
                self.db = TinyDB(path, policy='autoadd', storage=SFTPStorage)
                log("Using remote db file [%s]" % path, level=self.level)
            except WrongPathException:
                self.db = TinyDB(path)
        except Exception:
            # FIX: was a bare `except:` (it also swallowed KeyboardInterrupt);
            # fall back to a local file when remote storage is unavailable.
            self.db = TinyDB(path)
            log("Using local file [%s]" % path, level=self.level)
        self.git = GitInfo(self.getsourcefile())
        self.id = exp_id if exp_id else self.get_id()

    def get_id(self):
        """Default id: a fresh random hex uuid (new experiment per run)."""
        return uuid4().hex

    def getsourcefile(self):
        """Source file of the (sub)class defining this experiment."""
        return utils.getsourcefile(lambda: None)

    def exists(self):
        """The stored record for this experiment id, or None."""
        return self.db.get(where("id") == self.id)

    def add_tag(self, tag):
        """Append `tag` to this experiment's tag list."""
        self.db.update(extend("tags", tag), where("id") == self.id)

    def remove_tag(self, tag):
        """Remove `tag` from this experiment's tag list."""
        return self.db.update(remove("tags", tag), where("id") == self.id)

    def get_models(self):
        """The experiment's stored models, or {} when it isn't stored yet."""
        experiment = self.db.get(where("id") == self.id)
        return experiment.get("models") if experiment else {}

    @classmethod
    def new(cls, path, exp_id=None, tags=(), **params):
        """
        Stores a new Experiment in the database. Throws an exception if
        experiment already exists.
        """
        exp = cls(path, exp_id=exp_id)
        if exp.exists():
            raise ValueError("Experiment %s already exists" % str(exp.id))
        now, exp_id = str(datetime.now()), exp_id or exp.id
        base = {"id": exp_id, "tags": tags, "models": [], "created": now}
        exp.db.insert(utils.merge(base, params))
        return exp

    @classmethod
    def use(cls, path, exp_id=None, tags=(), **params):
        """
        Stores a new Experiment if none can be found with given parameters,
        otherwise instantiate the existing one with data from database.
        """
        exp = cls(path, exp_id=exp_id)
        if exp.exists():
            return exp
        else:
            log("Creating new Experiment %s" % str(exp.id))
            return cls.new(path, exp_id=exp_id, tags=tags, **params)

    def model_exists(self, model_id):
        """
        Returns:
        --------
        dict or None
        """
        return self.db.get(
            (where("id") == self.id) &
            where("models").any(where("modelId") == model_id))

    def model(self, model_id, model_config=None):
        """Instantiate (and register if new) a Model for this experiment.

        FIX: the default used to be a shared mutable dict literal; None is
        now normalised to {} inside, which is backward-compatible.
        """
        return self.Model(self, model_id, {"config": model_config or {}})

    class Model:
        def __init__(self, experiment, model_id, model_config):
            self._session_params = None
            self.e = experiment
            self.model_id = model_id
            self.which_model = model_pred(self.model_id)
            self.cond = ((where("id") == experiment.id) &
                         where("models").any(where("modelId") == model_id))
            if not self.exists():
                self._add_default_model(**model_config)

        def _add_default_model(self, **kwargs):
            """Register this model id on the experiment record."""
            model = utils.merge({"modelId": self.model_id}, kwargs)
            self.e.db.update(append("models", model),
                             where("id") == self.e.id)

        def _result_meta(self):
            """Provenance metadata attached to every stored result."""
            return {"commit": self.e.git.get_commit() or "not-git-tracked",
                    "branch": self.e.git.get_branch() or "not-git-tracked",
                    "user": getuser(),
                    "platform": platform(),
                    "timestamp": str(datetime.now())}

        def _check_params(self, params):
            """Raise ExistingModelParamsException if a stored session already
            used exactly these params."""
            models = self.e.get_models()
            if not models:
                return
            model = next(m for m in models if m["modelId"] == self.model_id)
            for result in model.get("sessions", []):
                if result["params"] == params:
                    raise ExistingModelParamsException()

        def _add_result(self, result, params):
            """ Add session result (new) """
            meta = self._result_meta()
            result = {"params": params, "meta": meta, "result": result}
            path = ["models", self.which_model, "sessions"]
            self.e.db.update(append_in(path, result), self.cond)

        def _add_session_result(self, result, index_by=None):
            """
            Adds (partial) result to session currently running. Session is
            identified based on session `params`.

            In case a model is run with the same params in a second session,
            results are added to the chronologically last session (we rely on
            `update_in` checking lists in reverse).

            Parameters:
            -----------
            result : (serializable-)dict
            index_by : serializable, optional
                Key to store result by. `result` is appended to
                session.result.index_by if given, or to session.result
                otherwise.
            """
            which_session = params_pred(self._session_params)
            # FIX: `([index_by] or [])` was always truthy (a one-element list,
            # even [None]), so unindexed results were appended under a literal
            # None key. Only extend the path when index_by is actually given.
            path = (["models", self.which_model, "sessions", which_session,
                     "result"] + ([index_by] if index_by is not None else []))
            self.e.db.update(append_in(path, result), self.cond)

        def _start_session(self, params):
            """Open a new session record for `params`."""
            self._session_params = params
            path = ["models", self.which_model, "sessions"]
            result = {"params": params, "meta": self._result_meta()}
            self.e.db.update(append_in(path, result), self.cond)

        def _end_session(self):
            self._session_params = None

        def exists(self):
            return self.e.model_exists(self.model_id)

        @contextlib.contextmanager
        def session(self, params, ensure_unique=True):
            # TODO: store on exit
            """
            Context manager for cases in which we want to add several
            results to the same experiment run. Current session is
            identified based on `params` (see _add_session_result).

            Example:
                model_db = Experiment.use("test.json").model("id")
                with model_db.session({"param-1": 10}) as session:
                    session.add_result({"accuracy": 0.9})

            Parameters:
            -----------
            params: dict, parameters passed in to the model instance
            ensure_unique: bool, throw an exception in case model has
                already been run with the same parameters
            """
            assert isinstance(params, dict), \
                "Params expected dict but got %s" % str(type(params))
            if ensure_unique:
                self._check_params(params)
            self._start_session(params)
            try:
                yield self
            finally:
                # FIX: previously a raising body left the session marked as
                # running; always clear it.
                self._end_session()

        def add_meta(self, d):
            """
            Adds session meta info.

            Parameters:
            -----------
            d: dict, multiple key-val additional info for the session
            """
            if not self._session_params:
                raise ValueError("add_meta requires session context manager")
            if not isinstance(d, dict):
                raise ValueError("add_meta input must be dict")
            if not self.exists():
                self._add_default_model()
            which_session = params_pred(self._session_params)
            path = ["models", self.which_model, "sessions", which_session,
                    "meta"]
            self.e.db.update(assign_in(path, d), self.cond)

        def add_result(self, result, params=None, index_by=None):
            """ appends result to models.$.sessions.$.result """
            if not params and not self._session_params:
                raise ValueError("Experiment params missing")
            if not self._session_params:
                self._add_result(result, params)
            else:
                self._add_session_result(result, index_by=index_by)

        def add_epoch(self, epoch_num, result, timestamp=True):
            """Append an epoch result (indexed under "epochs") to the
            currently running session."""
            if not self._session_params:
                raise ValueError("add_epoch requires session context manager")
            result.update({"epoch_num": epoch_num})
            if timestamp:
                result.update({"timestamp": str(datetime.now())})
            self._add_session_result(result, index_by="epochs")
def goto(eid):
    """Mark the entry as seen and redirect the client to its stored URL."""
    db = TinyDB(DB_FILENAME)
    result = db.get(eid=eid)
    db.update({'seen': True}, eids=[eid])
    # FIX: the sibling handlers (star/hide) close the db; this one leaked
    # the handle on every request.
    db.close()
    return redirect(result['url'], code=302)
def star(eid):
    """Flip the 'starred' flag on the entry with the given eid."""
    db = TinyDB(DB_FILENAME)
    entry = db.get(eid=eid)
    toggled = not entry['starred']
    db.update({'starred': toggled}, eids=[eid])
    db.close()
    return 'OK'
def hide(eid):
    """Flip the 'hidden' flag on the entry with the given eid."""
    db = TinyDB(DB_FILENAME)
    entry = db.get(eid=eid)
    toggled = not entry['hidden']
    db.update({'hidden': toggled}, eids=[eid])
    db.close()
    return 'OK'
class xlibris(object):
    """A small personal book catalogue stored as per-database TinyDB json files.

    Python 2 code (raw_input); print statements use the py2/py3-compatible
    single-argument call form.
    """

    def __init__(self, directory='xlibris/'):
        self.directory = directory
        if not os.path.exists(self.directory):
            os.mkdir(self.directory)

    def new(self, dbname):
        '''Creates a new database assigns it to a current database instance
        to be used by all the other functions
        Usage:- new('database name') or new database_name '''
        self.db = TinyDB(self.directory + dbname + '.json')
        print('New database {} created at {}'.format(dbname, self.directory + dbname + '.json'))

    def connect(self, name):
        '''Connect to an existing database for updating/Query
        Usage:- connect('name') or connect name
        where 'name' is the name of the existing database'''
        self.db = TinyDB(self.directory + name + '.json')
        print('Connected to {}'.format(name))

    def display(self):
        """Print the whole catalogue as a table followed by the book count."""
        try:
            print(tabulate(_concat(self.db.all()), headers='keys', tablefmt="simple"))
            print("\n")
            print(self.count())
        except AttributeError:
            # self.db is only set by new()/connect().
            print('''No database Connected, to see a list of available databases use list_db or to make a new database use new''')

    def add(self, ISBN):
        '''Add books to the current working database
        Usage:- add(ISBN) or add ISBN'''
        if _doesexist(self.db, ISBN) == False:
            try:
                bookData = meta(ISBN)
                bookData = _cleanify(bookData)
                bookData['Date Added'] = _today()
                self.db.insert(bookData)
                print('ISBN {} inserted'.format(ISBN))
            except Exception:
                # FIX: narrowed from a bare `except:`. Metadata lookup
                # failed; fall back to manual entry.
                print('ISBN {} not found. \nPlease add details manually- '.format(ISBN))
                self.madd()
        else:
            print('Book Already Exists')

    def madd(self):
        """Manually enter one book record from stdin."""
        bookData = {}
        bookData['Authors'] = raw_input('Authors Name: ')
        bookData['ISBN'] = raw_input('ISBN: ')
        bookData['Language'] = raw_input('Language: ')
        bookData['Publisher'] = raw_input('Publisher: ')
        bookData['Title'] = raw_input('Title: ')
        bookData['Year'] = raw_input('Year: ')
        bookData['Date Added'] = _today()
        self.db.insert(bookData)

    def search(self, keyword):
        """Print every record whose Title, Authors, Publisher or ISBN equals keyword."""
        NewSearch = Query()
        title = self.db.search(NewSearch.Title == keyword)
        auth = self.db.search(NewSearch.Authors == keyword)
        pub = self.db.search(NewSearch.Publisher == keyword)
        isbn = self.db.search(NewSearch.ISBN == keyword)
        for hits in [title, auth, pub, isbn]:
            if hits:
                print('Matches Found for {} \n'.format(keyword))
                print(tabulate(_concat(hits), headers='keys', tablefmt="fancy_grid"))

    def _blkadd(self, ISBNlist):
        """Add a list of ISBNs, showing a progress bar."""
        with tqdm(ISBNlist) as pbar:
            for isbn in pbar:
                pbar.set_description("Adding %s " % isbn)
                self.add(isbn)
                # NOTE(review): under Python 2 integer division this is 0 for
                # lists longer than one entry; the bar advances via iteration.
                pbar.update(1 / len(ISBNlist) * 100)

    def add_from_file(self, filename):
        """Bulk-add ISBNs listed one per row in a csv file."""
        with open(filename, 'rb') as f:
            rows = list(reader(f))
        isbns = [str(row[0]) for row in rows]
        self._blkadd(isbns)
        print('Done')

    def change_title(self, isbn):
        """Prompt for and store a new Title for the given ISBN."""
        tmp = Query()
        def change(field):
            def transform(element):
                element[field] = raw_input('Enter Title ')
            return transform
        title = self.db.search(tmp.ISBN == isbn)[0]
        print('Change title of :- {}'.format(title['Title']))
        self.db.update(change('Title'), tmp.ISBN == isbn)
        print('Entry Updated')

    def change_author(self, isbn):
        """Prompt for and store a new Authors value for the given ISBN."""
        tmp = Query()
        def change(field):
            def transform(element):
                element[field] = raw_input('Enter Author ')
            return transform
        title = self.db.search(tmp.ISBN == isbn)[0]
        print('Change author of :- {}'.format(title['Title']))
        self.db.update(change('Authors'), tmp.ISBN == isbn)
        print('Entry Updated')

    def change_publisher(self, isbn):
        """Prompt for and store a new Publisher for the given ISBN."""
        tmp = Query()
        def change(field):
            def transform(element):
                element[field] = raw_input('Enter Publisher ')
            return transform
        title = self.db.search(tmp.ISBN == isbn)[0]
        print('Change Publisher of :- {}'.format(title['Title']))
        self.db.update(change('Publisher'), tmp.ISBN == isbn)
        print('Entry Updated')

    def write_to_file(self, filename):
        """Dump the catalogue table and count to xlibris/<filename>.txt."""
        try:
            data = tabulate(_concat(self.db.all()), headers='keys', tablefmt="simple")
        except AttributeError:
            print('No database Connected')
            # FIX: previously fell through and crashed with a NameError on
            # the undefined `data` below.
            return
        # FIX: use a context manager so the handle is closed on error too.
        with open('xlibris/' + filename + '.txt', 'w') as f:
            f.write(data.encode('utf8'))
            f.write('\n'.encode('utf8'))
            f.write('--------------------'.encode('utf8'))
            f.write('\n'.encode('utf8'))
            f.write(self.count().encode('utf8'))
        print('Written to {}'.format('xlibris/' + filename + '.txt'))

    def purge_current(self):
        """Delete every record in the connected database."""
        self.db.purge()

    def remove(self, isbn):
        """Interactively delete the record with the given ISBN."""
        tmp = Query()
        resp = raw_input('Delete \n {} \n ? (y/n)'.format(tabulate(self.db.search(tmp.ISBN == isbn), headers='keys')))
        resp = resp.lower()
        if resp == 'y':
            # Field-by-field delete (no record-level remove here).
            for field in ['Publisher', 'Title', 'Authors', 'Year', 'Date Added', 'Language', 'ISBN']:
                self.db.update(delete(field), tmp.ISBN == isbn)
            print('Deleted')
        elif resp == 'n':
            print('Spared')

    def lookup(self, keyword):
        """Fuzzy-search titles and authors, showing strong (>= 90) matches."""
        data = _concat(self.db.all())
        title = data.pop('Title')
        auth = data.pop('Authors')
        choices = title + auth
        searchKey = process.extractBests(keyword, choices)
        for match in searchKey:
            if match[1] >= 90:
                self.search(match[0])

    def count(self):
        """Return a 'Total N books.' summary string."""
        listisbn = _concat(self.db.all())
        listisbn = listisbn.pop('ISBN')
        return "Total {} books.".format(len(listisbn))
def update_test(db_file='db.json'):
    """Set 'items' to 990 for the customer whose contact is 9734286136."""
    database = TinyDB(db_file)
    record = Query()
    database.update({'items': 990}, record.contact == 9734286136)
def output(x):
    """Print a blank separator line, then x."""
    print("")
    print(x)

db = TinyDB("summaries.json")
# Start from an empty table.
db.purge()
# Seed two example records (order matters for the demo output).
for record in ({"type": "apple", "count": 7}, {"type": "peach", "count": 3}):
    db.insert(record)
# A few example queries.
output(db.all())
Fruit = Query()
output(db.search(Fruit.type == "peach"))
output(db.search(Fruit.count >= 3))
# Update the apples, then show them.
db.update({"count": 10}, Fruit.type == "apple")
output(db.search(Fruit.type == "apple"))
# Drop anything with a small count and dump what remains.
db.remove(Fruit.count < 5)
print(db.all())
def log_update(field, filename, source):
    """Append the absolute path of `filename` to the given list field of the
    current run record (identified by RUN_ID), mirroring log_output."""
    filename = os.path.abspath(filename)
    # FIX: the format string read "using $s" — '$s' was a typo for '%s',
    # leaving only two specifiers for three arguments, which raised
    # TypeError ("not all arguments converted") on every call.
    print("Adding %s to %s using %s" % (field, filename, source))
    db = TinyDB(DBFILE)
    db.update(append(field, filename), eids=[RUN_ID])
    db.close()
class ProgramEngine:
    """Singleton registry of robot programs, mirrored between a TinyDB table
    and per-program json files under PROGRAM_PATH."""
    # pylint: disable=exec-used
    _instance = None

    def __init__(self):
        self._program = None
        self._log = ""
        self._programs = TinyDB("data/programs.json")
        query = Query()
        # Seed the db with any program files on disk it does not know yet.
        # FIX: dropped the original no-op bare `dirnames` statement.
        for dirname, _dirnames, filenames in os.walk(PROGRAM_PATH):
            for filename in filenames:
                if PROGRAM_PREFIX in filename:
                    program_name = filename[len(PROGRAM_PREFIX):-len(PROGRAM_SUFFIX)]
                    if self._programs.search(query.name == program_name) == []:
                        logging.info("adding program %s in path %s as default %r",
                                     program_name, dirname, ("default" in dirname))
                        self._programs.insert({"name": program_name,
                                               "filename": os.path.join(dirname, filename),
                                               "default": str("default" in dirname)})

    @classmethod
    def get_instance(cls):
        """Return the lazily-created process-wide instance."""
        if not cls._instance:
            cls._instance = ProgramEngine()
        return cls._instance

    def prog_list(self):
        """All known program records."""
        return self._programs.all()

    def save(self, program):
        """Upsert the program record and write its json file to disk."""
        query = Query()
        self._program = program
        program_db_entry = program.as_dict()
        program_db_entry["filename"] = os.path.join(
            PROGRAM_PATH, PROGRAM_PREFIX + program.name + PROGRAM_SUFFIX)
        if self._programs.search(query.name == program.name) != []:
            self._programs.update(program_db_entry, query.name == program.name)
        else:
            self._programs.insert(program_db_entry)
        # FIX: `with` guarantees the file is closed even if json.dump raises.
        with open(program_db_entry["filename"], 'w+') as f:
            json.dump(program.as_dict(), f)

    def load(self, name):
        """Load the named program from its json file; returns it, or None
        when the name is unknown."""
        query = Query()
        program_db_entries = self._programs.search(query.name == name)
        if program_db_entries != []:
            logging.info(program_db_entries[0])
            # FIX: the original opened this file and never closed it.
            with open(program_db_entries[0]["filename"], 'r') as f:
                self._program = Program.from_dict(json.load(f))
            return self._program

    def delete(self, name):
        """Remove the named program's file and its db record."""
        query = Query()
        program_db_entries = self._programs.search(query.name == name)
        if program_db_entries != []:
            os.remove(program_db_entries[0]["filename"])
            self._programs.remove(query.name == name)

    def create(self, name, code):
        """Create (but do not persist) a new in-memory program."""
        self._program = Program(name, code)
        return self._program

    def is_running(self, name):
        """True when the named program is the one currently executing."""
        return self._program.is_running() and self._program.name == name

    def check_end(self):
        """Delegate the end-of-program check to the current program."""
        return self._program.check_end()

    def log(self, text):
        """Append one line to the in-memory execution log."""
        self._log += text + "\n"

    def get_log(self):
        """Current accumulated log text."""
        return self._log
class StasherTinyDB(StasherClass):
    """StasherClass implementation that persists havocbot users in TinyDB.

    Users live in the "users" table of stasher/havocbot.json; a user's id
    is its TinyDB record eid.
    """

    def __init__(self):
        self.db = TinyDB("stasher/havocbot.json", default_table="users", sort_keys=True, indent=2)

    def add_user(self, user):
        """Insert *user*, raising if any of its usernames is already stored."""
        # Iterate through the user's usernames and see if any usernames already exist
        if self._user_exists(user):
            logger.error("This user already exists in the db")
            raise UserDataAlreadyExistsException
        logger.info("Adding new user '%s' to database", user.name)
        logger.debug("add_user - adding '%s'", user.to_dict_for_db())
        self.db.insert(user.to_dict_for_db())

    def del_user(self, user):
        pass

    # Fixed: the former ``try: … except: raise`` wrappers around the four
    # calls below were no-ops with a bare except; call the helpers directly
    # and let their exceptions propagate unchanged.
    def add_permission_to_user_id(self, user_id, permission):
        self._add_string_to_list_by_key_for_user_id(user_id, "permissions", permission)

    def del_permission_to_user_id(self, user_id, permission):
        self._del_string_to_list_by_key_for_user_id(user_id, "permissions", permission)

    def add_alias_to_user_id(self, user_id, alias):
        self._add_string_to_list_by_key_for_user_id(user_id, "aliases", alias)

    def del_alias_to_user_id(self, user_id, alias):
        self._del_string_to_list_by_key_for_user_id(user_id, "aliases", alias)

    def add_points_to_user_id(self, user_id, points):
        """Add *points* to the stored points of the user with eid *user_id*."""
        logger.info("Adding %d points to user id %s", points, user_id)

        def increment_by_value(field, value):
            def transform(element):
                element[field] += int(value)
            return transform

        self.db.update(increment_by_value("points", points), eids=[user_id])

    def del_points_to_user_id(self, user_id, points):
        """Subtract *points* from the stored points of the user with eid *user_id*."""
        logger.info("Deleting %d points from user id %s", points, user_id)

        def decrement_by_value(field, value):
            def transform(element):
                element[field] -= int(value)
            return transform

        self.db.update(decrement_by_value("points", points), eids=[user_id])

    def find_user_by_id(self, search_user_id):
        """Return the User with TinyDB eid *search_user_id*, or None."""
        logger.info("Searching for '%s'", search_user_id)
        user = None
        result = self.db.get(eid=search_user_id)
        if result is not None:
            user = self.build_user(result)
        logger.debug("Returning with '%s'", user)
        return user

    def find_user_by_username_for_client(self, search_username, client_name):
        """Return the single User holding *search_username* for *client_name*.

        Only an unambiguous single match is returned; otherwise None.
        """
        logger.info("Searching for '%s' in client '%s'", search_username, client_name)
        user = None
        user_query = Query()
        result_list = self.db.search(user_query.usernames[client_name].any([search_username]))
        if result_list and len(result_list) == 1:
            user = self.build_user(result_list[0])
        logger.debug("Returning with '%s'", user)
        return user

    def find_users_by_username(self, search_username):
        # Not implemented (a client name is required to search usernames).
        pass

    def find_users_by_name_for_client(self, search_name, client_name):
        """Return all valid Users whose name equals *search_name* (case-insensitive)."""
        logger.info("Searching for '%s' in client '%s'", search_name, client_name)
        results = []

        def name_test_func(val, nested_search_name):
            return val.lower() == nested_search_name.lower()

        user_query = Query()
        matched_users = self.db.search(
            (user_query["name"].test(name_test_func, search_name))
            & (user_query["usernames"].any([client_name]))
        )
        for matched_user in matched_users:
            a_user = self.build_user(matched_user)
            if a_user.is_valid():
                results.append(a_user)
        logger.debug("Returning with '[%s]'", ", ".join(map(str, results)))
        return results

    def find_users_by_alias_for_client(self, search_alias, client_name):
        """Return all valid Users with an alias equal to *search_alias* (case-insensitive)."""
        logger.info("Searching for '%s' in client '%s'", search_alias, client_name)
        results = []

        def alias_test_func(val, nested_search_alias):
            # Fixed: use the parameter (the closure variable shadowed it) and
            # return the comparison itself rather than the truthiness of the
            # lowercased alias string, which mis-handled empty strings.
            return any(x.lower() == nested_search_alias.lower() for x in val)

        user_query = Query()
        matched_users = self.db.search(
            (user_query["aliases"].test(alias_test_func, search_alias))
            & (user_query["usernames"].any([client_name]))
        )
        for matched_user in matched_users:
            a_user = self.build_user(matched_user)
            if a_user.is_valid():
                results.append(a_user)
        logger.debug("Returning with '[%s]'", ", ".join(map(str, results)))
        return results

    def find_users_by_matching_string_for_client(self, search_string, client_name):
        """Collect matches of *search_string* by name, by username and by alias."""
        logger.info("Searching for '%s' in client '%s'", search_string, client_name)
        results = []

        results_name = self.find_users_by_name_for_client(search_string, client_name)
        if results_name:
            results.extend(results_name)

        result_username = self.find_user_by_username_for_client(search_string, client_name)
        if result_username:
            results.append(result_username)

        results_alias = self.find_users_by_alias_for_client(search_string, client_name)
        if results_alias:
            results.extend(results_alias)

        return results

    def find_all_users(self):
        pass

    def build_user(self, result_data):
        """Build a User from a TinyDB record; the record eid becomes the user id."""
        user = User(result_data.eid)
        user.name = result_data.get("name")
        user.usernames = result_data.get("usernames", {})
        user.points = result_data.get("points")
        user.permissions = result_data.get("permissions", [])
        user.aliases = result_data.get("aliases", [])
        user.is_stashed = True
        return user

    def _add_string_to_list_by_key_for_user_id(self, user_id, list_key, string_item):
        """Append *string_item* to the list stored under *list_key* for *user_id*.

        Raises UserDataAlreadyExistsException when the item is already present.
        """
        logger.info("Adding '%s' item '%s' to user id %d", list_key, string_item, user_id)
        try:
            list_items = self.db.get(eid=user_id)[list_key]
        except KeyError:
            logger.info("No items found for list '%s' for user id '%d'", list_key, user_id)
            list_items = [string_item]
        else:
            if string_item in list_items:
                # Fixed: the old ``finally`` clause still ran a pointless
                # db.update while this exception propagated; bail out first.
                raise UserDataAlreadyExistsException
            list_items.append(string_item)
        logger.debug("Updating '%s' to '%s' for user id '%s'", list_key, list_items, user_id)
        self.db.update({list_key: list_items}, eids=[user_id])

    def _del_string_to_list_by_key_for_user_id(self, user_id, list_key, string_item):
        """Remove *string_item* from the list stored under *list_key* for *user_id*.

        Raises KeyError when no such list exists, and UserDataNotFoundException
        when the item is not in it.
        """
        logger.info("Deleting '%s' item '%s' from user id %d", list_key, string_item, user_id)
        # A KeyError (no such list) propagates to the caller, as before;
        # the former ``except KeyError: raise`` was a no-op.
        list_items = self.db.get(eid=user_id)[list_key]
        if string_item not in list_items:
            raise UserDataNotFoundException
        list_items.remove(string_item)
        logger.debug("Updating '%s' to '%s' for user id '%s'", list_key, list_items, user_id)
        self.db.update({list_key: list_items}, eids=[user_id])

    def _user_exists(self, user):
        """True when any of *user*'s usernames is already stored for its client."""
        # Iterate through the user's usernames and see if any usernames already exist
        if user.usernames:
            for key, value in user.usernames.items():
                logger.info("Iterating over key '%s' with value '%s'", key, value)
                for username in value:
                    logger.info("Iterating over username '%s'", username)
                    result = self.find_user_by_username_for_client(username, key)
                    logger.info("result for username '%s' is '%s'", username, result)
                    if result is not None:
                        return True
        return False
class JobDB:
    """Keeps a database of jobs, with a MD5 hash that encodes the function
    name, version, and all arguments to the function.

    All public methods take ``self.lock`` so the TinyDB instance can be
    shared between threads.
    """

    def __init__(self, path):
        self.db = TinyDB(path)
        self.lock = Lock()

    def get_result_or_attach(self, key, prov, running):
        """Resolve *prov* against the database.

        Returns one of:
          ('retrieved', key, result) — the job already has a result;
          ('attached', key, None)    — an equivalent job/workflow is running,
                                       *key* was attached to it;
          ('broken', None, None)     — no usable record (it is removed).
        """
        job = Query()
        with self.lock:
            rec = self.db.get(job.prov == prov)

            # Fixed: db.get returns None when nothing matches; report the job
            # as broken instead of raising TypeError on the membership test.
            if rec is None:
                return 'broken', None, None

            if 'result' in rec:
                return 'retrieved', rec['key'], rec['result']

            job_running = rec['key'] in running
            wf_running = rec['link'] in running.workflows

            if job_running or wf_running:
                self.db.update(attach_job(key), job.prov == prov)
                return 'attached', rec['key'], None

            print("WARNING: unfinished job in database. Removing it and "
                  " rerunning.", file=sys.stderr)
            self.db.remove(eids=[rec.eid])
            return 'broken', None, None

    def job_exists(self, prov):
        """True when a job with provenance *prov* is already stored."""
        job = Query()
        with self.lock:
            return self.db.contains(job.prov == prov)

    def store_result(self, key, result):
        """Store *result* for job *key*; return the list of attached job keys.

        Silently returns None when *key* is not in the database.
        """
        job = Query()
        with self.lock:
            if not self.db.contains(job.key == key):
                return

        self.add_time_stamp(key, 'done')

        with self.lock:
            self.db.update(
                {'result': result, 'link': None},
                job.key == key)
            rec = self.db.get(job.key == key)
            return rec['attached']

    def new_job(self, key, prov, job_msg):
        """Insert a fresh job record for *key*/*prov*; returns (key, prov)."""
        with self.lock:
            self.db.insert({
                'key': key,
                'attached': [],
                'prov': prov,
                'link': None,
                'time': {'schedule': time_stamp()},
                'version': job_msg['data']['hints'].get('version'),
                'function': job_msg['data']['function'],
                'arguments': job_msg['data']['arguments']
            })
        return key, prov

    def add_link(self, key, ppn):
        """Record that job *key* is linked to workflow node *ppn*."""
        job = Query()
        with self.lock:
            self.db.update({'link': ppn}, job.key == key)

    def get_linked_jobs(self, ppn):
        """Return the keys of every job linked to workflow node *ppn*."""
        job = Query()
        with self.lock:
            rec = self.db.search(job.link == ppn)
            return [r['key'] for r in rec]

    def add_time_stamp(self, key, name):
        """Stamp the current time under *name* in job *key*'s 'time' dict."""
        def update(r):
            r['time'][name] = time_stamp()

        job = Query()
        with self.lock:
            self.db.update(update, job.key == key)