def USROFF(self, params):
    ssid = params['ssid'][-1]
    self.users.update({'logged': False}, Query()['SSID'] == ssid)
    self.users.update(delete('lastAct'), Query()['SSID'] == ssid)
    self.users.update(delete('client_ip'), Query()['SSID'] == ssid)
    self.users.update(delete('SSID'), Query()['SSID'] == ssid)
    return db_proto.Response(code=DBRespCode.OK)
def retrieve_device_public_key(device_id, token):
    data = {"device_id": device_id}
    table = get_tinydb_table(path, 'device_keys')
    doc = table.get(Query().device_id == device_id)
    if not doc:
        with click.Context(send_key_to_device) as ctx:
            click.echo(f"Keys for device {device_id} not present, please use: {ctx.command.name}")
            click.echo(get_attr_auth_keys.get_help(ctx))
        return

    r = requests.post(URL_RECEIVE_PUBLIC_KEY, headers={"Authorization": token}, data=data, verify=VERIFY_CERTS)
    if r.status_code != 200:
        click.echo(r.content.decode('unicode-escape'))
        return

    content = r.content.decode('unicode-escape')
    json_content = json_string_with_bytes_to_dict(content)

    private_key = load_pem_private_key(doc["private_key"].encode(), password=None, backend=default_backend())
    assert isinstance(private_key, EllipticCurvePrivateKey), \
        "Loading private key failed! - private_key is not instance of EllipticCurvePrivateKey"

    device_public_key = load_pem_public_key(json_content["device_public_key"].encode(), backend=default_backend())
    assert isinstance(device_public_key, EllipticCurvePublicKey), \
        "Loading public key failed! - device_public_key is not instance of EllipticCurvePublicKey"

    shared_key = private_key.exchange(ec.ECDH(), device_public_key)
    derived_key = HKDF(
        algorithm=hashes.SHA256(),
        length=32,
        salt=None,
        info=b'handshake data',
        backend=default_backend()
    ).derive(shared_key)

    key = key_to_hex(derived_key)  # NOTE: retrieve key as `key_to_hex(key)`
    table.update(delete("public_key"), Query().device_id == device_id)
    table.update(delete("private_key"), Query().device_id == device_id)
    table.update(set("shared_key", key), Query().device_id == device_id)
def unset_node_canary_for_package(package: str):
    query = Query()
    nodes = DB.table("nodes")
    canaries = nodes.search(query.canary != "")
    for canary in canaries:
        if canary["canary"]["package"] == package:
            nodes.update(delete("canary"), query.node_id == canary["node_id"])
def checkField(linkDB, linkfield):
    """
    Checks that the database is up to date with respect to the fieldTemplate file.
    If a field is found that does not exist yet, the database is updated.
    """
    db = TinyDB("{}.json".format(linkDB))
    flag = 0
    FL = fieldList(linkfield)  # Fields from the template
    DBFL = DBFieldList(linkDB)  # Fields currently in the DB

    # Additions
    for x in FL:
        if db.search(Query()[x].exists()) == []:
            db.update({str(x): FL[x]})
            DBFL.clear()
            DBFL = DBFieldList(linkDB)  # Fields currently in the DB
            flag = "add" + str(flag)

    # Removals
    for x in DBFL:
        if x not in FL:
            db.update(delete(x))
            DBFL.clear()
            DBFL = DBFieldList(linkDB)  # Fields currently in the DB
            flag = "sup" + str(flag)

    # Types
    for x in DBFL:
        if not isinstance(DBFL[x], type(FL[x])):
            db.update({str(x): FL[x]})
            flag = "type" + str(flag)

    db.close()
    return flag
def USRVERIFIED(self, params):
    usr = self.users.get(Query()['uid'] == params['uid'])
    if usr is None:
        return db_proto.Response(code=DBRespCode.FAIL)
    self.users.update({'verified': True}, doc_ids=[usr.doc_id])
    self.users.update(delete('verify_token'), doc_ids=[usr.doc_id])
    return db_proto.Response(code=DBRespCode.OK)
def __update(self, dbg, uuid, table_name, meta):
    log.debug("%s: xcpng.meta.MetadataHandler.__update: uuid: %s table_name: %s meta: %s"
              % (dbg, uuid, table_name, meta))

    if table_name == 'sr':
        uuid_tag = SR_UUID_TAG
    elif table_name == 'vdis':
        uuid_tag = VDI_UUID_TAG
    else:
        raise Exception('Incorrect table name')

    table = self.db.table(table_name)

    try:
        if table.contains(Query()[uuid_tag] == uuid):
            for tag, value in meta.iteritems():
                if value is None:
                    log.debug("%s: xcpng.meta.MetadataHandler.__update: tag: %s remove value" % (dbg, tag))
                    table.update(delete(tag), Query()[uuid_tag] == uuid)
                else:
                    log.debug("%s: xcpng.meta.MetadataHandler.__update: tag: %s set value: %s" % (dbg, tag, value))
                    table.update({tag: value}, Query()[uuid_tag] == uuid)
        else:
            table.insert(meta)
        self.__updated = True
    except Exception as e:
        log.error("%s: xcpng.meta.MetadataHandler._update: Failed to update metadata" % dbg)
        raise Exception(e)
def test_can_not_delete_created_at_key(self):
    with TinyDB(storage=TimestampsMiddleware(MemoryStorage)) as db:
        id = db.insert({"xyz": "foo"})
        ts = db.get(doc_id=id).get("created_at")
        assert ts
        db.update(delete("created_at"), doc_ids=[id])
        assert db.get(doc_id=id).get("created_at") == ts
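# The test above presupposes a middleware that protects the 'created_at' stamp. Below is a
# minimal sketch of one way such a middleware could work, built on TinyDB's Middleware base
# class; it is an illustrative guess, not the actual TimestampsMiddleware from that project.
from datetime import datetime, timezone

from tinydb import TinyDB
from tinydb.middlewares import Middleware
from tinydb.operations import delete
from tinydb.storages import MemoryStorage


class TimestampsMiddleware(Middleware):
    def __init__(self, storage_cls):
        super().__init__(storage_cls)
        self._stamps = {}  # (table_name, doc_id) -> 'created_at' value seen at last write

    def write(self, data):
        for table_name, docs in data.items():
            for doc_id, doc in docs.items():
                key = (table_name, doc_id)
                if 'created_at' not in doc:
                    # Restore the stamp if an update removed it, otherwise create one now.
                    doc['created_at'] = self._stamps.get(
                        key, datetime.now(timezone.utc).isoformat())
                self._stamps[key] = doc['created_at']
        self.storage.write(data)


with TinyDB(storage=TimestampsMiddleware(MemoryStorage)) as db:
    doc_id = db.insert({'xyz': 'foo'})
    stamp = db.get(doc_id=doc_id)['created_at']
    db.update(delete('created_at'), doc_ids=[doc_id])
    assert db.get(doc_id=doc_id)['created_at'] == stamp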
def update(self):
    update = self.as_dict()
    for arg in self._deleted_args:
        try:
            self.db.update(delete(arg), eids=[self.eid.value])
        except:
            pass
    return self.db.update(update, eids=[self.eid.value])
def delete(self, key):
    result = False
    try:
        self.db.update(delete(key), doc_ids=[self.doc_id])
        result = True
    except Exception as e:
        LOG.exception(e)
    return result
def upsert(self, note, cond):
    orig = self.select(cond)
    if orig:
        # for key in (set(note.keys()) - set(orig.keys())):
        #     self.db.table('notes').update(delete(key), cond)
        for key in (set(orig.keys()) - set(note.keys())):
            self.db.table('notes').update(delete(key), doc_ids=[orig.doc_id])
    return self.db.table('notes').upsert(note, cond)
def return_a_book(book_id, returned_date=None, cancel=False):
    if cancel:
        books.update(delete('returned_date'), Book.id == book_id)
        return
    if not returned_date:
        returned_date = datetime.now().strftime(EXPRESSION)
    books.update({'returned_date': returned_date}, Book.id == book_id)
def caution_reformat_db_to_shallow_dict(self):
    def reformat_a_user(usr):
        value_keys = ["user", "idade", "ano", "sexo", "jogada"]
        shallow_content = {key: usr["value"][key] for key in value_keys}
        return shallow_content

    users = self.find_all_users()
    for user in users:
        self.banco.update(reformat_a_user(user), eids=[user.eid])
        self.banco.update(delete('value'), eids=[user.eid])
def remove(self, isbn):
    tmp = Query()
    resp = raw_input('Delete \n {} \n ? (y/n)'.format(
        tabulate(self.db.search(tmp.ISBN == isbn), headers='keys')))
    resp = resp.lower()
    if resp == 'y':
        for i in ['Publisher', 'Title', 'Authors', 'Year', 'Date Added', 'Language', 'ISBN']:
            self.db.update(delete(i), tmp.ISBN == isbn)
        print 'Deleted'
    elif resp == 'n':
        print 'Spared'
def updateResource(self, resource):
    ri = resource.ri
    with self.lockResources:
        self.tabResources.update(resource.json, Query().ri == ri)
        # remove nullified fields from db and resource
        for k in list(resource.json):
            if resource.json[k] is None:
                self.tabResources.update(delete(k), Query().ri == ri)
                del resource.json[k]
        return resource
async def reset_key(guild_id, key):
    Guilds = Query()
    result = core.caches.static.guilds_db_cache.search(where("guild_id") == guild_id)
    if result:
        saved_data = result if result[0].get(f"{key}", None) else None
        if saved_data:
            core.caches.static.guilds_db_cache.update(delete(f"{key}"), Guilds.guild_id == guild_id)
            return saved_data[0] if saved_data[0].get(f"{key}", None) else None
        else:
            return None
    else:
        return None
def updateResource(self, resource: Resource) -> Resource:
    # Logging.logDebug(resource)
    with self.lockResources:
        ri = resource.ri
        self.tabResources.update(resource.dict, Query().ri == ri)  # type: ignore
        # remove nullified fields from db and resource
        # TODO remove Null values recursively
        for k in list(resource.dict):
            if resource.dict[k] is None:
                self.tabResources.update(delete(k), Query().ri == ri)  # type: ignore
                del resource.dict[k]
        return resource
def clean_database():
    """
    Cleans every entry in the songs database to match the Songs datamodel.

    :returns: Renders the master songs page.
    """
    all_songs = song_db.all()
    for s in all_songs:
        for k, v in s.items():
            if k == "id":
                pass
            elif k not in song_datamodel:
                song_db.update(delete(k), ids=[s.id])
    return redirect("/songs/")
def unset(self, fields, keys, table_name=None, match_any=False):
    """Update records by unsetting fields.

    Update only allows you to update a record by adding new fields or overwriting
    existing fields.  Use this method to remove a field from the record.

    The behavior depends on the type of `keys`:
        * self.Record.eid_type: update the record with that element identifier.
        * dict: update all records with attributes matching `keys`.
        * list or tuple: apply update to all records matching the elements of `keys`.

    Args:
        fields (list): Names of fields to remove from matching records.
        keys: Fields or element identifiers to match.
        table_name (str): Name of the table to operate on.  See :any:`AbstractDatabase.table`.
        match_any (bool): Only applies if `keys` is a dictionary.  If True then any key in
            `keys` may match or if False then all keys in `keys` must match.

    Raises:
        ValueError: ``bool(keys) == False`` or invalid value for `keys`.
    """
    table = self.table(table_name)
    if isinstance(keys, self.Record.eid_type):
        for field in fields:
            #LOGGER.debug("%s: unset(%s, eid=%r)", table_name, field, keys)
            table.update(operations.delete(field), eids=[keys])
    elif isinstance(keys, dict):
        for field in fields:
            #LOGGER.debug("%s: unset(%s, keys=%r)", table_name, field, keys)
            table.update(operations.delete(field), self._query(keys, match_any))
    elif isinstance(keys, (list, tuple)):
        for field in fields:
            #LOGGER.debug("%s: unset(%s, eids=%r)", table_name, field, keys)
            table.update(operations.delete(field), eids=keys)
    else:
        raise ValueError(keys)
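# For comparison, a self-contained sketch of the same field-unsetting pattern against
# plain TinyDB. The table contents and the unset_fields() helper below are illustrative
# assumptions, not code from the project above.
from tinydb import TinyDB, Query
from tinydb.operations import delete
from tinydb.storages import MemoryStorage

db = TinyDB(storage=MemoryStorage)
db.insert({'name': 'run1', 'tmp_path': '/tmp/x', 'retries': 3})
db.insert({'name': 'run2', 'tmp_path': '/tmp/y', 'retries': 1})


def unset_fields(table, fields, **match):
    """Remove each field in `fields` from every document whose attributes match `match`."""
    cond = None
    for key, value in match.items():
        clause = Query()[key] == value
        cond = clause if cond is None else (cond & clause)
    for field in fields:
        # .exists() limits the update to documents that actually carry the field.
        table.update(delete(field), cond & Query()[field].exists())


unset_fields(db, ['tmp_path', 'retries'], name='run1')
print(db.all())  # [{'name': 'run1'}, {'name': 'run2', 'tmp_path': '/tmp/y', 'retries': 1}]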
def updateResource(self, resource: Resource) -> Resource:
    # L.logDebug(resource)
    with self.lockResources:
        ri = resource.ri
        self.tabResources.update(resource.dict, self.resourceQuery.ri == ri)
        # remove nullified fields from db and resource
        for k in list(resource.dict):
            if resource.dict[k] is None:  # only remove the real None attributes, not those with 0
                self.tabResources.update(delete(k), self.resourceQuery.ri == ri)  # type: ignore [no-untyped-call]
                del resource.dict[k]
        return resource
def delete_sheet_music(id):
    """
    Deletes the sheet music from the song.

    .. note::
        The file on disk is not actually removed; only the file path recorded in the
        database is. This is an intentional choice during development, to prevent data
        losses from happening. Before going into production, this should be changed.

    .. todo:: Make sure to fix the problem indicated in the note above.

    :param id: The id of the song to remove sheet music from.
    :type id: int
    """
    song_db.update(delete("sheet_music"), ids=[id])
    return redirect(f"/songs/{id}")
def save_tournament_in_db(self, serialized_info, update=False):
    tournament_name = serialized_info["tournament_data"]["tournament_info"]["name"]
    if self.tournaments.search(
            self.info["tournament_data"]["tournament_info"]["name"] == tournament_name):
        if update is True:
            self.tournaments.update(
                delete("tournament_data"),
                self.info["tournament_data"]["tournament_info"]["name"] == tournament_name)
            self.tournaments.insert(serialized_info)
        else:
            raise Warning("Tournament already saved in the database.")
    else:
        self.tournaments.insert(serialized_info)
def insert_ratings_db(stock_symbol, data_dict):
    stock_firstLetter = stock_symbol[0]
    if stock_firstLetter.isalpha():
        stock_firstLetter = stock_firstLetter.lower()
    else:
        stock_firstLetter = ''

    dbFilePath = str(dbPath) + '/db/ratingsDB_' + stock_firstLetter + '.json'
    print('dbFilePath: ', dbFilePath)
    ratings_db = TinyDB(dbFilePath)

    print("record to be inserted: ", data_dict)
    print("first remove... ")
    ratings_db.update(delete('stockSymbol'), where('stockSymbol') == stock_symbol)
    print("... then insert ")
    ratings_db.insert_multiple(data_dict)
    return
def prom(self, TgID, token):
    record = Query()
    user = Query()
    current = self.dbU.search(user.TgID == TgID)
    retval = -1
    if current[0]["attempts"] != 0:
        if self.dbC.contains(record.token == token):
            self.dbC.update(delete('token'), record.token == token)
            self.dbU.update(set('account_status', 1), user.TgID == TgID)
            retval = True
        else:
            self.dbU.update(set('attempts', current[0]["attempts"] - 1), user.TgID == TgID)
            if current[0]["attempts"] == 1:
                self.dbU.update(set('account_status', -1), user.TgID == TgID)
            retval = current[0]["attempts"] - 1
    return retval
def check_duplicate(self):
    '''
    used to mark all duplicated entries
    :return:
    '''
    try:
        self.update(delete("similarto"), all)
    except KeyError:
        pass
    num = len(self.all())
    for i in range(num):
        mainjson = self.all()[i]
        try:
            temp = mainjson["duplicate"]
        except KeyError:
            fullitem = json.dumps(mainjson)
            for j in range(num)[i + 1:]:
                currentjson = self.all()[j]
                currentitem = json.dumps(currentjson)
                if fullitem == currentitem:
                    self.update({"duplicate": "yes"}, doc_ids=[currentjson.doc_id])
def save(self):
    """ Method of saving data in JSON format with TinyDB """
    data = self.__dict__.copy()
    q = Query()
    # Remove the old list of the tournament's players so it can be refreshed afterwards.
    self.table_tournoi.update(delete('players'), q.id == self.id)

    # Serialize the player data
    d_players = []
    for player in data['rounds'][0].players:
        player_data = player.uuid, player.point, player.has_met
        d_players.append(player_data)
    data['players'] = d_players

    # Serialize the round data
    del data["rounds"]
    data_round = []
    for round in self.rounds:
        r = {'id': round.id,
             'name': round.name,
             'number': round.number,
             'start': round.start,
             'end': round.end}
        data_round.append(r)

        data_matches = []
        for match in round.matches:
            if isinstance(match.score, list):
                match_serialize = ([match.players[0].uuid, match.players[1].uuid],
                                   [match.score[0], match.score[1]])
            else:
                match_serialize = ([match.players[0].uuid, match.players[1].uuid], None)
            data_matches.append(match_serialize)
        data_round.append({"matches": data_matches})

    data["rounds"] = data_round
    self.table_tournoi.upsert(data, q.id == self.id)
    return self
def delete(self, id_, key):
    print("------------------------------")
    print(f"Attempting configuration key delete for object {self.id_string}\n"
          f"ID: {id_}\n"
          f"Key: {key}")
    result = self.data.search(where(self.id_string) == id_)
    if not result:
        print("Failed to delete key in configuration\n"
              "Configuration does not exist")
        return
    saved = result[0].get(key)
    if saved:
        self.data.update(delete(key), where(self.id_string) == id_)
        print("Deleted key of configuration for object")
        if self.cache.get(id_):
            self.cache.pop(id_)
            print("Removed configuration from cache")
        return saved  # return what was deleted
    else:
        print("Failed to delete key in configuration\n"
              "Key in configuration does not exist")
        return
    'key3': 560,
    'key4': ['a', 'b', 'c'],
    'key5': 4.2379
}   # key2 missing

ids = []
for i in range(100):
    ids.append(db.insert(rec))
print('Length of test3.json: ', len(db))

# make non uniform key
modids = [ids[3], ids[12]]  # 'randomly' pick 3rd, and 12th doc_ids to mess with
print('modifying ids: ', modids)
db.update({'key1': 5}, doc_ids=modids)  # key1 is now not uniform type
print('deleting key2 from ', ids[14], ids[18])
db.update(delete('key2'), doc_ids=[ids[14], ids[18]])

####################################################################
#
# Test 5: some records have both missing keys and extra keys
#
dbfname = 'test5.json'
#dbfname = 'testdb.json'
db = TinyDB(dbfname)
for r in db:
    db.remove(q.key1.exists())  # clear old copy if any

rec = {
    'key1': 'John Smith',
    'key2': '127',
def cs():
    flightname = request.form['flightname']
    dbase.update(delete('flightname'), where('flightname') == flightname)
    return render_template('cancelled_msg.html')
def clean_invalid(self):
    for key in ['data', 'data_long', 'carb', 'bolus', 'basal']:
        self.db.update(delete(key), (where('valid') == False) & where(key).exists())
    self.db.storage.flush()
def test_delete(db):
    db.update(delete('int'), where('char') == 'a')
    assert 'int' not in db.get(where('char') == 'a')
from tinydb import TinyDB, Query
from tinydb.operations import delete

db = TinyDB('database.json')
User = Query()

db.update(delete('age'), User.age.exists())

db.insert({'name': 'John', 'age': 22})
db.insert({'name': 'Johnny', 'age': 7})
db.insert({'name': 'Bob', 'age': 3})

print(db.search(User.name == 'John'))
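# The User.age.exists() condition above is not cosmetic: delete() removes the key with a
# plain `del`, so applying it to a document that lacks the field raises KeyError. A minimal,
# self-contained illustration (assuming stock TinyDB):
from tinydb import TinyDB, Query
from tinydb.operations import delete
from tinydb.storages import MemoryStorage

db = TinyDB(storage=MemoryStorage)
User = Query()
db.insert({'name': 'Ann'})                   # document without an 'age' field

try:
    db.update(delete('age'))                 # unconditional: transform runs on every document
except KeyError:
    print("delete('age') hit a document that has no 'age' field")

db.update(delete('age'), User.age.exists())  # safe: only documents with 'age' are touched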
def delete(self, source_id):
    db.update(delete('log'), Log.source.source_id == source_id)
    db.update(delete('source'), Log.source.source_id == source_id)
    return {'status': 'success'}