class DB:
    """Thin wrapper around a TinyDB file holding track records."""

    def __init__(self, db_path):
        self.db = TinyDB(db_path)

    def add(self, data):
        """Insert *data* unless a track with the same display_id exists."""
        Track = Query()
        already_there = self.db.get(Track.display_id == data['display_id'])
        if already_there:
            return None
        return self.db.insert(data)

    def searchById(self, video_id):
        """Return the track whose display_id equals *video_id*, or None."""
        Track = Query()
        return self.db.get(Track.display_id == video_id)

    def search(self, text):
        """Case-insensitive regex search over title and description."""
        pattern = re.compile(text, re.IGNORECASE)

        def matches(value):
            return pattern.search(value)

        Track = Query()
        condition = Track.title.test(matches) | Track.description.test(matches)
        return self.db.search(condition)

    def all(self):
        """Return every stored record."""
        return self.db.all()
class InventoryManagement(object):
    """CRUD helpers over the inventory TinyDB file.

    Note the inverted naming kept from the original: ``add`` means one unit
    leaves stock (quantity goes DOWN); ``delete`` puts a unit back.
    """

    def __init__(self):
        self.db = TinyDB('db/inventory.json', indent=4, separators=(',', ': '))

    def add(self, item_name):
        """Decrement the stock count of *item_name* by one (no-op if unknown).

        BUG FIX: the original mutated the in-memory Document returned by
        ``db.get`` (and stuffed the whole record back under an "item" key),
        which never persisted anything.  Issue a real ``db.update`` instead.
        """
        Item = Query()
        item = self.db.get(Item.item == item_name)
        if item:
            self.db.update({"total_qty": item["total_qty"] - 1},
                           Item.item == item_name)

    def delete(self, item_name):
        """Increment the stock count of *item_name* by one (no-op if unknown).

        Same persistence fix as :meth:`add`.
        """
        Item = Query()
        item = self.db.get(Item.item == item_name)
        if item:
            self.db.update({"total_qty": item["total_qty"] + 1},
                           Item.item == item_name)

    def get_item_or_none(self, item_name):
        """Return ``(total_qty, category, price)`` for *item_name*, or None."""
        Item = Query()
        item = self.db.get(Item.item == item_name)
        if item:
            return item["total_qty"], item["category"], item["price"]
        return None
class DB:
    """Stores arbitrary JSON resources keyed by (resourceName, resourceId)."""

    def __init__(self):
        self.db = TinyDB('db.json')

    def saveResource(self, resourceName, resourceId, jsonResource):
        """Upsert *jsonResource* under the given name/id pair."""
        search = Query()
        key = ((search._resourceName == resourceName)
               & (search._resourceId == resourceId))
        # replace-by-delete keeps exactly one record per key
        if self.db.get(key):
            self.deleteResource(resourceName, resourceId)
        jsonResource["_resourceName"] = resourceName
        jsonResource["_resourceId"] = resourceId
        self.db.insert(jsonResource)

    def findResource(self, resourceName, resourceId):
        """Return the stored resource without its internal keys, or None."""
        search = Query()
        found = self.db.get((search._resourceName == resourceName)
                            & (search._resourceId == resourceId))
        if found:
            found.pop("_resourceName")
            found.pop("_resourceId")
        return found

    def deleteResource(self, resourceName, resourceId):
        """Remove every record matching the name/id pair."""
        search = Query()
        self.db.remove((search._resourceName == resourceName)
                       & (search._resourceId == resourceId))
def is_vip(author):
    """Return True iff *author*'s email appears in the VIP database.

    BUG FIX: ``TinyDB.get`` returns None on a miss instead of raising, so
    the original ``try/except`` unconditionally returned True.  Test the
    result explicitly, and close the handle in all cases.
    """
    vip_db = TinyDB("dbs/vips.json")
    try:
        return vip_db.get(where("email") == author) is not None
    finally:
        vip_db.close()
def test_upsert_by_id(db: TinyDB):
    """upsert(Document(..., doc_id=...)) updates an existing id, inserts a
    missing one, and raises when neither doc_id nor a query identifies
    the target."""
    assert len(db) == 3

    # Upserting a Document whose doc_id already exists updates in place.
    existing = Document({'char': 'v'}, doc_id=1)
    assert db.upsert(existing) == [1]
    updated = db.get(where('char') == 'v')
    assert updated is not None
    assert updated.doc_id == 1
    assert len(db) == 3

    # Upserting a Document with an unknown doc_id inserts it under that id.
    fresh = Document({'int': 5, 'char': 'w'}, doc_id=5)
    assert db.upsert(fresh) == [5]
    inserted = db.get(where('char') == 'w')
    assert inserted is not None
    assert inserted.doc_id == 5
    assert len(db) == 4

    # A plain dict with no query must raise: nothing identifies the target.
    with pytest.raises(ValueError, match=r"(?=.*\bdoc_id\b)(?=.*\bquery\b)"):
        db.upsert({'no_Document': 'no_query'})

    # The id counter must account for the explicit doc_id=5 insert above.
    assert db.insert({'check': '_next_id'}) == 6
class DB:
    """TinyDB-backed store of downloaded track metadata."""

    def __init__(self, db_path):
        self.db = TinyDB(db_path)

    def add(self, data):
        """Insert *data* only when its display_id is not yet present."""
        Track = Query()
        if self.db.get(Track.display_id == data['display_id']) is None:
            return self.db.insert(data)

    def searchById(self, video_id):
        """Fetch a single track by its display_id (None when absent)."""
        return self.db.get(Query().display_id == video_id)

    def search(self, text):
        """Search titles and descriptions with a case-insensitive regex."""
        rx = re.compile(text, re.IGNORECASE)

        def hit(value):
            return rx.search(value)

        Track = Query()
        return self.db.search(Track.title.test(hit) | Track.description.test(hit))

    def all(self):
        """Every record in the database."""
        return self.db.all()
class GNGraphConfigModel:
    """Persistence for the single gngraph config document (doc_id == 1)."""

    query = Query()

    def __init__(self, db_path):
        dbpath = os.path.join(db_path, 'gngraph_config_settings.json')
        self._db = TinyDB(dbpath)

    def req_fields_json(self, dict_result):
        """Project *dict_result* down to the fields we persist."""
        req_items = ['sfmode', 'dbmode']
        return {key: value for key, value in dict_result.items()
                if key in req_items}

    def search_op(self, req_dict):
        # The argument is unused: the config always lives at doc_id 1.
        return self.search_res(1) is not None

    def search_res(self, id):
        """Return the document stored under doc_id *id*, or None."""
        return self._db.get(doc_id=id)

    def insert_op(self, req_dict):
        """Insert the config on first use; otherwise delegate to update_op."""
        if not self.search_op(1):
            self._db.insert(req_dict)
            return self._db.all()
        self.update_op(req_dict)
        return "None_Insert"

    def delete_op(self, req_dict):
        """Remove the record matching req_dict's serverIP, if any exists."""
        if self.search_op(req_dict):
            self._db.remove(where('serverIP') == req_dict['serverIP'])
            return self._db.all()
        return "None_Delete"

    def get_op(self):
        return self._db.get(doc_id=1)

    def update_op(self, req_dict):
        """Update sfmode/dbmode on the config document; False when absent.

        BUG FIX: the class previously defined ``update_op`` twice — the
        first (dead) version referenced an undefined ``doc_id`` name, and
        the surviving one filtered on a ``query.id`` field the documents
        don't carry.  Target the document by its real doc_id instead.
        """
        if not self.search_res(1):
            return False
        self._db.update(
            {'sfmode': req_dict['sfmode'], 'dbmode': req_dict['dbmode']},
            doc_ids=[1])
        return self._db.all()

    def stop_db(self):
        self._db.close()
class TinyDBStore(object):
    """Two TinyDB files: in-progress event drafts and published events."""

    def __init__(self):
        self.drafts_db = TinyDB('event_drafts.json')
        self.events_db = TinyDB('events.json')

    # Drafts

    def contains_draft(self, user_id):
        return self.drafts_db.contains(Query().user_id == user_id)

    def new_draft(self, user_id):
        """Start a fresh draft, discarding any previous one for the user."""
        if self.contains_draft(user_id):
            self.drafts_db.remove(Query().user_id == user_id)
        self.drafts_db.insert({
            'user_id': user_id,
            'current_field': 0,
            'event': {}
        })

    def update_draft(self, user_id, event, current_field):
        self.drafts_db.update(
            {'user_id': user_id, 'current_field': current_field,
             'event': event},
            Query().user_id == user_id)

    def get_draft(self, user_id):
        return self.drafts_db.get(Query().user_id == user_id)

    def remove_draft(self, user_id):
        self.drafts_db.remove(Query().user_id == user_id)

    # Events

    def insert_event(self, event):
        """Insert *event* and echo it back with its new id attached."""
        event_id = self.events_db.insert(event)
        event['id'] = event_id
        return event

    def update_event(self, event):
        # FIX: ``eids=`` / ``.eid`` belong to the TinyDB 3 API, removed in
        # TinyDB 4; the modern spellings are ``doc_ids=`` / ``.doc_id``.
        self.events_db.update(event, doc_ids=[event.doc_id])

    def remove_event(self, event):
        self.events_db.remove(doc_ids=[event.doc_id])

    def get_events(self, user_id, name=None):
        """Events owned by *user_id* (optionally name-filtered) plus open
        invites (records with invite == 'yes')."""
        mine = Query().user_id == user_id
        invited = Query().invite == 'yes'
        if name:
            named = Query().name.test(lambda v: name in v)
            return self.events_db.search((mine & named) | invited)
        return self.events_db.search(mine | invited)

    def get_event(self, event_id):
        return self.events_db.get(doc_id=int(event_id))
class DB:
    """Thread-safe (single class-level lock) TinyDB of nauta accounts."""

    Lock = threading.Lock()

    def __init__(self, path: Optional[str] = None):
        if path is None:
            path = os.path.join(CONFIG['DEFAULT_CONFIG_DIR'], 'nauta_db.json')
        self._db = TinyDB(path, storage=CachingMiddleware(JSONStorage), indent=2)
        self._query = Query()

    def __contains__(self, key):
        return self._db.contains(self._query.username == key)

    def __iter__(self):
        for doc in self._db:
            yield doc

    def get(self, key: str, default=None):
        """Record for username *key*, or *default* when missing.

        BUG FIX: the original unconditionally overwrote the preset
        ``default`` with the (possibly None) lookup result, so the
        parameter was never honored.
        """
        with self.Lock:
            r = self._db.get(self._query.username == key)
        return r if r is not None else default

    def get_by_alias(self, key: str, default=None):
        """Record whose alias is *key*, or *default* when missing."""
        with self.Lock:
            r = self._db.get(self._query.alias == key)
        return r if r is not None else default

    def get_aliases(self):
        """All records carrying a non-empty alias.

        BUG FIX: the conditions were combined with Python's ``and``, which
        just evaluates to the right-hand QueryInstance and silently dropped
        the ``alias != ''`` test; TinyDB queries compose with ``&``.
        """
        with self.Lock:
            return self._db.search((self._query.alias != '')
                                   & (self._query.alias != None))  # noqa: E711 — TinyDB query, not identity test

    def set(self, key: str, data: dict):
        with self.Lock:
            self._db.update(data, self._query.username == key)

    def set_by_alias(self, key: str, data: dict):
        with self.Lock:
            self._db.update(data, self._query.alias == key)

    def insert(self, key: str, data: dict):
        # NOTE(review): *key* is unused here (kept for interface symmetry);
        # callers are expected to include the username inside *data*.
        with self.Lock:
            self._db.insert(data)

    def remove(self, key: str):
        with self.Lock:
            self._db.remove(self._query.username == key)

    def remove_by_alias(self, key: str):
        with self.Lock:
            self._db.remove(self._query.alias == key)
class Registration:
    """Per-chat registration and prize-drawing bookkeeping in TinyDB."""

    def __init__(self):
        if not os.path.exists('data/'):
            os.makedirs('data')
        self.database = TinyDB('data/registrations.json')

    def register(self, chat, user):
        """Add *user* to *chat*'s roster, creating the chat record if new."""
        Chat = Query()
        if self.database.contains(Chat.id == chat.id):
            self.database.update(self.__register_in_chat(user.id),
                                 Chat.id == chat.id)
        else:
            chat_item = {'id': chat.id, 'users': {}}
            self.__register_in_chat(user.id)(chat_item)
            self.database.insert(chat_item)

    def get_registered_users_for_chat(self, chat):
        """The chat's user map, or None when the chat is unknown."""
        Chat = Query()
        record = self.database.get(Chat.id == chat.id)
        return record['users'] if record else None

    def is_user_registered(self, chat, user):
        Chat = Query()
        record = self.database.get(Chat.id == chat.id)
        return record is not None and str(user.id) in record['users']

    def reward_user(self, chat, user, name):
        """Record a win for *user* under display *name*."""
        Chat = Query()
        self.database.update(self.__reward(user, name), Chat.id == chat.id)

    def get_last_drawing_time(self, chat):
        """Epoch seconds of the last drawing, or 0 when never drawn."""
        Chat = Query()
        record = self.database.get(Chat.id == chat.id)
        if record and 'last_drawing_time' in record:
            return record['last_drawing_time']
        return 0

    def get_last_winner(self, chat):
        """Display name of the last winner, or '' when never drawn."""
        Chat = Query()
        record = self.database.get(Chat.id == chat.id)
        if record and 'last_winner' in record:
            return record['last_winner']
        return ''

    def __reward(self, user, name):
        """TinyDB transform: bump the user's win count and stamp the draw."""
        def transform(element):
            element['last_drawing_time'] = time()
            element['last_winner'] = name
            element['users'][str(user.id)]['wins'] += 1
        return transform

    def __register_in_chat(self, user_id):
        """TinyDB transform: add *user_id* to the chat's user map.

        BUG FIX: the key was stored as an int while every reader
        (``is_user_registered``, ``__reward``) looks it up as
        ``str(user.id)``; the two only matched after a JSON round-trip
        stringified the key.  Store strings from the start.
        """
        def transform(element):
            element['users'][str(user_id)] = {'wins': 0}
        return transform
class UsersDAO:
    """User accounts plus cached per-user expense/portfolio DAOs."""

    def __init__(self, dbpath):
        self.dbpath = dbpath
        self.db = TinyDB(os.path.join(dbpath, 'users.json'))
        if len(self.db) == 0:
            # seed two demo accounts on first run
            self.db.insert_multiple([
                {'username': '******', 'password': '******', 'currsym': '₹'},
                {'username': '******', 'password': '******', 'currsym': '$'},
            ])
        self.expdao_cache = {}
        self.portdao_cache = {}

    def get(self, username):
        """Record for *username* with the password field stripped, or None."""
        record = self.db.get(Query().username == username)
        if record:
            del record['password']
        return record

    def verify(self, form):
        """Check form credentials; return the sanitized user or False.

        NOTE(review): passwords are stored and compared in plain text —
        consider hashing.  Behavior left unchanged here.
        """
        candidate = self.db.get(Query().username == form['username'])
        if candidate and candidate['password'] == form['password']:
            return self.get(candidate['username'])
        return False

    def get_expdao(self, user):
        """Cached ExpensesDAO for *user* (one JSON file per username)."""
        username = user['username']
        if username not in self.expdao_cache:
            path = os.path.join(self.dbpath, 'expenses', '%s.json' % username)
            self.expdao_cache[username] = ExpensesDAO(path)
        return self.expdao_cache[username]

    def get_portdao(self, user):
        """Cached PortfolioDAO for *user* (one JSON file per username)."""
        username = user['username']
        if username not in self.portdao_cache:
            path = os.path.join(self.dbpath, 'portfolios', '%s.json' % username)
            self.portdao_cache[username] = PortfolioDAO(path)
        return self.portdao_cache[username]
class TinyDBStore(object):
    """Draft and event persistence over two TinyDB files."""

    def __init__(self):
        self.drafts_db = TinyDB('event_drafts.json')
        self.events_db = TinyDB('events.json')

    # Drafts

    def contains_draft(self, user_id):
        return self.drafts_db.contains(Query().user_id == user_id)

    def new_draft(self, user_id):
        """Begin a clean draft for *user_id*, replacing any existing one."""
        if self.contains_draft(user_id):
            self.drafts_db.remove(Query().user_id == user_id)
        self.drafts_db.insert({
            'user_id': user_id,
            'current_field': 0,
            'event': {}
        })

    def update_draft(self, user_id, event, current_field):
        self.drafts_db.update({
            'user_id': user_id,
            'current_field': current_field,
            'event': event
        }, Query().user_id == user_id)

    def get_draft(self, user_id):
        return self.drafts_db.get(Query().user_id == user_id)

    def remove_draft(self, user_id):
        self.drafts_db.remove(Query().user_id == user_id)

    # Events

    def insert_event(self, event):
        """Insert *event* and return it with its assigned id under 'id'."""
        event_id = self.events_db.insert(event)
        event['id'] = event_id
        return event

    def update_event(self, event):
        # BUG FIX: used the removed TinyDB 3 keyword (``eids=``) and read
        # ``event.eid`` although insert_event stores the id under
        # ``event['id']`` on a plain dict (an AttributeError at runtime).
        # Use doc_ids and the dict key, consistent with remove_event.
        self.events_db.update(event, doc_ids=[event['id']])

    def remove_event(self, event):
        self.events_db.remove(doc_ids=[event['id']])

    def get_events(self, user_id, name=None):
        """All events of *user_id*, optionally filtered by name substring."""
        if name:
            return self.events_db.search(
                (Query().user_id == user_id)
                & (Query().name.test(lambda v: name in v)))
        return self.events_db.search(Query().user_id == user_id)

    def get_event(self, event_id):
        return self.events_db.get(doc_id=int(event_id))
def DatabaseCleanup(self, response):
    """Scrapy callback: copy the queued product at doc_id ``self.storage``
    into the main DB once its price has been scraped, then chain a request
    for the next queued item.

    NOTE(review): reconstructed from a single-line original; the nesting of
    the upsert inside the price match is the most plausible reading — confirm
    against history.
    """
    db = TinyDB('DBFiles/KithDB.json')
    dbQ = TinyDB('DBFiles/DatabaseQueue.json')
    selected = dbQ.get(doc_id=self.storage)
    items = response.xpath('descendant-or-self::span').extract()
    Q = Query()
    for item in items:
        # the price lives in the span carrying data-product-price
        if 'data-product-price' in item:
            selected['price'] = cleanhtml(item)
            db.upsert(selected, Q.href == selected['href'])
            #self.log("adding "+str(selected)+" into main db")
    self.storage += 1
    # follow the next queued href, if any remain
    if self.storage <= len(dbQ) and dbQ.get(doc_id=self.storage) != None and dbQ.get(doc_id=self.storage)['href'] != None:
        yield scrapy.Request(url=str("https://"+dbQ.get(doc_id=self.storage)['href']), callback=self.DatabaseCleanup)
class TinyDBGateway(AbstractJSONStorageGateway):
    """JSON-storage gateway over one TinyDB table, keyed by generated UUIDs."""

    def __init__(self, file_path: str, table_name: str = "_default") -> None:
        self.table = TinyDB(file_path).table(table_name)

    def create(self, data: dict, max_retries: int = 10) -> dict:
        """Insert *data* under a freshly generated unique uuid."""
        with transaction(self.table) as tr:
            for _ in range(max_retries):
                candidate = str(uuid4())
                # retry on the (vanishingly unlikely) uuid collision
                if self.table.contains(where('uuid') == candidate):
                    continue
                data.update(uuid=candidate)
                tr.insert(data)
                return data
            raise StorageError('could not set unique UUID')

    def list_(self) -> list:
        """Every record in the table."""
        return self.table.all()

    def retrieve(self, uuid: str) -> dict:
        """Fetch by uuid; raise NoResultFound when absent."""
        record = self.table.get(where('uuid') == uuid)
        if not record:
            raise NoResultFound('object do not exist')
        return record

    def update(self, uuid: str, data: dict):
        """Merge *data* into the record with *uuid*; raise when absent."""
        with transaction(self.table) as tr:
            if not self.table.get(where('uuid') == uuid):
                raise NoResultFound('object do not exist')
            tr.update(data, where('uuid') == uuid)

    def delete(self, uuid: str):
        """Remove the record with *uuid*; raise when absent."""
        with transaction(self.table) as tr:
            if not self.table.get(where('uuid') == uuid):
                raise NoResultFound('object do not exist')
            tr.remove(where('uuid') == uuid)

    def purge(self):
        """Drop every record in the table."""
        self.table.purge()

    def search(self, conditions: dict):
        """AND-combined equality search over *conditions* key/value pairs."""
        clauses = [where(key) == value for key, value in conditions.items()]
        return self.table.search(reduce(lambda acc, clause: acc & clause,
                                        clauses))
class UserStorage:
    """Per-user password-generator settings in the 'users' table."""

    def __init__(self):
        self.users = TinyDB('db.json', indent=4).table("users")
        self.user = Query()

    def exist(self, user_id: int) -> bool:
        """True iff a settings record exists for *user_id*."""
        return bool(self.users.search(self.user.user_id == user_id))

    def create(self, user_id: int) -> int:
        """Insert a record with default settings; return its doc_id."""
        return self.users.insert(
            dict(user_id=user_id,
                 pass_len=DEFAULT_PASS_LEN,
                 pass_count=DEFAULT_PASS_COUNT,
                 allow_numbers=DEFAULT_ALLOW_NUMBERS,
                 allow_lowercase=DEFAULT_ALLOW_LOWERCASE,
                 allow_uppercase=DEFAULT_ALLOW_UPPERCASE,
                 allow_spec=DEFAULT_ALLOW_SPEC))

    def get(self, user_id: int) -> dict:
        """Settings record for *user_id* (IndexError when absent)."""
        return self.users.search(self.user.user_id == user_id)[0]

    def update(self, field: str, val: Any, user_id: int):
        """Set a single settings *field* to *val* for *user_id*.

        BUG FIX: the original called ``set(field, val)`` — the builtin
        ``set`` with two positional arguments, a TypeError at runtime
        (the intent was ``tinydb.operations.set``).  A plain dict update
        does the same thing without the shadowing hazard.
        """
        self.users.update({field: val}, self.user.user_id == user_id)

    def get_or_create(self, user_id: int) -> dict:
        """Fetch the user's settings, creating defaults on first contact."""
        if self.exist(user_id):
            return self.get(user_id)
        # Create user if not exist
        doc_id = self.create(user_id)
        return self.users.get(doc_id=doc_id)
class users_db():
    """TinyDB store of users; ``status`` round-trips through its enum value."""

    db = None
    query = Query()

    def __init__(self, path):
        self.db = TinyDB(path)

    def get_user(self, _id):
        """Rebuild a ``user`` object from the stored record, or None."""
        json_data = self.db.get(self.query.user_id == _id)
        if json_data is None:
            return None
        response = user(json_data["user_id"], json_data["name"])
        response.group = json_data["group"]
        response.status = status(json_data["status"])
        return response

    def add_user(self, instance):
        """Persist a new user; the status enum is stored by value."""
        self.db.insert({
            "user_id": instance.user_id,
            "name": instance.name,
            "group": instance.group,
            "status": instance.status.value
        })

    def del_user(self, _id):
        self.db.remove(self.query.user_id == _id)

    def update(self, instance):
        """Rewrite the stored record for *instance*.

        BUG FIX: unlike ``add_user`` this stored the raw ``status`` enum,
        which the JSON storage cannot serialize and which ``get_user``
        then re-wraps incorrectly.  Persist ``status.value`` consistently.
        """
        self.db.update(
            {
                "user_id": instance.user_id,
                "name": instance.name,
                "group": instance.group,
                "status": instance.status.value
            }, self.query.user_id == instance.user_id)
def test_process(tmpdir, capsys, side_effect, success):
    # End-to-end check of process(): a queued path is handed to a (mocked)
    # subprocess, its stdout is captured to a per-accession log file, and
    # the outcome is recorded in the TinyDB database.
    # NOTE(review): reconstructed from a single-line original; the exact
    # extent of the `with patch` block is the most plausible reading.
    db_fn = tmpdir.join('database.json')
    log_dir = tmpdir.mkdir('log')
    db = TinyDB(db_fn.strpath)
    process_me = '/my/path/A12345'
    accession_number = op.basename(process_me)
    # value 42 is an arbitrary placeholder payload for the queued path
    paths2process = {process_me: 42}
    with patch('subprocess.Popen') as mocked_popen:
        stdout = b"INFO: PROCESSING STARTS: {'just': 'a test'}"
        mocked_popen_instance = mocked_popen.return_value
        mocked_popen_instance.side_effect = side_effect
        mocked_popen_instance.communicate.return_value = (stdout, )
        # set return value for wait
        mocked_popen_instance.wait.return_value = 1 - success
        # mock also communicate to get the supposed stdout
        process(paths2process, db, wait=-30, logdir=log_dir.strpath)
        out, err = capsys.readouterr()
        log_fn = log_dir.join(accession_number + '.log')
        mocked_popen.assert_called_once()
        # the subprocess stdout must have been mirrored into the log file
        assert log_fn.check()
        assert log_fn.read() == stdout.decode('utf-8')
        assert db_fn.check()
        # dictionary should be empty
        assert not paths2process
        assert out == 'Time to process {0}\n'.format(process_me)
        # check what we have in the database
        Path = Query()
        query = db.get(Path.input_path == process_me)
        assert len(db) == 1
        assert query
        assert query['success'] == success
        assert query['accession_number'] == op.basename(process_me)
        assert query['just'] == 'a test'
def modSource(ID, field, newValue):
    """Set *field* = *newValue* on the source record whose ID matches.

    BUG FIX: the original dereferenced ``.doc_id`` on the result of
    ``get`` without checking for a miss (AttributeError on an unknown ID)
    and leaked the DB handle on that failure path.  Unknown IDs are now a
    silent no-op and the handle is always closed.
    """
    sourcesDB = TinyDB("Sdb.json")
    try:
        match = sourcesDB.get(Query().ID == ID)
        if match is not None:
            sourcesDB.update({field: newValue}, doc_ids=[match.doc_id])
    finally:
        sourcesDB.close()
def getMyActivities():
    """Serialize every activity of the requesting user into the app's
    ad-hoc delimited wire format ("***" / "!@#" / "&*^" / "$#$" / "@~!")."""
    json = request.get_json(force=True)
    phone = json['phone']
    user = openPerson(phone)

    returnString = "***"
    for activity_id in user.activities:
        activity = openActivity(activity_id)

        messageString = "".join(m + "&*^" for m in activity.messages)
        pendingString = "".join(p + "$#$" for p in activity.pending)
        attendingString = "".join(
            ("Y@~!" if flag == True else "N@~!") for flag in activity.attending)

        name_parts = []
        for p in activity.pending:
            # person files are sharded by the first digit of the phone number
            db = TinyDB("files/person_" + p[0] + ".json")
            Person = Query()
            data = db.get(Person.phone == p)
            if data:
                name_parts.append(data['name'] + "$#$")
            else:
                name_parts.append("unknown user" + "$#$")
        nameString = "".join(name_parts)

        returnString = (returnString + "!@#" + activity.id + "!@#" +
                        activity.name + "!@#" + activity.creator + "!@#" +
                        messageString + "!@#" + pendingString + "!@#" +
                        attendingString + "!@#" + nameString + "!@#" +
                        activity.creator + "***")
    return returnString
def amAttending():
    """Mark the caller as attending an activity: update the activity,
    move its id from the user's invites to their activities, and return
    the remaining invites in the app's delimited wire format."""
    json = request.get_json(force=True)
    phone = json['phone']
    actID = json['actID']

    activity = openActivity(actID)
    activity.amAttending(phone)

    # person files are sharded by the first digit of the phone number
    db = TinyDB("files/person_" + phone[0] + ".json")
    Person = Query()
    data = db.get(Person.phone == phone)
    newPersonObject = personObject(data['name'], data['phone'], data['token'],
                                   data['activities'], data['invites'],
                                   data['friendNumbers'], data['friendNames'],
                                   data['groups'], 0)

    # drop the first matching invite, then record the activity
    if actID in newPersonObject.invites:
        newPersonObject.invites.remove(actID)
    newPersonObject.activities.append(actID)

    db.update({'invites': newPersonObject.invites,
               'activities': newPersonObject.activities},
              Person.phone == phone)

    parts = []
    for invite_id in newPersonObject.invites:
        activity = openActivity(invite_id)
        parts.append("!@#" + activity.id + "!@#" + activity.name + "!@#" +
                     activity.creator + "***")
    return "".join(parts)
def unmuteUser(bot, update, args):
    """Admin command: lift a mute — drop the user's record from
    users_in_mute.json and restore their chat permissions."""
    if isAdmin(bot, update.message.from_user.id) == False:
        return

    # reply in the group for supergroups, otherwise in private
    sendTo = chatId if update.message.chat.type == 'supergroup' else update.message.from_user.id

    if len(args) == 0 and update.message.reply_to_message == None:
        bot.sendMessage(sendTo, 'Эй, Вы должны ответить на сообщение')
        return

    db = TinyDB('users_in_mute.json')
    User = Query()

    # target: the replied-to user, unless an explicit id was passed
    if len(args) == 0:
        userId = update.message.reply_to_message.from_user.id
    else:
        userId = int(args[0])

    userInMute = db.get(User.user_id == userId)

    if userId == botId:
        return
    if userInMute == None:
        bot.sendMessage(sendTo, 'Пользователь может писать')
        return

    db.remove(User.user_id == userId)
    bot.restrictChatMember(chatId, userId, 366, True, True, True, True)
    bot.sendMessage(sendTo, 'Теперь пользователь сможет нарушать')
def test_json_readwrite(tmpdir):
    """
    Regression test for issue #1
    """
    path = str(tmpdir.join('test.db'))

    # Create TinyDB instance
    db = TinyDB(path, storage=JSONStorage)
    long_item = {'name': 'A very long entry'}
    short_item = {'name': 'A short one'}

    def fetch(name):
        return db.get(where('name') == name)

    # long entry: insert, read back, remove, verify gone
    db.insert(long_item)
    assert fetch('A very long entry') == long_item
    db.remove(where('name') == 'A very long entry')
    assert fetch('A very long entry') is None

    # short entry: same round trip
    db.insert(short_item)
    assert fetch('A short one') == short_item
    db.remove(where('name') == 'A short one')
    assert fetch('A short one') is None
def remove_from_cart():
    """Remove one item key from a user's cart and acknowledge success."""
    users_db = TinyDB('users.json', indent=4, separators=(',', ': '))
    user_id = request.json['id']
    record = users_db.get(doc_id=user_id)

    # mutate a copy of the cart list, then write it back as a whole
    cart = record['cart']
    cart.remove(request.json['item_key'])
    users_db.update({'cart': cart}, doc_ids=[user_id])

    return jsonify(Acknowledgement_base(True).serialize()), 200
def import_kb(source_dir):
    """Load every KB JSON file under *source_dir* into an in-memory TinyDB,
    merging per-item source ids, then push the merged set to the index."""
    index = client.get_index(KB_INDEX_UID)
    db = TinyDB(storage=MemoryStorage)
    KB = Query()
    source_dir = Path(source_dir)
    for filename in source_dir.glob("**/*.json"):
        with open(filename, "r") as f:
            data = json.load(f)
        source_id = data["metadata"]["source_id"]
        for raw in data["collection"]:
            candidate = normalize(raw, KB_SCHEMA)
            # prefer the already-merged copy when this uid was seen before
            stored = db.get(KB.uid == candidate["uid"])
            item = candidate if stored is None else stored
            if "sources" in item:
                item["sources"] = list(set(item["sources"]) | {source_id})
            else:
                item["sources"] = [source_id]
            db.upsert(item, KB.uid == item["uid"])
    resp = index.add_documents(db.all())
    logger.info(resp)
def random_post(update, context):
    """Telegram handler: reply with a random archived reddit post."""
    keyboard = [
        ["Random Post"],
        ["Back"],
    ]
    reply_markup = ReplyKeyboardMarkup(keyboard)

    db = TinyDB(PATH_DB, sort_keys=True, indent=4).table("reddit_archive")
    # BUG FIX: randint(1, len(db)) assumes doc_ids run contiguously from 1;
    # once any document is removed, db.get(doc_id=...) can return None and
    # crash below.  Choosing from the actual documents is always valid.
    post = random.choice(db.all())

    message = ("<b>Subreddit:</b> r/{0}\n"
               "<b>Title:</b> {1}\n"
               "<b>Direct URL:</b> {2}\n"
               "<b>Reddit URL:</b> https://reddit.com{3}\n".format(
                   post["subreddit"],
                   post["title"],
                   post["url"],
                   post["permalink"],
               ))

    update.message.reply_text(
        message,
        reply_markup=reply_markup,
        parse_mode=ParseMode.HTML,
    )
    return 1
class TweetTests(TestCase):
    """Behavior of Tweet persistence against a throwaway TinyDB file."""

    def setUp(self):
        self.db = TinyDB('testdb.json')
        self.dbQuery = Query()

    def test_should_validate_price(self):
        # both an out-of-range and a non-numeric price must be rejected
        with self.assertRaises(PriceValidationError):
            tweet = Tweet(id=1, price=43)
            tweet = Tweet(id=1, price='abc1')

    def test_should_write_to_db(self):
        tweet = Tweet(id=1)
        tweet.write_to_db(self.db)
        self.assertTrue(self.db.contains(self.dbQuery.id == 1))

    def test_should_update_db_if_exists(self):
        tweet = Tweet(id=1)
        tweet.write_to_db(self.db)
        tweet.set_price(2)
        tweet.write_to_db(self.db)
        # FIX: assertEquals is a deprecated alias removed in Python 3.12;
        # use assertEqual.
        self.assertEqual(self.db.get(self.dbQuery.id == 1)['price'], 2)

    def test_should_not_insert_duplicate_ids(self):
        tweet1 = Tweet(id=1)
        tweet1.write_to_db(self.db)
        tweet2 = Tweet(id=1, price=1)
        tweet2.write_to_db(self.db)
        self.assertEqual(len(self.db.search(self.dbQuery.id == 1)), 1)

    def tearDown(self):
        self.db.close()
        if os.path.exists('testdb.json'):
            os.remove('testdb.json')
class Database:
    """TinyDB wrapper: per-guild data in a 'guilds' table, plus a
    key/value 'hacks' record in the default table."""

    db: TinyDB
    guilds: Table

    def __init__(self):
        self.db = TinyDB('db.json')
        self.guilds = self.db.table('guilds')

    def get_guild(self, id):
        """Stored data for guild *id*, or None when unknown.

        BUG FIX: the old ``guild and guild['data'] or None`` idiom returned
        None whenever the stored data itself was falsy (e.g. {} or []).
        Use a real conditional expression.
        """
        guild = self.guilds.get(where('id') == str(id))
        return guild['data'] if guild else None

    def set_guild(self, id, data):
        """Create or replace the data blob for guild *id*."""
        self.guilds.upsert({'id': str(id), 'data': data},
                           where('id') == str(id))

    def get_hacks(self):
        """The stored hacks value, or [] when unset or falsy (preserved
        semantics of the original and/or chain)."""
        hacks = self.db.get(where('key') == 'hacks')
        return hacks['value'] if hacks and hacks['value'] else []

    def set_hacks(self, data):
        """Create or replace the hacks value."""
        self.db.upsert({'key': 'hacks', 'value': data},
                       where('key') == 'hacks')
class users_db:
    """TinyDB store of chat users keyed by user_id."""

    # BUG FIX: this was annotated ``db: TinyDB()`` — a class-level annotation
    # expression is evaluated at class creation time, and calling TinyDB
    # with no path raises.  Annotate with the type, not a call.
    db: TinyDB
    query = Query()

    def __init__(self, path):
        self.db = TinyDB(path)

    def get_user(self, _id):
        """The stored record for *_id*, or None."""
        return self.db.get(self.query.user_id == _id)

    def add_user(self, instance):
        """Persist a new user record from *instance*'s attributes."""
        self.db.insert({
            "user_id": instance.user_id,
            "name": instance.name,
            "group": instance.group,
            "last_chat_id": instance.last_chat_id,
            "status": instance.status
        })

    def del_user(self, _id):
        self.db.remove(self.query.user_id == _id)

    def update(self, instance):
        """Rewrite every stored field for *instance*."""
        self.db.update(
            {
                "user_id": instance.user_id,
                "name": instance.name,
                "group": instance.group,
                "last_chat_id": instance.last_chat_id,
                "status": instance.status
            }, self.query.user_id == instance.user_id)
class Proxy(object):
    """Query proxy preferring a remote DB (Couch/Mongo) with fallback to a
    local TinyDB (file-backed if configured, else in-memory)."""

    def __init__(self, config):
        self.c = config
        self.ldb = None
        self.rdb = None
        self.tag = Query()
        self.req = None
        if config.local:
            try:
                self.ldb = TinyDB(config.local,
                                  storage=CachingMiddleware(JSONStorage))
            except Exception:
                # BUG FIX: was a bare ``except:`` that also swallowed
                # KeyboardInterrupt/SystemExit; narrowed to Exception.
                # Unreadable local file -> degrade to memory-only.
                self.ldb = TinyDB(storage=MemoryStorage)
        else:
            self.ldb = TinyDB(storage=MemoryStorage)
        if config.url:
            auth = None
            if config.user:
                auth = (config.user, click.prompt('password', hide_input=True))
            if config.url.startswith('http'):
                dbclass = CouchDB
            elif config.url.startswith('mongodb'):
                dbclass = MongoDB
            try:
                # NOTE: an unrecognized scheme leaves dbclass undefined;
                # the resulting NameError lands here and disables rdb,
                # matching the original behavior.
                self.rdb = dbclass(config.url, auth=auth, verify=config.verify)
            except Exception:
                # BUG FIX: same bare-except narrowing as above.
                self.rdb = None

    def set_tag(self, tag=None):
        """Set the base query to a tag search, or reset to match-all."""
        self.tag = (where('tag').search(tag)) if tag else Query()

    def insert_multiple(self, docs):
        self.ldb.insert_multiple(docs)

    def contains(self, q=None, **kargs):
        """contains() against the remote DB if available, else local."""
        if q is None:
            q = self.tag
        for k in kargs:
            q &= (where(k) == kargs[k])
        if self.rdb:
            return self.rdb.contains(q.hashval, **kargs)
        return self.ldb.contains(q)

    def search(self, q=None, **kargs):
        """search() against the remote DB if available, else local."""
        if q is None:
            q = self.tag
        for k in kargs:
            q &= (where(k) == kargs[k])
        if self.rdb:
            return list(self.rdb.search(q.hashval, **kargs))
        return self.ldb.search(q)

    def get(self, q=None, **kargs):
        """get() against the remote DB if available, else local."""
        if q is None:
            q = self.tag
        for k in kargs:
            q &= (where(k) == kargs[k])
        if self.rdb:
            return self.rdb.get(q.hashval, **kargs)
        return self.ldb.get(q)

    def close(self):
        self.ldb.close()
class SaleHandler(metaclass=Singleton):
    """Singleton data-access layer for Sale records in db/sale.json."""

    def __init__(self) -> None:
        super().__init__()
        self._logger = Logger(self.__class__.__name__)
        self._logger.info("Initializing Sale handler...")
        self._db = TinyDB("db/sale.json")

    def create_sale(self, item: Item, quantity: int, price: int, seller: str,
                    seller_id: int) -> Sale:
        """Persist a new week-long sale for *item* and return it."""
        sale = Sale(
            item_uid=item.uid,
            quantity=quantity,
            price=price,
            seller=seller,
            seller_discord_id=seller_id,
            from_date_timestamp=datetime.today().timestamp(),
            to_date_timestamp=(datetime.today() + timedelta(days=7)).timestamp(),
        )
        self._db.insert(sale.__dict__)
        return sale

    def get_sale_by_sale_uid(self, sale_uid: str) -> Optional[Sale]:
        """The sale with *sale_uid*, or None when absent."""
        q = Query()
        record = self._db.get(q["_sale_uid"] == sale_uid)
        return Sale.from_dict(record) if record else None

    def get_all_sales(self) -> List[Sale]:
        """Every stored sale."""
        return self._parse_entities(self._db.all())

    def get_sales_by_item_uid(self, item_uid: str) -> List[Sale]:
        """All sales of a single item."""
        q = Query()
        return self._parse_entities(self._db.search(q["_item_uid"] == item_uid))

    def get_sales_by_item_uids(self, item_uids: List[str]) -> List[Sale]:
        """All sales whose item uid is in *item_uids*."""
        q = Query()
        return self._parse_entities(
            self._db.search(q["_item_uid"].one_of(item_uids)))

    def remove_sale_by_sale_uid(self, sale_uid: str) -> int:
        """Delete by sale uid; returns the number of records removed."""
        q = Query()
        return len(self._db.remove(q["_sale_uid"] == sale_uid))

    def remove_stale_sales(self) -> int:
        """Delete every sale whose end date is already in the past."""
        q = Query()
        return len(
            self._db.remove(
                q["_to_date_timestamp"] < datetime.today().timestamp()))

    @staticmethod
    def _parse_entities(entities: List[Dict]) -> List[Sale]:
        """Hydrate raw records into Sale objects."""
        return [Sale.from_dict(entity) for entity in entities]
def exists(wallet_id):
    """Return True iff a wallet record with *wallet_id* is stored."""
    db = TinyDB(DB_FILE, default_table='wallets')
    record = db.get(Query().wallet_id == wallet_id)
    db.close()
    return bool(record)
async def update(self, context, *, league_name: str = None): """$update [new name]: Update your league name""" # verify check with roles user_roles = [role.name for role in context.message.author.roles] if variables.not_verified in user_roles: raise Exception('User not verified') elif league_name != None: db = TinyDB(variables.db_location, default_table=variables.table_name) db_ret = db.get(self.Users.discord_id == context.message.author.id) old_name = db_ret.get('league_name') # update new name db.update({'league_name': league_name}, self.Users.discord_id == context.message.author.id) db.close() fmt = '{0.mention} successfully updated your league name: {1} -> {2}' await self.bot.say( fmt.format(context.message.author, old_name, league_name)) # send new name for logging into the league-name channel channel = discord.utils.get(context.message.author.server.channels, id=variables.verify_channel_id) fmt = '{0.mention} : {1} -> {2}' await self.bot.send_message( channel, fmt.format(context.message.author, old_name, league_name)) # Log logging.info( (str(context.message.author) + ' updated league name: ' + old_name + ' -> ' + league_name).encode("utf-8")) return else: await self.bot.say('No name given')
class Wigx:
    """Fingerprinting front end: registers a target URL and fans out
    scanner threads that write their results into a shared TinyDB."""

    def __init__(self):
        self.db = TinyDB('db.json')

    def add_target(self, url):
        """Return a (possibly pre-existing) scan uuid for *url* and start
        the scanner threads."""
        details = Query()
        if self.db.search(details.subdomin == url):
            target_id = self.db.get(details.subdomin == url)['uuid']
        else:
            target_id = str(uuid.uuid4())
            self.db.insert({'uuid': target_id, 'subdomin': url})
        self.start(target_id, url)
        _log.info("Waiting.........................")
        return target_id

    def start(self, uuid, url=None):
        """Launch one scanner thread of each kind for *url*.

        BUG FIX: the original body referenced an undefined global ``url``
        (NameError when called).  The URL is now passed explicitly; the
        parameter defaults to None to keep the old one-argument call
        signature valid.
        """
        HeadThread(url, self.db, uuid).start()
        DomainThread(url, self.db, uuid).start()
        RobotThread(url, self.db, uuid).start()
        WigrThread(url, self.db, uuid).start()

    def get_result(self, uuid):
        """All records written under scan id *uuid*."""
        details = Query()
        return self.db.search(details.uuid == uuid)
def runs2json():
    """Serve the selected runs as a downloadable pretty-printed JSON file."""
    requested = literal_eval(request.form['run_ids'])
    db = TinyDB(recipyGui.config.get('tinydb'))
    runs = [db.get(eid=rid) for rid in requested]
    db.close()

    response = make_response(dumps(runs, indent=2, sort_keys=True))
    response.headers['content-type'] = 'application/json'
    response.headers['Content-Disposition'] = 'attachment; filename=runs.json'
    return response
def run_details():
    """Details page for a single run, selected via the 'id' query parameter."""
    form = SearchForm()
    annotateRunForm = AnnotateRunForm()
    query = request.args.get('query', '')
    run_id = int(request.args.get('id'))

    db = TinyDB(recipyGui.config.get('tinydb'))
    run = db.get(eid=run_id)
    db.close()

    return render_template('details.html', query=query, form=form,
                           annotateRunForm=annotateRunForm, run=run)
class ListCache(object):
    """TTL'd TinyDB cache of Listing objects keyed by their hash (hsh)."""

    DB_FILE = 'listing_db.json'
    DB_TTL = timedelta(hours=12)

    def __init__(self):
        db_path = os.path.join(
            os.path.join(os.getcwd(), os.path.dirname(__file__)),
            ListCache.DB_FILE)
        self.db = TinyDB(db_path)

    @property
    def db(self):
        return self._db

    @db.setter
    def db(self, db):
        self._db = db

    def listing_in_cache(self, listing):
        """True iff a cached entry shares *listing*'s hash."""
        return self.db.contains(Query().hsh == listing.hsh)

    def retrieve_listing(self, listing):
        """Re-hydrate the cached Listing matching *listing*'s hash."""
        cached = self.db.get(Query().hsh == listing.hsh)
        return Listing.from_dict(cached)

    def insert_listing(self, listing):
        """Insert *listing*, refreshing it when already cached."""
        if self.listing_in_cache(listing):
            self.update_listing(listing)
        else:
            record = listing.as_dict()
            record['last_updated'] = datetime.now().isoformat()
            record['hsh'] = listing.hsh
            self.db.insert(record)

    def remove_listing(self, listing):
        self.db.remove(Query().hsh == listing.hsh)

    def update_listing(self, listing):
        """Replace the cached copy of *listing* (remove, then re-insert)."""
        if self.listing_in_cache(listing):
            self.remove_listing(listing)
        self.insert_listing(listing)

    def remove_old_listings(self):
        """Evict every entry older than DB_TTL."""
        for record in self.db.all():
            stamp = datetime.strptime(record['last_updated'],
                                      '%Y-%m-%dT%H:%M:%S.%f')
            if stamp < datetime.now() - ListCache.DB_TTL:
                self.remove_listing(Listing.from_dict(record))
class NumberStore():
    """Phone-number records (number, access log, free-text info) in TinyDB."""

    def __init__(self, filename):
        self.db = TinyDB(filename)

    def initNumber(self, number):
        """Create a default record for *number* if none exists yet."""
        if not self.getNumberDict(number):
            self.db.insert({'number': number, 'accesses': [], 'info': '#yolo'})

    def touchNumber(self, number):
        """Ensure *number* exists.

        CLEANUP: the original carried commented-out draft code for
        access-time logging; removed as dead code (the 'accesses' list is
        still created but never appended to).
        """
        self.initNumber(number)

    def getNumberDict(self, number):
        """The raw record for *number*, or None."""
        return self.db.get(where('number') == number)

    def getNumberList(self):
        """Generator over all stored numbers."""
        return (entry['number'] for entry in self.db.all())

    def getAccesses(self, number):
        # Not implemented yet — always the empty list (commented-out draft
        # removed as dead code).
        return []

    def getInfo(self, number):
        """Info text for *number* (TypeError when the number is unknown,
        matching the original behavior)."""
        return self.getNumberDict(number)['info']

    def setInfo(self, number, info):
        """Create the record if needed, then store *info*."""
        self.initNumber(number)
        self.db.update({'info': info}, where('number') == number)
        print(self.db.all())  # debug trace kept from original

    def deleteNumber(self, number):
        self.db.remove(where('number') == number)
        print(self.db.all())  # debug trace kept from original

    def close(self):
        self.db.close()
def latest_run():
    """Render the details page for the most recently dated run."""
    form = SearchForm()
    annotateRunForm = AnnotateRunForm()

    db = TinyDB(recipyGui.config.get('tinydb'))
    all_runs = db.all()
    # newest first; stored dates carry a '{TinyDate}:' prefix to strip
    all_runs = sorted(
        all_runs,
        key=lambda run: parse(run['date'].replace('{TinyDate}:', '')),
        reverse=True)
    r = db.get(eid=all_runs[0].eid)
    diffs = db.table('filediffs').search(Query().run_id == r.eid)
    db.close()

    return render_template('details.html', query='', form=form, run=r,
                           annotateRunForm=annotateRunForm,
                           dbfile=recipyGui.config.get('tinydb'), diffs=diffs,
                           active_page='latest_run')
class FolderManager:
    """Tracks the folders a user has added, persisted in a TinyDB file."""

    def __init__(self, path):
        self.db_file = os.path.join(path, CONF_DIR, FOLDER_DB_FN)
        self._db = TinyDB(self.db_file)

    def open_db(self):
        """Re-open the database after a close_db()."""
        self._db = TinyDB(self.db_file)

    def close_db(self):
        self._db.close()

    def add_folder(self, file_name):
        """Record *file_name* unless it is already tracked."""
        if self.folder_exists(file_name):
            return
        self._db.insert({'file_name': file_name})

    def get_all_entries(self):
        return self._db.all()

    def folder_exists(self, file_name):
        """ checks if a folder has been added """
        return bool(self._db.search(where('file_name') == file_name))

    def remove_element(self, file_name):
        self._db.remove(where('file_name') == file_name)

    def get_file_names(self):
        """ returns all the file names of folders that the user has added """
        return [entry['file_name'] for entry in self._db.all()]

    def get_folder_by_name(self, expected_name):
        """ get documents by the specified property """
        return self._db.get(where('file_name') == expected_name)

    def clear_all(self):
        self._db.purge()
def test_json_readwrite(): """ Regression test for issue #1 """ # Create TinyDB instance db = TinyDB(path, policy='autoadd', storage=SFTPStorage) item = {'name': 'A very long entry'} item2 = {'name': 'A short one'} get = lambda s: db.get(where('name') == s) db.insert(item) assert get('A very long entry') == item db.remove(where('name') == 'A very long entry') assert get('A very long entry') is None db.insert(item2) assert get('A short one') == item2 db.remove(where('name') == 'A short one') assert get('A short one') is None
class TerritoryMapController(BaseController):
    """Builds per-territory lists of map points from the address database."""

    def __init__(self):
        self.addresses = TinyDB(self.databaseDirectory + '/addresses.json')
        self.territories = TinyDB(self.databaseDirectory + '/territories.json')

    def get(self):
        """Return ``{territory_id: [{'x': ..., 'y': ...}, ...]}``.

        Points are ordered by ascending y, with ties broken by descending x
        (the x sort runs first; the final y sort is stable).
        """
        marked = {}
        for territory in self.territories.all():
            points = []
            for address_id in territory['addresses']:
                point = self.addresses.get(where('id') == address_id)
                # Fix: the original also built an unused address_full_name
                # string here (point['address'] + ', ' + point['city']) --
                # dead work and a needless KeyError risk; removed.
                points.append({'x': point['cords'][0], 'y': point['cords'][1]})
            points.sort(key=lambda p: p['x'], reverse=True)
            points.sort(key=lambda p: p['y'])
            marked[territory['id']] = points
        return marked
def save():
    """Persist the player's state to a per-player table in db.json.

    Each stat lives in its own record. Record ids (1-14) are fixed by the
    insertion order of the first save, so later saves address them by eid.

    Fix: the original spelled out 14 update and 14 insert statements by
    hand; both paths now loop over one shared, ordered field list, so the
    eid mapping cannot drift between the two branches.
    """
    db = TinyDB('db.json')
    player_stats = db.table(name_input)
    # Fixed order: index in this list + 1 == the record's eid.
    fields = [
        {'time_advance': player.time_advance},
        {'bow_message': player.bow_message},
        {'hunt_message': player.hunt_message},
        {'can_hunt': player.can_hunt},
        {'exploration1_message': player.exploration1_message},
        {'sword_message': player.sword_message},
        {'can_go_to_town': player.can_go_to_town},
        {'health': player.health},
        {'weapon': player.weapon},
        {'energy': player.energy},
        {'exploration_count': player.exploration_count},
        {'town_count': player.town_count},
        {'money': player.money},
        player.inventory,
    ]
    if db.get(where('name') == name_input):
        # Existing save: overwrite each record in place by eid.
        for eid, record in enumerate(fields, 1):
            player_stats.update(record, eids=[eid])
        print("Saved")
    else:
        # First save: insertion order establishes the eids relied on above.
        for record in fields:
            player_stats.insert(record)
        db.insert({'name': name_input})
        print("First save")
def test_cutom_mapping_type_with_json(tmpdir):
    """Custom ``Mapping`` documents should insert, count and write_back
    correctly through the JSON storage."""
    # NOTE(review): "cutom" in the name is a typo for "custom"; kept as-is
    # because renaming would change which test the runner collects.
    from tinydb.database import Mapping

    class CustomDocument(Mapping):
        # Minimal Mapping wrapper around a plain dict.
        def __init__(self, data):
            self.data = data

        def __getitem__(self, key):
            return self.data[key]

        def __iter__(self):
            return iter(self.data)

        def __len__(self):
            return len(self.data)

    # Insert a single custom document into a fresh database.
    db = TinyDB(str(tmpdir.join('test.db')))
    db.purge()
    db.insert(CustomDocument({'int': 1, 'char': 'a'}))
    assert db.count(where('int') == 1) == 1

    # Insert multiple custom documents at once.
    db.insert_multiple([
        CustomDocument({'int': 2, 'char': 'a'}),
        CustomDocument({'int': 3, 'char': 'a'})
    ])
    assert db.count(where('int') == 1) == 1
    assert db.count(where('int') == 2) == 1
    assert db.count(where('int') == 3) == 1

    # write_back replaces the document stored under an existing doc_id.
    doc_id = db.get(where('int') == 3).doc_id
    db.write_back([CustomDocument({'int': 4, 'char': 'a'})], [doc_id])
    assert db.count(where('int') == 3) == 0
    assert db.count(where('int') == 4) == 1
class DB:
    """Read-side wrapper around an experiments TinyDB file.

    Falls back to a plain local TinyDB when the optional paramiko-based
    SFTP storage is unavailable or the remote path is wrong.
    """

    def __init__(self, path):
        try:
            from sftp_storage import SFTPStorage, WrongPathException
            try:
                self.db = TinyDB(path, policy='autoadd', storage=SFTPStorage)
            except WrongPathException:
                self.db = TinyDB(path)
        except ImportError:
            from warnings import warn
            warn("""`paramiko` doesn't seem to be installed in your OS. Remote db access is disabled""", ImportWarning)
            self.db = TinyDB(path)

    def get_experiments(self):
        """Return every experiment record.

        Fix: the original called ``self.db.search()`` with no condition,
        which raises TypeError in TinyDB; ``db.all()`` is the supported
        way to fetch everything.
        """
        return self.db.all()

    def get_experiment(self, experiment_id):
        """Return the experiment whose "id" field matches, or None.

        Fix: the original passed a plain dict to ``db.get``, which is not
        a valid TinyDB query condition.
        """
        return self.db.get(where("id") == experiment_id)

    def get_model(self, experiment_id, model_id):
        """Return the model record with *model_id* inside an experiment."""
        models = self.get_experiment(experiment_id)["models"]
        for m in models:
            if m["modelId"] == model_id:
                return m

    def get_tags(self):
        """Chain together the tag lists of all experiments."""
        return chain(*[exp['tags'] for exp in self.get_experiments()])

    def get_timestamps(self):
        """Timestamps of every model across every experiment."""
        return [model["meta"]["timestamp"]
                for exp in self.get_experiments()
                for model in exp.get("models", [])]

    def get_last_timestamp(self):
        """Most recent model timestamp.

        Fix: the original returned ``max(self.get_timestamps)`` -- missing
        call parentheses, so it passed the bound method itself to max().
        """
        return max(self.get_timestamps())
raise ValueError('Invalid track ID') def is_downloadable(info): if not info['downloadable']: return False if not info['has_downloads_left']: return False return True if __name__ == '__main__': db = TinyDB('db.json') settings = db.get(eid=1) songs = db.table('songs') for i in songs.search(where('downloadable') == False): print i['id'] info = track_info(i['id']) d = is_downloadable(info) if d: print 'Is now downloadable' songs.update({'downloadable': True}, where('id') == i['id']) for email in i['emails']: print 'Emailing ' + email
class Model(object):
    """Active-record style base model persisted to a per-table TinyDB JSON
    file on S3 storage.

    Subclasses set ``table`` and wrap their values in ``Field`` objects.
    Attribute names listed in ``_exclude_fields`` are bookkeeping and are
    never serialized as data fields.
    """
    table = 'default'
    # Attributes that must never be treated as data fields.
    _exclude_fields = [
        'db',
        'table',
        'submit',
        '_exclude_fields',
        'exclude_fields',
        '_deleted_args'
    ]
    # NOTE(review): class-level mutable default -- shared across instances
    # unless replaced via the '_deleted_args' keyword in __init__.
    _deleted_args = list()

    def __init__(self, **kwargs):
        # One JSON file per table under the configured DB path.
        table = os.path.join(current_app.config.get('DB_PATH', 'gallery_db'), '%s.json' % self.table)
        self.db = TinyDB(table, storage = S3Storage)
        self.eid = Field(type = int, required = False, primary = False)
        # Subclasses may extend the exclusion list via 'exclude_fields'.
        exclude_fields = getattr(self, 'exclude_fields', None)
        if exclude_fields:
            self._exclude_fields += exclude_fields
        for key, value in kwargs.items():
            if key == '_deleted_args':
                self._deleted_args = value
            if key not in self._exclude_fields:
                self.setattr(key, value)

    def all(self):
        """Return every row in the table, hydrated as model objects."""
        rows = list()
        for row in self.db.all():
            rows.append( self.as_obj(row) )
        return rows

    def filter(self, **kwargs):
        """Rows matching any of the given field == value pairs (OR'ed,
        de-duplicated by eid)."""
        rows = list()
        eids = list()
        for field, value in kwargs.iteritems():
            # Coerce raw values into validated Field objects.
            if type(value) != Field:
                value = self.setattr(field, value)
            if value.validate():
                founds = self.db.search(where(field) == value.value)
                for found in founds if founds else []:
                    if found.eid not in eids:
                        eids.append(found.eid)
                        rows.append( self.as_obj(found) )
        return rows

    def get(self, eid):
        """Fetch a single row by eid; False when absent."""
        row = self.db.get(eid = eid)
        if row:
            return self.as_obj(row)
        return False

    def search(self, **kwargs):
        """First row matching any field == value pair, or False."""
        for field, value in kwargs.iteritems():
            if type(value) != Field:
                value = self.setattr(field, value)
            if value.validate():
                row = self.db.search(where(field) == value.value)
                if row:
                    # search() returns a list; keep only the first hit.
                    if type(row) == list:
                        row = row[0]
                    return self.as_obj(row)
        return False

    def create(self):
        """Insert this object's serialized fields; returns the new eid."""
        insert = self.as_dict()
        return self.db.insert(insert)

    def update(self):
        """Write current fields back and drop any deleted attributes."""
        update = self.as_dict()
        for arg in self._deleted_args:
            # NOTE(review): bare except silently swallows failed deletes.
            try:
                self.db.update(delete(arg), eids = [ self.eid.value ])
            except:
                pass
        return self.db.update(update, eids = [ self.eid.value ])

    def save(self):
        """Create or update depending on whether an eid is already set."""
        if self.eid.value:
            self.eid.validate()
            return self.update()
        else:
            create = self.create()
            self.eid.value = create
            return self

    def delete(self):
        """Remove this object's row from the table."""
        self.db.remove( eids = [ self.eid.value ] )

    def as_dict(self):
        """Serialize all validated Field values into a plain dict."""
        args = dict()
        for key in self.__dict__.keys():
            if key not in self._exclude_fields:
                attr = getattr(self, key, None)
                if attr:
                    if attr.validate():
                        args[key] = attr.value
        return args

    def clean(self):
        """Strip every data attribute from the instance."""
        for key in self.__dict__.keys():
            if key not in self._exclude_fields:
                delattr(self, key)

    def as_obj(self, row):
        """Hydrate a DB row into (a shallow copy of) this model instance."""
        self.clean()
        if not getattr(self, 'eid', None):
            self.eid = Field(value = row.eid, type = int, required = False, primary = False)
        for key, value in row.items():
            self.setattr(key, value)
        return copy.copy( self )

    def setattr(self, key, value):
        """Wrap *value* in a Field and attach it under *key*.

        Returns the Field for data attributes; False for excluded keys
        (only '_deleted_args' gets special accumulation treatment).
        """
        attr = getattr(self, key, Field())
        if type(attr) != Field:
            attr = Field()
        attr.value = value
        if key not in self._exclude_fields:
            setattr(self, key, attr)
            return attr
        # Excluded keys fall through to here.
        if key == '_deleted_args':
            self._deleted_args.append(value)
        return False

    def from_form(self, form):
        """Populate fields from a form-like mapping."""
        for key, value in form.items():
            self.setattr(key, value)
        return self

    def as_form(self):
        """Return {name: Field} for every data attribute."""
        fields = dict()
        for key in self.__dict__.keys():
            if key not in self._exclude_fields:
                attr = getattr(self, key, None)
                if attr and type(attr) == Field:
                    fields[key] = attr
        return fields

    def __repr__(self):
        if self.eid:
            return '<%s: %s>' % (self.__class__.__name__, self.eid.value)
        else:
            return '<%s>' % (self.__class__.__name__)
class StasherTinyDB(StasherClass):
    """StasherClass implementation backed by a TinyDB file ('users' table).

    User records live at TinyDB eids, which double as user ids throughout.
    """

    def __init__(self):
        self.db = TinyDB("stasher/havocbot.json", default_table="users", sort_keys=True, indent=2)

    def add_user(self, user):
        """Insert *user*; raises when any of its usernames already exist."""
        # Iterate through the user's usernames and see if any usernames already exist
        if self._user_exists(user):
            logger.error("This user already exists in the db")
            raise UserDataAlreadyExistsException
        logger.info("Adding new user '%s' to database" % (user.name))
        logger.debug("add_user - adding '%s'" % (user.to_dict_for_db()))
        self.db.insert(user.to_dict_for_db())

    def del_user(self, user):
        # Not implemented.
        pass

    def add_permission_to_user_id(self, user_id, permission):
        """Add *permission* to the user's permissions list."""
        try:
            self._add_string_to_list_by_key_for_user_id(user_id, "permissions", permission)
        except:
            raise

    def del_permission_to_user_id(self, user_id, permission):
        """Remove *permission* from the user's permissions list."""
        try:
            self._del_string_to_list_by_key_for_user_id(user_id, "permissions", permission)
        except:
            raise

    def add_alias_to_user_id(self, user_id, alias):
        """Add *alias* to the user's aliases list."""
        try:
            self._add_string_to_list_by_key_for_user_id(user_id, "aliases", alias)
        except:
            raise

    def del_alias_to_user_id(self, user_id, alias):
        """Remove *alias* from the user's aliases list."""
        try:
            self._del_string_to_list_by_key_for_user_id(user_id, "aliases", alias)
        except:
            raise

    def add_points_to_user_id(self, user_id, points):
        """Increment the user's points counter in place."""
        logger.info("Adding %d points to user id %s" % (points, user_id))

        def increment_by_value(field, value):
            # TinyDB update transform: mutates the stored element.
            def transform(element):
                element[field] += int(value)
            return transform

        self.db.update(increment_by_value("points", points), eids=[user_id])

    def del_points_to_user_id(self, user_id, points):
        """Decrement the user's points counter in place."""
        logger.info("Deleting %d points from user id %s" % (points, user_id))

        def decrement_by_value(field, value):
            def transform(element):
                element[field] -= int(value)
            return transform

        self.db.update(decrement_by_value("points", points), eids=[user_id])

    def find_user_by_id(self, search_user_id):
        """Return a User built from the record at this eid, or None."""
        logger.info("Searching for '%s'" % (search_user_id))
        user = None
        result = self.db.get(eid=search_user_id)
        if result is not None:
            user = self.build_user(result)
        logger.debug("Returning with '%s'" % (user))
        return user

    def find_user_by_username_for_client(self, search_username, client_name):
        """Return the user whose usernames for *client_name* contain the
        given name -- only when exactly one record matches; else None."""
        logger.info("Searching for '%s' in client '%s'" % (search_username, client_name))
        user = None
        user_query = Query()
        result_list = self.db.search(user_query.usernames[client_name].any([search_username]))
        if result_list is not None and result_list:
            if len(result_list) == 1:
                user = self.build_user(result_list[0])
        logger.debug("Returning with '%s'" % (user))
        return user

    def find_users_by_username(self, search_username):
        # Not implemented -- a client-agnostic username search would need to
        # scan every client's username list.
        pass

    def find_users_by_name_for_client(self, search_name, client_name):
        """Valid users whose name matches case-insensitively and who have a
        username entry for *client_name*."""
        logger.info("Searching for '%s' in client '%s'" % (search_name, client_name))
        results = []

        def name_test_func(val, nested_search_name):
            # Case-insensitive exact match on the stored name.
            return val.lower() == nested_search_name.lower()

        user_query = Query()
        matched_users = self.db.search(
            (user_query["name"].test(name_test_func, search_name))
            & (user_query["usernames"].any([client_name]))
        )
        if matched_users:
            for matched_user in matched_users:
                a_user = self.build_user(matched_user)
                if a_user.is_valid():
                    results.append(a_user)
        logger.debug("Returning with '[%s]'" % (", ".join(map(str, results))))
        return results

    def find_users_by_alias_for_client(self, search_alias, client_name):
        """Valid users with a case-insensitively matching alias who have a
        username entry for *client_name*."""
        logger.info("Searching for '%s' in client '%s'" % (search_alias, client_name))
        results = []

        def alias_test_func(val, nested_search_alias):
            # NOTE(review): compares against the closed-over search_alias
            # rather than the nested_search_alias parameter, and relies on
            # the truthiness of the lowered string, so an empty-string alias
            # can never match -- confirm this is intended.
            return any(x.lower() for x in val if x.lower() == search_alias.lower())

        user_query = Query()
        matched_users = self.db.search(
            (user_query["aliases"].test(alias_test_func, search_alias))
            & (user_query["usernames"].any([client_name]))
        )
        if matched_users:
            for matched_user in matched_users:
                a_user = self.build_user(matched_user)
                if a_user.is_valid():
                    results.append(a_user)
        logger.debug("Returning with '[%s]'" % (", ".join(map(str, results))))
        return results

    def find_users_by_matching_string_for_client(self, search_string, client_name):
        """Union of name, username and alias matches for one client.

        May contain duplicates when a user matches several criteria.
        """
        logger.info("Searching for '%s' in client '%s'" % (search_string, client_name))
        results = []
        results_name = self.find_users_by_name_for_client(search_string, client_name)
        if results_name is not None and results_name:
            results.extend(results_name)
        result_username = self.find_user_by_username_for_client(search_string, client_name)
        if result_username is not None and result_username:
            results.append(result_username)
        results_alias = self.find_users_by_alias_for_client(search_string, client_name)
        if results_alias is not None and results_alias:
            results.extend(results_alias)
        return results

    def find_all_users(self):
        # Not implemented.
        pass

    def build_user(self, result_data):
        """Hydrate a User from a stored record; missing keys get defaults."""
        user = User(result_data.eid)
        user.name = result_data["name"] if "name" in result_data else None
        user.usernames = result_data["usernames"] if "usernames" in result_data else {}
        user.points = result_data["points"] if "points" in result_data else None
        user.permissions = result_data["permissions"] if "permissions" in result_data else []
        user.aliases = result_data["aliases"] if "aliases" in result_data else []
        user.is_stashed = True
        return user

    def _add_string_to_list_by_key_for_user_id(self, user_id, list_key, string_item):
        """Append *string_item* to the user's *list_key* list, creating the
        list when absent; raises when the item is already present."""
        logger.info("Adding '%s' item '%s' to user id %d" % (list_key, string_item, user_id))
        list_items = []
        try:
            list_items = self.db.get(eid=user_id)[list_key]
        except KeyError:
            # No such list yet: start it with the new item.
            logger.info("No items found for list '%s' for user id '%d'" % (list_key, user_id))
            list_items = [string_item]
        else:
            if string_item in list_items:
                raise UserDataAlreadyExistsException
            else:
                list_items.append(string_item)
        finally:
            # NOTE(review): finally runs even while the duplicate exception
            # above propagates, so the (unchanged) list is re-written.
            logger.debug("Updating '%s' to '%s' for user id '%s'" % (list_key, list_items, user_id))
            self.db.update({list_key: list_items}, eids=[user_id])

    def _del_string_to_list_by_key_for_user_id(self, user_id, list_key, string_item):
        """Remove *string_item* from the user's *list_key* list; raises
        KeyError when the list is absent, UserDataNotFoundException when
        the item is not in it."""
        logger.info("Deleting '%s' item '%s' from user id %d" % (list_key, string_item, user_id))
        list_items = []
        try:
            list_items = self.db.get(eid=user_id)[list_key]
        except KeyError:
            raise
        else:
            if string_item not in list_items:
                raise UserDataNotFoundException
            else:
                list_items.remove(string_item)
            logger.debug("Updating '%s' to '%s' for user id '%s'" % (list_key, list_items, user_id))
            self.db.update({list_key: list_items}, eids=[user_id])

    def _user_exists(self, user):
        """True when any of *user*'s usernames is already stored for its client."""
        # Iterate through the user's usernames and see if any usernames already exist
        if user.usernames is not None and user.usernames:
            for (key, value) in user.usernames.items():
                logger.info("Iterating over key '%s' with value '%s'" % (key, value))
                for username in value:
                    logger.info("Iterating over username '%s'" % (username))
                    result = self.find_user_by_username_for_client(username, key)
                    logger.info("result for username '%s' is '%s'" % (username, result))
                    if result is not None:
                        return True
        return False
class JobDB:
    """Keeps a database of jobs, with a MD5 hash that encodes the function
    name, version, and all arguments to the function.
    """
    def __init__(self, path):
        self.db = TinyDB(path)
        # Serialises all DB access across threads.
        self.lock = Lock()

    def get_result_or_attach(self, key, prov, running):
        """Look up a job by its provenance hash.

        Returns one of:
          ('retrieved', key, result) -- job finished, result cached;
          ('attached', key, None)    -- job or its workflow still running,
                                        *key* attached to the existing job;
          ('broken', None, None)     -- stale unfinished record, removed.
        """
        job = Query()
        with self.lock:
            # NOTE(review): assumes a record for this prov exists -- rec is
            # None otherwise and the membership test below raises TypeError.
            rec = self.db.get(job.prov == prov)
            if 'result' in rec:
                return 'retrieved', rec['key'], rec['result']
            job_running = rec['key'] in running
            wf_running = rec['link'] in running.workflows
            if job_running or wf_running:
                self.db.update(attach_job(key), job.prov == prov)
                return 'attached', rec['key'], None
            print("WARNING: unfinished job in database. Removing it and "
                  " rerunning.", file=sys.stderr)
            self.db.remove(eids=[rec.eid])
            return 'broken', None, None

    def job_exists(self, prov):
        """True when a record with this provenance hash is stored."""
        job = Query()
        with self.lock:
            return self.db.contains(job.prov == prov)

    def store_result(self, key, result):
        """Store *result* for job *key*; returns the attached key list,
        or None when the job is unknown."""
        job = Query()
        with self.lock:
            if not self.db.contains(job.key == key):
                return

        # Timestamp outside the lock; add_time_stamp re-acquires it.
        self.add_time_stamp(key, 'done')
        with self.lock:
            self.db.update(
                {'result': result, 'link': None},
                job.key == key)
            rec = self.db.get(job.key == key)
            return rec['attached']

    def new_job(self, key, prov, job_msg):
        """Insert a fresh job record; returns (key, prov)."""
        with self.lock:
            self.db.insert({
                'key': key,
                'attached': [],
                'prov': prov,
                'link': None,
                'time': {'schedule': time_stamp()},
                'version': job_msg['data']['hints'].get('version'),
                'function': job_msg['data']['function'],
                'arguments': job_msg['data']['arguments']
            })
        return key, prov

    def add_link(self, key, ppn):
        """Point job *key* at workflow node *ppn*."""
        job = Query()
        with self.lock:
            self.db.update({'link': ppn}, job.key == key)

    def get_linked_jobs(self, ppn):
        """Keys of every job linked to workflow node *ppn*."""
        job = Query()
        with self.lock:
            rec = self.db.search(job.link == ppn)
            return [r['key'] for r in rec]

    def add_time_stamp(self, key, name):
        """Record the current time under time[name] for job *key*."""
        def update(r):
            # In-place transform applied by TinyDB to the stored element.
            r['time'][name] = time_stamp()
        job = Query()
        with self.lock:
            self.db.update(
                update, job.key == key)
def goto(eid):
    """Mark the link stored at *eid* as seen and redirect to its URL.

    Fix: the original never closed the TinyDB handle (its sibling ``hide``
    handler does); the handle is now closed before redirecting.
    """
    db = TinyDB(DB_FILENAME)
    result = db.get(eid=eid)
    db.update({'seen': True}, eids=[eid])
    db.close()
    return redirect(result['url'], code=302)
def hide(eid):
    """Flip the 'hidden' flag on the record stored at *eid*."""
    db = TinyDB(DB_FILENAME)
    entry = db.get(eid=eid)
    toggled = not entry['hidden']
    db.update({'hidden': toggled}, eids=[eid])
    db.close()
    return 'OK'
class DocumentManager:
    """TinyDB-backed bookkeeping for documents a user has added."""

    def __init__(self, path):
        self.db_file = os.path.join(path, CONF_DIR, DB_FN)
        self._db = TinyDB(self.db_file)

    def open_db(self):
        """Re-open the database after a close_db()."""
        self._db = TinyDB(self.db_file)

    def close_db(self):
        self._db.close()

    def doc_exists(self, file_name, title):
        """True when a record matches both the file name and the title."""
        matches = self._db.search(
            (where('file_name') == file_name) & (where('name') == title))
        return True if matches else False

    def is_doc_new(self, file_name):
        """True when no record mentions *file_name* yet."""
        return not self._db.search(where('file_name') == file_name)

    def is_doc_modified(self, file_name, path):
        """True when the file on disk is newer than both stored timestamps."""
        entry = self._db.get(where('file_name') == file_name)
        mtime = os.stat(os.path.join(path, file_name)).st_mtime
        return bool(entry and entry['added'] < mtime and entry['last_mod'] < mtime)

    def add_document(self, title, create_date, doc_id, sys_mtime, last_mod, file_name):
        """Insert a fresh record with an empty download list."""
        self._db.insert({
            'name': title, 'added': create_date, 'id': doc_id,
            'sys_last_mod': sys_mtime, 'last_mod': last_mod,
            'file_name': file_name, 'downloaded': [],
        })

    def update_document(self, field, new_val, doc_id):
        """Update *field* on the record with *doc_id*.

        Lists go through the custom list-update transform; sets are stored
        as lists (JSON has no set type).
        """
        cond = where('id') == doc_id
        if type(new_val) is list:
            self._db.update(_update_entry_list(field, new_val), cond)
            return
        if type(new_val) is set:
            new_val = list(new_val)
        self._db.update({field: new_val}, cond)

    def get_doc_by_prop(self, prop, expected_value):
        """ get documents by the specified property """
        return self._db.get(where(prop) == expected_value)

    def get_all_entries(self):
        return self._db.all()

    def get_doc_ids(self):
        """ returns all the ids of documents that user has added """
        return [entry['id'] for entry in self._db.all()]

    def remove_element(self, doc_id):
        self._db.remove(where('id') == doc_id)

    def clear_all(self):
        self._db.purge()
reviews_db = TinyDB('data/reviews.json') # Load key from yaml file with open('key.yaml', 'r') as f: key = yaml.load(f.read())['google-key'] # load language parameter with open('config.yaml', 'r') as f: language = yaml.load(f.read())['language'] # template url used to send requests url_template = ('https://maps.googleapis.com/maps/api/place/details/json?' 'key={key}&placeid={placeid}&language={language}') # load details for every place for i in xrange(1, len(places_db)): # build dic with params to send to the API params = {'key': key, 'placeid': places_db.get(eid=i)['place_id'], 'language': language} # send request res = requests.get(url_template.format(**params)) # obtain reviews if any try: reviews = json.loads(res.content)['result']['reviews'] except Exception, e: print 'Place does not have reviews, skipping...' else: # insert reviews in json file reviews_db.insert_multiple(reviews)
class DocumentManager:
    """TinyDB-backed index of added documents: tracks names, ids,
    timestamps, target locales and downloaded translations."""

    def __init__(self, path):
        self.db_file = os.path.join(path, CONF_DIR, DB_FN)
        self._db = TinyDB(self.db_file)

    def open_db(self):
        # Re-open after a close_db().
        self._db = TinyDB(self.db_file)

    def close_db(self):
        self._db.close()

    def doc_exists(self, file_name, title):
        # True when a record matches both file name and title.
        entries = self._db.search((where('file_name') == file_name) & (where('name') == title))
        if entries:
            return True
        else:
            return False

    def is_doc_new(self, file_name):
        # A document is "new" when its file name is not in the DB yet.
        file_name_exists = self._db.search(where('file_name') == file_name)
        if not file_name_exists:
            return True
        return False

    ''' receives a translation file and checks if there are corresponding source files'''
    def is_translation(self, file_name, title, matched_files, actions):
        ''' check if the file is a translation file'''
        for myFile in matched_files:
            relative_path = actions.norm_path(myFile)
            myFileTitle = os.path.basename(relative_path)
            ''' only compare the file being checked against source files that have already been added '''
            entry = self._db.get(where("file_name") == relative_path)
            if entry:
                ''' check the source file's download codes to see if the file being checked is a translation file '''
                downloads = self.get_doc_downloads(relative_path)
                if downloads:
                    for d in downloads:
                        ''' append the download code to the source file for comparison '''
                        # e.g. source "doc.txt" + code "de" -> "doc.de.txt"
                        # NOTE(review): assumes the title has exactly one dot;
                        # extra dots would make temp[1] drop the real suffix.
                        temp = myFileTitle.split(".")
                        newString = temp[0]+"."+ d +"."+temp[1]
                        if newString == title:
                            return True
        return False

    ''' receives a source file and finds the source files associated with it '''
    #def delete_local_translations(self, file_name, path, actions):

    def is_doc_modified(self, file_name, path):
        # True when the on-disk mtime is newer than both stored timestamps.
        entry = self._db.get(where('file_name') == file_name)
        full_path = os.path.join(path, file_name)
        last_modified = os.stat(full_path).st_mtime
        if entry and entry['added'] < last_modified and entry['last_mod'] < last_modified:
            return True
        return False

    def add_document(self, title, create_date, doc_id, sys_mtime, last_mod, file_name):
        # Fresh record starts with no downloaded translations.
        entry = {'name': title, 'added': create_date, 'id': doc_id,
                 'sys_last_mod': sys_mtime,
                 'last_mod': last_mod, 'file_name': file_name, 'downloaded': []}
        self._db.insert(entry)

    def update_document(self, field, new_val, doc_id):
        # Lists go through the custom list-update transform; sets are
        # stored as lists (JSON has no set type).
        if type(new_val) is list:
            self._db.update(_update_entry_list(field, new_val), where('id') == doc_id)
        else:
            if type(new_val) is set:
                new_val = list(new_val)
            self._db.update({field: new_val}, where('id') == doc_id)

    def get_doc_by_prop(self, prop, expected_value):
        """ get documents by the specified property """
        entry = self._db.get(where(prop) == expected_value)
        return entry

    def get_all_entries(self):
        return self._db.all()

    def get_doc_ids(self):
        """ returns all the ids of documents that the user has added """
        doc_ids = []
        for entry in self._db.all():
            doc_ids.append(entry['id'])
        return doc_ids

    def get_file_names(self):
        """ returns all the file names of documents that the user has added """
        file_names = []
        for entry in self._db.all():
            file_names.append(entry['file_name'])
        return file_names

    def get_names(self):
        """ returns all the names of documents that the user has added """
        file_names = []
        for entry in self._db.all():
            file_names.append(entry['name'])
        return file_names

    def get_doc_name(self, file_name):
        """ returns the file name of a document for a given file path """
        entry = self._db.get(where("file_name") == file_name)
        if entry:
            return entry['name']
        else:
            return None

    def get_doc_locales(self, file_name):
        """ returns the target locales of a document for a given file """
        locales = []
        entry = self._db.get(where("file_name") == file_name)
        if entry:
            locales.append(entry['locales'])
        return locales

    def get_doc_downloads(self, file_name):
        """ returns all the downloaded translations for a given file """
        # Returns None (implicitly) when no record exists for file_name.
        entry = self._db.get(where("file_name") == file_name)
        if entry:
            downloads = entry['downloaded']
            return downloads

    def remove_element(self, doc_id):
        self._db.remove(where('id') == doc_id)

    def clear_prop(self, doc_id, prop):
        """ Clear specified property of a document according to its type """
        entry = self._db.get(where('id') == doc_id)
        # Reset to the empty value matching the property's current type.
        if isinstance(entry[prop],str):
            self.update_document(prop,"",doc_id)
        elif isinstance(entry[prop],int):
            self.update_document(prop,0,doc_id)
        elif isinstance(entry[prop],list):
            self.update_document(prop,[],doc_id)
        elif isinstance(entry[prop],dict):
            self.update_document(prop,{},doc_id)

    def remove_element_in_prop(self, doc_id, prop, element):
        # Remove *element* from a list-valued property, then persist.
        doc_prop = self.get_doc_by_prop('id', doc_id)[prop]
        if element in doc_prop:
            doc_prop.remove(element)
        self.update_document(prop, doc_prop, doc_id)

    def add_element_to_prop(self, doc_id, prop, element):
        # Append *element* to a list-valued property (no duplicates), persist.
        doc_prop = self.get_doc_by_prop('id',doc_id)[prop]
        if element not in doc_prop:
            doc_prop.append(element)
        self.update_document(prop, doc_prop, doc_id)

    def clear_all(self):
        self._db.purge()
class ThoughtCache(object):
    """
    Quickly and simply cache python objects (inc. functions and classes)
    into a NoSQL database for later tag-based retrieval
    """
    def __init__(self, path='tc_db.json', encoder=jsonpickle.encode,
                 decoder=jsonpickle.decode):
        """
        Initialize. If specifying encoder and decoder, it must accept a
        signature of f(o). If not, use `partial`, lambda, etc. to wrap it.
        :param path: path to TinyDB json database
        :param encoder: function to encode objects
        :param decoder: function to decode objects
        :return: None
        """
        self.db = TinyDB(path)
        self.encode, self.decode = encoder, decoder
        # Tag usage counts; rebuilt on demand by update_tags().
        self._tags = collections.Counter()

    def store(self, o, metadata):
        """
        Store object `o` in the database as `metadata['name']`.
        `metadata['name']` must be provided, but need not be unique.

        If no UUID is provided in the metadata, a random one is created
        and used. UUIDs are the authoritative source of identity, so they
        are the only basis on which a record will be overwritten without
        regard to any other fields. If an existing UUID is specified,
        *ALL* fields will be overwritten, not only the ones specified.

        UUID may be a valid UUID string or None. If you don't intend to
        overwrite an existing record, simply remove the UUID key from the
        dict entirely, or leave as None.

        :param o: The actual object to be stored.
        :param metadata: dict of metadata with the following format:
            {
            'time':time.time(),
            'location':'work',
            'tag':['general networking'],
            'name':'ExtendedPingResult'
            # 'name' is mandatory because it cannot be reliably inferred at runtime.
            'uuid':'85280d8e-66bf-4e65-814f-507a65c0375c'
            }
        :return:
        """
        store = self._store
        if 'uuid' not in metadata:
            metadata['uuid'] = str(uuid.uuid4())
        elif self._test_uuid(metadata['uuid']):
            # Known UUID: switch to the overwrite path.
            store = self._update
        metadata['json'] = self.encode(o)
        store(metadata)

    def retrieve_json(self, metadata_filter):
        """
        Retrieve list of JSON objects that match the given filter. If uuid
        is specified, all other criteria are ignored.

        All non-uuid fields that are specified are, unless otherwise
        noted, treated as a series of 'OR' clauses that are 'AND'ed
        together. `if (location in locations) and (tag in tags): ` etc...

        :param metadata_filter: dict of values to filter against.
            Currently supported:
            {
            'time': day, month, year or range #NOT IMPLEMENTED
            'location': 'work', 'home', etc...
            'name': where('name').contains Value. Case Sensitive
            'tag': single entry or list of entries
            'uuid': single entry or list of entries
            }
        :return: list of JSON strings representing the objects
        """
        # Query construction is delegated to the module-level helper.
        query = metadata_filter_to_query(metadata_filter)
        return [x['json'] for x in sequenceify(self.db.search(query))]

    def retrieve(self, metadata_filter):
        """
        Returns reconstructed objects that match filter.
        :param metadata_filter: See ThoughtCache.retrieve_json.__doc__
        :return:
        """
        json_list = self.retrieve_json(metadata_filter)
        return list(map(self.decode, json_list))

    def tag_search(self, *tags):
        """
        Given a string or list of strings, return entries that with one or
        more matching tags (if A in B)
        :param tags: string or list of strings
        :return: list of json strings
        """
        metadata_filter = {'tag':sequenceify(tags)}
        return self.retrieve(metadata_filter)

    def update_tags(self):
        """
        updates internal tag Counter.
        :return: None
        """
        self._tags.clear()
        elements = self.db.search(where('tag'))
        for element in elements:
            self._tags.update(element['tag'])

    @property
    def tags(self):
        """
        Returns list of all tags currently in use in the database
        :return: list of tags (str).
        """
        self.update_tags()
        return list(self._tags.keys())

    def matching_tags(self, substr):
        """
        Returns list of all tags currently in use in the database that
        contain substr.
        :param substr: substring to search for. Case insensitive.
        :return: list of tags (str).
        """
        # NOTE(review): substr itself is not lowercased, so uppercase input
        # never matches despite the "case insensitive" doc -- confirm.
        return [x for x in self.tags if (x.lower().find(substr) > -1)]

    def _test_uuid(self, o_uuid):
        """
        Tests for the existence of a provided object UUID string in the
        database.
        :param o_uuid: object UUID string
        :return: True if UUID exists, else False
        """
        i = self.db.count(where('uuid') == o_uuid) # number of matching queries
        # NOTE(review): this message is never interpolated -- "{o_uuid}"
        # prints literally; it needs .format(o_uuid=o_uuid) or an f-string.
        # Also, assert is stripped under -O, disabling the uniqueness check.
        assert i < 2, "uuid {o_uuid} is not unique in database!"
        return bool(i)

    def _store(self, metadata):
        """
        Store new entry into db using provided metadata
        :param json: JSON Object
        :param metadata: Dict of metadata. Must include 'uuid', 'name', and 'json'.
        :return:
        """
        self.db.insert(metadata)

    def _update(self, metadata):
        """
        update existing db record. Preserves TinyDB behavior: overwrites
        existing fields, adds new fields, does not delete fields.
        :param metadata: Dict of metadata, must include 'name', and 'uuid', and 'json'
        :return:
        """
        element = self.db.get(where('uuid') == metadata['uuid'])
        self.db.update(metadata, eids=[element.eid])
from tinydb import TinyDB, where db = TinyDB('/home/Ajnin123/mysite/tinydb.json') db.insert({'user': '******', 'pass': '******'}) print db.get(where('user') == 'Ajnin123')['pass']
class Experiment:
    """
    A class to encapsulate information about a experiment and store together
    different experiment runs.

    The recommended way to identify an experiment is overwriting
    Experiment.get_id in a child class in a way that is dependent from the
    source file. Example:

        class MyExperiment(Experiment):
            def get_id(self):
                return inspect.getsourcefile(self)  # return __file__

    Alternatively, one can pass an id as a constructor parameter. If no id is
    passed, the default behaviour is to generate a random id (meaning a new
    experiment is created for each run).

    Experiments are instantiated with the classmethod Experiment.new, which
    additionally stores the experiment in the database with useful metadata,
    or Experiment.use, which additionally makes sure that the experiment is
    stored only the first time.

    Actual experiments are run on models. To add a model to the current
    experiment, instantiate the inner class Model with Experiment.model.

        model = Experiment.use(path).model("modelId", {"type": "SVM"})

    Model instantiates and encapsulates model information, providing database
    persistency. A model_id is required to identify the model. It also
    provides convenience methods to store experiment results for both
    single-result and epoch-based training experiments. See Model.session.

    Parameters:
    -----------
    path : str
        Path to the database file backend. A path in a remote machine can be
        specified with syntax: username@host:/path/to/remote/file.
    """
    def __init__(self, path, exp_id=None, verbose=False):
        assert path, "Path cannot be the empty string"
        self.level = logging.WARN if verbose else logging.NOTSET
        try:
            from sftp_storage import SFTPStorage, WrongPathException
            try:
                self.db = TinyDB(path, policy='autoadd', storage=SFTPStorage)
                log("Using remote db file [%s]" % path, level=self.level)
            except WrongPathException:
                # path did not parse as user@host:/path -> fall back to local
                self.db = TinyDB(path)
        except Exception:
            # BUG FIX: was a bare `except:`, which also swallows SystemExit/
            # KeyboardInterrupt; only catch real errors (e.g. the ImportError
            # raised when sftp_storage is not installed).
            self.db = TinyDB(path)
            log("Using local file [%s]" % path, level=self.level)
        self.git = GitInfo(self.getsourcefile())
        self.id = exp_id if exp_id else self.get_id()

    def get_id(self):
        """Default experiment id: a fresh random hex uuid per run."""
        return uuid4().hex

    def getsourcefile(self):
        """Source file where the (possibly subclassed) experiment lives."""
        return utils.getsourcefile(lambda: None)

    def exists(self):
        """Return the stored experiment record, or None if not stored yet."""
        return self.db.get(where("id") == self.id)

    def add_tag(self, tag):
        """Append `tag` to this experiment's stored tag list."""
        self.db.update(extend("tags", tag), where("id") == self.id)

    def remove_tag(self, tag):
        """Remove `tag` from this experiment's stored tag list."""
        return self.db.update(remove("tags", tag), where("id") == self.id)

    def get_models(self):
        """Return the models stored for this experiment ({} if none)."""
        experiment = self.db.get(where("id") == self.id)
        return experiment.get("models") if experiment else {}

    @classmethod
    def new(cls, path, exp_id=None, tags=(), **params):
        """
        Stores a new Experiment in the database. Throws an exception if
        experiment already exists.
        """
        exp = cls(path, exp_id=exp_id)
        if exp.exists():
            raise ValueError("Experiment %s already exists" % str(exp.id))
        now, exp_id = str(datetime.now()), exp_id or exp.id
        base = {"id": exp_id, "tags": tags, "models": [], "created": now}
        exp.db.insert(utils.merge(base, params))
        return exp

    @classmethod
    def use(cls, path, exp_id=None, tags=(), **params):
        """
        Stores a new Experiment if none can be found with given parameters,
        otherwise instantiate the existing one with data from database.
        """
        exp = cls(path, exp_id=exp_id)
        if exp.exists():
            return exp
        else:
            log("Creating new Experiment %s" % str(exp.id))
            return cls.new(path, exp_id=exp_id, tags=tags, **params)

    def model_exists(self, model_id):
        """
        Returns:
        --------
        dict or None
        """
        return self.db.get(
            (where("id") == self.id) &
            where("models").any(where("modelId") == model_id))

    def model(self, model_id, model_config=None):
        """Instantiate (and lazily persist) a Model for this experiment."""
        # BUG FIX: the default used to be a shared mutable dict ({}); use the
        # None sentinel so no state can leak across calls.
        config = model_config if model_config is not None else {}
        return self.Model(self, model_id, {"config": config})

    class Model:
        def __init__(self, experiment, model_id, model_config):
            self._session_params = None  # set while a session is running
            self.e = experiment
            self.model_id = model_id
            self.which_model = model_pred(self.model_id)
            self.cond = ((where("id") == experiment.id) &
                         where("models").any(where("modelId") == model_id))
            if not self.exists():
                self._add_default_model(**model_config)

        def _add_default_model(self, **kwargs):
            """Insert a bare model record for this model_id."""
            model = utils.merge({"modelId": self.model_id}, kwargs)
            self.e.db.update(append("models", model), where("id") == self.e.id)

        def _result_meta(self):
            """Provenance metadata attached to every stored result."""
            return {"commit": self.e.git.get_commit() or "not-git-tracked",
                    "branch": self.e.git.get_branch() or "not-git-tracked",
                    "user": getuser(),
                    "platform": platform(),
                    "timestamp": str(datetime.now())}

        def _check_params(self, params):
            """Raise if a session with these exact params was already stored."""
            models = self.e.get_models()
            if not models:
                return
            model = next(m for m in models if m["modelId"] == self.model_id)
            for result in model.get("sessions", []):
                if result["params"] == params:
                    raise ExistingModelParamsException()

        def _add_result(self, result, params):
            """ Add session result (new) """
            meta = self._result_meta()
            result = {"params": params, "meta": meta, "result": result}
            path = ["models", self.which_model, "sessions"]
            self.e.db.update(append_in(path, result), self.cond)

        def _add_session_result(self, result, index_by=None):
            """
            Adds (partial) result to session currently running. Session is
            identified based on session `params`.
            In case a model is run with the same params in a second session,
            results are added to the chronologically last session (which means
            that we rely on the fact that `update_in` checks lists in reverse,
            see `update_in`)

            Parameters:
            -----------
            result : (serializable-)dict
            index_by : serializable, optional
                Key to store result by. `result` is appended to
                session.result.index_by if given, or to session.result
                otherwise.
            """
            which_session = params_pred(self._session_params)
            # BUG FIX: `([index_by] or [])` was always truthy (a one-element
            # list, even [None]), so a None key was appended to the path;
            # only extend the path when index_by is actually given.
            path = (["models", self.which_model, "sessions", which_session,
                     "result"] + ([index_by] if index_by is not None else []))
            self.e.db.update(append_in(path, result), self.cond)

        def _start_session(self, params):
            """Record session params and persist the session skeleton."""
            self._session_params = params
            path = ["models", self.which_model, "sessions"]
            result = {"params": params, "meta": self._result_meta()}
            self.e.db.update(append_in(path, result), self.cond)

        def _end_session(self):
            self._session_params = None

        def exists(self):
            return self.e.model_exists(self.model_id)

        @contextlib.contextmanager
        def session(self, params, ensure_unique=True):  # TODO: store on exit
            """
            Context manager for cases in which we want to add several results
            to the same experiment run. Current session is identified based
            on `params` (see _add_session_result).

            Example:
                model_db = Experiment.use("test.json").model("id")
                with model_db.session({"param-1": 10, "param-2": 100}) as session:
                    from time import time
                    start_time = time()
                    svm.fit(X_train, y_train)
                    end_time = time()
                    session.add_meta({"duration": end_time - start_time})
                    y_pred = svm.predict(X_test)
                    session.add_result({"accuracy": accuracy(y_pred, y_true)})

            Parameters:
            -----------
            params: dict, parameters passed in to the model instance
            ensure_unique: bool, throw an exception in case model has already
                been run with the same parameters
            """
            assert isinstance(params, dict), \
                "Params expected dict but got %s" % str(type(params))
            if ensure_unique:
                self._check_params(params)
            self._start_session(params)
            try:
                yield self
            finally:
                # BUG FIX: an exception inside the with-block used to leave
                # the session open; always reset it.
                self._end_session()

        def add_meta(self, d):
            """
            Adds session meta info

            Parameters:
            -----------
            d: dict, Specifies multiple key-val additional info for the session
            """
            if not self._session_params:
                raise ValueError("add_meta requires session context manager")
            if not isinstance(d, dict):
                raise ValueError("add_meta input must be dict")
            if not self.exists():
                self._add_default_model()
            which_session = params_pred(self._session_params)
            path = ["models", self.which_model, "sessions", which_session,
                    "meta"]
            self.e.db.update(assign_in(path, d), self.cond)

        def add_result(self, result, params=None, index_by=None):
            """ appends result to models.$.sessions.$.result """
            if not params and not self._session_params:
                raise ValueError("Experiment params missing")
            if not self._session_params:
                self._add_result(result, params)
            else:
                self._add_session_result(result, index_by=index_by)

        def add_epoch(self, epoch_num, result, timestamp=True):
            """Append an epoch result (optionally timestamped) to the session."""
            if not self._session_params:
                raise ValueError("add_epoch requires session context manager")
            # NOTE(review): this mutates the caller's `result` dict in place —
            # confirm callers do not reuse the dict across epochs.
            result.update({"epoch_num": epoch_num})
            if timestamp:
                result.update({"timestamp": str(datetime.now())})
            self._add_session_result(result, index_by="epochs")
def star(eid):
    """
    Toggle the 'starred' flag of the record with the given TinyDB element id.

    :param eid: TinyDB element id of the record to toggle
    :return: the literal string 'OK'
    """
    db = TinyDB(DB_FILENAME)
    try:
        # NOTE(review): db.get returns None for an unknown eid, making the
        # subscript below raise TypeError — confirm callers guarantee a
        # valid eid.
        result = db.get(eid=eid)
        db.update({'starred': not result['starred']}, eids=[eid])
    finally:
        # BUG FIX: the handle used to leak when get/update raised; always
        # close the database.
        db.close()
    return 'OK'
class StarredDB(object):
    """
    TinyDB-backed store of starred repositories, with inverted indexes on
    language and keywords kept in an 'index' table for fast searching.
    """

    def __init__(self, my_stars_home, mode):
        self._db = TinyDB(os.path.join(my_stars_home, 'mystars.db'),
                          storage=CachingMiddleware(JSONStorage))
        # 't' (truncate) mode wipes every table before use
        if mode == 't':
            self._db.purge_tables()
        self._idx = self._db.table('index')
        # Seed the two inverted-index documents on first run
        if not self._idx.contains(Query().name == 'language'):
            self._idx.insert({
                'name': 'language',
                'docs': {}
            })
        if not self._idx.contains(Query().name == 'keyword'):
            self._idx.insert({
                'name': 'keyword',
                'docs': {}
            })

    def __enter__(self):
        return self

    def __exit__(self, exception_type, exception_value, traceback):
        # flushes the CachingMiddleware and closes the backing file
        self._db.close()

    def _get_index_docs(self, name):
        """Return the docs mapping of the inverted index called `name`."""
        return self._idx.get(Query().name == name).get('docs', {})

    def update(self, repo_list):
        """
        Store repos and rebuild the language/keyword inverted indexes.
        repo_list[0] is remembered as the latest repo. No-op if empty.
        """
        if repo_list:
            self._db.table('latest_repo').purge()
            self._db.table('latest_repo').insert(repo_list[0])
            language_docs = self._get_index_docs('language')
            keyword_docs = self._get_index_docs('keyword')
            for repo in repo_list:
                # save repo data
                doc_id = self._db.insert(repo)
                # update index
                name = repo.get('name')
                language = repo.get('language')
                description = repo.get('description')
                if language:
                    for lang in language.split():
                        update_inverted_index(language_docs, lang.lower(), doc_id)
                keywords = split_repo_name(name)
                if description:
                    keywords += split_repo_desc(description)
                for keyword in split_keywords(keywords):
                    update_inverted_index(keyword_docs, keyword.lower(), doc_id)
            self._idx.update(operations.set('docs', language_docs),
                             Query().name == 'language')
            self._idx.update(operations.set('docs', keyword_docs),
                             Query().name == 'keyword')

    def get_latest_repo_full_name(self):
        """Full name of the most recently stored repo, or None."""
        latest_repo = self._db.table('latest_repo').all()
        if len(latest_repo) > 0:
            return latest_repo[0].get('full_name')

    def search(self, languages, keywords):
        """
        Return repos matching any of `languages` and/or all of `keywords`
        (keyword terms are intersected), sorted by doc id.
        """
        # self._build_index()
        language_docs = self._get_index_docs('language')
        keyword_docs = self._get_index_docs('keyword')
        # BUG FIX: the guard tested `language_docs` twice, so an empty
        # keyword index was never detected; test both indexes.
        if not language_docs and not keyword_docs:
            raise EmptyIndexWarning('empty index')
        language_results = []
        if languages:
            for language in languages:
                language_results += language_docs.get(language.lower(), [])
        keywords_results = []
        if keywords:
            for keyword in keywords:
                for term in split_repo_name(keyword):
                    results = keyword_docs.get(term.lower(), [])
                    keywords_results.append(results)
        if languages and keywords:
            # python > 2.6
            search_results = list(
                set(language_results).intersection(*keywords_results))
        else:
            if len(keywords_results) > 1:
                # python > 2.6
                final_keywords_results = list(
                    set(keywords_results[0]).intersection(*keywords_results[1:]))
            else:
                final_keywords_results = []
                for results in keywords_results:
                    for r in results:
                        final_keywords_results.append(r)
            search_results = language_results + final_keywords_results
        # remove duplicates then sort by id
        search_results = sorted(list(set(search_results)), key=int)
        return [self._db.get(doc_id=doc_id) for doc_id in search_results]