def __init__(self, container: Container, **kwargs):
    """Wire a TinyDB table into the container.

    Args:
        container: DI container this integration belongs to.
        **kwargs: forwarded to ``tinydb.TinyDB``; recognised extras are
            ``path`` (JSONStorage file, also used as the cache key),
            ``table_name``/``qualifier`` (required: table to open) and
            ``storage`` (defaults to MemoryStorage when no path is given).

    Raises:
        IntegrationNotFoundError: if the ``tinydb`` package is not installed.
        ConfigError: if neither ``table_name`` nor ``qualifier`` is provided.
    """
    if not tinydb:  # pragma: no cover
        raise IntegrationNotFoundError('tinydb')
    self._container = container
    # NOTE: 'path' is read with get() (not popped) so it is still forwarded
    # to tinydb.TinyDB(**kwargs) below for JSONStorage.
    self._path = kwargs.get('path')
    """
    Path is needed to be specified when TinyDb gets JSONStorage (default one).
    We use value of path for identifying different DBs of TinyDb.
    If the path is not specified, we can assume that the intention is to use
    MemoryStorage (if not explicitly defined) and not to cache the TinyDb
    instance.
    """
    # 'table_name' wins over 'qualifier'; both are popped so TinyDB never
    # sees them.
    self._table_name = kwargs.pop('table_name', None) or kwargs.pop(
        'qualifier', None)
    if not self._table_name:
        raise ConfigError(code='NO-TABLE-NAME-PROVIDED')
    if self._path:
        # File-backed databases are cached per path so integrations sharing
        # one JSON file reuse a single TinyDB handle.
        if self._path not in self._db_cache:
            self._db_cache[self._path] = tinydb.TinyDB(**kwargs)
        self._db: tinydb.TinyDB = self._db_cache[self._path]
    else:
        # No path: assume in-memory unless the caller chose a storage.
        if 'storage' not in kwargs:
            kwargs['storage'] = tinydb.storages.MemoryStorage
        self._db = tinydb.TinyDB(**kwargs)
    self._table: tinydb.database.Table = self._db.table(self._table_name)
def load_data(dbpath):
    """Open the TinyDB database at *dbpath*.

    A path of ``'-'`` reads a JSON dump from stdin into an in-memory
    database; any other path is opened read-only from disk.
    """
    if str(dbpath) != '-':
        return tinydb.TinyDB(dbpath, access_mode='r')
    memory_db = tinydb.TinyDB(storage=tinydb.storages.MemoryStorage)
    memory_db.storage.write(json.load(sys.stdin))
    return memory_db
def bwEvent(event):
    """Populate the ``ref_mode`` combobox with the modulations available
    for the currently selected equipment/band/bandwidth/card combination.

    Reads the Tk widgets ``cb0`` (XPIC yes/no), ``cb1`` (equipment table),
    ``carde`` (card model pattern), ``fe`` (frequency band) and ``cpe``
    (channel bandwidth).  Returns the sorted modulation list.
    """
    # XPIC mode uses a dedicated database file.
    if cb0.get() == 'Yes':
        db = tinydb.TinyDB('db_huawei_XPIC.json')
    else:
        db = tinydb.TinyDB('db_huawei.json')
    ref_mode.delete(0, 'end')
    table = db.table(str(cb1.get()))
    modulations = []
    # Hoist loop-invariant widget reads and pre-compile the model pattern
    # (previously re-read / re-compiled on every row).
    pattern = re.compile('(' + str(carde.get()) + ')')
    wanted_band = str(fe.get())
    wanted_bw = str(cpe.get())
    for row in table:
        modulation = row['MODULATION_TYPE']
        freq = str(row['BAND_DESIGNATOR'])
        bandwidth = str(row['BANDWIDTH'])
        # Idiomatic identity test for None (was `!= None`).
        if (pattern.search(str(row['MODEL'])) is not None
                and freq == wanted_band
                and bandwidth == wanted_bw
                and modulation not in modulations):
            modulations.append(modulation)

    def sortMod(mod):
        # Map PSK names onto their QAM-equivalent constellation size so a
        # single numeric key sorts every modulation string.
        if re.match('BPSK', str(mod)):
            mod = '2QAM'
        if re.match('QPSK', str(mod)):
            mod = '4QAM'
        if re.match('8PSK', str(mod)):
            mod = '8QAM'
        return int(str(mod).split('QAM')[0])

    modulations.sort(key=sortMod)
    ref_mode.config(value=modulations)
    return modulations
def __init__(self, container: Container, **kwargs):
    """Attach a TinyDB-backed table to *container*.

    ``path`` selects a JSONStorage file and doubles as the cache key for
    sharing TinyDB handles between instances; without it an in-memory
    storage is assumed and the instance is not cached.  A table name must
    arrive via ``table_name`` or ``qualifier``.
    """
    if not tinydb:  # pragma: no cover
        raise IntegrationErrors.NOT_FOUND.with_params(target="tinydb")
    self._container = container
    # 'path' is deliberately read with get() so it stays inside kwargs and
    # is forwarded to tinydb.TinyDB for JSONStorage.
    self._path = kwargs.get("path")
    table_name = kwargs.pop("table_name", None)
    if not table_name:
        table_name = kwargs.pop("qualifier", None)
    self._table_name = table_name
    if not self._table_name:
        raise IntegrationErrors.NO_TABLE_NAME_PROVIDED
    if not self._path:
        # In-memory database: honour an explicit storage, default otherwise.
        kwargs.setdefault("storage", tinydb.storages.MemoryStorage)
        self._db = tinydb.TinyDB(**kwargs)
    else:
        # One shared TinyDB handle per file path.
        if self._path not in self._db_cache:
            self._db_cache[self._path] = tinydb.TinyDB(**kwargs)
        self._db: tinydb.TinyDB = self._db_cache[self._path]
    self._table: tinydb.database.Table = self._db.table(self._table_name)
def __init__(self, db_path, create_new=False):
    """Open (or create) the account database at *db_path*.

    Enforces a process-wide singleton via ``DB.singleton_db_obj``.

    Args:
        db_path: path of the TinyDB JSON file.
        create_new: create the file (with fresh tables) when it is missing.

    Raises:
        RuntimeError: if a DB instance already exists.
        ValueError: if the file is missing (and *create_new* is false) or
            lacks a required table.
    """
    if DB.singleton_db_obj:
        raise RuntimeError("An instance of DB already exist!")
    if db_path and os.path.exists(db_path):
        self.db = tinydb.TinyDB(db_path)
        if ACCOUNTS_TABLE_NAME in self.db.tables() and \
                DEFAULT_TABLE_NAME in self.db.tables():
            self.accounts_table = self.db.table(ACCOUNTS_TABLE_NAME)
            self.default_table = self.db.table(DEFAULT_TABLE_NAME)
        else:
            # Close before raising so the file handle is not leaked.
            self.db.close()
            # BUGFIX: old message claimed only 'accounts' was missing even
            # when the default table was the absent one.
            raise ValueError("Invalid DB, missing '%s' or '%s' table!"
                             % (ACCOUNTS_TABLE_NAME, DEFAULT_TABLE_NAME))
    elif db_path and create_new:
        # BUGFIX: the two literals previously concatenated without a space
        # ("missing,and create_new=...").
        logger.info("DB file (%s) is missing, "
                    "and create_new=%s, hence creating new DB!!"
                    % (db_path, create_new))
        self.db = tinydb.TinyDB(db_path)
        self.accounts_table = self.db.table(ACCOUNTS_TABLE_NAME)
        self.default_table = self.db.table(DEFAULT_TABLE_NAME)
    else:
        raise ValueError("Unexpected dbindex file (%s) is missing" % db_path)
    self.db_path = db_path
    self.account_obj_list = {}
    DB.singleton_db_obj = self
def __init__(self):
    """Open the device and counter databases, creating the ``db``
    directory on first run."""
    # exist_ok replaces the old try/mkdir/except-OSError-pass dance and no
    # longer hides real failures such as PermissionError.
    os.makedirs('db', exist_ok=True)
    self.devices = tinydb.TinyDB('db/devices.json')
    self.counters = tinydb.TinyDB('db/counters.json')
def XPICstate(event):
    """Clear every dependent input widget and reload the equipment list
    from the Huawei database matching the current XPIC selection."""
    for widget in (thr, cb1, fe, carde, cpe, ref_mode):
        widget.delete(0, 'end')
    db_file = 'db_huawei_XPIC.json' if cb0.get() == 'Yes' else 'db_huawei.json'
    db = tinydb.TinyDB(db_file)
    cb1.config(value=list(db.tables()))
def AMstate(event):
    """Clear every dependent input widget and reload the equipment list
    from the Ericsson database matching the current AM selection."""
    for widget in (thr, cb1, fe, carde, cpe, ref_mode):
        widget.delete(0, 'end')
    db_file = 'db_ericsson_AM.json' if cb0.get() == 'Yes' else 'db_ericsson.json'
    db = tinydb.TinyDB(db_file)
    cb1.config(value=list(db.tables()))
def __init__(self, parser):
    """Open the user database described by *parser*.

    In demo mode an in-memory database is seeded from the JSON file at
    ``parser.db_demo_path``; otherwise the file at ``parser.db_path`` is
    opened directly.
    """
    if parser.db_demo:
        self._db_user = tinydb.TinyDB(storage=tinydb.storages.MemoryStorage)
        # Seed the fake database with the bundled demo records.
        with open(parser.db_demo_path, encoding='utf-8') as demo_user_file:
            demo_ddb_user = json.load(demo_user_file)
        # insert_multiple writes storage once instead of once per record.
        self._db_user.insert_multiple(demo_ddb_user)
    else:
        self._db_user = tinydb.TinyDB(parser.db_path)
    self._query_user = tinydb.Query()
def write(infoCases, infoTests, infoBeds):
    """Append today's statistics to the three TinyDB data files.

    :param infoCases: [total cases, total deaths, active cases, recovered]
    :param infoTests: [total tests, daily tests, remaining tests]
    :param infoBeds: [occupied, vacant]
    :return: None
    """
    # All three records share today's date stamp.
    date = datetime.datetime.now().strftime("%Y-%m-%d")
    tinydb.TinyDB('./data/dataCases.json').insert({
        'date': date,
        'totalcases': infoCases[0],
        'totaldeaths': infoCases[1],
        'activecases': infoCases[2],
        'recovered': infoCases[3],
    })
    tinydb.TinyDB('./data/dataTests.json').insert({
        'date': date,
        'totaltests': infoTests[0],
        'dailytests': infoTests[1],
        'remainingtests': infoTests[2],
    })
    tinydb.TinyDB('./data/dataBeds.json').insert({
        'date': date,
        'occupied': infoBeds[0],
        'vacant': infoBeds[1],
    })
def __init__(self, name):
    """Open the per-user money database *name*.json.

    On Android the file lives under the app's external-storage folder;
    elsewhere it sits next to the script.  ``current_money`` is restored
    from the most recent record, defaulting to 0 for a fresh database.
    """
    if platform == 'android':
        self.db = tinydb.TinyDB(
            '/storage/emulated/0/Android/data/com.moneymanager/'
            + name + '.json')
    else:
        self.db = tinydb.TinyDB(name + '.json')
    records = self.db.all()
    # Only the last record matters; the old code built a list of every
    # 'current_money' value just to take [-1], and compared against []
    # instead of using truthiness.
    self.current_money = records[-1]['current_money'] if records else 0
def getThrList():
    """Fill the global ``modulation_level`` mapping with the receiver
    threshold for every modulation in the current UI selection.

    Reads the Tk widgets ``cb0``/``cb1``/``fe``/``carde``/``cpe`` and
    delegates the per-modulation lookup to ``getRxThr``.
    """
    # NOTE(review): `user` is never used below — candidate for removal.
    user = tinydb.Query()
    if cb0.get() == 'Yes':
        db = tinydb.TinyDB('db_huawei_XPIC.json')
    else:
        db = tinydb.TinyDB('db_huawei.json')
    equip = str(cb1.get())
    freq = fe.get()
    card = str(carde.get())
    bw = cpe.get()
    # bwEvent re-derives the modulation list from the same widgets;
    # `Event` is presumably a module-level placeholder event — confirm.
    modulations = bwEvent(Event)
    # NOTE(review): `freq`, `card`, `bw` and `table` are computed but never
    # used here (bwEvent reads the widgets itself) — confirm before removing.
    table = db.table(equip)
    for mod in modulations:
        modulation_level[mod] = getRxThr(mod)
def init_ecaldb(config):
    """ one-time set up of primary database file """
    ans = input('(Re)create main ecal JSON file? Are you really sure? (y/n) ')
    if ans.lower() != 'y':
        exit()

    f_db = config['ecaldb']  # for pgt, should have one for each detector
    if os.path.exists(f_db):
        os.remove(f_db)

    # Build the database in memory, then pretty-print it to disk below.
    db_ecal = db.TinyDB(storage=MemoryStorage)
    query = db.Query()

    # Metadata table recording the provenance of this calibration file.
    db_ecal.table('_file_info').insert({
        "system": config['system'],
        "cal_type": "energy",
        "created_gmt": datetime.utcnow().strftime("%m/%d/%Y, %H:%M:%S"),
        "input_table": config['input_table'],
    })

    pmd.write_pretty(db_ecal.storage.read(), f_db)

    # Echo the freshly written file for a visual sanity check.
    with open(f_db) as f:
        print(f.read())
def check_token(username, token):
    """Validate a time-based one-time *token* for *username*.

    Recomputes the user's token chain for the current minute and accepts
    *token* only if it appears in the chain after the last token already
    consumed (preventing replay of older tokens).  On success the token is
    recorded as used.

    Returns True when the token is accepted, False otherwise.
    """
    # Read the users file.
    users_data = tinydb.TinyDB(USERS_FILE)
    # Look the user up by name.
    user_db = tinydb.Query()
    user_search = users_data.search(user_db["username"] == username)
    if len(user_search) == 0:
        # Unknown user.
        return False
    user = user_search[0]
    # Generate candidate tokens: a hash chain seeded from the stored
    # password seed, the salt and the current minute.
    # NOTE(review): datetime.now() is naive local time — confirm both the
    # generator and the validator agree on the timezone.
    tokens = []
    prev_token = user["seed_pw"]
    prev_token += user["token_salt"] + datetime.datetime.now().strftime(
        "%Y%m%d%H%M")
    for i in range(5):
        # Each link is the first 6 hex chars of the SHA-256 of the previous.
        sha = hashlib.sha256()
        sha.update(prev_token.encode("utf-8"))
        prev_token = sha.hexdigest()[:6]
        if prev_token == user["last_token"]:
            # Stop before re-accepting a token that was already consumed.
            break
        tokens.append(prev_token)
    # Accept the supplied token only if it is one of the fresh candidates.
    if token in tokens:
        users_data.update({"last_token": token},
                          user_db["username"] == username)
        return True
    return False
def __init__(self, filename, clean=False):
    """
    :param filename: name of catalogue file
    :type filename: :class:`str`
    :param clean: if set, catalogue is deleted, to be re-populated from scratch
    :type clean: :class:`bool`

    If a new database is created, a ``_dbinfo`` table is added with
    version & module information to assist backward compatibility.
    """
    self.filename = filename
    # Derive a safe identifier from the file's base name: anything outside
    # [a-z0-9._-+] is collapsed to an underscore (case-insensitive).
    self.name = re.sub(
        r'[^a-z0-9\._\-+]', '_',
        os.path.splitext(os.path.basename(filename))[0],
        flags=re.IGNORECASE,
    )
    if clean and os.path.exists(self.filename):
        with open(self.filename, 'w'):
            pass  # remove file's content, then close
    self.db = tinydb.TinyDB(filename, default_table='items')
    self.items = self.db.table('items')
    if self._dbinfo_name not in self.db.tables():
        # info table does not exist; database is new — record provenance.
        self._dbinfo_table.insert({
            'module': type(self).__module__,
            'name': type(self).__name__,
            'ver': self._version,
            'lib': 'cqparts',
            'lib_version': __version__,
        })
def _downloadLocationsDatabase():
    """Return a TinyDB handle for the locations database, creating the
    containing folder and an empty file on first use."""
    if not DBFOLDER.path.exists():
        os.makedirs(DBFOLDER.pathAsString())
    db_file = DBFILE.pathAsString()
    if not os.path.isfile(db_file):
        # Touch the file so TinyDB starts from an empty database.
        with open(db_file, 'w'):
            pass
    return tinydb.TinyDB(db_file)
def ID(self):
    '''
    Returns an identifier of the building configuration as string.
    '''
    if self._ID is None:
        # Results database shared by all building configurations.
        db = tinydb.TinyDB(os.path.join(tsib.data.PATH, "results", "db.json"))

        # check if building exists in database
        def predicate(obj, requirements):
            # True when every (key, value) requirement matches obj exactly.
            for k, v in requirements.items():
                if k not in obj or obj[k] != v:
                    return False
            return True

        # avoid json data format conflict with numpy
        db_entry = {}
        for field, obj in self.IDentries.items():
            if isinstance(obj, np.generic):
                # Convert numpy scalars to native Python types for JSON.
                db_entry[field] = obj.item()
            else:
                db_entry[field] = obj

        # request db entry
        db_id = db.get(lambda obj: predicate(obj, db_entry))
        if db_id:
            logging.info('Building already exists under ID: ' + str(db_id) + '. If you do not want to overwrite the results, define a separate ID.')
        else:
            # New configuration: insert it and use the resulting document id.
            db_id = db.insert(db_entry)
        self._ID = db_id
    return self._ID
def getThreshDB():
    """ ./chan-sel.py -getThreshDB
    Just an example of getting all threshold values (accounting for
    sub-bIdx's) from the DB.
    """
    calDB = db.TinyDB("%s/calDB-v2.json" % dsi.latSWDir)
    pars = db.Query()
    bkg = dsi.BkgInfo()

    # loop over datasets
    # for ds in [0,1,2,3,4,5,6]:
    for ds in [6]:
        # Non-integer dataset labels fall back to 5 — confirm intent.
        dsNum = ds if isinstance(ds, int) else 5
        # NOTE(review): goodChans is computed but never used below.
        goodChans = det.getGoodChanList(dsNum)
        for bkgIdx in bkg.getRanges(ds):
            # ==== loop over sub-ranges (when TF was run) ====
            rFirst, rLast = bkg.getRanges(ds)[bkgIdx][0], bkg.getRanges(ds)[bkgIdx][-1]
            subRanges = bkg.GetSubRanges(ds, bkgIdx)
            if len(subRanges) == 0:
                subRanges.append((rFirst, rLast))
            for subIdx, (runLo, runHi) in enumerate(subRanges):
                # One DB record per (dataset, bkgIdx, subIdx) triple.
                key = "thresh_ds%d_bkg%d_sub%d" % (dsNum, bkgIdx, subIdx)
                thD = dsi.getDBRecord(key, False, calDB, pars)
                print(key)
                for ch in thD:
                    print(ch, ":", thD[ch])
                print("")
def wfStdParse():
    """Exploratory dump of threshold records from the calibration DB.

    NOTE(review): the first ``return`` below makes everything after it
    unreachable — the channel-parsing tail looks like kept-around scratch
    code.  If it were reached, its print format string has 7 conversion
    specifiers but is given 9 arguments and would raise TypeError.
    Confirm intent before deleting.
    """
    # NOTE(review): dsNum is never used.
    dsNum = 0
    calDB = db.TinyDB('../calDB.json')
    pars = db.Query()

    # # use a regexp to search the DB ... very handy.
    # recList = calDB.search(pars.key.matches("wfstd_ds5_*"))
    recList = calDB.search(pars.key.matches("thresh"))
    print(len(recList))
    for idx in range(len(recList)):
        key = recList[idx]['key']
        vals = recList[idx]['vals']
        print(key)
        for ch in vals:
            print(ch, vals[ch])
    return
    # --- unreachable below this point (see docstring) ---
    for ch in vals:  # simple iteration over chans
        a, b, c, d, e, base, n, m = vals[ch][3], vals[ch][4], vals[ch][
            5], vals[ch][6], vals[ch][7], vals[ch][8], vals[ch][9], vals[
                ch][10]
        # check what string format these numbers need in a TCut.
        print("%s -- %.4e %.4e %.4e %.2e %.2e %.4f" % (ch, a, b, c, d, e, base, m, n))
    return
def save_db(self, cls_ap):
    """Persist the per-class AP results as one row in the evaluation DB.

    Each mapper entry is a callable producing one (column, value) pair
    from this object and *cls_ap*.
    """
    with open(self.config.get('eval', 'mapper'), 'r') as f:
        mapper = load_mapper(f)
    # Build the row up-front so the DB file is held open only for the insert.
    row = {}
    for fn in mapper:
        column, value = fn(self, cls_ap)
        row[column] = value
    eval_db_path = utils.get_eval_db(self.config)
    with tinydb.TinyDB(eval_db_path) as db:
        db.insert(row)
def post_iqa(region):
    """Save last air quality information (index, color) into database.

    :param region: name of region.
    """
    db = tinydb.TinyDB(fndb.format(region=region), default_table='air')
    q = tinydb.Query()
    # Payload arrives as base64-encoded JSON in the 'data' form field.
    encstr = request.form['data']
    iqas = json.loads(base64.b64decode(encstr).decode('utf-8'))
    inserted = 0
    updated = 0
    # Upsert one row per (zone, typo, pol) triple.
    for zone, nfozone in iqas.items():
        for typo, nfotypo in nfozone.items():
            for pol, (val, iqa) in nfotypo.items():
                query = (q.zone == zone) & (q.typo == typo) & (q.pol == pol)
                if db.search(query):
                    # Already present: refresh value and index.
                    db.update({'val': val, 'iqa': iqa}, query)
                    updated += 1
                else:
                    db.insert(dict(zone=zone, typo=typo, pol=pol,
                                   val=val, iqa=iqa))
                    inserted += 1
    return jsonify(dict(status='ok', inserted=inserted, updated=updated))
def getDBRecord(key, verbose=False, calDB=None, pars=None):
    """View a particular database record.

    Args:
        key: DB record key to look up.
        verbose: print the record contents while building the result.
        calDB: optional open TinyDB handle (defaults to 'calDB.json').
        pars: optional tinydb.Query (created when omitted).

    Returns:
        dict mapping int channel -> value for a unique match, or 0 when the
        key is absent; prints a warning (and returns None) on duplicates.
    """
    import tinydb as db
    if calDB is None:
        calDB = db.TinyDB('calDB.json')
    if pars is None:
        pars = db.Query()

    recList = calDB.search(pars.key == key)
    nRec = len(recList)
    if nRec == 0:
        if verbose:
            print("Record %s doesn't exist" % key)
        return 0
    elif nRec == 1:
        if verbose:
            print("Found record:\n%s" % key)
        rec = recList[0]['vals']  # whole record
        # Sort the TinyDB string keys numerically (only works for integer
        # keys).  Renamed loop variable: the old code shadowed `key`.
        result = {}
        for chan in sorted(int(k) for k in rec):
            if verbose:
                print(chan, rec[u'%d' % chan])
            result[chan] = rec[u'%d' % chan]
        return result
    else:
        print(
            "WARNING: Found multiple records for key: %s. Need to do some cleanup!"
            % key)
        for rec in recList:
            # BUGFIX: iterate each record's 'vals' dict, not the record
            # itself — int('key') / int('vals') raised ValueError before.
            vals = rec['vals']
            for chan in sorted(int(k) for k in vals):
                print(chan, vals[u'%d' % chan])
            print(" ")
def test_circular_db(self):
    """CircularTable should retain only the newest ``max_size`` rows and
    keep honouring the cap after manual removals."""
    # BUGFIX: narrow the bare `except:` (which also swallowed
    # KeyboardInterrupt/SystemExit) to what os.remove can raise.
    try:
        os.remove(self.db_file)
    except OSError:
        pass
    event_db = tinydb.TinyDB(self.db_file,
                             storage=tinydb.middlewares.CachingMiddleware(
                                 tinydb.storages.JSONStorage))
    event_db.table_class = lager.CircularTable
    tbl = event_db.table('test', max_size=4)
    # Six inserts into a 4-slot table: the oldest two should be evicted.
    for n in range(1, 7):
        tbl.insert({'foo': str(n)})
    values = tbl.all()
    self.assertEqual(len(values), 4)
    # NOTE(review): `expected` was computed but never asserted against —
    # add a content comparison once element equality/order is confirmed.
    expected = [{'foo': '3'}, {'foo': '4'}, {'foo': '5'}, {'foo': '6'}]
    print(tbl.all())
    # Remove one element manually...
    four = tbl.search((tinydb.where('foo') == '4'))
    tbl.remove(eids=[four[0].eid])
    values = tbl.all()
    self.assertEqual(len(values), 3)
    print(tbl.all())
    # ...then insert again; the cap should not be exceeded.
    tbl.insert({'foo': '7'})
    expected = [{'foo': '5'}, {'foo': '6'}, {'foo': '7'}]
    values = tbl.all()
    print(tbl.all())
    self.assertEqual(len(values), 3)
def discover_devices(self):
    """Scan for nearby bluetooth devices, cache newly seen (addr, name)
    pairs in database/bluetooth.json, and store the scan result on
    ``self.discovered_bluetooth_devices``."""
    self.print_to_terminal("Looking for devices")
    cached_bluetooth_db = tinydb.TinyDB("database/bluetooth.json")
    nearby_devices = bluetooth.discover_devices(lookup_names=True)
    if not nearby_devices:
        self.print_to_terminal("failed to find any bluetooth devices")
        return
    self.print_to_terminal("found the following bluetooth devices")
    bluetooth_document = tinydb.Query()
    # Idiomatic enumerate + tuple unpacking replaces range(len(...)) and
    # manual [i][0]/[i][1] indexing.
    for i, (bluetooth_addr, bluetooth_name) in enumerate(nearby_devices):
        # Cache the address the first time we see this device name.
        if not cached_bluetooth_db.search(
                bluetooth_document.name == bluetooth_name):
            cached_bluetooth_db.insert(
                {'addr': bluetooth_addr, 'name': bluetooth_name})
        self.print_to_terminal(
            f"index: {i} addr: {bluetooth_addr} name: {bluetooth_name} "
        )
    self.discovered_bluetooth_devices = nearby_devices
def Update(query):
    '''
    Replace the entire moderation database content with *query*.

    >>> Update({'chat' : { ... }})
    True

    Returns True on success; on failure the previous content (snapshotted
    via ``Find()``) is restored and False is returned.
    '''
    DataBase = tinydb.TinyDB('database/moderation.db')
    if len(DataBase.all()) == 0:
        # Seed an empty database with the default document.
        DataBase.insert({'Chat': {}})
    Find_ = Find()  # snapshot for rollback
    try:
        DataBase.purge()
        DataBase.insert(query)
        return True
    except Exception:
        # BUGFIX: was a bare `except:`, which also swallowed SystemExit
        # and KeyboardInterrupt.
        DataBase.purge()
        DataBase.insert(Find_)
        return False
def _init_tb():
    """Read the ui_builder config and open the app_ids table.

    TODO: to be defined1.
    """
    _config = configparser.ConfigParser()
    cfg_path = os.path.join(os.path.abspath(constants.CONF_PATH),
                            'ui_builder.cfg')
    _config.read(cfg_path)
    _db = tinydb.TinyDB(constants.UI_BUILDER_DB_PATH)
    _id_table = _db.table('app_ids')
def loadDBs(service, indexFiles):
    """Load (and lazily build) one TinyDB per index file for *service*.

    Databases live under /tmp/<service>/data so they are writable inside
    AWS Lambda; an empty database is populated from the matching CSV in
    the service's data directory.

    Returns a tuple: (dict of index name -> TinyDB, index metadata).
    """
    dBs = {}
    datadir = get_data_directory(service)
    indexMetadata = getIndexMetadata(service)
    # Files in Lambda can only be created in the /tmp filesystem - if it
    # doesn't exist, create it.
    lambdaFileSystem = '/tmp/' + service + '/data'
    if not os.path.exists(lambdaFileSystem):
        os.makedirs(lambdaFileSystem)
    for i in indexFiles:
        db = tinydb.TinyDB(lambdaFileSystem + '/' + i + '.json')
        # TODO: remove circular dependency from utils, so I can use the method get_index_file_name
        # TODO: initial tests show that is faster (by a few milliseconds) to populate the file from scratch). See if I should load from scratch all the time
        # TODO: Create a file that is an index of those files that have been generated, so the code knows which files to look for and avoid creating unnecesary empty .json files
        if len(db) == 0:
            try:
                # NOTE(review): opening in 'rb' and handing bytes to
                # csv.DictReader only works on Python 2; Python 3 needs
                # text mode ('r', newline='') — confirm target runtime.
                with open(datadir + i + '.csv', 'rb') as csvfile:
                    pricelist = csv.DictReader(csvfile, delimiter=',',
                                               quotechar='"')
                    db.insert_multiple(pricelist)
                # csvfile.close()  # avoid "[Errno 24] Too many open files" exception
            except IOError:
                # Missing CSV: leave this index database empty.
                pass
        dBs[i] = db
        # db.close()  # avoid "[Errno 24] Too many open files" exception
    return dBs, indexMetadata
def from_database(self, database: Union[str, tinydb.TinyDB], table: str = None):
    """Load history from a database supplied as a path to a file or a
    :obj:`tinydb.TinyDB` object.

    Args:
        database: :obj:`str` or :obj:`tinydb.TinyDB`. The database to load.
        table: (optional) :obj:`str`. The table to load from the database.
            This argument is not required if the database has only one
            table.

    Raises:
        :class:`ValueError`: if the database contains more than one table
            and `table` is not given, or if the database has no tables.
        :class:`TypeError`: if `database` is neither str nor TinyDB.
    """
    if isinstance(database, str):
        db = tinydb.TinyDB(database, sort_keys=True, indent=4,
                           separators=(',', ': '))
    elif isinstance(database, tinydb.TinyDB):
        db = database
    else:
        raise TypeError("The database must be of type str or tinydb.TinyDB.")
    if table is None:
        tables = list(db.tables())
        if len(tables) > 1:
            raise ValueError(
                "Ambiguous database with multiple tables. "
                "Specify a table name."
            )
        if not tables:
            # Robustness: the old code raised a bare IndexError on an
            # empty database; give callers a descriptive error instead.
            raise ValueError("The database contains no tables.")
        table = tables[0]
    self._db = db
    self._db_default_table = self._db.table(table)
def setUp(self):
    """Recreate the shared module-level test database and seed it from
    the ``data`` fixture."""
    global testdb
    testdb = tinydb.TinyDB("testdb.json")
    # Start from a clean slate, then load every fixture record.
    testdb.purge()
    for record in data:
        testdb.insert(record)
def notebook(uuid):
    """Return the notebook record matching *uuid* as a JSON string, or an
    empty dict when no record is found.

    NOTE(review): the two branches return different types (dict vs str);
    confirm callers tolerate both before unifying.
    """
    db = tinydb.TinyDB('notebooks.json')
    record = db.get(tinydb.where('uuid') == uuid)
    return json.dumps(record) if record else {}