async def get(self):
    """Return the current user's profile plus the groups they belong to.

    Response example::

        {
            "createdAt": "2019-02-18T11:06:53.621000+08:00",
            "email": "*****@*****.**",
            "lastLoggedInAt": "2019-02-21T20:31:29.639000+08:00",
            "secretKey": "S:15cc65f5-3eeb-4fec-8131-355ad03653d4",
            "username": "******",
            "admin": false,
            "groups": [
                {
                    "admin": true,
                    "creator": "*****@*****.**",
                    "id": "1",
                    "name": "g4"
                }
            ]
        }
    """
    # Resolve each group id on the user record into the full group
    # document, dropping the (potentially large) member list.
    group_roles = self.current_user.get("groups", {})
    group_docs = await db.run(
        r.expr(group_roles.keys()).map(
            lambda gid: r.table("groups").get(gid).without("members")))
    for doc in group_docs:
        # role code 2 marks a group admin, 1 a plain member
        doc['admin'] = (group_roles[doc['id']] == 2)
    profile = self.current_user.copy()
    profile["groups"] = group_docs
    self.write_json(profile)
def _update_legacy(database, block_num, address, resource):
    """Upsert *resource* into the legacy table matching the address type.

    Addresses whose type has no entry in TABLE_NAMES are ignored.
    Errors are logged, never raised.
    """
    try:
        data_type = addresser.get_address_type(address)
        if data_type not in TABLE_NAMES:
            return
        record = {
            "id": address,
            "start_block_num": int(block_num),
            "end_block_num": int(sys.maxsize),
            **resource,
        }
        table = database.get_table(TABLE_NAMES[data_type])
        upsert = table.get(address).replace(
            lambda doc: r.branch(
                # pylint: disable=singleton-comparison
                (doc == None),  # noqa
                r.expr(record),
                doc.merge(resource),
            ))
        outcome = database.run_query(upsert)
        nothing_written = not outcome["inserted"] and not outcome["replaced"]
        if nothing_written or outcome["errors"] > 0:
            LOGGER.warning("error updating legacy state table:\n%s\n%s",
                           outcome, upsert)
    except Exception as err:  # pylint: disable=broad-except
        LOGGER.warning("_update_legacy %s error:", type(err))
        LOGGER.warning(err)
def _update_legacy(database, block_num, address, resource, data_type):
    """Update the legacy sync tables (expansion by object type name).

    Upserts *resource* keyed by *address* into the table named by
    TABLE_NAMES[data_type]; errors are logged, never raised.
    """
    try:
        record = {
            "id": address,
            "start_block_num": int(block_num),
            "end_block_num": int(sys.maxsize),
            **resource,
        }
        table = database.get_table(TABLE_NAMES[data_type])
        upsert = table.get(address).replace(
            lambda doc: r.branch(
                # pylint: disable=singleton-comparison
                (doc == None),  # noqa
                r.expr(record),
                doc.merge(resource),
            ))
        outcome = database.run_query(upsert)
        if outcome["errors"] > 0:
            LOGGER.warning("error updating legacy state table:\n%s\n%s",
                           outcome, upsert)
    except Exception as err:  # pylint: disable=broad-except
        LOGGER.warning("_update_legacy %s error:", type(err))
        LOGGER.warning(err)
def post_db_result(db: ReDB, temperature: int, humidity: int):
    """Record one SANDBOX temperature and one humidity sample.

    Each reading is written to its own dashboard table with a
    +02:00-zoned timestamp taken at insert time.
    """
    for table, reading in (('temperatures', temperature),
                           ('humidity', humidity)):
        r.db('dashboard').table(table).insert({
            'timestamp': r.expr(datetime.now(r.make_timezone('+02:00'))),
            'location': 'SANDBOX',
            'value': reading,
        }).run(db.get_conn())
def _update_state(database, block_num, address, resource):
    """Upsert *resource* into the "state" table and mirror it into
    "state_history" keyed by (address, block_num).

    Any failure is logged and swallowed; the function never raises.
    """
    try:
        # update state table
        address_parts = addresser.parse(address)
        address_binary = bytes_from_hex(address)
        key = address_binary
        # NOTE(review): `keys` is assigned but never used below — dead code?
        keys = {"address": address_binary}
        object_id = bytes_from_hex(address_parts.object_id)
        object_type = address_parts.object_type.value
        related_id = bytes_from_hex(address_parts.related_id)
        related_type = address_parts.related_type.value
        relationship_type = address_parts.relationship_type.value
        data = {
            "block_updated": int(block_num),
            "updated_at": r.now(),
            "object_type": object_type,
            "object_id": object_id,
            "related_type": related_type,
            "relationship_type": relationship_type,
            "related_id": related_id,
            **resource,
        }
        table_query = database.get_table("state")
        # Upsert: insert a fresh record (with creation fields) when the row
        # is missing, otherwise merge the update on top of the existing doc.
        query = table_query.get(key).replace(
            lambda doc: r.branch(
                # pylint: disable=singleton-comparison
                (doc == None),  # noqa
                r.expr(data).merge(
                    {
                        "address": key,
                        "block_created": int(block_num),
                        "created_at": r.now(),
                    }
                ),
                doc.merge(data),
            )
        )
        result = database.run_query(query)
        if not result["inserted"] == 1 and not result["replaced"]:
            LOGGER.warning("error updating state table:\n%s\n%s", result, query)
        # History rows are keyed by [address, block_num] so each block's
        # version of the state is kept separately.
        key = [address_binary, int(block_num)]
        data["address"] = key
        if result["inserted"] == 1:
            data["block_created"] = int(block_num)
            data["created_at"] = r.now()
        elif result["replaced"] == 1:
            # NOTE(review): replacements are logged at warning level with no
            # message format — presumably diagnostic; confirm intent.
            LOGGER.warning(result)
        table_query = database.get_table("state_history")
        query = table_query.get(key).replace(data)
        result = database.run_query(query)
        if not result["inserted"] == 1 and not result["replaced"]:
            LOGGER.warning("error updating state_history table:\n%s\n%s",
                           result, query)
    except Exception as err:  # pylint: disable=broad-except
        LOGGER.warning("update_state %s error:", type(err))
        LOGGER.warning(err)
def process(rec, database):
    """ Process inbound queue records

    Deserializes the record's batch, submits it to the validator, and on
    COMMITTED success records metadata/changelog entries; on failure the
    record is copied to sync_errors. In every path the record is removed
    from inbound_queue. Exceptions are logged, never raised.
    """
    try:
        # Records without a batch payload cannot be submitted: drop them
        # from the queue and file them as sync errors.
        if "batch" not in rec or not rec["batch"]:
            database.run_query(
                database.get_table("inbound_queue").get(rec["id"]).delete())
            rec["sync_direction"] = "inbound"
            database.run_query(database.get_table("sync_errors").insert(rec))
            return
        batch = batch_pb2.Batch()
        batch.ParseFromString(rec["batch"])
        batch_list = batcher.batch_to_list(batch=batch)
        # Submit to the validator and block for the commit status.
        status = ClientSync().send_batches_get_status(batch_list=batch_list)
        if status[0]["status"] == "COMMITTED":
            if "metadata" in rec and rec["metadata"]:
                data = {
                    "address": rec["address"],
                    "object_type": rec["object_type"],
                    "object_id": rec["object_id"],
                    "provider_id": rec["provider_id"],
                    "created_at": r.now(),
                    "updated_at": r.now(),
                    **rec["metadata"],
                }
                # Upsert: full insert when missing, otherwise merge only the
                # metadata payload and bump updated_at.
                query = (
                    database.get_table("metadata").get(
                        rec["address"]).replace(lambda doc: r.branch(
                            # pylint: disable=singleton-comparison
                            (doc == None),  # noqa
                            r.expr(data),
                            doc.merge({
                                "metadata": rec["metadata"],
                                "updated_at": r.now()
                            }),
                        )))
                result = database.run_query(query)
                if (not result["inserted"] and
                        not result["replaced"]) or result["errors"] > 0:
                    LOGGER.warning("error updating metadata record:\n%s\n%s",
                                   result, query)
            rec["sync_direction"] = "inbound"
            database.run_query(database.get_table("changelog").insert(rec))
            database.run_query(
                database.get_table("inbound_queue").get(rec["id"]).delete())
        else:
            # Commit failed: record the validator error and move the record
            # to sync_errors.
            rec["error"] = get_status_error(status)
            rec["sync_direction"] = "inbound"
            database.run_query(database.get_table("sync_errors").insert(rec))
            database.run_query(
                database.get_table("inbound_queue").get(rec["id"]).delete())
    except Exception as err:  # pylint: disable=broad-except
        LOGGER.exception("%s exception processing inbound record:\n%s",
                         type(err).__name__, rec)
        LOGGER.exception(err)
def filter(self, ids=None, **kwargs):
    """Return an ObjectSet narrowed to *ids* (if given) and field filters."""
    if not ids:
        return ObjectSet(self, self.query.filter(kwargs))
    try:
        narrowed = self.query.get_all(r.args(ids)).filter(kwargs)
    except AttributeError:
        # self.query already has a get_all applied
        narrowed = self.query.filter(
            lambda doc: r.expr(ids).contains(doc['id'])).filter(kwargs)
    return ObjectSet(self, narrowed)
def add_selections_command(self, command, selections=(), **kwargs):
    '''Append a .filter() to *command* for each selection.

    Args:
        command: ReQL query to extend.
        selections: iterable of (field, values) pairs; documents are kept
            only when doc[field] equals one of *values*. Default changed
            from a mutable ``[]`` to an immutable ``()`` (same behavior,
            avoids the shared-mutable-default pitfall).
        **kwargs: accepted for signature compatibility; unused.

    Returns:
        The (possibly extended) ReQL command.
    '''
    for sel in selections:
        field = sel[0]
        values = sel[1]
        print(f"Only downloading documents with field '{field}' "
              f"equal to one of {values}")
        # .filter() consumes the lambda immediately on each iteration,
        # so capturing the loop variables here is safe.
        command = command.filter(
            lambda doc: r.expr(values).contains(doc[field]))
    return command
# Accept sensor packets on all interfaces, port 5767, and fan each packet
# out into three RethinkDB tables keyed by the same millisecond timestamp.
s.bind(('0.0.0.0', 5767))
s.listen(5)
while True:
    Time = round(time.time() * 1000)  # timeseries in milliseconds
    c, addr = s.accept()
    data = c.recv(1024).decode("utf-8")  # receiving and decoding data
    # Assumes a single whitespace-separated packet with temperature at
    # index 0 and gyro X/Y/Z at indices 4-6 — TODO confirm sender format.
    # Values are stored as strings, not numbers.
    DATA = data.split()
    print(DATA)
    # inserting into first table
    r.table('MPU_GYRO').insert(
        r.expr({
            'id': Time,
            'GyroX': DATA[4],
            'GyroY': DATA[5],
            'GyroZ': DATA[6],
        })).run(connection)
    # inserting into second table
    r.table('VIBRATION').insert({
        'id': Time,
        "Vibration": "NONE",
    }).run(connection)
    # inserting into third table
    r.table('TEMPERATURE').insert({
        'id': Time,
        'Temperature': DATA[0],
    }).run(connection)
def filter_accessible(v):
    """ReQL predicate: true when the document's owner is visible to the
    current user (one of their groups, their own email, or "" for a
    user-private/unowned device)."""
    allowed = list(self.current_user.get("groups", {}).keys())
    allowed += [self.current_user.email, ""]  # include user-private device
    return r.expr(allowed).contains(v['owner'].default(""))
def save_balance(self, balance):
    """Persist *balance* into the "balance" table.

    Note: mutates *balance*, adding a "timestamp" key with the current
    -03:00-zoned time.
    """
    local_now = datetime.now(r.make_timezone("-03:00"))
    balance["timestamp"] = r.expr(local_now)
    r.db(self.db_name).table("balance").insert(balance).run()
def post_db_result(db: ReDB, is_opened: bool):
    """Record the MAIN door's open/closed state with a +02:00 timestamp."""
    record = {
        'timestamp': r.expr(datetime.now(r.make_timezone('+02:00'))),
        'location': 'MAIN',
        'opened': is_opened,
    }
    r.db('dashboard').table('doors').insert(record).run(db.get_conn())
def save_ticker(self, ticker):
    """Persist *ticker* into the "tickers" table.

    Note: mutates *ticker*, adding a "timestamp" key with the current
    -03:00-zoned time.
    """
    local_now = datetime.now(r.make_timezone("-03:00"))
    ticker["timestamp"] = r.expr(local_now)
    r.db(self.db_name).table("tickers").insert(ticker).run()
def save_trade(self, trade):
    """Persist *trade* into the "trades" table.

    Note: mutates *trade*, adding a "timestamp" key with the current
    -03:00-zoned time.
    """
    local_now = datetime.now(r.make_timezone("-03:00"))
    trade["timestamp"] = r.expr(local_now)
    r.db(self.db_name).table("trades").insert(trade).run()
def _update_state(database, block_num, address, resource):
    """ Update the state, state_history and metadata tables

    Upserts *resource* into "state"; when an existing row is replaced its
    previous version is archived into "state_history". For addresses with
    no related_id, a trimmed copy is also upserted into "metadata".
    Failures are logged and swallowed; the function never raises.
    """
    try:
        # update state table
        now = r.now()
        address_parts = addresser.parse(address)
        address_binary = bytes_from_hex(address)
        object_id = bytes_from_hex(address_parts.object_id)
        object_type = address_parts.object_type.value
        related_id = bytes_from_hex(address_parts.related_id)
        related_type = address_parts.related_type.value
        relationship_type = address_parts.relationship_type.value
        state = database.get_table("state")
        state_history = database.get_table("state_history")
        # Full document used on first insert.
        data = {
            "address": address_binary,
            "object_type": object_type,
            "object_id": object_id,
            "related_type": related_type,
            "relationship_type": relationship_type,
            "related_id": related_id,
            "block_created": int(block_num),
            "block_num": int(block_num),
            "updated_date": now,
            **resource,
        }
        # Partial document merged over an existing row; keeps its
        # creation fields intact.
        delta = {"block_num": int(block_num), "updated_at": now, **resource}
        query = state.get(address_binary).replace(
            lambda doc: r.branch(
                # pylint: disable=singleton-comparison
                (doc == None),  # noqa
                r.expr(data),
                doc.merge(delta),
            ),
            return_changes=True,
        )
        result = database.run_query(query)
        if result["errors"] > 0:
            LOGGER.warning("error updating state table:\n%s\n%s", result,
                           query)
        # On replacement, archive the displaced version via the
        # return_changes payload.
        if result["replaced"] and "changes" in result and result["changes"]:
            query = state_history.insert(result["changes"][0]["old_val"])
            # data["address"] = [address_binary, int(block_num)]
            result = database.run_query(query)
            if result["errors"] > 0:
                LOGGER.warning("error updating state_history table:\n%s\n%s",
                               result, query)
        if not related_id:
            # Metadata rows describe the object itself, so the
            # relationship fields are stripped before the upsert.
            data["address"] = address_binary
            del data["related_type"]
            del data["relationship_type"]
            del data["related_id"]
            query = (
                database.get_table("metadata").get(address_binary).replace(
                    lambda doc: r.branch(
                        # pylint: disable=singleton-comparison
                        (doc == None),  # noqa
                        r.expr(data),
                        doc.merge(delta),
                    )))
            result = database.run_query(query)
            if result["errors"] > 0:
                LOGGER.warning("error updating metadata record:\n%s\n%s",
                               result, query)
    except Exception as err:  # pylint: disable=broad-except
        LOGGER.warning("update_state %s error:", type(err))
        LOGGER.warning(err)
def filter_accessible(v):
    """ReQL predicate: true when the document's owner is the current
    user's email or "" (private/unowned device)."""
    visible_owners = [self.current_user.email, ""]
    return r.expr(visible_owners).contains(v['owner'].default(""))
def save_executions(self, executions):
    """Persist each execution record into the "run_exec" table.

    Note: mutates every record in *executions*, adding a "timestamp" key
    with the -03:00-zoned time taken per record.

    Args:
        executions: iterable of dict-like execution records.
    """
    # Loop variable renamed from `exec`, which shadows the builtin.
    for execution in executions:
        execution["timestamp"] = r.expr(
            datetime.now(r.make_timezone("-03:00")))
        r.db(self.db_name).table("run_exec").insert(execution).run()