def get(self, company_id, sensor_type):
    """Get all sensors of a given type for a company.

    :param company_id: public UID of the company
    :param sensor_type: sensor type key (must exist in the type registry)
    :raises NotFound: when the sensor type or the company is unknown
    """
    order_by, order_type, offset, limit, filter = get_filters(
        in_request=request)
    all_types = get_all_types()
    if sensor_type not in all_types.keys():
        raise NotFound("Sensor Type not found")
    company = Company.query.filter(Company.uid == company_id).filter(
        Company.deleted == False).first()
    if company is None:
        # Previously a missing/deleted company crashed with AttributeError.
        raise NotFound("Company not found")
    sensors = Sensor.query.filter(Sensor.company_id == company.id)\
        .filter(Sensor.type == sensor_type)\
        .filter(Sensor.deleted == False)
    # TODO: Only valid for PostgreSQL Database
    if 'q' in filter.keys():
        sensors = sensors.filter(
            Sensor.name.ilike("%{}%".format(filter['q'])))
    # order_by/order_type come from the query string; normalize the
    # direction and restrict the JSON key so the raw SQL below cannot be
    # abused for injection.
    order_type = 'DESC' if str(order_type).upper() == 'DESC' else 'ASC'
    if order_by in [
            'id', 'uid', 'description', 'name', 'last_update', 'created_at'
    ]:
        sensors = sensors.order_by(db.text(order_by + " " + order_type))
    elif order_by and order_by.replace('_', '').isalnum():
        # Sort on a key inside the JSONB ``value`` column (PostgreSQL-only).
        sensors = sensors.order_by(
            db.text("CAST(sensors.value #>> '{" + order_by +
                    "}' AS FLOAT) " + order_type))
    result_sensors = []
    for sensor in sensors[offset:offset + limit]:
        data, errors = SensorRequestSchema().dump(sensor)
        role = g.get('company_user_role', ROLE_READ)
        if not role or role == ROLE_READ:
            # Read-only users must not see the sensor's secret key.
            del data['key']
        result_sensors.append(data)
    return {"data": result_sensors, "total": sensors.count()}
def get(self, sensor_id):
    """Get aggregate values (min, max and average) for a sensor.

    :param sensor_id: public UID of the sensor
    :raises NotFound: when the sensor does not exist or is deleted
    """
    order_by, order_type, offset, limit, filter = get_filters(
        in_request=request)
    sensor = Sensor.query.filter(Sensor.uid == sensor_id).filter(
        Sensor.deleted == False).first()
    if sensor is None:
        # Previously a missing sensor crashed in tsdb.get_points.
        raise NotFound("Sensor not found")
    duration = filter.get("duration")
    offset_interval = filter.get("offset_interval")
    aggregate_function = filter.get("aggregate_function")
    start_date = None
    end_date = None
    # A duration window takes precedence over an explicit date range.
    if duration is None:
        start_date = filter.get("start_date")
        end_date = filter.get("end_date")
    # Aggregates are always returned newest-first.
    points = tsdb.get_points(sensor,
                             order_by="time DESC",
                             duration=duration,
                             start_date=start_date,
                             end_date=end_date,
                             aggregate_only=True,
                             aggregate_function=aggregate_function,
                             offset_interval=offset_interval)
    return points
def get(self):
    """Return every setting, ordered by a whitelisted column when requested.

    Falls back to the model's own ``order`` column for unknown sort keys.
    """
    order_by, order_type, offset, limit, filter = get_filters(
        in_request=request)
    allowed_columns = ('group', 'access', 'id', 'label', 'order')
    if order_by in allowed_columns:
        ordering = db.text('"{}" {}'.format(order_by, order_type))
    else:
        ordering = Setting.order
    settings = Setting.query.order_by(ordering).all()
    # TODO: Add filter on results.
    return AddSettingSchema(many=True).dump(settings).data
def get(self, company_id, dashboard_id):
    """Get all the widgets for a dashboard.

    :param company_id: company UID (access enforced upstream)
    :param dashboard_id: dashboard primary key whose widgets are listed
    """
    order_by, order_type, offset, limit, filter = get_filters(
        in_request=request)
    # order_by/order_type come straight from the query string and were
    # interpolated into raw SQL unchecked — sanitize to prevent injection.
    if not order_by or not order_by.replace('_', '').isalnum():
        order_by = 'id'
    order_type = 'DESC' if str(order_type).upper() == 'DESC' else 'ASC'
    widgets = Widget.query\
        .filter(Widget.dashboard_id == dashboard_id)\
        .order_by(db.text(order_by + " " + order_type))\
        .all()
    return {
        'data': WidgetSchema(many=True).dump(widgets)[0],
        'total': len(widgets)
    }
def get(self, sensor_id):
    """Export a sensor's value history as a streamed CSV attachment.

    :param sensor_id: Sensor UID
    :raises NotFound: when the sensor does not exist or is deleted
    """
    order_by, order_type, offset, limit, filter = get_filters(
        in_request=request)
    sensor = Sensor.query.filter(Sensor.uid == sensor_id).filter(
        Sensor.deleted == False).first()
    if sensor is None:
        # Previously a missing sensor crashed with AttributeError below.
        raise NotFound("Sensor not found")
    duration = filter.get("duration")
    start_date = None
    end_date = None
    # A duration window takes precedence over an explicit date range.
    if duration is None:
        start_date = filter.get("start_date")
        end_date = filter.get("end_date")
    points = tsdb.get_points(sensor,
                             limit=limit,
                             offset=offset,
                             order_by="time desc",
                             duration=duration,
                             start_date=start_date,
                             end_date=end_date)
    sensor_types = get_all_types()
    points['fields'] = None
    if sensor.type in sensor_types.keys():
        points['fields'] = sensor_types[sensor.type]['fields']
    data = points['data']
    # Internal identifiers that must not leak into the exported CSV.
    excluded = ('company_id', 'sensor_id')

    def generate_csv():
        # Stream row by row so large histories are never fully buffered.
        if data:
            yield ', '.join(
                k for k in data[0].keys() if k not in excluded) + '\n'
            for row in data:
                yield ', '.join(
                    str(row[k]) for k in row.keys()
                    if k not in excluded) + '\n'

    return Response(
        generate_csv(),
        mimetype='text/csv',
        headers={
            'Content-Disposition':
            'attachment; filename=sensor_data_{}.csv'.format(sensor_id)
        })
def get(self, company_id):
    """Get alert trigger history for a company.

    :param company_id: company UID
    :raises NotFound: when the company does not exist or is deleted
    """
    order_by, order_type, offset, limit, filter = get_filters(
        in_request=request)
    company = Company.query.filter(Company.uid == company_id).filter(
        Company.deleted == False).first()
    if company is None:
        # Previously a missing company crashed with AttributeError,
        # outside the try block below.
        raise NotFound("Company not found")
    tags = {"company_id": company.uid}
    if "sensor_id" in filter.keys():
        tags["sensor_id"] = filter["sensor_id"]
    try:
        return tsdb.get_points_raw(EVENT_LOG_SERIES,
                                   tags=tags,
                                   order_by='time desc',
                                   limit=limit,
                                   offset=offset)
    except Exception as e:
        # Best-effort: log the TSDB failure and surface a generic 500.
        _LOGGER.error(e)
        return {'error': 'Server error'}, 500
def get(self, company_id):
    """Get all the events for the company.

    :param company_id: company UID
    :raises NotFound: when the company does not exist or is deleted
    """
    order_by, order_type, offset, limit, filter = get_filters(
        in_request=request)
    company = Company.query.filter(Company.uid == company_id).filter(
        Company.deleted == False).first()
    if company is None:
        # Previously a missing company crashed with AttributeError.
        raise NotFound("Company not found")
    events = Event.query.filter(Event.company_id == company.id).filter(
        Event.deleted_at == None)
    if "actuator_type" in filter.keys():
        events = events.filter(
            Event.actuator_type == filter['actuator_type'])
    if order_by in ['id', 'name', 'created_at', 'next_runtime']:
        # Direction also comes from the query string; normalize before
        # interpolating into raw SQL.
        order_type = 'DESC' if str(order_type).upper() == 'DESC' else 'ASC'
        events = events.order_by(db.text(order_by + " " + order_type))
    result_events = []
    for event in events[offset:offset + limit]:
        data, errors = EventSchema().dump(event)
        result_events.append(data)
    return {"data": result_events, "total": events.count()}
def get(self):
    """
    Get all companies visible to the current user.

    Super admins see every company; other users only see companies they
    are associated with (the key is hidden for default-role members).

    :rtype: response object
    """
    # TODO: get user's companies
    order_by, order_type, offset, limit, filter = get_filters(
        in_request=request)
    user = g.get('user', None)
    if user.is_super_admin():
        # order_by/order_type come from the query string and were
        # interpolated into raw SQL unchecked — sanitize to prevent
        # injection.
        if not order_by or not order_by.replace('_', '').isalnum():
            order_by = 'id'
        order_type = 'DESC' if str(order_type).upper() == 'DESC' else 'ASC'
        companies = Company.query.filter(
            Company.deleted == False).order_by(
                db.text("{}.{} {}".format(Company.__tablename__, order_by,
                                          order_type)))
        count = companies.count()
        return {
            "data": [{
                'id': company.uid,
                'name': company.name,
                'key': company.key,
                'owner_name': company.owner.name,
                'role': ROLE_ADMIN
            } for company in companies[offset:offset + limit]],
            "total": count
        }
    else:
        user_companies = db.session.query(Company, CompanyUserAssociation)\
            .join(CompanyUserAssociation.company)\
            .filter(CompanyUserAssociation.user_id == user.id)\
            .filter(Company.deleted == False)
        count = user_companies.count()
        # TODO: Show key according to access level
        return {
            "data": [{
                'id': company.uid,
                'name': company.name,
                # Default-role members must not see the company key.
                'key': company.key if ass.role != C_ROLE_DEFAULT else None,
                'owner_name': company.owner.name,
                'role': ass.role
            } for company, ass in user_companies[offset:offset + limit]],
            "total": count
        }
def get(self, company_id):
    """Get event run history for a company.

    :param company_id: company UID
    :raises NotFound: when the company does not exist or is deleted
    """
    order_by, order_type, offset, limit, filter = get_filters(
        in_request=request)
    company = Company.query.filter(Company.uid == company_id).filter(
        Company.deleted == False).first()
    if company is None:
        # Previously a missing company crashed with AttributeError,
        # outside the try block below.
        raise NotFound("Company not found")
    tags = {"company_id": company.uid}
    if "sensor_id" in filter.keys():
        tags["sensor_id"] = filter["sensor_id"]
    if "event_id" in filter.keys():
        # Only tag by event when it exists and is not soft-deleted.
        event = Event.query.filter(
            Event.id == filter["event_id"]).filter(
                Event.deleted_at == None).first()
        if event:
            tags["event_id"] = event.id
    try:
        return tsdb.get_points_raw(EVENT_HISTORY_SERIES,
                                   tags=tags,
                                   order_by='time desc',
                                   limit=limit,
                                   offset=offset)
    except Exception as e:
        # Best-effort: log the TSDB failure and surface a generic 500.
        _LOGGER.error(e)
        return {'error': 'Server error'}, 500
def get(self):
    """
    Get all users

    :endpoint: ``/users``
    :method: ``GET``
    :return: Users List

    Example Response::

        {
          "data": [
            {
              "id": 1,
              "name": "Gopal",
              "email": "*****@*****.**",
              "phone": "987654321",
              "role": "user"
            },
            .
            .
            .
          ],
          "total": 15
        }
    """
    order_by, order_type, offset, limit, filter = get_filters(
        in_request=request)
    # order_by/order_type come from the query string and were
    # interpolated into raw SQL unchecked — sanitize to prevent injection.
    if not order_by or not order_by.replace('_', '').isalnum():
        order_by = 'id'
    order_type = 'DESC' if str(order_type).upper() == 'DESC' else 'ASC'
    users = User.query.filter(User.deleted == False).order_by(
        db.text(order_by + " " + order_type))
    if 'q' in filter.keys():
        users = users.filter(User.name.ilike("%{}%".format(filter['q'])))
    # TODO: Add filter on results.
    return {
        "data": [{
            'id': user.uid,
            'name': user.name,
            'email': user.email,
            'phone': user.phone,
            'role': user.role,
            'is_verified': user.is_verified
        } for user in users[offset:offset + limit]],
        "total": users.count()
    }
def get(self):
    """List firmwares with optional name/type filters and sorting.

    NOTE(review): ``order_type`` is interpolated into raw SQL without
    validation — confirm ``get_filters`` normalizes it.
    """
    order_by, order_type, offset, limit, filter = get_filters(
        in_request=request)
    query = Firmware.query
    if 'q' in filter.keys():
        query = query.filter(
            Firmware.name.ilike("%{}%".format(filter['q'])))
    if 'type' in filter.keys():
        query = query.filter(Firmware.type == filter['type'])
    sortable_columns = ('id', 'sensor_type', 'name', 'version',
                        'created_at', 'is_deployed')
    if order_by in sortable_columns:
        query = query.order_by(db.text(order_by + " " + order_type))
    page = query[offset:offset + limit]
    result_firmwares = [
        FirmwareRequestSchema().dump(firmware)[0] for firmware in page
    ]
    return {"data": result_firmwares, "total": query.count()}
def get(self, company_id):
    """Get all sensors for a company, rewriting file fields as URLs.

    :param company_id: company UID
    :raises NotFound: when the company does not exist or is deleted
    """
    order_by, order_type, offset, limit, filter = get_filters(
        in_request=request)
    company = Company.query.filter(Company.uid == company_id).filter(
        Company.deleted == False).first()
    if company is None:
        # Previously a missing company crashed with AttributeError.
        raise NotFound("Company not found")
    sensors = Sensor.query.filter(Sensor.company_id == company.id).filter(
        Sensor.deleted == False)
    if 'q' in filter.keys():
        sensors = sensors.filter(
            Sensor.name.ilike("%{}%".format(filter['q'])))
    if 'type' in filter.keys():
        sensors = sensors.filter(Sensor.type == filter['type'])
    # NOTE: Only valid for PostgreSQL Database
    if order_by in [
            'id', 'uid', 'type', 'description', 'name', 'last_update',
            'created_at', 'is_down'
    ]:
        sensors = sensors.order_by(db.text(order_by + " " + order_type))
    result_sensors = []
    sensor_types = get_all_types()
    for sensor in sensors[offset:offset + limit]:
        data, errors = SensorRequestSchema().dump(sensor)
        role = g.get('company_user_role', ROLE_READ)
        if not role or role == ROLE_READ:
            # Read-only users must not see the sensor's secret key.
            del data['key']
        if data['value']:
            # Sensors with a type missing from the registry used to raise
            # KeyError here; treat them as having no declared fields.
            value_fields = sensor_types.get(sensor.type, {}).get('fields', {})
            for field_name, field in value_fields.items():
                if field['type'] == 'file' and field_name in data[
                        'value'].keys():
                    # Replace the stored file UID with a download URL.
                    data['value'][field_name] = url_for(
                        'files.fileresource',
                        sensor_id=sensor.uid,
                        uid=data['value'][field_name],
                        sensor_key=sensor.key,
                        _external=True
                    ) if data['value'][field_name] else ''
        result_sensors.append(data)
    return {"data": result_sensors, "total": sensors.count()}
def get(self, company_id, network_id):
    """Get all sensors of a network.

    :param company_id: company UID (access enforced upstream)
    :param network_id: network UID
    :raises NotFound: when the network does not exist or is deleted
    """
    order_by, order_type, offset, limit, filter = get_filters(
        in_request=request)
    network = Network.query.filter(Network.uid == network_id).filter(
        Network.deleted == False).first()
    if network is None:
        # The network was fetched but never checked before; a missing
        # network now 404s instead of silently returning an empty list.
        raise NotFound("Network not found")
    sensors = Sensor.query.join(Network.sensors).filter(
        Network.uid == network_id).filter(Sensor.deleted == False)
    if 'q' in filter.keys():
        sensors = sensors.filter(
            Sensor.name.ilike("%{}%".format(filter['q'])))
    if order_by in [
            'id', 'uid', 'type', 'description', 'name', 'last_update',
            'created_at', 'is_down'
    ]:
        # Direction also comes from the query string; normalize before
        # interpolating into raw SQL.
        order_type = 'DESC' if str(order_type).upper() == 'DESC' else 'ASC'
        sensors = sensors.order_by(
            db.text("sensors." + order_by.strip() + " " + order_type))
    count = sensors.count()
    result_sensors = []
    for sensor in sensors[offset:offset + limit]:
        data, errors = SensorRequestSchema().dump(sensor)
        result_sensors.append(data)
    return {"data": result_sensors, "total": count}
def get(self, company_id):
    """Return every non-deleted dashboard of the current company."""
    order_by, order_type, offset, limit, filter = get_filters(
        in_request=request)
    company = g.company
    # Unknown sort keys fall back to the primary key.
    if order_by not in ('id', 'name', 'updated_at', 'created_at'):
        order_by = 'id'
    query = Dashboard.query\
        .filter(Dashboard.company_id == company.id)\
        .filter(Dashboard.deleted == False)
    if 'sensor_type' in filter.keys():
        query = query.filter(
            Dashboard.sensor_type == filter['sensor_type'])
    if 'dashboard_type' in filter.keys():
        # NOTE(review): this compares Dashboard.sensor_type against the
        # 'dashboard_type' filter value — looks like a copy-paste slip,
        # kept as-is; confirm against the Dashboard model.
        query = query.filter(
            Dashboard.sensor_type == filter['dashboard_type'])
    query = query.order_by(db.text(order_by + " " + order_type))
    result_dashboards = [
        DashboardSchema().dump(dashboard)[0]
        for dashboard in query[offset:offset + limit]
    ]
    return {"data": result_dashboards, "total": query.count()}
def get(self, company_id):
    """Get all networks of a company with per-network sensor up/down counts."""
    order_by, order_type, offset, limit, filter = get_filters(
        in_request=request)
    # One row per (network, is_down) group; the count is the number of
    # non-deleted sensors in that group. Outer joins keep sensor-less
    # networks in the result.
    rows = db.session.query(Network, func.count(Sensor.id), Sensor.is_down).join(Company, Company.id == Network.company_id). \
        outerjoin(network_sensor_table, Network.id == network_sensor_table.c.network_id). \
        outerjoin(Sensor, and_(Sensor.id == network_sensor_table.c.sensor_id, Sensor.deleted == False)). \
        filter(Company.uid == company_id).filter(Network.deleted == False). \
        group_by(Network.id).group_by(Sensor.is_down).order_by(Network.name).all()
    summary = {}
    for network, sensor_count, is_down in rows:
        entry = summary.get(network.uid)
        if entry is None:
            entry = {
                'name': network.name,
                'id': network.uid,
                'floormap': network.filename,
                'sensor_count': 0,
                'sensors_off': 0,
                'sensors_on': 0
            }
            summary[network.uid] = entry
        # Fold this (network, is_down) group into the running totals.
        entry['sensor_count'] += sensor_count
        if is_down:
            entry['sensors_off'] += sensor_count
        entry['sensors_on'] = entry['sensor_count'] - entry['sensors_off']
    # Case-insensitive sort by network name for stable pagination.
    networks_list = sorted(summary.values(), key=lambda n: n['name'].upper())
    return {
        "data": networks_list[offset:offset + limit],
        "total": len(networks_list)
    }
def get(self, company_id):
    """Get all users of a company.

    :param company_id: company UID
    :raises NotFound: when the company does not exist or is deleted
    """
    order_by, order_type, offset, limit, filter = get_filters(
        in_request=request)
    company = Company.query.filter(Company.uid == company_id).filter(
        Company.deleted == False).first()
    if company is None:
        # Previously a missing company crashed with AttributeError.
        raise NotFound("Company not found")
    company_users = db.session.query(User, CompanyUserAssociation)\
        .join(CompanyUserAssociation.user)\
        .filter(CompanyUserAssociation.company_id == company.id)\
        .filter(User.deleted == False)
    if 'email' in filter.keys():
        company_users = company_users.filter(
            User.email == filter['email'])
    count = company_users.count()
    # TODO: Show key according to access level
    return {
        "data": [{
            'id': user.uid,
            'name': user.name,
            'email': user.email,
            'role': ass.role
        } for user, ass in company_users[offset:offset + limit]],
        "total": count
    }
def get(self, company_id):
    """Get all the alerts for the company.

    :param company_id: company UID
    :raises NotFound: when the company does not exist or is deleted
    """
    order_by, order_type, offset, limit, filter = get_filters(
        in_request=request)
    company = Company.query.filter(Company.uid == company_id).filter(
        Company.deleted == False).first()
    if company is None:
        # Previously a missing company crashed with AttributeError.
        raise NotFound("Company not found")
    alerts = Alert.query.filter(Alert.company_id == company.id).filter(
        Alert.deleted == False)
    if "action_type" in filter.keys():
        alerts = alerts.filter(Alert.action_type == filter['action_type'])
    if "actuator_type" in filter.keys():
        alerts = alerts.filter(
            Alert.actuator_type == filter['actuator_type'])
    if "sensor_type" in filter.keys():
        alerts = alerts.filter(Alert.sensor_type == filter['sensor_type'])
    if order_by in [
            'id', 'type', 'sensor_type', 'name', 'field', 'created_at'
    ]:
        # Direction also comes from the query string; normalize before
        # interpolating into raw SQL.
        order_type = 'DESC' if str(order_type).upper() == 'DESC' else 'ASC'
        alerts = alerts.order_by(db.text(order_by + " " + order_type))
    result_alerts = []
    for alert in alerts[offset:offset + limit]:
        data, errors = AlertSchema().dump(alert)
        result_alerts.append(data)
    return {"data": result_alerts, "total": alerts.count()}
def get(self, sensor_id=None, sensor_hid=None, company_id=None):
    """
    Get sensor value history, with optional grouping/aggregation.

    File-typed fields are rewritten as download URLs (only for ungrouped
    data, since grouped rows do not carry individual file UIDs).

    :param sensor_id: Sensor ID (sensor itself is resolved upstream into
        ``g.sensor``)
    """
    order_by, order_type, offset, limit, filter = get_filters(
        in_request=request)
    sensor = g.sensor
    duration = filter.get("duration")
    group_duration = filter.get("group_duration")
    aggregate_function = filter.get("aggregate_function")
    offset_interval = filter.get("offset_interval")
    start_date = None
    end_date = None
    # A duration window takes precedence over an explicit date range.
    if duration is None:
        start_date = filter.get("start_date")
        end_date = filter.get("end_date")
        if end_date and parser.parse(end_date) > pytz.utc.localize(
                datetime.datetime.utcnow()):
            # Clamp end dates in the future to "now" (UTC, ISO format).
            end_date = datetime.datetime.utcnow().replace(
                microsecond=0).isoformat() + ".000Z"
            _LOGGER.debug(end_date)
    sensor_types = get_all_types()
    # Sensors with a type missing from the registry used to raise
    # KeyError here (before the later membership check could run);
    # fall back to no declared fields instead.
    value_fields = sensor_types.get(sensor.type, {}).get('fields')
    # History is always returned newest-first.
    points = tsdb.get_points(sensor,
                             limit=limit,
                             offset=offset,
                             order_by="time DESC",
                             duration=duration,
                             start_date=start_date,
                             end_date=end_date,
                             group_duration=group_duration,
                             value_fields=value_fields,
                             aggregate_function=aggregate_function,
                             offset_interval=offset_interval)
    points['fields'] = value_fields
    if value_fields:
        for field_name, field in value_fields.items():
            # Do not show files in grouped data
            if field['type'] == 'file' and group_duration is None:
                for point in points['data']:
                    point[field_name] = url_for(
                        'files.fileresource',
                        sensor_id=sensor.uid,
                        uid=point[field_name],
                        sensor_key=sensor.key,
                        _external=True) if point[field_name] else ''
    return points