async def get_category_item(request):
    """Fetch one configuration item from a category.

    Args:
        request: ``category_name`` and ``config_item`` URL parts are required.
    Returns:
        JSON response carrying the configuration item in the given category.
    :Example:
        curl -X GET http://localhost:8081/foglamp/category/PURGE_READ/age
    """
    cat_name = request.match_info.get('category_name', None)
    item_name = request.match_info.get('config_item', None)

    # Both path components must be present and non-empty.
    if not (cat_name and item_name):
        raise web.HTTPBadRequest(reason="Both Category Name and Config items are required")

    # TODO: make it optimized and elegant
    config_mgr = ConfigurationManager(connect.get_storage())
    item = await config_mgr.get_category_item(cat_name, item_name)
    if item is None:
        raise web.HTTPNotFound(reason="No Category Item Found")
    return web.json_response(item)
async def delete_configuration_item_value(request):
    """Reset a configuration item's value to the empty string.

    Args:
        request: ``category_name`` and ``config_item`` URL parts are required.
    Returns:
        JSON response carrying the (now cleared) configuration item.
    :Example:
        curl -X DELETE http://localhost:8081/foglamp/category/{category_name}/{config_item}/value

        For {category_name}=>PURGE delete value for {config_item}=>age
        curl -X DELETE http://localhost:8081/foglamp/category/PURGE_READ/age/value
    """
    cat_name = request.match_info.get('category_name', None)
    item_name = request.match_info.get('config_item', None)
    if not (cat_name and item_name):
        raise web.HTTPBadRequest(reason="Both Category Name and Config items are required")

    # TODO: make it optimized and elegant
    config_mgr = ConfigurationManager(connect.get_storage())
    # "Delete" is modelled as writing an empty value entry.
    await config_mgr.set_category_item_value_entry(cat_name, item_name, '')

    item = await config_mgr.get_category_item(cat_name, item_name)
    if item is None:
        raise web.HTTPNotFound(
            reason="No detail found for the category_name: {} and config_item: {}".format(cat_name, item_name))
    return web.json_response(item)
async def get_backup_details(request):
    """Return the details of a single backup.

    Args:
        request: ``backup_id`` URL part is required and must be an integer.
    Returns:
        JSON response with the backup's status, id and date.
    Raises:
        web.HTTPBadRequest: backup id is not a valid integer.
        web.HTTPNotFound: no backup exists with the given id.
        web.HTTPInternalServerError: any other failure while reading the backup.
    :Example:
        curl -X GET http://localhost:8081/foglamp/backup/1
    """
    backup_id = request.match_info.get('backup_id', None)
    try:
        backup_id = int(backup_id)
        backup = Backup(connect.get_storage())
        backup_json = backup.get_backup_details(backup_id)
        resp = {
            "status": _get_status(int(backup_json["status"])),
            'id': backup_json["id"],
            'date': backup_json["ts"]
        }
    except ValueError:
        raise web.HTTPBadRequest(reason='Invalid backup id')
    except exceptions.DoesNotExist:
        raise web.HTTPNotFound(reason='Backup id {} does not exist'.format(backup_id))
    except Exception as ex:
        # web.HTTPException is the abstract base of aiohttp's HTTP errors and
        # cannot be raised directly; use a concrete 500 response instead.
        raise web.HTTPInternalServerError(reason=str(ex))
    return web.json_response(resp)
def delete(cls, user_id):
    """Soft-delete a user by disabling the account.

    Args:
        user_id: user id to delete
    Returns:
        json response from the storage update
    Raises:
        ValueError: when attempting to delete the super admin, or on a
            storage-layer failure.
    """
    # either keep 1 admin user or just reserve id:1 for superuser
    if int(user_id) == 1:
        raise ValueError("Super admin user can not be deleted")

    storage = connect.get_storage()
    try:
        # first delete the active login references
        cls.delete_user_tokens(user_id)

        # Disable the account rather than physically removing the row.
        upd_payload = PayloadBuilder().SET(enabled="f") \
            .WHERE(['id', '=', user_id]) \
            .AND_WHERE(['enabled', '=', 't']).payload()
        res = storage.update_tbl("users", upd_payload)
    except StorageServerError as ex:
        if ex.error["retryable"]:
            pass  # retry INSERT
        raise ValueError(ERROR_MSG)
    return res
async def asset_counts(request):
    """Browse all the assets for which we have recorded readings and
    return a readings count for each.

    Returns:
        json result on basis of
        SELECT asset_code, count(*) FROM readings GROUP BY asset_code;
    Raises:
        web.HTTPBadRequest: the storage layer returned an error payload.
        web.HTTPInternalServerError: any other unexpected failure.
    :Example:
        curl -X GET http://localhost:8081/foglamp/asset
    """
    payload = PayloadBuilder().AGGREGATE(["count", "*"]).ALIAS("aggregate", ("*", "count", "count"))\
        .GROUP_BY("asset_code").payload()
    results = {}
    try:
        _storage = connect.get_storage()
        results = _storage.query_tbl_with_payload('readings', payload)
        response = results['rows']
        asset_json = [{
            "count": r['count'],
            "assetCode": r['asset_code']
        } for r in response]
    except KeyError:
        # 'rows' missing means the storage layer reported an error payload.
        raise web.HTTPBadRequest(reason=results['message'])
    except Exception as ex:
        # web.HTTPException is the abstract base of aiohttp's HTTP errors and
        # cannot be raised directly; use a concrete 500 response instead.
        raise web.HTTPInternalServerError(reason=str(ex))
    return web.json_response(asset_json)
async def get_category(request):
    """Fetch every configuration item in a category.

    Args:
        request: ``category_name`` URL part is required.
    Returns:
        JSON response carrying all configuration items in the given category.
    :Example:
        curl -X GET http://localhost:8081/foglamp/category/PURGE_READ
    """
    cat_name = request.match_info.get('category_name', None)
    if not cat_name:
        raise web.HTTPBadRequest(reason="Category Name is required")

    # TODO: make it optimized and elegant
    config_mgr = ConfigurationManager(connect.get_storage())
    items = await config_mgr.get_category_all_items(cat_name)
    if items is None:
        raise web.HTTPNotFound(reason="No such Category Found for {}".format(cat_name))
    return web.json_response(items)
async def _read_config(self):
    """Read the service-monitor configuration, creating the 'SMNTR'
    category with defaults when it does not yet exist, and cache the
    sleep interval and ping timeout on the instance."""
    default_config = {
        "sleep_interval": {
            "description": "The time (in seconds) to sleep between health checks. (must be greater than 5)",
            "type": "integer",
            "default": str(self._DEFAULT_SLEEP_INTERVAL)
        },
        "ping_timeout": {
            "description": "Timeout for a response from any given microservice. (must be greater than 0)",
            "type": "integer",
            "default": str(self._DEFAULT_PING_TIMEOUT)
        },
    }

    cfg_manager = ConfigurationManager(connect.get_storage())
    # Ensure the category exists before reading it back.
    await cfg_manager.create_category('SMNTR', default_config, 'Service Monitor configuration')
    config = await cfg_manager.get_category_all_items('SMNTR')

    self._sleep_interval = int(config['sleep_interval']['value'])
    self._ping_timeout = int(config['ping_timeout']['value'])
async def get_backups(request):
    """Return a list of all backups.

    NOTE(review): this handler is work-in-progress. It first installs a
    hard-coded stub result via ``Backup.get_backup_list.return_value`` (a
    mock-style assignment left in production code), then overwrites
    ``backup_json`` with real data from ``backup_postgres.Backup``. The
    ``limit``/``skip``/``status`` query parameters parsed below are NOT
    applied to that real query — it is invoked with (999, 0, None).

    :Example: curl -X GET http://localhost:8082/foglamp/backup
    :Example: curl -X GET http://localhost:8082/foglamp/backup?limit=2&skip=1&status=complete
    """
    try:
        # Optional paging/filter query parameters (currently only fed to the
        # stubbed call, not the real one below).
        limit = int(
            request.query['limit']) if 'limit' in request.query else None
        skip = int(request.query['skip']) if 'skip' in request.query else None
        status = request.query['status'] if 'status' in request.query else None
        # TODO : Fix after actual implementation
        Backup.get_backup_list.return_value = [{
            'id': 28,
            'date': '2017-08-30 04:05:10.382',
            'status': 'running'
        }, {
            'id': 27,
            'date': '2017-08-29 04:05:13.392',
            'status': 'failed'
        }, {
            'id': 26,
            'date': '2017-08-28 04:05:08.201',
            'status': 'complete'
        }]
        # backup_json = [{"id": b[0], "date": b[1], "status": b[2]}
        #                for b in Backup.get_backup_list(limit=limit, skip=skip, status=status)]
        backup_json = Backup.get_backup_list(limit=limit,
                                             skip=skip,
                                             status=status)
        # ## Test #########################################################################################:
        _logger = logger.setup("BACKUP-API-TEST",
                               destination=logger.SYSLOG,
                               level=logging.DEBUG)
        _logger.info("get_backups - START 3 ")
        _storage = connect.get_storage()
        backup = backup_postgres.Backup(_storage)
        # NOTE(review): overwrites the stubbed result above with real data.
        backup_json = backup.get_all_backups(999, 0, None)
        _logger.debug("get_backups - END ")
        _logger.handlers = []
        # NOTE(review): ``_logger.handle`` is a bound method, not a handler
        # object — passing it to removeHandler looks like a bug; confirm the
        # intended handler to detach.
        _logger.removeHandler(_logger.handle)
        _logger = None
        del backup
        # ## ##########################################################+###############################:
    except Backup.DoesNotExist:
        raise web.HTTPNotFound(
            reason='No backups found for queried parameters')
    return web.json_response({"backups": backup_json})
async def create_category(request):
    """Create (or merge) a configuration category.

    Args:
        request: A JSON object that defines the category; must contain
            'key', 'description' and 'value', and may contain the boolean
            'keep_original_items'.
    Returns:
        JSON response with the created category's key, description and items.
    Raises:
        web.HTTPBadRequest: payload is malformed (missing key, wrong type).
        web.HTTPNotFound: category cannot be read back after creation.
        web.HTTPInternalServerError: any other unexpected failure.
    :Example:
        curl -d '{"key": "TEST", "description": "description", "value": {"info": {"description": "Test", "type": "boolean", "default": "true"}}}' -X POST http://localhost:8081/foglamp/category
    """
    try:
        cf_mgr = ConfigurationManager(connect.get_storage())
        data = await request.json()
        if not isinstance(data, dict):
            raise ValueError('Data payload must be a dictionary')

        valid_post_keys = ['key', 'description', 'value']
        for k in valid_post_keys:
            if k not in list(data.keys()):
                raise KeyError("'{}' param required to create a category".format(k))

        category_name = data.get('key')
        category_desc = data.get('description')
        category_value = data.get('value')
        should_keep_original_items = data.get('keep_original_items', False)
        if not isinstance(should_keep_original_items, bool):
            raise TypeError('keep_original_items should be boolean true | false')

        await cf_mgr.create_category(
            category_name=category_name,
            category_description=category_desc,
            category_value=category_value,
            keep_original_items=should_keep_original_items)

        # Read back what was actually stored (creation may merge).
        category_info = await cf_mgr.get_category_all_items(category_name=category_name)
        if category_info is None:
            raise LookupError('No such %s found' % category_name)
    except (KeyError, ValueError, TypeError) as ex:
        raise web.HTTPBadRequest(reason=str(ex))
    except LookupError as ex:
        raise web.HTTPNotFound(reason=str(ex))
    except Exception as ex:
        # web.HTTPException is the abstract base of aiohttp's HTTP errors and
        # cannot be raised directly; use a concrete 500 response instead.
        raise web.HTTPInternalServerError(reason=str(ex))

    return web.json_response({
        "key": category_name,
        "description": category_desc,
        "value": category_info
    })
async def get_tasks_latest(request):
    """Return the most recent task execution for each task name.

    Returns:
        JSON list of the latest run of every task in the tasks table.
    :Example:
        curl -X GET http://localhost:8081/foglamp/task/latest
        curl -X GET http://localhost:8081/foglamp/task/latest?name=xxx
    """
    payload = PayloadBuilder().SELECT("id", "process_name", "state", "start_time", "end_time", "reason", "pid", "exit_code")\
        .ALIAS("return", ("start_time", 'start_time'), ("end_time", 'end_time'))\
        .FORMAT("return", ("start_time", "YYYY-MM-DD HH24:MI:SS.MS"), ("end_time", "YYYY-MM-DD HH24:MI:SS.MS"))\
        .ORDER_BY(["process_name", "asc"], ["start_time", "desc"])

    # Optional filter on a single task name.
    if 'name' in request.query and request.query['name'] != '':
        payload.WHERE(["process_name", "=", request.query['name']])

    try:
        storage = connect.get_storage()
        results = storage.query_tbl_with_payload('tasks', payload.payload())
        rows = results['rows']
        if len(rows) == 0:
            raise web.HTTPNotFound(reason="No Tasks found")

        # Rows arrive sorted by (process_name asc, start_time desc), so the
        # first row seen for each process name is its most recent run.
        latest_rows = []
        seen_process = None
        for row in rows:
            if row['process_name'] != seen_process:
                latest_rows.append(row)
                seen_process = row['process_name']

        state_names = [t.name.capitalize() for t in list(Task.State)]
        new_tasks = [{
            'id': str(task['id']),
            'name': task['process_name'],
            'state': state_names[int(task['state']) - 1],
            'startTime': str(task['start_time']),
            'endTime': str(task['end_time']),
            'exitCode': task['exit_code'],
            'reason': task['reason'],
            'pid': task['pid']
        } for task in latest_rows]
        return web.json_response({'tasks': new_tasks})
    except (ValueError, TaskNotFoundError) as ex:
        raise web.HTTPNotFound(reason=str(ex))
async def asset_reading(request):
    """Browse one sensor value of one asset and return (timestamp, value)
    pairs for that sensor.

    The number of rows returned is limited to a small number; this number
    may be altered by the query parameters limit=xxx&skip=xxx. The readings
    can also be time limited by exactly one of the query parameters
    seconds=sss, minutes=mmm or hours=hh, restricting results to readings
    processed within that period.

    Returns:
        json result on basis of
        SELECT TO_CHAR(user_ts, '__TIMESTAMP_FMT') as "timestamp", reading->>'reading'
        FROM readings WHERE asset_code = 'asset_code'
        ORDER BY user_ts DESC LIMIT 20 OFFSET 0;
    Raises:
        web.HTTPBadRequest: the storage layer returned an error payload.
        web.HTTPInternalServerError: any other unexpected failure.
    :Example:
        curl -X GET http://localhost:8081/foglamp/asset/fogbench%2Fhumidity/temperature
        curl -X GET http://localhost:8081/foglamp/asset/fogbench%2Fhumidity/temperature?limit=1
        curl -X GET http://localhost:8081/foglamp/asset/fogbench%2Fhumidity/temperature?skip=10
        curl -X GET "http://localhost:8081/foglamp/asset/fogbench%2Fhumidity/temperature?limit=1&skip=10"
    """
    asset_code = request.match_info.get('asset_code', '')
    reading = request.match_info.get('reading', '')

    _select = PayloadBuilder().SELECT(("user_ts", ["reading", reading]))\
        .ALIAS("return", ("user_ts", "timestamp"), ("reading", reading))\
        .FORMAT("return", ("user_ts", __TIMESTAMP_FMT)).chain_payload()
    _where = PayloadBuilder(_select).WHERE(["asset_code", "=", asset_code]).chain_payload()
    _and_where = where_clause(request, _where)
    # Add the order by and limit, offset clause
    _limit_skip_payload = prepare_limit_skip_payload(request, _and_where)
    payload = PayloadBuilder(_limit_skip_payload).ORDER_BY(["timestamp", "desc"]).payload()

    results = {}
    try:
        _storage = connect.get_storage()
        results = _storage.query_tbl_with_payload('readings', payload)
        response = results['rows']
    except KeyError:
        # 'rows' missing means the storage layer reported an error payload.
        raise web.HTTPBadRequest(reason=results['message'])
    except Exception as ex:
        # web.HTTPException is the abstract base of aiohttp's HTTP errors and
        # cannot be raised directly; use a concrete 500 response instead.
        raise web.HTTPInternalServerError(reason=str(ex))
    return web.json_response(response)
def delete_token(cls, token):
    """Delete a single login token.

    Args:
        token: the token value to remove from user_logins
    Returns:
        result of the storage delete
    Raises:
        ValueError: on a storage-layer failure.
    """
    storage_client = connect.get_storage()
    payload = PayloadBuilder().WHERE(['token', '=', token]).payload()
    try:
        res = storage_client.delete_from_tbl("user_logins", payload)
    except StorageServerError as ex:
        # Consistency fix: the sibling methods (delete/update) gate the
        # retry placeholder on `ex.error["retryable"]`; this method had the
        # condition inverted.  The branch is a no-op placeholder either way —
        # every storage failure is surfaced as ValueError.
        if ex.error["retryable"]:
            pass  # retry DELETE
        raise ValueError(ERROR_MSG)
    return res
async def ping(request):
    """Return basic health information.

    Args:
        request: incoming HTTP request
    Returns:
        basic health information json payload
    :Example:
        curl -X GET http://localhost:8081/foglamp/ping
    """
    # Without an auth token on the request, check whether unauthenticated
    # pings are permitted before answering.
    if not hasattr(request, 'token'):
        cfg_mgr = ConfigurationManager(connect.get_storage())
        category_item = await cfg_mgr.get_category_item('rest_api', 'allowPing')
        allow_ping = category_item['value'].lower() == 'true'
        if request.is_auth_optional is False and allow_ping is False:
            _logger.warning("Permission denied for Ping when Auth is mandatory.")
            raise web.HTTPForbidden

    since_started = time.time() - __start_time

    # Reuse the statistics handler on a cloned request and decode its body.
    stats_request = request.clone(rel_url='foglamp/statistics')
    stats_res = await get_statistics(stats_request)
    stats = json.loads(stats_res.body.decode())

    def stat_value(key):
        # Single statistics value looked up by key.
        matches = [a['value'] for a in stats if a['key'] == key]
        return int(matches[0])

    data_read = stat_value('READINGS')
    data_sent = sum(stat_value(k) for k in ('SENT_1', 'SENT_2', 'SENT_3', 'SENT_4'))
    data_purged = stat_value('PURGED')

    return web.json_response({
        'uptime': since_started,
        'dataRead': data_read,
        'dataSent': data_sent,
        'dataPurged': data_purged,
        'authenticationOptional': request.is_auth_optional
    })
async def get_tasks_latest(request):
    """Return the most recent task execution for each task name.

    :Example: curl -X GET http://localhost:8082/foglamp/task/latest
    :Example: curl -X GET http://localhost:8082/foglamp/task/latest?name=xxx
    """
    try:
        name = request.query.get('name') if 'name' in request.query else None
        columns = ("id", "process_name", "state", "start_time", "end_time", "reason", "pid", "exit_code")
        if name:
            payload = PayloadBuilder() \
                .SELECT(columns) \
                .WHERE(["process_name", "=", name]) \
                .ORDER_BY(["start_time", "desc"]) \
                .payload()
        else:
            payload = PayloadBuilder() \
                .SELECT(columns) \
                .ORDER_BY(["process_name", "asc"], ["start_time", "desc"]) \
                .payload()

        storage = connect.get_storage()
        results = storage.query_tbl_with_payload('tasks', payload)

        # Rows are sorted by process_name then start_time desc, so the first
        # row per process name is its latest run.
        latest_rows = []
        last_name = None
        for row in results['rows']:
            if row['process_name'] != last_name:
                latest_rows.append(row)
                last_name = row['process_name']

        state_names = [t.name for t in list(Task.State)]
        new_tasks = [{
            'id': str(task['id']),
            'process_name': task['process_name'],
            'state': state_names[int(task['state']) - 1],
            'start_time': str(task['start_time']),
            'end_time': str(task['end_time']),
            'exit_code': task['exit_code'],
            'reason': task['reason'],
            'pid': task['pid']
        } for task in latest_rows]
        return web.json_response({'tasks': new_tasks})
    except (ValueError, TaskNotFoundError) as ex:
        raise web.HTTPNotFound(reason=str(ex))
def update(cls, user_id, user_data):
    """ Update a user's role and/or password.

    Args:
        user_id: logged user id
        user_data: user dict; may contain 'role_id' and/or 'password'
    Returns:
        True when a row was updated; implicitly None when no row matched
    Raises:
        ValueError: on a storage-layer failure
    """
    kwargs = dict()
    if 'role_id' in user_data:
        kwargs.update({"role_id": user_data['role_id']})

    storage_client = connect.get_storage()
    hashed_pwd = None
    pwd_history_list = []
    if 'password' in user_data:
        # Only act on a non-empty password string.
        if len(user_data['password']):
            hashed_pwd = cls.hash_password(user_data['password'])
            current_datetime = datetime.now()
            kwargs.update({
                "pwd": hashed_pwd,
                "pwd_last_changed": str(current_datetime)
            })
            # get password history list
            pwd_history_list = cls._get_password_history(
                storage_client, user_id, user_data)
    try:
        # Only enabled users can be updated.
        payload = PayloadBuilder().SET(**kwargs).WHERE(
            ['id', '=', user_id]).AND_WHERE(['enabled', '=', 't']).payload()
        result = storage_client.update_tbl("users", payload)
        if result['rows_affected']:
            # FIXME: FOGL-1226 active session delete only in case of role_id and password updation
            # delete all active sessions
            cls.delete_user_tokens(user_id)
            if 'password' in user_data:
                # insert pwd history and delete oldest pwd if USED_PASSWORD_HISTORY_COUNT exceeds
                cls._insert_pwd_history_with_oldest_pwd_deletion_if_count_exceeds(
                    storage_client, user_id, hashed_pwd, pwd_history_list)
            return True
    except StorageServerError as ex:
        if ex.error["retryable"]:
            pass  # retry UPDATE
        raise ValueError(ERROR_MSG)
    except Exception:
        raise
async def create_backup(request):
    """Create a new backup.

    Returns:
        JSON response with the creation status.
    Raises:
        web.HTTPInternalServerError: the backup could not be started.
    :Example:
        curl -X POST http://localhost:8081/foglamp/backup
    """
    try:
        backup = Backup(connect.get_storage())
        status = await backup.create_backup()
    except Exception as ex:
        # web.HTTPException is the abstract base of aiohttp's HTTP errors and
        # cannot be raised directly; use a concrete 500 response instead.
        raise web.HTTPInternalServerError(reason=str(ex))
    return web.json_response({"status": status})
def is_user_exists(cls, username, password):
    """Validate a username/password pair against enabled users.

    Args:
        username: account name to look up
        password: clear-text password to verify
    Returns:
        the matching user's id when credentials are valid, otherwise None
    """
    payload = PayloadBuilder().SELECT("id", "pwd") \
        .WHERE(['uname', '=', username]) \
        .AND_WHERE(['enabled', '=', 't']).payload()
    result = connect.get_storage().query_tbl_with_payload('users', payload)

    rows = result['rows']
    if not rows:
        return None

    found_user = rows[0]
    if cls.check_password(found_user['pwd'], str(password)):
        return found_user['id']
    return None
async def get_backups(request):
    """Return a list of all backups.

    Query parameters ``limit``, ``skip`` and ``status`` page and filter the
    result set.

    Raises:
        web.HTTPBadRequest: limit/skip is not a non-negative integer, or
            status is not a recognised Status name.
        web.HTTPInternalServerError: any other failure while listing.
    :Example: curl -X GET http://localhost:8081/foglamp/backup
    :Example: curl -X GET http://localhost:8081/foglamp/backup?limit=2&skip=1&status=completed
    """
    limit = __DEFAULT_LIMIT
    if 'limit' in request.query and request.query['limit'] != '':
        try:
            limit = int(request.query['limit'])
            if limit < 0:
                raise ValueError
        except ValueError:
            raise web.HTTPBadRequest(reason="Limit must be a positive integer")

    skip = __DEFAULT_OFFSET
    if 'skip' in request.query and request.query['skip'] != '':
        try:
            skip = int(request.query['skip'])
            if skip < 0:
                raise ValueError
        except ValueError:
            raise web.HTTPBadRequest(reason="Skip/Offset must be a positive integer")

    status = None
    if 'status' in request.query and request.query['status'] != '':
        try:
            status = Status[request.query['status'].upper()].value
        except KeyError as ex:
            raise web.HTTPBadRequest(reason="{} is not a valid status".format(ex))
    try:
        backup = Backup(connect.get_storage())
        backup_json = backup.get_all_backups(limit=limit, skip=skip, status=status)

        res = []
        for row in backup_json:
            r = OrderedDict()
            r["id"] = row["id"]
            r["date"] = row["ts"]
            r["status"] = _get_status(int(row["status"]))
            res.append(r)
    except Exception as ex:
        # web.HTTPException is the abstract base of aiohttp's HTTP errors and
        # cannot be raised directly; use a concrete 500 response instead.
        raise web.HTTPInternalServerError(reason=str(ex))

    return web.json_response({"backups": res})
async def asset_summary(request):
    """Summarise one sensor of one asset: the min, max and average values
    of that sensor over the recorded readings.

    The summarised window can be limited with exactly one of the query
    parameters seconds=sss, minutes=mmm or hours=hh, restricting the
    readings to those processed within that period.

    Returns:
        json result on basis of
        SELECT MIN(reading->>'reading'), MAX(reading->>'reading'),
               AVG((reading->>'reading')::float)
        FROM readings WHERE asset_code = 'asset_code';
    Raises:
        web.HTTPBadRequest: the storage layer returned an error payload.
        web.HTTPInternalServerError: any other unexpected failure.
    :Example:
        curl -X GET http://localhost:8081/foglamp/asset/fogbench%2Fhumidity/temperature/summary
    """
    asset_code = request.match_info.get('asset_code', '')
    reading = request.match_info.get('reading', '')

    _aggregate = PayloadBuilder().AGGREGATE(["min", ["reading", reading]],
                                            ["max", ["reading", reading]],
                                            ["avg", ["reading", reading]])\
        .ALIAS('aggregate', ('reading', 'min', 'min'),
               ('reading', 'max', 'max'),
               ('reading', 'avg', 'average')).chain_payload()
    _where = PayloadBuilder(_aggregate).WHERE(["asset_code", "=", asset_code]).chain_payload()
    _and_where = where_clause(request, _where)
    payload = PayloadBuilder(_and_where).payload()

    results = {}
    try:
        _storage = connect.get_storage()
        results = _storage.query_tbl_with_payload('readings', payload)
        # for aggregates, so there can only ever be one row
        response = results['rows'][0]
    except KeyError:
        # 'rows' missing means the storage layer reported an error payload.
        raise web.HTTPBadRequest(reason=results['message'])
    except Exception as ex:
        # web.HTTPException is the abstract base of aiohttp's HTTP errors and
        # cannot be raised directly; use a concrete 500 response instead.
        raise web.HTTPInternalServerError(reason=str(ex))
    return web.json_response({reading: response})
async def get_statistics(request):
    """Return the general set of statistics.

    Args:
        request: incoming HTTP request
    Returns:
        JSON array of statistics rows (key, description, value), ordered by key.
    :Example:
        curl -X GET http://localhost:8081/foglamp/statistics
    """
    payload = PayloadBuilder().SELECT(("key", "description", "value")).ORDER_BY(["key"]).payload()
    rows = connect.get_storage().query_tbl_with_payload('statistics', payload)['rows']
    return web.json_response(rows)
async def get_audit_log_codes(request):
    """Return the array of known audit log codes with their descriptions.

    Args:
        request: incoming HTTP request
    :Example:
        curl -X GET http://localhost:8081/foglamp/audit/logcode
    """
    rows = connect.get_storage().query_tbl('log_codes')['rows']
    return web.json_response({'logCode': rows})
async def asset_summary(request):
    """Summarise one sensor of one asset: min, max and average values.

    The summarised window can be limited with exactly one of the query
    parameters seconds=sss, minutes=mmm or hours=hh, restricting the
    readings to those processed within that period.

    Return the result of the query
        SELECT MIN(reading->>'reading'), MAX(reading->>'reading'),
               AVG((reading->>'reading')::float)
        FROM readings WHERE asset_code = 'asset_code'
    """
    asset_code = request.match_info.get('asset_code', '')
    reading = request.match_info.get('reading', '')

    # TODO: FOGL-643
    json_prop = {"column": "reading", "properties": reading}
    d = OrderedDict()
    d['aggregate'] = [
        {"operation": "min", "json": json_prop, "alias": "min"},
        {"operation": "max", "json": json_prop, "alias": "max"},
        {"operation": "avg", "json": json_prop, "alias": "average"},
    ]

    _where = PayloadBuilder().WHERE(["asset_code", "=", asset_code]).chain_payload()
    d.update(where_clause(request, _where))

    results = connect.get_storage().query_tbl_with_payload('readings', json.dumps(d))
    if 'rows' not in results:
        raise web.HTTPBadRequest(reason=results['message'])
    return web.json_response({reading: results['rows']})
def filter(cls, **kwargs):
    """Fetch enabled users, optionally filtered by id and/or username.

    Args:
        kwargs: must contain 'uid' and 'username'; either value may be None
    Returns:
        list of matching user rows (id, uname, role_id)
    """
    uid = kwargs['uid']
    uname = kwargs['username']

    query = PayloadBuilder().SELECT("id", "uname", "role_id").WHERE(['enabled', '=', 't'])
    if uid is not None:
        query = query.AND_WHERE(['id', '=', uid])
    if uname is not None:
        query = query.AND_WHERE(['uname', '=', uname])

    q_payload = PayloadBuilder(query.chain_payload()).payload()
    rows = connect.get_storage().query_tbl_with_payload('users', q_payload)['rows']
    return rows
async def get_categories(request):
    """List the known categories in the configuration database.

    Args:
        request: incoming HTTP request
    :Example:
        curl -X GET http://localhost:8081/foglamp/categories
    """
    # TODO: make it optimized and elegant
    config_mgr = ConfigurationManager(connect.get_storage())
    categories = await config_mgr.get_all_category_names()
    # Each row carries (key, description) in its first two positions.
    categories_json = [{"key": row[0], "description": row[1]} for row in categories]
    return web.json_response({'categories': categories_json})
async def set_configuration_item(request):
    """Update the value of one configuration item.

    Args:
        request: category_name, config_item, {"value" : <some value>} are required
    Returns:
        JSON response carrying the updated configuration item.
    :Example:
        curl -X PUT -H "Content-Type: application/json" -d '{"value": <some value> }' http://localhost:8081/foglamp/category/{category_name}/{config_item}

        For {category_name}=>PURGE update value for {config_item}=>age
        curl -X PUT -H "Content-Type: application/json" -d '{"value": 24}' http://localhost:8081/foglamp/category/PURGE_READ/age
    """
    cat_name = request.match_info.get('category_name', None)
    item_name = request.match_info.get('config_item', None)
    data = await request.json()

    # TODO: make it optimized and elegant
    config_mgr = ConfigurationManager(connect.get_storage())

    try:
        new_value = data['value']
    except KeyError:
        raise web.HTTPBadRequest(reason='Missing required value for {}'.format(item_name))

    not_found_reason = "No detail found for the category_name: {} and config_item: {}".format(cat_name, item_name)
    try:
        await config_mgr.set_category_item_value_entry(cat_name, item_name, new_value)
    except ValueError:
        raise web.HTTPNotFound(reason=not_found_reason)

    result = await config_mgr.get_category_item(cat_name, item_name)
    if result is None:
        raise web.HTTPNotFound(reason=not_found_reason)
    return web.json_response(result)
async def delete_backup(request):
    """Delete a backup.

    Raises:
        web.HTTPBadRequest: backup id is not a valid integer.
        web.HTTPNotFound: no backup exists with the given id.
        web.HTTPInternalServerError: any other failure during deletion.
    :Example:
        curl -X DELETE http://localhost:8081/foglamp/backup/1
    """
    backup_id = request.match_info.get('backup_id', None)
    try:
        backup_id = int(backup_id)
        backup = Backup(connect.get_storage())
        backup.delete_backup(backup_id)
        return web.json_response({'message': "Backup deleted successfully"})
    except ValueError:
        raise web.HTTPBadRequest(reason='Invalid backup id')
    except exceptions.DoesNotExist:
        raise web.HTTPNotFound(reason='Backup id {} does not exist'.format(backup_id))
    except Exception as ex:
        # web.HTTPException is the abstract base of aiohttp's HTTP errors and
        # cannot be raised directly; use a concrete 500 response instead.
        raise web.HTTPInternalServerError(reason=str(ex))
async def asset(request):
    """Browse a particular asset for which we have recorded readings and
    return readings with timestamps for the asset.

    The number of readings returned defaults to a small number (20); this
    may be changed by supplying the query parameter ?limit=xx&skip=xx

    Return the result of the query
        SELECT TO_CHAR(user_ts, '__TIMESTAMP_FMT') as "timestamp", (reading)::json
        FROM readings WHERE asset_code = 'asset_code'
        ORDER BY user_ts DESC LIMIT 20 OFFSET 0
    """
    asset_code = request.match_info.get('asset_code', '')

    # TODO: FOGL-637, 640
    timestamp = {
        "column": "user_ts",
        "format": __TIMESTAMP_FMT,
        "alias": "timestamp"
    }
    d = OrderedDict()
    d['return'] = [timestamp, "reading"]

    _where = PayloadBuilder().WHERE(["asset_code", "=", asset_code]).chain_payload()
    _and_where = where_clause(request, _where)
    d.update(_and_where)

    # Add the order by, limit and skip clauses
    limit = int(request.query.get('limit')) if 'limit' in request.query else __DEFAULT_LIMIT
    offset = int(request.query.get('skip')) if 'skip' in request.query else __DEFAULT_OFFSET
    _sort_limit_skip_payload = PayloadBuilder(d).ORDER_BY(["user_ts", "desc"]).LIMIT(limit)
    if offset:
        # BUG FIX: previously a fresh PayloadBuilder(d) was created here,
        # discarding the ORDER BY / LIMIT clauses whenever skip was supplied.
        # Chain SKIP onto the same builder instead.
        _sort_limit_skip_payload = _sort_limit_skip_payload.SKIP(offset)
    d.update(_sort_limit_skip_payload.chain_payload())
    payload = json.dumps(d)

    _storage = connect.get_storage()
    results = _storage.query_tbl_with_payload('readings', payload)
    if 'rows' in results:
        return web.json_response(results['rows'])
    else:
        raise web.HTTPBadRequest(reason=results['message'])
async def restore_backup(request):
    """Restore from a backup.

    Raises:
        web.HTTPBadRequest: backup id is not a valid integer.
        web.HTTPNotFound: no backup exists with the given id.
        web.HTTPInternalServerError: any other failure during the restore.
    :Example:
        curl -X PUT http://localhost:8081/foglamp/backup/1/restore
    """
    # TODO: FOGL-861
    backup_id = request.match_info.get('backup_id', None)
    try:
        backup_id = int(backup_id)
        restore = Restore(connect.get_storage())
        status = await restore.restore_backup(backup_id)
        return web.json_response({'status': status})
    except ValueError:
        raise web.HTTPBadRequest(reason='Invalid backup id')
    except exceptions.DoesNotExist:
        raise web.HTTPNotFound(reason='Backup with {} does not exist'.format(backup_id))
    except Exception as ex:
        # web.HTTPException is the abstract base of aiohttp's HTTP errors and
        # cannot be raised directly; use a concrete 500 response instead.
        raise web.HTTPInternalServerError(reason=str(ex))
async def asset_counts(request):
    """Browse all the assets for which we have recorded readings and
    return a readings count per asset.

    Return the result of the query
        SELECT asset_code, count(*) FROM readings GROUP BY asset_code;
    """
    # TODO: FOGL-643 - Aggregate with alias support needed to use payload builder
    #       PayloadBuilder().AGGREGATE(["count", "*"]).GROUP_BY('asset_code')
    d = OrderedDict()
    d['aggregate'] = {"operation": "count", "column": "*", "alias": "count"}
    d['group'] = "asset_code"

    results = connect.get_storage().query_tbl_with_payload('readings', json.dumps(d))
    return web.json_response(results['rows'])
async def delete_certificate(request):
    """Delete a certificate and/or its private key by name.

    :Example:
        curl -X DELETE http://localhost:8081/foglamp/certificate/foglamp
    """
    cert_name = request.match_info.get('name', None)

    certs_dir = _get_certs_dir()
    cert_file = certs_dir + '/{}.cert'.format(cert_name)
    key_file = certs_dir + '/{}.key'.format(cert_name)

    # 404 only when neither file exists; otherwise delete whichever is present.
    if not os.path.isfile(cert_file) and not os.path.isfile(key_file):
        raise web.HTTPNotFound(reason='Certificate with name {} does not exist'.format(cert_name))

    # read config
    # Refuse to delete the certificate currently configured for the REST API
    # ('certificateName' item in the 'rest_api' category).
    cf_mgr = ConfigurationManager(connect.get_storage())
    result = await cf_mgr.get_category_item(category_name='rest_api', item_name='certificateName')
    if cert_name == result['value']:
        raise web.HTTPConflict(reason='Certificate with name {} is already in use, you can not delete'
                               .format(cert_name))

    msg = ''
    removed_cert = False
    if os.path.isfile(cert_file):
        os.remove(cert_file)
        msg = "{}.cert has been deleted successfully".format(cert_name)
        removed_cert = True

    removed_key = False
    if os.path.isfile(key_file):
        os.remove(key_file)
        msg = "{}.key has been deleted successfully".format(cert_name)
        removed_key = True

    if removed_key and removed_cert:
        msg = "{}.key, {}.cert have been deleted successfully".format(cert_name, cert_name)
    return web.json_response({'result': msg})