Example 1
async def safe_unwrap(
    resp: web.Response,
) -> Tuple[Optional[Union[Dict[str, Any], List[Dict[str, Any]]]], Optional[Dict]]:
    if resp.status != 200:
        body = await resp.text()
        raise web.HTTPException(reason=f"Unexpected response: '{body}'")

    payload = await resp.json()
    if not isinstance(payload, dict):
        raise web.HTTPException(reason=f"Did not receive a dict: '{payload}'")

    data, error = unwrap_envelope(payload)

    return data, error
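Note: safe_unwrap() is consumed in Example 12 below, where resp comes from an aiohttp client session. A minimal sketch of the unwrap_envelope() helper it relies on, assuming the common {"data": ..., "error": ...} envelope convention; the project's real helper may differ:

from typing import Any, Dict, List, Optional, Tuple, Union


def unwrap_envelope(
    payload: Dict[str, Any]
) -> Tuple[Optional[Union[Dict[str, Any], List[Dict[str, Any]]]], Optional[Dict]]:
    # Split an enveloped JSON payload into its data and error parts.
    return payload.get("data"), payload.get("error")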
Example 2
async def asset(request):
    """ Browse a particular asset for which we have recorded readings and
    return a readings with timestamps for the asset. The number of readings
    return is defaulted to a small number (20), this may be changed by supplying
    the query parameter ?limit=xx&skip=xx

    Returns:
          json result on basis of SELECT TO_CHAR(user_ts, '__TIMESTAMP_FMT') as "timestamp", (reading)::json FROM readings WHERE asset_code = 'asset_code' ORDER BY user_ts DESC LIMIT 20 OFFSET 0;

    :Example:
            curl -X GET http://localhost:8081/foglamp/asset/fogbench%2Fhumidity
            curl -X GET http://localhost:8081/foglamp/asset/fogbench%2Fhumidity?limit=1
            curl -X GET "http://localhost:8081/foglamp/asset/fogbench%2Fhumidity?limit=1&skip=1"
    """
    asset_code = request.match_info.get('asset_code', '')
    _select = PayloadBuilder().SELECT(("reading", "user_ts")).ALIAS("return", ("user_ts", "timestamp")). \
        FORMAT("return", ("user_ts", __TIMESTAMP_FMT)).chain_payload()
    _where = PayloadBuilder(_select).WHERE(["asset_code", "=", asset_code]).chain_payload()
    _and_where = where_clause(request, _where)

    # Add the order by and limit, offset clause
    _limit_skip_payload = prepare_limit_skip_payload(request, _and_where)
    payload = PayloadBuilder(_limit_skip_payload).ORDER_BY(["user_ts", "desc"]).payload()

    results = {}
    try:
        _readings = connect.get_readings_async()
        results = await _readings.query(payload)
        response = results['rows']
    except KeyError:
        raise web.HTTPBadRequest(reason=results['message'])
    except Exception as ex:
        raise web.HTTPException(reason=str(ex))

    return web.json_response(response)
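The where_clause() helper used here (and in Examples 13, 22 and 30) applies the seconds/minutes/hours time filters described in the docstrings. A plausible sketch under those assumptions; the real FogLAMP helper, and the 'newer' operator name, are assumptions and may differ (PayloadBuilder is imported from FogLAMP's storage client, as in the handlers themselves):

def where_clause(request, where):
    # Translate the seconds/minutes/hours query parameters into a single
    # "readings newer than N seconds" condition; only the first supplied
    # parameter is used.
    val = 0
    if 'seconds' in request.query and request.query['seconds'] != '':
        val = int(request.query['seconds'])
    elif 'minutes' in request.query and request.query['minutes'] != '':
        val = int(request.query['minutes']) * 60
    elif 'hours' in request.query and request.query['hours'] != '':
        val = int(request.query['hours']) * 60 * 60
    if val > 0:
        where = PayloadBuilder(where).AND_WHERE(
            ['user_ts', 'newer', val]).chain_payload()
    return where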
Example 3
    async def shutdown(cls, request):
        """ Shutdown the core microservice and its components

        :return: JSON payload with message key
        :Example:
            curl -X POST http://localhost:<core mgt port>/foglamp/service/shutdown
        """
        try:

            await cls._stop()
            loop = request.loop
            # allow some time
            await asyncio.sleep(2.0, loop=loop)
            _logger.info("Stopping the FogLAMP Core event loop. Good Bye!")
            loop.stop()

            return web.json_response({
                'message':
                'FogLAMP stopped successfully. '
                'Wait a few seconds for process cleanup.'
            })
        except TimeoutError as err:
            raise web.HTTPInternalServerError(reason=str(err))
        except Exception as ex:
            raise web.HTTPException(reason=str(ex))
Example 4
async def asset_counts(request):
    """ Browse all the assets for which we have recorded readings and
    return a readings count.

    Returns:
           json result on basis of SELECT asset_code, count(*) FROM readings GROUP BY asset_code;

    :Example:
            curl -X GET http://localhost:8081/foglamp/asset
    """
    payload = PayloadBuilder().AGGREGATE(["count", "*"]).ALIAS("aggregate", ("*", "count", "count"))\
        .GROUP_BY("asset_code").payload()

    results = {}
    try:
        _readings = connect.get_readings_async()
        results = await _readings.query(payload)
        response = results['rows']
        asset_json = [{"count": r['count'], "assetCode": r['asset_code']} for r in response]
    except KeyError:
        raise web.HTTPBadRequest(reason=results['message'])
    except Exception as ex:
        raise web.HTTPException(reason=str(ex))

    return web.json_response(asset_json)
Example 5
async def get_backup_details(request):
    """ Returns the details of a backup

    :Example: curl -X GET http://localhost:8081/foglamp/backup/1
    """
    backup_id = request.match_info.get('backup_id', None)
    try:
        backup_id = int(backup_id)
        backup = Backup(connect.get_storage())
        backup_json = backup.get_backup_details(backup_id)

        resp = {
            "status": _get_status(int(backup_json["status"])),
            'id': backup_json["id"],
            'date': backup_json["ts"]
        }

    except ValueError:
        raise web.HTTPBadRequest(reason='Invalid backup id')
    except exceptions.DoesNotExist:
        raise web.HTTPNotFound(
            reason='Backup id {} does not exist'.format(backup_id))
    except Exception as ex:
        raise web.HTTPException(reason=(str(ex)))

    return web.json_response(resp)
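The _get_status() helper referenced here and in Example 19 maps the stored integer status onto a readable name. An illustrative sketch only, assuming a Status enum like the one queried in get_backups(); the real FogLAMP status names and values may differ:

from enum import IntEnum


class Status(IntEnum):
    RUNNING = 1    # assumed values, for illustration only
    COMPLETED = 2
    FAILED = 3


def _get_status(status_code: int) -> str:
    # Return the lower-case name of the status, or "unknown" for
    # values outside the enum.
    try:
        return Status(status_code).name.lower()
    except ValueError:
        return "unknown"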
Example 6
async def restore_backup(request):
    """
    Restore from a backup
    :Example: curl -X PUT http://localhost:8081/foglamp/backup/1/restore
    """

    raise web.HTTPNotImplemented(
        reason='Restore backup method is not implemented yet.')

    backup_id = request.match_info.get('backup_id', None)

    if not backup_id:
        raise web.HTTPBadRequest(reason='Backup id is required')

    try:
        backup_id = int(backup_id, 10)
        # TODO: FOGL-861
        # restore = Restore(connect.get_storage())
        # status = restore.restore_backup(backup_id)
        # return web.json_response({'status': status})
    except ValueError:
        raise web.HTTPBadRequest(reason='Invalid backup id')
    except exceptions.DoesNotExist:
        raise web.HTTPNotFound(
            reason='Backup id {} does not exist'.format(backup_id))
    except Exception as ex:
        raise web.HTTPException(reason=str(ex))
Example 7
 async def get_connection(self, request):
     connection_id = request.match_info["connection_id"]
     print(f"get_connection {connection_id=}")
     if connection_id in self.connections:
         return web.Response()
     else:
         raise web.HTTPException(reason=f"{connection_id=} doesn't exist.")
Example 8
async def wkhtmltopdf(request):
    # TODO handle empty requests (without args)
    reader = await request.multipart()
    args = []
    tmpdir = tempfile.mkdtemp()
    try:
        # read arguments, store files in temporary files
        while True:
            part = await reader.next()  # noqa: B305
            if not part:
                break
            if part.name == "option":
                option = await part.text()
                if not option:
                    continue
                args.append(option)
            elif part.name == "file":
                assert part.filename
                # It's important to preserve as much as possible of the
                # original filename because some javascript can depend on it
                # through document.location.
                filename = os.path.join(tmpdir,
                                        os.path.basename(part.filename))
                # TODO what if multiple files with same basename?
                assert not os.path.exists(filename)
                with open(filename, "wb") as f:
                    while True:
                        chunk = await part.read_chunk(CHUNK_SIZE)
                        if not chunk:
                            break
                        f.write(chunk)
                    args.append(filename)
        is_pdf_command = _is_pdf_command(args)
        # run wkhtmltopdf and stream response
        if is_pdf_command:
            args.append("-")
        cmd = [_wkhtmltopdf_bin()] + args
        print(">", " ".join(cmd), file=sys.stderr)
        proc = await asyncio.create_subprocess_exec(
            *cmd, stdout=asyncio.subprocess.PIPE)
        response = web.StreamResponse(status=200)
        response.enable_chunked_encoding()
        if is_pdf_command:
            response.content_type = "application/pdf"
        else:
            response.content_type = "text/plain"
        await response.prepare(request)
        while True:
            chunk = await proc.stdout.read()
            if not chunk:
                break
            await response.write(chunk)
        r = await proc.wait()
        if r != 0:
            raise web.HTTPException()
        print("<", " ".join(cmd), file=sys.stderr)
        return response
    finally:
        shutil.rmtree(tmpdir)
Example 9
async def create_category(request):
    """
    Args:
         request: A JSON object that defines the category

    Returns:
            category info

    :Example:
            curl -d '{"key": "TEST", "description": "description", "value": {"info": {"description": "Test", "type": "boolean", "default": "true"}}}' -X POST http://localhost:8081/foglamp/category
    """
    try:
        cf_mgr = ConfigurationManager(connect.get_storage())
        data = await request.json()
        if not isinstance(data, dict):
            raise ValueError('Data payload must be a dictionary')

        valid_post_keys = ['key', 'description', 'value']
        for k in valid_post_keys:
            if k not in list(data.keys()):
                raise KeyError(
                    "'{}' param required to create a category".format(k))

        category_name = data.get('key')
        category_desc = data.get('description')
        category_value = data.get('value')

        should_keep_original_items = data.get('keep_original_items', False)
        if not isinstance(should_keep_original_items, bool):
            raise TypeError(
                'keep_original_items should be boolean true | false')

        await cf_mgr.create_category(
            category_name=category_name,
            category_description=category_desc,
            category_value=category_value,
            keep_original_items=should_keep_original_items)

        category_info = await cf_mgr.get_category_all_items(
            category_name=category_name)
        if category_info is None:
            raise LookupError('No such %s found' % category_name)

    except (KeyError, ValueError, TypeError) as ex:
        raise web.HTTPBadRequest(reason=str(ex))

    except LookupError as ex:
        raise web.HTTPNotFound(reason=str(ex))

    except Exception as ex:
        raise web.HTTPException(reason=str(ex))

    return web.json_response({
        "key": category_name,
        "description": category_desc,
        "value": category_info
    })
Example 10
def test_HTTPException_retains_cause() -> None:
    with pytest.raises(web.HTTPException) as ei:
        try:
            raise Exception("CustomException")
        except Exception as exc:
            raise web.HTTPException() from exc
    tb = "".join(format_exception(ei.type, ei.value, ei.tb))
    assert "CustomException" in tb
    assert "direct cause" in tb
Example 11
def test_HTTPException_retains_cause():
    with pytest.raises(web.HTTPException) as ei:
        try:
            raise Exception('CustomException')
        except Exception as exc:
            raise web.HTTPException() from exc
    tb = ''.join(format_exception(ei.type, ei.value, ei.tb))
    assert 'CustomException' in tb
    assert 'direct cause' in tb
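Both tests assume the usual pytest and traceback imports at module level (a sketch; the originals live in aiohttp's own test suite):

import pytest
from traceback import format_exception

from aiohttp import web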
Example 12
async def get_project_files_metadata(app: web.Application, location_id: str,
                                     uuid_filter: str,
                                     user_id: int) -> List[Dict[str, Any]]:
    session = get_client_session(app)

    url: URL = (_get_base_storage_url(app) / "locations" / location_id /
                "files" / "metadata")
    params = dict(user_id=user_id, uuid_filter=uuid_filter)
    async with session.get(url, ssl=False, params=params) as resp:
        data, _ = await safe_unwrap(resp)

        if data is None:
            raise web.HTTPException(
                reason=f"No url found in response: '{data}'")
        if not isinstance(data, list):
            raise web.HTTPException(
                reason=f"No list payload received as data: '{data}'")

        return data
Example 13
async def asset_reading(request):
    """ Browse a particular sensor value of a particular asset for which we have recorded readings and
    return the timestamp and reading value for that sensor. The number of rows returned
    is limited to a small number; this number may be altered by use of
    the query parameters limit=xxx&skip=xxx.

    The readings returned can also be time limited by use of the query
    parameter seconds=sss. This defines a number of seconds that the reading
    must have been processed in. Older readings than this will not be returned.

    The readings returned can also be time limited by use of the query
    parameter minutes=mmm. This defines a number of minutes that the reading
    must have been processed in. Older readings than this will not be returned.

    The readings returned can also be time limited by use of the query
    parameter hours=hh. This defines a number of hours that the reading
    must have been processed in. Older readings than this will not be returned.

    Only one of hours, minutes or seconds should be supplied

    Returns:
           json result on basis of SELECT TO_CHAR(user_ts, '__TIMESTAMP_FMT') as "timestamp", reading->>'reading' FROM readings WHERE asset_code = 'asset_code' ORDER BY user_ts DESC LIMIT 20 OFFSET 0;

    :Example:
            curl -X GET http://localhost:8081/foglamp/asset/fogbench%2Fhumidity/temperature
            curl -X GET http://localhost:8081/foglamp/asset/fogbench%2Fhumidity/temperature?limit=1
            curl -X GET http://localhost:8081/foglamp/asset/fogbench%2Fhumidity/temperature?skip=10
            curl -X GET "http://localhost:8081/foglamp/asset/fogbench%2Fhumidity/temperature?limit=1&skip=10"
    """
    asset_code = request.match_info.get('asset_code', '')
    reading = request.match_info.get('reading', '')

    _select = PayloadBuilder().SELECT(("user_ts", ["reading", reading]))\
        .ALIAS("return", ("user_ts", "timestamp"), ("reading", reading))\
        .FORMAT("return", ("user_ts", __TIMESTAMP_FMT)).chain_payload()
    _where = PayloadBuilder(_select).WHERE(["asset_code", "=",
                                            asset_code]).chain_payload()
    _and_where = where_clause(request, _where)

    # Add the order by and limit, offset clause
    _limit_skip_payload = prepare_limit_skip_payload(request, _and_where)
    payload = PayloadBuilder(_limit_skip_payload).ORDER_BY(
        ["timestamp", "desc"]).payload()

    results = {}
    try:
        _storage = connect.get_storage()
        results = _storage.query_tbl_with_payload('readings', payload)
        response = results['rows']
    except KeyError:
        raise web.HTTPBadRequest(reason=results['message'])
    except Exception as ex:
        raise web.HTTPException(reason=str(ex))

    return web.json_response(response)
Example 14
 async def post(self) -> Dict:
     multi_data = await self.request.post()
     log.debug(multi_data)
     partner_access = create_partner_access(
         con=self.request.app['DISTRIBUTOR_DB'])
     try:
         partner = await partner_access.change_partner_data(**multi_data)
         return partner.to_dict()
     except sqlite3.DatabaseError as error:
         log.error(error)
         raise web.HTTPException()
Example 15
    def get_coords(self) -> Tuple[float, float]:
        """
        Get coordinates from GET parameters or raise an error
        in case they are missing or in wrong format.

        The format used is Decimal degrees
        """
        lat = self.request.query.get('lat', None)
        lon = self.request.query.get('lon', None)
        if not (lat and lon):
            raise web.HTTPException(text=dumps(
                {'error': 'both lat and lon are required'}),
                                    content_type='application/json')

        try:
            lat, lon = map(float, [lat, lon])
        except ValueError:
            raise web.HTTPException(text=dumps(
                {'error': 'Coordinates should be in float point format'}),
                                    content_type='application/json')
        return lat, lon
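For comparison, a minimal sketch (not from the original project) of the same JSON error body raised through a concrete subclass; unlike the bare web.HTTPException above, web.HTTPBadRequest carries a proper 400 status:

from json import dumps

from aiohttp import web


def require_coords(query) -> tuple:
    # Validate lat/lon query parameters and return them as floats,
    # answering with a JSON 400 error if they are missing or malformed.
    lat = query.get('lat')
    lon = query.get('lon')
    if not (lat and lon):
        raise web.HTTPBadRequest(
            text=dumps({'error': 'both lat and lon are required'}),
            content_type='application/json')
    try:
        return float(lat), float(lon)
    except ValueError:
        raise web.HTTPBadRequest(
            text=dumps({'error': 'Coordinates should be in floating point format'}),
            content_type='application/json')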
Example 16
async def create_category(request):
    """
    Args:
         request: A JSON object that defines the category

    Returns:
            category info

    :Example:
            curl -d '{"key": "TEST", "description": "description", "value": {"info": {"description": "Test", "type": "boolean", "default": "true"}}}' -X POST http://localhost:8081/foglamp/category
            curl -d '{"key": "TEST", "description": "description", "display_name": "Display test", "value": {"info": {"description": "Test", "type": "boolean", "default": "true"}}}' -X POST http://localhost:8081/foglamp/category
            curl -d '{"key": "TEST", "description": "description", "value": {"info": {"description": "Test", "type": "boolean", "default": "true"}}, "children":["child1", "child2"]}' -X POST http://localhost:8081/foglamp/category
    """
    keep_original_items = None
    if 'keep_original_items' in request.query and request.query['keep_original_items'] != '':
        keep_original_items = request.query['keep_original_items'].lower()
        if keep_original_items not in ['true', 'false']:
            raise ValueError("Only 'true' and 'false' are allowed for keep_original_items. {} given.".format(keep_original_items))

    try:
        cf_mgr = ConfigurationManager(connect.get_storage_async())
        data = await request.json()
        if not isinstance(data, dict):
            raise ValueError('Data payload must be a dictionary')

        valid_post_keys = ['key', 'description', 'value']
        for k in valid_post_keys:
            if k not in list(data.keys()):
                raise KeyError("'{}' param required to create a category".format(k))

        category_name = data.get('key')
        category_desc = data.get('description')
        category_value = data.get('value')
        category_display_name = data.get('display_name')
        should_keep_original_items = True if keep_original_items == 'true' else False

        await cf_mgr.create_category(category_name=category_name, category_description=category_desc,
                                     category_value=category_value, display_name=category_display_name, keep_original_items=should_keep_original_items)

        category_info = await cf_mgr.get_category_all_items(category_name=category_name)
        if category_info is None:
            raise LookupError('No such %s found' % category_name)
        result = {"key": category_name, "description": category_desc, "value": category_info, "displayName": cf_mgr._cacheManager.cache[category_name]['displayName']}
        if data.get('children'):
            r = await cf_mgr.create_child_category(category_name, data.get('children'))
            result.update(r)
    except (KeyError, ValueError, TypeError) as ex:
        raise web.HTTPBadRequest(reason=str(ex))
    except LookupError as ex:
        raise web.HTTPNotFound(reason=str(ex))
    except Exception as ex:
        raise web.HTTPException(reason=str(ex))
    return web.json_response(result)
Example 17
async def create_backup(request):
    """ Creates a backup

    :Example: curl -X POST http://localhost:8081/foglamp/backup
    """
    try:
        backup = Backup(connect.get_storage_async())
        status = await backup.create_backup()
    except Exception as ex:
        raise web.HTTPException(reason=str(ex))

    return web.json_response({"status": status})
Example 18
 async def post_connection(self, request):
     connection_id = request.match_info["connection_id"]
     body = await request.json()
     message = body["message"]
     print(f"<- HTTP: {connection_id=} {message=}")
     if connection_id in self.connections:
         websocket: WebSocketServerProtocol = self.connections[
             connection_id]
         print(f"<- WEBSOCKET: {connection_id=} {message=}")
         await websocket.send(message)
         return web.Response()
     else:
         raise web.HTTPException(reason=f"{connection_id=} doesn't exist.")
Example 19
async def get_backups(request):
    """ Returns a list of all backups

    :Example: curl -X GET http://localhost:8081/foglamp/backup
    :Example: curl -X GET "http://localhost:8081/foglamp/backup?limit=2&skip=1&status=completed"
    """
    limit = __DEFAULT_LIMIT
    if 'limit' in request.query and request.query['limit'] != '':
        try:
            limit = int(request.query['limit'])
            if limit < 0:
                raise ValueError
        except ValueError:
            raise web.HTTPBadRequest(reason="Limit must be a positive integer")

    skip = __DEFAULT_OFFSET
    if 'skip' in request.query and request.query['skip'] != '':
        try:
            skip = int(request.query['skip'])
            if skip < 0:
                raise ValueError
        except ValueError:
            raise web.HTTPBadRequest(
                reason="Skip/Offset must be a positive integer")

    status = None
    if 'status' in request.query and request.query['status'] != '':
        try:
            status = Status[request.query['status'].upper()].value
        except KeyError as ex:
            raise web.HTTPBadRequest(
                reason="{} is not a valid status".format(ex))
    try:
        backup = Backup(connect.get_storage())
        backup_json = backup.get_all_backups(limit=limit,
                                             skip=skip,
                                             status=status)

        res = []
        for row in backup_json:
            r = OrderedDict()
            r["id"] = row["id"]
            r["date"] = row["ts"]
            r["status"] = _get_status(int(row["status"]))
            res.append(r)

    except Exception as ex:
        raise web.HTTPException(reason=str(ex))

    return web.json_response({"backups": res})
Example 20
async def get_syslog_entries(request):
    """ Returns a list of syslog trail entries sorted with most recent first and total count
        (including the criteria search if applied)

    :Example:
        curl -X GET http://localhost:8081/foglamp/syslog
        curl -X GET "http://localhost:8081/foglamp/syslog?limit=5"
        curl -X GET "http://localhost:8081/foglamp/syslog?offset=5"
        curl -X GET "http://localhost:8081/foglamp/syslog?source=storage"
        curl -X GET "http://localhost:8081/foglamp/syslog?limit=5&source=storage"
        curl -X GET "http://localhost:8081/foglamp/syslog?limit=5&offset=5&source=storage"
    """

    try:
        limit = int(request.query['limit']) if 'limit' in request.query and request.query['limit'] != '' else __DEFAULT_LIMIT
        if limit < 0:
            raise ValueError
    except (Exception, ValueError):
        raise web.HTTPBadRequest(reason="Limit must be a positive integer")

    try:
        offset = int(request.query['offset']) if 'offset' in request.query and request.query['offset'] != '' else __DEFAULT_OFFSET
        if offset < 0:
            raise ValueError
    except (Exception, ValueError):
        raise web.HTTPBadRequest(reason="Offset must be a positive integer OR Zero")

    try:
        source = request.query['source'] if 'source' in request.query and request.query['source'] != '' else __DEFAULT_LOG_TYPE
        if source.lower() not in ['foglamp', 'storage', 'foglamp storage']:
            raise ValueError
        valid_source = {'foglamp': "FogLAMP", 'storage': 'Storage', 'foglamp storage': 'FogLAMP Storage'}
    except ValueError:
        raise web.HTTPBadRequest(reason="{} is not a valid source".format(source))

    try:
        # Get total lines
        cmd = __GET_SYSLOG_TOTAL_MATCHED_LINES.format(valid_source[source.lower()], _SYSLOG_FILE)
        t = subprocess.Popen(cmd, shell=True, stdout=subprocess.PIPE).stdout.readlines()
        tot_lines = int(t[0].decode())

        # Get filtered lines
        cmd = __GET_SYSLOG_CMD_TEMPLATE.format(valid_source[source.lower()], _SYSLOG_FILE, tot_lines - offset, limit)
        a = subprocess.Popen(cmd, shell=True, stdout=subprocess.PIPE).stdout.readlines()
        c = [b.decode() for b in a]  # Since "a" contains return value in bytes, convert it to string
    except (OSError, Exception) as ex:
        raise web.HTTPException(reason=str(ex))

    return web.json_response({'logs': c, 'count': tot_lines})
Example 21
async def get_message(request):
    """
    Handles a GET request and returns JSON with the message status.
    :param request:
    :return:
    """
    message_id = int(request.match_info.get('id'))
    message = await read_message(message_id)
    if message:
        text = 'Message status: {}'.format(message.status)
        print(text)
        body = {'status': message.status}
        return web.json_response(body, dumps=functools.partial(json.dumps, indent=4))
    else:
        return web.HTTPException(text='There is no message with such id.')
Example 22
async def asset_summary(request):
    """ Browse all the assets for which we have recorded readings and
    return a summary for a particular sensor. The values that are
    returned are the min, max and average values of the sensor.

    The readings summarised can also be time limited by use of the query
    parameter seconds=sss. This defines a number of seconds that the reading
    must have been processed in. Older readings than this will not be summarised.

    The readings summarised can also be time limited by use of the query
    parameter minutes=mmm. This defines a number of minutes that the reading
    must have been processed in. Older readings than this will not be summarised.

    The readings summarised can also be time limited by use of the query
    parameter hours=hh. This defines a number of hours that the reading
    must have been processed in. Older readings than this will not be summarised.

    Only one of hours, minutes or seconds should be supplied

    Returns:
           json result on basis of SELECT MIN(reading->>'reading'), MAX(reading->>'reading'), AVG((reading->>'reading')::float) FROM readings WHERE asset_code = 'asset_code';

    :Example:
            curl -X GET http://localhost:8081/foglamp/asset/fogbench%2Fhumidity/temperature/summary
    """
    asset_code = request.match_info.get('asset_code', '')
    reading = request.match_info.get('reading', '')
    _aggregate = PayloadBuilder().AGGREGATE(["min", ["reading", reading]], ["max", ["reading", reading]],
                                            ["avg", ["reading", reading]])\
        .ALIAS('aggregate', ('reading', 'min', 'min'), ('reading', 'max', 'max'),
               ('reading', 'avg', 'average')).chain_payload()
    _where = PayloadBuilder(_aggregate).WHERE(["asset_code", "=",
                                               asset_code]).chain_payload()
    _and_where = where_clause(request, _where)
    payload = PayloadBuilder(_and_where).payload()

    results = {}
    try:
        _storage = connect.get_storage()
        results = _storage.query_tbl_with_payload('readings', payload)
        # this is an aggregate query, so there can only ever be one row
        response = results['rows'][0]
    except KeyError:
        raise web.HTTPBadRequest(reason=results['message'])
    except Exception as ex:
        raise web.HTTPException(reason=str(ex))

    return web.json_response({reading: response})
Example 23
async def delete_backup(request):
    """ Delete a backup

    :Example: curl -X DELETE http://localhost:8081/foglamp/backup/1
    """
    backup_id = request.match_info.get('backup_id', None)
    try:
        backup_id = int(backup_id)
        backup = Backup(connect.get_storage_async())
        await backup.delete_backup(backup_id)
        return web.json_response({'message': "Backup deleted successfully"})
    except ValueError:
        raise web.HTTPBadRequest(reason='Invalid backup id')
    except exceptions.DoesNotExist:
        raise web.HTTPNotFound(reason='Backup id {} does not exist'.format(backup_id))
    except Exception as ex:
        raise web.HTTPException(reason=str(ex))
Example 24
async def restart(request):
    """
    :Example:
            curl -X PUT http://localhost:8081/foglamp/restart
    """

    try:
        _logger.info("Executing controlled shutdown and start")
        asyncio.ensure_future(server.Server.restart(request),
                              loop=request.loop)
        return web.json_response(
            {'message': 'FogLAMP restart has been scheduled.'})
    except TimeoutError as e:
        _logger.exception("Error while stopping FogLAMP server: %s", e)
        raise web.HTTPInternalServerError(reason=str(e))
    except Exception as ex:
        _logger.exception("Error while stopping FogLAMP server: %s", ex)
        raise web.HTTPException(reason=str(ex))
Example 25
async def restore_backup(request):
    """
    Restore from a backup
    :Example: curl -X PUT http://localhost:8081/foglamp/backup/1/restore
    """

    # TODO: FOGL-861
    backup_id = request.match_info.get('backup_id', None)
    try:
        backup_id = int(backup_id)
        restore = Restore(connect.get_storage_async())
        status = await restore.restore_backup(backup_id)
        return web.json_response({'status': status})
    except ValueError:
        raise web.HTTPBadRequest(reason='Invalid backup id')
    except exceptions.DoesNotExist:
        raise web.HTTPNotFound(reason='Backup id {} does not exist'.format(backup_id))
    except Exception as ex:
        raise web.HTTPException(reason=str(ex))
Example 26
async def get_asset_tracker_events(request):
    """
    Args:
        request:

    Returns:
            asset track records

    :Example:
            curl -X GET http://localhost:8081/foglamp/track
            curl -X GET http://localhost:8081/foglamp/track?asset=XXX
            curl -X GET http://localhost:8081/foglamp/track?event=XXX
            curl -X GET http://localhost:8081/foglamp/track?service=XXX
    """
    payload = PayloadBuilder().SELECT("asset", "event", "service", "foglamp", "plugin", "ts") \
        .ALIAS("return", ("ts", 'timestamp')).FORMAT("return", ("ts", "YYYY-MM-DD HH24:MI:SS.MS")) \
        .WHERE(['1', '=', 1])
    if 'asset' in request.query and request.query['asset'] != '':
        asset = urllib.parse.unquote(request.query['asset'])
        payload.AND_WHERE(['asset', '=', asset])
    if 'event' in request.query and request.query['event'] != '':
        event = request.query['event']
        payload.AND_WHERE(['event', '=', event])
    if 'service' in request.query and request.query['service'] != '':
        service = urllib.parse.unquote(request.query['service'])
        payload.AND_WHERE(['service', '=', service])

    storage_client = connect.get_storage_async()
    payload = PayloadBuilder(payload.chain_payload())
    try:
        result = await storage_client.query_tbl_with_payload(
            'asset_tracker', payload.payload())
        response = result['rows']
    except KeyError:
        raise web.HTTPBadRequest(reason=result['message'])
    except Exception as ex:
        raise web.HTTPException(reason=str(ex))

    return web.json_response({'track': response})
Example 27
async def shutdown(request):
    """
    Args:
        request:

    Returns:

    :Example:
            curl -X PUT http://localhost:8081/foglamp/shutdown
    """

    try:
        loop = request.loop
        loop.call_later(2, do_shutdown, request)
        return web.json_response({
            'message':
            'FogLAMP shutdown has been scheduled. '
            'Wait a few seconds for process cleanup.'
        })
    except TimeoutError as err:
        raise web.HTTPInternalServerError(reason=str(err))
    except Exception as ex:
        raise web.HTTPException(reason=str(ex))
Example 28
async def get_backup_download(request):
    """ Download back up file by id

    :Example:
        wget -O foglamp-backup-1.tar.gz http://localhost:8081/foglamp/backup/1/download

    """
    backup_id = request.match_info.get('backup_id', None)
    try:
        backup_id = int(backup_id)
        backup = Backup(connect.get_storage_async())
        backup_json = await backup.get_backup_details(backup_id)

        # Strip filename from backup path
        file_name_path = str(backup_json["file_name"]).split('data/backup/')
        file_name = str(file_name_path[1])
        dir_name = _FOGLAMP_DATA + '/backup/' if _FOGLAMP_DATA else _FOGLAMP_ROOT + "/data/backup/"
        source = dir_name + file_name

        # Create tar file
        t = tarfile.open(source + ".tar.gz", "w:gz")
        t.add(source, arcname=os.path.basename(source))
        t.close()

        # Path of tar.gz file
        gz_path = Path(source + ".tar.gz")

    except ValueError:
        raise web.HTTPBadRequest(reason='Invalid backup id')
    except exceptions.DoesNotExist:
        raise web.HTTPNotFound(
            reason='Backup id {} does not exist'.format(backup_id))
    except Exception as ex:
        raise web.HTTPException(reason=(str(ex)))

    return web.FileResponse(path=gz_path)
Example 29
def abort(code, reason=None):
    if code == 400:
        raise web.HTTPBadRequest(reason=reason)
    if code == 404:
        raise web.HTTPNotFound(reason=reason)
    raise web.HTTPException(reason=reason)
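A minimal usage sketch (not from the original project) showing how the abort() helper above might be called from a handler; the route, the item registry and the messages are assumptions:

from aiohttp import web


async def get_item(request: web.Request) -> web.Response:
    item_id = request.match_info.get("item_id", "")
    if not item_id.isdigit():
        abort(400, reason="item id must be numeric")
    item = request.app.get("items", {}).get(int(item_id))
    if item is None:
        abort(404, reason="item not found")
    return web.json_response(item)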
Example 30
async def asset_averages(request):
    """ Browse all the assets for which we have recorded readings and
    return a series of averages per second, minute or hour.

    The readings averaged can also be time limited by use of the query
    parameter seconds=sss. This defines a number of seconds that the reading
    must have been processed in. Older readings than this will not be summarised.

    The readings averaged can also be time limited by use of the query
    parameter minutes=mmm. This defines a number of minutes that the reading
    must have been processed in. Older readings than this will not be summarised.

    The readings averaged can also be time limited by use of the query
    parameter hours=hh. This defines a number of hours that the reading
    must have been processed in. Older readings than this will not be summarised.

    Only one of hours, minutes or seconds should be supplied

    The amount of time covered by each returned value is set using the
    query parameter group. This may be set to seconds, minutes or hours

    Returns:
            on the basis of
            SELECT min((reading->>'reading')::float) AS "min",
                   max((reading->>'reading')::float) AS "max",
                   avg((reading->>'reading')::float) AS "average",
                   to_char(user_ts, 'YYYY-MM-DD HH24:MI:SS') AS "timestamp"
            FROM foglamp.readings
                   WHERE asset_code = 'asset_code' AND
                     reading ? 'reading'
            GROUP BY to_char(user_ts, 'YYYY-MM-DD HH24:MI:SS')
            ORDER BY timestamp DESC;

    :Example:
            curl -X GET http://localhost:8081/foglamp/asset/fogbench%2Fhumidity/temperature/series
            curl -X GET "http://localhost:8081/foglamp/asset/fogbench%2Fhumidity/temperature/series?limit=1&skip=1"
            curl -X GET http://localhost:8081/foglamp/asset/fogbench%2Fhumidity/temperature/series?hours=1
            curl -X GET http://localhost:8081/foglamp/asset/fogbench%2Fhumidity/temperature/series?minutes=60
            curl -X GET http://localhost:8081/foglamp/asset/fogbench%2Fhumidity/temperature/series?seconds=3600
            curl -X GET http://localhost:8081/foglamp/asset/fogbench%2Fhumidity/temperature/series?group=seconds
            curl -X GET http://localhost:8081/foglamp/asset/fogbench%2Fhumidity/temperature/series?group=minutes
            curl -X GET http://localhost:8081/foglamp/asset/fogbench%2Fhumidity/temperature/series?group=hours
    """
    asset_code = request.match_info.get('asset_code', '')
    reading = request.match_info.get('reading', '')

    ts_restraint = 'YYYY-MM-DD HH24:MI:SS'
    if 'group' in request.query and request.query['group'] != '':
        _group = request.query['group']
        if _group in ('seconds', 'minutes', 'hours'):
            if _group == 'seconds':
                ts_restraint = 'YYYY-MM-DD HH24:MI:SS'
            elif _group == 'minutes':
                ts_restraint = 'YYYY-MM-DD HH24:MI'
            elif _group == 'hours':
                ts_restraint = 'YYYY-MM-DD HH24'
        else:
            raise web.HTTPBadRequest(reason="{} is not a valid group".format(_group))

    _aggregate = PayloadBuilder().AGGREGATE(["min", ["reading", reading]], ["max", ["reading", reading]],
                                            ["avg", ["reading", reading]])\
        .ALIAS('aggregate', ('reading', 'min', 'min'), ('reading', 'max', 'max'),
               ('reading', 'avg', 'average')).chain_payload()
    _where = PayloadBuilder(_aggregate).WHERE(["asset_code", "=", asset_code]).chain_payload()
    _and_where = where_clause(request, _where)

    # Add the GROUP BY
    _group = PayloadBuilder(_and_where).GROUP_BY("user_ts").ALIAS("group", ("user_ts", "timestamp"))\
        .FORMAT("group", ("user_ts", ts_restraint)).chain_payload()

    # Add LIMIT, OFFSET, ORDER BY timestamp DESC
    _limit_skip_payload = prepare_limit_skip_payload(request, _group)
    payload = PayloadBuilder(_limit_skip_payload).ORDER_BY(["user_ts", "desc"]).payload()

    results = {}
    try:
        _readings = connect.get_readings_async()
        results = await _readings.query(payload)
        response = results['rows']
    except KeyError:
        raise web.HTTPBadRequest(reason=results['message'])
    except Exception as ex:
        raise web.HTTPException(reason=str(ex))

    return web.json_response(response)