Example 1
    async def test_add_notification_service(self, client, payload):
        data = json.loads(payload)
        sch_id = '45876056-e04c-4cde-8a82-1d8dbbbe6d72'

        async def async_mock_get_schedule():
            schedule = StartUpSchedule()
            schedule.schedule_id = sch_id
            return schedule

        # Async stand-in for query_tbl_with_payload: assert on the payload
        # each table lookup receives, then return a canned storage result.
        # (async def replaces the deprecated @asyncio.coroutine decorator,
        # which was removed in Python 3.11.)
        async def q_result(*args):
            table = args[0]
            _payload = json.loads(args[1])
            if table == 'schedules':
                if _payload['return'][0] == 'process_name':
                    assert {"return": ["process_name"]} == _payload
                    return {'rows': [{'process_name': 'purge'}, {'process_name': 'stats collector'}], 'count': 2}
                else:
                    assert {"return": ["schedule_name"], "where": {"column": "schedule_name", "condition": "=",
                                                                   "value": data['name']}} == _payload

                    return {'count': 0, 'rows': []}
            if table == 'scheduled_processes':
                assert {"return": ["name"], "where": {"column": "name", "condition": "=",
                                                      "value": "notification_c"}} == _payload
                return {'count': 0, 'rows': []}

        expected_insert_resp = {'rows_affected': 1, "response": "inserted"}

        server.Server.scheduler = Scheduler(None, None)
        storage_client_mock = MagicMock(StorageClientAsync)
        c_mgr = ConfigurationManager(storage_client_mock)
        with patch.object(connect, 'get_storage_async', return_value=storage_client_mock):
            with patch.object(c_mgr, 'get_category_all_items', return_value=self.async_mock(None)) as patch_get_cat_info:
                with patch.object(storage_client_mock, 'query_tbl_with_payload', side_effect=q_result):
                    with patch.object(storage_client_mock, 'insert_into_tbl', return_value=self.async_mock(expected_insert_resp)) as insert_table_patch:
                        with patch.object(server.Server.scheduler, 'save_schedule', return_value=self.async_mock("")) as patch_save_schedule:
                            with patch.object(server.Server.scheduler, 'get_schedule_by_name', return_value=async_mock_get_schedule()) as patch_get_schedule:
                                resp = await client.post('/fledge/service', data=payload)
                                server.Server.scheduler = None
                                assert 200 == resp.status
                                result = await resp.text()
                                json_response = json.loads(result)
                                assert {'id': sch_id, 'name': data['name']} == json_response
                            patch_get_schedule.assert_called_once_with(data['name'])
                        patch_save_schedule.assert_called_once()
                    args, kwargs = insert_table_patch.call_args
                    assert 'scheduled_processes' == args[0]
                    p = json.loads(args[1])
                    assert {'name': 'notification_c', 'script': '["services/notification_c"]'} == p
            patch_get_cat_info.assert_called_once_with(category_name=data['name'])
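
These tests rely on a self.async_mock(...) helper that the snippets never show. A minimal sketch of such a helper on the test class (the exact definition is an assumption; it only needs to wrap a plain value in an awaitable so it can be used as a patched call's return_value):

    async def async_mock(self, return_value):
        # Wrap a plain value in a coroutine so it can act as the
        # return_value of a patched call that the code under test awaits.
        return return_value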
Example 2
    async def test_dupe_notification_service_schedule(self, client):
        payload = '{"name": "NotificationServer", "type": "notification"}'
        data = json.loads(payload)

        async def q_result(*args):
            table = args[0]
            _payload = json.loads(args[1])
            if table == 'schedules':
                if _payload['return'][0] == 'process_name':
                    assert {"return": ["process_name"]} == _payload
                    return {'rows': [{'process_name': 'stats collector'}, {'process_name': 'notification_c'}], 'count': 2}

                else:
                    assert {"return": ["schedule_name"], "where": {"column": "schedule_name", "condition": "=",
                                                                   "value": data['name']}} == _payload

                    return {'count': 0, 'rows': []}
            if table == 'scheduled_processes':
                assert {"return": ["name"], "where": {"column": "name", "condition": "=",
                                                      "value": "notification_c"}} == _payload
                return {'count': 0, 'rows': []}

        expected_insert_resp = {'rows_affected': 1, "response": "inserted"}

        server.Server.scheduler = Scheduler(None, None)
        storage_client_mock = MagicMock(StorageClientAsync)
        c_mgr = ConfigurationManager(storage_client_mock)
        with patch.object(connect, 'get_storage_async', return_value=storage_client_mock):
            with patch.object(c_mgr, 'get_category_all_items', return_value=self.async_mock(None)) as patch_get_cat_info:
                with patch.object(storage_client_mock, 'query_tbl_with_payload', side_effect=q_result):
                    with patch.object(storage_client_mock, 'insert_into_tbl', return_value=self.async_mock(expected_insert_resp)) as insert_table_patch:
                        resp = await client.post('/fledge/service', data=payload)
                        server.Server.scheduler = None
                        assert 400 == resp.status
                        assert 'A Notification service schedule already exists.' == resp.reason
                    args, kwargs = insert_table_patch.call_args
                    assert 'scheduled_processes' == args[0]
                    p = json.loads(args[1])
                    assert {'name': 'notification_c', 'script': '["services/notification_c"]'} == p
            patch_get_cat_info.assert_called_once_with(category_name=data['name'])
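
From the command line, the duplicate case this test covers would surface roughly as follows (illustrative only; the request body is the test payload and the 400 reason is the string asserted above):

    curl -sX POST http://localhost:8081/fledge/service -d '{"name": "NotificationServer", "type": "notification"}'
    # HTTP 400: A Notification service schedule already exists.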
Example 3
    def setup_method(self):
        # Give each test a fresh Scheduler instance on the server singleton.
        server.Server.scheduler = Scheduler(None, None)
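
The tests above reset server.Server.scheduler = None inline after each request; a matching teardown_method is a safer home for that reset, since it also runs when a test fails partway through. A minimal sketch (pairing it with this setup_method is an assumption, not shown in the original):

    def teardown_method(self):
        # Ensure scheduler state cannot leak between tests, even when a
        # test fails before its inline reset executes.
        server.Server.scheduler = None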
Example 4
async def get_statistics_history(request):
    """
    Args:
        request:

    Returns:
            the statistics history, newest first, together with the collection
            interval (in seconds) of the stats collector schedule

    :Example:
            curl -X GET http://localhost:8081/fledge/statistics/history?limit=1
            curl -X GET http://localhost:8081/fledge/statistics/history?key=READINGS
            curl -X GET 'http://localhost:8081/fledge/statistics/history?key=READINGS,PURGED,UNSENT&minutes=60'
    """
    storage_client = connect.get_storage_async()
    # To find the interval in secs from stats collector schedule
    scheduler_payload = PayloadBuilder().SELECT("schedule_interval").WHERE(
        ['process_name', '=', 'stats collector']).payload()
    result = await storage_client.query_tbl_with_payload(
        'schedules', scheduler_payload)
    if len(result['rows']) > 0:
        scheduler = Scheduler()
        interval_days, interval_dt = scheduler.extract_day_time_from_interval(
            result['rows'][0]['schedule_interval'])
        interval = datetime.timedelta(days=interval_days,
                                      hours=interval_dt.hour,
                                      minutes=interval_dt.minute,
                                      seconds=interval_dt.second)
        interval_in_secs = interval.total_seconds()
    else:
        raise web.HTTPNotFound(reason="No stats collector schedule found")
    stats_history_chain_payload = PayloadBuilder().SELECT(("history_ts", "key", "value"))\
        .ALIAS("return", ("history_ts", 'history_ts')).FORMAT("return", ("history_ts", "YYYY-MM-DD HH24:MI:SS.MS"))\
        .ORDER_BY(['history_ts', 'desc']).WHERE(['1', '=', 1]).chain_payload()

    if 'key' in request.query:
        keys = request.query['key'].split(',')
        stats_history_chain_payload = PayloadBuilder(stats_history_chain_payload).AND_WHERE(
            ['key', '=', keys[0]]).chain_payload()
        for k in keys[1:]:
            stats_history_chain_payload = PayloadBuilder(stats_history_chain_payload).OR_WHERE(
                ['key', '=', k]).chain_payload()
    try:
        # get time based graphs for statistics history
        val = 0
        if 'minutes' in request.query and request.query['minutes'] != '':
            val = int(request.query['minutes']) * 60
        elif 'hours' in request.query and request.query['hours'] != '':
            val = int(request.query['hours']) * 60 * 60
        elif 'days' in request.query and request.query['days'] != '':
            val = int(request.query['days']) * 24 * 60 * 60

        if val < 0:
            raise ValueError
        elif val > 0:
            stats_history_chain_payload = PayloadBuilder(
                stats_history_chain_payload).AND_WHERE(
                    ['history_ts', 'newer', val]).chain_payload()
    except ValueError:
        raise web.HTTPBadRequest(reason="Time unit must be a positive integer")

    if 'limit' in request.query and request.query['limit'] != '':
        try:
            limit = int(request.query['limit'])
            if limit < 0:
                raise ValueError
            if 'key' in request.query:
                limit_count = limit
            else:
                # FIXME: Hack straight away multiply the LIMIT by the group count
                # i.e. if there are 8 records per distinct (stats_key), and limit supplied is 2
                # then internally, actual LIMIT = 2*8
                # TODO: FOGL-663 Need support for "subquery" from storage service
                # Remove python side handling date_trunc and use
                # SELECT date_trunc('second', history_ts::timestamptz)::varchar as history_ts

                count_payload = PayloadBuilder().AGGREGATE(["count",
                                                            "*"]).payload()
                result = await storage_client.query_tbl_with_payload(
                    "statistics", count_payload)
                key_count = result['rows'][0]['count_*']
                limit_count = limit * key_count
            stats_history_chain_payload = PayloadBuilder(
                stats_history_chain_payload).LIMIT(
                    limit_count).chain_payload()
        except ValueError:
            raise web.HTTPBadRequest(reason="Limit must be a positive integer")

    stats_history_payload = PayloadBuilder(
        stats_history_chain_payload).payload()
    result_from_storage = await storage_client.query_tbl_with_payload(
        'statistics_history', stats_history_payload)
    # Pivot each storage row into {'history_ts': ..., <key>: <value>} form
    rows = []
    for row in result_from_storage['rows']:
        rows.append({'history_ts': row['history_ts'], row['key']: row['value']})

    # Merge consecutive rows that share a history_ts into a single record
    results = []
    temp_dict = {}
    previous_ts = None
    for row in rows:
        # first time, or when history_ts changes
        if previous_ts is None or previous_ts != row['history_ts']:
            if previous_ts is not None:
                results.append(temp_dict)
            previous_ts = row['history_ts']
            temp_dict = {'history_ts': previous_ts}

        # Fold this row's statistics key into the record for its timestamp
        temp_dict.update(row)

    # Append the final record, which the loop above never flushes;
    # guard against an empty result set so we never emit a bare {}
    if temp_dict:
        results.append(temp_dict)
    return web.json_response({
        "interval": interval_in_secs,
        'statistics': results
    })
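
To make the pivot-and-merge pass above concrete, here is a hedged, standalone sketch of the same logic over fabricated storage rows (the timestamps and values are illustrative only):

rows = [
    {'history_ts': '2024-01-01 00:00:15.000', 'key': 'READINGS', 'value': 60},
    {'history_ts': '2024-01-01 00:00:15.000', 'key': 'PURGED', 'value': 0},
    {'history_ts': '2024-01-01 00:00:00.000', 'key': 'READINGS', 'value': 58},
]
results, temp_dict, previous_ts = [], {}, None
for row in rows:
    if previous_ts != row['history_ts']:
        if previous_ts is not None:
            results.append(temp_dict)
        previous_ts = row['history_ts']
        temp_dict = {'history_ts': previous_ts}
    temp_dict[row['key']] = row['value']
if temp_dict:
    results.append(temp_dict)
# results == [{'history_ts': '2024-01-01 00:00:15.000', 'READINGS': 60, 'PURGED': 0},
#             {'history_ts': '2024-01-01 00:00:00.000', 'READINGS': 58}]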
Example 5
async def get_statistics_rate(request: web.Request) -> web.Response:
    """
      Args:
          request:
      Returns:
              A JSON document with the rates for each of the statistics
      :Example:
              curl -X GET http://localhost:8081/fledge/statistics/rate?periods=1,5,15&statistics=SINUSOID,FASTSINUSOID,READINGS

      Implementation:
          Calculation via: (sum(value) / count(value)) * 60 / (<statistic history interval>)
          Queries for above example:
          select key, 4 * (sum(value) / count(value)) from statistics_history where history_ts >= datetime('now', '-1 Minute') and key in ("SINUSOID", "FASTSINUSOID", "READINGS" ) group by key;
          select key, 4 * (sum(value) / count(value)) from statistics_history where history_ts >= datetime('now', '-5 Minute') and key in ("SINUSOID", "FASTSINUSOID", "READINGS" ) group by key;
          select key, 4 * (sum(value) / count(value)) from statistics_history where history_ts >= datetime('now', '-15 Minute') and key in ("SINUSOID", "FASTSINUSOID", "READINGS" ) group by key;
      """
    params = request.query
    if 'periods' not in params:
        raise web.HTTPBadRequest(reason="periods request parameter is required")
    if 'statistics' not in params:
        raise web.HTTPBadRequest(reason="statistics request parameter is required")

    if params['periods'] == '':
        raise web.HTTPBadRequest(reason="periods cannot be empty. Use a comma-separated list of "
                                        "values to request multiple periods of time")
    if params['statistics'] == '':
        raise web.HTTPBadRequest(reason="statistics cannot be empty. Use a comma-separated list of "
                                        "statistics keys to request multiple statistics")

    periods = params['periods']
    period_split_list = list(filter(None, periods.split(',')))
    if not all(p.isdigit() for p in period_split_list):
        raise web.HTTPBadRequest(reason="periods should contain numbers")
    # 1 week = 10080 minutes
    if any(int(p) > 10080 for p in period_split_list):
        raise web.HTTPBadRequest(
            reason="The maximum allowed value for a period is 10080 minutes")

    stats = params['statistics']
    stat_split_list = list(filter(None, [x.upper() for x in stats.split(',')]))
    storage_client = connect.get_storage_async()
    # To find the interval in secs from stats collector schedule
    scheduler_payload = PayloadBuilder().SELECT("schedule_interval").WHERE(
        ['process_name', '=', 'stats collector']).payload()
    result = await storage_client.query_tbl_with_payload(
        'schedules', scheduler_payload)
    if len(result['rows']) > 0:
        scheduler = Scheduler()
        interval_days, interval_dt = scheduler.extract_day_time_from_interval(
            result['rows'][0]['schedule_interval'])
        interval_in_secs = datetime.timedelta(
            days=interval_days,
            hours=interval_dt.hour,
            minutes=interval_dt.minute,
            seconds=interval_dt.second).total_seconds()
    else:
        raise web.HTTPNotFound(reason="No stats collector schedule found")
    ts = datetime.datetime.now().timestamp()
    resp = []
    for x, y in [(x, y) for x in period_split_list for y in stat_split_list]:
        # x is a period in minutes; convert to seconds before subtracting
        # from the epoch timestamp
        time_diff = ts - int(x) * 60
        # TODO: FOGL-4102
        # For example:
        # time_diff = 1590066814.037321
        # ERROR: PostgreSQL storage plugin raising error: ERROR:  invalid input syntax for type timestamp with time zone: "1590066814.037321"
        # "where": {"column": "history_ts", "condition": ">=", "value": "1590066814.037321"} - Payload works with sqlite engine BUT not with postgres
        # To overcome above problem on postgres - I have used "dt = 2020-05-21 13:13:34" - but I see some deviations in results for both engines when we use datetime format
        _payload = PayloadBuilder().SELECT("key").AGGREGATE(["sum", "value"]).AGGREGATE(
            ["count", "value"]).WHERE(['history_ts', '>=', str(time_diff)]).AND_WHERE(
            ['key', '=', y]).chain_payload()
        stats_rate_payload = PayloadBuilder(_payload).GROUP_BY("key").payload()
        result = await storage_client.query_tbl_with_payload(
            "statistics_history", stats_rate_payload)
        temp_dict = {y: {x: 0}}
        if result['rows']:
            calculated_rate = (int(result['rows'][0]['sum_value']) /
                               int(result['rows'][0]['count_value'])) * (60 / int(interval_in_secs))
            temp_dict = {y: {x: calculated_rate}}
        resp.append(temp_dict)
    rate_dict = {}
    for d in resp:
        for k, v in d.items():
            rate_dict[k] = {**rate_dict[k], **v} if k in rate_dict else v
    return web.json_response({"rates": rate_dict})
Example 6
    async def test_add_service_with_config(self, client):
        payload = '{"name": "Sine", "type": "south", "plugin": "sinusoid", "enabled": "false",' \
                  ' "config": {"dataPointsPerSec": {"value": "10"}}}'
        data = json.loads(payload)

        async def async_mock_get_schedule():
            schedule = StartUpSchedule()
            schedule.schedule_id = '2129cc95-c841-441a-ad39-6469a87dbc8b'
            return schedule

        async def q_result(*args):
            table = args[0]
            _payload = args[1]

            if table == 'scheduled_processes':
                assert {'return': ['name'],
                        'where': {'column': 'name', 'condition': '=', 'value': 'south_c'}} == json.loads(_payload)
                return {'count': 0, 'rows': []}
            if table == 'schedules':
                assert {'return': ['schedule_name'],
                        'where': {'column': 'schedule_name', 'condition': '=',
                                  'value': data['name']}} == json.loads(_payload)
                return {'count': 0, 'rows': []}

        expected_insert_resp = {'rows_affected': 1, "response": "inserted"}
        mock_plugin_info = {
            'name': data['name'],
            'version': "1.1",
            'type': "south",
            'interface': "1.0",
            'mode': "async",
            'config': {
                'plugin': {
                    'description': "Sinusoid Plugin",
                    'type': 'string',
                    'default': 'sinusoid'
                },
                'dataPointsPerSec': {
                    'description': 'Data points per second',
                    'type': 'integer',
                    'default': '1',
                    'order': '2'
                }
            }
        }
        server.Server.scheduler = Scheduler(None, None)
        storage_client_mock = MagicMock(StorageClientAsync)
        c_mgr = ConfigurationManager(storage_client_mock)
        with patch.object(common, 'load_and_fetch_python_plugin_info', side_effect=[mock_plugin_info]):
            with patch.object(connect, 'get_storage_async', return_value=storage_client_mock):
                with patch.object(c_mgr, 'get_category_all_items', return_value=self.async_mock(None)) as patch_get_cat_info:
                    with patch.object(storage_client_mock, 'query_tbl_with_payload', side_effect=q_result):
                        with patch.object(storage_client_mock, 'insert_into_tbl',
                                          return_value=self.async_mock(expected_insert_resp)) as insert_table_patch:
                            with patch.object(c_mgr, 'create_category', return_value=self.async_mock(None)) as patch_create_cat:
                                with patch.object(c_mgr, 'create_child_category',
                                                  return_value=self.async_mock(None)) as patch_create_child_cat:
                                    with patch.object(c_mgr, 'set_category_item_value_entry',
                                                      return_value=self.async_mock(None)) as patch_set_entry:
                                        with patch.object(server.Server.scheduler, 'save_schedule',
                                                          return_value=self.async_mock("")) as patch_save_schedule:
                                            with patch.object(server.Server.scheduler, 'get_schedule_by_name',
                                                              return_value=async_mock_get_schedule()) as patch_get_schedule:
                                                resp = await client.post('/fledge/service', data=payload)
                                                server.Server.scheduler = None
                                                assert 200 == resp.status
                                                result = await resp.text()
                                                json_response = json.loads(result)
                                                assert {'id': '2129cc95-c841-441a-ad39-6469a87dbc8b',
                                                        'name': data['name']} == json_response
                                            patch_get_schedule.assert_called_once_with(data['name'])
                                        patch_save_schedule.assert_called_once()
                                    patch_set_entry.assert_called_once_with(data['name'], 'dataPointsPerSec', '10')
                                patch_create_child_cat.assert_called_once_with('South', ['Sine'])
                            assert 2 == patch_create_cat.call_count
                            patch_create_cat.assert_called_with('South', {}, 'South microservices', True)
                        args, kwargs = insert_table_patch.call_args
                        assert 'scheduled_processes' == args[0]
                        p = json.loads(args[1])
                        assert {'name': 'south_c', 'script': '["services/south_c"]'} == p
                patch_get_cat_info.assert_called_once_with(category_name=data['name'])
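
For reference, the request this test simulates corresponds roughly to the following invocation (host and port match the docstring examples elsewhere in this section; treating this as the exact production call is an assumption):

    curl -sX POST http://localhost:8081/fledge/service -d '{"name": "Sine", "type": "south", "plugin": "sinusoid", "enabled": "false", "config": {"dataPointsPerSec": {"value": "10"}}}'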
Example 7
    async def test_add_task_with_config(self, client):
        async def async_mock_get_schedule():
            schedule = TimedSchedule()
            schedule.schedule_id = '2129cc95-c841-441a-ad39-6469a87dbc8b'
            return schedule

        async def q_result(*args):
            table = args[0]
            payload = args[1]

            if table == 'scheduled_processes':
                assert {'return': ['name'],
                        'where': {'column': 'name', 'condition': '=', 'value': 'north'}} == json.loads(payload)
                return {'count': 0, 'rows': []}
            if table == 'schedules':
                assert {'return': ['schedule_name'],
                        'where': {'column': 'schedule_name', 'condition': '=',
                                  'value': 'north bound'}} == json.loads(payload)
                return {'count': 0, 'rows': []}

            if table == 'tasks':
                return {'count': 0, 'rows': []}

        expected_insert_resp = {'rows_affected': 1, "response": "inserted"}
        mock_plugin_info = {
            'name': "PI server",
            'version': "1.1",
            'type': "north",
            'interface': "1.0",
            'config': {
                'plugin': {
                    'description': "North PI plugin",
                    'type': 'string',
                    'default': 'omf'
                },
                'producerToken': {
                    'description': 'Producer token for this Fledge stream',
                    'type': 'string',
                    'default': 'pi_server_north_0001',
                    'order': '2'
                }
            }
        }
        server.Server.scheduler = Scheduler(None, None)
        data = {
            "name": "north bound",
            "plugin": "omf",
            "type": "north",
            "schedule_type": 3,
            "schedule_day": 0,
            "schedule_time": 0,
            "schedule_repeat": 30,
            "schedule_enabled": True,
            "config": {
                "producerToken": {
                    "value": "uid=180905062754237&sig=kx5l+"
                }
            }
        }

        storage_client_mock = MagicMock(StorageClientAsync)
        c_mgr = ConfigurationManager(storage_client_mock)
        with patch.object(common, 'load_and_fetch_python_plugin_info', side_effect=[mock_plugin_info]):
            with patch.object(connect, 'get_storage_async', return_value=storage_client_mock):
                with patch.object(c_mgr, 'get_category_all_items', return_value=self.async_mock(None)) as patch_get_cat_info:
                    with patch.object(storage_client_mock, 'query_tbl_with_payload', side_effect=q_result):
                        with patch.object(storage_client_mock, 'insert_into_tbl',
                                          return_value=self.async_mock(expected_insert_resp)) as insert_table_patch:
                            with patch.object(c_mgr, 'create_category', return_value=self.async_mock(None)) as patch_create_cat:
                                with patch.object(c_mgr, 'create_child_category',
                                                  return_value=self.async_mock(None)) as patch_create_child_cat:
                                    with patch.object(c_mgr, 'set_category_item_value_entry',
                                                      return_value=self.async_mock(None)) as patch_set_entry:
                                        with patch.object(server.Server.scheduler, 'save_schedule',
                                                          return_value=self.async_mock("")) as patch_save_schedule:
                                            with patch.object(server.Server.scheduler, 'get_schedule_by_name',
                                                              return_value=async_mock_get_schedule()) as patch_get_schedule:
                                                resp = await client.post('/fledge/scheduled/task', data=json.dumps(data))
                                                server.Server.scheduler = None
                                                assert 200 == resp.status
                                                result = await resp.text()
                                                json_response = json.loads(result)
                                                assert {'id': '2129cc95-c841-441a-ad39-6469a87dbc8b',
                                                        'name': 'north bound'} == json_response
                                            patch_get_schedule.assert_called_once_with(data['name'])
                                        patch_save_schedule.assert_called_once()
                                    patch_set_entry.assert_called_once_with(data['name'], 'producerToken',
                                                                            'uid=180905062754237&sig=kx5l+')
                                patch_create_child_cat.assert_called_once_with('North', ['north bound'])
                            assert 2 == patch_create_cat.call_count
                            patch_create_cat.assert_called_with('North', {}, 'North tasks', True)
                        args, kwargs = insert_table_patch.call_args
                        assert 'scheduled_processes' == args[0]
                        p = json.loads(args[1])
                        assert p['name'] == 'north'
                        assert p['script'] == '["tasks/north"]'
                patch_get_cat_info.assert_called_once_with(category_name=data['name'])
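
And the equivalent illustrative invocation for this test's request (the body mirrors the data dict above, serialized as JSON; exact production usage is an assumption):

    curl -sX POST http://localhost:8081/fledge/scheduled/task -d '{"name": "north bound", "plugin": "omf", "type": "north", "schedule_type": 3, "schedule_day": 0, "schedule_time": 0, "schedule_repeat": 30, "schedule_enabled": true, "config": {"producerToken": {"value": "uid=180905062754237&sig=kx5l+"}}}'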