Example #1
    async def test_purge_tasks(self):
        await self.populate_test_data()  # Populate data in foglamp.scheduled_processes

        scheduler = Scheduler(_address, _m_port)
        await scheduler.start()

        interval_schedule = IntervalSchedule()
        interval_schedule.name = 'purge_task'
        interval_schedule.process_name = "sleep5"
        interval_schedule.repeat = datetime.timedelta(seconds=0)
        # interval_schedule.repeat = datetime.timedelta(seconds=30)

        await scheduler.save_schedule(interval_schedule)

        await asyncio.sleep(1)
        tasks = await scheduler.get_tasks(5)
        assert tasks

        scheduler.max_running_tasks = 0
        await asyncio.sleep(7)

        scheduler.max_completed_task_age = datetime.timedelta(seconds=1)
        await scheduler.purge_tasks()

        tasks = await scheduler.get_tasks(5)
        assert not tasks
        await self.stop_scheduler(scheduler)
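Every test in these examples calls two helpers that are not part of the excerpts: populate_test_data(), which seeds foglamp.scheduled_processes with the sleep1/sleep5/sleep10/sleep30 processes the schedules refer to, and stop_scheduler(), which shuts the scheduler down once its tasks have drained. A minimal sketch of what they might look like on the test class, assuming an aiopg connection pool like the one shown in Example #19 and that Scheduler.stop() raises TimeoutError while tasks are still running (both are assumptions, not the project's exact code):

    @classmethod
    async def populate_test_data(cls):
        """Sketch: insert the sleep* scheduled processes the tests expect."""
        async with (await cls._get_connection_pool()).acquire() as conn:
            for seconds in (1, 5, 10, 30):
                stmt = ("insert into foglamp.scheduled_processes(name, script) "
                        "values('sleep{0}', '[\"sleep\", \"{0}\"]')").format(seconds)
                await conn.execute(stmt)

    @staticmethod
    async def stop_scheduler(scheduler):
        """Sketch: keep asking the scheduler to stop until no tasks are left."""
        while True:
            try:
                await scheduler.stop()
                break
            except TimeoutError:
                await asyncio.sleep(1)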
Example #2
    async def test_update(self):
        """Test update of a running task
        :assert:
            the number of tasks running
            information regarding the process running
        """
        await self.populate_test_data()  # Populate data in foglamp.scheduled_processes

        scheduler = Scheduler(_address, _m_port)
        await scheduler.start()

        interval_schedule = IntervalSchedule()
        interval_schedule.name = 'sleep10'
        interval_schedule.process_name = "sleep10"
        interval_schedule.repeat = datetime.timedelta(seconds=0)
        interval_schedule.enabled = True

        await scheduler.save_schedule(interval_schedule)  # Save the schedule in the scheduler

        await asyncio.sleep(1)
        # Assert only 1 task is running
        tasks = await scheduler.get_running_tasks()
        assert len(tasks) == 1

        # Update the schedule: rename it and give it a repeat interval
        interval_schedule.name = 'updated'
        interval_schedule.process_name = "sleep1"
        interval_schedule.repeat = datetime.timedelta(seconds=5)  # Set time interval to 5 sec
        interval_schedule.enabled = True

        await scheduler.save_schedule(interval_schedule)  # Save the updated schedule
        await asyncio.sleep(6)

        # Assert: only 1 task is running
        tasks = await scheduler.get_running_tasks()  # list of currently running tasks
        assert len(tasks) == 1

        interval_schedule.exclusive = False
        await scheduler.save_schedule(interval_schedule)

        # Check able to get same schedule after restart
        # Check fields have been modified
        await self.stop_scheduler(scheduler)
        scheduler = Scheduler(_address, _m_port)
        await scheduler.start()

        schedule = await scheduler.get_schedule(interval_schedule.schedule_id)

        # Make sure that the values used by schedule are as expected
        assert schedule.process_name == 'sleep1'
        assert schedule.name == 'updated'
        assert schedule.repeat.seconds == 5
        assert not schedule.exclusive

        await self.stop_scheduler(scheduler)
Example #3
    async def test_max_processes(self):
        """Test the maximum number of running processes
        :assert:
            the number of running processes
        """
        await self.populate_test_data()  # Populate data in foglamp.scheduled_processes

        scheduler = Scheduler(_address, _m_port)
        await scheduler.start()

        # With a maximum of 2 concurrent tasks and a non-exclusive 10-second
        # process scheduled every second, new tasks can only start in pairs:
        # 1 runs at 1 second
        # 2 runs at 2 seconds
        # 3 runs at 11 seconds
        # 4 runs at 12 seconds
        # 5 runs at 21 seconds
        # 6 runs at 22 seconds
        # 7 runs at 31 seconds
        # 8 runs at 32 seconds
        # Total started within the first ~30 seconds: 6

        scheduler.max_running_tasks = 2  # set the maximum number of running tasks in parallel

        # Set interval schedule configuration
        interval_schedule = IntervalSchedule()
        interval_schedule.repeat = datetime.timedelta(seconds=1)
        interval_schedule.name = 'max active'
        interval_schedule.exclusive = False
        interval_schedule.process_name = 'sleep10'
        interval_schedule.enabled = True

        await scheduler.save_schedule(interval_schedule)

        await asyncio.sleep(30.3)
        scheduler.max_running_tasks = 0  # prevent any new tasks from starting

        tasks = await scheduler.get_tasks(10)
        assert len(tasks) == 6

        tasks = await scheduler.get_running_tasks()
        assert len(tasks) == 2

        # Wait for the running tasks to finish
        await asyncio.sleep(20)

        scheduler.max_running_tasks = 10

        await asyncio.sleep(11)
        tasks = await scheduler.get_running_tasks()
        assert len(tasks) == 10

        await self.stop_scheduler(scheduler)
Example #4
    async def test_timed_schedule(self):
        """Testing a timed schedule using a specific timestamp (in seconds)
        :assert:
            Number of running tasks
            The values declared for the timestamp
        """
        await self.populate_test_data()  # Populate data in foglamp.scheduled_processes

        scheduler = Scheduler(_address, _m_port)
        await scheduler.start()

        timed_schedule = TimedSchedule()

        # Set current timestamp to be: Tuesday August 8 2017 8:00:00 AM PDT
        now = 1502204400
        scheduler.current_time = now

        timed_schedule.name = 'timed'
        timed_schedule.process_name = 'sleep10'
        timed_schedule.day = 2
        timed_schedule.time = datetime.time(hour=8)
        timed_schedule.repeat = datetime.timedelta(seconds=0)
        timed_schedule.enabled = True

        # Set env timezone
        os.environ["TZ"] = "PST8PDT"
        time.tzset()

        await scheduler.save_schedule(timed_schedule)
        await asyncio.sleep(1)

        tasks = await scheduler.get_running_tasks()
        assert len(tasks) == 1

        timed_schedule = await scheduler.get_schedule(
            uuid.UUID(str(timed_schedule.schedule_id)))

        # Assert timed_schedule values
        assert timed_schedule.time.hour == 8
        assert timed_schedule.time.minute == 0
        assert timed_schedule.time.second == 0
        assert timed_schedule.day == 2

        # Reset timezone
        del os.environ["TZ"]
        time.tzset()

        await self.stop_scheduler(scheduler)
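The hard-coded epoch value used above can be checked without the scheduler at all; a small standalone sketch (it relies on time.tzset(), so it is POSIX-only, just like the test itself):

    import datetime
    import os
    import time

    os.environ["TZ"] = "PST8PDT"
    time.tzset()
    local = datetime.datetime.fromtimestamp(1502204400)
    print(local)               # 2017-08-08 08:00:00 (8:00 AM PDT)
    print(local.isoweekday())  # 2, i.e. Tuesday, matching timed_schedule.day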
Example #5
    async def test_get_schedule(self):
        """Schedule gets retrieved
        :assert:
            Schedule is retrieved by id """
        await self.populate_test_data()  # Populate data in foglamp.scheduled_processes

        scheduler = Scheduler(_address, _m_port)
        await scheduler.start()

        # Declare schedule
        interval_schedule = IntervalSchedule()
        interval_schedule.name = 'get_schedule_test'
        interval_schedule.process_name = "sleep30"
        interval_schedule.repeat = datetime.timedelta(seconds=0)

        await scheduler.save_schedule(interval_schedule)

        # Get schedule
        schedules = await scheduler.get_schedules()
        assert len(schedules) == 1  # Assert the number of schedules

        await scheduler.get_schedule(interval_schedule.schedule_id)  # Get the schedule by its ID

        # Assert that an unknown ID raises ScheduleNotFoundError
        try:
            await scheduler.get_schedule(uuid.uuid4())
            assert False
        except ScheduleNotFoundError:
            pass

        await self.stop_scheduler(scheduler)
Example #6
    async def test_manual_schedule(self):
        """Test manually ran scheduled processes
        :assert:
            The number of running processes
        """
        await self.populate_test_data()  # Populate data in foglamp.scheduled_processes

        scheduler = Scheduler(_address, _m_port)
        await scheduler.start()

        # Declare manual interval schedule
        manual_schedule = ManualSchedule()
        manual_schedule.name = 'manual task'
        manual_schedule.process_name = 'sleep10'
        manual_schedule.repeat = datetime.timedelta(seconds=0)

        await scheduler.save_schedule(manual_schedule)
        manual_schedule = await scheduler.get_schedule(
            manual_schedule.schedule_id)

        # Add a task for this schedule to the scheduler queue
        await scheduler.queue_task(manual_schedule.schedule_id)
        await asyncio.sleep(5)

        tasks = await scheduler.get_running_tasks()
        assert len(tasks) == 1

        await self.stop_scheduler(scheduler)
Example #7
    async def test_interval_none_repeat(self):
        """Tests an interval schedule where repeat is None
        :assert:
            A task starts immediately and doesn't repeat
        """
        await self.populate_test_data()  # Populate data in foglamp.scheduled_processes

        scheduler = Scheduler(_address, _m_port)
        await scheduler.start()

        # assert that the schedule type is interval
        interval_schedule = IntervalSchedule()
        assert interval_schedule.schedule_type == Schedule.Type.INTERVAL

        interval_schedule.name = 'sleep10'
        interval_schedule.process_name = "sleep10"
        interval_schedule.repeat = datetime.timedelta(seconds=0)

        await scheduler.save_schedule(interval_schedule)

        await asyncio.sleep(1)
        # Assert only 1 task is running
        tasks = await scheduler.get_running_tasks()
        assert len(tasks) == 1

        await asyncio.sleep(12)
        # Assert only 1 task is running
        tasks = await scheduler.get_running_tasks()
        assert len(tasks) == 1

        await self.stop_scheduler(scheduler)
Example #8
    async def test_modify_schedule_type(self):
        """Test modifying the type of a schedule
        """
        await self.populate_test_data()  # Populate data in foglamp.scheduled_processes

        scheduler = Scheduler(_address, _m_port)
        await scheduler.start()

        interval_schedule = IntervalSchedule()
        interval_schedule.name = 'sleep10'
        interval_schedule.process_name = 'sleep10'
        interval_schedule.repeat = datetime.timedelta(seconds=0)

        await scheduler.save_schedule(interval_schedule)

        manual_schedule = ManualSchedule()
        manual_schedule.schedule_id = interval_schedule.schedule_id
        manual_schedule.name = 'manual'
        manual_schedule.process_name = 'sleep10'
        manual_schedule.repeat = datetime.timedelta(seconds=0)

        await scheduler.save_schedule(manual_schedule)

        # Assert that the schedule is now a ManualSchedule
        schedule = await scheduler.get_schedule(manual_schedule.schedule_id)

        assert isinstance(schedule, ManualSchedule)

        await self.stop_scheduler(scheduler)
Example #9
    async def test_delete(self):
        """Test that a scheduled process gets removed
        :assert:
            scheduled task gets removed
        """
        await self.populate_test_data()  # Populate data in foglamp.scheduled_processes

        scheduler = Scheduler(_address, _m_port)
        await scheduler.start()

        # Set schedule to be interval based
        interval_schedule = IntervalSchedule()
        interval_schedule.name = 'deletetest'
        interval_schedule.process_name = "sleep1"
        interval_schedule.repeat = datetime.timedelta(seconds=0)

        await scheduler.save_schedule(interval_schedule)

        await asyncio.sleep(5)

        # Delete a scheduled task
        await scheduler.delete_schedule(interval_schedule.schedule_id)

        # Assert that deleting the schedule again raises ScheduleNotFoundError
        try:
            await scheduler.delete_schedule(interval_schedule.schedule_id)
            assert False
        except ScheduleNotFoundError:
            pass

        await self.stop_scheduler(scheduler)
Example #10
    async def test_get_task(self):
        """Test tasks exists
        :assert:
            there exists a task
        """
        await self.populate_test_data()  # Populate data in foglamp.scheduled_processes

        scheduler = Scheduler(_address, _m_port)
        await scheduler.start()

        interval_schedule = IntervalSchedule()
        interval_schedule.name = 'get_task'
        interval_schedule.process_name = "sleep30"
        interval_schedule.repeat = datetime.timedelta(seconds=0)

        await scheduler.save_schedule(interval_schedule)
        await asyncio.sleep(1)

        tasks = await scheduler.get_running_tasks()  # retrieve the list of running tasks
        assert len(tasks)

        task = await scheduler.get_task(str(tasks[0].task_id))
        assert task  # assert there exists a task

        await self.stop_scheduler(scheduler)
Example #11
    async def test_startup_schedule(self):
        """Test startup of _scheduler
        :assert:
            the number of running tasks
        """
        await self.populate_test_data()  # Populate data in foglamp.scheduled_processes

        scheduler = Scheduler(_address, _m_port)
        await scheduler.start()

        # Declare schedule startup, and execute
        startup_schedule = StartUpSchedule()  # A schedule that runs at scheduler startup
        startup_schedule.name = 'startup schedule'
        startup_schedule.process_name = 'sleep30'
        startup_schedule.repeat = datetime.timedelta(seconds=0)  # no repeat for the startup schedule
        startup_schedule.enabled = True

        await scheduler.save_schedule(startup_schedule)

        await asyncio.sleep(1)
        # Assert no tasks are running
        tasks = await scheduler.get_running_tasks()
        assert len(tasks) == 0

        await scheduler.get_schedule(startup_schedule.schedule_id)  # ID of the startup schedule

        await self.stop_scheduler(scheduler)

        scheduler = Scheduler(_address, _m_port)
        await scheduler.start()

        await asyncio.sleep(2)
        # Assert only 1 task is running
        tasks = await scheduler.get_running_tasks()
        assert len(tasks) == 1

        scheduler.max_running_tasks = 0  # prevent any new tasks from starting
        await scheduler.cancel_task(tasks[0].task_id)

        await asyncio.sleep(10)

        # Assert no tasks are running
        tasks = await scheduler.get_running_tasks()
        assert len(tasks) == 0

        scheduler.max_running_tasks = 1

        await asyncio.sleep(2)

        # Assert a single task is running
        tasks = await scheduler.get_running_tasks()
        assert len(tasks) == 1

        await self.stop_scheduler(scheduler)
Example #12
    async def test_disable_schedule(self):
        await self.populate_test_data()  # Populate data in foglamp.scheduled_processes

        scheduler = Scheduler(_address, _m_port)
        await scheduler.start()

        # Declare schedule
        interval_schedule = IntervalSchedule()
        interval_schedule.name = 'disable_schedule_test'
        interval_schedule.process_name = "sleep5"
        interval_schedule.repeat = datetime.timedelta(seconds=0)
        interval_schedule.enabled = True

        await scheduler.save_schedule(interval_schedule)

        # Get schedule
        schedules = await scheduler.get_schedules()
        assert len(schedules) == 1  # Assert the number of schedules
        assert schedules[0].enabled is True

        await asyncio.sleep(5)

        # assert there exists a task
        tasks = await scheduler.get_running_tasks()  # retrieve the list of running tasks
        assert len(tasks)

        task = await scheduler.get_task(tasks[0].task_id)
        assert task

        # Disable Schedule
        retval, reason = await scheduler.disable_schedule(
            interval_schedule.schedule_id)
        assert retval

        # Confirm enabled changed
        schedules = await scheduler.get_schedules()
        assert len(schedules) == 1  # Assert the number of schedules
        assert schedules[0].enabled is False

        await self.stop_scheduler(scheduler)
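If a test later needs the schedule active again, the scheduler also has an enable counterpart to disable_schedule(). A hedged sketch of what could be added just before the scheduler is stopped, assuming enable_schedule() mirrors the (retval, reason) return shape used above (an assumption, not shown in these excerpts):

        # Re-enable the schedule and confirm the flag flips back (sketch)
        retval, reason = await scheduler.enable_schedule(interval_schedule.schedule_id)
        assert retval
        schedules = await scheduler.get_schedules()
        assert schedules[0].enabled is True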
Example #13
    async def test_stop(self):
        """Test that stop_scheduler actually works"""
        await self.populate_test_data()  # Populate data in foglamp.scheduled_processes

        scheduler = Scheduler(_address, _m_port)
        await scheduler.start()

        # Set schedule interval
        interval_schedule = IntervalSchedule()
        interval_schedule.exclusive = False
        interval_schedule.name = 'sleep1'
        interval_schedule.process_name = "sleep1"
        interval_schedule.repeat = datetime.timedelta(seconds=1)  # Repeat every second

        await scheduler.save_schedule(interval_schedule)  # Save schedule updates
        await asyncio.sleep(10)

        await self.stop_scheduler(scheduler)
Example #14
    async def test_cancel(self):
        """Cancel a running process"""
        await self.populate_test_data()  # Populate data in foglamp.scheduled_processes

        scheduler = Scheduler(_address, _m_port)
        await scheduler.start()

        interval_schedule = IntervalSchedule()
        interval_schedule.name = 'cancel_test'
        interval_schedule.process_name = 'sleep30'
        interval_schedule.repeat = datetime.timedelta(seconds=0)

        await scheduler.save_schedule(interval_schedule)

        await asyncio.sleep(5)
        tasks = await scheduler.get_running_tasks()

        await scheduler.cancel_task(tasks[0].task_id)  # Cancel a running task

        await self.stop_scheduler(scheduler)
Example #15
    async def test_create_interval(self):
        """Test the creation of a new schedule interval
        :assert:
            The interval type of the schedule
        """
        await self.populate_test_data()  # Populate data in foglamp.scheduled_processes

        scheduler = Scheduler(_address, _m_port)
        await scheduler.start()

        # assert that the schedule type is interval
        interval_schedule = IntervalSchedule()
        assert interval_schedule.schedule_type == Schedule.Type.INTERVAL

        interval_schedule.name = 'sleep10'
        interval_schedule.process_name = "sleep10"
        interval_schedule.repeat = datetime.timedelta(seconds=1)

        await scheduler.save_schedule(interval_schedule)

        await self.stop_scheduler(scheduler)
Example #16
    async def test_add_notification_service(self, client, payload):
        data = json.loads(payload)
        sch_id = '45876056-e04c-4cde-8a82-1d8dbbbe6d72'

        async def async_mock_get_schedule():
            schedule = StartUpSchedule()
            schedule.schedule_id = sch_id
            return schedule

        @asyncio.coroutine
        def q_result(*arg):
            table = arg[0]
            _payload = json.loads(arg[1])
            if table == 'schedules':
                if _payload['return'][0] == 'process_name':
                    assert {"return": ["process_name"]} == _payload
                    return {
                        'rows': [{
                            'process_name': 'purge'
                        }, {
                            'process_name': 'stats collector'
                        }],
                        'count':
                        2
                    }
                else:
                    assert {
                        "return": ["schedule_name"],
                        "where": {
                            "column": "schedule_name",
                            "condition": "=",
                            "value": data['name']
                        }
                    } == _payload

                    return {'count': 0, 'rows': []}
            if table == 'scheduled_processes':
                assert {
                    "return": ["name"],
                    "where": {
                        "column": "name",
                        "condition": "=",
                        "value": "notification_c"
                    }
                } == _payload
                return {'count': 0, 'rows': []}

        expected_insert_resp = {'rows_affected': 1, "response": "inserted"}

        server.Server.scheduler = Scheduler(None, None)
        storage_client_mock = MagicMock(StorageClientAsync)
        c_mgr = ConfigurationManager(storage_client_mock)
        with patch.object(connect,
                          'get_storage_async',
                          return_value=storage_client_mock):
            with patch.object(
                    c_mgr,
                    'get_category_all_items',
                    return_value=self.async_mock(None)) as patch_get_cat_info:
                with patch.object(storage_client_mock,
                                  'query_tbl_with_payload',
                                  side_effect=q_result):
                    with patch.object(
                            storage_client_mock,
                            'insert_into_tbl',
                            return_value=self.async_mock(
                                expected_insert_resp)) as insert_table_patch:
                        with patch.object(server.Server.scheduler,
                                          'save_schedule',
                                          return_value=self.async_mock(
                                              "")) as patch_save_schedule:
                            with patch.object(
                                    server.Server.scheduler,
                                    'get_schedule_by_name',
                                    return_value=async_mock_get_schedule(
                                    )) as patch_get_schedule:
                                resp = await client.post('/foglamp/service',
                                                         data=payload)
                                server.Server.scheduler = None
                                assert 200 == resp.status
                                result = await resp.text()
                                json_response = json.loads(result)
                                assert {
                                    'id': sch_id,
                                    'name': data['name']
                                } == json_response
                            patch_get_schedule.assert_called_once_with(
                                data['name'])
                        patch_save_schedule.assert_called_once()
                    args, kwargs = insert_table_patch.call_args
                    assert 'scheduled_processes' == args[0]
                    p = json.loads(args[1])
                    assert {
                        'name': 'notification_c',
                        'script': '["services/notification_c"]'
                    } == p
            patch_get_cat_info.assert_called_once_with(
                category_name=data['name'])
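Examples #16, #18 and #22-#25 wrap mocked return values in self.async_mock(...), a helper that is not shown in those excerpts. Judging from the local async_mock defined inside Example #21, it is presumably nothing more than a coroutine that echoes its argument; a sketch of that assumption as a method on the test class:

    async def async_mock(self, return_value):
        """Sketch: make a plain value awaitable for patched coroutines."""
        return return_value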
Example #17
    def setup_method(self):
        server.Server.scheduler = Scheduler(None, None)
Example #18
    async def test_add_service(self, client, payload):
        data = json.loads(payload)

        async def async_mock_get_schedule():
            schedule = StartUpSchedule()
            schedule.schedule_id = '2129cc95-c841-441a-ad39-6469a87dbc8b'
            return schedule

        @asyncio.coroutine
        def q_result(*arg):
            table = arg[0]
            _payload = arg[1]

            if table == 'scheduled_processes':
                assert {
                    'return': ['name'],
                    'where': {
                        'column': 'name',
                        'condition': '=',
                        'value': 'south_c'
                    }
                } == json.loads(_payload)
                return {'count': 0, 'rows': []}
            if table == 'schedules':
                assert {
                    'return': ['schedule_name'],
                    'where': {
                        'column': 'schedule_name',
                        'condition': '=',
                        'value': 'furnace4'
                    }
                } == json.loads(_payload)
                return {'count': 0, 'rows': []}

        expected_insert_resp = {'rows_affected': 1, "response": "inserted"}
        mock_plugin_info = {
            'name': "furnace4",
            'version': "1.1",
            'type': "south",
            'interface': "1.0",
            'mode': "async",
            'config': {
                'plugin': {
                    'description': "DHT11 plugin",
                    'type': 'string',
                    'default': 'dht11'
                }
            }
        }

        mock = MagicMock()
        attrs = {"plugin_info.side_effect": [mock_plugin_info]}
        mock.configure_mock(**attrs)

        server.Server.scheduler = Scheduler(None, None)
        storage_client_mock = MagicMock(StorageClientAsync)
        c_mgr = ConfigurationManager(storage_client_mock)
        with patch('builtins.__import__', return_value=mock):
            with patch.object(connect,
                              'get_storage_async',
                              return_value=storage_client_mock):
                with patch.object(c_mgr,
                                  'get_category_all_items',
                                  return_value=self.async_mock(
                                      None)) as patch_get_cat_info:
                    with patch.object(storage_client_mock,
                                      'query_tbl_with_payload',
                                      side_effect=q_result):
                        with patch.object(storage_client_mock, 'insert_into_tbl', return_value=self.async_mock(expected_insert_resp)) \
                                as insert_table_patch:
                            with patch.object(c_mgr,
                                              'create_category',
                                              return_value=self.async_mock(
                                                  None)) as patch_create_cat:
                                with patch.object(c_mgr, 'create_child_category', return_value=self.async_mock(None)) \
                                        as patch_create_child_cat:
                                    with patch.object(
                                            server.Server.scheduler,
                                            'save_schedule',
                                            return_value=self.async_mock(
                                                "")) as patch_save_schedule:
                                        with patch.object(
                                                server.Server.scheduler,
                                                'get_schedule_by_name',
                                                return_value=
                                                async_mock_get_schedule(
                                                )) as patch_get_schedule:
                                            resp = await client.post(
                                                '/foglamp/service',
                                                data=payload)
                                            server.Server.scheduler = None
                                            assert 200 == resp.status
                                            result = await resp.text()
                                            json_response = json.loads(result)
                                            assert {
                                                'id':
                                                '2129cc95-c841-441a-ad39-6469a87dbc8b',
                                                'name': 'furnace4'
                                            } == json_response
                                        patch_get_schedule.assert_called_once_with(
                                            data['name'])
                                    patch_save_schedule.assert_called_once()
                                patch_create_child_cat.assert_called_once_with(
                                    'South', ['furnace4'])
                            assert 2 == patch_create_cat.call_count
                            patch_create_cat.assert_called_with(
                                'South', {}, 'South microservices', True)
                        args, kwargs = insert_table_patch.call_args
                        assert 'scheduled_processes' == args[0]
                        p = json.loads(args[1])
                        assert {
                            'name': 'south_c',
                            'script': '["services/south_c"]'
                        } == p
                patch_get_cat_info.assert_called_once_with(
                    category_name='furnace4')
Example #19
    sqlalchemy.Column('process_name', sqlalchemy.types.VARCHAR(20),
                      default=''),
    sqlalchemy.Column('state', sqlalchemy.types.INT),
    sqlalchemy.Column('start_time', sqlalchemy.types.TIMESTAMP),
    sqlalchemy.Column('end_time', sqlalchemy.types.TIMESTAMP),
    sqlalchemy.Column('pid', sqlalchemy.types.INT),
    sqlalchemy.Column('exit_code', sqlalchemy.types.INT),
    sqlalchemy.Column('reason', sqlalchemy.types.VARCHAR(255)))

# TODO: FOGL-1017: To run this test, set FOGLAMP_ENV=TEST (it is only used by the scheduler)
# 1) Execute this command: FOGLAMP_ENV=TEST pytest -s -vv tests/integration/foglamp/services/core/test_scheduler_get_tasks.py

# TODO: How to eliminate manual intervention as below when tests will run unattended at CI?
_address = pytest.test_env.address
_m_port = pytest.test_env.core_mgmt_port
scheduler = Scheduler(_address, _m_port)


@pytest.allure.feature("integration")
@pytest.allure.story("scheduler get_tasks")
class TestSchedulerGetTasks:
    _engine = None  # type: aiopg.sa.Engine

    # TODO: This test will not work if our storage engine is not Postgres. OK for today but long term we need to
    # approach this differently. We could simply use the storage layer to insert the test data.
    @classmethod
    async def _get_connection_pool(cls) -> aiopg.sa.Engine:
        """Returns a database connection pool object"""
        if cls._engine is None:
            cls._engine = await aiopg.sa.create_engine(_CONNECTION_STRING)
        return cls._engine
Example #20
    async def _start_scheduler(cls):
        """Starts the scheduler"""
        _logger.info("start scheduler")
        cls.scheduler = Scheduler(cls._host, cls.core_management_port)
        await cls.scheduler.start()
Example #21
    async def test_add_service(self, client):
        async def async_mock(return_value):
            return return_value

        async def async_mock_get_schedule():
            schedule = StartUpSchedule()
            schedule.schedule_id = '2129cc95-c841-441a-ad39-6469a87dbc8b'
            return schedule

        def q_result(*arg):
            table = arg[0]
            payload = arg[1]

            if table == 'scheduled_processes':
                assert {
                    'return': ['name'],
                    'where': {
                        'column': 'name',
                        'condition': '=',
                        'value': 'furnace4'
                    }
                } == json.loads(payload)
                return {'count': 0, 'rows': []}
            if table == 'schedules':
                assert {
                    'return': ['schedule_name'],
                    'where': {
                        'column': 'schedule_name',
                        'condition': '=',
                        'value': 'furnace4'
                    }
                } == json.loads(payload)
                return {'count': 0, 'rows': []}

        async def async_mock_insert():
            expected = {'rows_affected': 1, "response": "inserted"}
            return expected

        server.Server.scheduler = Scheduler(None, None)
        data = {"name": "furnace4", "type": "south", "plugin": "dht11"}
        description = '{} service configuration'.format(data['name'])
        storage_client_mock = MagicMock(StorageClientAsync)
        c_mgr = ConfigurationManager(storage_client_mock)
        val = {
            'plugin': {
                'default': data['plugin'],
                'description': 'Python module name of the plugin to load',
                'type': 'string'
            }
        }
        with patch.object(connect,
                          'get_storage_async',
                          return_value=storage_client_mock):
            with patch.object(storage_client_mock,
                              'query_tbl_with_payload',
                              side_effect=q_result):
                with patch.object(storage_client_mock,
                                  'insert_into_tbl',
                                  return_value=async_mock_insert()
                                  ) as insert_table_patch:
                    with patch.object(
                            c_mgr,
                            'create_category',
                            return_value=async_mock(None)) as patch_create_cat:
                        with patch.object(server.Server.scheduler,
                                          'save_schedule',
                                          return_value=async_mock(
                                              "")) as patch_save_schedule:
                            with patch.object(
                                    server.Server.scheduler,
                                    'get_schedule_by_name',
                                    return_value=async_mock_get_schedule(
                                    )) as patch_get_schedule:
                                resp = await client.post('/foglamp/service',
                                                         data=json.dumps(data))
                                server.Server.scheduler = None
                                assert 200 == resp.status
                                result = await resp.text()
                                json_response = json.loads(result)
                                assert {
                                    'id':
                                    '2129cc95-c841-441a-ad39-6469a87dbc8b',
                                    'name': 'furnace4'
                                } == json_response
                            patch_get_schedule.assert_called_once_with(
                                data['name'])
                        patch_save_schedule.assert_called_once()
                    patch_create_cat.assert_called_once_with(
                        category_name=data['name'],
                        category_description=description,
                        category_value=val,
                        keep_original_items=True)

                args, kwargs = insert_table_patch.call_args
                assert 'scheduled_processes' == args[0]
                p = json.loads(args[1])
                assert {
                    'name': 'furnace4',
                    'script': '["services/south"]'
                } == p
Example #22
    async def test_add_task_with_config(self, client):
        async def async_mock_get_schedule():
            schedule = TimedSchedule()
            schedule.schedule_id = '2129cc95-c841-441a-ad39-6469a87dbc8b'
            return schedule

        @asyncio.coroutine
        def q_result(*arg):
            table = arg[0]
            payload = arg[1]

            if table == 'scheduled_processes':
                assert {
                    'return': ['name'],
                    'where': {
                        'column': 'name',
                        'condition': '=',
                        'value': 'north'
                    }
                } == json.loads(payload)
                return {'count': 0, 'rows': []}
            if table == 'schedules':
                assert {
                    'return': ['schedule_name'],
                    'where': {
                        'column': 'schedule_name',
                        'condition': '=',
                        'value': 'north bound'
                    }
                } == json.loads(payload)
                return {'count': 0, 'rows': []}

            if table == 'tasks':
                return {'count': 0, 'rows': []}

        expected_insert_resp = {'rows_affected': 1, "response": "inserted"}
        mock_plugin_info = {
            'name': "PI server",
            'version': "1.1",
            'type': "north",
            'interface': "1.0",
            'config': {
                'plugin': {
                    'description': "North PI plugin",
                    'type': 'string',
                    'default': 'omf'
                },
                'producerToken': {
                    'description': 'Producer token for this FogLAMP stream',
                    'type': 'string',
                    'default': 'pi_server_north_0001',
                    'order': '2'
                }
            }
        }

        mock = MagicMock()
        attrs = {"plugin_info.side_effect": [mock_plugin_info]}
        mock.configure_mock(**attrs)

        server.Server.scheduler = Scheduler(None, None)
        data = {
            "name": "north bound",
            "plugin": "omf",
            "type": "north",
            "schedule_type": 3,
            "schedule_day": 0,
            "schedule_time": 0,
            "schedule_repeat": 30,
            "schedule_enabled": True,
            "config": {
                "producerToken": {
                    "value": "uid=180905062754237&sig=kx5l+"
                }
            }
        }

        storage_client_mock = MagicMock(StorageClientAsync)
        c_mgr = ConfigurationManager(storage_client_mock)
        with patch('builtins.__import__', return_value=mock):
            with patch.object(connect,
                              'get_storage_async',
                              return_value=storage_client_mock):
                with patch.object(c_mgr,
                                  'get_category_all_items',
                                  return_value=self.async_mock(
                                      None)) as patch_get_cat_info:
                    with patch.object(storage_client_mock,
                                      'query_tbl_with_payload',
                                      side_effect=q_result):
                        with patch.object(storage_client_mock, 'insert_into_tbl', return_value=self.async_mock(expected_insert_resp)) \
                                as insert_table_patch:
                            with patch.object(c_mgr,
                                              'create_category',
                                              return_value=self.async_mock(
                                                  None)) as patch_create_cat:
                                with patch.object(c_mgr, 'create_child_category', return_value=self.async_mock(None)) \
                                        as patch_create_child_cat:
                                    with patch.object(
                                            c_mgr,
                                            'set_category_item_value_entry',
                                            return_value=self.async_mock(
                                                None)) as patch_set_entry:
                                        with patch.object(
                                                server.Server.scheduler,
                                                'save_schedule',
                                                return_value=self.async_mock(
                                                    "")
                                        ) as patch_save_schedule:
                                            with patch.object(
                                                    server.Server.scheduler,
                                                    'get_schedule_by_name',
                                                    return_value=
                                                    async_mock_get_schedule(
                                                    )) as patch_get_schedule:
                                                resp = await client.post(
                                                    '/foglamp/scheduled/task',
                                                    data=json.dumps(data))
                                                server.Server.scheduler = None
                                                assert 200 == resp.status
                                                result = await resp.text()
                                                json_response = json.loads(
                                                    result)
                                                assert {
                                                    'id':
                                                    '2129cc95-c841-441a-ad39-6469a87dbc8b',
                                                    'name': 'north bound'
                                                } == json_response
                                            patch_get_schedule.assert_called_once_with(
                                                data['name'])
                                        patch_save_schedule.assert_called_once()
                                    patch_set_entry.assert_called_once_with(
                                        data['name'], 'producerToken',
                                        'uid=180905062754237&sig=kx5l+')
                                patch_create_child_cat.assert_called_once_with(
                                    'North', ['north bound'])
                            assert 2 == patch_create_cat.call_count
                            patch_create_cat.assert_called_with(
                                'North', {}, 'North tasks', True)
                        args, kwargs = insert_table_patch.call_args
                        assert 'scheduled_processes' == args[0]
                        p = json.loads(args[1])
                        assert p['name'] == 'north'
                        assert p['script'] == '["tasks/north"]'
                patch_get_cat_info.assert_called_once_with(
                    category_name=data['name'])
Example #23
    async def test_dupe_notification_service_schedule(self, client):
        payload = '{"name": "NotificationServer", "type": "notification"}'
        data = json.loads(payload)

        @asyncio.coroutine
        def q_result(*arg):
            table = arg[0]
            _payload = json.loads(arg[1])
            if table == 'schedules':
                if _payload['return'][0] == 'process_name':
                    assert {"return": ["process_name"]} == _payload
                    return {
                        'rows': [{
                            'process_name': 'stats collector'
                        }, {
                            'process_name': 'notification_c'
                        }],
                        'count':
                        2
                    }

                else:
                    assert {
                        "return": ["schedule_name"],
                        "where": {
                            "column": "schedule_name",
                            "condition": "=",
                            "value": data['name']
                        }
                    } == _payload

                    return {'count': 0, 'rows': []}
            if table == 'scheduled_processes':
                assert {
                    "return": ["name"],
                    "where": {
                        "column": "name",
                        "condition": "=",
                        "value": "notification_c"
                    }
                } == _payload
                return {'count': 0, 'rows': []}

        expected_insert_resp = {'rows_affected': 1, "response": "inserted"}

        server.Server.scheduler = Scheduler(None, None)
        storage_client_mock = MagicMock(StorageClientAsync)
        c_mgr = ConfigurationManager(storage_client_mock)
        with patch.object(connect,
                          'get_storage_async',
                          return_value=storage_client_mock):
            with patch.object(
                    c_mgr,
                    'get_category_all_items',
                    return_value=self.async_mock(None)) as patch_get_cat_info:
                with patch.object(storage_client_mock,
                                  'query_tbl_with_payload',
                                  side_effect=q_result):
                    with patch.object(
                            storage_client_mock,
                            'insert_into_tbl',
                            return_value=self.async_mock(
                                expected_insert_resp)) as insert_table_patch:
                        resp = await client.post('/foglamp/service',
                                                 data=payload)
                        server.Server.scheduler = None
                        assert 400 == resp.status
                        assert 'A Notification service schedule already exists.' == resp.reason
                    args, kwargs = insert_table_patch.call_args
                    assert 'scheduled_processes' == args[0]
                    p = json.loads(args[1])
                    assert {
                        'name': 'notification_c',
                        'script': '["services/notification_c"]'
                    } == p
            patch_get_cat_info.assert_called_once_with(
                category_name=data['name'])
Example #24
    async def test_add_task(self, client):
        async def async_mock_get_schedule():
            schedule = TimedSchedule()
            schedule.schedule_id = '2129cc95-c841-441a-ad39-6469a87dbc8b'
            return schedule

        @asyncio.coroutine
        def q_result(*arg):
            table = arg[0]
            payload = arg[1]

            if table == 'scheduled_processes':
                assert {
                    'return': ['name'],
                    'where': {
                        'column': 'name',
                        'condition': '=',
                        'value': 'north'
                    }
                } == json.loads(payload)
                return {'count': 0, 'rows': []}
            if table == 'schedules':
                assert {
                    'return': ['schedule_name'],
                    'where': {
                        'column': 'schedule_name',
                        'condition': '=',
                        'value': 'north bound'
                    }
                } == json.loads(payload)
                return {'count': 0, 'rows': []}

            if table == 'tasks':
                return {'count': 0, 'rows': []}

        expected_insert_resp = {'rows_affected': 1, "response": "inserted"}
        mock_plugin_info = {
            'name': "north bound",
            'version': "1.1",
            'type': "north",
            'interface': "1.0",
            'config': {
                'plugin': {
                    'description': "North OMF plugin",
                    'type': 'string',
                    'default': 'omf'
                }
            }
        }
        server.Server.scheduler = Scheduler(None, None)
        data = {
            "name": "north bound",
            "plugin": "omf",
            "type": "north",
            "schedule_type": 3,
            "schedule_day": 0,
            "schedule_time": 0,
            "schedule_repeat": 30,
            "schedule_enabled": True
        }

        storage_client_mock = MagicMock(StorageClientAsync)
        c_mgr = ConfigurationManager(storage_client_mock)
        with patch.object(common,
                          'load_and_fetch_python_plugin_info',
                          side_effect=[mock_plugin_info]):
            with patch.object(connect,
                              'get_storage_async',
                              return_value=storage_client_mock):
                with patch.object(c_mgr,
                                  'get_category_all_items',
                                  return_value=self.async_mock(
                                      None)) as patch_get_cat_info:
                    with patch.object(storage_client_mock,
                                      'query_tbl_with_payload',
                                      side_effect=q_result):
                        with patch.object(storage_client_mock, 'insert_into_tbl', return_value=self.async_mock(expected_insert_resp)) \
                                as insert_table_patch:
                            with patch.object(c_mgr,
                                              'create_category',
                                              return_value=self.async_mock(
                                                  None)) as patch_create_cat:
                                with patch.object(c_mgr, 'create_child_category', return_value=self.async_mock(None)) \
                                        as patch_create_child_cat:
                                    with patch.object(
                                            server.Server.scheduler,
                                            'save_schedule',
                                            return_value=self.async_mock(
                                                "")) as patch_save_schedule:
                                        with patch.object(
                                                server.Server.scheduler,
                                                'get_schedule_by_name',
                                                return_value=
                                                async_mock_get_schedule(
                                                )) as patch_get_schedule:
                                            resp = await client.post(
                                                '/foglamp/scheduled/task',
                                                data=json.dumps(data))
                                            server.Server.scheduler = None
                                            assert 200 == resp.status
                                            result = await resp.text()
                                            json_response = json.loads(result)
                                            assert {
                                                'id':
                                                '2129cc95-c841-441a-ad39-6469a87dbc8b',
                                                'name': 'north bound'
                                            } == json_response
                                        patch_get_schedule.assert_called_once_with(
                                            data['name'])
                                    patch_save_schedule.assert_called_once()
                                patch_create_child_cat.assert_called_once_with(
                                    'North', ['north bound'])
                            calls = [
                                call(category_description='North OMF plugin',
                                     category_name='north bound',
                                     category_value={
                                         'plugin': {
                                             'description': 'North OMF plugin',
                                             'default': 'omf',
                                             'type': 'string'
                                         }
                                     },
                                     keep_original_items=True),
                                call('North', {}, 'North tasks', True)
                            ]
                            patch_create_cat.assert_has_calls(calls)
                        args, kwargs = insert_table_patch.call_args
                        assert 'scheduled_processes' == args[0]
                        p = json.loads(args[1])
                        assert p['name'] == 'north'
                        assert p['script'] == '["tasks/north"]'
                patch_get_cat_info.assert_called_once_with(
                    category_name=data['name'])
Example #25
    async def test_add_service_with_config(self, client):
        payload = '{"name": "Sine", "type": "south", "plugin": "sinusoid", "enabled": "false",' \
                  ' "config": {"dataPointsPerSec": {"value": "10"}}}'
        data = json.loads(payload)

        async def async_mock_get_schedule():
            schedule = StartUpSchedule()
            schedule.schedule_id = '2129cc95-c841-441a-ad39-6469a87dbc8b'
            return schedule

        @asyncio.coroutine
        def q_result(*arg):
            table = arg[0]
            _payload = arg[1]

            if table == 'scheduled_processes':
                assert {
                    'return': ['name'],
                    'where': {
                        'column': 'name',
                        'condition': '=',
                        'value': 'south_c'
                    }
                } == json.loads(_payload)
                return {'count': 0, 'rows': []}
            if table == 'schedules':
                assert {
                    'return': ['schedule_name'],
                    'where': {
                        'column': 'schedule_name',
                        'condition': '=',
                        'value': data['name']
                    }
                } == json.loads(_payload)
                return {'count': 0, 'rows': []}

        expected_insert_resp = {'rows_affected': 1, "response": "inserted"}
        mock_plugin_info = {
            'name': data['name'],
            'version': "1.1",
            'type': "south",
            'interface': "1.0",
            'mode': "async",
            'config': {
                'plugin': {
                    'description': "Sinusoid Plugin",
                    'type': 'string',
                    'default': 'sinusoid'
                },
                'dataPointsPerSec': {
                    'description': 'Data points per second',
                    'type': 'integer',
                    'default': '1',
                    'order': '2'
                }
            }
        }
        server.Server.scheduler = Scheduler(None, None)
        storage_client_mock = MagicMock(StorageClientAsync)
        c_mgr = ConfigurationManager(storage_client_mock)
        with patch.object(common,
                          'load_and_fetch_python_plugin_info',
                          side_effect=[mock_plugin_info]):
            with patch.object(connect,
                              'get_storage_async',
                              return_value=storage_client_mock):
                with patch.object(c_mgr,
                                  'get_category_all_items',
                                  return_value=self.async_mock(
                                      None)) as patch_get_cat_info:
                    with patch.object(storage_client_mock,
                                      'query_tbl_with_payload',
                                      side_effect=q_result):
                        with patch.object(storage_client_mock,
                                          'insert_into_tbl',
                                          return_value=self.async_mock(
                                              expected_insert_resp)
                                          ) as insert_table_patch:
                            with patch.object(c_mgr,
                                              'create_category',
                                              return_value=self.async_mock(
                                                  None)) as patch_create_cat:
                                with patch.object(
                                        c_mgr,
                                        'create_child_category',
                                        return_value=self.async_mock(
                                            None)) as patch_create_child_cat:
                                    with patch.object(
                                            c_mgr,
                                            'set_category_item_value_entry',
                                            return_value=self.async_mock(
                                                None)) as patch_set_entry:
                                        with patch.object(
                                                server.Server.scheduler,
                                                'save_schedule',
                                                return_value=self.async_mock("")
                                        ) as patch_save_schedule:
                                            with patch.object(
                                                    server.Server.scheduler,
                                                    'get_schedule_by_name',
                                                    return_value=async_mock_get_schedule()) as patch_get_schedule:
                                                resp = await client.post(
                                                    '/foglamp/service',
                                                    data=payload)
                                                server.Server.scheduler = None
                                                assert 200 == resp.status
                                                result = await resp.text()
                                                json_response = json.loads(
                                                    result)
                                                assert {
                                                    'id':
                                                    '2129cc95-c841-441a-ad39-6469a87dbc8b',
                                                    'name': data['name']
                                                } == json_response
                                            patch_get_schedule.assert_called_once_with(
                                                data['name'])
                                        assert 1 == patch_save_schedule.call_count
                                    patch_set_entry.assert_called_once_with(
                                        data['name'], 'dataPointsPerSec', '10')
                                patch_create_child_cat.assert_called_once_with(
                                    'South', ['Sine'])
                            assert 2 == patch_create_cat.call_count
                            patch_create_cat.assert_called_with(
                                'South', {}, 'South microservices', True)
                        args, kwargs = insert_table_patch.call_args
                        assert 'scheduled_processes' == args[0]
                        p = json.loads(args[1])
                        assert {
                            'name': 'south_c',
                            'script': '["services/south_c"]'
                        } == p
                patch_get_cat_info.assert_called_once_with(
                    category_name=data['name'])
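
A note on the deeply nested with patch.object(...) blocks above: the same set of patches can be entered through a single contextlib.ExitStack, which keeps the test body at one indentation level. The sketch below only illustrates that pattern; c_mgr and storage_client_mock here are plain MagicMock stand-ins, not the real FogLAMP objects, and the patched method names are reused purely as examples.

from contextlib import ExitStack
from unittest.mock import MagicMock, patch


def exit_stack_sketch():
    # Hypothetical stand-ins for the objects built in the test above.
    c_mgr = MagicMock()
    storage_client_mock = MagicMock()

    with ExitStack() as stack:
        # Each enter_context() registers one patch; all of them are undone
        # together when the with-block exits.
        patch_get_cat_info = stack.enter_context(
            patch.object(c_mgr, 'get_category_all_items', return_value=None))
        insert_table_patch = stack.enter_context(
            patch.object(storage_client_mock, 'insert_into_tbl', return_value=None))

        # The test body would go here, flat instead of nested.
        c_mgr.get_category_all_items(category_name='some_category')
        patch_get_cat_info.assert_called_once_with(category_name='some_category')
        assert 0 == insert_table_patch.call_count


exit_stack_sketch()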
Example #26
0
    async def test_get_tasks(self):
        """Get list of tasks
        :assert:
            Number of running tasks
            The state of tasks
            the start time of a given task
        """
        await self.populate_test_data(
        )  # Populate data in foglamp.scheduled_processes

        scheduler = Scheduler(_address, _m_port)
        await scheduler.start()

        # Declare an interval schedule for the scheduler
        interval_schedule = IntervalSchedule()
        interval_schedule.name = 'get_tasks'
        interval_schedule.process_name = "sleep5"
        interval_schedule.repeat = datetime.timedelta(seconds=1)
        interval_schedule.exclusive = False

        await scheduler.save_schedule(interval_schedule)

        await asyncio.sleep(15)

        # Assert running tasks
        tasks = await scheduler.get_tasks(
            where=["state", "=", int(Task.State.INTERRUPTED)])
        assert not tasks

        tasks = await scheduler.get_tasks(where=["end_time", "=", 'NULL'])
        assert tasks

        tasks = await scheduler.get_tasks(limit=50)
        states = [int(task.state) for task in tasks]

        assert len(tasks) > 1
        assert int(Task.State.RUNNING) in states
        assert int(Task.State.COMPLETE) in states

        tasks = await scheduler.get_tasks(1)
        assert len(tasks) == 1

        tasks = await scheduler.get_tasks(
            where=["state", "=", int(Task.State.RUNNING)],
            sort=[["state", "desc"]],
            offset=50)
        assert not tasks

        tasks = await scheduler.get_tasks(
            where=["state", "=", int(Task.State.RUNNING)],
            sort=[["state", "desc"], ["start_time", "asc"]])
        assert tasks

        tasks = await scheduler.get_tasks(
            or_where_list=[["state", "=", int(Task.State.RUNNING)],
                           ["state", "=", int(Task.State.RUNNING)]])
        assert tasks

        tasks = await scheduler.get_tasks(
            and_where_list=[["state", "=", int(Task.State.RUNNING)],
                            ["state", "=", int(Task.State.RUNNING)]])
        assert tasks

        await self.stop_scheduler(scheduler)
Example #27
0
async def get_statistics_history(request):
    """
    Args:
        request:

    Returns:
            a list of statistics history records, grouped by collection timestamp

    :Example:
            curl -X GET http://localhost:8081/foglamp/statistics/history?limit=1
    """
    storage_client = connect.get_storage()

    # To find the interval in secs from stats collector schedule
    scheduler_payload = PayloadBuilder().SELECT("schedule_interval").WHERE(
        ['process_name', '=', 'stats collector']).payload()
    result = storage_client.query_tbl_with_payload('schedules',
                                                   scheduler_payload)
    if len(result['rows']) > 0:
        scheduler = Scheduler()
        interval_days, interval_dt = scheduler.extract_day_time_from_interval(
            result['rows'][0]['schedule_interval'])
        interval = datetime.timedelta(days=interval_days,
                                      hours=interval_dt.hour,
                                      minutes=interval_dt.minute,
                                      seconds=interval_dt.second)
        interval_in_secs = interval.total_seconds()
    else:
        raise web.HTTPNotFound(reason="No stats collector schedule found")
    stats_history_chain_payload = PayloadBuilder().SELECT(("history_ts", "key", "value"))\
        .ALIAS("return", ("history_ts", 'history_ts')).FORMAT("return", ("history_ts", "YYYY-MM-DD HH24:MI:SS.MS"))\
        .ORDER_BY(['history_ts', 'desc']).chain_payload()

    if 'limit' in request.query and request.query['limit'] != '':
        try:
            limit = int(request.query['limit'])
            if limit < 0:
                raise ValueError
            # FIXME: Hack - multiply the LIMIT by the group count,
            # i.e. if there are 8 records per distinct stats_key and the supplied limit is 2,
            # then the actual internal LIMIT used is 2 * 8
            # TODO: FOGL-663 Need support for "subquery" from storage service
            # Remove python side handling date_trunc and use
            # SELECT date_trunc('second', history_ts::timestamptz)::varchar as history_ts

            count_payload = PayloadBuilder().AGGREGATE(["count",
                                                        "*"]).payload()
            result = storage_client.query_tbl_with_payload(
                "statistics", count_payload)
            key_count = result['rows'][0]['count_*']

            stats_history_chain_payload = PayloadBuilder(
                stats_history_chain_payload).LIMIT(limit *
                                                   key_count).chain_payload()
        except ValueError:
            raise web.HTTPBadRequest(reason="Limit must be a positive integer")

    stats_history_payload = PayloadBuilder(
        stats_history_chain_payload).payload()
    result_from_storage = storage_client.query_tbl_with_payload(
        'statistics_history', stats_history_payload)
    group_dict = []
    for row in result_from_storage['rows']:
        new_dict = {'history_ts': row['history_ts'], row['key']: row['value']}
        group_dict.append(new_dict)

    results = []
    temp_dict = {}
    previous_ts = None
    for row in group_dict:
        # first time or when history_ts changes
        if previous_ts is None or previous_ts != row['history_ts']:
            if previous_ts is not None:
                results.append(temp_dict)
            previous_ts = row['history_ts']
            temp_dict = {'history_ts': previous_ts}

        # Append statistics key to temp dict
        for key, value in row.items():
            temp_dict.update({key: value})

    # Append the last group of records, which is not appended inside the loop above
    if temp_dict:
        results.append(temp_dict)

    return web.json_response({
        "interval": interval_in_secs,
        'statistics': results
    })
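
Two details of the handler above are easy to miss: the supplied limit is multiplied by the number of distinct statistics keys (each collection interval produces one row per key), and the regrouping loop folds those per-key rows into one dict per history_ts. Below is a minimal, self-contained sketch of that regrouping; the keys and timestamps are made up for illustration, not taken from the original.

# Illustrative input: one row per (history_ts, key), as returned by storage.
rows = [
    {'history_ts': '2020-01-01 00:00:15.000', 'key': 'READINGS', 'value': 10},
    {'history_ts': '2020-01-01 00:00:15.000', 'key': 'PURGED', 'value': 2},
    {'history_ts': '2020-01-01 00:00:30.000', 'key': 'READINGS', 'value': 12},
]

results = []
temp_dict = {}
previous_ts = None
for row in rows:
    # Start a new group whenever the timestamp changes.
    if previous_ts != row['history_ts']:
        if previous_ts is not None:
            results.append(temp_dict)
        previous_ts = row['history_ts']
        temp_dict = {'history_ts': previous_ts}
    temp_dict[row['key']] = row['value']
if temp_dict:
    results.append(temp_dict)

# results is now one dict per timestamp:
# [{'history_ts': '2020-01-01 00:00:15.000', 'READINGS': 10, 'PURGED': 2},
#  {'history_ts': '2020-01-01 00:00:30.000', 'READINGS': 12}]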
Example #28
0
    async def test_disable_schedule_wrong_schedule_id(self):
        with pytest.raises(ScheduleNotFoundError) as excinfo:
            scheduler = Scheduler(_address, _m_port)
            await scheduler.start()
            random_schedule_id = uuid.uuid4()
            await scheduler.disable_schedule(random_schedule_id)
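
The listing ends inside the pytest.raises block; a typical continuation would assert on the captured exception and stop the scheduler, roughly as sketched below. The assumption that the error message echoes the schedule id is illustrative only, not taken from the original.

        # Hypothetical continuation, not part of the original example:
        assert str(random_schedule_id) in str(excinfo.value)  # assumes the message echoes the id
        await self.stop_scheduler(scheduler)  # clean up the scheduler started above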