Example #1
 def __init__(self, root_uri):
     self.info = Datasource(
         uri='.'.join((root_uri,'network')),
         supplies = []
     )
     self._ifaces = []
     self.tms = []
Example #2
 async def test_metrics_updated_two_tm_activated_with_them(self):
     ''' metrics_updated should generate two new tasks '''
     tm1 = transfer_methods.transfermethod(f=noop)
     tm2 = transfer_methods.transfermethod(f=noop)
     tm1._decorate_method(tm1._f)
     tm2._decorate_method(tm2._f)
     tmi = TransferMethodsIndex()
     self.assertTrue(tmi.add_tm(tm1))
     self.assertTrue(tmi.add_tm(tm2))
     self.assertTrue(tm1.mid in tmi._disabled_methods)
     self.assertTrue(tm2.mid in tmi._disabled_methods)
     self.assertTrue(await tmi.enable_all())
     tm1.schedule = OnUpdateSchedule(activation_metrics=Datasource('uri'))
     tm2.schedule = OnUpdateSchedule(activation_metrics=Datasource('uri'))
     self.assertTrue(tm1.mid in tmi._enabled_methods)
     self.assertTrue(tm2.mid in tmi._enabled_methods)
     self.assertIsNotNone(tmi._enabled_methods[tm1.mid]['first'])
     self.assertIsNotNone(tmi._enabled_methods[tm2.mid]['first'])
     metrics = [Datasource('uri'), Datasource('uri2')]
     t = timeuuid.TimeUUID()
     current_task = asyncio.Task.current_task()
     tasks = asyncio.Task.all_tasks()
     self.assertEqual(len(tasks), 1)  #this task
     tmi.metrics_updated(t=t, metrics=metrics, irt=timeuuid.TimeUUID())
     tasks = asyncio.Task.all_tasks()
     self.assertEqual(len(tasks), 3)  #two tm activated
     [
         task.cancel() for task in asyncio.Task.all_tasks()
         if task != current_task
     ]
Example #3
 async def test_get_tms_activated_with_some_found_no_repeat(self):
     ''' _get_tms_activated_with should return the tms activated with these metrics, each only once '''
     tm1 = transfer_methods.transfermethod(f=noop)
     tm2 = transfer_methods.transfermethod(f=noop)
     tm1._decorate_method(tm1._f)
     tm2._decorate_method(tm2._f)
     tmi = TransferMethodsIndex()
     self.assertTrue(tmi.add_tm(tm1))
     self.assertTrue(tmi.add_tm(tm2))
     self.assertTrue(tm1.mid in tmi._disabled_methods)
     self.assertTrue(tm2.mid in tmi._disabled_methods)
     self.assertTrue(await tmi.enable_all())
     tm1.schedule = OnUpdateSchedule(
         activation_metrics=[Datasource('uri'),
                             Datasource('uri2')])
     tm2.schedule = OnUpdateSchedule(
         activation_metrics=[Datasource('uri'),
                             Datasource('uri2')])
     self.assertTrue(Datasource('uri') in tm1.schedule.activation_metrics)
     self.assertTrue(Datasource('uri') in tm2.schedule.activation_metrics)
     self.assertTrue(Datasource('uri2') in tm1.schedule.activation_metrics)
     self.assertTrue(Datasource('uri2') in tm2.schedule.activation_metrics)
     self.assertTrue(tm1.mid in tmi._enabled_methods)
     self.assertTrue(tm2.mid in tmi._enabled_methods)
     self.assertIsNotNone(tmi._enabled_methods[tm1.mid]['first'])
     self.assertIsNotNone(tmi._enabled_methods[tm2.mid]['first'])
     metrics = [Datasource('uri'), Datasource('uri2')]
     activated = tmi._get_tms_activated_with(metrics)
     self.assertEqual(len(activated), 2)
     self.assertTrue(tm1 in activated)
     self.assertTrue(tm2 in activated)
Example #4
 def __init__(self, root_uri):
     self.info = Datasource(
         uri='.'.join((root_uri,'storage')),
         supplies = []
     )
     self._devs = []
     self._fss = []
     self.tms = []
Example #5
    def __init__(self, root_uri):
        self.info = Datasource(
            uri='.'.join((root_uri,'memory')),
            supplies = ['pswpin','pswpout','pgpgin','pgpgout','fault','majflt','pgfree','pgscank','pgscand','pgsteal','vmeff','kbmemfree','kbmemused','memused','kbbuffers','kbcached','kbcommit','commit','kbactive','kbinact','kbdirty']
        )
        self.use_metrics= {
            'memused':Datapoint('.'.join((self.info.uri,'memused'))),
            'majflt':Datapoint('.'.join((self.info.uri,'majflt')))
        }

        self.tms = [
            transfermethod(f=self.check_anom, schedule=OnUpdateSchedule(activation_metrics=self.use_metrics)),
        ]
Example #6
def process_message_send_data_interval(msg, session, **kwargs):
    if msg.m_type == Metrics.DATAPOINT:
        metric = Datapoint(uri=msg.uri, session=session)
    else:
        metric = Datasource(uri=msg.uri, session=session)
    for row in msg.data[::-1]:
        session.store.insert(metric, row[0], row[1])
Example #7
 async def test_get_tms_activated_with_none_found(self):
     ''' _get_tms_activated_with should return [] if no tm is activated with these metrics '''
     tm1 = transfer_methods.transfermethod(f=noop)
     tm2 = transfer_methods.transfermethod(f=noop)
     tm1._decorate_method(tm1._f)
     tm2._decorate_method(tm2._f)
     tmi = TransferMethodsIndex()
     self.assertTrue(tmi.add_tm(tm1))
     self.assertTrue(tmi.add_tm(tm2))
     self.assertTrue(tm1.mid in tmi._disabled_methods)
     self.assertTrue(tm2.mid in tmi._disabled_methods)
     self.assertTrue(await tmi.enable_all())
     self.assertTrue(tm1.mid in tmi._enabled_methods)
     self.assertTrue(tm2.mid in tmi._enabled_methods)
     self.assertIsNotNone(tmi._enabled_methods[tm1.mid]['first'])
     self.assertIsNotNone(tmi._enabled_methods[tm2.mid]['first'])
     metrics = [Datasource('uri'), Datasource('uri2')]
     self.assertEqual(tmi._get_tms_activated_with(metrics), [])
Example #8
    def __init__(self, root_uri):
        self.info = Datasource(
            uri = '.'.join((root_uri,'cpu')),
            supplies = ['ldavg.1min','ldavg.5min','ldavg.15min','cpu_count','all.user','all.nice','all.system','all.iowait','all.steal','all.idle','tasks.runnable','tasks.total','tasks.blocked']
        )
        self.use_metrics= {
            'user':Datapoint('.'.join((self.info.uri,'all.user'))),
            'nice':Datapoint('.'.join((self.info.uri,'all.nice'))),
            'system':Datapoint('.'.join((self.info.uri,'all.system'))),
            'steal':Datapoint('.'.join((self.info.uri,'all.steal'))),
            'iowait':Datapoint('.'.join((self.info.uri,'all.iowait'))),
            'idle':Datapoint('.'.join((self.info.uri,'all.idle'))),
            'cpu_count':Datapoint('.'.join((self.info.uri,'cpu_count'))),
            't_runnable':Datapoint('.'.join((self.info.uri,'tasks.runnable'))),
            'ldavg1':Datapoint('.'.join((self.info.uri,'ldavg.1min')))
        }

        self.tms = [
            transfermethod(f=self.check_anom, schedule=OnUpdateSchedule(activation_metrics=self.use_metrics)),
        ]
Example #9
def process_message_send_multi_data(msg, session, **kwargs):
    metrics=[]
    for item in msg.uris:
        if item['type'] == Metrics.DATASOURCE:
            metric=Datasource(uri=item['uri'], session=session)
        elif item['type'] == Metrics.DATAPOINT:
            metric=Datapoint(uri=item['uri'], session=session)
        if not session.store.is_in(metric=metric, t=msg.t, value=item['content']):
            session.store.insert(metric, msg.t, item['content'])
            metrics.append(metric)
    tmIndex.metrics_updated(t=msg.t, metrics=metrics, irt=msg.seq)
Example #10
async def send_stdin(s, uri):
    data = sys.stdin.read()
    sample = Sample(metric=Datasource(uri, session=s),
                    t=TimeUUID(),
                    value=data)
    await s.login()
    result = await prproc.send_samples([sample])
    if not result['success']:
        sys.stderr.write('Error sending data to Komlog.\n')
        for err in result['errors']:
            sys.stderr.write(str(err['error']) + '\n')
    await s.close()
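A minimal sketch (not part of the example above) of how send_stdin could be driven from a script entry point. KomlogSession and crypto are assumed to be importable the same way as in the test snippets; the username, key handling and uri are placeholders to adapt to your own setup.

import asyncio

async def main():
    # key and session setup follow the pattern used in the tests above;
    # replace the generated key and username with your own credentials
    privkey = crypto.generate_rsa_key()
    s = KomlogSession(username='my_user', privkey=privkey)
    await send_stdin(s, uri='my_datasource')

if __name__ == '__main__':
    asyncio.get_event_loop().run_until_complete(main())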
Example #11
 def test_transfermethod_failure_invalid_f_params(self):
     ''' creation of a transfermethod object should fail if f_params is invalid '''
     params = [
         1, 'str', {'set'}, ['list'], ('tupl', 'e'),
         Datasource(uri='uri'),
         pd.Timestamp('now')
     ]
     for f_params in params:
         with self.assertRaises(exceptions.BadParametersException) as cm:
             tm = transfer_methods.transfermethod(f_params=f_params)
         self.assertEqual(cm.exception.msg,
                          '"f_params" attribute must be a dict')
Example #12
 async def test_hook_to_metric_failure_invalid_response(self):
     ''' hook_to_metric should fail if we receive an unknown response '''
     username1 = 'username1'
     privkey1 = crypto.generate_rsa_key()
     session1 = KomlogSession(username=username1, privkey=privkey1)
     session1.send_message = test.AsyncMock(return_value=None)
     metric = Datasource('my_ds', session=session1)
     response = await prproc.hook_to_metric(metric)
     self.assertEqual(session1.send_message.call_count, 1)
     self.assertEqual(response['hooked'], False)
     self.assertEqual(response['exists'], False)
     sessionIndex.unregister_session(session1.sid)
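The test above replaces session1.send_message with test.AsyncMock so that the mocked coroutine can be awaited while call_count is still recorded. The real helper is presumably defined in the project's test utilities (not shown here); a hypothetical sketch of such a helper, for illustration only:

from unittest.mock import Mock

class AsyncMock(Mock):
    # Hypothetical stand-in for test.AsyncMock: calling the mock returns a
    # coroutine, so `await session.send_message(...)` works, and the call is
    # still recorded by the underlying Mock (call_count, call_args, ...).
    async def __call__(self, *args, **kwargs):
        return super().__call__(*args, **kwargs)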
Example #13
 async def test_request_data_failure_unknown_response(self):
     ''' request_data should fail if we receive an unknown response '''
     username1 = 'username1'
     privkey1 = crypto.generate_rsa_key()
     session1 = KomlogSession(username=username1, privkey=privkey1)
     session1.send_message = test.AsyncMock(return_value=None)
     start = TimeUUID(100)
     end = TimeUUID(300)
     count = 10
     metric = Datasource('my_ds', session=session1)
     response = await prproc.request_data(metric, start, end, count)
     self.assertEqual(session1.send_message.call_count, 1)
     self.assertEqual(response['success'], False)
     self.assertEqual(response['error'], 'Unknown response')
     sessionIndex.unregister_session(session1.sid)
Example #14
 async def test_process_message_send_data_interval_success_ds_data(self):
     ''' process_message_send_data_interval should store contents in session store '''
     username = '******'
     privkey = crypto.generate_rsa_key()
     uri = 'my_datasource'
     m_type = Metrics.DATASOURCE
     start = TimeUUID(1)
     end = TimeUUID(3000)
     data_json = json.dumps([(TimeUUID(i).hex, 'sample ' + str(i))
                             for i in range(1, 100)])
     data = json.loads(data_json)
     session = KomlogSession(username=username, privkey=privkey)
     msg = messages.SendDataInterval(uri, m_type, start, end, data)
     self.assertIsNone(
         prmsg.process_message_send_data_interval(msg, session))
     for d in data:
         smp = Sample(Datasource(uri, session), TimeUUID(s=d[0]), d[1])
         self.assertTrue(session.store.is_in(smp.metric, smp.t, smp.value))
     sessionIndex.unregister_session(session.sid)
Example #15
 async def test_enable_tm_failure_cannot_hook_metric(self):
     ''' enable_tm should fail if we cannot hook to a metric. It should generate a retry task '''
     tm = transfer_methods.transfermethod(f=noop,
                                          schedule=OnUpdateSchedule(
                                              Datasource('uri')))
     tm._decorate_method(tm._f)
     tmi = TransferMethodsIndex()
     self.assertTrue(tmi.add_tm(tm))
     self.assertTrue(tm.mid in tmi._disabled_methods)
     self.assertFalse(await tmi.enable_tm(tm.mid))
     self.assertFalse(tm.mid in tmi._enabled_methods)
     self.assertIsNone(tmi._disabled_methods[tm.mid]['first'])
     current_task = asyncio.Task.current_task()
     tasks = asyncio.Task.all_tasks()
     self.assertEqual(len(tasks), 2)  #this task and retry task
     [
         task.cancel() for task in asyncio.Task.all_tasks()
         if task != current_task
     ]
Example #16
class Memory:

    def __init__(self, root_uri):
        self.info = Datasource(
            uri='.'.join((root_uri,'memory')),
            supplies = ['pswpin','pswpout','pgpgin','pgpgout','fault','majflt','pgfree','pgscank','pgscand','pgsteal','vmeff','kbmemfree','kbmemused','memused','kbbuffers','kbcached','kbcommit','commit','kbactive','kbinact','kbdirty']
        )
        self.use_metrics= {
            'memused':Datapoint('.'.join((self.info.uri,'memused'))),
            'majflt':Datapoint('.'.join((self.info.uri,'majflt')))
        }

        self.tms = [
            transfermethod(f=self.check_anom, schedule=OnUpdateSchedule(activation_metrics=self.use_metrics)),
        ]

    async def load(self):
        for tm in self.tms:
            await tm.bind()

    async def update(self, t, content):
        if not content:
            return
        swp_block = False
        pg_block = False
        mem_block = False
        swp_block_lines = []
        pg_block_lines = []
        mem_block_lines = []
        for i,line in enumerate(content.split('\n')):
            fields = line.split()
            if len(fields) > 1:
                if fields[0].count(':') == 1:
                    # this should be a summary or average line
                    if fields[1] == 'pswpin/s' or swp_block:
                        swp_block = True
                        pg_block = False
                        mem_block = False
                        swp_block_lines.append(line)
                    elif fields[1] == 'pgpgin/s' or pg_block:
                        swp_block = False
                        pg_block = True
                        mem_block = False
                        pg_block_lines.append(line)
                    elif fields[1] == 'kbmemfree' or mem_block:
                        swp_block = False
                        pg_block = False
                        mem_block = True
                        mem_block_lines.append(line)
            else:
                swp_block = False
                pg_block = False
                mem_block = False
        if swp_block_lines or pg_block_lines or mem_block_lines:
            value = ''
            for block in [mem_block_lines, swp_block_lines, pg_block_lines]:
                margin = get_margin(block)
                if margin > -1:
                    for line in block:
                        value += line[margin:]+'\n'
                    value += '\n'
            self.info.insert(t=t, value=value)

    async def check_anom(self, t):
        memused = await self.use_metrics['memused'].get(t=t)
        majflt = await self.use_metrics['majflt'].get(t=t)
        if memused is None and majflt is None:
            return
        elif memused is not None and memused[0] > 85:
            anom_metric = Anomaly(metric=self.info)
            anom_metric.insert(t=t, value=1)
        elif majflt is not None and majflt[0] > 100:
            anom_metric = Anomaly(metric=self.info)
            anom_metric.insert(t=t, value=1)
        else:
            anom_metric = Anomaly(metric=self.info)
            anom_metric.insert(t=t, value=0)
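The update methods in these classes rely on a get_margin helper that is not shown in any of the examples. Purely as an illustration of the line[margin:] slicing done above, a hypothetical sketch of what such a helper could look like: it returns the column where the data starts (just past the leading 'Average:'/timestamp field), or -1 when the block is unusable.

def get_margin(lines):
    # Hypothetical helper, not taken from the examples above: find the first
    # column at which the second whitespace-separated field starts on any
    # line, so that line[margin:] strips the leading label/timestamp column
    # without cutting into data. Return -1 when the block is unusable.
    margins = []
    for line in lines:
        fields = line.split()
        if len(fields) < 2:
            return -1
        margins.append(line.index(fields[1], len(fields[0])))
    return min(margins) if margins else -1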
Example #17
class Storage:
    _dev_metrics = ['tps','rd_sec','wr_sec','avgrq-sz','avgqu-sz','await','svctm','util']
    _fs_metrics = ['MBfsfree', 'MBfsused', 'fsused', 'ufsused', 'Ifree', 'Iused', 'pIused']

    def __init__(self, root_uri):
        self.info = Datasource(
            uri='.'.join((root_uri,'storage')),
            supplies = []
        )
        self._devs = []
        self._fss = []
        self.tms = []

    async def load(self):
        for tm in self.tms:
            await tm.bind()

    async def update(self, t, content):
        if not content:
            return
        dev_block = False
        fs_block = False
        dev_block_lines = []
        fs_block_lines = []
        for i,line in enumerate(content.split('\n')):
            fields = line.split()
            if len(fields) > 1:
                if fields[0].count(':') == 1:
                    # this should be a summary or average line
                    if fields[1] == 'DEV' or dev_block:
                        dev_block = True
                        fs_block = False
                        dev_block_lines.append(line)
                    elif fields[-1] == 'FILESYSTEM' or fs_block:
                        dev_block = False
                        fs_block = True
                        fs_block_lines.append(line)
            else:
                dev_block = False
                fs_block = False
        if dev_block_lines or fs_block_lines:
            value = ''
            for block in [dev_block_lines, fs_block_lines]:
                margin = get_margin(block)
                if margin > -1:
                    for line in block:
                        value += line[margin:]+'\n'
                    value += '\n'
            await self.find_missing(value)
            self.info.insert(t=t, value=value)

    async def find_missing(self, content):
        devs = []
        fss = []
        dev_block = False
        fs_block = False
        for line in content.split('\n'):
            fields = line.split()
            if dev_block and fields:
                dev_name = fields[0]
                final_name = ''
                for i,c in enumerate(dev_name):
                    if validation.is_local_uri(c):
                        final_name += c
                    elif i>0:
                        final_name += '_'
                if validation.is_local_uri(final_name):
                    devs.append(final_name)
            elif fs_block and fields:
                fs_name = fields[-1]
                final_name = ''
                for i,c in enumerate(fs_name):
                    if validation.is_local_uri(c):
                        final_name += c
                    elif i>0:
                        final_name += '_'
                if validation.is_local_uri(final_name):
                    fss.append(final_name)
            else:
                dev_block = False
                fs_block = False
                if fields and fields[0][0] == 'D':
                    dev_block = True
                elif fields and fields[0][0:2] == 'MB':
                    fs_block = True
        for dev in devs:
            if not dev in self._devs:
                logging.logger.debug('New device found: '+dev)
                self._devs.append(dev)
                for m in self._dev_metrics:
                    self.info.supplies.append('.'.join((dev,m)))
                f_params = {
                    'util':Datapoint(uri='.'.join((self.info.uri,dev,'util')))
                }
                tm = transfermethod(f=self.check_anom_dev, f_params=f_params)
                await tm.bind()
                self.tms.append(tm)
        for fs in fss:
            if not fs in self._fss:
                logging.logger.debug('New filesystem found: '+fs)
                self._fss.append(fs)
                for m in self._fs_metrics:
                    self.info.supplies.append('.'.join((fs,m)))
                f_params = {
                    'fsused':Datapoint(uri='.'.join((self.info.uri,fs,'fsused'))),
                    'ufsused':Datapoint(uri='.'.join((self.info.uri,fs,'ufsused'))),
                    'pIused':Datapoint(uri='.'.join((self.info.uri,fs,'pIused')))
                }
                tm = transfermethod(f=self.check_anom_fs, f_params=f_params)
                await tm.bind()
                self.tms.append(tm)

    async def check_anom_dev(self, t, util):
        util_s = await util.get(end=t, count=2)
        if util_s is not None:
            anom = Anomaly(util)
            if util_s[0] > 80:
                # set anomaly signal to 1
                anom.insert(t=t, value=1)
            elif len(util_s)==2 and util_s.index[-1] < t and util_s[-1] > 80:
                # cancel previously set anomaly
                anom.insert(t=t, value=0)

    async def check_anom_fs(self, t, fsused, ufsused, pIused):
        fsused_s = await fsused.get(end=t, count=2)
        ufsused_s = await ufsused.get(end=t, count=2)
        pIused_s = await pIused.get(end=t, count=2)
        if fsused_s is None and ufsused_s is None and pIused_s is None:
            return
        elif fsused_s is not None and fsused_s.index[0] == t:
            fsused_anom = Anomaly(fsused)
            if fsused_s[0] > 80:
                fsused_anom.insert(t=t, value=1)
            elif len(fsused_s) == 2 and fsused_s[-1] > 80:
                fsused_anom.insert(t=t, value=0)
        elif ufsused_s is not None and ufsused_s.index[0] == t:
            ufsused_anom = Anomaly(ufsused)
            if ufsused_s[0] > 80:
                ufsused_anom.insert(t=t, value=1)
            elif len(ufsused_s) == 2 and ufsused_s[-1] > 80:
                ufsused_anom.insert(t=t, value=0)
        elif pIused_s is not None and pIused_s.index[0] == t:
            pIused_anom = Anomaly(pIused)
            if pIused_s[0] > 80:
                pIused_anom.insert(t=t, value=1)
            elif len(pIused_s) == 2 and pIused_s[-1] > 80:
                pIused_anom.insert(t=t, value=0)
Example #18
class Network:
    _iface_metrics = ['rxpck','txpck','rxkB','txkB','rxcmp','txcmp','rxmcst','ifutil','rxerr','txerr','coll','rxdrop','txdrop','txcarr','rxfram','rxfifo','txfifo']

    def __init__(self, root_uri):
        self.info = Datasource(
            uri='.'.join((root_uri,'network')),
            supplies = []
        )
        self._ifaces = []
        self.tms = []

    async def load(self):
        for tm in self.tms:
            await tm.bind()

    async def update(self, t, content):
        if not content:
            return
        tr_block = False
        err_block = False
        tr_block_lines = []
        err_block_lines = []
        for i,line in enumerate(content.split('\n')):
            fields = line.split()
            if len(fields) > 1:
                if fields[0].count(':') == 1:
                    # this should be a summary or average line
                    if (fields[1] == 'IFACE' and fields[2] == 'rxpck/s') or tr_block:
                        tr_block = True
                        err_block = False
                        tr_block_lines.append(line)
                    elif (fields[1] == 'IFACE' and fields[2] == 'rxerr/s') or err_block:
                        tr_block = False
                        err_block = True
                        err_block_lines.append(line)
            else:
                tr_block = False
                err_block = False
        if tr_block_lines or err_block_lines:
            value = ''
            for block in [tr_block_lines, err_block_lines]:
                margin = get_margin(block)
                if margin > -1:
                    for line in block:
                        value += line[margin:]+'\n'
                    value += '\n'
            await self.find_missing(value)
            self.info.insert(t=t, value=value)

    async def find_missing(self, content):
        ifaces = []
        iface_block = False
        for line in content.split('\n'):
            fields = line.split()
            if iface_block and fields:
                iface_name = fields[0]
                final_name = ''
                for i,c in enumerate(iface_name):
                    if validation.is_local_uri(c):
                        final_name += c
                    elif i>0:
                        final_name += '_'
                if validation.is_local_uri(final_name):
                    ifaces.append(final_name)
            else:
                iface_block = False
                if fields and fields[0] == 'IFACE':
                    iface_block = True
        for iface in ifaces:
            if not iface in self._ifaces:
                logging.logger.debug('New network interface found: '+iface)
                self._ifaces.append(iface)
                for m in self._iface_metrics:
                    self.info.supplies.append('.'.join((iface,m)))
                f_params = {
                    'ifutil':Datapoint(uri='.'.join((self.info.uri,iface,'ifutil')))
                }
                tm = transfermethod(f=self.check_anom, f_params=f_params)
                await tm.bind()
                self.tms.append(tm)

    async def check_anom(self, t, ifutil):
        ifutil_s = await ifutil.get(end=t, count=2)
        if ifutil_s is not None:
            anom = Anomaly(ifutil)
            if ifutil_s[0] > 80:
                # set anomaly signal
                anom.insert(t=t, value=1)
            elif len(ifutil_s) == 2 and ifutil_s.index[1] < t and ifutil_s[1] > 80:
                # cancel previously set anomaly
                anom.insert(t=t, value=0)
Example #19
 async def test_process_message_send_multi_data_success_no_notify_already_stored_values(
         self):
     ''' process_message_send_multi_data should store contents and notify tmIndex only about values not already stored '''
     try:
         username = '******'
         privkey = crypto.generate_rsa_key()
         t = TimeUUID()
         uris = [{
             'uri': 'datasource1',
             'type': Metrics.DATASOURCE.value,
             'content': 'ds data'
         }, {
             'uri': 'datasource2',
             'type': Metrics.DATASOURCE.value,
             'content': 'ds data'
         }, {
             'uri': 'datasource3',
             'type': Metrics.DATASOURCE.value,
             'content': 'ds data'
         }, {
             'uri': 'datasource4',
             'type': Metrics.DATASOURCE.value,
             'content': 'ds data'
         }, {
             'uri': 'datapoint1',
             'type': Metrics.DATAPOINT.value,
             'content': '1232'
         }, {
             'uri': 'datapoint2',
             'type': Metrics.DATAPOINT.value,
             'content': '1233'
         }, {
             'uri': 'datapoint3',
             'type': Metrics.DATAPOINT.value,
             'content': '1234'
         }, {
             'uri': 'datapoint4',
             'type': Metrics.DATAPOINT.value,
             'content': '1235'
         }]
         msg = messages.SendMultiData(t, uris)
         session = KomlogSession(username=username, privkey=privkey)
         bck = tmIndex.metrics_updated
         tmIndex.metrics_updated = Mock(return_value=None)
         self.assertIsNone(
             prmsg.process_message_send_multi_data(msg, session))
         metrics = [
             Datasource('datasource1', session=session),
             Datasource('datasource2', session=session),
             Datasource('datasource3', session=session),
             Datasource('datasource4', session=session),
             Datapoint('datapoint1', session=session),
             Datapoint('datapoint2', session=session),
             Datapoint('datapoint3', session=session),
             Datapoint('datapoint4', session=session)
         ]
         for uri in uris:
             if uri['type'] == Metrics.DATASOURCE.value:
                 smp = Sample(Datasource(uri['uri'], session=session), t,
                              uri['content'])
             else:
                 smp = Sample(Datapoint(uri['uri'], session=session), t,
                              uri['content'])
             self.assertTrue(
                 session.store.is_in(smp.metric, smp.t, smp.value))
         self.assertEqual(tmIndex.metrics_updated.call_args[1]['t'], t)
         self.assertEqual(tmIndex.metrics_updated.call_args[1]['metrics'],
                          metrics)
         updated_uris = [{
             'uri': 'datasource1',
             'type': Metrics.DATASOURCE.value,
             'content': 'ds data'
         }, {
             'uri': 'datasource2',
             'type': Metrics.DATASOURCE.value,
             'content': 'ds data2'
         }, {
             'uri': 'datasource3',
             'type': Metrics.DATASOURCE.value,
             'content': 'ds data'
         }, {
             'uri': 'datasource4',
             'type': Metrics.DATASOURCE.value,
             'content': 'ds data'
         }, {
             'uri': 'datasource5',
             'type': Metrics.DATASOURCE.value,
             'content': 'ds data'
         }, {
             'uri': 'datapoint1',
             'type': Metrics.DATAPOINT.value,
             'content': '1233'
         }, {
             'uri': 'datapoint2',
             'type': Metrics.DATAPOINT.value,
             'content': '1234'
         }, {
             'uri': 'datapoint3',
             'type': Metrics.DATAPOINT.value,
             'content': '1234'
         }, {
             'uri': 'datapoint4',
             'type': Metrics.DATAPOINT.value,
             'content': '1235'
         }, {
             'uri': 'datapoint5',
             'type': Metrics.DATAPOINT.value,
             'content': '1235'
         }]
         msg = messages.SendMultiData(t, updated_uris)
         updated_metrics = [
             Datasource('datasource2', session=session),
             Datasource('datasource5', session=session),
             Datapoint('datapoint1', session=session),
             Datapoint('datapoint2', session=session),
             Datapoint('datapoint5', session=session)
         ]
         self.assertIsNone(
             prmsg.process_message_send_multi_data(msg, session))
         for uri in updated_uris:
             if uri['type'] == Metrics.DATASOURCE.value:
                 smp = Sample(Datasource(uri['uri']), t, uri['content'])
             else:
                 smp = Sample(Datapoint(uri['uri']), t, uri['content'])
             self.assertTrue(
                 session.store.is_in(smp.metric, smp.t, smp.value))
         self.assertEqual(tmIndex.metrics_updated.call_args[1]['t'], t)
         self.assertEqual(tmIndex.metrics_updated.call_args[1]['metrics'],
                          updated_metrics)
         sessionIndex.unregister_session(session.sid)
         tmIndex.metrics_updated = bck
     except:
         tmIndex.metrics_updated = bck
         raise
Example #20
 async def test_send_samples_success(self):
     ''' send_samples should create messages and send through the sessions to Komlog '''
     username1 = 'username1'
     privkey1 = crypto.generate_rsa_key()
     username2 = 'username2'
     privkey2 = crypto.generate_rsa_key()
     session1 = KomlogSession(username=username1, privkey=privkey1)
     session2 = KomlogSession(username=username2, privkey=privkey2)
     session1.send_message = test.AsyncMock(return_value=None)
     session2.send_message = test.AsyncMock(return_value=None)
     t_common = TimeUUID()
     t1 = TimeUUID()
     t2 = TimeUUID()
     samples_s1 = [
         Sample(Datasource('datasource1', session=session1), t_common,
                'value'),
         Sample(Datasource('datasource2', session=session1), t_common,
                'value'),
         Sample(Datasource('datasource3', session=session1), t_common,
                'value'),
         Sample(Datasource('datasource4', session=session1), t_common,
                'value'),
         Sample(Datapoint('datapoint1', session=session1), t_common, 1),
         Sample(Datapoint('datapoint2', session=session1), t_common, 1),
         Sample(Datapoint('datapoint3', session=session1), t_common, 1),
         Sample(Datapoint('datapoint4', session=session1), t_common, 1),
         Sample(Datasource('datasource5', session=session1), t1, 'value'),
         Sample(Datapoint('datapoint5', session=session1), t2, 1),
     ]
     samples_s2 = [
         Sample(Datasource('datasource1', session=session2), t_common,
                'value'),
         Sample(Datasource('datasource2', session=session2), t_common,
                'value'),
         Sample(Datasource('datasource3', session=session2), t_common,
                'value'),
         Sample(Datasource('datasource4', session=session2), t_common,
                'value'),
         Sample(Datapoint('datapoint1', session=session2), t_common, 1),
         Sample(Datapoint('datapoint2', session=session2), t_common, 1),
         Sample(Datapoint('datapoint3', session=session2), t_common, 1),
         Sample(Datapoint('datapoint4', session=session2), t_common, 1),
         Sample(Datasource('datasource5', session=session2), t1, 'value'),
         Sample(Datapoint('datapoint5', session=session2), t2, 1),
     ]
     total_samples = []
     for smp in samples_s1:
         total_samples.append(smp)
     for smp in samples_s2:
         total_samples.append(smp)
     response = await prproc.send_samples(total_samples)
     self.assertEqual(session1.send_message.call_count, 3)
     self.assertEqual(session2.send_message.call_count, 3)
     self.assertEqual(response['success'], False)
     self.assertEqual(len(response['errors']), 6)
     for i, m in enumerate(response['errors']):
         self.assertEqual(m['success'], False)
         self.assertEqual(m['error'], 'Unexpected message type')
         msg = m['msg']
         if i == 0:
             self.assertTrue(isinstance(msg, messages.SendMultiData))
             self.assertEqual(msg.t, t_common)
             self.assertEqual(msg.uris, [{
                 'uri': s.metric.uri,
                 'type': s.metric._m_type_,
                 'content': s.value
             } for s in samples_s1[:-2]])
         elif i == 1:
             self.assertTrue(isinstance(msg, messages.SendDsData))
             self.assertEqual(msg.uri, samples_s1[-2].metric.uri)
             self.assertEqual(msg.t, samples_s1[-2].t)
             self.assertEqual(msg.content, samples_s1[-2].value)
         elif i == 2:
             self.assertTrue(isinstance(msg, messages.SendDpData))
             self.assertEqual(msg.uri, samples_s1[-1].metric.uri)
             self.assertEqual(msg.t, samples_s1[-1].t)
             self.assertEqual(msg.content, samples_s1[-1].value)
         elif i == 3:
             self.assertTrue(isinstance(msg, messages.SendMultiData))
             self.assertEqual(msg.t, t_common)
             self.assertEqual(msg.uris, [{
                 'uri': s.metric.uri,
                 'type': s.metric._m_type_,
                 'content': s.value
             } for s in samples_s2[:-2]])
         elif i == 4:
             self.assertTrue(isinstance(msg, messages.SendDsData))
             self.assertEqual(msg.uri, samples_s2[-2].metric.uri)
             self.assertEqual(msg.t, samples_s2[-2].t)
             self.assertEqual(msg.content, samples_s2[-2].value)
         elif i == 5:
             self.assertTrue(isinstance(msg, messages.SendDpData))
             self.assertEqual(msg.uri, samples_s2[-1].metric.uri)
             self.assertEqual(msg.t, samples_s2[-1].t)
             self.assertEqual(msg.content, samples_s2[-1].value)
     sessionIndex.unregister_session(session1.sid)
     sessionIndex.unregister_session(session2.sid)
Example #21
class CPU:

    def __init__(self, root_uri):
        self.info = Datasource(
            uri = '.'.join((root_uri,'cpu')),
            supplies = ['ldavg.1min','ldavg.5min','ldavg.15min','cpu_count','all.user','all.nice','all.system','all.iowait','all.steal','all.idle','tasks.runnable','tasks.total','tasks.blocked']
        )
        self.use_metrics= {
            'user':Datapoint('.'.join((self.info.uri,'all.user'))),
            'nice':Datapoint('.'.join((self.info.uri,'all.nice'))),
            'system':Datapoint('.'.join((self.info.uri,'all.system'))),
            'steal':Datapoint('.'.join((self.info.uri,'all.steal'))),
            'iowait':Datapoint('.'.join((self.info.uri,'all.iowait'))),
            'idle':Datapoint('.'.join((self.info.uri,'all.idle'))),
            'cpu_count':Datapoint('.'.join((self.info.uri,'cpu_count'))),
            't_runnable':Datapoint('.'.join((self.info.uri,'tasks.runnable'))),
            'ldavg1':Datapoint('.'.join((self.info.uri,'ldavg.1min')))
        }

        self.tms = [
            transfermethod(f=self.check_anom, schedule=OnUpdateSchedule(activation_metrics=self.use_metrics)),
        ]

    async def load(self):
        for tm in self.tms:
            await tm.bind()

    async def update(self, t, content):
        if not content:
            return
        cpu_block = False
        load_block = False
        header = ''
        cpu_block_lines = []
        load_block_lines = []
        for i,line in enumerate(content.split('\n')):
            fields = line.split()
            if len(fields) > 1:
                if fields[0].count(':') == 0:
                    # this should be the header with kernel information
                    header = line
                elif fields[0].count(':') == 1:
                    # this should be a summary or average line
                    if (fields[1] == 'CPU' and fields[2] == '%user') or cpu_block:
                        cpu_block = True
                        load_block = False
                        cpu_block_lines.append(line)
                    elif fields[1] == 'runq-sz' or load_block:
                        load_block = True
                        cpu_block = False
                        load_block_lines.append(line)
            else:
                cpu_block = False
                load_block = False
        if header and (cpu_block_lines or load_block_lines):
            value = header+'\n\n'
            for block in [cpu_block_lines, load_block_lines]:
                margin = get_margin(block)
                if margin > -1:
                    for line in block:
                        value += line[margin:]+'\n'
                    value += '\n'
            self.info.insert(t=t, value=value)

    async def check_anom(self, t):
        user = await self.use_metrics['user'].get(t=t)
        system = await self.use_metrics['system'].get(t=t)
        steal = await self.use_metrics['steal'].get(t=t)
        nice = await self.use_metrics['nice'].get(t=t)
        cpu_count = await self.use_metrics['cpu_count'].get(t=t)
        t_runnable = await self.use_metrics['t_runnable'].get(t=t)
        ldavg1 = await self.use_metrics['ldavg1'].get(t=t)
        # .get may return None when there is no sample at t, so guard the arithmetic
        have_use = not any(m is None for m in (user, system, steal, nice))
        have_sat = not any(m is None for m in (t_runnable, ldavg1, cpu_count))
        use = user + system + steal + nice if have_use else None
        sat_sz = t_runnable / cpu_count if have_sat else None
        sat_ld = ldavg1 / cpu_count if have_sat else None
        if use is None and (sat_sz is None or sat_ld is None):
            return
        elif use is not None and use[0] > 90:
            anom_metric = Anomaly(metric=self.info)
            anom_metric.insert(t=t, value=1)
        elif sat_sz is not None and sat_ld is not None and sat_sz[0] > 1 and sat_ld[0] > 1:
            anom_metric = Anomaly(metric=self.info)
            anom_metric.insert(t=t, value=1)
        else:
            anom_metric = Anomaly(metric=self.info)
            anom_metric.insert(t=t, value=0)