def process_event(self, event_type, event):
    """Run the event-processing hook chain for one event and log latency.

    Parameters
    ----------
    event_type : str
        Event type name, passed through to the registered hooks.
    event : dict
        The raw event; expected to carry ``'shtime1'``, the event's
        initial timestamp in milliseconds since the epoch (set by the
        feed) — TODO confirm against the producer.
    """
    self.log.debug('process_event() Event: %s %s' % (event_type, event))
    # Enable plugins to define state/status caches specific to
    # one or more event types
    for res in hook.run('process_event', self, event_type, event):
        pass
    # Enable post event processing
    for res in hook.run('post_process_event', self, event_type, event):
        pass
    time4 = time.time() * 1000
    # Report final event processing latency.
    # NOTE: '%2.4s' formatted the float as a *string truncated to four
    # characters* (e.g. '123.'); '%.4f' reports the value numerically.
    self.log.info('Event Processing Final Latency: %.4fms'
                  % (time4 - event['shtime1']))
def process_event(self, event_type, event):
    """Run the event-processing hook chain for one event and log latency.

    Parameters
    ----------
    event_type : str
        Event type name, passed through to the registered hooks.
    event : dict
        The raw event; expected to carry ``'shtime1'``, the event's
        initial timestamp in *seconds* since the epoch (this variant
        converts the delta to ms when logging) — TODO confirm.
    """
    self.log.debug('process_event() Event: %s %s' % (event_type, event))
    # Enable plugins to define state/status caches specific to
    # one or more event types
    for res in hook.run('process_event', self, event_type, event):
        pass
    # Enable post event processing
    for res in hook.run('post_process_event', self, event_type, event):
        pass
    time4 = time.time()
    # Report final event processing latency.
    # NOTE: '%2.4s' formatted the float as a *string truncated to four
    # characters* (e.g. '123.'); '%.4f' reports the value numerically.
    self.log.info('Event Processing Final Latency: %.4fms'
                  % ((time4 - event['shtime1']) * 1000))
def collect(self):
    """Run one full collection session: enumerate URLs via plugins,
    capture per-URL state into a new DB session, post-process states,
    then close and save the session.

    Sets ``self.app.exit_code`` to 0 on completion.
    """
    self.app.log.debug("Inside SysChangeMonBaseController.collect()")
    db = self.app.storage.db
    # NOTE(review): assert is stripped under -O; this is a sanity check,
    # not input validation.
    assert isinstance(db, Model)
    # Open a new session record and stamp its start time (tz-aware).
    session = db.new_session()
    session['start_time'] = datetime.now(tz=get_localzone())
    session.save()
    plugins = self.plugins
    # Phase 1: every plugin contributes URLs, then every plugin gets a
    # chance to filter/transform the combined list.
    urls = []
    for plugin in plugins.values():
        urls += plugin.list_urls()
    for plugin in plugins.values():
        urls = plugin.process_urls(urls)
    # Phase 2: each plugin reads state for every URL it supports;
    # unsupported URLs are skipped via UnsupportedException.
    for label, plugin in plugins.items():
        with db.transaction():
            for url in urls:
                try:
                    statedict = plugin.get_state(url)
                    if statedict is not None:
                        state = session.new_state(url=url, plugin=label,
                                                  **statedict)
                        #self.app.log.debug("read state: %s" % state)
                        state.save()
                except UnsupportedException:
                    pass
    # Phase 3: let every plugin post-process each stored state; persist
    # only states that actually changed.
    with db.transaction():
        for state in session.find_states():
            for plugin in plugins.values():
                old = state.copy()
                new = plugin.process_state(state)
                if old != new:
                    state.save()
                    #self.app.log.debug("updated state: %s" % state)
    # Let 'enumerate' hooks run with the app context.
    for res in hook.run('enumerate', self.app):
        self.app.log.debug('enumerate result: %s' % res)
    # Close out the session with host metadata and end time.
    session['hostname'] = socket.gethostbyaddr(socket.gethostname())[0]
    session['item_count'] = len(urls)
    session['closed'] = True
    session['end_time'] = datetime.now(tz=get_localzone())
    session.save()
    print("session " + session['uuid'] + " saved")
    self.app.exit_code = 0
    return
def test_run(self):
    """Registered hooks fire in ascending weight order."""
    hook.register('nosetests_hook', cement_hook_one, weight=99)
    hook.register('nosetests_hook', cement_hook_two, weight=-1)
    hook.register('nosetests_hook', cement_hook_three, weight=-99)
    # Lowest weight runs first: three (-99), then two (-1), then one (99).
    results = list(hook.run('nosetests_hook'))
    self.eq(results[0], 'kapla 3')
    self.eq(results[1], 'kapla 2')
    self.eq(results[2], 'kapla 1')
def test_register_hooks_meta(self):
    """Hooks supplied via app meta are defined and runnable."""
    def raising_hook():
        raise HookTestException('OK')

    app = self.make_app(
        APP,
        define_hooks=['my_custom_hook'],
        hooks=[('my_custom_hook', raising_hook)],
    )
    app.setup()
    # Draining the generator triggers the registered hook.
    for _res in hook.run('my_custom_hook'):
        pass
def test_run(self):
    """Registered hooks fire in ascending weight order."""
    hook.register("nosetests_hook", cement_hook_one, weight=99)
    hook.register("nosetests_hook", cement_hook_two, weight=-1)
    hook.register("nosetests_hook", cement_hook_three, weight=-99)
    results = [res for res in hook.run("nosetests_hook")]
    # Lowest weight runs first, so results arrive in reverse kapla order.
    expected = ["kapla 3", "kapla 2", "kapla 1"]
    for index, value in enumerate(expected):
        self.eq(results[index], value)
def test_run_bad_hook(self):
    """Running an undefined hook yields nothing and must not raise."""
    for _ in hook.run('some_bogus_hook'):
        pass
def handle_event(self, request):
    """aiohttp handler: parse a JSON batch of column/points events,
    cache each assembled raw event, run pre-processing hooks, and
    schedule full processing on the event loop.

    Parameters
    ----------
    request : aiohttp request whose body is a JSON list of events, each
        with 'name', 'columns', 'points', and 'shtime1' (feed timestamp
        in ms since epoch — TODO confirm against the producer).

    Returns
    -------
    web.Response with a JSON status message; errors are reported in the
    body, never raised to the client.
    """
    self.log.debug('shEventEngine handle_event')
    try:
        text = yield from request.text()
        events = json.loads(text)
        # Assemble individual events from incoming stream
        for event in events:
            self.log.debug('Event Type: %s' % event['name'])
            # Initialize raw event cache if it does not exist yet
            if not self._memory.raw[event['name']]:
                # TODO: Lookup cache size from config
                self._memory.raw[event['name']] = deque(maxlen=5000)
            for p in event['points']:
                # Perform a simple sanity check
                if len(event['columns']) != len(p):
                    self.log.error(
                        'Number of Columns %s mismatches number of '
                        'Points %s' % (len(event['columns']), len(p)))
                # Populate raw event memory
                raw = dict()
                # Carry forward initial timestamp from feed
                raw['shtime1'] = event['shtime1']
                # Timestamp the assembled event in ms since epoch
                raw['shtime2'] = time.time() * 1000
                # Pair each column name with its point value.
                for column, value in zip(event['columns'], p):
                    raw[column] = value
                self.log.debug('raw event: %s' % raw)
                self._memory.raw[event['name']].appendleft(raw)
                # Enable plugins to define state/status caches specific
                # to one or more event types
                for res in hook.run('event_state', self,
                                    event['name'], raw):
                    pass
                # Enable pre event processing
                for res in hook.run('pre_process_event', self,
                                    event['name'], raw):
                    pass
                # Create a task to process event rule(s); lets us return
                # to the service call quickly while processing proceeds
                # asynchronously.
                self._loop.create_task(
                    self.process_event(event['name'], raw))
                time3 = time.time() * 1000
                # Report event latency ('%.4f' replaces the broken
                # '%2.4s' string-truncation format).
                self.log.info('Event Latency: %.4fms'
                              % (raw['shtime2'] - raw['shtime1']))
                self.log.info('Event Processing Init Latency: %.4fms'
                              % (time3 - raw['shtime1']))
        output = {'msg': 'Event Received'}
    except Exception as e:
        # Was 'app.log.error' — inconsistent with self.log used
        # everywhere else in this method (and 'app' may be unbound).
        self.log.error('Event Error: %s' % e)
        output = {'msg': 'Event Error; Event Rejected'}
    return web.Response(body=json.dumps(output).encode('utf-8'))
def handle_event(self, request):
    """aiohttp handler: parse a JSON batch of measurement events, cache
    each raw event, run pre-processing hooks, and schedule full
    processing on the event loop.

    Parameters
    ----------
    request : aiohttp request whose body is a JSON list of events, each
        with 'measurement' and 'shtime1' (feed timestamp in seconds
        since epoch — TODO confirm against the producer).

    Returns
    -------
    web.Response with a JSON status message; errors are reported in the
    body, never raised to the client.
    """
    self.log.debug('shEventEngine handle_event')
    try:
        text = yield from request.text()
        events = json.loads(text)
        # Assemble individual events from incoming stream
        for event in events:
            self.log.debug('Event Type: %s' % event['measurement'])
            # Initialize raw event cache if it does not exist yet
            if not self._memory.raw[event['measurement']]:
                # TODO: Lookup cache size from config
                self._memory.raw[event['measurement']] = deque(maxlen=5000)
            # Populate raw event memory
            raw = copy.deepcopy(event)
            # Timestamp the assembled event
            raw['shtime2'] = time.time()
            # Remove redundant measurement name
            del raw['measurement']
            self.log.debug('raw event: %s' % raw)
            self._memory.raw[event['measurement']].appendleft(raw)
            # Enable plugins to define state/status caches specific to
            # one or more event types
            for res in hook.run('event_state', self,
                                event['measurement'], raw):
                pass
            # Enable pre event processing
            for res in hook.run('pre_process_event', self,
                                event['measurement'], raw):
                pass
            # Create a task to process event rule(s); lets us return to
            # the service call quickly while processing proceeds
            # asynchronously.
            self._loop.create_task(
                self.process_event(event['measurement'], raw))
            time3 = time.time()
            # Report event latency ('%.4f' replaces the broken '%2.4s'
            # string-truncation format).
            self.log.info('Event Latency: %.4fms'
                          % ((raw['shtime2'] - raw['shtime1']) * 1000))
            self.log.info('Event Processing Init Latency: %.4fms'
                          % ((time3 - raw['shtime1']) * 1000))
        output = {'msg': 'Event Received'}
    except Exception as e:
        # Was 'app.log.error' — inconsistent with self.log used
        # everywhere else in this method (and 'app' may be unbound).
        self.log.error('Event Error: %s' % e)
        output = {'msg': 'Event Error; Event Rejected'}
    return web.Response(body=json.dumps(output).encode('utf-8'))
def handle_event(self, request):
    """aiohttp handler: parse a JSON batch of measurement events, cache
    each raw event, run pre-processing hooks, and schedule full
    processing on the event loop.

    Parameters
    ----------
    request : aiohttp request whose body is a JSON list of events, each
        with 'measurement' and 'shtime1' (feed timestamp in seconds
        since epoch — TODO confirm against the producer).

    Returns
    -------
    web.Response with a JSON status message; errors are reported in the
    body, never raised to the client.
    """
    self.log.debug('shEventEngine handle_event')
    try:
        text = yield from request.text()
        events = json.loads(text)
        # Assemble individual events from incoming stream
        for event in events:
            self.log.debug('Event Type: %s' % event['measurement'])
            # Initialize raw event cache if it does not exist yet
            if not self._memory.raw[event['measurement']]:
                # TODO: Lookup cache size from config
                self._memory.raw[event['measurement']] = deque(maxlen=5000)
            # Populate raw event memory
            raw = copy.deepcopy(event)
            # Timestamp the assembled event
            raw['shtime2'] = time.time()
            # Remove redundant measurement name
            del raw['measurement']
            self.log.debug('raw event: %s' % raw)
            self._memory.raw[event['measurement']].appendleft(raw)
            # Enable plugins to define state/status caches specific to
            # one or more event types
            for res in hook.run('event_state', self,
                                event['measurement'], raw):
                pass
            # Enable pre event processing
            for res in hook.run('pre_process_event', self,
                                event['measurement'], raw):
                pass
            # Create a task to process event rule(s); lets us return to
            # the service call quickly while processing proceeds
            # asynchronously.
            self._loop.create_task(
                self.process_event(event['measurement'], raw))
            time3 = time.time()
            # Report event latency ('%.4f' replaces the broken '%2.4s'
            # string-truncation format).
            self.log.info('Event Latency: %.4fms'
                          % ((raw['shtime2'] - raw['shtime1']) * 1000))
            self.log.info('Event Processing Init Latency: %.4fms'
                          % ((time3 - raw['shtime1']) * 1000))
        output = {'msg': 'Event Received'}
    except Exception as e:
        # Was 'app.log.error' — inconsistent with self.log used
        # everywhere else in this method (and 'app' may be unbound).
        self.log.error('Event Error: %s' % e)
        output = {'msg': 'Event Error; Event Rejected'}
    return web.Response(body=json.dumps(output).encode('utf-8'))