def _discovery_lost(self, host, attribute):
    """
    Autodiscovery failed to find this attribute in the host but the
    database has this attribute.  Depending on the host's autodiscovery
    policy, delete or disable the Attribute and optionally raise an
    admin Event describing the action taken.

    :param host: Host the stored attribute belongs to
    :param attribute: stored Attribute that was not re-discovered
    """
    # Only act on attributes that autodiscovery is expected to track.
    if attribute.attribute_type.ad_validate and attribute.poll_enabled:
        self.logger.debug('H:%d AT:%d Not found in host: %s',
                          host.id, attribute.attribute_type_id,
                          attribute.index)
        if host.autodiscovery_policy.can_del():
            # BUGFIX: database is modified only when NOT in print-only
            # (dry-run) mode; the condition was previously inverted and
            # deleted the attribute during a dry run.
            if not self.print_only:
                DBSession.delete(attribute)
            event_info = u' - Deleted'
        elif host.autodiscovery_policy.can_disable():
            # BUGFIX: same inversion as above for the disable case.
            if not self.print_only:
                attribute.set_disabled()
            event_info = u' - Disabled'
        else:
            event_info = u''
        if host.autodiscovery_policy.alert_delete and not self.print_only:
            new_event = Event.create_admin(
                host, attribute,
                'Attribute not found in host{0}'.format(event_info))
            if new_event is not None:
                DBSession.add(new_event)
                new_event.process()
def import_user(self):
    """ Users in JFFNMS become users in RNMS """
    imported_count = 0
    user_map = {}
    rows = self.db_handle.execute(
        """SELECT id,username,name FROM clients WHERE id > 2 ORDER BY id""")
    for row in rows:
        # JFFNMS has no email column, so the username doubles as the
        # email address; every imported account gets a fixed password.
        new_user = model.User()
        new_user.user_name = unicode(row[1])
        new_user.display_name = unicode(row[2])
        new_user.email_address = unicode(row[1])
        new_user.password = u'password'
        try:
            DBSession.add(new_user)
            DBSession.flush()
        except IntegrityError as errmsg:
            self.log.error('Error importing users: %s', errmsg)
            transaction.abort()
            return None
        else:
            # Map the old JFFNMS client id to the new RNMS user id.
            user_map[row[0]] = new_user.user_id
            imported_count += 1
    self.log.info('Users: %d added.', imported_count)
    return user_map
def recv_trap(self, recv_addr, recv_msg):
    """
    Decode one or more SNMP notification messages received from
    ``recv_addr`` and store them as SnmpTrap rows.

    :param recv_addr: (address, port, ...) tuple from the socket layer
    :param recv_msg: raw BER-encoded SNMP message bytes
    """
    # Fix the IPv4 mapped addresses ("::ffff:a.b.c.d" -> "a.b.c.d")
    if recv_addr[0][:7] == '::ffff:':
        host_ip = recv_addr[0][7:]
    else:
        host_ip = recv_addr[0]
    host_id = self._get_host_id(host_ip)
    if host_id is None:
        # Traps from hosts we don't manage are silently dropped.
        self.logger.debug('Notification message from unknown host %s',
                          host_ip)
        return
    while recv_msg:
        msg_ver = int(api.decodeMessageVersion(recv_msg))
        if msg_ver in api.protoModules:
            pmod = api.protoModules[msg_ver]
        else:
            self.logger.info('H:%d - Unsupported SNMP version %s from %s',
                             host_id, msg_ver, host_ip)
            return
        # decode() returns the parsed message and any trailing bytes,
        # which drive the next loop iteration.
        req_msg, recv_msg = decoder.decode(
            recv_msg, asn1Spec=pmod.Message(),
        )
        req_pdu = pmod.apiMessage.getPDU(req_msg)
        if req_pdu.isSameTypeWith(pmod.TrapPDU()):
            trap_oid = None
            if msg_ver == api.protoVersion1:
                # SNMPv1 traps carry generic/specific ids that must be
                # converted to a single trap OID.
                trap_oid = self._get_trapv1_oid(pmod, req_pdu)
                if trap_oid is None:
                    return
            new_trap = SnmpTrap(host_id, trap_oid)
            new_trap.set_uptime(
                pmod.apiTrapPDU.getTimeStamp(req_pdu).prettyPrint())
            var_binds = pmod.apiTrapPDU.getVarBindList(req_pdu)
        else:
            # v2c/v3 notifications carry the trap OID inside the
            # varbinds (SNMP_TRAP_OID), extracted below.
            new_trap = SnmpTrap(host_id, None)
            var_binds = pmod.apiPDU.getVarBindList(req_pdu)
        for var_bind in var_binds:
            oid, val = pmod.apiVarBind.getOIDVal(var_bind)
            if oid == SNMP_TRAP_OID:
                new_trap.trap_oid = val.prettyPrint()
            else:
                new_trap.set_varbind(oid.prettyPrint(), val.prettyPrint())
        if new_trap.trap_oid is None:
            # BUGFIX: the format string needs host_id; the argument was
            # missing, which raises a logging format error.
            self.logger.info('H:%d Trap with no trap_oid?', host_id)
        else:
            if self._duplicate_trap(host_id, new_trap.trap_oid):
                self.logger.debug(
                    'H:%d Duplicate Trap,not added OID:%s',
                    host_id, new_trap.trap_oid)
            else:
                self.logger.debug('H:%d New Trap v%s OID:%s',
                                  host_id, msg_ver, new_trap.trap_oid)
                DBSession.add(new_trap)
                transaction.commit()
def import_user(self):
    """ Users in JFFNMS become users in RNMS """
    count = 0
    id_map = {}
    result = self.db_handle.execute("""SELECT id,username,name FROM clients WHERE id > 2 ORDER BY id""")
    for old_id, username, real_name in result:
        # No email field in JFFNMS; reuse the username.  All imported
        # accounts start with the same placeholder password.
        rnms_user = model.User()
        rnms_user.user_name = unicode(username)
        rnms_user.display_name = unicode(real_name)
        rnms_user.email_address = unicode(username)
        rnms_user.password = u'password'
        try:
            DBSession.add(rnms_user)
            DBSession.flush()
        except IntegrityError as errmsg:
            self.log.error('Error importing users: %s', errmsg)
            transaction.abort()
            return None
        # Remember the old-id -> new-id mapping for later importers.
        id_map[old_id] = rnms_user.user_id
        count += 1
    self.log.info('Users: %d added.', count)
    return id_map
def import_interface(self):
    """Import JFFNMS TCP/IP interfaces (type 4) into RNMS Iface rows.

    Interface properties arrive as (field, value) pairs; field 3 is the
    SNMP ifIndex, 4 the name and 6 the speed.
    """
    created = 0
    pending = {}
    try:
        result = self.db_handle.execute(
            """SELECT i.id,i.host,f.field,f.value FROM interfaces AS i, interfaces_values AS f WHERE i.type=4 and i.id=f.interface AND f.field IN (3,4,6)""")
        for old_id, old_host, field, value in result:
            # First sighting of this interface id: create the row and
            # bind it to its (translated) host.
            if old_id not in pending:
                new_iface = model.Iface()
                new_iface.host_id = self.host_id(old_host)
                pending[old_id] = new_iface
            if field == 3:
                pending[old_id].ifindex = int(value)
            elif field == 4:
                pending[old_id].display_name = unicode(value)
            elif field == 6:
                pending[old_id].speed = int(value)
        for new_iface in pending.values():
            DBSession.add(new_iface)
            DBSession.flush()
            created += 1
    except IntegrityError as errmsg:
        self.log.error('Error importing interfaces: %s', errmsg)
        transaction.abort()
        return None
    else:
        self.log.info('Interfaces: %d added.', created)
        return []
def _run_matches(self, logfile_id, match_rows, loglines):
    """ Go over the loglines looking for matches """
    processed = 0
    for logline in loglines:
        processed += 1
        # Every match row gets a chance at every line; a successful
        # match yields the keyword arguments for a new Event.
        for matcher in match_rows:
            event_fields = matcher.try_match(logline)
            if event_fields:
                DBSession.add(Event(**event_fields))
    self.logger.info("LOGF(%s): %d messages processed",
                     logfile_id, processed)
def setUp(self):
    """Setup test fixture for each model test method."""
    try:
        # Dependencies override the static attrs when keys collide.
        merged = dict(self.attrs)
        merged.update(self.do_get_dependencies())
        self.obj = self.klass(**merged)
        DBSession.add(self.obj)
        DBSession.flush()
        return self.obj
    except:
        # Any failure leaves the session dirty; roll back before
        # re-raising so later tests start clean.
        DBSession.rollback()
        raise
def import_snmp(self, old_comm):
    """
    Convert a JFFNMS community definition string into an RNMS
    SnmpCommunity row, reusing an existing matching row when possible.

    :param old_comm: JFFNMS string such as ``v1:public|...`` or
        ``v3:name|authNoPriv|md5|pass|...``; may be None
    :returns: the SnmpCommunity id, or 1 (the default community) when
        the input cannot be parsed
    """
    if old_comm is None:
        comm = model.SnmpCommunity.by_name(u'None')
        if comm is not None:
            return comm.id
        # BUGFIX: previously fell through and crashed on
        # None.split(':'); use the default community instead.
        return 1
    try:
        (comm_ver, comm_data) = old_comm.split(':')
    except ValueError:
        # Not in "vN:data" form - fall through to the default below.
        pass
    else:
        # comm_ver is like "v2"; strip the leading 'v'.
        comm_ver = int(comm_ver[1:])
        # NOTE(review): this keeps the "vN:" prefix in the display
        # name since it splits the original string - confirm intended.
        display_name = unicode(old_comm.split('|')[0])
        comm_fields = comm_data.split('|')
        comm_name = comm_fields[0]
        # Reuse an existing row with the same community and version.
        comm_id = DBSession.query(model.SnmpCommunity.id).\
            select_from(model.SnmpCommunity).\
            filter(and_(
                model.SnmpCommunity.community == comm_name,
                model.SnmpCommunity.version == comm_ver)).\
            scalar()
        if comm_id is not None:
            return comm_id
        new_comm = model.SnmpCommunity()
        new_comm.display_name = display_name
        new_comm.version = comm_ver
        if comm_ver == 3:
            # SNMPv3: map the JFFNMS security level and protocols.
            if comm_fields[1] == 'noAuthNoPriv':
                new_comm.set_v3auth_none()
            elif comm_fields[1] in ('authNoPriv', 'authPriv'):
                if comm_fields[2] == 'md5':
                    new_comm.set_v3auth_md5(comm_name, comm_fields[3])
                else:
                    new_comm.set_v3auth_sha(comm_name, comm_fields[3])
                if comm_fields[1] != 'authPriv' or comm_fields[5] == '':
                    new_comm.set_v3privacy_none()
                elif comm_fields[4] == 'des':
                    new_comm.set_v3privacy_des(comm_fields[5])
                else:
                    new_comm.set_v3privacy_aes(comm_fields[5])
        else:
            # v1/v2c just carry the community string.
            new_comm.community = comm_name
        DBSession.add(new_comm)
        DBSession.flush()
        return new_comm.id
    return 1
def _discovery_found(self, host, atype_id, attribute):
    """
    Autodiscovery has found a new attribute that is not stored in
    the database.
    """
    # Guard clauses: bail out unless the policy allows adding.
    if not host.autodiscovery_policy.can_add(attribute):
        return
    self.logger.debug('H:%d AT:%d New Interface Found: %s',
                      host.id, atype_id, attribute.index)
    if not host.autodiscovery_policy.permit_add:
        return
    if self.print_only:
        # Dry-run mode: report only, no database changes.
        self.logger.debug('H:%d AT:%d Added %s',
                          host.id, atype_id, attribute.index)
    else:
        stored_att = Attribute.from_discovered(host, attribute)
        DBSession.add(stored_att)
        DBSession.flush()
        self.logger.debug('H:%d AT:%d Added %s = %d',
                          host.id, atype_id, attribute.index,
                          stored_att.id)
def bulk_add(self, h, attribs):
    """ From a discovery phase, add the following attributes

    :param h: Host id the attributes belong to
    :param attribs: JSON-encoded list of discovered attribute dicts
    :returns: dict with either a ``status`` or an ``errors`` message
    """
    if tmpl_context.form_errors:
        self.process_form_errors()
        return {}
    host = Host.by_id(h)
    if host is None:
        return dict(errors='Unknown Host ID {}'.format(h))
    old_att_id = None
    new_count = 0
    decoded_attribs = json.loads(attribs)
    for vals in decoded_attribs:
        if old_att_id != vals['atype_id']:
            attribute_type = AttributeType.by_id(vals['atype_id'])
            if attribute_type is None:
                return dict(errors='Unknown Attribute Type ID {}'.format(
                    vals['atype_id']))
            # BUGFIX: remember the type id so the lookup above only
            # runs when the type changes; it previously ran for every
            # attribute because old_att_id was never updated.
            old_att_id = vals['atype_id']
        # Skip attributes we have already discovered for this host.
        if Attribute.discovered_exists(host.id, attribute_type.id,
                                       vals['id']):
            continue
        new_attribute = Attribute(host=host,
                                  attribute_type=attribute_type,
                                  display_name=vals['display_name'],
                                  index=vals['id'])
        try:
            admin_state = State(name=vals['admin_state'])
        except ValueError:
            new_attribute.admin_state = State.UNKNOWN
        else:
            new_attribute.admin_state = int(admin_state)
        # Unknown operational states default to "up".
        new_attribute.state = EventState.by_name(vals['oper_state'])
        if new_attribute.state is None:
            new_attribute.state = EventState.get_up()
        for tag, value in vals['fields'].items():
            new_attribute.set_field(tag, value)
        DBSession.add(new_attribute)
        new_count += 1
    return dict(status='{} Attributes added'.format(new_count))
def bulk_add(self, h, attribs):
    """ From a discovery phase, add the following attributes

    :param h: Host id the attributes belong to
    :param attribs: JSON-encoded list of discovered attribute dicts
    :returns: dict with either a ``status`` or an ``errors`` message
    """
    if tmpl_context.form_errors:
        self.process_form_errors()
        return {}
    host = Host.by_id(h)
    if host is None:
        return dict(errors='Unknown Host ID {}'.format(h))
    old_att_id = None
    new_count = 0
    decoded_attribs = json.loads(attribs)
    for vals in decoded_attribs:
        if old_att_id != vals['atype_id']:
            attribute_type = AttributeType.by_id(vals['atype_id'])
            if attribute_type is None:
                return dict(errors='Unknown Attribute Type ID {}'.
                            format(vals['atype_id']))
            # BUGFIX: cache the current type id; it was never set, so
            # AttributeType.by_id() ran on every single iteration.
            old_att_id = vals['atype_id']
        # Already discovered attributes are skipped, not duplicated.
        if Attribute.discovered_exists(host.id, attribute_type.id,
                                       vals['id']):
            continue
        new_attribute = Attribute(
            host=host, attribute_type=attribute_type,
            display_name=vals['display_name'],
            index=vals['id'])
        try:
            admin_state = State(name=vals['admin_state'])
        except ValueError:
            new_attribute.admin_state = State.UNKNOWN
        else:
            new_attribute.admin_state = int(admin_state)
        # Fall back to the "up" state for unknown operational states.
        new_attribute.state = EventState.by_name(vals['oper_state'])
        if new_attribute.state is None:
            new_attribute.state = EventState.get_up()
        for tag, value in vals['fields'].items():
            new_attribute.set_field(tag, value)
        DBSession.add(new_attribute)
        new_count += 1
    return dict(status='{} Attributes added'.format(new_count))
def import_hostconfig(self):
    """Import JFFNMS host configuration backups into HostConfig rows.

    :returns: empty list on success, None on database error
    """
    conf_count = 0
    try:
        result = self.db_handle.execute("""SELECT date,host,config FROM hosts_config WHERE id > 1 ORDER BY id""")
        for row in result:
            conf = model.HostConfig()
            conf.created = datetime.datetime.fromtimestamp(row[0])
            conf.host_id = self.host_id(row[1])
            conf.config = row[2]
            DBSession.add(conf)
            DBSession.flush()
            # BUGFIX: the counter was never incremented, so the log
            # below always reported 0 configs added.
            conf_count += 1
    except IntegrityError as errmsg:
        self.log.error('Error importing host configs: %s', errmsg)
        transaction.abort()
        return None
    else:
        self.log.info('Hosts Config: %d added.', conf_count)
        return []
def import_hostconfig(self):
    """Import JFFNMS host configuration backups into HostConfig rows.

    :returns: empty list on success, None on database error
    """
    conf_count = 0
    try:
        result = self.db_handle.execute(
            """SELECT date,host,config FROM hosts_config WHERE id > 1 ORDER BY id""")
        for row in result:
            conf = model.HostConfig()
            conf.created = datetime.datetime.fromtimestamp(row[0])
            conf.host_id = self.host_id(row[1])
            conf.config = row[2]
            DBSession.add(conf)
            DBSession.flush()
            # BUGFIX: count each imported config; previously the log
            # below always reported 0 added.
            conf_count += 1
    except IntegrityError as errmsg:
        self.log.error('Error importing host configs: %s', errmsg)
        transaction.abort()
        return None
    else:
        self.log.info('Hosts Config: %d added.', conf_count)
        return []
def _discovery_found(self, host, atype_id, attribute):
    """
    Autodiscovery has found a new attribute that is not stored in
    the database.
    """
    policy = host.autodiscovery_policy
    if policy.can_add(attribute):
        self.logger.debug('H:%d AT:%d New Interface Found: %s',
                          host.id, atype_id, attribute.index)
        if policy.permit_add:
            if self.print_only:
                # Dry run: log what would have been added.
                self.logger.debug('H:%d AT:%d Added %s',
                                  host.id, atype_id, attribute.index)
            else:
                persisted = Attribute.from_discovered(host, attribute)
                DBSession.add(persisted)
                DBSession.flush()
                self.logger.debug('H:%d AT:%d Added %s = %d',
                                  host.id, atype_id, attribute.index,
                                  persisted.id)
def _discovery_validate(self, host, known_att, disc_att):
    """
    Autodiscovery has found an known Attribute.  If required this
    method will validate the fields to the latest values.

    :param host: Host the attribute belongs to
    :param known_att: Attribute stored in the database
    :param disc_att: freshly discovered attribute data
    """
    changed_fields = []
    if not known_att.attribute_type.ad_validate:
        return
    # Compare the stored name against the (length-limited) new one.
    if known_att.display_name !=\
            disc_att.display_name[:known_att.display_name_len]:
        changed_fields.append(
            "Display Name to \"{0}\" was \"{1}\"".format(
                disc_att.display_name, known_att.display_name))
        # BUGFIX: update the stored name only when NOT in print-only
        # mode; the condition was previously inverted, so the dry run
        # mutated the database object.
        if not self.print_only:
            known_att.display_name = disc_att.display_name
    tracked_fields = [(f.tag, f.display_name)
                      for f in known_att.attribute_type.fields
                      if f.tracked]
    for tag, fname in tracked_fields:
        known_value = known_att.get_field(tag)
        disc_value = disc_att.get_field(tag)
        if known_value is not None and disc_value is not None and\
                known_value != disc_value:
            changed_info = "{0} to \"{1}\" was \"{2}\"".format(
                fname, disc_value, known_value)
            self.logger.debug("H:%d A:%d Changed Field: %s",
                              known_att.host.id, known_att.id,
                              changed_info)
            changed_fields.append(changed_info)
            if host.autodiscovery_policy.permit_modify and \
                    not self.print_only:
                known_att.set_field(tag, disc_value)
    # Raise one admin event summarising every detected change.
    if not self.print_only and changed_fields != []:
        new_event = Event.create_admin(
            host, known_att,
            'detected modification'+(', '.join(changed_fields)))
        if new_event is not None:
            DBSession.add(new_event)
            new_event.process()
def import_host(self):
    """Import JFFNMS hosts into RNMS Host rows.

    :returns: dict mapping old JFFNMS host id -> new RNMS host id,
        or None on database error
    """
    add_count = 0
    hosts = {}
    try:
        result = self.db_handle.execute(
            '''SELECT id,ip,name,rocommunity,rwcommunity,zone,tftp, autodiscovery,autodiscovery_default_customer,show_host, poll,creation_date,modification_date,last_poll_date, sysobjectid,config_type FROM hosts WHERE id>1 ORDER by id''')
        for row in result:
            host = model.Host(mgmt_address=row[1], display_name=row[2])
            # Communities are imported (or reused) on the fly; the
            # trap community mirrors the read-only one.
            host.ro_community_id = self.import_snmp(row[3])
            host.trap_community_id = host.ro_community_id
            host.rw_community_id = self.import_snmp(row[4])
            host.zone_id = self.zone_id(row[5])
            host.tftp_server = row[6]
            host.autodiscovery_policy_id = row[7]
            host.default_user_id = self.user_id(row[8])
            host.show_host = (row[9] == 1)
            host.pollable = (row[10] == 1)
            host.created = datetime.datetime.fromtimestamp(row[11])
            host.updated = datetime.datetime.fromtimestamp(row[12])
            # Schedule the first autodiscovery 30 minutes from now.
            host.discovered = datetime.datetime.now()
            host.next_discover = host.discovered + \
                datetime.timedelta(minutes=30)
            host.sysobjid = row[14]
            # host.config_backup_type_id = row[15]
            DBSession.add(host)
            DBSession.flush()
            hosts[row[0]] = host.id
            add_count += 1
    except IntegrityError as errmsg:
        # BUGFIX: message said "users" in the host importer.
        self.log.error('Error importing hosts: %s', errmsg)
        transaction.abort()
        return None
    else:
        self.log.info('Hosts: %d added.', add_count)
        return hosts
def analyze_attribute(self, attribute):
    """ Analyze the SLA against the given attribute """
    sla = self.slas.get(attribute.sla_id)
    if sla is None:
        self.logger.error("A:%d - SLA id %d not found",
                          attribute.id, attribute.sla_id)
        return
    sla_result, event_details = sla.analyze(self.logger, attribute)
    if sla_result is False:
        self.logger.debug('A%d: Final Result: False', attribute.id)
        return
    if sla_result is True:
        # SLA breached: raise an SLA event carrying the details.
        self.logger.debug('A%d: Final Result: True', attribute.id)
        new_event = Event.create_sla(
            attribute, sla.event_text, ', '.join(event_details))
        if new_event is None:
            self.logger.error('A%d: Cannot create event', attribute.id)
        else:
            DBSession.add(new_event)
def analyze_attribute(self, attribute):
    """ Analyze the SLA against the given attribute """
    try:
        sla = self.slas[attribute.sla_id]
    except KeyError:
        self.logger.error("A:%d - SLA id %d not found",
                          attribute.id, attribute.sla_id)
        return
    result, details = sla.analyze(self.logger, attribute)
    if result is False:
        self.logger.debug('A%d: Final Result: False', attribute.id)
    elif result is True:
        # SLA condition met: record an SLA event with the details.
        self.logger.debug('A%d: Final Result: True', attribute.id)
        sla_event = Event.create_sla(attribute, sla.event_text,
                                     ', '.join(details))
        if sla_event is not None:
            DBSession.add(sla_event)
        else:
            self.logger.error('A%d: Cannot create event', attribute.id)
def _discovery_validate(self, host, known_att, disc_att):
    """
    Autodiscovery has found an known Attribute.  If required this
    method will validate the fields to the latest values.

    :param host: Host the attribute belongs to
    :param known_att: Attribute stored in the database
    :param disc_att: freshly discovered attribute data
    """
    changed_fields = []
    if not known_att.attribute_type.ad_validate:
        return
    # Compare stored name against the length-limited discovered name.
    if known_att.display_name !=\
            disc_att.display_name[:known_att.display_name_len]:
        changed_fields.append("Display Name to \"{0}\" was \"{1}\"".format(
            disc_att.display_name, known_att.display_name))
        # BUGFIX: mutate the stored attribute only when NOT in
        # print-only mode; the test was previously inverted.
        if not self.print_only:
            known_att.display_name = disc_att.display_name
    tracked_fields = [(f.tag, f.display_name)
                      for f in known_att.attribute_type.fields
                      if f.tracked]
    for tag, fname in tracked_fields:
        known_value = known_att.get_field(tag)
        disc_value = disc_att.get_field(tag)
        if known_value is not None and disc_value is not None and\
                known_value != disc_value:
            changed_info = "{0} to \"{1}\" was \"{2}\"".format(
                fname, disc_value, known_value)
            self.logger.debug("H:%d A:%d Changed Field: %s",
                              known_att.host.id, known_att.id,
                              changed_info)
            changed_fields.append(changed_info)
            if host.autodiscovery_policy.permit_modify and \
                    not self.print_only:
                known_att.set_field(tag, disc_value)
    # Summarise every change in a single admin event.
    if not self.print_only and changed_fields != []:
        new_event = Event.create_admin(
            host, known_att,
            'detected modification' + (', '.join(changed_fields)))
        if new_event is not None:
            DBSession.add(new_event)
            new_event.process()
def import_zone(self):
    """Import JFFNMS zones into RNMS Zone rows.

    :returns: dict mapping old JFFNMS zone id -> new RNMS zone id,
        or None on database error
    """
    add_count = 0
    zones = {}
    try:
        result = self.db_handle.execute(
            """SELECT id,zone,shortname,image,show_zone FROM zones WHERE id > 1 ORDER BY id""")
        for row in result:
            zone = model.Zone(display_name=unicode(row[1]),
                              short_name=unicode(row[2]),
                              icon=row[3])
            zone.showable = (row[4] == 1)
            DBSession.add(zone)
            DBSession.flush()
            zones[row[0]] = zone.id
            add_count += 1
    except IntegrityError as errmsg:
        self.log.error('Error importing zones: %s', errmsg)
        transaction.abort()
        # BUGFIX: removed exit() which killed the whole process and
        # made the return unreachable; every sibling importer simply
        # returns None so the caller can report the failure.
        return None
    else:
        self.log.info('Zones: %d added.', add_count)
        return zones
def _run_event(self, poller_row, attribute, poller_result, always=False):
    """ Backend: event
    Raises an event if required.
    poller parameters: <event_type_tag>,[<default>],[<damp_time>]
    event_type_tag: tag used to find the correct EventType
    default: if poller_result is nothing use this string
    damp_time: time to wait before raising event
    poller_result: dictionary or (state,info) tuple
      state - optional display_name to match EventState model
      other items are copied into event fields
    """
    event_type = EventType.by_tag(self.parameters[0])
    if event_type is None:
        return "Tag \"{0}\" is not found in EventType table.".\
            format(self.parameters[0])
    try:
        default_input = self.parameters[1]
    except IndexError:
        default_input = ''
    try:
        damp_time = int(self.parameters[2])
    except (IndexError, ValueError):
        damp_time = 1

    # Work out the default state name before looking at the result.
    if default_input == '':
        event_state_name = 'down'
    elif default_input != 'nothing':
        event_state_name = default_input
    else:
        # BUGFIX: always bind event_state_name; previously a dict
        # poller_result without a 'state' key raised
        # UnboundLocalError when the default was 'nothing'.
        event_state_name = None

    event_fields = {}
    if type(poller_result) in (list, tuple):
        # (state, info) tuple; info is optional.
        event_state_name = poller_result[0]
        try:
            event_fields['info'] = poller_result[1]
        except IndexError:
            event_fields['info'] = ''
    elif type(poller_result) is dict:
        # Dict: optional 'state' key; everything else becomes fields.
        try:
            event_state_name = poller_result['state']
        except KeyError:
            pass
        event_fields = {k: v for k, v in poller_result.items()
                        if k != 'state'}
    else:
        event_state_name = poller_result
    if event_state_name is None:
        return "Event state is None, nothing done"
    event_state = EventState.by_name(event_state_name)
    if event_state is None:
        return "Description \"{0}\" is not found in EventState table.".\
            format(event_state_name)
    if always or self._backend_raise_event(attribute, event_type,
                                           event_state, damp_time):
        new_event = Event(
            event_type=event_type,
            attribute=attribute,
            event_state=event_state,
            field_list=event_fields)
        DBSession.add(new_event)
        return "Event added: {0}".format(new_event.id)
    else:
        return "Nothing was done"
def _run_event(self, poller_row, attribute, poller_result, always=False):
    """ Backend: event
    Raises an event if required.
    poller parameters: <event_type_tag>,[<default>],[<damp_time>]
    event_type_tag: tag used to find the correct EventType
    default: if poller_result is nothing use this string
    damp_time: time to wait before raising event
    poller_result: dictionary or (state,info) tuple
      state - optional display_name to match EventState model
      other items are copied into event fields
    """
    event_type = EventType.by_tag(self.parameters[0])
    if event_type is None:
        return "Tag \"{0}\" is not found in EventType table.".\
            format(self.parameters[0])
    try:
        default_input = self.parameters[1]
    except IndexError:
        default_input = ''
    try:
        damp_time = int(self.parameters[2])
    except (IndexError, ValueError):
        damp_time = 1

    # Resolve the default state name first.
    if default_input == '':
        event_state_name = 'down'
    elif default_input != 'nothing':
        event_state_name = default_input
    else:
        # BUGFIX: bind the name in all branches; a dict result without
        # 'state' plus default 'nothing' previously raised
        # UnboundLocalError below.
        event_state_name = None

    event_fields = {}
    if type(poller_result) in (list, tuple):
        # Tuple form: (state, optional info).
        event_state_name = poller_result[0]
        try:
            event_fields['info'] = poller_result[1]
        except IndexError:
            event_fields['info'] = ''
    elif type(poller_result) is dict:
        # Dict form: 'state' is optional, remaining keys are fields.
        try:
            event_state_name = poller_result['state']
        except KeyError:
            pass
        event_fields = {
            k: v for k, v in poller_result.items() if k != 'state'
        }
    else:
        event_state_name = poller_result
    if event_state_name is None:
        return "Event state is None, nothing done"
    event_state = EventState.by_name(event_state_name)
    if event_state is None:
        return "Description \"{0}\" is not found in EventState table.".\
            format(event_state_name)
    if always or self._backend_raise_event(attribute, event_type,
                                           event_state, damp_time):
        new_event = Event(event_type=event_type,
                          attribute=attribute,
                          event_state=event_state,
                          field_list=event_fields)
        DBSession.add(new_event)
        return "Event added: {0}".format(new_event.id)
    else:
        return "Nothing was done"
def import_event(self):
    """Import the last week of JFFNMS events (capped at 300) into RNMS.

    Returns a dict mapping old JFFNMS event id -> new RNMS event id,
    or None on database error.
    """
    add_count = 0
    events = {}
    try:
        result = self.db_handle.execute(
            '''SELECT e.id,e.date, e.host, e.interface, e.state, e.username, e.info, e.referer, e.ack, e.analized, et.description FROM events e, types et WHERE e.type = et.id AND date > adddate(now(),-7) ORDER by e.id LIMIT 300''')
        for row in result:
            # Resolve the event type by its JFFNMS description; fall
            # back to type id 1 when no match is found.
            event_type = model.EventType.by_name(unicode(row[10]))
            if event_type is None:
                event_type = model.EventType.by_id(1)
                self.log.warning(
                    'Event Type %s unable to be found by name.', row[10])
            ev = model.Event(event_type)
            ev.host_id = self.host_id(row[2])
            ev.acknowledged = (row[8] == 1)
            ev.processed = (row[9] == 1)
            ev.created = row[1]
            # Interface could either be referencing a real attribute or
            # a field called that
            ev.attribute = DBSession.query(model.Attribute).filter(
                model.Attribute.host_id == ev.host_id).filter(
                model.Attribute.display_name == unicode(
                    row[3])).first()
            if ev.attribute is None:
                interface_field = model.EventField('interface',
                                                   unicode(row[3]))
                ev.fields.append(interface_field)
                DBSession.add(interface_field)
            # Alarm can be a state, or just a field called 'state'
            ev.event_state = model.EventState.by_name(unicode(row[4]))
            if ev.event_state is None:
                state_field = model.EventField('state', unicode(row[4]))
                ev.fields.append(state_field)
                DBSession.add(state_field)
            # username and info are just fields
            if row[5] != '':
                username_field = model.EventField('user', unicode(row[5]))
                ev.fields.append(username_field)
                DBSession.add(username_field)
            if row[6] != '':
                info_field = model.EventField('info', unicode(row[6]))
                ev.fields.append(info_field)
                DBSession.add(info_field)
            model.DBSession.add(ev)
            # Flush per event so ev.id is populated for the mapping.
            model.DBSession.flush()
            events[row[0]] = ev.id
            add_count += 1
    except IntegrityError as errmsg:
        self.log.error('Error importing events: %s', errmsg)
        transaction.abort()
        return None
    else:
        self.log.info('Events: %d added.', add_count)
        return events
def import_event(self):
    """Import the last week of JFFNMS events (capped at 300) into RNMS.

    Returns a dict mapping old JFFNMS event id -> new RNMS event id,
    or None on database error.
    """
    add_count = 0
    events = {}
    try:
        result = self.db_handle.execute(
            '''SELECT e.id,e.date, e.host, e.interface, e.state, e.username, e.info, e.referer, e.ack, e.analized, et.description FROM events e, types et WHERE e.type = et.id AND date > adddate(now(),-7) ORDER by e.id LIMIT 300''')
        for row in result:
            # Match the event type by description; unknown types fall
            # back to type id 1 with a warning.
            event_type = model.EventType.by_name(unicode(row[10]))
            if event_type is None:
                event_type = model.EventType.by_id(1)
                self.log.warning(
                    'Event Type %s unable to be found by name.', row[10])
            ev = model.Event(event_type)
            ev.host_id = self.host_id(row[2])
            ev.acknowledged = (row[8] == 1)
            ev.processed = (row[9] == 1)
            ev.created = row[1]
            # Interface could either be referencing a real attribute or
            # a field called that
            ev.attribute = DBSession.query(model.Attribute).filter(
                model.Attribute.host_id == ev.host_id).filter(
                model.Attribute.display_name == unicode(row[3])).first()
            if ev.attribute is None:
                interface_field = model.EventField(
                    'interface', unicode(row[3]))
                ev.fields.append(interface_field)
                DBSession.add(interface_field)
            # Alarm can be a state, or just a field called 'state'
            ev.event_state = model.EventState.by_name(unicode(row[4]))
            if ev.event_state is None:
                state_field = model.EventField('state', unicode(row[4]))
                ev.fields.append(state_field)
                DBSession.add(state_field)
            # username and info are just fields
            if row[5] != '':
                username_field = model.EventField('user', unicode(row[5]))
                ev.fields.append(username_field)
                DBSession.add(username_field)
            if row[6] != '':
                info_field = model.EventField('info', unicode(row[6]))
                ev.fields.append(info_field)
                DBSession.add(info_field)
            model.DBSession.add(ev)
            # Flush per event so ev.id is populated for the mapping.
            model.DBSession.flush()
            events[row[0]] = ev.id
            add_count += 1
    except IntegrityError as errmsg:
        self.log.error('Error importing events: %s', errmsg)
        transaction.abort()
        return None
    else:
        self.log.info('Events: %d added.', add_count)
        return events