def __init__(self, retention=0, dca_min_length=250, logging_level=logging.INFO, cache=True, **kwargs):
    """Perfdata manager: wraps a store with an md5-keyed metadata cache.

    :param retention: default retention, in seconds, used in fields_map.
    :param dca_min_length: minimum DCA length, in seconds.
    :param logging_level: level applied to the 'manager' logger.
    :param cache: enable/disable the in-memory md5 cache.
    :param kwargs: forwarded verbatim to the underlying store.
    """
    super(manager, self).__init__()
    self.logger = logging.getLogger('manager')
    self.logger.setLevel(logging_level)
    # Root storage handle (TODO confirm which namespace this defaults to).
    self.storage = get_storage(account=Account(user="******", group="root"))
    # Store
    self.store = store(logging_level=self.logger.level, **kwargs)
    self.dca_min_length = dca_min_length  # Seconds
    self.retention = retention
    # Cache
    self.cache = cache
    self.cache_max_size = 5000
    self.cache_size = 0
    self.md5_cache = {}
    # Maps public field names to (short storage key, default value).
    self.fields_map = {
        'retention': ('r', self.retention),
        'type': ('t', 'GAUGE'),
        'unit': ('u', None),
        'min': ('mi', None),
        'max': ('ma', None),
        'thd_warn': ('tw', None),
        'thd_crit': ('tc', None)
    }
def __init__(self, *args, **kwargs):
    """Open the object-namespace storage used for index management."""
    super(IndexesModule, self).__init__(*args, **kwargs)
    root = Account(user='******', group='root')
    self.storage = get_storage(namespace='object', account=root)
def __init__(self, config, logger, amqp_pub):
    """Webcore module: read server/auth/session configuration and open the DB.

    :param config: dict-like configuration with 'server', 'auth', 'session'
        and 'webservices' sections.
    :param logger: logger used by the module.
    :param amqp_pub: AMQP publisher kept for later use.
    :raises RuntimeError: when no auth providers are configured.
    """
    self.config = config
    self.logger = logger
    self.amqp_pub = amqp_pub

    # [server] section.
    server = self.config.get('server', {})
    self.debug = server.get('debug', DEFAULT_DEBUG)
    self.enable_crossdomain_send_events = server.get(
        'enable_crossdomain_send_events', DEFAULT_ECSE)
    self.root_directory = os.path.expanduser(
        server.get('root_directory', DEFAULT_ROOT_DIR))

    # [auth] section: at least one provider is mandatory.
    auth = self.config.get('auth', {})
    self.providers = cfg_to_array(auth.get('providers', ''))
    if len(self.providers) == 0:
        self.logger.critical(
            'Missing providers. Cannot launch webcore module.')
        raise RuntimeError('Missing providers')

    # [session] section: cookie lifetime, secret and data directory.
    session = self.config.get('session', {})
    self.cookie_expires = int(session.get('cookie_expires',
                                          DEFAULT_COOKIES_EXPIRE))
    self.secret = session.get('secret', DEFAULT_SECRET)
    self.data_dir = session.get('data_dir', DEFAULT_DATA_DIR)

    self.webservices = self.config.get('webservices', {})
    # TODO: Replace with MongoStorage
    self.db = get_storage(account=Account(user='******', group='root'))
    self.stopping = False
    self.webmodules = {}
    self.auth_backends = {}
def __init__(self, *args, **kwargs):
    """Prepare raw backends for the collections this engine cleans."""
    super(engine, self).__init__(*args, **kwargs)
    # Backends for the periodically-cleaned collections, keyed by name.
    self.clean_collection = {
        name: get_storage(name, account=Account(user='******')).get_backend()
        for name in ('events', 'events_log')
    }
    # Direct backend on the configuration objects collection.
    self.object = get_storage(
        'object', account=Account(user='******')
    ).get_backend()
def pre_run(self):
    """Open the object storage, then load selectors and run a first beat."""
    root = Account(user="******", group="root")
    self.storage = get_storage(namespace='object', account=root)
    self.reload_selectors()
    self.beat()
def __init__(
    self, namespace, confnamespace='object', storage=None, autolog=False,
    *args, **kwargs
):
    """Archiver: buffers incoming events and bulk-inserts them into Mongo.

    :param namespace: events collection name; '<namespace>_log' holds the log.
    :param confnamespace: collection holding configuration records.
    :param storage: optional pre-built storage; created on demand when falsy.
    :param autolog: automatic logging flag (used elsewhere — TODO confirm).
    """
    super(Archiver, self).__init__(*args, **kwargs)
    self.namespace = namespace
    self.namespace_log = namespace + '_log'

    # Bulk operation configuration
    self.last_bulk_insert_date = time()
    self.bulk_ids = []
    # How many events can be buffered
    self.bulk_amount = 500
    # What is the maximum duration until bulk insert
    self.bulk_delay = 3
    self.incoming_events = {}

    self.autolog = autolog
    self.logger.debug(u"Init Archiver on %s" % namespace)
    self.account = Account(user="******", group="root")

    if not storage:
        self.logger.debug(" + Get storage")
        self.storage = get_storage(
            namespace=namespace, logging_level=self.log_lvl
        )
    else:
        self.storage = storage

    # Configuration storage/collection, separate from the events collection.
    self.conf_storage = get_storage(
        namespace=confnamespace, logging_level=self.log_lvl
    )
    self.conf_collection = self.conf_storage.get_backend(confnamespace)
    self.collection = self.storage.get_backend(namespace)

    self.amqp = Amqp(
        logging_level=self.log_lvl, logging_name='archiver-amqp'
    )
    self.reset_stealthy_event_duration = time()
    self.reset_stats()
def get_collection(self, collection):
    """Return the raw backend for *collection*, memoizing the storage handle."""
    cached = self.clean_collection.get(collection)
    if cached is None:
        cached = get_storage(collection, account=Account(user='******'))
        self.clean_collection[collection] = cached
    return cached.get_backend()
def __init__(self, *args, **kwargs):
    """Set up the migration logger and the object-namespace storage."""
    super(IndexesModule, self).__init__(*args, **kwargs)
    self.logger = Logger.get('migrationmodule', MigrationModule.LOG_PATH)
    root = Account(user='******', group='root')
    self.storage = get_storage(namespace='object', account=root)
def __init__(self, storage=None):
    """Bind to *storage* (or the default object storage) and keep the
    'downtime' backend handy."""
    if not storage:
        root = Account(user="******", group="root")
        storage = get_storage(namespace='object', account=root)
    self.storage = storage
    self.backend = storage.get_backend('downtime')
def __init__(
    self, namespace, confnamespace='object', storage=None, autolog=False,
    amqp_pub=None, *args, **kwargs
):
    """Archiver: buffers incoming events and bulk-inserts them into Mongo.

    :param namespace: events collection name; '<namespace>_log' holds the log.
    :param confnamespace: collection holding configuration records.
    :param storage: optional pre-built storage; created on demand when falsy.
    :param autolog: automatic logging flag (used elsewhere — TODO confirm).
    :param amqp_pub: optional AMQP publisher; a default one is created
        when None.
    """
    super(Archiver, self).__init__()
    self.namespace = namespace
    self.namespace_log = namespace + '_log'

    # Bulk operation configuration
    self.last_bulk_insert_date = time()
    self.bulk_ids = []
    # How many events can be buffered
    self.bulk_amount = 500
    # What is the maximum duration until bulk insert
    self.bulk_delay = 3
    self.incoming_events = {}

    self.autolog = autolog
    self.logger.debug("Init Archiver on %s" % namespace)
    self.account = Account(user="******", group="root")

    if not storage:
        self.logger.debug(" + Get storage")
        self.storage = get_storage(
            namespace=namespace, logging_level=self.log_lvl
        )
    else:
        self.storage = storage

    # Configuration storage/collection, separate from the events collection.
    self.conf_storage = get_storage(
        namespace=confnamespace, logging_level=self.log_lvl
    )
    self.conf_collection = self.conf_storage.get_backend(confnamespace)
    self.collection = self.storage.get_backend(namespace)

    if amqp_pub is None:
        self.amqp_pub = AmqpPublisher(
            get_default_amqp_connection(), self.logger)
    else:
        # BUGFIX: a caller-supplied publisher used to be silently dropped,
        # leaving self.amqp_pub unset; store the injected instance instead.
        self.amqp_pub = amqp_pub

    self.reset_stealthy_event_duration = time()
    self.reset_stats()
def init_callback():
    """Plugin entry point: open the shared object storage as root."""
    log('Init plugin')
    from canopsis.old.storage import get_storage
    from canopsis.old.account import Account
    global storage
    root_account = Account(user="******", group="root")
    storage = get_storage(account=root_account, namespace='object')
def __init__(self, acknowledge_on='canopsis.events', *args, **kargs):
    """Open the 'ack' storage and the backends used for acknowledgements.

    :param acknowledge_on: exchange/routing target for acknowledged events.
    """
    super(engine, self).__init__(*args, **kargs)
    self.acknowledge_on = acknowledge_on
    root = Account(user="******", group="root")
    self.storage = get_storage(namespace='ack', account=root)
    # Raw backends used by this engine.
    self.events_collection = self.storage.get_backend('events')
    self.stbackend = self.storage.get_backend('ack')
    self.objects_backend = self.storage.get_backend('object')
def __init__(self, json_path=None, *args, **kwargs):
    """Remember an optional JSON path override and open the object storage."""
    super(JSONLoaderModule, self).__init__(*args, **kwargs)
    # Only override the path when one is explicitly provided.
    if json_path is not None:
        self.json_path = json_path
    root = Account(user='******', group='root')
    self.storage = get_storage(namespace='object', account=root)
def __init__(self, *args, **kargs):
    """Open a root storage and reset derogation bookkeeping counters."""
    super(engine, self).__init__(*args, **kargs)
    root = Account(user="******", group="root")
    self.storage = get_storage(logging_level=self.logging_level, account=root)
    self.derogations = []
    self.name = kargs['name']
    # Counters for dropped vs passed events.
    self.drop_event_count = 0
    self.pass_event_count = 0
def __init__(self, *args, **kargs):
    """Initialise selector bookkeeping and per-event timing thresholds."""
    super(engine, self).__init__(*args, **kargs)
    self.selectors = []
    # Warning / critical thresholds, in seconds spent per event.
    self.thd_warn_sec_per_evt = 1.5
    self.thd_crit_sec_per_evt = 2
    root = Account(user="******", group="root")
    self.storage = get_storage(namespace='object', account=root)
def __init__(self, *args, **kwargs):
    """Open downtime/event backends, reload the downtime cache, then beat."""
    super(engine, self).__init__(*args, **kwargs)
    root = Account(user="******", group="root")
    self.storage = get_storage(namespace='downtime', account=root)
    self.dt_backend = self.storage.get_backend('downtime')
    self.evt_backend = self.storage.get_backend('events')
    # Shared downtime cache, refreshed on the engine's beat interval.
    self.cdowntime = Downtime(storage=self.storage)
    self.cdowntime.reload(delta_beat=self.beat_interval)
    self.beat()
def __init__(self, *args, **kwargs):
    """Set up database, AMQP connection and module registries."""
    super(WebServer, self).__init__(*args, **kwargs)
    self.log_name = 'webserver'
    # TODO: Replace with MongoStorage
    self.db = get_storage(account=Account(user='******', group='root'))
    self.amqp = Amqp()
    self.stopping = False
    # Registries filled as web modules / auth backends get loaded.
    self.webmodules = {}
    self.auth_backends = {}
def init():
    """Drop and recreate the configured indexes on every listed collection.

    Iterates ``collections_indexes`` (module-level mapping of collection name
    to index specs), dropping all existing indexes before re-creating each
    configured one.
    """
    from canopsis.old.account import Account
    from canopsis.old.storage import get_storage
    storage = get_storage(
        account=Account(user="******", group="root"), namespace='object')
    for collection in collections_indexes:
        # Hoist the backend lookup: the original re-fetched it per index.
        backend = storage.get_backend(collection)
        backend.drop_indexes()
        for index in collections_indexes[collection]:
            backend.ensure_index(index)
        logger.info(" + {} Indexe(s) recreated for collection {}".format(
            len(collections_indexes[collection]), collection))
def __init__(self, *args, **kargs):
    """Open the events storage and the perfdata managers."""
    super(engine, self).__init__(*args, **kargs)
    root = Account(user="******", group="root")
    self.storage = get_storage(namespace='events', account=root)
    self.manager = PerfData()
    self.perf_data = PerfDataUtils()
def _init(app):
    """
    Wire logging, configuration and beaker-backed sessions into *app*,
    then auto-import every enabled webservice.

    For each configured webservice, run exports_v3 if function exists.

    Expected configuration:

        [webservices]
        wsname=0|1
        other_wsname=0|1

    0: skip webservice
    1: load webservice

    :returns: (app, api) tuple.
    """
    # File logging for the web server.
    logfile_handler = logging.FileHandler(
        os.path.join(root_path, 'var/log/webserver.log'))
    app.logger.addHandler(logfile_handler)
    app.logger.setLevel(logging.INFO)

    configuration = os.path.join(root_path, 'etc/webserver.conf')
    conf = Configuration.load(configuration, Ini)
    webservices = conf.get('webservices')

    # Deferred imports: only needed once the server actually initialises.
    from beaker.middleware import SessionMiddleware
    from flask.sessions import SessionInterface
    from canopsis.old.account import Account
    from canopsis.old.storage import get_storage

    db = get_storage(account=Account(user='******', group='root'))
    cfg_session = conf.get('session', {})
    # Sessions are persisted in MongoDB, in a '<db>.beaker' collection.
    session_opts = {
        'session.type': 'mongodb',
        'session.cookie_expires': int(cfg_session.get('cookie_expires', 300)),
        'session.url': '{0}.beaker'.format(db.uri),
        'session.secret': cfg_session.get('secret', 'canopsis'),
        'session.lock_dir': cfg_session.get('data_dir'),
    }

    class BeakerSessionInterface(SessionInterface):
        # Bridge Flask's session API onto the beaker session in the WSGI env.
        def open_session(self, app, request):
            return request.environ['beaker.session']

        def save_session(self, app, session, response):
            session.save()

    app.wsgi_app = SessionMiddleware(app.wsgi_app, session_opts)
    app.session_interface = BeakerSessionInterface()

    api = Api(app)
    _auto_import(app, api, webservices)

    return app, api
def __init__(self, json_path=None, *args, **kwargs):
    """Set up migration logging, resolve the JSON path, open object storage.

    :param json_path: explicit path to the JSON fixtures; defaults to
        DEFAULT_JSON_PATH (user-expanded) when omitted.
    """
    super(JSONLoaderModule, self).__init__(*args, **kwargs)
    self.logger = Logger.get('migrationmodule', MigrationModule.LOG_PATH)
    if json_path is None:
        self.json_path = os.path.expanduser(DEFAULT_JSON_PATH)
    else:
        self.json_path = json_path
    root = Account(user='******', group='root')
    self.storage = get_storage(namespace='object', account=root)
def _init(app):
    """
    Wire logging, configuration and beaker-backed sessions into *app*,
    then auto-import every enabled webservice.

    For each configured webservice, run exports_v3 if function exists.

    Expected configuration:

        [webservices]
        wsname=0|1
        other_wsname=0|1

    0: skip webservice
    1: load webservice

    :returns: (app, api) tuple.
    """
    # File logging for the web server.
    logfile_handler = logging.FileHandler(
        os.path.join(root_path, 'var/log/webserver.log'))
    app.logger.addHandler(logfile_handler)
    app.logger.setLevel(logging.INFO)

    configuration = os.path.join(root_path, 'etc/webserver.conf')
    conf = Configuration.load(configuration, Ini)
    webservices = conf.get('webservices')

    # Deferred imports: only needed once the server actually initialises.
    from beaker.middleware import SessionMiddleware
    from flask.sessions import SessionInterface
    from canopsis.old.account import Account
    from canopsis.old.storage import get_storage

    db = get_storage(account=Account(user='******', group='root'))
    cfg_session = conf.get('session', {})
    # Sessions are persisted in MongoDB, in a '<db>.beaker' collection.
    session_opts = {
        'session.type': 'mongodb',
        'session.cookie_expires': int(cfg_session.get('cookie_expires', 300)),
        'session.url': '{0}.beaker'.format(db.uri),
        'session.secret': cfg_session.get('secret', 'canopsis'),
        'session.lock_dir': cfg_session.get('data_dir'),
    }

    class BeakerSessionInterface(SessionInterface):
        # Bridge Flask's session API onto the beaker session in the WSGI env.
        def open_session(self, app, request):
            return request.environ['beaker.session']

        def save_session(self, app, session, response):
            session.save()

    app.wsgi_app = SessionMiddleware(app.wsgi_app, session_opts)
    app.session_interface = BeakerSessionInterface()

    api = Api(app)
    _auto_import(app, api, webservices)

    return app, api
def __init__(self, engine, name, *args, **kwargs):
    """Create a named lock bound to *engine*, backed by the 'lock' storage."""
    super(Engine.Lock, self).__init__()
    self.name = name
    # Lock documents are keyed by '<engine type>.<lock name>'.
    self.lock_id = '{0}.{1}'.format(engine.etype, name)
    root = Account(user='******', group='root')
    self.storage = get_storage(
        namespace='lock',
        logging_level=engine.logging_level,
        account=root
    ).get_backend()
    self.engine = engine
    self.lock = {}
def __init__(self, engine, name, *args, **kwargs):
    """Named distributed lock for *engine*, persisted in the 'lock' namespace."""
    super(Engine.Lock, self).__init__()
    self.name = name
    self.lock_id = '{0}.{1}'.format(engine.etype, name)
    admin = Account(user='******', group='root')
    lock_storage = get_storage(
        namespace='lock',
        logging_level=engine.logging_level,
        account=admin
    )
    self.storage = lock_storage.get_backend()
    self.engine = engine
    self.lock = {}
def __init__(self, json_path=None, *args, **kwargs):
    """Configure migration logging, pick the JSON source path, open storage."""
    super(JSONLoaderModule, self).__init__(*args, **kwargs)
    self.logger = Logger.get('migrationmodule', MigrationModule.LOG_PATH)
    # Explicit path wins; otherwise expand the packaged default.
    self.json_path = (
        json_path if json_path is not None
        else os.path.expanduser(DEFAULT_JSON_PATH)
    )
    admin = Account(user='******', group='root')
    self.storage = get_storage(account=admin, namespace='object')
def storage_connection(self, namespace):
    '''
    Get the connection to the Canopsis database.

    :param namespace: the collection name.
    :return: a connection.
    :rtype: Mongo Connection.
    '''
    # NOTE(review): a zero-argument lambda is passed as `account`, while the
    # sibling implementation passes an Account instance directly. Confirm
    # that get_storage accepts a callable here — otherwise this looks like
    # a bug and should be `account=self.root_account`.
    connection = get_storage(
        namespace=namespace, account=lambda: self.root_account
    ).get_backend()
    return connection
def is_component_problem(event):
    """Return True when *event* targets a resource whose parent component
    is itself in a non-OK state."""
    # Only resource events with a non-zero state can qualify.
    if not event.get('resource', False) or event['state'] == 0:
        return False
    entities = get_storage(
        namespace='entities',
        account=Account(user='******', group='root')).get_backend()
    component = entities.find_one({
        'type': 'component',
        'name': event['component']
    })
    return bool(component and 'state' in component and component['state'] != 0)
def is_component_problem(event):
    """True if the event's resource belongs to a component in a problem state."""
    if event.get(Event.RESOURCE, '') and event['state'] != 0:
        entities = get_storage(
            namespace='entities',
            account=Account(user='******', group='root')).get_backend()
        comp = entities.find_one({
            'type': 'component',
            'name': event[Event.COMPONENT]
        })
        if comp and 'state' in comp and comp['state'] != 0:
            return True
    return False
def setUp(self):
    """Build a fixture check event and open the 'ack' backend."""
    root = Account(user="******", group="root")
    self.storage = get_storage(namespace='object', account=root)
    self.ack = self.storage.get_backend('ack')
    # Synthetic resource check event in error state.
    self.event = {
        "connector": "unit-test",
        "connector_name": "canopsis",
        "event_type": "check",
        "source_type": "resource",
        "component": "ack-test-event",
        "resource": "error-test-event",
        "state": 1,
        "state_type": 1,
        "output": "ERROR-UNITTEST",
    }
    self.rk = get_rk(self.event)
def is_host_acknowledged(event):
    """Return True when the event's component carries a host-level ack."""
    if not is_component_problem(event):
        return False
    entities = get_storage(
        namespace='entities',
        account=Account(user='******', group='root')).get_backend()
    # A host-level ack has resource=None on the component.
    ack = entities.find_one({
        'type': 'ack',
        'component': event['component'],
        'resource': None
    })
    return bool(ack)
def is_host_acknowledged(event):
    """Check for a component-wide acknowledgement covering this event's host."""
    if is_component_problem(event):
        entities = get_storage(
            namespace='entities',
            account=Account(user='******', group='root')).get_backend()
        # resource=None marks an ack that covers the whole component.
        query = {
            'type': 'ack',
            'component': event[Event.COMPONENT],
            'resource': None
        }
        if entities.find_one(query):
            return True
    return False
def pre_run(self):
    """Open the crecord storage, release the dispatcher lock, start beating."""
    # Load crecords from database
    root = Account(user="******", group="root")
    self.storage = get_storage(namespace='object', account=root)
    self.backend = self.storage.get_backend('object')
    self.logger.info('Release crecrord dispatcher lock')
    self.Lock.release('load_crecords', self.backend)
    self.ha_engine_triggers = {}
    self.beat()
def storage_connection(self, namespace):
    '''
    Get the connection to the Canopsis database.

    :param namespace: the collection name.
    :return: a connection.
    :rtype: Mongo Connection.
    '''
    root = Account(user="******", group="root")
    return get_storage(namespace=namespace, account=root).get_backend()
def setUpClass(cls):
    """One-time fixture: logger, storage, status-management config and AMQP."""
    # Console logger for the test class.
    cls.logger = logging.getLogger('TF_Archiver')
    cls.logger.setLevel(LOGGING_LEVEL)
    stdout_handler = logging.StreamHandler(sys.stdout)
    stdout_handler.setLevel(LOGGING_LEVEL)
    stdout_handler.setFormatter(
        logging.Formatter(
            '%(asctime)s [%(name)s] [%(levelname)s] %(message)s'
        )
    )
    cls.logger.addHandler(stdout_handler)

    cls.logger.debug(u' + Init TF_Archiver on {}'.format(NAMESPACE))
    cls.account = Account(user='******', group='root')
    cls.logger.debug(u' + Get storage')
    cls.storage = get_storage(namespace=NAMESPACE,
                              logging_level=LOGGING_LEVEL)
    cls.collection = cls.storage.get_backend('events')

    # NOTE(review): passing namespace= to find() looks storage-specific
    # — confirm against the storage backend's API.
    cls.default_conf = cls.collection.find(
        {'crecord_type': 'statusmanagement'},
        namespace='object'
    )
    if cls.default_conf.count():
        cls.default_conf = cls.default_conf[0]
    else:
        # No stored configuration: fall back to built-in defaults.
        cls.default_conf = {
            '_id': 'statusmanagement',
            'crecord_type': 'statusmanagement',
            'restore_event': True,
            'bagot_time': 3600,
            'bagot_freq': 10,
            'stealthy_time': 300,
            'stealthy_show': 300
        }

    cls.amqp = Amqp(logging_level=LOGGING_LEVEL, logging_name='Amqp')
def get(schema_id):
    """
    Get schema from its ID.

    Looks in the database only when the schema isn't already cached.

    :param schema_id: Schema identifier (value of _id field in Mongo
        document).
    :type schema_id: str
    :raises NoSchemaError: when no document matches *schema_id*.
    :returns: schema field of Mongo document.
    """
    try:
        return cache[schema_id]
    except KeyError:
        pass
    backend = get_storage('schemas',
                          account=Account(user='******',
                                          group='root')).get_backend()
    doc = backend.find_one(schema_id)
    del backend
    if not doc:
        raise NoSchemaError(schema_id)
    cache[schema_id] = doc['schema']
    return cache[schema_id]
from canopsis.pbehavior.manager import PBehaviorManager from canopsis.task.core import register_task from canopsis.event import Event from canopsis.old.account import Account from canopsis.old.storage import get_storage from datetime import datetime, timedelta from icalendar import Event as vEvent ctxmgr = Context() #: default context manager pbmgr = PBehaviorManager() #: default pbehavior manager events = get_storage( namespace='events', account=Account(user='******', group='root') ).get_backend() DOWNTIME = 'downtime' #: downtime pbehavior value DOWNTIME_QUERY = PBehaviorManager.get_query(behaviors=DOWNTIME) @register_task def event_processing( engine, event, context=None, manager=None, logger=None, **kwargs ): """Process input event. :param dict event: event to process. :param Engine engine: engine which consumes the event.
def __init__(self, *args, **kwargs):
    """Open the 'object' storage used by this engine."""
    super(engine, self).__init__(*args, **kwargs)
    owner = Account(user='******')
    self.store = get_storage('object', account=owner)
import logging
import traceback

# Verbose root logging for the whole test run.
logging.basicConfig(
    format=r"%(asctime)s [%(process)d] [%(name)s] [%(levelname)s] %(message)s",
    datefmt=r"%Y-%m-%d %H:%M:%S",
    level=logging.DEBUG)

import wsgi_webserver

from webtest import TestApp

from canopsis.old.account import Account
from canopsis.old.storage import get_storage

# Fixture: look up the test account's authkey straight from storage.
storage = get_storage(namespace='object')

user = '******'
pwd = 'root'
shadow = Account().make_shadow('root')
crypted = Account().make_tmp_cryptedKey(shadow=shadow)
authkey = storage.get(
    'account.%s' % user,
    account=Account(user='******')).data['authkey']

# WSGI app under test.
app = TestApp(wsgi_webserver.app)


def quit(code=0):
    # Tear down webservices before exiting the test process.
    # NOTE(review): relies on `sys` being imported elsewhere in this file.
    wsgi_webserver.unload_webservices()
    sys.exit(code)
def pre_run(self):
    """Open the 'events' storage before the engine main loop starts."""
    root = Account(user="******", group="root")
    self.storage = get_storage(namespace='events', account=root)
def __init__(self, *args, **kwargs):
    """Initialise migration logging and the object-namespace storage."""
    super(IndexesModule, self).__init__(*args, **kwargs)
    self.logger = Logger.get('migrationmodule', MigrationModule.LOG_PATH)
    admin = Account(user='******', group='root')
    self.storage = get_storage(account=admin, namespace='object')