def setUp(self):
    """Create the accounts and the sample payload shared by the tests."""
    # Three accounts with increasing privilege: anonymous, root, plain user.
    self.anonymous_account = Account()
    self.root_account = Account(user="******", group="root")
    self.user_account = Account(user="******", group="capensis")
    # Sample data stored and read back during the tests.
    self.data = {
        'mydata1': 'data1',
        'mydata2': 'data2',
        'mydata3': 'data3',
    }
def __init__(self, config, logger, amqp_pub):
    """Bootstrap the webcore module from a configuration mapping.

    :param config: configuration mapping with optional 'server', 'auth',
        'session' and 'webservices' sections.
    :param logger: logger used for diagnostics.
    :param amqp_pub: AMQP publisher handed to the web layer.
    :raises RuntimeError: when no auth provider is configured.
    """
    self.config = config
    self.logger = logger
    self.amqp_pub = amqp_pub

    # [server] section: debug flag, cross-domain event sending, web root.
    server = self.config.get('server', {})
    self.debug = server.get('debug', DEFAULT_DEBUG)
    self.enable_crossdomain_send_events = server.get(
        'enable_crossdomain_send_events', DEFAULT_ECSE)
    self.root_directory = os.path.expanduser(
        server.get('root_directory', DEFAULT_ROOT_DIR))

    # [auth] section: at least one provider is mandatory.
    auth = self.config.get('auth', {})
    self.providers = cfg_to_array(auth.get('providers', ''))

    if len(self.providers) == 0:
        self.logger.critical(
            'Missing providers. Cannot launch webcore module.')
        raise RuntimeError('Missing providers')

    # [session] section: cookie lifetime, signing secret, session dir.
    session = self.config.get('session', {})
    self.cookie_expires = int(
        session.get('cookie_expires', DEFAULT_COOKIES_EXPIRE))
    self.secret = session.get('secret', DEFAULT_SECRET)
    self.data_dir = session.get('data_dir', DEFAULT_DATA_DIR)

    self.webservices = self.config.get('webservices', {})

    # TODO: Replace with MongoStorage
    self.db = get_storage(account=Account(user='******', group='root'))

    # Runtime registries filled while modules/backends are loaded.
    self.stopping = False
    self.webmodules = {}
    self.auth_backends = {}
def handle_task(self, job):
    """Render and send the mail described by *job*.

    Subject and body are passed through the template engine with the
    job context, HTML entities are decoded for plain-text mails, and
    the message is handed to ``self.sendmail``.

    :param job: mapping describing sender, recipients, content and SMTP
        settings.
    :returns: whatever ``self.sendmail`` returns.
    """
    # Sender identity; defaults to the local root account.
    account = Account(
        user=job.get('user', 'root'),
        group=job.get('group', 'root'),
        mail=job.get('sender', None),
    )

    recipients = job.get('recipients', None)
    attachments = job.get('attachments', None)
    smtp_host = job.get('smtp_host', 'localhost')
    smtp_port = job.get('smtp_port', 25)
    html = job.get('html', False)

    # Render body and subject with the job context.
    template_data = job.get('jobctx', {})
    body = Template(ensure_unicode(job.get('body', '')))(template_data)
    subject = Template(ensure_unicode(job.get('subject', '')))(template_data)

    if not html:
        # Plain-text mail: decode any HTML entities left by templating.
        parser = HTMLParser()
        body = parser.unescape(body)
        subject = parser.unescape(subject)

    # Execute the task
    return self.sendmail(account, recipients, subject, body,
                         attachments, smtp_host, smtp_port, html)
def mock_get_account(_id=None):
    """Return an Account whose user is extracted from an 'account.<user>' id.

    Falls back to the anonymous account id when *_id* is empty or None.
    """
    _id = _id or 'account.anonymous'
    # Strip the 'account.' prefix to recover the user name.
    user = _id[len('account.'):]
    return Account(user=user)
def login_plain_ldap():
    """Exercise LDAP plain-password login: the right password is accepted
    (200), a wrong one refused (403)."""
    # Make sure the local copy of the account does not already exist.
    storage.remove('account.toto', account=Account(user='******'))

    # First successful login (creates the local account), then log out.
    get('/auth/toto/aqzsedrftg123;', status=[200])
    logout()

    # A bad password must be rejected; the good one accepted again.
    get('/auth/toto/tata', status=[403])
    get('/auth/toto/aqzsedrftg123;', status=[200])
    logout()
def get_collection(self, collection):
    """Return the Mongo backend for *collection*, creating and caching the
    underlying storage on first access."""
    try:
        store = self.clean_collection[collection]
    except KeyError:
        # First access: open the storage and memoize it.
        store = self.clean_collection[collection] = get_storage(
            collection, account=Account(user='******')
        )
    return store.get_backend()
def get_storage(namespace='object', account=None, logging_level=logging.INFO):
    """Return the process-wide Storage for *namespace*.

    The storage is created on first use (with an anonymous account when
    none is supplied) and cached in the module-level STORAGES registry.
    """
    global STORAGES

    storage = STORAGES.get(namespace)
    if storage is None:
        storage = Storage(
            account if account else Account(),
            namespace=namespace,
            logging_level=logging_level,
        )
        STORAGES[namespace] = storage

    return storage
def __init__(self, acknowledge_on='canopsis.events', *args, **kargs):
    """Initialise the ack engine: open the 'ack' storage as root and
    cache the backends it works with.

    :param acknowledge_on: routing key events are acknowledged on.
    """
    super(engine, self).__init__(*args, **kargs)

    # All storage access is performed as root.
    root = Account(user="******", group="root")
    self.storage = get_storage(namespace='ack', account=root)

    # Backends touched by the engine.
    self.events_collection = self.storage.get_backend('events')
    self.stbackend = self.storage.get_backend('ack')
    self.objects_backend = self.storage.get_backend('object')

    self.acknowledge_on = acknowledge_on
def test_03_Passwd(self):
    """Check the password helpers: shadow hash, plain check, temp key."""
    ACCOUNT = Account(user="******", group="capensis")
    passwd = 'root'
    ACCOUNT.passwd(passwd)

    shadow = ACCOUNT.make_shadow(passwd)
    if not ACCOUNT.check_shadowpasswd(shadow):
        raise Exception('Invalid shadow passwd ... (%s)' % shadow)

    if not ACCOUNT.check_passwd(passwd):
        raise Exception('Invalid passwd ... (%s)' % passwd)

    cryptedKey = ACCOUNT.make_tmp_cryptedKey()
    if not ACCOUNT.check_tmp_cryptedKey(cryptedKey):
        # BUG FIX: the original formatted this message with the undefined
        # name `authkey`, which would raise NameError instead of the
        # intended Exception when the check fails.
        raise Exception('Invalid cryptedKey ... (%s)' % cryptedKey)
def __init__(self, json_path=None, *args, **kwargs):
    """Migration module that loads JSON fixtures into the 'object' storage.

    :param json_path: optional path to the JSON file; when None,
        DEFAULT_JSON_PATH is used (with '~' expanded).
    """
    super(JSONLoaderModule, self).__init__(*args, **kwargs)

    self.logger = Logger.get('migrationmodule', MigrationModule.LOG_PATH)

    if json_path is None:
        self.json_path = os.path.expanduser(DEFAULT_JSON_PATH)
    else:
        self.json_path = json_path

    # Fixtures are written into the 'object' namespace as root.
    self.storage = get_storage(
        account=Account(user='******', group='root'),
        namespace='object',
    )
def _init(app):
    """
    Wire logging, configuration and beaker sessions into *app*, then
    auto-import the configured webservices.

    For each configured webservice, run exports_v3 if function exists.

    Expected configuration:

        [webservices]
        wsname=0|1
        other_wsname=0|1

    0: skip webservice
    1: load webservice

    :param app: the Flask application to configure.
    :returns: (app, api) tuple with the Flask-RESTful Api attached.
    """
    # File logging for the web server.
    logfile_handler = logging.FileHandler(
        os.path.join(root_path, 'var/log/webserver.log'))
    app.logger.addHandler(logfile_handler)
    app.logger.setLevel(logging.INFO)

    # Load the web server configuration (INI format).
    configuration = os.path.join(root_path, 'etc/webserver.conf')
    conf = Configuration.load(configuration, Ini)
    webservices = conf.get('webservices')

    # Imported here to keep module import side-effect free.
    from beaker.middleware import SessionMiddleware
    from flask.sessions import SessionInterface
    from canopsis.old.account import Account
    from canopsis.old.storage import get_storage

    db = get_storage(account=Account(user='******', group='root'))

    # Beaker sessions are persisted in Mongo, next to the main database.
    cfg_session = conf.get('session', {})
    session_opts = {
        'session.type': 'mongodb',
        'session.cookie_expires': int(cfg_session.get('cookie_expires', 300)),
        'session.url': '{0}.beaker'.format(db.uri),
        'session.secret': cfg_session.get('secret', 'canopsis'),
        'session.lock_dir': cfg_session.get('data_dir'),
    }

    class BeakerSessionInterface(SessionInterface):
        """Expose the beaker session through Flask's session interface."""

        def open_session(self, app, request):
            # Beaker already attached the session to the WSGI environ.
            return request.environ['beaker.session']

        def save_session(self, app, session, response):
            session.save()

    app.wsgi_app = SessionMiddleware(app.wsgi_app, session_opts)
    app.session_interface = BeakerSessionInterface()

    api = Api(app)
    _auto_import(app, api, webservices)

    return app, api
def init():
    """Drop and recreate the configured indexes for every collection
    listed in ``collections_indexes``."""
    from canopsis.old.account import Account
    from canopsis.old.storage import get_storage

    storage = get_storage(
        account=Account(user="******", group="root"), namespace='object')

    for collection, indexes in collections_indexes.items():
        backend = storage.get_backend(collection)
        # Wipe everything first, then rebuild each declared index.
        backend.drop_indexes()
        for index in indexes:
            backend.ensure_index(index)
        logger.info(
            " + {} Indexe(s) recreated for collection {}".format(
                len(indexes), collection))
def __init__(self, engine, name, *args, **kwargs):
    """Distributed lock helper bound to *engine*.

    Lock documents live in the 'lock' storage under the id
    '<engine.etype>.<name>'.
    """
    super(Engine.Lock, self).__init__()

    self.name = name
    self.lock_id = '{0}.{1}'.format(engine.etype, name)
    self.engine = engine
    self.lock = {}

    # Lock state is persisted in the 'lock' namespace, accessed as root.
    self.storage = get_storage(
        namespace='lock',
        logging_level=engine.logging_level,
        account=Account(user='******', group='root'),
    ).get_backend()
def __init__(self, collections=None, *args, **kwargs):
    """Migration module that purges collections.

    :param collections: explicit list of collections to purge; when None,
        the list comes from configuration (DEFAULT_COLLECTIONS fallback).
    """
    super(PurgeModule, self).__init__(*args, **kwargs)

    self.logger = Logger.get('migrationmodule', MigrationModule.LOG_PATH)
    self.config = Configuration.load(PurgeModule.CONF_PATH, Json)
    conf = self.config.get(self.CATEGORY, {})

    self.storage = Storage(account=Account(user='******', group='root'))

    if collections is None:
        self.collections = conf.get('collections', DEFAULT_COLLECTIONS)
    else:
        self.collections = collections
def test_03_Passwd(self):
    """Check the password helpers: shadow hash, plain check, temp key."""
    ACCOUNT = Account(user="******", group="capensis")
    passwd = 'root'
    ACCOUNT.passwd(passwd)

    shadow = ACCOUNT.make_shadow(passwd)
    if not ACCOUNT.check_shadowpasswd(shadow):
        raise Exception('Invalid shadow passwd ... (%s)' % shadow)

    if not ACCOUNT.check_passwd(passwd):
        raise Exception('Invalid passwd ... (%s)' % passwd)

    cryptedKey = ACCOUNT.make_tmp_cryptedKey()
    if not ACCOUNT.check_tmp_cryptedKey(cryptedKey):
        # BUG FIX: the original formatted this message with the undefined
        # name `authkey`, which would raise NameError instead of the
        # intended Exception when the check fails.
        raise Exception('Invalid cryptedKey ... (%s)' % cryptedKey)
def is_component_problem(event):
    """Return True when *event* is a resource event in a non-OK state and
    its parent component is itself in a non-OK state."""
    # Only resource events that carry a problem state are considered.
    if not event.get(Event.RESOURCE, '') or event['state'] == 0:
        return False

    backend = get_storage(
        namespace='entities',
        account=Account(user='******', group='root')).get_backend()
    component = backend.find_one({
        'type': 'component',
        'name': event[Event.COMPONENT]
    })

    # The component itself must exist and report a non-OK state.
    return bool(
        component and 'state' in component and component['state'] != 0)
def is_host_acknowledged(event):
    """Return True when the component ('host') behind *event* carries an
    acknowledgement entity."""
    if not is_component_problem(event):
        return False

    backend = get_storage(
        namespace='entities',
        account=Account(user='******', group='root')).get_backend()
    # Component-level acks have no resource attached.
    ack = backend.find_one({
        'type': 'ack',
        'component': event[Event.COMPONENT],
        'resource': None
    })
    return bool(ack)
def __init__(
    self, namespace, confnamespace='object', storage=None,
    autolog=False, amqp_pub=None, *args, **kwargs
):
    """Initialise the Archiver.

    :param namespace: event namespace; logs go to '<namespace>_log'.
    :param confnamespace: configuration namespace (default 'object').
    :param storage: optional pre-built storage; opened here when None.
    :param autolog: enable automatic logging of events.
    :param amqp_pub: optional AMQP publisher; a default one is built
        from the default connection when None.
    """
    super(Archiver, self).__init__()

    self.namespace = namespace
    self.namespace_log = namespace + '_log'

    # Bulk operation configuration
    self.last_bulk_insert_date = time()
    self.bulk_ids = []
    # How many events can be buffered
    self.bulk_amount = 500
    # What is the maximum duration until bulk insert
    self.bulk_delay = 3
    self.incoming_events = {}

    self.autolog = autolog

    self.logger.debug("Init Archiver on %s" % namespace)

    self.account = Account(user="******", group="root")

    if not storage:
        self.logger.debug(" + Get storage")
        self.storage = get_storage(
            namespace=namespace,
            logging_level=self.log_lvl
        )
    else:
        self.storage = storage

    self.conf_storage = get_storage(
        namespace=confnamespace,
        logging_level=self.log_lvl
    )
    self.conf_collection = self.conf_storage.get_backend(confnamespace)
    self.collection = self.storage.get_backend(namespace)

    # BUG FIX: the original only assigned self.amqp_pub when amqp_pub was
    # None, so a caller-supplied publisher was silently dropped and the
    # attribute was left unset.
    if amqp_pub is None:
        self.amqp_pub = AmqpPublisher(
            get_default_amqp_connection(), self.logger)
    else:
        self.amqp_pub = amqp_pub

    self.reset_stealthy_event_duration = time()
    self.reset_stats()
def get(schema_id):
    """
    Get schema from its ID.

    Will look in database if the schema isn't loaded in cache.

    :param schema_id: Schema identifier (value of _id field in Mongo
        document).
    :type schema_id: str
    :returns: schema field of Mongo document.
    :raises NoSchemaError: when no document matches *schema_id*.
    """
    try:
        # Fast path: already cached.
        return cache[schema_id]
    except KeyError:
        db = get_storage(
            'schemas',
            account=Account(user='******', group='root')).get_backend()
        doc = db.find_one(schema_id)
        del db

        if not doc:
            raise NoSchemaError(schema_id)

        schema = cache[schema_id] = doc['schema']
        return schema
def __init__(self, *args, **kwargs):
    """Initialise the engine and open the 'object' storage as root."""
    super(engine, self).__init__(*args, **kwargs)
    root = Account(user='******')
    self.store = get_storage('object', account=root)
def test_09_Remove(self):
    """Anonymous removal must be rejected; the root storage may remove."""
    # Anonymous cant remove account
    self.assertRaises(ValueError, STORAGE.remove, ACCOUNT, Account())

    # But root can ;)
    STORAGE.remove(ACCOUNT)
def test_05_Store(self):
    """Storing an account through the module-level STORAGE must not raise."""
    account = Account(user="******", group="capensis")
    STORAGE.put(account)
def test_04_authkey(self):
    """A freshly built account must expose a non-empty authkey."""
    account = Account(user="******", group="capensis")
    authkey = account.get_authkey()
    if not authkey:
        raise Exception('Invalid authkey ... (%s)' % authkey)
def pre_run(self):
    """Open the 'events' storage (as root) before the engine starts."""
    root = Account(user="******", group="root")
    self.storage = get_storage(namespace='events', account=root)
def test_01_Init(self):
    """Constructing an Account with explicit user/group must succeed."""
    account = Account(user="******", group="capensis")
# NOTE(review): this `raise` is the tail of a test method whose `def` line
# lies outside this chunk; `authkey` is not defined at this point (the
# sibling test_03 uses a local named `cryptedKey`) — presumably the wrong
# name was formatted here. TODO confirm against the full file.
raise Exception('Invalid cryptedKey ... (%s)' % authkey)


def test_04_authkey(self):
    # The account must expose a non-empty authkey.
    ACCOUNT = Account(user="******", group="capensis")
    authkey = ACCOUNT.get_authkey()
    if not authkey:
        raise Exception('Invalid authkey ... (%s)' % authkey)


def test_05_Store(self):
    # Storing the account through the module-level STORAGE must not raise.
    ACCOUNT = Account(user="******", group="capensis")
    STORAGE.put(ACCOUNT)


def test_09_Remove(self):
    # Anonymous cant remove account
    self.assertRaises(ValueError, STORAGE.remove, ACCOUNT, Account())
    # But root can ;)
    STORAGE.remove(ACCOUNT)


def test_99_DropNamespace(self):
    # Final cleanup: drop the whole namespace used by this suite.
    STORAGE.drop_namespace('unittest')


if __name__ == "__main__":
    # Run the suite against a dedicated 'unittest' namespace and write an
    # XML report under tmp/tests_report.
    STORAGE = Storage(
        Account(user="******", group="root"), namespace='unittest')
    output = root_path + "/tmp/tests_report"
    unittest.main(
        testRunner=xmlrunner.XMLTestRunner(output=output), verbosity=3)
def __init__(self, *args, **kwargs):
    """Migration module that rebuilds indexes; operates on the 'object'
    storage as root."""
    super(IndexesModule, self).__init__(*args, **kwargs)

    self.logger = Logger.get('migrationmodule', MigrationModule.LOG_PATH)
    self.storage = get_storage(
        namespace='object',
        account=Account(user='******', group='root'),
    )
# Functional-test script bootstrap: configure logging, start the WSGI web
# server under webtest, and prepare credentials for the test user.
logging.basicConfig(
    format=r"%(asctime)s [%(process)d] [%(name)s] [%(levelname)s] %(message)s",
    datefmt=r"%Y-%m-%d %H:%M:%S",
    level=logging.DEBUG)

import wsgi_webserver

from webtest import TestApp

from canopsis.old.account import Account
from canopsis.old.storage import get_storage

storage = get_storage(namespace='object')

# Test credentials: shadow hash, temporary crypted key and the stored
# authkey of the account document.
user = '******'
pwd = 'root'
shadow = Account().make_shadow('root')
crypted = Account().make_tmp_cryptedKey(shadow=shadow)
authkey = storage.get(
    'account.%s' % user,
    account=Account(user='******')).data['authkey']

# Wrap the WSGI application so requests can be issued in-process.
app = TestApp(wsgi_webserver.app)


def quit(code=0):
    """Unload the webservices and exit the process with *code*."""
    wsgi_webserver.unload_webservices()
    sys.exit(code)


def get(uri, args={}, status=None, params=None):
    """Issue a GET against the test app and print the URI.

    NOTE(review): `args={}` is a mutable default argument; `args` is unused
    in the visible part of this function — the body may continue beyond
    this chunk, so it is left untouched here.
    """
    print("Get %s" % uri)
    resp = app.get(uri, status=status, params=params)
def __init__(
    self, account, namespace='object', logging_level=logging.ERROR,
    mongo_uri=None, mongo_host="127.0.0.1", mongo_port=27017,
    mongo_userid=None, mongo_password=None, mongo_db='canopsis',
    mongo_autoconnect=True, groups=[], mongo_safe=True,
    *args, **kwargs
):
    """Build a Storage bound to *account* and *namespace*.

    Every Mongo connection parameter is first looked up in the [master]
    section of the global CONFIG; the constructor argument is only used
    as a fallback when the config entry is missing.

    NOTE(review): `groups=[]` is a mutable default argument; `groups` is
    never read in this constructor — confirm before changing.

    :param account: account used for access control on this storage.
    :param namespace: Mongo collection namespace (default 'object').
    :param mongo_autoconnect: when True, connect immediately.
    """
    super(Storage, self).__init__(*args, **kwargs)

    self.logger = logging.getLogger('Storage')
    self.logger.setLevel(logging_level)

    # Each setting: prefer CONFIG's [master] section, fall back to the
    # constructor argument when the option (or section) is absent.
    try:
        self.mongo_uri = CONFIG.get('master', 'db_uri')
    except ConfigParser.Error:
        self.mongo_uri = mongo_uri

    try:
        self.mongo_host = CONFIG.get("master", "host")
    except ConfigParser.Error:
        self.mongo_host = mongo_host

    try:
        self.mongo_port = CONFIG.getint("master", "port")
    except ConfigParser.Error:
        self.mongo_port = mongo_port

    try:
        self.mongo_db = CONFIG.get("master", "db")
    except ConfigParser.Error:
        self.mongo_db = mongo_db

    try:
        self.mongo_userid = CONFIG.get("master", "userid")
    except ConfigParser.Error:
        self.mongo_userid = mongo_userid

    try:
        self.mongo_password = CONFIG.get("master", "password")
    except ConfigParser.Error:
        self.mongo_password = mongo_password

    # Query limits: maximum fetch size and the threshold above which
    # result counting is skipped.
    try:
        self.fetch_limit = int(CONFIG.get("master", "fetch_limit"))
    except ConfigParser.Error:
        self.fetch_limit = 10000

    try:
        self.no_count_limit = int(CONFIG.get("master", "no_count_limit"))
    except ConfigParser.Error:
        self.no_count_limit = 200000

    self.mongo_safe = mongo_safe

    self.account = account
    # Root account used internally for privileged operations.
    self.root_account = Account(user="******", group="root")

    self.namespace = namespace
    self.backend = None

    # GridFS namespace for binary payloads.
    self.gridfs_namespace = "binaries"

    self.logger.debug("Object initialised.")

    self.backend = {}

    self.connected = False
    if mongo_autoconnect:
        self.connect()