def __init__(self, connString=None, perf='no', batch='no', mysql_engine=None, **kw):
    """Initialize the analyzer and open a SQLAlchemy-backed session.

    @type connString: string
    @param connString: SQLAlchemy connection string - REQUIRED
    @type mysql_engine: string
    @param mysql_engine: optional MySQL storage engine, only applied when
        the DSN dialect is 'mysql'
    @raise ValueError: if connString is not supplied
    @raise RuntimeError: if the database connection cannot be established

    NOTE(review): perf/batch are accepted here but not referenced in this
    body - presumably consumed by subclasses or kept for signature
    compatibility; confirm before removing.
    """
    BaseAnalyzer.__init__(self, **kw)
    # Validate the required argument before doing any other work.
    if connString is None:
        raise ValueError("connString is required")
    dialect = dsn_dialect(connString)
    # Per-dialect keyword options passed through to the schema initializer.
    _kw = {dialect: {}}
    if dialect == 'mysql':
        # mySQL-specific options
        if mysql_engine is not None:
            _kw[dialect]['mysql_engine'] = mysql_engine
    # This mixin adds a class member "self.session" after initialization.
    # This is the session handler that the code logic uses for queries
    # and other DB interaction. The arg "initializeToPegasusDB" is
    # a function from the stampede_schema module.
    try:
        SQLAlchemyInit.__init__(self, connString, initializeToPegasusDB, **_kw)
    except exc.OperationalError as e:
        # "as e" replaces the Python-2-only "except X, e" comma syntax;
        # it is valid on Python 2.6+ and Python 3 alike.
        self.log.error('init', msg='Connection String %s %s' % (connString, ErrorStrings.get_init_error(e)))
        raise RuntimeError
def __init__(self, connString=None, perf='no', batch='no', mysql_engine=None, **kw):
    """Initialize the dashboard analyzer and open a SQLAlchemy-backed session.

    @type connString: string
    @param connString: SQLAlchemy connection string - REQUIRED
    @type mysql_engine: string
    @param mysql_engine: optional MySQL storage engine, only applied when
        the DSN dialect is 'mysql'
    @raise ValueError: if connString is not supplied
    @raise RuntimeError: if the database connection cannot be established

    NOTE(review): perf/batch are accepted here but not referenced in this
    body - presumably consumed by subclasses or kept for signature
    compatibility; confirm before removing.
    """
    BaseAnalyzer.__init__(self, **kw)
    # Validate the required argument before doing any other work.
    if connString is None:
        raise ValueError("connString is required")
    dialect = dsn_dialect(connString)
    # Per-dialect keyword options passed through to the schema initializer.
    _kw = {dialect: {}}
    if dialect == 'mysql':
        # mySQL-specific options
        if mysql_engine is not None:
            _kw[dialect]['mysql_engine'] = mysql_engine
    # This mixin adds a class member "self.session" after initialization.
    # This is the session handler that the code logic uses for queries
    # and other DB interaction. The arg "initializeToDashboardDB" is
    # a function from the stampede_schema module.
    try:
        SQLAlchemyInit.__init__(self, connString, initializeToDashboardDB, **_kw)
    except exceptions.OperationalError as e:
        # "as e" replaces the Python-2-only "except X, e" comma syntax.
        # Message format aligned with the sibling stampede initializer:
        # include the connection string so failures are diagnosable.
        self.log.error('init', msg='Connection String %s %s' % (connString, ErrorStrings.get_init_error(e)))
        raise RuntimeError
def finish(self):
    """Flush pending batched work, disconnect, and optionally log perf stats.

    Calls the base-class finish hook first; when batching is enabled a
    final hard_flush() pushes any buffered rows before the connection is
    dropped. When perf tracking is on, timing statistics accumulated
    during the run are logged.
    """
    BaseAnalyzer.finish(self)
    if self._batch:
        self.log.info('finish', msg='Executing final flush')
        self.hard_flush()
    self.disconnect()
    if self._perf:
        run_time = time.time() - self._start_time
        # Guard the mean: a run with zero inserts would otherwise raise
        # ZeroDivisionError while reporting performance.
        if self._insert_num:
            mean_time = self._insert_time / self._insert_num
        else:
            mean_time = 0
        self.log.info("performance", insert_time=self._insert_time,
                      insert_num=self._insert_num,
                      total_time=run_time,
                      run_time_delta=run_time - self._insert_time,
                      mean_time=mean_time)
def __init__(self, host="localhost", port=27017, database='application', collection='netlogger', indices="", datetime='yes', intvals="", floatvals="", event_filter="", user="", password="", batch=0, perf=None, **kw): BaseAnalyzer.__init__(self, _validate=True, **kw) # map for converting values self._convert = {} # mongo database and collection self.db_name, self.coll_name = database, collection # connect try: self.connection = pymongo.Connection(host=host, port=port) except ConnectionFailure: raise ConnectionException("Couldn't connect to DB " "at %s:%d" % (host, port)) # create/use database, by retrieving it if self._dbg: self.log.debug("init.database_name", value=self.db_name) self.database = self.connection[self.db_name] # if authentication is on, use it if user != "": success = self.database.authenticate(user, password) if not success: raise ConnectionException( "Could not authenticate to " "database=%s, collection=%s as user '%s'" % (self.db_name, self.coll_name, user)) # create/use collection, by retrieving it if self._dbg: self.log.debug("init.collection_name", value=self.coll_name) self.collection = self.database[self.coll_name] # ensure indexes are set index_fields = indices.split(",") for field in index_fields: field = field.strip() if not field or field == "^": continue if self._dbg: self.log.debug("init.index", value=field) if field[0] == '^': unique = True field = field[1:] else: unique = False self.collection.ensure_index(field, unique=unique) # datetime flag self._datetime = util.as_bool(datetime) # Add numeric values to conversion map if intvals.strip(): self._convert.update(dict.fromkeys(intvals.split(','), int)) if floatvals.strip(): self._convert.update(dict.fromkeys(floatvals.split(','), float)) # filter, if given self._event_re = None if event_filter: self._event_re = re.compile(event_filter) # batch, if requested if batch: self._batch = int(batch) self._curbatch = [] self._batchlen = 0 else: self._batch = 0