# NOTE(review): this module arrived collapsed onto one physical line; it is
# reformatted here with comments/docstrings added only -- code tokens unchanged.
from datetime import timedelta
from multiprocessing import Process

import falcon

from meniscus.api.version.resources import VersionResource
from meniscus import env
from meniscus.personas.common import publish_stats
from meniscus.queue import celery
from meniscus.sinks import hdfs

# Module-level logger named after this module.
_LOG = env.get_logger(__name__)


def start_up():
    """Build the WSGI application and install the celery beat schedule.

    Creates a minimal falcon API that serves the version resource at '/',
    then configures two periodic tasks on the shared celery app:

    * 'hdfs.send'     -- every ``hdfs.FREQUENCY`` seconds
    * 'stats.publish' -- every ``publish_stats.WORKER_STATUS_INTERVAL`` seconds
    """
    # Both names bound to the same falcon API instance; 'application' is the
    # conventional WSGI entry-point name -- TODO confirm something outside
    # this visible span actually reads it, as only 'api' is used below.
    application = api = falcon.API()
    api.add_route('/', VersionResource())

    # Register the periodic tasks that celery beat will fire.
    celery.conf.CELERYBEAT_SCHEDULE = {
        'hdfs': {
            'task': 'hdfs.send',
            'schedule': timedelta(seconds=hdfs.FREQUENCY)
        },
        'worker_stats': {
            'task': 'stats.publish',
            'schedule': timedelta(seconds=publish_stats.WORKER_STATUS_INTERVAL)
        },
    }
    # include blank argument to celery in order for beat to start correctly
    # (original author's note -- presumably refers to code following this
    # visible chunk that launches the celery worker; verify downstream.)
def test_should_get_logger(self):
    """env.get_logger must hand back a usable (non-None) logger object."""
    self.assertIsNotNone(env.get_logger('meniscus.env_test'))
from meniscus import env
from meniscus.correlation import correlator
from meniscus.storage import dispatch
from meniscus import transport
from meniscus.normalization.normalizer import *

# Module-level logger named after this module.
_LOG = env.get_logger(__name__)


class CorrelationInputServer(transport.ZeroMQInputServer):
    """ZeroMQ input server that hands incoming messages to the
    correlation pipeline as celery tasks."""

    def process_msg(self):
        """Pull the next message off the receiver and enqueue it as a
        ``correlate_syslog_message`` celery task.

        Enqueue failures are logged (with traceback) and swallowed so a
        single bad message cannot take the receive loop down.
        """
        msg = self._get_msg()
        try:
            # Queue the message for correlation
            correlator.correlate_syslog_message.delay(msg)
        except Exception:
            # FIX: the original log text referred to a 'persist_message'
            # task, but the task actually enqueued above is
            # correlate_syslog_message -- misleading when debugging.
            _LOG.exception(
                'unable to place correlate_syslog_message task on queue')


def new_correlation_input_server():
    """Create a correlation input server for receiving JSON messages
    from the syslog parser over ZeroMQ.
    """
    zmq_receiver = transport.new_zmq_receiver()
    return CorrelationInputServer(zmq_receiver)