Example #1
def create_data_loader(meta_configs):
    """
    Create a data loader with the default event_writer and job_scheduler.
    """

    import splunktalib.event_writer as ew
    import splunktalib.schedule.scheduler as sched

    writer = ew.EventWriter()
    scheduler = sched.Scheduler()
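    # GlobalDataLoader is assumed to be defined in the enclosing module
    # (splunktalib's data loader module) and to hand back a shared loader
    # wired to the given scheduler and event writer.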
    loader = GlobalDataLoader.get_data_loader(meta_configs, scheduler, writer)
    return loader
Example #2
def create_data_loader():
    """
    Create a data loader with the default event_writer and job_scheduler.
    """

    from splunktalib import event_writer as ew
    from splunktalib.schedule import scheduler as sched

    writer = ew.EventWriter()
    scheduler = sched.Scheduler()
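    # Same pattern as Example #1, but this variant of GlobalDataLoader.get_data_loader
    # takes only the scheduler and writer; GlobalDataLoader is again assumed to be
    # defined in the enclosing module.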
    loader = GlobalDataLoader.get_data_loader(scheduler, writer)
    return loader
Example #3
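    # Method excerpt from a KafkaDataLoaderManager-style class. It assumes the
    # enclosing module provides `ew` (an event writer module), `c` (constants),
    # `logger`, `KafkaConcurrentDataLoader`, and `_wait_for_tear_down`.
    # start() spins up the shared event writer and one concurrent loader per
    # task, then blocks until a stop signal arrives and tears everything down.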
    def start(self):
        if self._started:
            return
        self._started = True

        self._timer_queue.start()

        process_safe = self._use_multiprocess()
        logger.info("Use multiprocessing=%s", process_safe)

        event_writer = ew.EventWriter(process_safe=process_safe)
        event_writer.start()

        # Shared queue used to coordinate loader tear-down.
        tear_down_q = self._create_tear_down_queue(process_safe)

        loaders = []
        for task in self._task_configs:
            # Every task shares the same event writer instance.
            task[c.data_loader] = event_writer
            loader = KafkaConcurrentDataLoader(task, tear_down_q, process_safe)
            loader.start()
            loaders.append(loader)

        logger.info("KafkaDataLoaderManager started")
        # Block until a stop signal shows up on the wakeup queue.
        _wait_for_tear_down(self._wakeup_queue, None)
        logger.info("KafkaDataLoaderManager got stop signal")

        for loader in loaders:
            logger.info("Notify loader=%s", loader.name)
            loader.stop()

        event_writer.tear_down()
        self._timer_queue.tear_down()

        if self._mgr is not None:
            self._mgr.shutdown()

        logger.info("KafkaDataLoaderManager stopped")