def create_event_writer(config, process_safe):
    use_hec = scutil.is_true(config.get(tac.use_hec))
    use_raw_hec = scutil.is_true(config.get(tac.use_raw_hec))
    if use_hec or use_raw_hec:
        # if HEC is in use, let each worker process/thread create its own
        # event writer
        event_writer = None
    else:
        event_writer = ew.create_event_writer(config, process_safe)
        event_writer.start()
    return event_writer
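A minimal usage sketch of this factory follows, assuming scutil is splunktalib.common.util, ew is splunktalib.event_writer, and tac.use_hec / tac.use_raw_hec name boolean config keys; the key names and values below are illustrative only.

# Illustrative caller; the config keys are assumptions, not the actual
# constants behind tac.use_hec / tac.use_raw_hec.
config = {
    "use_hec": "0",
    "use_raw_hec": "0",
    # ... other settings expected by ew.create_event_writer ...
}

writer = create_event_writer(config, process_safe=True)
if writer is None:
    # HEC is enabled: each worker process/thread creates its own writer.
    pass
else:
    # A shared writer was created and has already been started.
    pass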
Example #2
def create_data_loader_mgr(config):
    """
    create a data loader with a default event_writer and job_scheduler
    """

    import splunktalib.event_writer as ew
    import splunktalib.schedule.scheduler as sched

    writer = ew.create_event_writer(
        config, scutil.is_true(config.get("use_multiprocess")))
    scheduler = sched.Scheduler()
    loader_mgr = DataLoaderManager(config, scheduler, writer)
    return loader_mgr
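A usage sketch, assuming scutil is splunktalib.common.util and that DataLoaderManager is defined in the surrounding module; the config below is illustrative only.

config = {
    "use_multiprocess": "false",  # string flag parsed by scutil.is_true
    # ... other settings expected by the event writer and scheduler ...
}

loader_mgr = create_data_loader_mgr(config)
# How jobs are registered and run depends on the DataLoaderManager class
# defined alongside this factory.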
Example #3
    def start(self):
        if self._started:
            return
        self._started = True

        self._timer_queue.start()

        process_safe = self._use_multiprocess()
        logger.info("Use multiprocessing=%s", process_safe)

        # One shared event writer serves all loaders; its settings come from
        # the first task config.
        event_writer = ew.create_event_writer(self._task_configs[0],
                                              process_safe)
        event_writer.start()

        # Queue used to signal the data loaders to shut down.
        tear_down_q = self._create_tear_down_queue(process_safe)

        loaders = []
        for task in self._task_configs:
            task[ggc.event_writer] = event_writer
            loader = GoogleConcurrentDataLoader(task, tear_down_q,
                                                process_safe)
            loader.start()
            loaders.append(loader)

        logger.info("GoogleDataLoaderManager started")
        _wait_for_tear_down(self._wakeup_queue, None)
        logger.info("GoogleDataLoaderManager got stop signal")

        for loader in loaders:
            logger.info("Notify loader=%s", loader.name)
            loader.stop()

        event_writer.tear_down()
        self._timer_queue.tear_down()

        if self._mgr is not None:
            self._mgr.shutdown()

        logger.info("GoogleDataLoaderManager stopped")