def main():
    """
    main entry point
    """
    args = _parse_commandline()

    _initialize_logging(args.verbose)

    for address in [args.zmq_pub_socket_address, args.zmq_pull_socket_address]:
        if is_ipc_protocol(address):
            prepare_ipc_path(address)

    topic_bytes = args.topic.encode("utf-8")

    context = zmq.Context()

    pub_socket = context.socket(zmq.PUB)
    # set the high-water mark before binding so the option takes effect
    pub_socket.setsockopt(zmq.HWM, args.hwm)
    _log.info("binding pub_socket to {0}".format(args.zmq_pub_socket_address))
    pub_socket.bind(args.zmq_pub_socket_address)

    pull_socket = context.socket(zmq.PULL)
    _log.info("binding pull_socket to {0}".format(args.zmq_pull_socket_address))
    pull_socket.bind(args.zmq_pull_socket_address)

    poller = zmq.Poller()
    poller.register(pull_socket, zmq.POLLIN)

    halt_event = set_signal_handler()
    while not halt_event.is_set():

        try:
            result = dict(poller.poll())
        except zmq.ZMQError:
            instance = sys.exc_info()[1]
            if instance.errno == errno.EINTR and halt_event.is_set():
                break
            raise
        _log.debug("poller received {0}".format(result))

        if pull_socket in result and result[pull_socket] == zmq.POLLIN:

            # we expect a compressed header followed by a compressed body
            header = pull_socket.recv()
            assert pull_socket.rcvmore
            body = pull_socket.recv()
            assert not pull_socket.rcvmore

            # send out what we got in, preceded by the pub topic
            pub_socket.send(topic_bytes, zmq.SNDMORE)
            pub_socket.send(header, zmq.SNDMORE)
            pub_socket.send(body)

    _log.info("shutting down")
    pub_socket.close()
    pull_socket.close()
    context.term()
    return 0
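
# A sketch of the sending side the forwarder above expects: a PUSH socket
# connected to the forwarder's PULL address, sending a zlib-compressed header
# frame followed by a zlib-compressed body frame (zlib matches the
# decompression used by the subscribers further down). The JSON header layout
# and the helper name are illustrative assumptions, not part of this project.
import json
import zlib

import zmq


def push_one_log_entry(pull_socket_address, header_fields, body_text):
    """send a single compressed (header, body) pair to the forwarder"""
    context = zmq.Context()
    push_socket = context.socket(zmq.PUSH)
    push_socket.connect(pull_socket_address)

    compressed_header = zlib.compress(json.dumps(header_fields).encode("utf-8"))
    compressed_body = zlib.compress(body_text.encode("utf-8"))

    # two-frame message: compressed header, then compressed body
    push_socket.send(compressed_header, zmq.SNDMORE)
    push_socket.send(compressed_body)

    push_socket.close()
    context.term()
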
def main():
    """
    main entry point
    """
    returncode = 0
    args = _parse_commandline()
    context = zmq.Context()
    
    _initialize_logging()

    if is_ipc_protocol(args.data_source_address):
        prepare_ipc_path(args.data_source_address)

    pull_socket = context.socket(zmq.PULL)
    pull_socket.connect(args.data_source_address)

    halt_event = set_signal_handler()
    while not halt_event.is_set():

        try:
            line_number_bytes = pull_socket.recv()
        except zmq.ZMQError:
            instance = sys.exc_info()[1]
            if instance.errno == errno.EINTR and halt_event.is_set():
                break
            raise

        assert pull_socket.rcvmore
        line_number = int(line_number_bytes.decode("utf-8"))

        log_text_bytes = pull_socket.recv()
        assert not pull_socket.rcvmore

        # 2013-04-15 dougfort -- the logging StreamHandler really wants
        # ascii, and it goes into an endless exception loop if it
        # gets non ascii characters in utf-8
        try:
            log_text = log_text_bytes.decode("ascii")
        except Exception:
            log_text = "??? not ascii ???"
        
        _log.info("{0:08} {1}".format(line_number, log_text))

    pull_socket.close()
    context.term()

    return returncode
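
# set_signal_handler() is used by every entry point in this collection but is
# defined elsewhere in the project. A minimal sketch of the pattern it appears
# to implement, assuming it installs a SIGTERM handler that sets a
# threading.Event (the real helper may cover additional signals):
import signal
import threading


def set_signal_handler():
    """return an Event that is set when SIGTERM arrives"""
    halt_event = threading.Event()

    def _handle_sigterm(_signum, _frame):
        # setting the event lets the main loop exit; the signal also
        # interrupts blocking zmq calls, which the loops above see as EINTR
        halt_event.set()

    signal.signal(signal.SIGTERM, _handle_sigterm)
    return halt_event
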
def main():
    """
    main entry point
    """
    args = _parse_commandline()

    _initialize_logging(args.verbose)
    _log.debug("program starts")

    if is_ipc_protocol(args.push_socket_address):
        prepare_ipc_path(args.push_socket_address)

    context = zmq.Context()

    push_socket = context.socket(zmq.PUSH)
    _log.info("binding push socket to {0}".format(args.push_socket_address))
    push_socket.bind(args.push_socket_address)

    halt_event = set_signal_handler()
    line_count = 0
    _log.debug("opening {0}".format(args.source_path))
    with open(args.source_path, "r") as input_file:
        for line in input_file:
            halt_event.wait(args.interval)
            if halt_event.is_set():
                _log.warning("breaking read loop: halt_event is set")
                break
            line_count += 1
            try:
                push_socket.send(str(line_count).encode("utf-8"), zmq.SNDMORE)
                push_socket.send(line[:-1].encode("utf-8"))
            except zmq.ZMQError:
                instance = sys.exc_info()[1]
                if is_interrupted_system_call(instance) and halt_event.is_set():
                    pass
                else:
                    raise

    _log.debug("shutting down: published {0} lines".format(line_count))
    push_socket.close()
    context.term()
    return 0
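
# is_ipc_protocol(), prepare_ipc_path() and is_interrupted_system_call() are
# small helpers from this project that are not shown here. Rough sketches of
# what they plausibly do, assuming zeromq address strings such as
# "ipc:///tmp/example" or "tcp://127.0.0.1:5555" (the real helpers may differ):
import errno
import os


def is_ipc_protocol(address):
    """return True if the zeromq address uses the ipc:// transport"""
    return address.startswith("ipc://")


def prepare_ipc_path(address):
    """ensure the directory that will hold the ipc socket file exists"""
    path = address[len("ipc://"):]
    dir_name = os.path.dirname(path)
    if dir_name and not os.path.isdir(dir_name):
        os.makedirs(dir_name)


def is_interrupted_system_call(zmq_error):
    """return True if the ZMQError came from an interrupted system call"""
    return zmq_error.errno == errno.EINTR
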
def main():
    """
    main entry point
    """
    args = _parse_commandline()

    zmq_context = zmq.Context()
    log_line_pusher = LogLinePusher(zmq_context, args.log_path)

    halt_event = set_signal_handler()
    while not halt_event.is_set():
        try:
            line = sys.stdin.readline()
        except IOError:
            instance = sys.exc_info()[1]
            if instance.errno == errno.EINTR and halt_event.is_set():
                break
            raise
        if not line:
            # EOF on stdin: the upstream process has closed the pipe
            break
        log_line_pusher.push_log_line(line[:-1])

    log_line_pusher.close()
    zmq_context.term()
    return 0
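
# LogLinePusher is defined elsewhere; only its interface is visible above
# (constructed with a zmq context and a log path, then push_log_line() and
# close()). The sketch below is an assumption about what such a class could
# look like, reusing the compressed header/body framing seen in the other
# entry points; the real class and its default address may differ.
import json
import zlib

import zmq


class LogLinePusher(object):
    """push individual log lines toward the data source PULL socket"""
    def __init__(self, zmq_context, log_path,
                 push_socket_address="ipc:///tmp/data-source.socket"):
        # the default address here is purely illustrative
        self._log_path = log_path
        self._push_socket = zmq_context.socket(zmq.PUSH)
        self._push_socket.connect(push_socket_address)

    def push_log_line(self, line):
        header = {"log_path": self._log_path}
        compressed_header = zlib.compress(json.dumps(header).encode("utf-8"))
        compressed_body = zlib.compress(line.encode("utf-8"))
        self._push_socket.send(compressed_header, zmq.SNDMORE)
        self._push_socket.send(compressed_body)

    def close(self):
        self._push_socket.close()
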
def main():
    """
    main entry point
    """
    args = _parse_commandline()

    if not os.path.isdir(args.log_dir):
        os.makedirs(args.log_dir)

    _initialize_log(args)
    log = logging.getLogger("main")
    log.info("program starts")

    header_filters = _create_header_filters(args)
    body_filter = _create_body_filter(args)

    if is_ipc_protocol(args.zmq_sub_address):
        prepare_ipc_path(args.zmq_sub_address)

    identity_bytes = args.zmq_identity.encode("utf-8")

    context = zmq.Context()

    sub_socket = context.socket(zmq.SUB)
    sub_socket.connect(args.zmq_sub_address)
    sub_socket.setsockopt(zmq.IDENTITY, identity_bytes)
    sub_socket.setsockopt(zmq.SUBSCRIBE, "".encode("utf-8"))

    log_handlers = dict()
    halt_event = set_signal_handler()
    while not halt_event.is_set():

        try:
            header, body = _get_one_message(sub_socket)
        except zmq.ZMQError:
            instance = sys.exc_info()[1]
            if instance.errno == errno.EINTR and halt_event.is_set():
                break
            raise
        log.debug("received {0}".format(header))

        if not all([f(header) for f in header_filters]):
            log.debug("header does not pass filters {0}".format(header))
            continue

        if not body_filter(body):
            log.debug("body does not pass filters {0}".format(body))
            continue

        log_filename = _compute_log_filename(args, header)
        if log_filename not in log_handlers:
            log_handlers[log_filename] = _create_log_handler(args, 
                                                             log_filename)
        log_handler = log_handlers[log_filename]
        log_record = DummyLogRecord(body)
        log_handler.emit(log_record)
        log_handler.flush()

    log.info("program shutting down")
    sub_socket.close()
    context.term()

    for log_handler in log_handlers.values():
        log_handler.close()

    return 0
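
# DummyLogRecord is defined elsewhere in this project. Since the loop above
# only hands it to a logging handler's emit(), a minimal sketch is a LogRecord
# subclass carrying an already-formatted body; the record name "old_log_inn"
# and the INFO level are assumptions:
import logging


class DummyLogRecord(logging.LogRecord):
    """wrap an already-formatted log body so a logging handler can emit it"""
    def __init__(self, body):
        if isinstance(body, bytes):
            body = body.decode("utf-8", "replace")
        logging.LogRecord.__init__(self,
                                   name="old_log_inn",
                                   level=logging.INFO,
                                   pathname="",
                                   lineno=0,
                                   msg=body,
                                   args=None,
                                   exc_info=None)
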
def main():
    """
    main entry point
    """
    args = _parse_commandline()

    _initialize_logging(args.verbose)

    sub_address_list = _load_sub_list(args.sub_list_path)
    for address in sub_address_list:
        if is_ipc_protocol(address):
            prepare_ipc_path(address)

    for directory in [args.output_work_dir, args.output_complete_dir, ]:
        if not os.path.isdir(directory):
            _log.info("creating {0}".format(directory))
            os.makedirs(directory) 

    context = zmq.Context()

    poller = zmq.Poller()

    sub_socket_list = list()
    for sub_socket_address in sub_address_list:
        sub_socket = context.socket(zmq.SUB)
        sub_socket.setsockopt(zmq.SUBSCRIBE, "".encode("utf-8"))
        _log.info("connecting sub_socket to {0}".format(sub_socket_address))
        sub_socket.connect(sub_socket_address)
        poller.register(sub_socket, zmq.POLLIN)
        sub_socket_list.append(sub_socket)

    stream_writer = LogStreamWriter(args.output_prefix,
                                    args.output_suffix,
                                    args.granularity,
                                    args.output_work_dir,
                                    args.output_complete_dir)

    halt_event = set_signal_handler()
    polling_interval_milliseconds = args.polling_interval * 1000
    while not halt_event.is_set():

        try:
            result_list = poller.poll(polling_interval_milliseconds)
        except zmq.ZMQError:
            instance = sys.exc_info()[1]
            if instance.errno == errno.EINTR and halt_event.is_set():
                break
            raise

        if len(result_list) == 0:
            stream_writer.check_for_rollover()
            continue

        for sub_socket, event in result_list: 
            assert event == zmq.POLLIN, event

            # we expect topic, compressed header, compressed body
            _topic = sub_socket.recv()
            assert sub_socket.rcvmore
            compressed_header = sub_socket.recv()
            assert sub_socket.rcvmore
            compressed_data = sub_socket.recv()
            assert not sub_socket.rcvmore

            # send out what we got in
            header = zlib.decompress(compressed_header)
            data = zlib.decompress(compressed_data)

            stream_writer.write(header, data)

    _log.debug("shutting down")
    for sub_socket in sub_socket_list:
        sub_socket.close()
    context.term()
    return 0
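
# LogStreamWriter is defined elsewhere; the loop above relies only on write()
# and check_for_rollover(). A condensed sketch of one way it could work,
# assuming "granularity" is a number of seconds per output file and that a
# finished file is moved from the work directory to the complete directory
# (the real class may differ in naming, file format, and rollover rules):
import os
import time


class LogStreamWriter(object):
    def __init__(self, prefix, suffix, granularity, work_dir, complete_dir):
        self._prefix = prefix
        self._suffix = suffix
        self._granularity = granularity
        self._work_dir = work_dir
        self._complete_dir = complete_dir
        self._open_time = None
        self._filename = None
        self._file = None

    def _open_new_file(self):
        self._open_time = time.time()
        self._filename = "{0}_{1:d}.{2}".format(self._prefix,
                                                int(self._open_time),
                                                self._suffix)
        self._file = open(os.path.join(self._work_dir, self._filename), "wb")

    def check_for_rollover(self):
        """if the current file's time slice has elapsed, close it and
        move it to the complete directory"""
        if self._file is None:
            return
        if time.time() - self._open_time < self._granularity:
            return
        self._file.close()
        os.rename(os.path.join(self._work_dir, self._filename),
                  os.path.join(self._complete_dir, self._filename))
        self._file = None

    def write(self, header, data):
        if self._file is None:
            self._open_new_file()
        self._file.write(header + b"\n" + data + b"\n")
        self.check_for_rollover()
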
def main():
    """
    main entry point
    """
    args = _parse_commandline()

    _initialize_logging(args.log_path)
    log = logging.getLogger("main")
    log.info("program starts")

    config = _load_config_file(args.config_path)

    # config is a dict with keys ['global', <node-name-1>...<node-name-n>]
    # not necessarily in order
    node_names = list(config.keys())
    node_names.remove("global")
    node_names.sort()

    _constuct_aggregate_sub_socket_file(config, node_names)

    processes = list()

    # we start the processes from back to front

    # optional wormhole to nimbus.io
    if "directory_wormhole" in config["global"]:
        process = _start_directory_wormhole(args.directory_wormhole_path,
                                            config["global"])
        processes.append(process)

    # log stream writer 
    process = _start_zmq_log_stream_writer(args.old_log_inn_path,
                                           config["global"])
    processes.append(process)

    # the aggregator
    process = _start_zmq_subscription_aggregator(args.old_log_inn_path,
                                                 config["global"])
    processes.append(process)

    # optional file logger
    if "zmq_log_file_logger" in config["global"]:
        process = \
            _start_zmq_log_file_logger(
                args.old_log_inn_path,
                config["global"]["host_name"],
                config["global"]["aggregate_pub_socket_address"],
                config["global"]["zmq_log_file_logger"])
        processes.append(process)

    data_source_address = \
        config["global"]["data_source"]["push_socket_address"]

    for node_name in node_names:
        process = _start_zmq_push_pub_forwarder(args.old_log_inn_path,
                                                node_name, 
                                                config[node_name])
        processes.append(process)

        if "log_spewers" in config[node_name]:
            for log_spewer_config in config[node_name]["log_spewers"]:
                process = _start_log_spewer(args.old_log_inn_path,
                                            node_name, 
                                            config[node_name],
                                            log_spewer_config,
                                            data_source_address)
                processes.append(process)

        if "stdout_spewers" in config[node_name]:
            for stdout_spewer_config in config[node_name]["stdout_spewers"]:
                # start a stdout spewer pipelined to a stdin_to_zmq_log_proxy
                spewer_process = \
                    _start_stdout_spewer(args.old_log_inn_path,
                                         node_name, 
                                         stdout_spewer_config,
                                         data_source_address)
                proxy_process = \
                    _start_stdin_to_zmq_log_proxy(args.old_log_inn_path,
                                                  node_name, 
                                                  config[node_name],
                                                  stdout_spewer_config,
                                                  spewer_process.stdout)
                # closing spewer_process.stdout after starting proxy_process
                # is important so the spewer receives a SIGPIPE if the proxy
                # exits before the spewer does
                spewer_process.stdout.close()

                processes.append(spewer_process)
                processes.append(proxy_process)

    # start the data source last, hoping everyone has had time to hook up
    process = _start_data_source(args.old_log_inn_path,
                                 config["global"]["data_source"])
    processes.append(process)

    data_source_finished = False
    halt_event = set_signal_handler()
    start_time = time.time()
    while not halt_event.is_set():
        elapsed_time = int(time.time() - start_time)
        if elapsed_time > args.duration:
            log.info("time expired {0} seconds: stopping test".format(
                     elapsed_time))
            halt_event.set()
            break
        terminated_subprocesses = _poll_processes(processes)

        # note when the data source finishes; we report it at shutdown
        if "data_source" in terminated_subprocesses:
            log.info("data_source has terminated")
            data_source_finished = True            

        halt_event.wait(5.0)

    log.info("shutting down")
    if not data_source_finished:
        log.warning("data source has not finished")
        
    for process in processes:
        if not process.active:
            log.warning("node {0} process {1} already terminated {2}".format(
                        process.node_name,
                        process.program_name,
                        process.returncode))
        else:
            log.debug("terminating node {0} process {1}".format(
                      process.node_name,
                      process.program_name))
            process.terminate()
            _check_process_termination(process)

    log.info("program terminates with return code 0")
    return 0
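
# The _start_* helpers above wrap subprocess.Popen (the real ones return
# process objects with node_name, program_name, and active attributes). A
# minimal sketch of just the stdout-spewer / stdin-proxy pipeline they build,
# with hypothetical command lines:
import subprocess
import sys


def _start_pipeline_sketch(spewer_args, proxy_args):
    # the first process writes log lines to its stdout
    spewer_process = subprocess.Popen([sys.executable] + spewer_args,
                                      stdout=subprocess.PIPE)
    # the second process reads those lines on stdin and pushes them over zeromq
    proxy_process = subprocess.Popen([sys.executable] + proxy_args,
                                     stdin=spewer_process.stdout)
    # close the parent's copy of the pipe so the spewer receives SIGPIPE
    # if the proxy exits first
    spewer_process.stdout.close()
    return spewer_process, proxy_process
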
def main():
    """
    main entry point
    """
    args = _parse_commandline()

    _initialize_logging(args.verbose)

    sub_address_list = _load_sub_list(args.sub_list_path)
    for address in sub_address_list:
        if is_ipc_protocol(address):
            prepare_ipc_path(address)

    if is_ipc_protocol(args.zmq_pub_socket_address):
        prepare_ipc_path(args.zmq_pub_socket_address)

    context = zmq.Context()

    pub_socket = context.socket(zmq.PUB)
    # set the high-water mark before binding so the option takes effect
    pub_socket.setsockopt(zmq.HWM, args.hwm)
    _log.info("binding pub socket to {0}".format(args.zmq_pub_socket_address))
    pub_socket.bind(args.zmq_pub_socket_address)

    poller = zmq.Poller()

    sub_socket_list = list()
    for sub_socket_address in sub_address_list:
        sub_socket = context.socket(zmq.SUB)
        sub_socket.setsockopt(zmq.SUBSCRIBE, "".encode("utf-8"))
        _log.info("connecting sub_socket to {0}".format(sub_socket_address))
        sub_socket.connect(sub_socket_address)
        poller.register(sub_socket, zmq.POLLIN)
        sub_socket_list.append(sub_socket)

    halt_event = set_signal_handler()
    while not halt_event.is_set():

        try:
            result_list = poller.poll()
        except zmq.ZMQError:
            instance = sys.exc_info()[1]
            if instance.errno == errno.EINTR and halt_event.is_set():
                break
            raise

        for sub_socket, event in result_list: 
            assert event == zmq.POLLIN, event

            _log.debug("traffic on socket {0}".format(sub_socket))

            # we expect topic, compressed header, compressed body
            topic = sub_socket.recv()
            assert sub_socket.rcvmore
            header = sub_socket.recv()
            assert sub_socket.rcvmore
            body = sub_socket.recv()
            assert not sub_socket.rcvmore

            # send out what we got in
            pub_socket.send(topic, zmq.SNDMORE)
            pub_socket.send(header, zmq.SNDMORE)
            pub_socket.send(body)

    _log.debug("shutting down")
    pub_socket.close()
    for sub_socket in sub_socket_list:
        sub_socket.close()
    context.term()
    return 0
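
# _load_sub_list() is not shown above. A small sketch, assuming the sub list
# file contains one zeromq SUB address per line with blank lines ignored
# (the real loader may accept a richer format):
def _load_sub_list(sub_list_path):
    """return a list of SUB socket addresses read from a text file"""
    with open(sub_list_path, "r") as sub_list_file:
        return [line.strip() for line in sub_list_file if line.strip()]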