Example 1
def _run_saver(db, message_queue):
    if not (dispatchers.repository.DispatcherRepository.get_repo().has_dispatcher(message_queue) and
            dispatchers.repository.ConsumerRepository.get_repo().has_consumer(message_queue)):
        click.echo(f"Error: no dispatcher/consumer pair for {message_queue.scheme}")
        return -1

    database = _get_db_or_die(db)

    saver_repo = repository.Repository.get()
    tees = []
    for saver in saver_repo.handlers():
        # TODO: consider creating a single consumer for all save topics that can route to correct saver.
        with logging.log_exception(module_logger, to_suppress=(RuntimeError, Exception),
                                   format=lambda x: f"Error running saver service {db}: {x}"):
            consumer = dispatchers.get_topic_consumer(
                configuration.get_parsed_data_topic_name(saver.target),
                message_queue
            )

            tee = dispatchers.tee.Tee(consumer, db_dispatcher.DBDispatcher(database))
            runner = PluginRunner(repository.Repository.get(), json.loads, json.dumps)
            runner.run_with_tee(saver.target, tee)
            tees.append(tee)
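    # Block until interrupted (Ctrl-C), then fall through to stop the tees.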
    with suppress(KeyboardInterrupt):
        while True:
            time.sleep(1)
    module_logger.info("Stopping all tees...")
    for tee in tees:
        tee.stop()
Example 2
def test_log_exception_does_log_and_suppresses(capsys):
    logger = logging.get_logger("name1")
    exception_message = "This is a message"
    with logging.log_exception(logger, to_suppress=(Exception, )):
        raise Exception(exception_message)
    output = capsys.readouterr().out
    assert exception_message in output
    assert 'ERROR' in output
Example 3
def parse(name, path):
    with logging.log_exception(
            module_logger,
            format=lambda x: f"Error running parser {name}: {x}"):
        runner = PluginRunner(repository.Repository.get(),
                              snapshot_xcoder.snapshot_decoder)
        out = runner.run(name, pathlib.Path(path).read_bytes())
        click.echo(json.dumps(str(out)))
Example 4
def test_log_exception_does_log(capsys):
    logger = logging.get_logger("name")
    exception_message = "This is a message"
    with pytest.raises(Exception):
        with logging.log_exception(logger):
            raise Exception(exception_message)
    output = capsys.readouterr().out
    assert exception_message in output
    assert 'ERROR' in output
Example 5
def test_log_exception_does_not_log_on_ignore(capsys):
    logger = logging.get_logger("name2")
    typeerror_message = "this is a TypeError"
    with pytest.raises(TypeError):
        with logging.log_exception(logger, to_ignore=(TypeError, )):
            raise TypeError(typeerror_message)
    output = capsys.readouterr().out
    assert typeerror_message not in output
    assert 'ERROR' not in output
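Taken together, Examples 2, 4 and 5 pin down the contract of log_exception: by default an exception is logged at ERROR level and re-raised; types listed in to_suppress are logged and swallowed; types listed in to_ignore are re-raised without logging. A minimal sketch consistent with that contract follows. It is inferred from the call sites, not the project's actual implementation: format is assumed to accept either a callable or a plain string (Examples 6 and 7 below pass strings), and BaseException is caught so that KeyboardInterrupt can be suppressed as in Example 9.

from contextlib import contextmanager

@contextmanager
def log_exception(logger, to_suppress=(), to_ignore=(), format=None):
    """Log exceptions raised in the body, optionally suppressing them."""
    try:
        yield
    except to_ignore:
        # Ignored types propagate without being logged.
        raise
    except BaseException as e:  # BaseException so KeyboardInterrupt can be suppressed
        message = format(e) if callable(format) else (format or str(e))
        logger.error(message)
        if not isinstance(e, to_suppress):
            raise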
Example 6
def run_server_with_url(host,
                        port,
                        publish_url,
                        run_threaded=False,
                        encoder=None):
    publisher = None
    with logging.log_exception(logger=module_logger,
                               format="Could not find publisher"):
        publisher = dispatchers.repository.DispatcherRepository.get_repo().get_dispatcher(
            publish_url,
            configuration.get_config()[configuration.CONFIG_SERVER_PUBLISH_TOPICS])

    module_logger.info(f"got publisher {publisher}")

    with logging.log_exception(logger=module_logger,
                               to_suppress=(Exception, )):
        _run_server(host, port, publisher)
Example 7
    def get_handler(self, target):
        """
        returns a single parser whose target matches the request
        """
        # pop(0) raises IndexError when nothing matches; log_exception
        # suppresses it, so the method falls through and returns None.
        with log_exception(self._logger,
                           to_suppress=(IndexError, ),
                           format=f"No handler for target {target}"):
            target_parsers = [
                i for i in self.handlers() if i.target == target
            ]
            out = target_parsers.pop(0)
            self._logger.debug(f"got handler {out} for {target}")
            return out
Example 8
def snapshot_decoder(message_string):
    d = loads(message_string)
    with log_exception(
            logger,
            to_suppress=(FileNotFoundError, KeyError),
            format=lambda x: (f"Could not read MessageRecord: {repr(x)}, "
                              f"parsed_message: {d}")):
        with MessageRecord.open(d['snapshot']) as mr:
            d['snapshot'] = mr.read()
            return d

    # Reached only when the exception above was suppressed.
    return None
Example 9
def _run_all_parsers(url):
    repo = repository.Repository.get()
    waiting = []
    for parser in repo.handlers():
        t = threading.Thread(target=run_parser,
                             args=(parser.target, url),
                             kwargs=dict(blocking=False))
        t.start()
        waiting.append(t)

    with logging.log_exception(module_logger,
                               to_suppress=(KeyboardInterrupt, ),
                               format=lambda x: "stopping all parsers..."):
        while True:
            time.sleep(1)
Example 10
def run_parser(name, url, blocking=True):
    if not (dispatchers.repository.DispatcherRepository.get_repo().has_dispatcher(url) and
            dispatchers.repository.ConsumerRepository.get_repo().has_consumer(url)):
        module_logger.error(
            f"Error: no dispatcher/consumer pair for {url.scheme}")
        return -1

    with logging.log_exception(
            module_logger,
            to_suppress=(RuntimeError, Exception),
            format=lambda x: f"Error running parser {name}: {x}"):
        runner = PluginRunner(repository.Repository.get(),
                              snapshot_xcoder.snapshot_decoder, json.dumps)
        runner.run_with_uri(name, uri=url)
Example 11
    def _consume_indefinitely(self, topic, record):
        # assumes that the channel is connected and ready at this point
        while True:
            with log_exception(
                    self._logger,
                    to_suppress=(pika.exceptions.StreamLostError, ),
                    format=lambda e:
                    f"thread {threading.current_thread()} lost stream, reconnecting"
            ):
                self._logger.info(f"start consuming {topic}")
                record.channel.start_consuming()

            # start_consuming returned or the stream was lost; rebuild the
            # channel and loop around to resume consuming.
            record.channel = self._make_channel(
                topic,
                handler=record.callback,
                message_decoder=record.message_decoder)
Example 12
    def _run_consumer(self, topic):
        with self._io_list_lock:
            record = self.handlers.get(topic, None)
        if record is None:
            self._logger.error(f"handler for {topic} is none")
        else:
            with log_exception(
                    self._logger,
                    to_suppress=(Exception, RuntimeError),
                    format=lambda e:
                    f"thread {threading.current_thread()} exception while consuming: {e}"
            ):
                self._consume_indefinitely(topic, record)
        # Consuming ended (or failed); deregister the topic and mark the
        # dispatcher stopped once no handlers remain.
        with self._io_list_lock:
            self.handlers.pop(topic)
            if not self.handlers:
                self._running = False
Example 13
def run_server(host, port, database):
    with log_exception(logger, to_suppress=(ValueError, Exception)):
        run_api_server(host=host, port=port, database_url=database)
Example 14
def saver(name, path, database):
    db = _get_db_or_die(database)
    saver = functools.partial(repository.get_saver(name).handler, db)
    with logging.log_exception(module_logger,
                               format=lambda x: f"Error running saver {name}: {x}"):
        saver(json.loads(pathlib.Path(path).read_bytes()))
Example 15
def run_server_cli(host, port, publish_url):
    with logging.log_exception(logging.get_module_logger(__file__),
                               to_suppress=(Exception, )):
        server.run_server_with_url(host or "127.0.0.1", port or 8080,
                                   publish_url)