Example #1
def test_logging5():
    reset_unittests()
    set_config(standard_logging_redirect=False)
    x = logging.getLogger()
    x.info("foobar")
    assert UNIT_TESTS_STDERR == []
    assert UNIT_TESTS_JSON == []
    assert UNIT_TESTS_STDOUT == []
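The assertions above hold because standard_logging_redirect=False turns off the capture of standard-library logging calls. As a counterpart, here is a minimal sketch (my own illustration; that the redirect defaults to on is an assumption suggested by the empty outputs asserted above) of the redirected behavior:

import logging

import mflog

# With the redirect enabled, records emitted through the standard
# logging module are routed into mflog's outputs (in these tests,
# INFO-level messages end up on stdout).
mflog.set_config(standard_logging_redirect=True)
logging.getLogger("some.module").info("foobar")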
Example #2
def test_json_only_keys1():
    reset_unittests()
    set_config(json_only_keys=["extra_context_key1", "extra_context_key2"])
    x = get_logger("foo.bar")
    x = x.bind(k1=1, k2="bar")
    x.info("foo", k1=2, k3=2)
    assert UNIT_TESTS_STDERR == []
    assert UNIT_TESTS_JSON == []
    _test_stdxxx(UNIT_TESTS_STDOUT, "INFO", "foo", "{k1=2 k2=bar k3=2}")
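Note that the two keys configured in json_only_keys do not occur in this event, so the stdout rendering is unchanged. A short sketch of the intended effect (my reading of the option, not part of the test: keys listed there are assumed to be kept in the JSON output but hidden from the human-readable stdout/stderr rendering):

import mflog

mflog.set_config(json_only_keys=["request_id"])
log = mflog.get_logger("foo.bar")
log.info("hello", request_id="abc123", user="bob")
# expected stdout: ... hello {user=bob}   (request_id hidden)
# expected JSON output: both request_id and user are present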
Example #3
def test_extra_context():
    reset_unittests()
    set_config(extra_context_func=extra_context)
    x = get_logger("foo.bar")
    x = x.bind(k1=1, k2="bar")
    x.info("foo", k1=2, k3=2)
    assert UNIT_TESTS_STDERR == []
    assert UNIT_TESTS_JSON == []
    _test_stdxxx(
        UNIT_TESTS_STDOUT, "INFO", "foo",
        "{extra_context_key1=extra_context_value1 "
        "extra_context_key2=extra_context_value2 k1=2 k2=bar k3=2}")
Example #4
def main():
    parser = argparse.ArgumentParser(description=DESCRIPTION)
    parser.add_argument("--transform-func",
                        default="jsonlog2elasticsearch.no_transform",
                        help="python function to transform each json line")
    parser.add_argument("--debug",
                        action="store_true",
                        help="force debug mode")
    parser.add_argument("--index-override-func",
                        default="no",
                        help="python function to override the ES_INDEX value")
    parser.add_argument("ES_HOST", help="ES hostname/ip")
    parser.add_argument("ES_PORT", help="ES port", type=int)
    parser.add_argument("ES_INDEX", help="ES index name", type=str)
    parser.add_argument("LOG_PATH", help="json log file fullpath")
    args = parser.parse_args()
    if args.debug:
        set_config(minimal_level="DEBUG")
    if not os.path.isfile(args.LOG_PATH):
        touch(args.LOG_PATH)
    transform_func = get_transform_func(args.transform_func)
    if args.index_override_func == "no":
        index_func = functools.partial(default_index_func, args.ES_INDEX)
    else:
        index_func = get_index_func(args.index_override_func)
    pygtail.Pygtail._is_new_file = patched_is_new_file
    pygtail.Pygtail._update_offset_file = patched_update_offset_file
    pt = pygtail.Pygtail(filename=args.LOG_PATH, read_from_end=True)
    es = elasticsearch.Elasticsearch(hosts=[{
        "host": args.ES_HOST,
        "port": args.ES_PORT,
        "use_ssl": False
    }])
    signal.signal(signal.SIGTERM, signal_handler)
    LOG.info("started")
    while RUNNING:
        while True:
            try:
                line = pt.next()
            except StopIteration:
                break
            except FileNotFoundError:
                touch(args.LOG_PATH)
                break
            else:
                if process(line, transform_func, index_func):
                    commit(es)
        commit(es, True)
        LOG.debug("sleeping %i seconds..." % SLEEP_AFTER_EACH_ITERATION)
        time.sleep(SLEEP_AFTER_EACH_ITERATION)
    LOG.info("exited")
Example #5
    def __init__(
        self,
        services_to_add: Optional[List[Service]] = None,
        port: int = 0,
        bind_host: str = "127.0.0.1",
        log_configure_logger: bool = True,
        log_minimal_level: str = "INFO",
        log_fancy_output: Optional[bool] = None,
    ):
        if services_to_add is None:
            # avoid the shared mutable-default-argument pitfall
            services_to_add = []
        self.manager: Manager = Manager()
        self.__wait_task = None
        self.services_to_add = services_to_add
        self.__shutdown_task = None
        self.port = port
        self.bind_host = bind_host
        self.log_minimal_level = log_minimal_level
        self.log_fancy_output = log_fancy_output
        self.log_configure_logger = log_configure_logger
        if self.log_configure_logger:
            mflog.set_config(fancy_output=self.log_fancy_output,
                             minimal_level=self.log_minimal_level)
        self.logger = mflog.get_logger("alwaysup.daemon")
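A hypothetical instantiation (the class name Daemon is my guess, as only its __init__ is shown above). Leaving log_fancy_output at None delegates the choice to mflog, presumably auto-detecting whether output goes to a terminal:

daemon = Daemon(port=8000, log_minimal_level="DEBUG")
daemon.logger.debug("starting", host=daemon.bind_host, port=daemon.port)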
Example #6
        except Exception:
            return "default", "default"

    app.add_middleware(
        RateLimitMiddleware,
        authenticate=auth_function,
        backend=RedisBackend(
            host=get_settings().ratelimit_redis_host,
            port=get_settings().ratelimit_redis_port,
        ),
        config={
            r"^/login/check": [Rule(minute=20)],
        },
    )
templates = Jinja2Templates(directory=os.path.join(DIR, "templates"))
mflog.set_config()


@cache
def get_auth_controller() -> AuthenticationController:
    auth_backend = make_auth_backend()
    return AuthenticationController(auth_backend)


def check_redirect_url(url: str):
    parsed_url = urlparse(url)
    if parsed_url.netloc != "":
        if not parsed_url.netloc.endswith("." + get_settings().domain):
            raise HTTPException(
                400,
                "the redirection url: %s is not in the %s domain" %
Example #7
USE_I18N = True

USE_L10N = True

USE_TZ = False


# Static files (CSS, JavaScript, Images)
# https://docs.djangoproject.com/en/2.2/howto/static-files/

STATIC_URL = '/static/'

# ADDED BY METWORK/MFSERV/DJANGO PLUGIN TEMPLATE
# TO PROVIDE PREFIX BASED ROUTING
STATIC_URL = '/apicvf_django/apicvf_django/static/'
STATIC_ROOT = os.path.join(BASE_DIR, "apicvf_django/static")

# ADDED BY METWORK/MFSERV/DJANGO PLUGIN TEMPLATE
# TO PROVIDE DEBUG FEATURE
DEBUG = (os.environ.get('MFSERV_CURRENT_PLUGIN_DEBUG', '0') == '1')
import mflog
if DEBUG:
    mflog.set_config(minimal_level="DEBUG")
else:
    mflog.set_config()

ALLOWED_HOSTS = ['localhost', '127.0.0.1', '[::1]']

if not LOCAL_VM:
    ALLOWED_HOSTS.append('apicf31-sidev')
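The if/else around set_config can be collapsed; assuming minimal_level=None means "keep the default" (which matches the parameter's own default value), this one-liner is equivalent:

mflog.set_config(minimal_level="DEBUG" if DEBUG else None)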
Example #8
def main():
    global SYSLOG_THREAD, DISCARDED
    parser = argparse.ArgumentParser(description=DESCRIPTION)
    parser.add_argument("--transform-func",
                        default="jsonsyslog2elasticsearch.no_transform",
                        help="python function to transform each json line")
    parser.add_argument("--debug",
                        action="store_true",
                        help="force debug mode")
    parser.add_argument("--syslog-port",
                        type=int,
                        default=5144,
                        help="UDP port to listen for syslog messages "
                        "(warning: the default value: 5144 is not the "
                        "default syslog port)")
    parser.add_argument("--syslog-host",
                        type=str,
                        default="0.0.0.0",
                        help="can be use to bind to a specific interface "
                        "(default: 0.0.0.0 => all)")
    parser.add_argument(
        "--internal-queue-size",
        type=int,
        default=10000,
        help="internal queue maximum size (in messages) between syslog "
        "receiver and elasticsearch upload (default: 10000)")
    parser.add_argument(
        "--internal-queue-timeout",
        type=int,
        default=3,
        help="if the internal queue is full during this interval (in seconds "
        ", default to 3), then we will drop some messages ")
    parser.add_argument("--index-override-func",
                        default="no",
                        help="python function to override the ES_INDEX value")
    parser.add_argument("ES_HOST", help="ES hostname/ip")
    parser.add_argument("ES_PORT", help="ES port", type=int)
    parser.add_argument("ES_INDEX", help="ES index name", type=str)
    args = parser.parse_args()
    if args.debug:
        set_config(minimal_level="DEBUG")
    silent_elasticsearch_logger()
    transform_func = get_transform_func(args.transform_func)
    if args.index_override_func == "no":
        index_func = functools.partial(default_index_func, args.ES_INDEX)
    else:
        index_func = get_index_func(args.index_override_func)
    es = elasticsearch.Elasticsearch(hosts=[{
        "host": args.ES_HOST,
        "port": args.ES_PORT,
        "use_ssl": False
    }])
    signal.signal(signal.SIGTERM, signal_handler)
    q = queue.Queue(maxsize=args.internal_queue_size)
    SYSLOG_THREAD = SysLogUDPReceiverThread(args.syslog_host, args.syslog_port,
                                            q, args.internal_queue_timeout)
    SYSLOG_THREAD.start()
    last_queue_size_log = time.time()
    while RUNNING:
        now = time.time()
        if (now - last_queue_size_log) > LOG_QUEUE_SIZE_EVERY:
            size = q.qsize()
            if size > 0:
                LOG.info("Internal queue size: %i" % size)
            if DISCARDED > 0:
                LOG.warning(
                    "%i lines were discarded because of full queue during "
                    "last %i seconds" % (DISCARDED, LOG_QUEUE_SIZE_EVERY))
                DISCARDED = 0
            last_queue_size_log = now
        try:
            item = q.get(block=True, timeout=1)
        except Exception:
            commit(es, True)
            continue
        f = item.find('{')
        if f == -1:
            LOG.warning("can't find { char in read message: %s => ignoring",
                        item)
            continue
        json_string = item[f:]
        if process(json_string, transform_func, index_func):
            commit(es)
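When --index-override-func is given, get_index_func loads a user-supplied callable. Its exact contract is not visible here; assuming, as the functools.partial over default_index_func suggests, that it receives the decoded document and returns an index name, a hypothetical daily-index router could look like:

import datetime

def daily_index(doc):
    # the (doc) -> index-name signature is an assumption based on how
    # index_func is built above; route each document to a per-day index
    day = doc.get("@timestamp", "")[:10] or datetime.date.today().isoformat()
    return "logs-%s" % day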
Example #9
    def __init__(self, app, raise_exception=False, debug=False):
        self.app = app
        self.raise_exception = raise_exception
        self.debug = debug
        if self.debug:
            mflog.set_config("DEBUG")