Exemplo n.º 1
0
def config_logger(*logger_names, log_dir=BASE_LOG_FOLDER):
    """
    Configure the base logger to log to the console plus daily-rotated ``debug.log`` /
    ``error.log`` files inside ``log_dir``, then mirror that config onto ``logger_names``.

    Used to allow isolated parts of this project to easily change the log output folder, e.g. allow Django
    management commands to change the logs folder to ``crons/``

    Currently only used by :class:`payments.management.CronLoggerMixin`

    Usage:

    >>> config_logger('someapp', 'otherlogger', 'mylogger', log_dir='/full/path/to/log/folder')

    :param str logger_names: List of logger names to replace logging config for (see LOGGER_NAMES)
    :param str log_dir:      Fully qualified path. Set each logger's timed_file log directory to this
    :return: :class:`logging.Logger` instance of BASE_LOGGER
    """
    helper = LogHelper(BASE_LOGGER, formatter=LOG_FORMATTER, handler_level=logging.DEBUG)
    # Wipe any handlers already attached to the base logger, otherwise re-configuring
    # would result in double/triple logging of each message.
    helper.log.handlers.clear()
    helper.add_console_handler(level=CONSOLE_LOG_LEVEL)

    # Daily-rotated file handlers (up to 14 days of backups kept).
    for log_name, file_level in (('debug.log', DBGFILE_LEVEL), ('error.log', ERRFILE_LEVEL)):
        helper.add_timed_file_handler(
            os.path.join(log_dir, log_name), when='D', interval=1, backups=14, level=file_level
        )

    base_logger = helper.get_logger()

    # Use the same logging configuration for all privex modules
    helper.copy_logger(*logger_names)

    return base_logger
Exemplo n.º 2
0
def set_logging_level(level: Union[int, str], *loggers: Optional[str], formatter=LOG_FORMATTER):
    """
    Attach a stderr console handler at ``level`` to each of the named ``loggers``.

    :param level:     A ``logging`` level - either a numeric constant (e.g. ``logging.DEBUG``)
                      or a level name such as ``'debug'`` / ``'INFO'`` (case-insensitive).
                      (Annotation fixed: the body explicitly accepts strings too.)
    :param loggers:   Names of loggers to (re-)configure. Defaults to ``['rpcscanner']`` when empty.
    :param formatter: The :class:`logging.Formatter` used for each new console handler.
    :return: A list of the :class:`.LogHelper` instances created - one per logger name.
    """
    lgs = []
    loggers = ['rpcscanner'] if len(loggers) == 0 else loggers
    # Translate string level names (case-insensitively) into their numeric constants.
    level = logging.getLevelName(str(level).upper()) if isinstance(level, str) else level
    
    for lg in loggers:
        l_handler = LogHelper(lg, handler_level=level, formatter=formatter)
        l_handler.add_console_handler(level=level, stream=sys.stderr)
        lgs.append(l_handler)
    return lgs
Exemplo n.º 3
0
def set_logging_level(level: Union[str, int] = None, logger='colfixer'):
    """
    (Re-)configure ``logger`` with a console handler at ``level`` and return the logger.

    When ``level`` is empty/None, it falls back to ``'ERROR'`` in quiet mode, otherwise the
    ``LOG_LEVEL`` env var (defaulting to ``'DEBUG'`` when ``settings.DEBUG`` else ``'WARNING'``).

    :param level:  Numeric logging level, or a (case-insensitive) level name such as ``'info'``.
    :param logger: Name of the logger to configure. When it's ``'colfixer'``, the module-level
                   ``log`` global is replaced with the newly configured logger.
    :return: The configured :class:`logging.Logger` instance.
    """
    global log
    if empty(level):
        level = 'ERROR' if settings.QUIET else env(
            'LOG_LEVEL', ('DEBUG' if settings.DEBUG else 'WARNING'))
    if isinstance(level, str):
        # .upper() so lowercase names like 'debug' resolve to a numeric level - without it,
        # getLevelName('debug') returns the unusable string "Level debug".
        level = logging.getLevelName(level.upper())
    _lh = LogHelper(logger, handler_level=level)
    _lh.add_console_handler()
    if logger == 'colfixer':
        log = _lh.get_logger()
    return _lh.get_logger()
Exemplo n.º 4
0
def main():
    """
    CLI entry point: parse arguments, optionally enable verbose logging, then build and
    print CSP output from one or more INI config files (or from STDIN).

    :return: ``COPYRIGHT`` when --version was requested, otherwise a tuple of
             ``(list_secs, str_secs)`` holding the generated sections; may also
             terminate the process via :func:`sys.exit`.
    """
    global log
    try:
        vargs = parser.parse_args()
    except Exception as e:
        parser.error(f"{type(e)} - {str(e)}")
        return sys.exit(1)
    # Verbose mode: swap the module-level logger for a DEBUG-level logger on stderr.
    if vargs.verbose_mode:
        _lh2 = LogHelper('privex.cspgen', handler_level=logging.DEBUG)
        _lh2.add_console_handler(stream=sys.stderr)
        log = _lh2.get_logger()

    log.debug(f"parser args: {vargs!r}")
    if vargs.show_version:
        oprint(COPYRIGHT)
        return COPYRIGHT
    if vargs.show_example:
        # Print the bundled example.ini wrapped in a banner, then exit cleanly.
        exfile, expath = read_example_file()
        exnote = "#####", "#", "# Privex CSPGen example.ini file", f"# Original Location within Python Package: {expath}", "#", "#####\n"
        oprint(*exnote, exfile, *exnote, sep="\n")
        return sys.exit(0)
    filenames = vargs.filenames
    # literal() interprets escape sequences (e.g. '\n') in the user-supplied separators.
    file_sep, sec_sep = literal(vargs.file_sep), literal(vargs.section_sep)
    str_secs = []
    list_secs = []
    if empty(filenames, itr=True):
        # No filenames given - only valid when config is being piped into stdin.
        if sys.stdin.isatty():
            parser.error("No filenames specified, and no data piped to stdin")
            return sys.exit(1)
        log.debug(
            "Assuming config piped via STDIN. Reading config from stdin.")
        confd = read_stdin()
        builder = get_builder(contents=confd)
        str_secs += [builder.generate('string', sep=sec_sep)]
        list_secs += [builder.generate('list')]
    else:
        for fn in filenames:
            # '-', '/dev/stdin' and 'STDIN' are treated as aliases for standard input.
            if fn in ['-', '/dev/stdin', 'STDIN']:
                log.debug(
                    "Assuming config piped via STDIN. Reading config from stdin."
                )
                builder = get_builder(contents=read_stdin())
            else:
                builder = get_builder(fn)

            str_secs += [builder.generate('string', sep=sec_sep)]
            list_secs += [builder.generate('list')]

    # oprint('file_sep: ', repr(file_sep))
    # oprint('sec_sep: ', repr(sec_sep))
    oprint(file_sep.join(str_secs))
    return list_secs, str_secs
Exemplo n.º 5
0
def setup_loggers(*loggers, console=True, file_dbg=True, file_err=True):
    """
    Generator that configures each named logger with a console handler and/or rotating
    debug/error file handlers, yielding the handlers created for each logger.

    :param loggers:  Logger names to configure. Defaults to ``['rpcscanner']`` when empty.
    :param console:  Attach a stderr console handler at ``LOG_LEVEL``.
    :param file_dbg: Attach a daily-rotated ``debug.log`` handler at ``LOG_LEVEL``.
    :param file_err: Attach a daily-rotated ``error.log`` handler at ``logging.WARNING``.
    :return: Yields ``(console_handler, debug_handler, error_handler, logger_name)``
             tuples; a handler slot is ``None`` when its flag was disabled.
    """
    if len(loggers) == 0:
        loggers = ['rpcscanner']

    for name in loggers:
        helper = LogHelper(name, formatter=LOG_FORMATTER, handler_level=LOG_LEVEL)
        con_handler, dbg_handler, err_handler = None, None, None
        if console:
            con_handler = helper.add_console_handler(level=LOG_LEVEL, stream=sys.stderr)
        if file_dbg:
            dbg_handler = helper.add_timed_file_handler(
                join(LOG_DIR, 'debug.log'), when='D', interval=1, backups=14, level=LOG_LEVEL
            )
        if file_err:
            err_handler = helper.add_timed_file_handler(
                join(LOG_DIR, 'error.log'), when='D', interval=1, backups=14, level=logging.WARNING
            )
        yield con_handler, dbg_handler, err_handler, name
Exemplo n.º 6
0
from privex.db import _setup_logging
from privex.db.sqlite import SqliteAsyncWrapper

try:
    # django-dotenv exposes read_dotenv(); python-dotenv exposes load_dotenv() instead -
    # support whichever 'dotenv' package happens to be installed.
    dotenv.read_dotenv()
except AttributeError:
    dotenv.load_dotenv()


# Resolve LOG_LEVEL from the environment (case-insensitive name), defaulting to WARNING.
LOG_LEVEL = env('LOG_LEVEL')
LOG_LEVEL = logging.getLevelName(str(LOG_LEVEL).upper()) if LOG_LEVEL is not None else logging.WARNING
_setup_logging(LOG_LEVEL)
LOG_FORMATTER = logging.Formatter('[%(asctime)s]: %(name)-55s -> %(funcName)-20s : %(levelname)-8s:: %(message)s')
# Configure the test-suite logger, and mirror the same config onto the 'privex.db' logger.
_lh = LogHelper('privex.db.tests', handler_level=LOG_LEVEL, formatter=LOG_FORMATTER)
_lh.copy_logger('privex.db')
_lh.add_console_handler()
log = _lh.get_logger()


class PrivexTestBase(TestCase):
    """Common base class for privex-db tests; currently adds nothing on top of TestCase."""
    pass


class PrivexDBTestBase(PrivexTestBase):
    """
    Base class for all privex-db test classes. Includes :meth:`.tearDown` to reset database after each test.
    """

    def setUp(self) -> None:
        # Fresh ExampleWrapper per test so each test starts from a clean wrapper instance.
        self.wrp = ExampleWrapper()
Exemplo n.º 7
0
# Treat '/path' and '/path/' as the same route (disable strict slash matching).
app.url_map.strict_slashes = False

# Copy all settings.cf entries into the local cf mapping.
for k, v in settings.cf.items():
    cf[k] = v

# if empty(LOG_LEVEL):
#     LOG_LEVEL = logging.DEBUG if DEBUG else logging.INFO

lh = LogHelper('myip')
if settings.USE_RICH_LOGGING:
    # Rich console handler (colourised output / rich tracebacks) instead of a plain handler.
    lh.get_logger().addHandler(
        RichHandler(level=settings.LOG_LEVEL,
                    console=console_err,
                    rich_tracebacks=settings.RICH_TRACEBACKS))
else:
    lh.add_console_handler(level=settings.LOG_LEVEL, stream=sys.stderr)

# Daily-rotated file logs (14 backups kept): messages >= LOG_LEVEL to DBG_LOG,
# warnings and above to ERR_LOG.
lh.add_timed_file_handler(settings.DBG_LOG,
                          when='D',
                          interval=1,
                          backups=14,
                          level=settings.LOG_LEVEL)
lh.add_timed_file_handler(settings.ERR_LOG,
                          when='D',
                          interval=1,
                          backups=14,
                          level=logging.WARNING)

log = lh.get_logger()

#######################################
Exemplo n.º 8
0
from typing import Tuple, List, Union

from privex.helpers import empty
from quart import request
from quart.exceptions import BadRequest
from rethinkdb import RethinkDB
from rethinkdb.ast import DB
from rethinkdb.net import DefaultConnection

from postfixparser import settings
from privex.loghelper import LogHelper

from postfixparser.settings import AppError, DEFAULT_ERR, ERRORS

# Attach an INFO-level console handler to the 'postfixparser' logger.
_lh = LogHelper('postfixparser')
_lh.add_console_handler(level=logging.INFO)

# NOTE(review): handlers were attached to 'postfixparser' above, but this module logs via
# getLogger(__name__) - the handler only applies if __name__ falls under the
# 'postfixparser' namespace. Verify this module lives inside that package.
log = logging.getLogger(__name__)

# Module-level storage dict shared within this module (presumably caches the
# RethinkDB connection for get_rethink() - TODO confirm against the rest of the file).
__STORE = {}


async def get_rethink() -> Tuple[DB, DefaultConnection, RethinkDB]:
    """

    Usage:

        >>> from postfixparser.core import get_rethink
        >>>
        >>> r, conn, rDB = await get_rethink()
        >>> r.table('blocks').insert(dict(block_num=1234)).run(conn)
Exemplo n.º 9
0
import argparse
from privex.loghelper import LogHelper
from typing import Union, Optional, List, Tuple, Dict, Set

# Explicit public API of this module.
__all__ = [
    'CSPBuilder', 'get_builder', 'main', 'parser', 'log_level', 'PKG_DIR',
    'EXAMPLE_DIR', 'EXAMPLE_INI'
]

# Package directory, plus the bundled example config shipped inside it.
PKG_DIR = Path(__file__).parent.resolve()
EXAMPLE_DIR = PKG_DIR / 'examples'
EXAMPLE_INI = EXAMPLE_DIR / 'example.ini'

# Configure the package logger to log to stderr at LOG_LEVEL (env var; default WARNING).
log_level = env('LOG_LEVEL', 'WARNING')
_lh = LogHelper('privex.cspgen', handler_level=logging.getLevelName(log_level))
_lh.add_console_handler(stream=sys.stderr)

log = _lh.get_logger()

# Raw argv count / values, kept at module level for convenience.
argc, argv = len(sys.argv), sys.argv


class CSPBuilder:
    def __init__(self,
                 filename: str = None,
                 file_handle=None,
                 contents: Union[str, list, tuple] = None,
                 **kwargs):
        self.config = configparser.ConfigParser()
        self.conf_file = None
        if not empty(filename):
Exemplo n.º 10
0
    def handle(self, *args, **options):
        """
        Management command entry point: create and/or renew the locks named on the
        command line via :func:`.set_lock`, then print a per-lock and summary report.

        Exits with code 2 (after printing an explanation) when ``--fail`` was set and an
        existing lock caused :class:`.LockFail` to be raised; returns early with a
        message when no lock names were given.
        """
        _lh = LogHelper(__name__,
                        formatter=LOG_FORMATTER,
                        handler_level=logging.INFO)
        _lh.add_console_handler()
        # Don't propagate to parent loggers, to avoid duplicated console output.
        _lh.get_logger().propagate = False
        lockmgr.clean_locks()  # Clean up any locks due for expiration.

        # Normalise the boolean CLI flags.
        fail = is_true(options['fail'])
        no_renew = is_true(options['no_renew'])
        only_renew = is_true(options['only_renew'])
        no_timeout = is_true(options['no_timeout'])

        locks: list = options['locks']
        # NOTE(review): annotated int, but is None when no process id was supplied.
        process_id: int = int(options['process_id']
                              ) if options['process_id'] is not None else None
        locked_by: str = options['locked_by']
        timeout = None if no_timeout else int(options['timeout'])
        if len(locks) == 0:
            print('No lock names specified.')
            return

        # --only-renew disables creation; --no-renew disables renewal.
        _create = not only_renew
        _renew = not no_renew

        try:
            res = set_lock(*locks,
                           timeout=timeout,
                           locked_by=locked_by,
                           process_id=process_id,
                           fail=fail,
                           create=_create,
                           renew=_renew)
            print(f"Finished creating / renewing {len(locks)} locks.\n")

            print("\n====================Status Report=====================\n")
            print("  Per-lock:\n")
            print("\t\t{:<20}{:<20}{:<20}{:<20}\n".format(
                "Name", "Was Locked?", "Now Locked?", "Status"))
            for lck_name, lres in res.statuses:
                print("\t\t{:<20}{:<20}{:<20}{:<20}".format(
                    lck_name, 'YES' if lres.was_locked else 'NO',
                    'YES' if lres.locked else 'NO', lres.status))
            print(
                "\n========================================================\n")
            print("  Summary:\n")
            print(f"    Locks Created:      {res.counts['created']}")
            print(f"    Locks Renewed:      {res.counts['renewed']}")
            print(f"    Renewals Skipped:   {res.counts['skip_renew']}")
            print(f"    Creations Skipped:  {res.counts['skip_create']}")

        except LockFail as e:
            print(
                "\n---------------------------------------------------------------------------\n"
            )
            print(
                " [lockmgr.management.commands.set_lock] Caught exception LockFail while creating/setting locks..."
            )
            print(
                " [lockmgr.management.commands.set_lock] The following existing lock was encountered:\n"
            )
            print(f"\t{e.lock}\n")
            print(
                " >>> As you have set -e / --fail, this means that any lock creations or updates triggered during "
                "this run of set_lock should have been rolled back.")
            print(
                " >>> If in doubt, run './manage.py list_locks' to view all current locks.\n"
            )
            print(" !!! Now exiting with return code 2...\n")
            return sys.exit(2)

        print("")
        print("\n=========================================================\n")
        print("Finished creating / renewing locks.")
        print("\n=========================================================\n")
Exemplo n.º 11
0
# with automatic daily log rotation (up to 14 days of logs)
# Due to the amount of output from logging.DEBUG, we only log INFO and higher to a file.
# Valid environment log levels (from least to most severe) are:
# DEBUG, INFO, WARNING, ERROR, FATAL, CRITICAL

LOG_FORMATTER = logging.Formatter('[%(asctime)s]: %(name)-25s -> %(funcName)-20s : %(levelname)-8s:: %(message)s')

lh = LogHelper('lg', formatter=LOG_FORMATTER)

# Console log level comes from the LOG_LEVEL env var (case-insensitive name);
# when unset, falls back to DEBUG or INFO depending on the DEBUG config flag.
CONSOLE_LOG_LEVEL = env('LOG_LEVEL', None)
CONSOLE_LOG_LEVEL = logging.getLevelName(str(CONSOLE_LOG_LEVEL).upper()) if CONSOLE_LOG_LEVEL is not None else None

if CONSOLE_LOG_LEVEL is None:
    CONSOLE_LOG_LEVEL = logging.DEBUG if cf['DEBUG'] else logging.INFO

lh.add_console_handler(level=CONSOLE_LOG_LEVEL)

# Daily-rotated file logs (14 backups): everything >= CONSOLE_LOG_LEVEL to debug.log,
# warnings and above to error.log.
DBG_LOG, ERR_LOG = os.path.join(BASE_DIR, 'logs', 'debug.log'), os.path.join(BASE_DIR, 'logs', 'error.log')
lh.add_timed_file_handler(DBG_LOG, when='D', interval=1, backups=14, level=CONSOLE_LOG_LEVEL)
lh.add_timed_file_handler(ERR_LOG, when='D', interval=1, backups=14, level=logging.WARNING)

log = lh.get_logger()
# Share the same logging configuration with all 'privex' namespaced loggers.
lh.copy_logger('privex')

#######################################
#
# RabbitMQ, Redis, and CouchDB Configuration
#
#######################################

RMQ_HOST = cf['RMQ_HOST'] = env('RMQ_HOST', 'localhost')
Exemplo n.º 12
0
def setup_logging(log_level=logging.INFO):
    """
    Configure this module's logger with a console handler at ``log_level``.

    :param int log_level: A ``logging`` level constant, e.g. ``logging.DEBUG`` (default: INFO)
    :return logging.Logger: The configured :class:`logging.Logger` instance.
             (Previously returned ``None`` implicitly - returning the logger is
             backward-compatible and matches the other logging helpers in this project.)
    """
    from privex.loghelper import LogHelper
    lh = LogHelper(__name__, handler_level=log_level)
    lh.add_console_handler()
    return lh.get_logger()
Exemplo n.º 13
0
LOG_FORMATTER = logging.Formatter(
    '[%(asctime)s]: %(name)-55s -> %(funcName)-20s : %(levelname)-8s:: %(message)s'
)
# LOG_FORMATTER = logging.Formatter('[%(asctime)s]: %(funcName)-14s : %(levelname)-8s:: %(message)s')

# Log directory (LOG_DIR env var, defaulting to BASE_DIR/logs); created if missing.
LOG_DIR = env('LOG_DIR', BASE_DIR / 'logs')

if not LOG_DIR.exists():
    os.makedirs(str(LOG_DIR))

_lh = LogHelper('pinapp', formatter=LOG_FORMATTER, handler_level=logging.DEBUG)

# Log to console with LOG_LEVEL, as well as output logs >=debug / >=warning to respective files
# with automatic daily log rotation (up to 14 days of logs)
_lh.add_console_handler(level=LOG_LEVEL)
_lh.add_timed_file_handler(str(BASE_DIR / 'logs' / 'debug.log'),
                           when='D',
                           interval=1,
                           backups=14,
                           level=LOG_LEVEL)
_lh.add_timed_file_handler(str(BASE_DIR / 'logs' / 'error.log'),
                           when='D',
                           interval=1,
                           backups=14,
                           level=logging.WARNING)

# Mirror the 'pinapp' logging configuration onto the 'pincore' logger as well.
_lh.copy_logger('pincore')

TEMPLATES = [
    {
Exemplo n.º 14
0
def set_log_level(level: Union[str, int]) -> logging.Logger:
    """
    Set up the 'steempeers' logger with a console handler at ``level`` and return it.

    :param level: Numeric logging level, or a (case-insensitive) level name e.g. ``'debug'``.
    :return: The configured :class:`logging.Logger` for ``'steempeers'``.
    """
    if isinstance(level, str):
        # Translate level names (case-insensitively) into numeric logging constants.
        level = logging.getLevelName(str(level).upper())
    helper = LogHelper('steempeers', handler_level=level)
    helper.add_console_handler()
    return helper.get_logger()