Code Example #1
File: logging.py Project: wrouesnel/j2live
def configure_logging():
    global _logging_configured
    global _log_level
    # Note: this is here because logging is weird and Python is GIL'd.
    if _logging_configured is True:
        return

    structlog.configure_once(
        processors=[
            structlog.stdlib.filter_by_level,
            structlog.stdlib.add_logger_name,
            structlog.stdlib.add_log_level,
            structlog.stdlib.PositionalArgumentsFormatter(
                remove_positional_args=False),
            structlog.processors.TimeStamper(fmt="iso"),
            structlog.processors.StackInfoRenderer(),
            structlog.processors.format_exc_info,
            structlog.processors.UnicodeDecoder(),
            structlog.stdlib.ProcessorFormatter.wrap_for_formatter,
        ],
        logger_factory=structlog.stdlib.LoggerFactory(),
        wrapper_class=structlog.stdlib.BoundLogger,
        cache_logger_on_first_use=True,
    )

    pre_chain = [
        # Add the log level and a timestamp to the event_dict if the log entry
        # is not from structlog.
        structlog.stdlib.add_log_level,
        structlog.processors.format_exc_info,
        structlog.processors.TimeStamper(fmt="iso"),
    ]

    # The rules: all logs go to stdout and all logs are formatted as JSON.
    if _debug_logs.lower() == "kv" or _debug_logs.lower() == "keyvalue":
        processor = structlog.processors.KeyValueRenderer(key_order=["event"],
                                                          drop_missing=True,
                                                          sort_keys=True)
    else:
        processor = structlog.processors.JSONRenderer(serializer=json.dumps)
    formatter = structlog.stdlib.ProcessorFormatter(
        processor=processor, foreign_pre_chain=pre_chain)

    handler = logging.StreamHandler(sys.stdout)
    handler.setFormatter(formatter)

    root_logger = logging.getLogger()
    root_logger.handlers = [handler]
    # Note: logging._nameToLevel is a private stdlib mapping;
    # getattr(logging, _log_level.upper()) would avoid relying on it.
    root_logger.setLevel(logging._nameToLevel[_log_level.upper()])

    root_logger.info("Logging configured")

    _logging_configured = True
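
A minimal usage sketch for this example, assuming the module-level globals the function references (_logging_configured, _log_level, _debug_logs) are defined elsewhere in logging.py -- the values below are placeholders, not the project's defaults:

# Hypothetical module globals -- not shown in the original snippet.
_logging_configured = False
_log_level = "info"
_debug_logs = ""  # set to "kv" or "keyvalue" for key/value output instead of JSON

configure_logging()
log = structlog.get_logger(__name__)
log.info("service started", port=8080)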
Code Example #2
File: loggers.py Project: williamkibira/chat-service
def initialize_logging() -> None:
    """
    Initialize our logging system:
    * the stdlib logging package for proper structlog use
    * structlog processor chain, etc.
    This should be called once for each application
    NOTES:
    * To enable human readable, colored, positional logging, set LOG_MODE=LOCAL
      Note that this hides many of the boilerplate log entry elements that is
      clutter for local development.
    """
    debug = os.environ.get('DEBUG', 'false') != 'false'
    logging.basicConfig(level='DEBUG' if debug else 'INFO',
                        stream=sys.stdout,
                        format="%(message)s")

    if os.getenv('LOG_MODE', 'JSON') == 'LOCAL':
        chain = [
            structlog.stdlib.add_log_level,
            structlog.stdlib.add_logger_name,
            structlog.stdlib.PositionalArgumentsFormatter(),
            structlog.processors.TimeStamper(fmt="%Y-%m-%d %H:%M:%S.%f"),
            structlog.processors.StackInfoRenderer(),
            structlog.processors.format_exc_info,
            structlog.dev.ConsoleRenderer()
        ]
    else:
        chain = [
            LogEntryProcessor.add_app_info,
            LogEntryProcessor.add_logger_name,
            LogEntryProcessor.add_timestamp,
            LogEntryProcessor.censor_password,
            structlog.processors.StackInfoRenderer(),
            structlog.processors.format_exc_info,
            LogEntryProcessor.cleanup_keynames,
            structlog.twisted.JSONRenderer()
        ]

    structlog.configure_once(
        processors=chain,
        context_class=dict,
        logger_factory=structlog.stdlib.LoggerFactory(),
        wrapper_class=structlog.stdlib.BoundLogger,
        cache_logger_on_first_use=True,
    )

    # NOTE: this second call has no effect -- structlog.configure_once() only
    # configures if structlog has not been configured yet, and merely emits a
    # RuntimeWarning on repeated attempts, so the twisted-based configuration
    # below is dead code.
    structlog.configure_once(
        processors=chain,
        context_class=dict,
        logger_factory=structlog.twisted.LoggerFactory(),
        wrapper_class=structlog.twisted.BoundLogger,
        cache_logger_on_first_use=True,
    )
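
A usage sketch under the assumptions above (LogEntryProcessor is project-specific, so the JSON branch needs that class; the LOCAL branch works with structlog alone):

import os
os.environ["LOG_MODE"] = "LOCAL"  # colored console output for local development
initialize_logging()
log = structlog.get_logger("chat-service")
log.info("connected", user_id=42)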
Code Example #3
def configure_structlog(level='INFO', development_mode=False):
    """Configures structlog loggers. If development_mode set to True, will pretty print exception traces.
    :param level: defaults to 'info'
    :param development_mode: defaults to False.
    :return:
    """
    log_levels = {
        'DEBUG': logging.DEBUG,
        'INFO': logging.INFO,
        'WARNING': logging.WARNING,
        'WARN': logging.WARNING,
        'ERROR': logging.ERROR,
        'CRITICAL': logging.CRITICAL
    }

    processors = [
        structlog.stdlib.filter_by_level, structlog.stdlib.add_logger_name,
        structlog.stdlib.add_log_level,
        structlog.stdlib.PositionalArgumentsFormatter(),
        structlog.processors.TimeStamper(fmt="iso", utc=True),
        structlog.processors.StackInfoRenderer(),
        structlog.processors.format_exc_info
    ]

    if sys.version_info < (3, 0):
        processors.append(
            structlog.processors.UnicodeEncoder(encoding='utf-8'))

    if development_mode:
        processors.append(structlog.processors.ExceptionPrettyPrinter())

    # Append the renderer last. Use JSONRenderer for JSON output, or
    # KeyValueRenderer if we want to use the 'kv' tag in Filebeat.
    # processors.append(structlog.processors.JSONRenderer())
    processors.append(structlog.processors.KeyValueRenderer())

    structlog.configure_once(
        processors=processors,
        context_class=dict,
        logger_factory=structlog.stdlib.LoggerFactory(),
        wrapper_class=structlog.stdlib.BoundLogger,
        cache_logger_on_first_use=True,
    )

    logging.basicConfig(stream=sys.stdout,
                        format='%(message)s',
                        level=log_levels[level.upper()])

    # quiet chatty libs
    logging.getLogger("requests").setLevel(logging.WARNING)
Code Example #4
    def STRUCTLOG_CONFIGURED(self):
        import structlog

        structlog.configure_once(
            processors=[
                structlog.stdlib.filter_by_level,
                structlog.processors.StackInfoRenderer(),
                structlog.processors.format_exc_info,
                structlog.processors.JSONRenderer(),  # must be instantiated; the bare class is not a valid processor
            ],
            context_class=dict,
            logger_factory=structlog.stdlib.LoggerFactory(),
            wrapper_class=structlog.stdlib.BoundLogger,
            cache_logger_on_first_use=True
        )
        return True
Code Example #5
def configure_logging(logging_levels, plain=False, stream=sys.stderr):
    renderer = (
        PlainRenderer() if plain else
        structlog.processors.JSONRenderer())

    attr_processors = [
        structlog.stdlib.add_logger_name,
        structlog.stdlib.add_log_level,
        structlog.processors.TimeStamper(fmt='iso')
    ]

    structlog.configure_once(
        processors=(
            [structlog.stdlib.filter_by_level] +
            attr_processors +
            [
                structlog.stdlib.PositionalArgumentsFormatter(),
                structlog.processors.StackInfoRenderer(),
                structlog.processors.format_exc_info,
                structlog.stdlib.ProcessorFormatter.wrap_for_formatter
            ]
        ),
        context_class=dict,
        logger_factory=structlog.stdlib.LoggerFactory(),
        wrapper_class=structlog.stdlib.BoundLogger,
        cache_logger_on_first_use=True,
    )

    formatter = structlog.stdlib.ProcessorFormatter(
        processor=renderer, foreign_pre_chain=attr_processors)

    handler = AtomicStreamHandler(stream)
    handler.setFormatter(formatter)

    root_logger = logging.getLogger()
    root_logger.handlers = [handler]

    # warnings issued by the ``warnings`` module will be
    # redirected to the ``py.warnings`` logger
    logging.captureWarnings(True)

    for logger, level in logging_levels.items():

        if logger.lower() == 'root':
            logger = ''

        logging.getLogger(logger).setLevel(level.upper())
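
PlainRenderer and AtomicStreamHandler are project-specific helpers; everything else is stock structlog. A usage sketch (the level names passed in are examples):

configure_logging({"root": "info", "botocore": "warning"}, plain=False)
structlog.get_logger("worker").info("job finished", job_id=7)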
Code Example #6
def setup_logging(verbose: bool = False) -> None:
    """Configure logging verbosity and destination."""
    import logging

    loglevel = logging.DEBUG if verbose else logging.INFO

    handler = logging.StreamHandler(stream=sys.stdout)
    handler.setLevel(loglevel)

    root_logger = logging.getLogger()
    root_logger.setLevel(loglevel)
    root_logger.addHandler(handler)

    # The following sets the minimum level that we will see for the given component: only
    # warnings and higher for paramiko, boto3 and botocore, and only errors / fatal /
    # critical messages for the "error_only" logger.
    logging.getLogger("paramiko").setLevel(logging.WARNING)
    logging.getLogger("boto3").setLevel(logging.WARNING)
    logging.getLogger("botocore").setLevel(logging.WARNING)
    logging.getLogger("error_only").setLevel(logging.ERROR)
    logging.getLogger("asyncio").setLevel(logging.INFO)
    logging.getLogger("blib2to3").setLevel(logging.WARN)

    structlog.configure_once(
        processors=[
            structlog.stdlib.filter_by_level,
            structlog.stdlib.add_logger_name,
            structlog.stdlib.add_log_level,
            structlog.processors.StackInfoRenderer(),
            # Same as 'iso' format but without the useless milliseconds
            structlog.processors.TimeStamper(fmt="%Y-%m-%dT%H:%M:%SZ"),
            structlog.processors.format_exc_info,
            structlog.processors.UnicodeDecoder(),
            structlog.dev.ConsoleRenderer(pad_event=20,
                                          colors=True,
                                          force_colors=True),
        ],
        context_class=dict,
        logger_factory=structlog.stdlib.LoggerFactory(),
        wrapper_class=structlog.stdlib.BoundLogger,
        cache_logger_on_first_use=True,
    )
    # Initialize colorama. Structlog does this but it doesn't have the strip=False
    # so we don't get colors on Evergreen pages (which usually doesn't give us a TTY).
    c.init(strip=False)  # Don't strip ansi colors even if we're not on a tty.

    _tweak_structlog_log_line()
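
This function assumes colorama is imported as c and that _tweak_structlog_log_line is defined elsewhere in the module. A usage sketch:

setup_logging(verbose=True)
structlog.get_logger("deploy").debug("uploading artifact", bucket="releases")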
Code Example #7
def main(format, path, debug, no_clobber, close_delay, driver=None, dump_html=False):
    """get tag from illustration2vec."""
    if debug:
        logging.basicConfig(level=logging.DEBUG)
    else:
        logging.basicConfig(level=logging.INFO)
    structlog.configure_once(logger_factory=structlog.stdlib.LoggerFactory())
    log = structlog.getLogger()

    if not path:
        raise ValueError('PATH required.')

    # init folder
    os.makedirs(user_data_dir, exist_ok=True)
    os.makedirs(thumb_folder, exist_ok=True)

    # database
    db_path = os.path.join(user_data_dir, 'main.db')
    if not os.path.isfile(db_path):
        Path(db_path).touch()
    models.database.init(db_path)
    try:
        models.init_all_tables()
    except peewee.OperationalError:
        log.debug('Table already created')

    session = Session(driver=driver)
    try:
        for p in path:
            if os.path.isfile(p):
                print('path:{}'.format(os.path.basename(p)))
            elif is_url(p):
                print('url:{}'.format(p))
                p = download(p, no_clobber=no_clobber)
            else:
                log.error('Unknown path format or path does not exist', path=p)
                continue
            result = get_print_result(
                path=p, db_path=db_path, format=format, session=session)
            print(result)
    finally:
        delay_close(close_delay)
        if hasattr(session, 'browser'):
            session.browser.quit()
Code Example #8
    def STRUCTLOG_CONFIGURED(self):
        import structlog

        key_order = ["event"]
        if self.LOGGING_ADD_REQUEST_ID:
            key_order.append("request_id")

        renderer = self.get_structlog_renderer()
        structlog.configure_once(
            processors=[
                structlog.stdlib.filter_by_level,
                structlog.processors.StackInfoRenderer(),
                structlog.processors.format_exc_info,
                renderer(key_order=key_order)
            ],
            context_class=dict,
            logger_factory=structlog.stdlib.LoggerFactory(),
            wrapper_class=structlog.stdlib.BoundLogger,
            cache_logger_on_first_use=True
        )
        return True
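
get_structlog_renderer is project-specific. Since the returned object is called with key_order=..., it is presumably a renderer class such as structlog.processors.KeyValueRenderer; a plausible sketch on the same settings class (an assumption, not the project's code):

    def get_structlog_renderer(self):
        # Hypothetical: return a renderer class that accepts key_order.
        return structlog.processors.KeyValueRenderer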
Code Example #9
File: common.py Project: gmoben/docker-dev-env
def configure_log(log_level=None, **kwargs):
    """Setup structlog"""
    log_level = (os.environ.get('LOG_LEVEL', log_level) or 'INFO').upper()
    logging.basicConfig(level=log_level)

    kw = dict(
        processors=[
            structlog.stdlib.filter_by_level,
            structlog.stdlib.add_log_level,
            structlog.processors.TimeStamper(fmt="iso"),
            structlog.processors.StackInfoRenderer(),
            structlog.processors.format_exc_info,
            structlog.processors.UnicodeDecoder(),
            structlog.dev.ConsoleRenderer()
        ],
        context_class=dict,
        logger_factory=structlog.stdlib.LoggerFactory(),
        wrapper_class=structlog.stdlib.BoundLogger,
        cache_logger_on_first_use=True,
    )
    kw.update(**kwargs)

    structlog.configure_once(**kw)
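
A usage sketch; the LOG_LEVEL environment variable takes precedence over the argument, and extra keyword arguments override the structlog.configure_once() defaults:

configure_log(log_level="debug")
structlog.get_logger("dev-env").info("container ready", name="web")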
Code Example #10
File: log.py Project: andrewkrug/subhub
def _setup_once():

    structlog.configure_once(
        processors=[
            structlog.stdlib.filter_by_level,
            _add_caller_info,
            _add_log_level,
            _add_event_uuid,
            _event_uppercase,
            structlog.stdlib.PositionalArgumentsFormatter(True),
            _add_timestamp,
            _order_keys,
            structlog.processors.StackInfoRenderer(),
            structlog.processors.format_exc_info,
            structlog.processors.UnicodeDecoder(),
            structlog.processors.JSONRenderer(),
        ],
        wrapper_class=structlog.stdlib.BoundLogger,
        context_class=dict,
        logger_factory=structlog.stdlib.LoggerFactory(),
        cache_logger_on_first_use=True,
    )

    logging.config.dictConfig(LOGGING_CONFIG)
    logger = get_logger(__name__)
    logger.info(
        "logging initialized",
        DEPLOY_ENV=CFG.DEPLOY_ENV,
        PROJECT_NAME=CFG.PROJECT_NAME,
        BRANCH=CFG.BRANCH,
        REVISION=CFG.REVISION,
        VERSION=CFG.VERSION,
        REMOTE_ORIGIN_URL=CFG.REMOTE_ORIGIN_URL,
        LOG_LEVEL=CFG.LOG_LEVEL,
        DEPLOYED_BY=CFG.DEPLOYED_BY,
        DEPLOYED_WHEN=CFG.DEPLOYED_WHEN,
    )
Code Example #11
File: circle_svc.py Project: magnificrab/pi_mc
def launch_gunicorn():
    from structlog.stdlib import LoggerFactory
    from structlog.threadlocal import wrap_dict
    structlog.configure(context_class=wrap_dict(dict), logger_factory=LoggerFactory())

    # NOTE: structlog was just configured above, so this configure_once() call
    # is a no-op (it only emits a RuntimeWarning on repeated configuration).
    structlog.configure_once(
        #processors=chain,
        context_class=dict,
        logger_factory=structlog.stdlib.LoggerFactory(),
        wrapper_class=structlog.stdlib.BoundLogger,
        cache_logger_on_first_use=True,
    )

    try:
        httpd = HTTPServer(('0.0.0.0', 8000), MetricsHandler)
    except (OSError, socket.error):
        return

    thread = PrometheusEndpointServer(httpd)
    thread.daemon = True
    thread.start()
    #log.info('Exporting Prometheus metrics on port 8000')

    app.run(host='0.0.0.0')
Code Example #12
    )
    print(
        dumps({
            "event": "Error reporting is enabled.",
            "level": "info",
            "logger": __name__,
        }))

structlog.configure_once(
    processors=[
        structlog.stdlib.add_log_level,
        structlog.stdlib.add_logger_name,
        structlog.stdlib.PositionalArgumentsFormatter(),
        structlog.processors.TimeStamper(),
        structlog.processors.StackInfoRenderer(),
        # structlog.processors.format_exc_info,
        structlog.stdlib.ProcessorFormatter.wrap_for_formatter,
    ],
    context_class=structlog.threadlocal.wrap_dict(dict),
    logger_factory=structlog.stdlib.LoggerFactory(),
    wrapper_class=structlog.stdlib.BoundLogger,
    cache_logger_on_first_use=True,
)

LOG_PRE_CHAIN = [
    # Add the log level and a timestamp to the event_dict if the log entry
    # is not from structlog.
    structlog.stdlib.add_log_level,
    structlog.stdlib.add_logger_name,
    structlog.processors.TimeStamper(),
]
Code Example #13
        'AWS_LAMBDA_FUNCTION_VERSION', 0)
    event_dict['region'] = os.environ.get('AWS_REGION', os.uname().nodename)

    extras = ['SERVICE', 'STACK', 'STAGE']
    for extra in extras:
        if os.environ.get(extra):
            event_dict[extra.lower()] = os.environ[extra]

    return event_dict


structlog.configure_once(
    processors=[
        structlog.stdlib.add_log_level,
        _add_service_context,
        structlog.processors.TimeStamper(fmt='iso', utc=True, key='ts'),
        structlog.processors.format_exc_info,
        structlog.processors.UnicodeDecoder(),
        structlog.processors.JSONRenderer(),
    ],
    context_class=dict,
    logger_factory=structlog.stdlib.LoggerFactory(),
    cache_logger_on_first_use=True)

logger = structlog.get_logger()


def log_invocation(func):
    """
    A decorator for Lambda handlers that logs the input event and the return
    value of the function. Easy and convenient way how to add more visibility
    to the runtime of your Lambdas.
    """
    @functools.wraps(func)
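    # --- The original snippet is truncated at the decorator line above. The
    # --- wrapper below is a plausible sketch of how it might continue, not
    # --- the project's actual code.
    def wrapper(event, context):
        logger.info("lambda invoked", input_event=event)
        result = func(event, context)
        logger.info("lambda returned", return_value=result)
        return result

    return wrapper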
Code Example #14
def main(args, logger=None):
    """
    main entrypoint
    Args:
        args():
    Returns:
        (void)
    """

    analysis_id = uuid.uuid4()

    curDir = os.getcwd()
    output_dir = args.outdir
    # metadata_file = args.metadata_file
    reference = os.path.abspath(args.reference)

    # sensitivePath = str(options.sensitivePath).lstrip().rstrip()
    # sensitiveCols = str(options.sensitiveCols).lstrip().rstrip()
    # outputFile = str(options.outputFile).lstrip().rstrip()
    # bcidCol = str( str(options.bcidCol).lstrip().rstrip() )
    # naValue = str( str(options.naValue).lstrip().rstrip() )

    # metadata = result_parsers.parse_workflow_results(metadata_file)
    # distance = read(distancePath)
    # treeFile = "".join(read(treePath))

    if not logger:
        logging.basicConfig(
            format="%(message)s",
            stream=sys.stdout,
            level=logging.DEBUG,
        )

        structlog.configure_once(
            processors=[
                structlog.stdlib.add_log_level,
                structlog.processors.JSONRenderer()
            ],
            logger_factory=structlog.stdlib.LoggerFactory(),
            wrapper_class=structlog.stdlib.BoundLogger,
            context_class=structlog.threadlocal.wrap_dict(dict),
        )
        logger = structlog.get_logger(
            analysis_id=str(uuid.uuid4()),
            pipeline_version=cpo_pipeline.__version__,
        )

    inputs = []
    with open(args.input_file) as input_file:
        fieldnames = [
            'sample_id',
            'reads1',
            'reads2',
        ]
        reader = csv.DictReader(
            (row for row in input_file if not row.startswith('#')),
            delimiter='\t',
            fieldnames=fieldnames)
        for row in reader:
            inputs.append(row)

    os.environ['QT_QPA_PLATFORM'] = 'offscreen'

    paths = {
        'logs': os.path.abspath(os.path.join(
            output_dir,
            'logs',
        )),
        'snippy_output': os.path.abspath(os.path.join(output_dir, "snippy")),
    }

    for output_subdir in paths.values():
        try:
            os.makedirs(output_subdir)
        except OSError as e:
            if e.errno != errno.EEXIST:
                raise

    job_script_path = resource_filename('data', 'job_scripts')

    contigs_paths = []
    for sample_id in [input["sample_id"] for input in inputs]:
        contigs = os.path.abspath(
            os.path.join(args.result_dir, sample_id, "assembly", "contigs.fa"))
        contigs_paths.append(contigs)

    snippy_dirs = [
        os.path.join(
            paths['snippy_output'],
            os.path.basename(os.path.dirname(os.path.dirname(contigs))))
        for contigs in contigs_paths
    ]

    snippy_jobs = [{
        'job_name':
        'snippy',
        'output_path':
        paths['logs'],
        'error_path':
        paths['logs'],
        'native_specification':
        '-pe smp 8 -shell y',
        'remote_command':
        os.path.join(job_script_path, 'snippy.sh'),
        'args': [
            "--ref",
            reference,
            "--R1",
            input['reads1'],
            "--R2",
            input['reads2'],
            "--outdir",
            os.path.join(
                paths['snippy_output'],
                input['sample_id'],
            ),
        ]
    } for input in inputs]

    run_jobs(snippy_jobs)

    snippy_core_jobs = [{
        'job_name':
        'snippy-core',
        'output_path':
        paths['logs'],
        'error_path':
        paths['logs'],
        'native_specification':
        '-pe smp 8 -shell y',
        'remote_command':
        os.path.join(job_script_path, 'snippy-core.sh'),
        'args': [
            "--ref",
            reference,
            "--outdir",
            paths["snippy_output"],
        ] + snippy_dirs
    }]

    run_jobs(snippy_core_jobs)

    snp_dists_jobs = [{
        'job_name':
        'snp-dists',
        'output_path':
        paths['logs'],
        'error_path':
        paths['logs'],
        'native_specification':
        '-pe smp 8',
        'remote_command':
        os.path.join(job_script_path, 'snp-dists.sh'),
        'args': [
            "--alignment",
            os.path.join(paths["snippy_output"], "core.aln"),
            "--output_file",
            os.path.join(paths["snippy_output"], "core.aln.matrix.tsv"),
        ]
    }]

    run_jobs(snp_dists_jobs)

    iqtree_jobs = [{
        'job_name':
        'iqtree',
        'output_path':
        paths['logs'],
        'error_path':
        paths['logs'],
        'native_specification':
        '-pe smp 8',
        'remote_command':
        os.path.join(job_script_path, 'iqtree.sh'),
        'args': [
            "--alignment",
            os.path.join(paths["snippy_output"], "core.full.aln"),
            "--model",
            "GTR+G4",
        ]
    }]

    run_jobs(iqtree_jobs)

    clonalframeml_jobs = [{
        'job_name':
        'clonalframeml',
        'output_path':
        paths['logs'],
        'error_path':
        paths['logs'],
        'native_specification':
        '-pe smp 8',
        'remote_command':
        os.path.join(job_script_path, 'clonalframeml.sh'),
        'args': [
            "--alignment",
            os.path.join(paths["snippy_output"], "core.full.aln"),
            "--treefile",
            os.path.join(paths["snippy_output"], "core.full.aln.treefile"),
            "--output_file",
            os.path.join(paths["snippy_output"],
                         "core.full.aln.clonalframeml"),
        ]
    }]

    run_jobs(clonalframeml_jobs)

    maskrc_svg_jobs = [{
        'job_name':
        'maskrc-svg',
        'output_path':
        paths['logs'],
        'error_path':
        paths['logs'],
        'native_specification':
        '-pe smp 8',
        'remote_command':
        os.path.join(job_script_path, 'maskrc-svg.sh'),
        'args': [
            "--alignment",
            os.path.join(paths["snippy_output"], "core.full.aln"),
            "--svg",
            os.path.join(paths["snippy_output"], "core.full.maskrc.svg"),
            "--clonalframeml",
            os.path.join(paths["snippy_output"],
                         "core.full.aln.clonalframeml"),
            "--output_file",
            os.path.join(paths["snippy_output"], "core.full.maskrc.aln"),
        ]
    }]

    run_jobs(maskrc_svg_jobs)

    snp_sites_jobs = [{
        'job_name':
        'snp-sites',
        'output_path':
        paths['logs'],
        'error_path':
        paths['logs'],
        'native_specification':
        '-pe smp 8',
        'remote_command':
        os.path.join(job_script_path, 'snp-sites.sh'),
        'args': [
            "--alignment",
            os.path.join(paths["snippy_output"], "core.full.maskrc.aln"),
            "--output_file",
            os.path.join(paths["snippy_output"], "core.full.maskrc.snp.aln"),
        ]
    }]

    run_jobs(snp_sites_jobs)

    iqtree_jobs = [{
        'job_name':
        'iqtree',
        'output_path':
        paths['logs'],
        'error_path':
        paths['logs'],
        'native_specification':
        '-pe smp 8',
        'remote_command':
        os.path.join(job_script_path, 'iqtree.sh'),
        'args': [
            "--alignment",
            os.path.join(paths["snippy_output"], "core.full.maskrc.aln"),
            "--model",
            "GTR+G+ASC",
        ]
    }]

    run_jobs(iqtree_jobs)

    snp_dists_jobs = [{
        'job_name':
        'snp-dists',
        'output_path':
        paths['logs'],
        'error_path':
        paths['logs'],
        'native_specification':
        '-pe smp 8',
        'remote_command':
        os.path.join(job_script_path, 'snp-dists.sh'),
        'args': [
            "--alignment",
            os.path.join(paths["snippy_output"], "core.aln"),
            "--output_file",
            os.path.join(paths["snippy_output"], "core.matrix.tab"),
        ]
    }, {
        'job_name':
        'snp-dists',
        'output_path':
        paths['logs'],
        'error_path':
        paths['logs'],
        'native_specification':
        '-pe smp 8',
        'remote_command':
        os.path.join(job_script_path, 'snp-dists.sh'),
        'args': [
            "--alignment",
            os.path.join(paths["snippy_output"], "core.full.maskrc.snp.aln"),
            "--output_file",
            os.path.join(paths["snippy_output"],
                         "core.full.maskrc.snp.matrix.tab"),
        ]
    }]

    run_jobs(snp_dists_jobs)

    exit(0)
    # NOTE: everything below this exit() call is unreachable, and it references
    # variables (distance, treePath, metadata, outputFile, sensitivePath, ...)
    # that are only defined in the commented-out block near the top of main().
    distanceDict = {}  #store the distance matrix as rowname:list<string>

    for i in range(len(distance)):
        temp = distance[i].split("\t")
        distanceDict[temp[0]] = temp[1:]

    #region create box tree
    #region step5: tree construction
    treeFile = "".join(read(treePath))
    t = e.Tree(treeFile)
    t.set_outgroup(t & "Reference")

    #set the tree style
    ts = e.TreeStyle()
    ts.show_leaf_name = True
    ts.show_branch_length = True
    ts.scale = 2000  #pixel per branch length unit
    ts.branch_vertical_margin = 15  #pixel between branches
    style2 = e.NodeStyle()
    style2["fgcolor"] = "#000000"
    style2["shape"] = "circle"
    style2["vt_line_color"] = "#0000aa"
    style2["hz_line_color"] = "#0000aa"
    style2["vt_line_width"] = 2
    style2["hz_line_width"] = 2
    style2["vt_line_type"] = 0  # 0 solid, 1 dashed, 2 dotted
    style2["hz_line_type"] = 0
    for n in t.traverse():
        n.set_style(style2)

    #find the plasmid origins
    plasmidIncs = {}
    for key in metadata:
        for plasmid in metadata[key]['plasmids']:
            for inc in plasmid['PlasmidRepType'].split(","):
                if (inc.lower().find("inc") > -1):
                    if not (inc in plasmidIncs):
                        plasmidIncs[inc] = [metadata[key]['ID']]
                    else:
                        if metadata[key]['ID'] not in plasmidIncs[inc]:
                            plasmidIncs[inc].append(metadata[key]['ID'])
    #plasmidIncs = sorted(plasmidIncs)
    for n in t.traverse():  #loop through the nodes of a tree
        if (n.is_leaf() and n.name == "Reference"):
            #if it's the reference branch, populate the faces with column headers
            index = 0

            if len(sensitivePath) > 0:  #sensitive metadata @ chris
                for sensitive_data_column in sensitive_meta_data.get_columns():
                    (t & "Reference").add_face(addFace(sensitive_data_column),
                                               index, "aligned")
                    index = index + 1

            (t & "Reference").add_face(addFace("SampleID"), index, "aligned")
            index = index + 1
            (t & "Reference").add_face(addFace("New?"), index, "aligned")
            index = index + 1
            for i in range(
                    len(plasmidIncs)
            ):  #this loop adds the columns (aka the incs) to the reference node
                (t & "Reference").add_face(
                    addFace(list(plasmidIncs.keys())[i]), i + index, "aligned")
            index = index + len(plasmidIncs)
            (t & "Reference").add_face(addFace("MLSTScheme"), index, "aligned")
            index = index + 1
            (t & "Reference").add_face(addFace("Sequence Type"), index,
                                       "aligned")
            index = index + 1
            (t & "Reference").add_face(addFace("Carbapenamases"), index,
                                       "aligned")
            index = index + 1
            (t & "Reference").add_face(addFace("Plasmid Best Match"), index,
                                       "aligned")
            index = index + 1
            (t & "Reference").add_face(addFace("Best Match Identity"), index,
                                       "aligned")
            index = index + 1
            for i in range(len(
                    distanceDict[list(distanceDict.keys())
                                 [0]])):  #this loop adds the distance matrix
                (t & "Reference").add_face(
                    addFace(distanceDict[list(distanceDict.keys())[0]][i]),
                    index + i, "aligned")
            index = index + len(distanceDict[list(distanceDict.keys())[0]])
        elif (n.is_leaf() and not n.name == "Reference"):
            #not reference branches, populate with metadata
            index = 0

            if (n.name.replace(".fa", "") in metadata.keys()):
                mData = metadata[n.name.replace(".fa", "")]
            else:
                mData = metadata["na"]
            n.add_face(addFace(mData.ID), index, "aligned")
            index = index + 1
            if (mData['new']):  #new column
                face = e.RectFace(
                    30, 30, "green",
                    "green")  # TextFace("Y",fsize=10,tight_text=True)
                face.border.margin = 5
                face.margin_right = 5
                face.margin_left = 5
                face.vt_align = 1
                face.ht_align = 1
                n.add_face(face, index, "aligned")
            index = index + 1
            for incs in plasmidIncs:  #this loop adds presence/absence to the sample nodes
                if (n.name.replace(".fa", "") in plasmidIncs[incs]):
                    face = e.RectFace(
                        30, 30, "black",
                        "black")  # TextFace("Y",fsize=10,tight_text=True)
                    face.border.margin = 5
                    face.margin_right = 5
                    face.margin_left = 5
                    face.vt_align = 1
                    face.ht_align = 1
                    n.add_face(face,
                               list(plasmidIncs.keys()).index(incs) + index,
                               "aligned")
            index = index + len(plasmidIncs)
            n.add_face(addFace(mData['MLSTSpecies']), index, "aligned")
            index = index + 1
            n.add_face(addFace(mData['SequenceType']), index, "aligned")
            index = index + 1
            n.add_face(addFace(mData['CarbapenemResistanceGenes']), index,
                       "aligned")
            index = index + 1
            n.add_face(addFace(mData['plasmidBestMatch']), index, "aligned")
            index = index + 1
            n.add_face(addFace(mData['plasmididentity']), index, "aligned")
            index = index + 1
            for i in range(len(
                    distanceDict[list(distanceDict.keys())
                                 [0]])):  #this loop adds distance matrix
                if (n.name in distanceDict
                    ):  #make sure the column is in the distance matrix
                    n.add_face(addFace(list(distanceDict[n.name])[i]),
                               index + i, "aligned")

    t.render(outputFile, w=5000, units="mm",
             tree_style=ts)  #save it as a png, pdf, svg or an phyloxml
Code Example #15
logging.basicConfig(  # opening call reconstructed; the original snippet is truncated here
    format='%(message)s',
    level=logging.DEBUG,
)

# Adds handlers for logging to console and to file.
logging.root.handlers = [
    logging.StreamHandler(sys.stdout),
    handlers.RotatingFileHandler(log_path,
                                 maxBytes=(1000000 * 5))  # 5Mb rollover
]

structlog.configure_once(
    processors=[
        structlog.stdlib.add_logger_name,  # Takes basic config's log name
        structlog.stdlib.add_log_level,  # From basic config
        structlog.processors.TimeStamper(
            fmt='%Y-%m-%d %H:%M:%S',
            utc=False),  # Adds timestamp, very cool (local time)
        structlog.processors.StackInfoRenderer(),
        structlog.processors.format_exc_info,
        structlog.processors.UnicodeDecoder(),
        _custom_processor  # Replace with structlog.dev.ConsoleRenderer() if you don't like the custom one
    ],
    context_class=dict,
    logger_factory=structlog.stdlib.LoggerFactory(),
    wrapper_class=structlog.stdlib.BoundLogger,  # type: ignore
    cache_logger_on_first_use=True,
)

utg.main_log = get_logger()
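
_custom_processor is project-specific. Since it sits last in the chain, it acts as the renderer and must return a string; a minimal sketch of such a processor (an assumption, not the project's implementation):

def _custom_processor(logger, method_name, event_dict):
    # The final processor in the chain receives the event dict and
    # returns the rendered log line.
    timestamp = event_dict.pop("timestamp", "")
    level = event_dict.pop("level", "")
    event = event_dict.pop("event", "")
    return "{} [{}] {} {}".format(timestamp, level.upper(), event, event_dict)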
Code Example #16
File: settings.py Project: vicelikedust/authentik
# https://docs.djangoproject.com/en/2.1/howto/static-files/

STATIC_URL = "/static/"
MEDIA_URL = "/media/"

LOG_LEVEL = CONFIG.y("log_level").upper()

structlog.configure_once(
    processors=[
        structlog.stdlib.add_log_level,
        structlog.stdlib.add_logger_name,
        structlog.threadlocal.merge_threadlocal_context,
        add_process_id,
        structlog_add_request_id,
        structlog.stdlib.PositionalArgumentsFormatter(),
        structlog.processors.TimeStamper(fmt="iso", utc=False),
        structlog.processors.StackInfoRenderer(),
        structlog.processors.format_exc_info,
        structlog.stdlib.ProcessorFormatter.wrap_for_formatter,
    ],
    logger_factory=structlog.stdlib.LoggerFactory(),
    wrapper_class=structlog.make_filtering_bound_logger(
        getattr(logging, LOG_LEVEL, logging.WARNING)),
    cache_logger_on_first_use=True,
)

LOG_PRE_CHAIN = [
    # Add the log level and a timestamp to the event_dict if the log entry
    # is not from structlog.
    structlog.stdlib.add_log_level,
    structlog.stdlib.add_logger_name,
    structlog.processors.TimeStamper(),
]
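
The snippet ends here; following the same pattern as Code Examples #1 and #5, a pre-chain like this is presumably handed to a structlog.stdlib.ProcessorFormatter as foreign_pre_chain. A sketch of that wiring (an assumption, not authentik's actual settings):

formatter = structlog.stdlib.ProcessorFormatter(
    processor=structlog.processors.JSONRenderer(),  # renderer choice is a guess
    foreign_pre_chain=LOG_PRE_CHAIN,
)
handler = logging.StreamHandler()
handler.setFormatter(formatter)
logging.getLogger().handlers = [handler]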
Code Example #17
import logging
import structlog
import sys
log_format = "%(filename)s:%(lineno)s %(funcName)10s %(message)s"

logging.basicConfig(level='DEBUG', stream=sys.stdout, format=log_format)

chain = [
    structlog.stdlib.filter_by_level, structlog.stdlib.add_log_level,
    structlog.processors.StackInfoRenderer(),
    structlog.processors.format_exc_info,
    structlog.processors.JSONRenderer()
]

structlog.configure_once(
    processors=chain,
    context_class=dict,
    logger_factory=structlog.stdlib.LoggerFactory(),
    wrapper_class=structlog.stdlib.BoundLogger,
    cache_logger_on_first_use=True,
)

# logger = logging.getLogger()
logger = structlog.get_logger()

if __name__ == "__main__":
    logger.warning("strat")
    logger.warning("end")
    logger.error("done")
    logger.info("info")
Code Example #18
def _control_logging(
    dev_mode: bool,
    settings: Union[Development, Production],
    log_file: Optional[Path] = None,
):

    level = settings.LOG_LEVEL
    formatter = settings.LOG_FORMAT
    dest = settings.LOG_DESTINATION

    if formatter == LogFormatter.JSON.value:
        fmt = {
            "()": structlog.stdlib.ProcessorFormatter,
            "processor": structlog.processors.JSONRenderer(),
            "foreign_pre_chain": COMMON_CHAIN,
        }
    elif formatter == LogFormatter.COLOR.value:

        fmt = {
            "()": structlog.stdlib.ProcessorFormatter,
            "processor": structlog.dev.ConsoleRenderer(
                colors=dev_mode and COLORAMA_INSTALLED
            ),
            "foreign_pre_chain": COMMON_CHAIN,
        }

    else:
        raise NotImplementedError(
            "Pydantic shouldn't allow this."
        )  # pragma: no cover

    if dest == LogDest.CONSOLE.value:
        hndler = {
            "level": level,
            "class": "logging.StreamHandler",
            "formatter": "default",
        }
    elif dest == LogDest.FILE.value:

        if not log_file:
            raise NotImplementedError("`log_file` must be specified")

        hndler = {
            "level": level,
            "class": "logging.handlers.RotatingFileHandler",
            "filename": str(log_file),
            "formatter": "default",
            "maxBytes": 10e6,
            "backupCount": 100,
        }
        log_file.parent.mkdir(parents=True, exist_ok=True)
    else:
        raise NotImplementedError(
            "Pydantic shouldn't allow this."
        )  # pragma: no cover

    logging.config.dictConfig(
        {
            "version": 1,
            "disable_existing_loggers": False,
            "formatters": {"default": fmt},
            "handlers": {"default": hndler},
            "loggers": {
                "": {
                    "handlers": ["default"],
                    "level": level,
                    "propagate": True,
                }
            },
        }
    )
    structlog.configure_once(
        processors=[
            structlog.stdlib.filter_by_level,
            *COMMON_CHAIN,
            structlog.stdlib.ProcessorFormatter.wrap_for_formatter,
        ],
        context_class=dict,
        logger_factory=structlog.stdlib.LoggerFactory(),
        wrapper_class=structlog.stdlib.BoundLogger,
        cache_logger_on_first_use=True,
    )
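
This example and the next both assume a module-level COMMON_CHAIN of shared processors, plus Development/Production settings objects and LogFormatter/LogDest enums. A plausible definition of the chain (an assumption, not the project's actual list):

COMMON_CHAIN = [
    structlog.stdlib.add_logger_name,
    structlog.stdlib.add_log_level,
    structlog.processors.TimeStamper(fmt="iso", utc=True),
]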
Code Example #19
File: loggin.py Project: pwoolvett/petri
def _control_logging(
    level: LogLevel,
    dest: LogDest,
    formatter: LogFormatter,
    log_file: Path,
    dev_mode: bool,
):

    if formatter == LogFormatter.JSON:
        fmt = {
            "()": structlog.stdlib.ProcessorFormatter,
            "processor": structlog.processors.JSONRenderer(),
            "foreign_pre_chain": COMMON_CHAIN,
        }
    elif formatter == LogFormatter.COLOR:
        fmt = {
            "()": structlog.stdlib.ProcessorFormatter,
            "processor": structlog.dev.ConsoleRenderer(
                colors=dev_mode and COLORAMA_INSTALLED
            ),
            "foreign_pre_chain": COMMON_CHAIN,
        }
    else:
        raise NotImplementedError(  # pragma: no cover
            "Pydantic shouldn't allow this."
        )

    if dest == LogDest.CONSOLE:
        hndler = {
            "level": level,
            "class": "logging.StreamHandler",
            "formatter": "default",
        }
    elif dest == LogDest.FILE:
        hndler = {
            "level": level,
            "class": "logging.handlers.RotatingFileHandler",
            "filename": str(log_file),
            "formatter": "default",
            "maxBytes": 10e6,
            "backupCount": 100,
        }
        log_file.parent.mkdir(parents=True, exist_ok=True)
    else:
        raise NotImplementedError(  # pragma: no cover
            "Pydantic shouldn't allow this."
        )

    logging.config.dictConfig(
        {
            "version": 1,
            "disable_existing_loggers": False,
            "formatters": {"default": fmt},
            "handlers": {"default": hndler},
            "loggers": {
                "": {
                    "handlers": ["default"],
                    "level": level.value,
                    "propagate": True,
                }
            },
        }
    )
    structlog.configure_once(
        processors=[
            structlog.stdlib.filter_by_level,
            *COMMON_CHAIN,
            structlog.stdlib.ProcessorFormatter.wrap_for_formatter,
        ],
        context_class=dict,
        logger_factory=structlog.stdlib.LoggerFactory(),
        wrapper_class=structlog.stdlib.BoundLogger,
        cache_logger_on_first_use=True,
    )
Code Example #20
File: test_output.py Project: iffy/ppo
from unittest import TestCase

import six
from io import StringIO

import structlog
structlog.configure_once(logger_factory=structlog.twisted.LoggerFactory())


from ppo.output import giganticGrep


class giganticGrepTest(TestCase):

    def assertValue(self, indata, expected_output, message=None):
        """
        Assert that the given input results in the expected_output.
        """
        outstream = StringIO()
        giganticGrep(indata, outstream)
        value = outstream.getvalue()
        self.assertEqual(value, expected_output, message)

    def assertLines(self, indata, expected_output, message=None):
        """
        Assert that the given input results in the expected_output lines.
        """
        outstream = StringIO()
        giganticGrep(indata, outstream)
        value = outstream.getvalue()
        actual_lines = value.split('\n')
Code Example #21
def setup_logging():
    logging.basicConfig(
        level=logging.DEBUG,
        format='%(levelname)s[%(threadName)s] %(message)s',
    )

    logging.getLogger('urllib3').setLevel(logging.CRITICAL)
    logging.getLogger('botocore').setLevel(logging.CRITICAL)

    structlog.configure_once(
        context_class=structlog.threadlocal.wrap_dict(dict),
        logger_factory=structlog.stdlib.LoggerFactory(),
        wrapper_class=structlog.stdlib.BoundLogger,
        processors=[
            structlog.stdlib.filter_by_level, structlog.stdlib.add_logger_name,
            structlog.stdlib.add_log_level,
            structlog.stdlib.PositionalArgumentsFormatter(),
            structlog.processors.TimeStamper(fmt="iso"),
            structlog.processors.StackInfoRenderer(),
            structlog.processors.format_exc_info,
            structlog.processors.UnicodeDecoder(),
            structlog.stdlib.render_to_log_kwargs
        ])
    level_map = {
        'CRITICAL': 50,
        'ERROR': 40,
        'WARNING': 30,
        'INFO': 20,
        'DEBUG': 10,
        'TRACE': 5,
        'NOTSET': 0,
    }
    dict_config = {
        'version': 1,
        'disable_existing_loggers': False,
        'formatters': {
            'json': {
                'format':
                '%(message)s %(threadName)s %(lineno)d %(pathname)s ',
                'class': 'pythonjsonlogger.jsonlogger.JsonFormatter'
            }
        },
        'handlers': {
            'json': {
                'class': 'logging.StreamHandler',
                'formatter': 'json'
            },
            'file': {
                'class': 'logging.handlers.RotatingFileHandler',
                'formatter': 'json',
                'filename': '/var/log/qsp-protocol/qsp-protocol.log',
                'mode': 'a',
                'maxBytes': 10485760,
                'backupCount': 5
            }
        },
        'loggers': {
            '': {
                'handlers': ['json', 'file'],
                'level': level_map["DEBUG"],
            }
        }
    }
    logging.config.dictConfig(dict_config)
    setup_trace_logging()
Code Example #22
def configure_logging(processor: Any, level: int) -> None:
    level_str = logging.getLevelName(level)
    timestamper = structlog.processors.TimeStamper(fmt="iso")

    logging.config.dictConfig({
        "version": 1,
        "disable_existing_loggers": False,
        "formatters": {
            "formater": {
                "()":
                structlog.stdlib.ProcessorFormatter,
                "processor":
                processor,
                # Adjust log entries that are not from structlog
                "foreign_pre_chain": [
                    structlog.stdlib.add_log_level,
                    structlog.stdlib.add_logger_name,
                    timestamper,
                ],
            },
        },
        "handlers": {
            "default": {
                "level": level_str,
                "class": "logging.StreamHandler",
                "formatter": "formater",
            },
        },
        "loggers": {
            "": {
                "handlers": ["default"],
                "level": level_str,
                "propagate": True,
            },
            "sqlalchemy": {
                "handlers": ["default"],
                "level": level_str,
            },
            "PyQt5": {
                "handlers": ["default"],
                "level": level_str,
            },
        },
    })

    structlog.configure_once(
        processors=[
            merge_contextvars,
            structlog.stdlib.add_log_level,
            structlog.stdlib.add_logger_name,
            structlog.stdlib.PositionalArgumentsFormatter(),
            timestamper,
            structlog.processors.StackInfoRenderer(),
            structlog.processors.format_exc_info,
            structlog.processors.UnicodeDecoder(),
            structlog.stdlib.ProcessorFormatter.wrap_for_formatter,
        ],
        context_class=dict,
        logger_factory=structlog.stdlib.LoggerFactory(),
        wrapper_class=structlog.make_filtering_bound_logger(level),
        cache_logger_on_first_use=True,
    )
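
A usage sketch; the processor argument selects the final renderer, so both console and JSON output work with the same configuration (merge_contextvars is presumably structlog.contextvars.merge_contextvars):

import logging
configure_logging(structlog.dev.ConsoleRenderer(), logging.INFO)
structlog.get_logger("app").info("window opened", widget="main")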
Code Example #23
#
#     def debug(self, event, **kw):
#         return self._proxy_to_logger('debug', event, **kw)
#
#     def info(self, event, **kw):
#         return self._proxy_to_logger('info', event, **kw)

structlog.configure_once(
    processors=[
        structlog.stdlib.filter_by_level,
        structlog.stdlib.add_logger_name,
        structlog.stdlib.add_log_level,
        structlog.stdlib.PositionalArgumentsFormatter(),
        structlog.processors.TimeStamper(fmt="iso"),
        structlog.processors.StackInfoRenderer(),
        structlog.processors.format_exc_info,
        structlog.processors.JSONRenderer(),
    ],
    context_class=dict,
    logger_factory=structlog.stdlib.LoggerFactory(),
    wrapper_class=structlog.stdlib.BoundLogger,
    # wrapper_class=TraceableLogger,
    cache_logger_on_first_use=True,
)


class Logger:
    def __init__(self):
        pass

    @staticmethod
Code Example #24
    parser.add_argument("-o",
                        "--outdir",
                        dest="outdir",
                        default='./',
                        help="absolute path to output folder")
    parser.add_argument('-c',
                        '--config',
                        dest='config_file',
                        default=resource_filename('data', 'config.ini'),
                        help='Config File',
                        required=False)
    args = parser.parse_args()

    logging.basicConfig(
        format="%(message)s",
        stream=sys.stdout,
        level=logging.DEBUG,
    )

    structlog.configure_once(
        processors=[
            structlog.stdlib.add_log_level,
            structlog.processors.JSONRenderer()
        ],
        logger_factory=structlog.stdlib.LoggerFactory(),
        wrapper_class=structlog.stdlib.BoundLogger,
        context_class=structlog.threadlocal.wrap_dict(dict),
    )

    main(args)
Code Example #25
#         return self._proxy_to_logger('error', event, **kw)
#
#     def debug(self, event, **kw):
#         return self._proxy_to_logger('debug', event, **kw)
#
#     def info(self, event, **kw):
#         return self._proxy_to_logger('info', event, **kw)

structlog.configure_once(
    processors=[
        structlog.stdlib.filter_by_level, structlog.stdlib.add_logger_name,
        structlog.stdlib.add_log_level,
        structlog.stdlib.PositionalArgumentsFormatter(),
        structlog.processors.TimeStamper(fmt='iso'),
        structlog.processors.StackInfoRenderer(),
        structlog.processors.format_exc_info,
        structlog.processors.JSONRenderer()
    ],
    context_class=dict,
    logger_factory=structlog.stdlib.LoggerFactory(),
    wrapper_class=structlog.stdlib.BoundLogger,
    # wrapper_class=TraceableLogger,
    cache_logger_on_first_use=True,
)


class Logger:
    def __init__(self):
        pass

    @staticmethod
    def get_logger(name):
Code Example #26
def configure_logger(log_to_console=True,
                     color_console=True,
                     log_to_file=True,
                     filename=None):
    pre_chain = []

    # getattr-guard so this also works on structlog versions without the
    # contextvars module; a bare `if structlog.contextvars:` is always truthy.
    if getattr(structlog, "contextvars", None):
        pre_chain += [
            structlog.contextvars.merge_contextvars,
        ]

    pre_chain += [
        structlog.stdlib.add_logger_name,
        structlog.stdlib.add_log_level,
        _add_thread_info,
        structlog.processors.TimeStamper(fmt="iso", utc=True),
        _order_keys,
    ]

    structlog.configure_once(
        processors=pre_chain + [
            structlog.stdlib.filter_by_level,
            structlog.stdlib.PositionalArgumentsFormatter(),
            structlog.processors.StackInfoRenderer(),
            structlog.processors.format_exc_info,
            structlog.processors.UnicodeDecoder(),
            structlog.stdlib.ProcessorFormatter.wrap_for_formatter,
        ],
        wrapper_class=structlog.stdlib.BoundLogger,
        context_class=dict,
        logger_factory=structlog.stdlib.LoggerFactory(),
        cache_logger_on_first_use=True,
    )

    handlers = {}
    if log_to_console:
        handlers["console"] = {
            "()": logging.StreamHandler,
            "formatter": "console"
        }
    if log_to_file and filename:
        handlers["file"] = {
            "()": logging.handlers.RotatingFileHandler,
            "filename": filename,
            "formatter": "json",
            "maxBytes": 25000000,
            "backupCount": 5,
        }

    logging_config = {
        "version": 1,
        "disable_existing_loggers": False,
        "formatters": {
            "console": {
                "()": structlog.stdlib.ProcessorFormatter,
                "processor":
                structlog.dev.ConsoleRenderer(colors=color_console),
                "foreign_pre_chain": pre_chain,
            },
            "json": {
                "()": structlog.stdlib.ProcessorFormatter,
                "processor": structlog.processors.JSONRenderer(),
                "foreign_pre_chain": pre_chain,
            },
        },
        "handlers": handlers,
        "loggers": {
            "": {
                "propagate": True,
                "handlers": list(handlers.keys()),
                "level": "DEBUG"
            }
        },
    }
    logging.config.dictConfig(logging_config)
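
A usage sketch; console output gets the ConsoleRenderer while the rotating file gets JSON (_add_thread_info and _order_keys are project-specific processors assumed to be defined in the same module):

configure_logger(log_to_console=True, color_console=False, log_to_file=True,
                 filename="service.log")
structlog.get_logger("svc").info("startup complete", version="1.2.3")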