def setup_logzero(logger, logfile: str = None, level=None, cronlevel=None):
    """Configure *logger* with logzero-style colored formatting.

    Falls back to the plain logging defaults when logzero is not installed.

    Args:
        logger: the ``logging.Logger`` to configure (mutated in place).
        logfile: optional path; when given, a ``FileHandler`` is attached.
        level: log level used when stdout is a TTY (interactive run).
        cronlevel: log level used when stdout is not a TTY (e.g. cron);
            defaults to ``level``.
    """
    import sys
    import logging

    if cronlevel is None:
        cronlevel = level

    stream_fmt = None
    file_fmt = None
    try:
        from logzero import LogFormatter  # type: ignore
        FMT = '%(color)s[%(levelname)s %(name)s %(asctime)s %(module)s:%(lineno)d]%(end_color)s %(message)s'
        stream_fmt = LogFormatter(fmt=FMT, color=True)
        file_fmt = LogFormatter(fmt=FMT, color=False)
    except ImportError:
        # logging.warn() is deprecated in favor of logging.warning()
        logging.warning("logzero is not available! Fallback to default")

    # ugh.. https://stackoverflow.com/a/21127526/706389
    logger.propagate = False

    # interactive runs use `level`, non-TTY (cron) runs use `cronlevel`
    lev = level if sys.stdout.isatty() else cronlevel
    if lev is not None:
        logger.setLevel(lev)

    # TODO datefmt='%Y-%m-%d %H:%M:%S'
    shandler = logging.StreamHandler()
    if stream_fmt is not None:
        shandler.setFormatter(stream_fmt)
    logger.addHandler(shandler)

    if logfile is not None:
        fhandler = logging.FileHandler(logfile)  # TODO rewrite? not sure ...
        if file_fmt is not None:
            fhandler.setFormatter(file_fmt)
        logger.addHandler(fhandler)
def configure_logger(verbose: bool = False, log_format: str = "string",
                     log_file: str = None, logger_name: str = "chaostoolkit",
                     context_id: str = None):
    """
    Configure the chaostoolkit logger.

    By default logs as strings to stdout and the given file. When `log_format`
    is `"json"`, records are set to the console as JSON strings but remain
    as strings in the log file. The rationale is that the log file is mostly
    for grepping purpose while records written to the console can be
    forwarded out of band to anywhere else.
    """
    log_level = logging.INFO

    # we define colors ourselves as critical is missing in default ones
    colors = {
        logging.DEBUG: ForegroundColors.CYAN,
        logging.INFO: ForegroundColors.GREEN,
        logging.WARNING: ForegroundColors.YELLOW,
        logging.ERROR: ForegroundColors.RED,
        logging.CRITICAL: ForegroundColors.RED
    }
    fmt = "%(color)s[%(asctime)s %(levelname)s]%(end_color)s %(message)s"
    if verbose:
        # verbose mode also shows the source module and line number
        log_level = logging.DEBUG
        fmt = "%(color)s[%(asctime)s %(levelname)s] "\
              "[%(module)s:%(lineno)d]%(end_color)s %(message)s"

    formatter = LogFormatter(
        fmt=fmt, datefmt="%Y-%m-%d %H:%M:%S", colors=colors)
    if log_format == 'json':
        # JSON console output; the context id, when present, is prepended
        # so it appears first in every record
        fmt = "(process) (asctime) (levelname) (module) (lineno) (message)"
        if context_id:
            fmt = "(context_id) {}".format(fmt)
        formatter = jsonlogger.JsonFormatter(
            fmt, json_default=encoder, timestamp=True)

    # sadly, no other way to specify the name of the default logger publicly
    LOGZERO_DEFAULT_LOGGER = logger_name
    # NOTE(review): the assignment above rebinds a *local* name only and
    # cannot influence logzero's module-level default-logger name; it was
    # presumably meant to be `logzero.LOGZERO_DEFAULT_LOGGER = logger_name`
    # -- verify against the logzero API before changing.
    logger = setup_default_logger(level=log_level, formatter=formatter)
    if context_id:
        # stamp every record of this run with the execution context id
        logger.addFilter(ChaosToolkitContextFilter(logger_name, context_id))

    if log_file:
        # always everything as strings in the log file
        logger.setLevel(logging.DEBUG)
        fmt = "%(color)s[%(asctime)s %(levelname)s] "\
              "[%(module)s:%(lineno)d]%(end_color)s %(message)s"
        formatter = LogFormatter(
            fmt=fmt, datefmt="%Y-%m-%d %H:%M:%S", colors=colors)
        logzero.logfile(
            log_file, formatter=formatter, mode='a', loglevel=logging.DEBUG)
def _get_logger(self, name):
    """Get a logger for *name* that writes to ``devnull`` only, with
    stderr output disabled."""
    line_format = '[%(levelname)1.1s %(asctime)s] %(name)s: %(message)s'
    return setup_logger(name,
                        logfile=devnull,
                        formatter=LogFormatter(fmt=line_format),
                        disableStderrLogger=True)
def __init__(self):
    """Initialize the logger format based on system platform."""
    # Set the different formats based on user's platform.  The original
    # win32/linux-only branches left _archive_format unset on any other
    # platform (e.g. 'darwin'), which would raise AttributeError on first
    # use; fall back to the linux-style format instead.
    if sys.platform == 'win32':
        # Windows filenames cannot contain ':'
        self._archive_format = '%m-%d-%Y_%I-%M-%p'
    else:
        self._archive_format = '%m-%d-%Y@%I:%M:%S-%p'

    self._date_format = '%b-%d-%Y at %I:%M:%S %p'  # Used to add date
    self._log_format = ("%(color)s[%(levelname)s | %(name)s] [%(asctime)s | "
                        "%(module)s - line %(lineno)d]:%(end_color)s %(message)s")
    self._formatter = LogFormatter(fmt=self._log_format,
                                   datefmt=self._date_format)
    # expose the logging module on the instance for callers' convenience
    self.logging = logging
def set_logfile(self, filename, max_bytes=0, backup_count=0):
    """
    Setup logging to a (rotating) logfile.

    Args:
        filename (str): Logfile. If filename is None, disable file logging
        max_bytes (int): Maximum number of bytes per logfile. If used together
            with backup_count, logfile will be rotated when it reaches this
            amount of bytes.
        backup_count (int): Number of rotated logfiles to keep
    """
    _logger = logging.getLogger("neo-python")
    if not filename:
        # Disable file logging: detach and release any existing handler.
        # (The original test was inverted -- `not filename and not
        # self.rotating_filehandler` only fired when there was nothing to
        # remove, and a falsy filename with a live handler fell through to
        # RotatingFileHandler(None), which raises.)
        if self.rotating_filehandler:
            _logger.removeHandler(self.rotating_filehandler)
            self.rotating_filehandler.close()
            self.rotating_filehandler = None
    else:
        self.rotating_filehandler = RotatingFileHandler(
            filename, mode='a', maxBytes=max_bytes,
            backupCount=backup_count, encoding=None)
        # the file handler accepts everything; filtering happens upstream
        self.rotating_filehandler.setLevel(logging.DEBUG)
        # no ANSI color codes in the file
        self.rotating_filehandler.setFormatter(LogFormatter(color=False))
        _logger.addHandler(self.rotating_filehandler)
def getLogger(self, component_name: str = None) -> logging.Logger:
    """
    Get the logger instance matching ``component_name`` or create a new one
    if non-existent.

    Args:
        component_name: a neo-python component name. e.g. network, vm, db

    Returns:
        a logger for the specified component.
    """
    name = self.root + (component_name or 'generic')

    # fast path: hand back the cached logger when we built one before
    cached = self.loggers.get(name)
    if cached:
        return cached

    # first request for this component: build, configure and cache it
    fresh = logging.getLogger(name)
    console = logging.StreamHandler()
    console.setFormatter(LogFormatter())
    console.setLevel(logging.INFO)
    fresh.addHandler(console)
    fresh.setLevel(logging.DEBUG)
    self.loggers[name] = fresh
    return fresh
def create_api():
    """Factory function to build a server instance."""
    conf = Config()

    # wire up logging before the middleware stack is assembled
    loglevel(conf.log_level)
    logfile(conf.log_file, maxBytes=conf.log_bytes)
    formatter(LogFormatter(fmt=conf.log_format, datefmt=conf.log_date))

    stack = [
        LogComponent(),
        TokenHandler(conf.secret_key, conf.algorithm, conf.duration),
    ]
    app = falcon.API(middleware=stack)
    # cookies are secure unless explicitly running in dev mode
    app.resp_options.secure_cookies_by_default = not conf.dev_mode
    app.add_route('/auth', AuthHandler())
    app.add_route('/users', UsersHandler())
    return app
def setup_mylogger(name=None, logfile=None, formatter=None):
    """Create a logger via logzero with the module's default format.

    Args:
        name: logger name (logzero's default when None).
        logfile: optional path of a rotating log file.
        formatter: log record formatter; when None a fresh LogFormatter is
            built from the module-level ``_DATE_FORMATTER``/``_DEFAULT_FORMAT``.

    Returns:
        the logger configured by ``setup_logger``.
    """
    if formatter is None:
        # Build per call rather than in the signature: a default argument
        # would be constructed once at import time and shared by every
        # caller that does not pass its own formatter.
        formatter = LogFormatter(datefmt=_DATE_FORMATTER, fmt=_DEFAULT_FORMAT)
    # 30 MB rotation threshold (was the float 3e7; byte counts are ints)
    return setup_logger(name=name, logfile=logfile, formatter=formatter,
                        maxBytes=30000000)
""" import time import signal from logzero import LogFormatter, setup_logger from rfid_music_player.core import settings from rfid_music_player.core.eventhub import ee, EVENT_RFID_TAG_DETECTED, EVENT_RFID_TAG_REMOVED from rfid_music_player.components.basecomponent import BaseComponent if settings.IS_RASPBERRY: import RPi.GPIO as GPIO import MFRC522 log_formatter = LogFormatter( fmt= '%(color)s[%(levelname)1.1s %(asctime)s %(name)s:%(lineno)d]%(end_color)s %(message)s' ) logger = setup_logger("rfid-mfrc522", logfile=settings.LOGFILE, formatter=log_formatter, level=settings.LOGLEVEL) # Timeout for sending the last detected tag a second time SEND_EVENT_TAG_DETECTED_AGAIN_TIMEOUT_SEC = 10 SEND_EVENT_TAG_REMOVED = True class RFIDReader(BaseComponent): MIFAREReader = None tag_last_uid = None tag_last_timestamp = 0
from logzero import setup_logger, LogFormatter

from config import spider_config

# Colored level/timestamp/source-location prefix for every record.
log_format = '%(color)s[%(levelname)s %(asctime)s %(module)s:%(lineno)d]%(end_color)s %(message)s'
formatter = LogFormatter(fmt=log_format, datefmt='%Y-%m-%d %H:%M:%S')

# Module-wide spider logger: rotating file taken from spider_config
# (~1 GB per file, 3 backups kept).
logger = setup_logger(name="vcode-spider",
                      logfile=spider_config.LOG_FILE,
                      level=spider_config.LOG_LEVEL,
                      maxBytes=1000000000,
                      backupCount=3,
                      formatter=formatter,
                      )
pip3 install tqdm logzero requests ''') exit() __author__ = 'cloudwindy' # 基本单位 kb = 1024 mb = 1024 * 1024 gb = 1024 * 1024 * 1024 tb = 1024 * 1024 * 1024 * 1024 CHUNK_SIZE = 16 * kb # 日志格式 LOG_FORMAT = LogFormatter(fmt='%(color)s[%(levelname)1.1s %(asctime)s] %(name)s:%(end_color)s %(message)s') RECORD_FORMAT = LogFormatter(fmt='[%(levelname)1.1s %(asctime)s] %(name)s: %(message)s') class Application: def main(self): '''主程序''' parser = ArgumentParser(description = 'Yande.re 爬虫') parser.add_argument('-v', '--version', action = 'version', version = 'Yande.re 爬虫 by %s' % __author__) parser.add_argument('-p', '--prefix', type = chdir, default = '.', help = '指定工作路径')
Copyright (C) 2016-2018 Nikolaos Kamarinakis ([email protected])
See License at nikolaskama.me (https://nikolaskama.me/onioffproject)
"""

import csv
import json
import os

import click
from logzero import LogFormatter, logger, setup_default_logger

from onioff import VERSION
from utils.tor import Onion

# Compact colored console format: single-letter level + message.
log_format = '%(color)s[%(levelname)1.1s] %(message)s%(end_color)s'
setup_default_logger(formatter=LogFormatter(fmt=log_format))

# Default knobs for the crawler / Tor connection.
TIMEOUT_DEFAULT = 20
WORKERS_DEFAULT = 5
SOCKS_PORT_DEFAULT = 7000


def print_banner():
    banner_vars = dict(blue='\33[94m', red='\033[91m', white='\33[97m',
                       yellow='\33[93m', green='\033[32m', end='\033[0m',
                       version=VERSION)
# logging / logzero were used below but never imported, which raises
# NameError as soon as this module is imported.
import logging

from datetime import datetime

from django.shortcuts import redirect, render
from django.http import HttpResponse, JsonResponse, Http404
from django.shortcuts import get_list_or_404
from django.utils import timezone
from django.contrib.auth import authenticate, login, logout
from django.contrib.auth.decorators import login_required
from django.contrib.auth.models import Group, User
from rest_framework import viewsets, permissions
from dwebsocket.decorators import accept_websocket, require_websocket
from logzero import LogFormatter, setup_logger

from .models import Device, Log
from .forms import DeviceForm

# Module-level logger using the stdlib default timestamp format.
formatter = LogFormatter(datefmt=logging.Formatter.default_time_format)
logger = setup_logger(name=__name__, level=logging.INFO, formatter=formatter)

# raspberrypi address
# HOST = ''
# POST = [21566, 21567]

# Devices currently connected over websocket, keyed per connection.
ONLINE_DEVICES = dict()


@login_required(login_url='login')
def index(request):
    """Render the dashboard with all devices and logs, newest log first."""
    device_list = Device.objects.all()
    LOGS = Log.objects.order_by('-time')
    # logs_10 = LOGS[:10]
    return render(request, 'index.html', locals())
def set_logger() -> None:
    """Install a process-aware colored format on the logzero default logger."""
    line_format = '%(color)s[%(levelname)1.1s process:%(process)d %(asctime)s %(module)s:%(lineno)d]%(end_color)s %(message)s'
    setup_default_logger(formatter=LogFormatter(fmt=line_format))
    'memory': 64,
    # Allow user process to fork
    # 'can_fork': False,
    # Limiting the maximum number of user processes in Linux is tricky.
    # http://unix.stackexchange.com/questions/55319/are-limits-conf-values-applied-on-a-per-process-basis
}

# Console verbosity follows the DEBUG flag.
if DEBUG:
    logzero.loglevel(logging.DEBUG)
else:
    logzero.loglevel(logging.WARNING)

# Colored prefix including module, function and line of the caller.
fmt = '%(color)s[%(asctime)s <%(module)s:%(funcName)s>:%(lineno)d] [%(levelname)s]%(end_color)s - %(message)s'
formatter = LogFormatter(color=True, datefmt='%Y%m%d %H:%M:%S', fmt=fmt)
logzero.formatter(formatter)
# Rotating file log: ~1 MB per file, 3 backups, warnings and above only.
logzero.logfile(filename='crazybox.log', maxBytes=1000000, backupCount=3, loglevel=logging.WARNING)

logger.debug("start crazybox")


def fuc():
    # smoke-test helper: just emits a debug record
    logger.debug("start fuc")


if __name__ == '__main__':
    print("中文测试 " + os.path.join(os.getcwd(), 'tmp'))
    fuc()
""" import sys import os import re import yaml import glob import argparse import subprocess from fnmatch import fnmatch # from collections import defaultdict # from datetime import datetime, timedelta from logzero import LogFormatter, setup_logger, logging _log_format = ("%(color)s[%(levelname)s | %(name)s]:%(end_color)s %(message)s") _formatter = LogFormatter(fmt=_log_format) logger = setup_logger(name="GIT-STATUS-CHECKER", level=logging.INFO, formatter=_formatter) def parse_args(argv=None): """Parse command line arguments. Args: argv: list of command line arguments, e.g. sys.argv (default). Returns: parser and parsed args namespace (two-tuple).
import numpy as np
import fastStructure
import parse_bed
import parse_str
import random
import getopt
import sys
import pdb
import warnings

from logzero import setup_logger, LogFormatter, logging

# Set up logging
# Verbose colored prefix with timestamp and the emitting module/line.
log_format = ("%(color)s[%(levelname)s | %(name)s] [%(asctime)s | "
              "%(module)s - line %(lineno)d]:%(end_color)s %(message)s")
date_format = '%b-%d-%Y at %I:%M:%S %p'
formatter = LogFormatter(fmt=log_format, datefmt=date_format)
# Console-only logger (logfile=None); DEBUG so everything is emitted.
log = setup_logger(name="structure", logfile=None, level=logging.DEBUG, formatter=formatter)

# ignore warnings with these expressions
warnings.filterwarnings(
    'ignore',
    '.*divide by zero.*',
)
warnings.filterwarnings(
    'ignore',
    '.*invalid value.*',
)