def _configure_logger():
    """Configure the custom logger for this script

    All info messages and higher will be shown on the console
    All messages from all priorities will be streamed to a log file
    """
    # NOTE(review): relies on module-level names `log_folder`, `ENABLE_COLOR`
    # and `ColoredFormatter` defined elsewhere in the file.
    if not os.path.exists(log_folder):
        os.makedirs(log_folder)

    global modlog
    modlog = logging.getLogger("pyjen")
    modlog.setLevel(logging.DEBUG)

    # Primary logger will write all messages to a log file
    log_file = os.path.join(log_folder, "run.log")
    # Start each run with a fresh log file.
    if os.path.exists(log_file):
        os.remove(log_file)
    file_logger = logging.FileHandler(log_file)
    file_logger_format = "%(asctime)s %(levelname)s:%(message)s"
    file_formatter = logging.Formatter(file_logger_format)
    file_logger.setFormatter(file_formatter)
    file_logger.setLevel(logging.DEBUG)
    modlog.addHandler(file_logger)

    # Secondary logger will show all 'info' class messages and below on the console
    console_logger = logging.StreamHandler()
    console_logger.setLevel(logging.INFO)
    if ENABLE_COLOR:
        console_formatter = ColoredFormatter()
    else:
        console_log_format = "%(asctime)s: (%(levelname)s) %(message)s"
        console_formatter = logging.Formatter(console_log_format)
    console_formatter.datefmt = "%H:%M"
    console_logger.setFormatter(console_formatter)
    modlog.addHandler(console_logger)
import logging # set up logging LOG_LEVEL = logging.DEBUG LOGFORMAT = "%(log_color)s[%(asctime)s] %(levelname)-8s%(reset)s | %(log_color)s%(message)s%(reset)s" from colorlog import ColoredFormatter logging.root.setLevel(LOG_LEVEL) formatter = ColoredFormatter(LOGFORMAT, log_colors={ 'DEBUG': 'cyan', 'INFO': 'green', 'WARNING': 'yellow', 'ERROR': 'red', 'CRITICAL': 'red,bg_white', }, datefmt='%Y-%m-%d %H:%M:%S') with open("positions.log", "w"): pass fh = logging.FileHandler('positions.log') fh.setLevel(LOG_LEVEL) fh.setFormatter(formatter) log = logging.getLogger('pythonConfig') log.setLevel(LOG_LEVEL) log.addHandler(fh)
def async_enable_logging(
    hass: core.HomeAssistant,
    verbose: bool = False,
    log_rotate_days: Optional[int] = None,
    log_file: Optional[str] = None,
    log_no_color: bool = False,
) -> None:
    """Set up the logging.

    This method must be run in the event loop.
    """
    fmt = "%(asctime)s %(levelname)s (%(threadName)s) [%(name)s] %(message)s"
    datefmt = "%Y-%m-%d %H:%M:%S"

    if not log_no_color:
        try:
            from colorlog import ColoredFormatter

            # basicConfig must be called after importing colorlog in order to
            # ensure that the handlers it sets up wraps the correct streams.
            logging.basicConfig(level=logging.INFO)

            colorfmt = f"%(log_color)s{fmt}%(reset)s"
            logging.getLogger().handlers[0].setFormatter(
                ColoredFormatter(
                    colorfmt,
                    datefmt=datefmt,
                    reset=True,
                    log_colors={
                        "DEBUG": "cyan",
                        "INFO": "green",
                        "WARNING": "yellow",
                        "ERROR": "red",
                        "CRITICAL": "red",
                    },
                ))
        except ImportError:
            # colorlog is optional; plain formatting is configured below.
            pass

    # If the above initialization failed for any reason, setup the default
    # formatting. If the above succeeds, this will result in a no-op.
    logging.basicConfig(format=fmt, datefmt=datefmt, level=logging.INFO)

    # Suppress overly verbose logs from libraries that aren't helpful
    logging.getLogger("requests").setLevel(logging.WARNING)
    logging.getLogger("urllib3").setLevel(logging.WARNING)
    logging.getLogger("aiohttp.access").setLevel(logging.WARNING)

    # Log errors to a file if we have write access to file or config dir
    if log_file is None:
        err_log_path = hass.config.path(ERROR_LOG_FILENAME)
    else:
        err_log_path = os.path.abspath(log_file)
    err_path_exists = os.path.isfile(err_log_path)
    err_dir = os.path.dirname(err_log_path)

    # Check if we can write to the error log if it exists or that
    # we can create files in the containing directory if not.
    if (err_path_exists and os.access(err_log_path, os.W_OK)) or (
            not err_path_exists and os.access(err_dir, os.W_OK)):
        if log_rotate_days:
            err_handler: logging.FileHandler = logging.handlers.TimedRotatingFileHandler(
                err_log_path, when="midnight", backupCount=log_rotate_days)
        else:
            # delay=True defers opening the file until the first record.
            err_handler = logging.FileHandler(err_log_path, mode="w", delay=True)

        err_handler.setLevel(logging.INFO if verbose else logging.WARNING)
        err_handler.setFormatter(logging.Formatter(fmt, datefmt=datefmt))

        # File writes are marshalled through AsyncHandler onto hass.loop.
        async_handler = AsyncHandler(hass.loop, err_handler)

        async def async_stop_async_handler(_: Any) -> None:
            """Cleanup async handler."""
            logging.getLogger("").removeHandler(async_handler)  # type: ignore
            await async_handler.async_close(blocking=True)

        hass.bus.async_listen_once(EVENT_HOMEASSISTANT_CLOSE,
                                   async_stop_async_handler)

        logger = logging.getLogger("")
        logger.addHandler(async_handler)  # type: ignore
        logger.setLevel(logging.INFO)

        # Save the log file location for access by other components.
        hass.data[DATA_LOGGING] = err_log_path
    else:
        _LOGGER.error("Unable to set up error log %s (access denied)",
                      err_log_path)
from coati.powerpoint import open_pptx, runpowerpoint import os import sys import logging from shutil import copyfile from colorlog import ColoredFormatter LOG_LEVEL = logging.DEBUG LOGFORMAT = "%(asctime)s - %(log_color)s%(message)s" logging.root.setLevel(LOG_LEVEL) formatter = ColoredFormatter(LOGFORMAT) stream = logging.StreamHandler() stream.setLevel(LOG_LEVEL) stream.setFormatter(formatter) log = logging.getLogger('pythonConfig') log.setLevel(LOG_LEVEL) log.addHandler(stream) this_dir = os.path.dirname(__file__) template_path = os.path.join(this_dir, 'templates/slide_template.txt') config_template_path = os.path.join(this_dir, 'templates/config_template.txt') init_template_path = os.path.join(this_dir, 'templates/init_template.txt') def _get_slides_shapes(ppt_path): pptapp = runpowerpoint() pptFile = open_pptx(pptapp, ppt_path) log.debug('Open Template successfully...') all_slide_shapes = [] for slide in pptFile.Slides:
def attach_to_log(level=logging.DEBUG,
                  handler=None,
                  loggers=None,
                  colors=True,
                  blacklist=None):
    """
    Attach a stream handler to all loggers.

    Parameters
    ------------
    level: logging level
    handler: log handler object
    loggers: list of loggers to attach to
             if None, will try to attach to all available
    colors: bool, if True try to use colorlog formatter
    blacklist: list of str, names of loggers NOT to attach to
               (defaults to a set of known-noisy loggers)
    """
    # Fix: avoid a shared mutable default argument -- build per call.
    if blacklist is None:
        blacklist = ['TerminalIPythonApp',
                     'PYREADLINE',
                     'pyembree',
                     'shapely.geos',
                     'shapely.speedups._speedups']

    # Plain formatter, used when colorlog is unavailable or colors=False.
    formatter = logging.Formatter(
        "[%(asctime)s] %(levelname)-7s (%(filename)s:%(lineno)3s) %(message)s",
        "%Y-%m-%d %H:%M:%S")
    if colors:
        try:
            from colorlog import ColoredFormatter
            formatter = ColoredFormatter(
                ("%(log_color)s%(levelname)-8s%(reset)s " +
                 "%(filename)17s:%(lineno)-4s %(blue)4s%(message)s"),
                datefmt=None,
                reset=True,
                log_colors={
                    'DEBUG': 'cyan',
                    'INFO': 'green',
                    'WARNING': 'yellow',
                    'ERROR': 'red',
                    'CRITICAL': 'red'
                })
        except ImportError:
            # colorlog not installed; keep the plain formatter
            pass

    # if no handler was passed, use a StreamHandler
    if handler is None:
        handler = logging.StreamHandler()

    # add the formatters and set the level
    handler.setFormatter(formatter)
    handler.setLevel(level)

    # if nothing passed, use all available loggers
    if loggers is None:
        loggers = logging.Logger.manager.loggerDict.values()

    # disable pyembree warnings
    logging.getLogger('pyembree').disabled = True

    # loop through all available loggers
    for logger in loggers:
        # skip placeholder entries and loggers on the blacklist
        if (logger.__class__.__name__ != 'Logger' or
                logger.name in blacklist):
            continue
        logger.addHandler(handler)
        logger.setLevel(level)

    # set nicer numpy print options
    np.set_printoptions(precision=5, suppress=True)
# Copyright (c) 2015 Ultimaker B.V. # Uranium is released under the terms of the AGPLv3 or higher. from UM.Logger import LogOutput import logging try: from colorlog import ColoredFormatter logging_formatter = ColoredFormatter( "%(purple)s%(asctime)s%(reset)s - %(log_color)s%(levelname)s%(reset)s - %(white)s%(message)s%(reset)s", log_colors={ 'DEBUG': 'cyan', 'INFO': 'green', 'WARNING': 'yellow', 'ERROR': 'red', 'CRITICAL': 'red,bg_white', }, ) except: from logging import Formatter logging_formatter = Formatter("%(asctime)s - %(levelname)s - %(message)s") class ConsoleLogger(LogOutput): def __init__(self): super().__init__() self._logger = logging.getLogger(self._name) #Create python logger self._logger.setLevel(logging.DEBUG) stream_handler = logging.StreamHandler() # Log to stream stream_handler.setFormatter(logging_formatter)
import random
import json
from pyv8 import PyV8
import logging
from colorlog import ColoredFormatter

# logger: root logger with a colorized console handler
logger = logging.getLogger()
handler = logging.StreamHandler()
formatter = ColoredFormatter(
    "%(log_color)s%(levelname)-8s %(message)s",
    datefmt=None,
    reset=True,
    log_colors={
        'DEBUG': 'cyan',
        'INFO': 'green',
        'WARNING': 'yellow',
        'ERROR': 'red',
        'CRITICAL': 'red,bg_white',
    },
    secondary_log_colors={},
    style='%'
)
handler.setFormatter(formatter)
logger.addHandler(handler)
logger.setLevel(logging.INFO)
## END


class proxy:
    # Simple holder for a proxy address; only `ip` is visible in this chunk.
    def __init__(self):
        self.ip = ''
import logging
from colorlog import ColoredFormatter

# Module logger with a single colorized console handler at DEBUG level.
level = logging.DEBUG
Logger = logging.getLogger(__name__)
Logger.setLevel(level)
stream = logging.StreamHandler()
stream.setLevel(level)
stream.setFormatter(
    ColoredFormatter(
        '[%(asctime)s] %(log_color)s%(levelname)-8s%(reset)s | %(message)s%(reset)s'
    ))
Logger.addHandler(stream)
from PIL import Image
import sys
from pyocr import pyocr
from pyocr import builders
import yaml
from pokemonlib import PokemonGo
from colorlog import ColoredFormatter

# NOTE(review): `logging` is used below but not imported in this visible
# chunk -- presumably imported earlier in the file.
logger = logging.getLogger('ivcheck')
logger.setLevel(logging.INFO)
ch = logging.StreamHandler()
ch.setLevel(logging.INFO)
formatter = ColoredFormatter(
    " %(log_color)s%(levelname)-8s%(reset)s | %(log_color)s%(message)s%(reset)s"
)
ch.setFormatter(formatter)
logger.addHandler(ch)

# Time to stay on eggs and world
TIME_ON_EGGS = 240
TIME_ON_WORLD = 15


def get_median_location(box_location):
    '''
    Given a list of 4 coordinates, returns the central point of the box
    '''
    x1, y1, x2, y2 = box_location
    return [int((x1 + x2) / 2), int((y1 + y2) / 2)]
logging.INFO) # this one is way too verbose in debug logger.setLevel(logging.INFO) pydev = ispydevd() stream = sys.stdout if pydev else sys.stderr isatty = pydev or stream.isatty( ) # force isatty if we are under pydev because it supports coloring anyway. console_hdlr = logging.StreamHandler(stream) if isatty: formatter = ColoredFormatter( "%(asctime)s %(log_color)s%(levelname)-8s%(reset)s " "%(blue)s%(name)-25.25s%(reset)s %(white)s%(message)s%(reset)s", datefmt="%H:%M:%S", reset=True, log_colors={ 'DEBUG': 'cyan', 'INFO': 'green', 'WARNING': 'yellow', 'ERROR': 'red', 'CRITICAL': 'red', }) console_hdlr.setFormatter(formatter) else: console_hdlr.setFormatter( logging.Formatter("%(levelname)-8s %(name)-25s %(message)s")) logger.addHandler(console_hdlr) def get_config(config_path, mode): __import__('errbot.config-template' ) # - is on purpose, it should not be imported normally ;)
import logging.handlers as hdl from shutil import copy2 as cp # use "copy2" instead of "copyfile" import colorlog from colorlog import ColoredFormatter, TTYColoredFormatter logger = logging.getLogger() # output log into stdout console_hdl = logging.StreamHandler(sys.stdout) # console_fmt = logging.Formatter('%(asctime)s %(name)-5s [%(threadName)-10s] %(levelname)-5s %(funcName)-5s ' # '%(filename)s line=%(lineno)-4d: %(message)s') # Add colorlog console_fmt = ColoredFormatter( "%(log_color)s%(asctime)s %(name)-5s [%(threadName)-10s] %(levelname)-5s " "%(funcName)-5s %(filename)s line=%(lineno)-4d: %(message_log_color)s%(message)s", secondary_log_colors={'message': { 'ERROR': 'red', 'CRITICAL': 'red' }}) console_hdl.setFormatter(console_fmt) logger.addHandler(console_hdl) # output log into file dir_log = './log/data_kjl' pth_log = pth.join(dir_log, f'app_{START_TIME}.log') if not pth.exists(dir_log): os.makedirs(dir_log) # if pth.exists(pth_log): # cp(pth_log, pth_log+'time') # BackupCount: if either of maxBytes or backupCount is zero, rollover never occurs file_hdl = hdl.RotatingFileHandler(
from . import utils
from .build import build_recipes
from . import docker_utils
from . import lint_functions
from . import linting
from . import github_integration

# Console handler with a colorized "BIOCONDA" prefix on every record.
# NOTE(review): `logging` and `ColoredFormatter` are not imported in this
# visible chunk -- presumably imported earlier in the file.
log_stream_handler = logging.StreamHandler()
log_stream_handler.setFormatter(
    ColoredFormatter(
        "%(asctime)s %(log_color)sBIOCONDA %(levelname)s%(reset)s %(message)s",
        datefmt="%H:%M:%S",
        reset=True,
        log_colors={
            'DEBUG': 'cyan',
            'INFO': 'green',
            'WARNING': 'yellow',
            'ERROR': 'red',
            'CRITICAL': 'red',
        }))

logger = logging.getLogger(__name__)


def setup_logger(loglevel):
    # Resolve the textual level name (e.g. "info") to its numeric value.
    LEVEL = getattr(logging, loglevel.upper())
    #logging.basicConfig(level=LEVEL, format='%(levelname)s:%(name)s:%(message)s')
    l = logging.getLogger('bioconda_utils')
    # Records are handled here, not by ancestor loggers.
    l.propagate = False
    l.setLevel(getattr(logging, loglevel.upper()))
    # NOTE(review): the function may continue past this chunk boundary.
def async_enable_logging(
    hass: core.HomeAssistant,
    verbose: bool = False,
    log_rotate_days: int | None = None,
    log_file: str | None = None,
    log_no_color: bool = False,
) -> None:
    """Set up the logging.

    This method must be run in the event loop.
    """
    fmt = "%(asctime)s %(levelname)s (%(threadName)s) [%(name)s] %(message)s"
    datefmt = "%Y-%m-%d %H:%M:%S"

    if not log_no_color:
        try:
            # pylint: disable=import-outside-toplevel
            from colorlog import ColoredFormatter

            # basicConfig must be called after importing colorlog in order to
            # ensure that the handlers it sets up wraps the correct streams.
            logging.basicConfig(level=logging.INFO)

            colorfmt = f"%(log_color)s{fmt}%(reset)s"
            logging.getLogger().handlers[0].setFormatter(
                ColoredFormatter(
                    colorfmt,
                    datefmt=datefmt,
                    reset=True,
                    log_colors={
                        "DEBUG": "cyan",
                        "INFO": "green",
                        "WARNING": "yellow",
                        "ERROR": "red",
                        "CRITICAL": "red",
                    },
                ))
        except ImportError:
            # colorlog is optional; plain formatting is configured below.
            pass

    # If the above initialization failed for any reason, setup the default
    # formatting. If the above succeeds, this will result in a no-op.
    logging.basicConfig(format=fmt, datefmt=datefmt, level=logging.INFO)

    # Suppress overly verbose logs from libraries that aren't helpful
    logging.getLogger("requests").setLevel(logging.WARNING)
    logging.getLogger("urllib3").setLevel(logging.WARNING)
    logging.getLogger("aiohttp.access").setLevel(logging.WARNING)

    # Route uncaught exceptions (main thread and worker threads) to the log.
    sys.excepthook = lambda *args: logging.getLogger(None).exception(
        "Uncaught exception", exc_info=args  # type: ignore
    )
    threading.excepthook = lambda args: logging.getLogger(None).exception(
        "Uncaught thread exception",
        exc_info=(args.exc_type, args.exc_value, args.exc_traceback
                  ),  # type: ignore[arg-type]
    )

    # Log errors to a file if we have write access to file or config dir
    if log_file is None:
        err_log_path = hass.config.path(ERROR_LOG_FILENAME)
    else:
        err_log_path = os.path.abspath(log_file)
    err_path_exists = os.path.isfile(err_log_path)
    err_dir = os.path.dirname(err_log_path)

    # Check if we can write to the error log if it exists or that
    # we can create files in the containing directory if not.
    if (err_path_exists and os.access(err_log_path, os.W_OK)) or (
            not err_path_exists and os.access(err_dir, os.W_OK)):
        err_handler: logging.handlers.RotatingFileHandler | logging.handlers.TimedRotatingFileHandler
        if log_rotate_days:
            err_handler = logging.handlers.TimedRotatingFileHandler(
                err_log_path, when="midnight", backupCount=log_rotate_days)
        else:
            err_handler = logging.handlers.RotatingFileHandler(err_log_path,
                                                               backupCount=1)
            # Roll the previous log over immediately so each run starts fresh.
            try:
                err_handler.doRollover()
            except OSError as err:
                _LOGGER.error("Error rolling over log file: %s", err)

        err_handler.setLevel(logging.INFO if verbose else logging.WARNING)
        err_handler.setFormatter(logging.Formatter(fmt, datefmt=datefmt))

        logger = logging.getLogger("")
        logger.addHandler(err_handler)
        logger.setLevel(logging.INFO if verbose else logging.WARNING)

        # Save the log file location for access by other components.
        hass.data[DATA_LOGGING] = err_log_path
    else:
        _LOGGER.error("Unable to set up error log %s (access denied)",
                      err_log_path)

    async_activate_log_queue_handler(hass)
def configureRootLogger(args):
    """
    Configure the main logger

    Parameters:
    - args: (dictionary) args from the command line

    Returns the logger
    """
    # store the configuration in Config (data used elsewhere)
    gameName = args['<gameName>']
    Config.mode = 'prod' if args['--prod'] else 'dev' if args['--dev'] else 'debug'
    Config.logPath = join('games', gameName,
                          Template(args['--log']).render(hostname=gethostname()))
    Config.webPort = args['--web']
    Config.host = args['--host']

    # add the LOW_DEBUG and MESSAGE logging levels
    logging.addLevelName(LOW_DEBUG_LEVEL, "COM_DEBUG")
    logging.Logger.low_debug = low_debug
    logging.addLevelName(MESSAGE_LEVEL, "MESSAGE")
    logging.Logger.message = message

    # Create and setup the logger
    logger = logging.getLogger()
    logger.setLevel(LOW_DEBUG_LEVEL)

    # add a handler to redirect the log to a file (1Mo max)
    makedirs(Config.logPath, exist_ok=True)
    file_handler = RotatingFileHandler(join(Config.logPath, 'activity.log'),
                                       mode='a', maxBytes=MAX_ACTIVITY_SIZE,
                                       backupCount=1)
    file_handler.setLevel(activity_level[Config.mode][1])
    file_formatter = logging.Formatter('%(asctime)s [%(name)s] | %(message)s',
                                       "%m/%d %H:%M:%S")
    file_handler.setFormatter(file_formatter)
    logger.addHandler(file_handler)

    # Add another handler to redirect some logs to the console
    # (with colors, depending on the level DEBUG/INFO/WARNING/ERROR/CRITICAL)
    steam_handler = logging.StreamHandler()
    steam_handler.setLevel(activity_level[Config.mode][0])
    LOGFORMAT = " %(log_color)s[%(name)s]%(reset)s | %(log_color)s%(message)s%(reset)s"
    formatter = ColoredFormatter(LOGFORMAT)
    steam_handler.setFormatter(formatter)
    logger.addHandler(steam_handler)

    # Another handler to log the errors (only) in errors.log
    error_handler = RotatingFileHandler(join(Config.logPath, 'errors.log'),
                                        mode='a', maxBytes=MAX_ACTIVITY_SIZE,
                                        backupCount=1)
    error_handler.setLevel(error_level[Config.mode])
    error_formatter = logging.Formatter(
        '----------------------\n%(asctime)s [%(name)s] | %(message)s',
        "%m/%d %H:%M:%S")
    error_handler.setFormatter(error_formatter)
    logger.addHandler(error_handler)

    # Manage errors (send an email) when we are in production
    if Config.mode == 'prod' and not args['--no-email']:
        # get the password (and disable warning message)
        # see http://stackoverflow.com/questions/35408728/catch-warning-in-python-2-7-without-stopping-part-of-progam
        # Fix: the default-argument list was corrupted (redaction residue
        # `"Password: "******`); restored to getpass's documented fallback
        # signature (prompt, stream).
        # noinspection PyUnusedLocal
        def custom_fallback(prompt="Password: ", stream=None):
            """Simple fallback to get the password, see getpass module"""
            print("WARNING: Password input may be echoed (can not control echo on the terminal)")
            # noinspection PyProtectedMember
            return getpass._raw_input(prompt)

        # Use getpass' custom raw_input function for security
        # Replace the getpass.fallback_getpass function with our equivalent
        getpass.fallback_getpass = custom_fallback
        password = getpass.getpass('Password for %s account:' % args['--email'])

        # check the smtp and address
        smtp, port = 0, ''
        try:
            smtp, port = args['--smtp'].split(':')
            port = int(port)
        except ValueError:
            # Fix: corrected the `smpt:port` typo in the user-facing message.
            print(Fore.RED + "Error: The smtp is not valid (should be `smtp:port`)" + Fore.RESET)
            quit()
        address = parseaddr(args['--email'])[1]
        if not address:
            print(Fore.RED + "Error: The email address is not valid" + Fore.RESET)
            quit()

        # check if the password/email/smtp works
        smtpserver = SMTP(smtp, port)
        smtpserver.ehlo()
        smtpserver.starttls()
        try:
            smtpserver.login(address, password)
        except SMTPAuthenticationError as err:
            print(Fore.RED + "Error: The email/smtp:port/password is not valid address is not valid (%s)" % err + Fore.RESET)
            quit()
        finally:
            smtpserver.close()

        # add another handler to redirect errors through emails
        mail_handler = SMTPHandler((smtp, port), address, [address],
                                   "Error in CGS (%s)" % gethostname(),
                                   (address, password), secure=())
        mail_handler.setLevel(activity_level[Config.mode][2])
        # mail_formatter = logging.Formatter('%(asctime)s [%(name)s] | %(message)s', "%m/%d %H:%M:%S")
        # mail_handler.setFormatter(mail_formatter)
        logger.addHandler(mail_handler)

    return logger
# use colorama to add color to the output from colorlog import ColoredFormatter logger = logging.getLogger('stress') logger.setLevel(logging.INFO) ch = logging.StreamHandler() ch.setLevel(logging.INFO) formatter = ColoredFormatter( '%(log_color)s %(asctime)s - %(levelname)-8s%(reset)s %(message)s', datefmt='%m/%d/%Y %I:%M:%S %p', reset=True, log_colors={ 'DEBUG': 'cyan', 'INFO': 'green', 'WARNING': 'yellow', 'ERROR': 'red', 'CRITICAL': 'red', } ) ch.setFormatter(formatter) logger.addHandler(ch) # disable paramiko INFO logger logging.getLogger('paramiko').setLevel(logging.WARNING) RUN_MODE_ONESHOT = 'oneshot' RUN_MODE_REPEAT = 'repeat'
def main():
    """Convert a Home Assistant group config into a Lovelace UI config file."""
    global GROUP_CONFIG

    logging.basicConfig(level=logging.INFO)
    try:
        from colorlog import ColoredFormatter
        # Colorize the root handler's output when colorlog is available.
        logging.getLogger().handlers[0].setFormatter(
            ColoredFormatter("%(log_color)s%(levelname)s %(message)s%(reset)s",
                             datefmt="",
                             reset=True,
                             log_colors={
                                 'DEBUG': 'cyan',
                                 'INFO': 'green',
                                 'WARNING': 'yellow',
                                 'ERROR': 'red',
                                 'CRITICAL': 'red',
                             }))
    except ImportError:
        # colorlog is optional; plain output is fine.
        pass

    parser = argparse.ArgumentParser(
        description="Check Home Assistant configuration.")
    parser.add_argument(
        '-c', '--config',
        default=get_default_config_dir(),
        help="Directory that contains the Home Assistant configuration")
    args = parser.parse_args()
    config_dir = os.path.join(os.getcwd(), args.config)

    hass = homeassistant.core.HomeAssistant()
    hass.config.config_dir = config_dir

    config_path = find_config_file(config_dir)
    config = load_yaml_config_file(config_path)
    GROUP_CONFIG = config['group']
    name = config['homeassistant'].get('name', 'Home')

    lovelace = OrderedDict()
    lovelace['name'] = name
    views = lovelace['views'] = []

    # default_view goes first so it becomes the landing tab.
    if 'default_view' in GROUP_CONFIG:
        views.append(convert_view(GROUP_CONFIG['default_view'],
                                  'default_view'))

    for name, conf in GROUP_CONFIG.items():
        if name == 'default_view':
            continue
        # Only groups marked as views become Lovelace views.
        if not conf.get('view', False):
            continue
        views.append(convert_view(conf, name))

    # Catch-all tab listing every entity.
    views.append({
        'name': "All Entities",
        'tab_icon': "mdi:settings",
        'cards': [{
            'type': 'entity-filter',
            'filter': [{}],
            'card_config': {
                'title': 'All Entities'
            }
        }],
    })

    lovelace_path = os.path.join(config_dir, 'ui-lovelace.yaml')
    if os.path.exists(lovelace_path):
        # Never overwrite: move the existing file to a numbered .bkp file.
        i = 0
        while os.path.exists(lovelace_path + '.bkp.{}'.format(i)):
            i += 1
        bkp_path = lovelace_path + '.bkp.{}'.format(i)
        shutil.move(lovelace_path, bkp_path)
        _LOGGER.error(
            "The lovelace configuration already exists under %s! "
            "I will move it to %s", lovelace_path, bkp_path)

    with open(lovelace_path, 'w', encoding='utf-8') as f:
        f.write(yaml.dump(lovelace) + '\n')

    _LOGGER.info("Successfully migrated lovelace configuration to %s",
                 lovelace_path)
    return 0
def async_enable_logging(hass: core.HomeAssistant,
                         verbose: bool = False,
                         log_rotate_days=None,
                         log_file=None) -> None:
    """Set up the logging.

    This method must be run in the event loop.
    """
    logging.basicConfig(level=logging.INFO)
    fmt = ("%(asctime)s %(levelname)s (%(threadName)s) "
           "[%(name)s] %(message)s")
    colorfmt = "%(log_color)s{}%(reset)s".format(fmt)
    datefmt = '%Y-%m-%d %H:%M:%S'

    # Suppress overly verbose logs from libraries that aren't helpful
    logging.getLogger('requests').setLevel(logging.WARNING)
    logging.getLogger('urllib3').setLevel(logging.WARNING)
    logging.getLogger('aiohttp.access').setLevel(logging.WARNING)

    try:
        from colorlog import ColoredFormatter
        logging.getLogger().handlers[0].setFormatter(ColoredFormatter(
            colorfmt,
            datefmt=datefmt,
            reset=True,
            log_colors={
                'DEBUG': 'cyan',
                'INFO': 'green',
                'WARNING': 'yellow',
                'ERROR': 'red',
                'CRITICAL': 'red',
            }
        ))
    except ImportError:
        # colorlog is optional; plain formatting remains in effect.
        pass

    # Log errors to a file if we have write access to file or config dir
    if log_file is None:
        err_log_path = hass.config.path(ERROR_LOG_FILENAME)
    else:
        err_log_path = os.path.abspath(log_file)
    err_path_exists = os.path.isfile(err_log_path)
    err_dir = os.path.dirname(err_log_path)

    # Check if we can write to the error log if it exists or that
    # we can create files in the containing directory if not.
    if (err_path_exists and os.access(err_log_path, os.W_OK)) or \
            (not err_path_exists and os.access(err_dir, os.W_OK)):
        if log_rotate_days:
            err_handler = logging.handlers.TimedRotatingFileHandler(
                err_log_path, when='midnight', backupCount=log_rotate_days)
        else:
            err_handler = logging.FileHandler(
                err_log_path, mode='w', delay=True)

        err_handler.setLevel(logging.INFO if verbose else logging.WARNING)
        err_handler.setFormatter(logging.Formatter(fmt, datefmt=datefmt))

        # Writes are marshalled onto the event loop through AsyncHandler.
        async_handler = AsyncHandler(hass.loop, err_handler)

        @asyncio.coroutine
        def async_stop_async_handler(event):
            """Cleanup async handler."""
            logging.getLogger('').removeHandler(async_handler)
            yield from async_handler.async_close(blocking=True)

        hass.bus.async_listen_once(
            EVENT_HOMEASSISTANT_CLOSE, async_stop_async_handler)

        logger = logging.getLogger('')
        logger.addHandler(async_handler)
        logger.setLevel(logging.INFO)

        # Save the log file location for access by other components.
        hass.data[DATA_LOGGING] = err_log_path
    else:
        _LOGGER.error(
            "Unable to setup error log %s (access denied)", err_log_path)
#: Root of the locale files localedir = PKGDATA / 'locales' #: Translation machinery for the app translation = gettext.translation( domain=__package__, # Allow the locale files to be stored in system folder localedir=str(localedir) if localedir.is_dir() else None, fallback=True, ) _ = translation.gettext #: Logging machinery for the app color_log_fmt = ColoredFormatter( '%(log_color)s%(message)s', style='%', reset=True, log_colors={ 'DEBUG': 'purple', 'INFO': 'reset', 'WARNING': 'yellow', 'ERROR': 'red', 'CRITICAL': 'fg_red,bg_white', }, ) color_console_handler = logging.StreamHandler() color_console_handler.setFormatter(color_log_fmt) log = logging.getLogger(RESOURCE_NAME) log.addHandler(color_console_handler)
def validate_args(args):
    """
    Validate that arguments are valid.

    :param args: An arguments namespace.
    :type args: :py:class:`argparse.Namespace`
    :return: The validated namespace.
    :rtype: :py:class:`argparse.Namespace`
    """
    # Setup logging
    level = LEVELS.get(args.verbosity, DEBUG)
    if not args.colorize:
        formatter = Formatter(fmt=SIMPLE_FORMAT, style='{')
    else:
        formatter = ColoredFormatter(fmt=COLOR_FORMAT, style='{')
    handler = StreamHandler()
    handler.setFormatter(formatter)
    basicConfig(
        handlers=[handler],
        level=level,
    )
    log.debug('Arguments:\n{}'.format(args))

    # Check if source exists
    args.source = Path(args.source)
    if not args.source.exists():
        raise InvalidArguments('No such file or directory {}'.format(
            args.source))
    args.source = args.source.resolve()

    # Check destination
    args.destination = Path(args.destination)
    if args.destination.exists():
        if args.output and not args.override:
            raise InvalidArguments('Output file or directory "{}" exists. '
                                   'Use --force to force overriding.'.format(
                                       str(args.destination),
                                   ))
        elif args.output_in and not args.destination.is_dir():
            raise InvalidArguments(
                'Output must be a directory when using --output-in.')
    args.destination = args.destination.parent.resolve()

    # Check output flag semantics
    # FIXME: Maybe use a Default class
    if args.output and args.output_in:
        raise InvalidArguments('Either use --output or --output-in')
    if args.output is None and args.output_in is None:
        args.output = True

    # Check if files and directories exists
    for human, argsattr, checker in [
            ('configurations', 'configs', lambda path: path.is_file()),
            ('libraries', 'libraries', lambda path: path.is_dir()),
            ('values files', 'values_files', lambda path: path.is_file()),
    ]:
        assert hasattr(args, argsattr)
        files = getattr(args, argsattr)
        if not files:
            continue

        files = [Path(file) for file in files]

        # Check if exists
        missing = [file for file in files if not file.exists()]
        if missing:
            raise InvalidArguments('No such {} {}'.format(
                human,
                ', '.join(map(str, missing)),
            ))

        # Check if valid
        invalid = [file for file in files if not checker(file)]
        if invalid:
            raise InvalidArguments('Invalid {} {}'.format(
                human,
                ', '.join(map(str, invalid)),
            ))

        files = [file.resolve() for file in files]
        setattr(args, argsattr, files)

    # Check values options
    if args.values:
        values = OrderedDict()
        for pair in args.values:
            # Each entry must look like key=value; value is auto-typed.
            if '=' not in pair:
                raise InvalidArguments('Invalid value "{}"'.format(pair))
            key, value = pair.split('=', 1)
            values[key] = autocast(value)
        args.values = values

    return args
from colorlog import ColoredFormatter
import logging

# Console formatter: primary colors key off the level name; secondary colors
# restyle only the message text.
# NOTE(review): 'PLUGINFO' appears to be a custom level registered elsewhere
# in the project -- confirm before relying on it.
formatter = ColoredFormatter(
    '%(log_color)s[%(asctime)-8s] %(module)s: %(message_log_color)s%(message)s',
    datefmt=None,
    reset=True,
    log_colors={
        'DEBUG': 'blue',
        'PLUGINFO': 'purple',
        'INFO': 'green',
        'WARNING': 'yellow',
        'ERROR': 'red',
        'CRITICAL': 'red',
    },
    secondary_log_colors={
        'message': {
            'DEBUG': 'purple',
            'PLUGINFO': 'blue',
            'INFO': 'yellow',
            'WARNING': 'green',
            'ERROR': 'yellow',
            'CRITICAL': 'red',
        }
    },
    style='%')
stream = logging.StreamHandler()
stream.setFormatter(formatter)
# limitations under the License. """Propeller""" import os import sys import logging import six from time import time __version__ = '0.2' log = logging.getLogger(__name__) stream_hdl = logging.StreamHandler(stream=sys.stderr) formatter = logging.Formatter( fmt='[%(levelname)s] %(asctime)s [%(filename)12s:%(lineno)5d]:\t%(message)s') try: from colorlog import ColoredFormatter fancy_formatter = ColoredFormatter( fmt='%(log_color)s[%(levelname)s] %(asctime)s [%(filename)12s:%(lineno)5d]:\t%(message)s' ) stream_hdl.setFormatter(fancy_formatter) except ImportError: stream_hdl.setFormatter(formatter) log.setLevel(logging.INFO) log.addHandler(stream_hdl) log.propagate = False #from propeller.types import * from propeller.util import ArgumentParser, parse_hparam, parse_runconfig, parse_file
os.environ['LANGUAGE'] = args.language

console = logging.StreamHandler()
nextRaidQueue = []

# Console shows INFO and above unless verbose mode was requested.
# NOTE(review): when args.verbose is true no level is set here, so the
# handler inherits the default -- confirm that is intended.
if not args.verbose:
    console.setLevel(logging.INFO)
formatter = ColoredFormatter(
    '%(log_color)s [%(asctime)s] [%(threadName)16s] [%(module)14s:%(lineno)d]' +
    ' [%(levelname)8s] %(message)s',
    datefmt='%m-%d %H:%M:%S',
    reset=True,
    log_colors={
        'DEBUG': 'purple',
        'INFO': 'cyan',
        'WARNING': 'yellow',
        'ERROR': 'red',
        'CRITICAL': 'red,bg_white',
    },
    secondary_log_colors={},
    style='%'
)
console.setFormatter(formatter)

# Redirect messages lower than WARNING to stdout
stdout_hdlr = logging.StreamHandler(sys.stdout)
stdout_hdlr.setFormatter(formatter)
log_filter = LogFilter(logging.WARNING)
stdout_hdlr.addFilter(log_filter)
import requests
from colorlog import ColoredFormatter

from lib.algorithms.hashing_algs import *
from lib.github.create_issue import (request_connection, dagon_failure)

# Create logging
# NOTE(review): `logging` is not imported in this visible chunk -- presumably
# imported earlier in the file.
log_level = logging.DEBUG
logger_format = "[%(log_color)s%(asctime)s %(levelname)s%(reset)s] %(log_color)s%(message)s%(reset)s"
logging.root.setLevel(log_level)
formatter = ColoredFormatter(logger_format, datefmt="%H:%M:%S",
                             log_colors={
                                 "DEBUG": "cyan",
                                 "INFO": "bold,green",
                                 "WARNING": "yellow",
                                 "ERROR": "red",
                                 "CRITICAL": "bold,red"
                             })
stream = logging.StreamHandler()
stream.setLevel(log_level)
stream.setFormatter(formatter)
LOGGER = logging.getLogger('configlog')
LOGGER.setLevel(log_level)
LOGGER.addHandler(stream)

# dagons email address
# NOTE(review): this value looks redacted in this copy of the source.
DAGON_EMAIL = "*****@*****.**"

# Version number <major>.<minor>.<patch>.<git-commit>
VERSION = "1.15.37.60"
from sys import platform

import numpy as np
import psutil
from colorlog import ColoredFormatter

# NOTE(review): `logging` is used below but not imported in this visible
# chunk -- presumably imported earlier in the file. 'INFOV' looks like a
# custom verbose-info level registered elsewhere; confirm.
ch = logging.StreamHandler()
ch.setLevel(logging.DEBUG)
formatter = ColoredFormatter(
    "%(log_color)s[%(asctime)s][%(process)05d] %(message)s",
    datefmt=None,
    reset=True,
    log_colors={
        'DEBUG': 'cyan',
        'INFO': 'white,bold',
        'INFOV': 'cyan,bold',
        'WARNING': 'yellow',
        'ERROR': 'red,bold',
        'CRITICAL': 'red,bg_white',
    },
    secondary_log_colors={},
    style='%')
ch.setFormatter(formatter)

log = logging.getLogger('rl')
log.setLevel(logging.DEBUG)
log.handlers = []  # No duplicated handlers
log.propagate = False  # workaround for duplicated logs in ipython
log.addHandler(ch)

# general Python utilities
def enable_logging(hass: core.HomeAssistant, verbose: bool = False,
                   log_rotate_days=None) -> None:
    """Setup the logging.

    Async friendly.
    """
    logging.basicConfig(level=logging.INFO)
    fmt = ("%(log_color)s%(asctime)s %(levelname)s (%(threadName)s) "
           "[%(name)s] %(message)s%(reset)s")

    # suppress overly verbose logs from libraries that aren't helpful
    logging.getLogger("requests").setLevel(logging.WARNING)
    logging.getLogger("urllib3").setLevel(logging.WARNING)
    logging.getLogger("aiohttp.access").setLevel(logging.WARNING)

    try:
        from colorlog import ColoredFormatter
        logging.getLogger().handlers[0].setFormatter(
            ColoredFormatter(fmt, datefmt='%y-%m-%d %H:%M:%S',
                             reset=True,
                             log_colors={
                                 'DEBUG': 'cyan',
                                 'INFO': 'green',
                                 'WARNING': 'yellow',
                                 'ERROR': 'red',
                                 'CRITICAL': 'red',
                             }))
    except ImportError:
        # colorlog is optional; plain formatting remains in effect.
        pass

    # AsyncHandler already exists? Then logging was set up on a prior call.
    if hass.data.get(core.DATA_ASYNCHANDLER):
        return

    # Log errors to a file if we have write access to file or config dir
    err_log_path = hass.config.path(ERROR_LOG_FILENAME)
    err_path_exists = os.path.isfile(err_log_path)

    # Check if we can write to the error log if it exists or that
    # we can create files in the containing directory if not.
    if (err_path_exists and os.access(err_log_path, os.W_OK)) or \
            (not err_path_exists and os.access(hass.config.config_dir, os.W_OK)):
        if log_rotate_days:
            err_handler = logging.handlers.TimedRotatingFileHandler(
                err_log_path, when='midnight', backupCount=log_rotate_days)
        else:
            err_handler = logging.FileHandler(err_log_path, mode='w',
                                              delay=True)

        err_handler.setLevel(logging.INFO if verbose else logging.WARNING)
        err_handler.setFormatter(
            logging.Formatter('%(asctime)s %(name)s: %(message)s',
                              datefmt='%y-%m-%d %H:%M:%S'))

        async_handler = AsyncHandler(hass.loop, err_handler)
        hass.data[core.DATA_ASYNCHANDLER] = async_handler

        logger = logging.getLogger('')
        logger.addHandler(async_handler)
        logger.setLevel(logging.INFO)
    else:
        _LOGGER.error('Unable to setup error log %s (access denied)',
                      err_log_path)
import logging
from logging.handlers import RotatingFileHandler

from colorlog import ColoredFormatter

# Console format: colorized level and message.
STREAM_FORMAT = "%(log_color)s%(levelname)-8s%(reset)s | %(log_color)s%(message)s%(reset)s"
# NOTE(review): this format keeps %(log_color)s directives, so raw ANSI
# escape sequences are written into the log file — confirm that is intended.
FILE_FORMAT = "%(log_color)s %(asctime)s %(levelname)s %(funcName)s(%(lineno)d) | %(log_color)s %(message)s"
LOG_FILE = "/var/log/kraken-trades.log"
LOG_LEVEL = logging.DEBUG

logging.root.setLevel(LOG_LEVEL)

# (Removed a stray `ColoredFormatter(STREAM_FORMAT)` call whose result was
# constructed and immediately discarded.)
stream_handler = logging.StreamHandler()
stream_handler.setLevel(LOG_LEVEL)
stream_handler.setFormatter(ColoredFormatter(STREAM_FORMAT))

# Rotate at 5 MiB, keeping two backup files.
file_handler = RotatingFileHandler(LOG_FILE, mode="a",
                                   maxBytes=5 * 1024 * 1024, backupCount=2,
                                   encoding=None, delay=0)
file_handler.setLevel(LOG_LEVEL)
file_handler.setFormatter(ColoredFormatter(FILE_FORMAT))

log = logging.getLogger("pythonConfig")
log.setLevel(LOG_LEVEL)
log.addHandler(stream_handler)
log.addHandler(file_handler)
from . import viewer  # noqa: E402
from .hdf5 import H5File  # noqa: E402
from .options import options  # noqa: E402
from . import monitoring  # noqa: E402
from .analysis import EquivalentEquation, Stability  # noqa: E402
from .utils import progress_bar  # noqa: E402

# Translate the textual loglevel option (e.g. "DEBUG") into the numeric
# logging constant; getattr returns None for unrecognized names.
numeric_level = getattr(logging, options().loglevel, None)
if not isinstance(numeric_level, int):
    # Fail fast with a clear message instead of the obscure TypeError that
    # Logger.setLevel(None) would raise further down.
    raise ValueError(f"Invalid log level: {options().loglevel!r}")

# Prefix every record with the MPI rank so interleaved output from
# multiple processes stays attributable.
formatter = ColoredFormatter(
    f"%(log_color)s[{mpi.COMM_WORLD.Get_rank()}] "
    "%(levelname)-8s %(name)s in function %(funcName)s "
    "line %(lineno)s\n%(reset)s%(message)s",
    datefmt=None,
    reset=True,
    log_colors={
        'DEBUG': 'green',
        'INFO': 'cyan',
        'WARNING': 'blue',
        'ERROR': 'red',
        'CRITICAL': 'red,bg_white',
    },
    style='%')

logger = logging.getLogger(__name__)
logger.setLevel(level=numeric_level)
console = logging.StreamHandler()
console.setFormatter(formatter)
logger.addHandler(console)
async def main():
    """Connect to the local sbot over secret-handshake and run the test client."""
    client = SHSClient('127.0.0.1', 8008, keypair, bytes(keypair.verify_key))
    packet_stream = PacketStream(client)

    await client.open()
    api.add_connection(packet_stream)
    await gather(ensure_future(api), test_client())


if __name__ == '__main__':
    # Colorized console output for the packet_stream logger.
    formatter = ColoredFormatter(
        '%(log_color)s%(levelname)s%(reset)s:%(bold_white)s%(name)s%(reset)s - '
        '%(cyan)s%(message)s%(reset)s')

    ch = logging.StreamHandler()
    ch.setLevel(logging.INFO)
    ch.setFormatter(formatter)

    logger = logging.getLogger('packet_stream')
    logger.setLevel(logging.INFO)
    logger.addHandler(ch)

    # Load the local SSB identity, then drive the event loop to completion.
    keypair = load_ssb_secret()['keypair']
    loop = get_event_loop()
    loop.run_until_complete(main())
    loop.close()
#!/usr/bin/env python2

# Tests for key reinstallation vulnerabilities in Wi-Fi clients
# Copyright (c) 2017, Mathy Vanhoef <*****@*****.**>
#
# This code may be distributed under the terms of the BSD license.
# See README for more details.

import logging
from colorlog import ColoredFormatter
from subprocess import check_output

# Console format: only the (colorized) message itself.
LOGFORMAT = "%(log_color)s%(message)s%(reset)s"

logging.root.setLevel(logging.DEBUG)
# Bug fix: LOGFORMAT was defined but never passed in, so colorlog fell back
# to its default format instead of the intended message-only output.
formatter = ColoredFormatter(LOGFORMAT)
stream = logging.StreamHandler()
stream.setLevel(logging.DEBUG)
stream.setFormatter(formatter)
log = logging.getLogger('pythonConfig')
log.setLevel(logging.DEBUG)
log.addHandler(stream)

from scapy.all import *
from libwifi import *
import sys, socket, struct, time, subprocess, atexit, select, os.path
from wpaspy import Ctrl

# FIXME:
# - If the client installs an all-zero key, we cannot reliably test the group key handshake
# - We should test decryption using an all-zero key, and warn if this seems to succeed

# Future work:
import copy

import scrapy.utils.log
from colorlog import ColoredFormatter

__version_info__ = (0, 0, 7)
__version__ = '.'.join(map(str, __version_info__))

# Formatter that colorizes each record: level, timestamp, logger name,
# function, line number and message each carry their own color directives.
color_formatter = ColoredFormatter(
    ('%(log_color)s%(levelname)-5s%(reset)s '
     '%(yellow)s[%(asctime)s]%(reset)s'
     '%(white)s %(name)s %(funcName)s '
     '%(bold_purple)s:%(lineno)d%(reset)s '
     '%(log_color)s%(message)s%(reset)s'),
    datefmt='%y-%m-%d %H:%M:%S',
    log_colors={
        'DEBUG': 'blue',
        'INFO': 'bold_cyan',
        'WARNING': 'red',
        'ERROR': 'bg_bold_red',
        'CRITICAL': 'red,bg_white',
    })

# Keep a reference to scrapy's original handler factory so the wrapper below
# can delegate to it.  (copy.copy on a plain function returns the function
# itself, so this is effectively an alias.)
_get_handler = copy.copy(scrapy.utils.log._get_handler)


def _get_handler_custom(*args, **kwargs):
    """Build a handler via scrapy's factory, then swap in the color formatter."""
    handler = _get_handler(*args, **kwargs)
    handler.setFormatter(color_formatter)
    return handler
"""Logging abstraction to facade systemd journal.""" import sys import logging from colorlog import ColoredFormatter FORMATTER = ColoredFormatter('[%(asctime)s]' + ' %(log_color)s%(message)s%(reset)s', datefmt='%H:%M:%S') def init_logging(logging_level: int = logging.DEBUG): """Initialize the logging.""" handler = logging.StreamHandler(sys.stdout) handler.setFormatter(FORMATTER) handler.setLevel(logging_level) logger = logging.getLogger() logger.setLevel(logging_level) logger.addHandler(handler)