def __init__(self, model, device, config_path, weights_path=None, verbose=1, tensorboard=False):
    """Set up the trainer: model, device, serialized config, logging and
    an optional TensorBoard callback.

    Args:
        model: model instance exposing ``load_weights``
        device: computation device identifier
        config_path (str): path to a serialized ConfigFile
        weights_path (str): optional path to pre-trained weights
        verbose (int): level handed to the verbose logger
        tensorboard (bool): when true, register a TensorBoard callback
    """
    self.model_ = model
    self.device_ = device
    self.main_dir_ = os.path.dirname(config_path)
    # Load the serialized training configuration.
    self.config = ConfigFile(session_name="")
    self.config.load(config_path)
    self.weights_path_ = weights_path
    if self.weights_path_:
        self.model_.load_weights(self.weights_path_)
    # Console logger at the requested verbosity.
    self.verbose = verbose
    self.logger = verboselogs.VerboseLogger('verbose-demo')
    self.logger.addHandler(logging.StreamHandler())
    self.logger.setLevel(verbose)
    self.tensorboard_ = tensorboard
    if self.tensorboard_:
        # Graph/image summaries go under the session's tensorboard directory.
        callback = TensorBoard(
            log_dir=os.path.join(self.config.session_dir,
                                 ConfigFile.tensorboard_dirname),
            histogram_freq=0,
            write_graph=True,
            write_images=True)
        self.config.add_callback(callback)
def __init__(self, model=None, device=None, config_path=None, weights_path=None, reg_metric_dict=None, seg_metric_dict=None, verbose=1):
    """Build the tester: model, metrics, config and logging.

    Args:
        model (keras.model): model architecture
        device: computation device identifier
        config_path (str): path to serialized config file following
            src.training.ConfigFile
        weights_path (str): path to model weights (optional)
        reg_metric_dict (dict): metrics following LunaTester.reg_metrics format
        seg_metric_dict (dict): segmentation metrics, merged into the former
        verbose (int): {0, 1}; 0 leaves the logger level untouched
    """
    self._model = model
    self._device = device
    self._weights_path = weights_path
    if self._weights_path:
        self._model.load_weights(self._weights_path)
    self._reg_metric_dict = reg_metric_dict
    self._seg_metric_dict = seg_metric_dict
    # NOTE(review): _metric_dict aliases reg_metric_dict, so the update below
    # mutates the caller's dictionary in place — confirm this is intended.
    self._metric_dict = reg_metric_dict
    if self._metric_dict and self._seg_metric_dict:
        self._metric_dict.update(self._seg_metric_dict)
    self._config = ConfigFile(session_name="")
    if config_path:
        self._config.load(config_path)
    # Console logger; only change the level when a truthy verbosity is given.
    self._verbose = verbose
    self._logger = verboselogs.VerboseLogger('verbose-demo')
    self._logger.addHandler(logging.StreamHandler())
    if verbose:
        self._logger.setLevel(verbose)
def set_verbose_level(level=10):
    """Map an integer verbosity onto a verboselogs level and return the logger.

    0 or below -> WARNING (prints "done"), 1 -> NOTICE, 2 -> VERBOSE,
    3 -> DEBUG, 4 and above -> SPAM. Non-integer input prints usage and exits.
    """
    vlogger = verboselogs.VerboseLogger('verbose-demo')
    vlogger.addHandler(logging.StreamHandler())
    vlogger.setLevel(logging.INFO)
    # Guard clause: reject non-integer verbosity up front.
    if not isinstance(level, int):
        print(level, " should be integer")
        print(__doc__.strip())
        sys.exit(0)
    verbosity = level
    # SPAM sits between DEBUG and NOTSET, VERBOSE between INFO and DEBUG,
    # NOTICE between WARNING and INFO (all added by verboselogs).
    if verbosity >= 4:
        vlogger.setLevel(logging.SPAM)
    elif verbosity == 3:
        vlogger.setLevel(logging.DEBUG)
    elif verbosity == 2:
        vlogger.setLevel(logging.VERBOSE)
    elif verbosity == 1:
        vlogger.setLevel(logging.NOTICE)
    else:
        print("done")
        vlogger.setLevel(logging.WARNING)
    return vlogger
def __init__(self):
    """Verify VBoxManage is reachable and load (or generate) configuration."""
    self.log = verboselogs.VerboseLogger(__name__)
    coloredlogs.install(
        fmt='%(asctime)s [Core] [%(levelname)s] %(message)s',
        logger=self.log)
    # Probe for VBoxManage: any output on stderr means the binary is missing.
    process = Popen("VBoxManage -h", shell=True, stdout=PIPE, stderr=PIPE).communicate()
    if len(process[1]) > 0:
        self.log.critical(
            "VboxManage not found!\nvmaker uses VBoxManage to control virtual machines.\n"
            "Make sure, that you have installed VirtualBox or "
            "VBoxManage binary in $PATH environment.")
        sys.exit(1)
    # First run: create the working directory and a default general config.
    # NOTE(review): this warning names GENERAL_CONFIG_FILENAME while the
    # check is on WORK_DIR — confirm the message matches the intent.
    if not os.path.exists(self.WORK_DIR):
        self.log.warning("%s not found and will be generated!" % self.GENERAL_CONFIG_FILENAME)
        os.mkdir(self.WORK_DIR)
        self.generate_general_config()
        self.log.success("Generated: %s" % self.GENERAL_CONFIG)
    # Working directory exists but the config file itself is missing.
    if not os.path.exists(self.GENERAL_CONFIG):
        self.log.warning("%s not found and will be generated!" % self.GENERAL_CONFIG_FILENAME)
        self.generate_general_config()
        self.log.success("Generated: %s" % self.GENERAL_CONFIG)
    self.load_general_config()
def initialize_logging(self, verbosity):
    """Set up file and console logging for the plugin manager.

    The log file (~/.vim-plugin-manager.log) always records at DEBUG; the
    console level follows *verbosity* (>=2 DEBUG, ==1 VERBOSE, else INFO).
    """
    self.logger = verboselogs.VerboseLogger('vim-plugin-manager')
    self.set_log_level(logging.DEBUG)
    # File handler: appended across runs, always fully verbose.
    log_file = os.path.expanduser('~/.vim-plugin-manager.log')
    log_exists = os.path.isfile(log_file)
    file_handler = coloredlogs.ColoredStreamHandler(open(log_file, 'a'),
                                                    show_name=True,
                                                    isatty=False)
    self.logger.addHandler(file_handler)
    file_handler.setLevel(logging.DEBUG)
    # Delimit this run's messages from previous runs. The console handler is
    # attached afterwards on purpose, so the delimiter never reaches it.
    if log_exists:
        self.logger.info("-" * 40)
    console_handler = coloredlogs.ColoredStreamHandler(show_name=True)
    self.logger.addHandler(console_handler)
    # Console verbosity selection.
    if verbosity >= 2:
        self.set_log_level(logging.DEBUG)
        self.logger.debug("Enabling debugging output.")
    elif verbosity == 1:
        self.set_log_level(logging.VERBOSE)
        self.logger.verbose("Enabling verbose output.")
    else:
        self.set_log_level(logging.INFO)
    # Only visible when debugging is enabled above.
    self.logger.debug("Logging messages to %s.", log_file)
def logando_server(tipo, mensagem):
    """Emit *mensagem* on a verbose logger at the level named by *tipo*.

    Args:
        tipo: level name ('verbose', 'debug', 'info', 'warning', 'error',
            'critical'); any other value is silently ignored.
        mensagem: message text to log.
    """
    logger = logging.getLogger(__name__)
    coloredlogs.install(level='DEBUG')
    coloredlogs.install(level='DEBUG', logger=logger)
    logging.basicConfig(format='%(asctime)s %(hostname)s %(name)s[%(process)d] %(levelname)s %(message)s')
    # Rebind to a VerboseLogger so the extra levels (verbose, ...) exist.
    logger = verboselogs.VerboseLogger('')
    # Dispatch table replaces the if/elif chain; unknown names fall through.
    emitters = {
        'verbose': logger.verbose,
        'debug': logger.debug,
        'info': logger.info,
        'warning': logger.warning,
        'error': logger.error,
        'critical': logger.critical,
    }
    emit = emitters.get(tipo)
    if emit is not None:
        emit(mensagem)
def set_colored_log_level():
    """Switch global logging to the most verbose (SPAM) colored output and
    rebind the module-level ``logger``."""
    global logger
    coloredlogs.install(fmt=conf.LOG_FORMAT_DEBUG,
                        datefmt="%m%d %H:%M:%S",
                        level=verboselogs.SPAM)
    logging.basicConfig(format=conf.LOG_FORMAT_DEBUG, level=verboselogs.SPAM)
    logger = verboselogs.VerboseLogger("dev")
def get_verbose_logger(name: str, log_level: str, fmt: str = LOG_FORMAT) -> verboselogs.VerboseLogger:
    """Return a VerboseLogger with a stream handler attached.

    The logger understands the extra verboselogs levels:
    CRITICAL 50, ERROR 40, SUCCESS 35, WARNING 30, NOTICE 25, INFO 20,
    VERBOSE 15, DEBUG 10, SPAM 5, NOTSET 0.
    """
    vlogger = verboselogs.VerboseLogger(name)
    handler = logging.StreamHandler()
    # Only attach a formatter when a format string was supplied.
    if fmt:
        handler.setFormatter(logging.Formatter(fmt))
    vlogger.addHandler(handler)
    vlogger.setLevel(log_level)
    return vlogger
def main(
    sass_file,
    images_dir,
    css_file,
    config_file,
    skip_css_validation,
):
    """Validate a subreddit stylesheet: build CSS from SASS, check images,
    map/adapt references and validate the result. Exits non-zero on failure.
    """
    config = Config(config_file)
    config.setup_logging()
    logger = verboselogs.VerboseLogger("validate")
    try:
        logger.info(f"Validating Stylesheet.")
        # No subreddit/data-page context — this is a local-only validation run.
        data = StylesheetData(
            subreddit_name=None,
            css_file=css_file,
            data_page_name=None,
            revision_comment=None,
        )
        assets = StylesheetAssets(config, css_file)
        builder = StylesheetAssetsBuilder(config, assets, data)
        # Compile SASS -> compressed CSS.
        logger.info("Building CSS:")
        builder.build_css(sass_file)
        logger.verbose(f"Compressed CSS size: {assets.css_size} bytes")
        logger.info("Loading local images:")
        builder.load_local_images(images_dir)
        logger.verbose(f"Loaded {len(assets.local_images)} images")
        # Image checks: uniqueness/criteria plus CSS-reference consistency.
        logger.info("Validating images:")
        builder.validate_images()
        logger.verbose("All images are unique and fit the Reddit criteria.")
        logger.verbose(
            "All image references in the CSS are matching with image files.")
        logger.info("Mapping images:")
        builder.map_images()
        logger.verbose(f"Mapped {len(data.local_images.new)} images")
        # Rewrite the CSS to use the mapped image names.
        logger.info("Adapting CSS:")
        builder.adapt()
        logger.verbose(f"Adapted CSS size: {assets.adapted_css_size} bytes")
        logger.info("Validating CSS:")
        builder.validate_css(skip_css_validation)
        logger.verbose("CSS has been validated")
        logger.info("Validation completed successfully.")
    except StylesheetException as error:
        # Expected failure path: show the traceback only when debugging.
        show_traceback = logger.getEffectiveLevel() == logging.DEBUG
        logger.error(f"Validation procedure failed: {error}",
                     exc_info=show_traceback)
        exit(1)
    except Exception as error:
        # Unexpected failure: always log the traceback.
        logger.error(f"An unexpected error has occurred.", exc_info=True)
        exit(1)
def getLogger(name: str = "__main__") -> verboselogs.VerboseLogger:
    """Drop-in replacement for logging.getLogger().

    Returns a verboselogs.VerboseLogger, which extends the built-in logger
    with the extra levels (verbose, spam).
    """
    return verboselogs.VerboseLogger(name)
def get_logger(target=None) -> verboselogs.VerboseLogger:
    """Return a verbose logger auto-registered under the default root.

    *target* may be None (root logger), a string name, or any object
    (its class name is used).
    """
    if target is None:
        return verboselogs.VerboseLogger(DEFAULT_ROOT)
    suffix = target if isinstance(target, str) else target.__class__.__name__
    return verboselogs.VerboseLogger('%s.%s' % (DEFAULT_ROOT, suffix))
def log(level):
    """Return a verbose logger set to the level named by *level*.

    Args:
        level (str): attribute name on the logging module, e.g. "INFO".
    """
    vlog = verboselogs.VerboseLogger('')
    # Resolve the textual level name against the logging module.
    vlog.setLevel(getattr(logging, level))
    return vlog
def set_log_level(self, level: int):
    """Install the verboselogs levels and (re)bind the module-global logger.

    Args:
        level: numeric logging level (or a level name accepted by setLevel)
            applied to the logger and every attached handler.

    Fix: the annotation was ``Type[int]`` (i.e. the ``int`` class object);
    the parameter actually receives a level *value*, so it is ``int``.
    """
    verboselogs.install()
    global logger
    logger = verboselogs.VerboseLogger("")
    logger.addHandler(logging.StreamHandler())
    logger.setLevel(level)
    # coloredlogs may add/replace handlers; align them all afterwards.
    coloredlogs.install(level=level, logger=logger)
    for handler in logger.handlers:
        handler.setLevel(level)
def __init__(self, xl_processor, twinsoft_tag_export_file, write_xml_file):
    """Store processing inputs and initialise intermediate dataframes.

    Args:
        xl_processor: Excel processor used by later steps
        twinsoft_tag_export_file: path to the Twinsoft tag export file
        write_xml_file: destination XML file for the export
    """
    self.xl_processor = xl_processor
    self.__twinsoft_tag_export_file = twinsoft_tag_export_file
    self.__write_xml_file = write_xml_file
    self.__logger = verboselogs.VerboseLogger(__name__)
    # Lazily-populated dataframes, filled by subsequent processing steps.
    self.__twinsoft_tags_df = None
    self.__xl_memory_map_df = None
    self.__xl_tags_df = None
    self.__xl_template_df = None
    self.__to_export_df = None
def check_custom_level(self, name):
    """Verify that the custom method *name* logs at its matching level."""
    vlogger = verboselogs.VerboseLogger(random_string())
    # Level 1 rather than NOTSET (0): with NOTSET the effective level is
    # inherited from the parent and could filter the custom level out.
    vlogger.setLevel(1)
    vlogger._log = mock.MagicMock()
    expected_level = getattr(verboselogs, name.upper())
    log_method = getattr(vlogger, name.lower())
    message = "Any random message"
    log_method(message)
    vlogger._log.assert_called_with(expected_level, message, ())
def __init__(self, player1, player2, level=logging.VERBOSE):
    """Start a two-player game: capture bankrolls, fresh deck, logging.

    Args:
        player1, player2: players exposing ``money_left`` / ``reset_money()``
        level: logging level; default VERBOSE assumes verboselogs has
            patched the logging module — confirm verboselogs.install() ran.
    """
    self.player1 = player1
    self.player2 = player2
    # Snapshot money BEFORE the reset below, as in the original flow.
    self.state = State([], [], [player1.money_left, player2.money_left])
    self.deck = Deck()
    self.player1.reset_money()
    self.player2.reset_money()
    # Console logger at the requested verbosity.
    self.logger = verboselogs.VerboseLogger('verbose-demo')
    self.logger.addHandler(logging.StreamHandler())
    self.logger.setLevel(level)
def init_logger(self, level: str, name: str) -> logging.Logger:
    """Create and install the application logger.

    Args:
        level: level name understood by coloredlogs (e.g. "INFO")
        name: logger name
    Returns:
        the configured verbose logger (also stored on ``self.log``)
    """
    stream = logging.StreamHandler()
    stream.setFormatter(
        logging.Formatter("%(asctime)s (%(name)s): %(message)s"))
    self.log = verboselogs.VerboseLogger(name, level)
    self.log.addHandler(stream)
    coloredlogs.install(level=level, logger=self.log)
    return self.log
def get_module_logger(
        verbosity=lg.SPAM,
        file=None,  # if no file, log to a plain stream (stdout/stderr)
        msg_format="%(asctime)s [%(levelname)-5.5s] %(message)s"):
    """Build and return a configured module-level VerboseLogger.

    Args:
        verbosity: numeric log level applied to the logger
        file: optional path; when given, log to this file instead of a stream
        msg_format: logging.Formatter format string

    Fix: the original called ``setLevel(level)`` where ``level`` was never
    defined (NameError on every call); the intended value is ``verbosity``.
    """
    formatter = lg.Formatter(msg_format)
    module_logger = vl.VerboseLogger(__name__)
    if not file:
        handler = lg.StreamHandler()
    else:
        handler = lg.FileHandler(file)
    handler.setFormatter(formatter)
    module_logger.addHandler(handler)
    module_logger.setLevel(verbosity)
    return module_logger
def __init__(self, config, device, verbose, chkpt, tensorboard, multigpu):
    """Initialise the trainer: move model to device, set up logging and
    optional TensorBoard summary writing.

    Args:
        config: configuration object exposing ``model`` and ``session_dir``
        device: target device for the model
        verbose: verbosity flag stored for later use
        chkpt: checkpointing option
        tensorboard (bool): create a SummaryWriter when true
        multigpu: multi-GPU flag stored for later use
    """
    super(Trainer, self).__init__()
    self._device = device
    self._config = config
    self._config.model.to(self._device)
    self._verbose = verbose
    self._logger = verboselogs.VerboseLogger('demo')
    self._logger.addHandler(logging.StreamHandler())
    self._logger.setLevel(logging.INFO)
    self._chkpt = chkpt
    self._tensorboard = tensorboard
    if tensorboard:
        # Summaries land in the session's tensorboard directory.
        summary_dir = os.path.join(config.session_dir,
                                   ConfigFile.tensorboard_dirname)
        self._writer = SummaryWriter(log_dir=summary_dir)
    self._multigpu = multigpu
def __init__(self, args):
    """Boot the crocodile-hunter runtime.

    Startup plan (from the original notes):
      1. Bootstrap dependencies (test GPS, check for SDR)
      2. Start srsue with config file (watch srsue for crashes and restart)
      3. Start watchdog daemon
      4. Start web daemon
    """
    self.threads = []
    self.subprocs = []
    self.debug = args.debug
    self.disable_gps = args.disable_gps
    self.disable_wigle = args.disable_wigle
    self.web_only = args.web_only
    # Shared parser: also published back onto args for other components.
    self.config_fp = 'config.ini'
    self.config = args.config = configparser.ConfigParser()
    self.config.read(self.config_fp)
    self.earfcn_list = []
    # Graceful shutdown on Ctrl-C / kill.
    signal.signal(signal.SIGINT, self.signal_handler)
    signal.signal(signal.SIGTERM, self.signal_handler)
    # Project selection: CLI flag wins, otherwise the configured default.
    if args.project_name:
        self.project_name = args.project_name
    else:
        self.project_name = self.config['general']['default_project']
        args.project_name = self.config['general']['default_project']
    # Ensure the project has its own config section.
    if self.project_name not in self.config:
        self.config[self.project_name] = {}
    # GPSD settings
    self.gpsd_args = {
        'host': self.config['gpsd']['host'],
        'port': int(self.config['gpsd']['port'])
    }
    # Set up logging (shared with other components through args.logger).
    self.logger = args.logger = verboselogs.VerboseLogger(
        "crocodile-hunter")
    fmt = f"\b * %(asctime)s {self.project_name} - %(levelname)s %(message)s"
    # Debug flag raises console verbosity from VERBOSE to DEBUG.
    if (self.debug):
        log_level = "DEBUG"
    else:
        log_level = "VERBOSE"
    coloredlogs.install(level=log_level, fmt=fmt, datefmt='%H:%M:%S')
    self.watchdog = Watchdog(args)
def __init__(self, subreddit_name):
    """Connect to Reddit and configure colored logging for downloads.

    Args:
        subreddit_name (str): name of the subreddit to download from.
    """
    self.subreddit_name = subreddit_name
    # Read-only PRAW client using the class-level application credentials.
    reddit = praw.Reddit(
        client_id=SubredditDownloader.REDDIT_CLIENT_ID,
        client_secret=SubredditDownloader.REDDIT_CLIENT_SECRET,
        user_agent="saveddit (by /u/p_ranav)",
    )
    self.subreddit = reddit.subreddit(subreddit_name)
    self.logger = verboselogs.VerboseLogger(__name__)
    # Per-level console colors handed to coloredlogs.
    level_styles = {
        'critical': {'bold': True, 'color': 'red'},
        'debug': {'color': 'green'},
        'error': {'color': 'red'},
        'info': {'color': 'white'},
        'notice': {'color': 'magenta'},
        'spam': {'color': 'white', 'faint': True},
        'success': {'bold': True, 'color': 'green'},
        'verbose': {'color': 'blue'},
        'warning': {'color': 'yellow'}
    }
    # SPAM is the lowest level, so every message is displayed.
    coloredlogs.install(level='SPAM', logger=self.logger,
                        fmt='%(message)s', level_styles=level_styles)
    # Indentation prefixes used for nested log output.
    self.indent_1 = ""
    self.indent_2 = ""
def test_custom_methods(self):
    """
    Test logging functions.

    Exercise :func:`~verboselogs.VerboseLogger.verbose()`,
    :func:`~verboselogs.VerboseLogger.notice()` and
    :func:`~verboselogs.VerboseLogger.spam()`: each must pass its own
    level through to ``Logger._log``.
    """
    message = "Any random message"
    for method_name in ('notice', 'verbose', 'spam'):
        vlogger = verboselogs.VerboseLogger(random_string())
        vlogger._log = mock.MagicMock()
        expected_level = getattr(verboselogs, method_name.upper())
        getattr(vlogger, method_name.lower())(message)
        vlogger._log.assert_called_with(expected_level, message, ())
def setup_logging():
    """Configure root + colored logging and return this module's logger."""
    logging.getLogger().setLevel(LOGGING_LEVEL)
    # install() must run first so logging.SPAM exists for coloredlogs.
    verboselogs.install()
    coloredlogs.install(level=logging.SPAM,
                        fmt='%(asctime)s [%(levelname)s] %(name)s: %(message)s')
    log = verboselogs.VerboseLogger(__name__)
    # uvicorn is chatty below INFO; cap it.
    logging.getLogger('uvicorn').setLevel(logging.INFO)
    return log
def log(level):
    """Return a verbose logger set to the level named by *level*.

    Args:
        level (str): attribute name on the logging module, e.g. "INFO".

    Fix: the original guarded ``import logging`` with an ImportError handler
    telling the user to ``pip install logging`` — logging is part of the
    standard library and cannot be missing or pip-installed. Only the
    third-party verboselogs dependency needs a guard.
    """
    import logging
    try:
        import verboselogs
    except ImportError:
        sys.exit("""You need verboselogs!
                install it from http://pypi.python.org/pypi
                or run pip install verboselogs""")
    # set logger level from parent class
    logger = verboselogs.VerboseLogger('')
    logger.setLevel(getattr(logging, level))
    return logger
def setup_log(stream_log_level=logging.INFO):
    """Configure file logging (always VERBOSE) plus a console handler.

    Args:
        stream_log_level: level for the console handler only; the
            timestamped run_*.log file records everything at VERBOSE.
    """
    global logger
    ts = datetime.today().strftime('%Y%m%dT%H%M')
    # Root logger writes to a per-run file.
    # NOTE(review): logging.VERBOSE only exists after verboselogs.install()
    # has patched the logging module — confirm it runs before this call.
    logging.basicConfig(
        filename=f'run_{ts}.log',
        # format='%(asctime)s %(caller)-25s %(levelname)-8s %(message)s',
        format='%(asctime)s %(levelname)-8s %(message)s',
        datefmt='%m-%d %H:%M',
        filemode='w',
        level=logging.VERBOSE)
    console = logging.StreamHandler()
    console.setLevel(stream_log_level)
    f = logging.Formatter('%(levelname)-8s %(message)s')
    console.setFormatter(f)
    logger = verboselogs.VerboseLogger('')
    # NOTE(review): a Logger is not a Handler; this works only because Logger
    # happens to expose .level and .handle(), effectively delegating records
    # to the root logger (and its file handler). Fragile — verify intent.
    logger.addHandler(logging.getLogger(''))
    # logger = logging.getLogger('')
    # Attach the console handler, or replace it on re-invocation so
    # repeated calls do not stack duplicate console handlers.
    if len(logger.handlers) == 1:
        logger.addHandler(console)
    else:
        logger.handlers[1] = console
def __init__(self):
    """Parse CLI arguments, publish service settings, and wire up signals."""
    arguments = docopt(__doc__, version=self.VERSION)
    self.id = arguments.get('--id')
    self.name = self.NAME
    self.version = self.VERSION
    self.description = self.DESCRIPTION
    self.debug = arguments.get('--debug')
    self.verbose = arguments.get('--verbose')
    self.logger = verboselogs.VerboseLogger(__name__)
    # Expose runtime options through the shared settings registry.
    settings.set('service.id', self.id)
    settings.set('service.version', self.VERSION)
    settings.set('service.description', self.DESCRIPTION)
    settings.set('debug.active', self.debug)
    settings.set('debug.verbose', self.verbose)
    # Graceful shutdown on Ctrl-C / kill.
    signal.signal(signal.SIGINT, self.stop)
    signal.signal(signal.SIGTERM, self.stop)
    # Precedence: --debug beats --verbose beats the INFO default.
    if self.debug:
        self.logger.level = logging.DEBUG
    elif self.verbose:
        self.logger.level = verboselogs.VERBOSE
    else:
        self.logger.level = logging.INFO
def logger():
    """Build the session logger: common filter, file handler, colored output."""
    # Both install() branches and the file formatter use the exact same
    # format string, so define it once.
    fmt = ('%(asctime)s [%(session_id)s] [%(component)s] %(action)s'
           ' [%(levelname)s] %(message)s')
    LoggerOptions._SESSION_ID = LoggerOptions.generate_session_id()
    # NOTE(review): the file object is never closed; the handler owns it for
    # the process lifetime.
    logfile = open(LoggerOptions._LOGFILE, "a")
    file_stream = logging.StreamHandler(stream=logfile)
    file_stream.setFormatter(logging.Formatter(fmt, "%Y-%m-%d %H:%M:%S"))
    log = verboselogs.VerboseLogger(__name__)
    log.addFilter(_Commmon_filter())
    log.addHandler(file_stream)
    if LoggerOptions.DEBUG:
        coloredlogs.install(fmt=fmt, logger=log, level="debug")
    else:
        coloredlogs.install(fmt=fmt, logger=log)
    return log
import verboselogs from triage.database_reflection import table_has_data, table_row_count, table_exists, table_has_duplicates logger = verboselogs.VerboseLogger(__name__) DEFAULT_ACTIVE_STATE = "active" class EntityDateTableGenerator: """Create a table containing state membership on different dates The structure of the output table is: entity_id date active (boolean): Whether or not the entity is considered 'active' (i.e., in the cohort or subset) on that date Args: db_engine (sqlalchemy.engine) experiment_hash (string) unique identifier for the experiment query (string) SQL query string to select entities for a given as_of_date The as_of_date should be parameterized with brackets: {as_of_date} replace (boolean) Whether or not to overwrite old rows. If false, each as-of-date will query to see if there are existing rows and not run the query if so. If true, the existing table will be dropped and recreated. """ def __init__(self, query, db_engine, entity_date_table_name,
import subprocess import logging, verboselogs LOG = verboselogs.VerboseLogger('clicommand') LOG.addHandler(logging.StreamHandler()) LOG.setLevel(logging.INFO) class CLICommand(object): def __init__(self, dry_run = 1): self._dry_run = dry_run LOG.verbose('Dry-run init %s' % self._dry_run) def run_command(self, command, dry_run = None): if dry_run is None: dry_run = self._dry_run output = None LOG.info('') LOG.info('******************** Run command ********************') LOG.info(command) # If dry-run option, no execute command if not dry_run: p = subprocess.Popen(command, shell=True, stdout=subprocess.PIPE) output, error = p.communicate() if p.returncode != 0: LOG.warning('---------- ERROR ----------') if p.returncode == 143: raise ValueError('Timeout %ss' % self._context.opt['--timeout']) else:
from src.qmt.generator import Generator
from src.qmt.parser import Parser
from src.qmt.timer import Timer
import dill
import numpy as np
import multiprocessing
from pathos.multiprocessing import ProcessingPool as Pool
import coloredlogs, verboselogs
import copy
import matplotlib.pyplot as plt

# create logger
coloredlogs.install(level='INFO')
logger = verboselogs.VerboseLogger('qmt::collector ')

import subprocess
import h5py
import progressbar
import os

bar = progressbar.ProgressBar()
# Locate every serialized GA run under the current tree.
# NOTE(review): shell=True with a fixed command string is fine here, but do
# not interpolate user input into this command.
files = subprocess.check_output('find . -name ga.dill', shell=True).split()
all_structures = []

# Pre-allocate HDF5 datasets for up to 100k binary structure images
# (128x128x1) and their associated purity / current scalars.
h5_file = h5py.File('binary_images.h5')
h5_file.create_dataset('images', shape=(100000, 128, 128, 1))
h5_file.create_dataset('k_prime_purity', shape=(100000, 1))
h5_file.create_dataset('k_purity', shape=(100000, 1))
h5_file.create_dataset('total_current', shape=(100000, 1))