def _set_level(self, level):
    """Set the level of this wrapper's underlying logger.

    :param level: either a bool (True -> "INFO", False -> "ERROR") or one of
        the standard level names "DEBUG", "INFO", "WARNING", "ERROR",
        "CRITICAL".
    """
    if isinstance(level, bool):
        # booleans are a convenience shorthand: True = verbose, False = quiet
        level = "INFO" if level else "ERROR"
    assert level in ["DEBUG", "INFO", "WARNING", "ERROR", "CRITICAL"]
    # BUG FIX: the numeric level was computed but never used; the logger was
    # set with the string name instead. Resolve the name and apply it.
    logging_level = getattr(colorlog.logging.logging, level)
    colorlog.getLogger(self.name).setLevel(logging_level)
def _load_config(self):
    """Load the account configuration from config.json (once).

    Picks the account at cli_args['config_index'] from the 'accounts' list,
    merges it over the 'defaults' section and stores the result in
    self.config. Returns False when the account's auth_service is invalid.
    """
    if self.config is None:
        config_file = "config.json"
        # If config file exists, load variables from json
        load = {}
        if os.path.isfile(config_file):
            with open(config_file) as data:
                load.update(json.load(data))
        defaults = load.get('defaults', {})
        # NOTE(review): raises IndexError if 'accounts' is missing or shorter
        # than config_index -- presumably guaranteed by the caller; confirm.
        config = load.get('accounts', [])[self.cli_args['config_index']]
        if self.cli_args['debug'] or config.get('debug', False):
            # turn on verbose logging for every involved library
            colorlog.getLogger("requests").setLevel(logging.DEBUG)
            colorlog.getLogger("pgoapi").setLevel(logging.DEBUG)
            colorlog.getLogger("poketrainer").setLevel(logging.DEBUG)
            colorlog.getLogger("rpc_api").setLevel(logging.DEBUG)
        if config.get('auth_service', '') not in ['ptc', 'google']:
            self.log.error("Invalid Auth service specified for account %s! ('ptc' or 'google')",
                           config.get('username', 'NA'))
            return False
        # merge account section with defaults
        self.config = Config(dict_merge(defaults, config), self.cli_args)
    return True
def init_logger(log_file=None, out=True):
    """Configure the 'integron_finder' logger.

    :param log_file: optional path; when given, messages are also written
        there with a plain (uncolored) format.
    :param out: when True, attach a colored handler on stdout; otherwise
        attach a silent NullHandler.
    """
    import colorlog
    logging = colorlog.logging.logging
    logger = colorlog.getLogger('integron_finder')
    if out:
        colored = colorlog.ColoredFormatter(
            "%(log_color)s%(levelname)-8s : %(reset)s %(message)s",
            datefmt=None,
            reset=True,
            log_colors={'DEBUG': 'cyan',
                        'INFO': 'green',
                        'WARNING': 'yellow',
                        'ERROR': 'red',
                        'CRITICAL': 'bold_red'},
            secondary_log_colors={},
            style='%')
        console = colorlog.StreamHandler(sys.stdout)
        console.setFormatter(colored)
        logger.addHandler(console)
    else:
        logger.addHandler(logging.NullHandler())
    if log_file:
        to_file = logging.FileHandler(log_file)
        to_file.setFormatter(logging.Formatter("%(levelname)-8s : %(message)s"))
        logger.addHandler(to_file)
    logger.setLevel(logging.WARNING)
def main(args=None, log_level=None):
    """
    main entry point to integron_split

    :param str args: the arguments passed on the command line
    :param log_level: the output verbosity
    :type log_level: a positive int or a string among
        'DEBUG', 'INFO', 'WARNING', 'ERROR', 'CRITICAL'
    """
    global _log
    if args is None:
        args = sys.argv[1:]
    parsed_args = parse_args(args)
    out_log = os.path.join(parsed_args.outdir, 'integron_split.out')
    integron_finder.init_logger(log_file=out_log, out=not parsed_args.mute)
    _log = colorlog.getLogger('integron_finder.split')
    if log_level:
        # used by unit tests to mute or unmute logs
        logger_set_level(log_level)
    else:
        # the level comes from the command-line options
        logger_set_level(utils.log_level(parsed_args.verbose, parsed_args.quiet))
    chunk_names = split(parsed_args.replicon,
                        chunk=parsed_args.chunk,
                        outdir=parsed_args.outdir)
    print(' '.join(chunk_names))
def excise(args, parser):
    """Write excised copies of every witness of the requested works.

    Reads the n-grams listed in args.ngrams, replaces them in each witness
    of each work in args.works, and writes the results under args.output.
    """
    logger = colorlog.getLogger('tacl')
    tokenizer = utils.get_tokenizer(args)
    corpus = tacl.Corpus(args.corpus, tokenizer)
    with open(args.ngrams) as fh:
        ngrams = [line.strip() for line in fh]
    # It is no issue if the output directory already exists; it is a
    # reasonable use case to create an excised corpus from multiple
    # excise operations.
    try:
        os.mkdir(args.output)
    except FileExistsError:
        pass
    for work in args.works:
        # It is worth warning about writing in existing work directories,
        # however, since that might be unintended. Do not prevent this,
        # however, since it is a reasonable use case.
        try:
            os.mkdir(os.path.join(args.output, work))
        except FileExistsError:
            logger.warning(constants.EXCISE_OVERWRITE_WORK_WARNING, work)
        for witness in corpus.get_witnesses(work):
            out_path = os.path.join(args.output, witness.get_filename())
            excised = witness.excise(ngrams, args.replacement)
            with open(out_path, 'w') as fh:
                fh.write(excised)
def catch_log(self):
    """Temporarily capture the 'integron_finder' logger output in memory.

    Swaps the logger's handlers for a single StringIO-backed stream handler
    and yields a LoggerWrapper; the original handlers are restored on exit.
    """
    logger = colorlog.getLogger('integron_finder')
    saved_handlers = logger.handlers
    memory_handler = colorlog.StreamHandler(StringIO())
    try:
        logger.handlers = [memory_handler]
        yield LoggerWrapper(logger)
    finally:
        logger.handlers = saved_handlers
def test_example():
    """Tests the usage example from the README"""
    import colorlog
    formatter = colorlog.ColoredFormatter(
        '%(log_color)s%(levelname)s:%(name)s:%(message)s')
    handler = colorlog.StreamHandler()
    handler.setFormatter(formatter)
    logger = colorlog.getLogger('example')
    logger.addHandler(handler)
def main():
    """Parse the command line and dispatch to the selected sub-command,
    printing help when none was chosen."""
    parser = generate_parser()
    args = parser.parse_args()
    logger = colorlog.getLogger("tacl")
    if hasattr(args, "verbose"):
        utils.configure_logging(args.verbose, logger)
    if not hasattr(args, "func"):
        parser.print_help()
    else:
        args.func(args, parser)
def main():
    """Parse the command line and run the selected sub-command, converting
    any TACLError into an argparse error message."""
    parser = generate_parser()
    args = parser.parse_args()
    logger = colorlog.getLogger('tacl')
    if hasattr(args, 'verbose'):
        utils.configure_logging(args.verbose, logger)
    if not hasattr(args, 'func'):
        parser.print_help()
    else:
        try:
            args.func(args, parser)
        except TACLError as err:
            parser.error(err)
def logger_set_level(level='WARNING'):
    """Set the verbosity of the 'integron_finder' logger.

    :param level: a level name among NOTSET, DEBUG, INFO, WARNING, ERROR,
        CRITICAL, or a positive integer.
    :raise IntegronError: when level is neither a known name nor a positive
        integer.
    """
    # default value must be a string
    # cannot be colorlog.logging.logging.WARNING for instance
    # because setup import __init__ to get __version__
    # so logger_set_level is defined
    # if level is colorlog.logging.logging.WARNING
    # that mean that colorlog must be already installed
    # otherwise an error occured during pip install
    # NameError: name 'colorlog' is not defined
    import colorlog
    levels = {'NOTSET': colorlog.logging.logging.NOTSET,
              'DEBUG': colorlog.logging.logging.DEBUG,
              'INFO': colorlog.logging.logging.INFO,
              'WARNING': colorlog.logging.logging.WARNING,
              'ERROR': colorlog.logging.logging.ERROR,
              'CRITICAL': colorlog.logging.logging.CRITICAL,
              }
    if level in levels:
        level = levels[level]
    elif not isinstance(level, int) or level < 0:
        # BUG FIX: the original raised with a literal, unfilled '{}'
        # placeholder in the message (in two duplicated branches).
        raise IntegronError("Level must be {} or a positive integer".format(
            ', '.join(levels)))
    logger = colorlog.getLogger('integron_finder')
    if level <= colorlog.logging.logging.DEBUG:
        # in debug mode enrich both handlers with module and line number
        # assumes handlers[0] is the stream handler and handlers[1] the file
        # handler installed by init_logger -- TODO confirm ordering
        stdout_formatter = colorlog.ColoredFormatter(
            "%(log_color)s%(levelname)-8s : %(module)s: L %(lineno)d :%(reset)s %(message)s",
            datefmt=None,
            reset=True,
            log_colors={'DEBUG': 'cyan',
                        'INFO': 'green',
                        'WARNING': 'yellow',
                        'ERROR': 'red',
                        'CRITICAL': 'bold_red',
                        },
            secondary_log_colors={},
            style='%'
        )
        stdout_handler = logger.handlers[0]
        stdout_handler.setFormatter(stdout_formatter)
        logging = colorlog.logging.logging
        file_formatter = logging.Formatter(
            "%(levelname)-8s : %(module)s: L %(lineno)d : %(message)s")
        file_handler = logger.handlers[1]
        file_handler.setFormatter(file_formatter)
    logger.setLevel(level)
def _get_level(self):
    """Return the underlying logger's level as a standard name, or the raw
    numeric value when it matches no standard level."""
    names = {10: "DEBUG",
             20: "INFO",
             30: "WARNING",
             40: "ERROR",
             50: "CRITICAL"}
    current = colorlog.getLogger(self.name).level
    return names.get(current, current)
def create_logger(name, color='reset', log_level=logging.INFO, log_colors=None):
    """Create and return a colorized, non-propagating logger.

    :param name: logger name passed to colorlog.getLogger
    :param color: color tag applied to the timestamp/module/message segments
    :param log_level: initial level of the returned logger
    :param log_colors: mapping of level name to color; defaults to a
        white/red scheme. BUG FIX: this was a mutable default argument, so
        one shared dict was reused (and mutable) across all calls.
    :return: the configured logger
    """
    if log_colors is None:
        log_colors = {'DEBUG': 'white',
                      'INFO': 'white',
                      'WARNING': 'red',
                      'ERROR': 'red',
                      'CRITICAL': 'red,bg_white',
                      }
    handler = colorlog.StreamHandler()
    handler.setFormatter(colorlog.ColoredFormatter(
        '%(log_color)s%(asctime)s %(' + color + ')s[%(module)10s] %(log_color)s[%(levelname)5s] %(' + color + ')s%(message)s',
        log_colors=log_colors))
    log = colorlog.getLogger(name)
    log.propagate = False
    log.addHandler(handler)
    log.setLevel(log_level)
    return log
def __init__(self, name, log_level=10, filepath=None, console=False):
    """Set up a new logger controller, silenced with a NullHandler by default.

    :param name: logger name
    :param log_level: numeric level (default 10 == DEBUG)
    :param filepath: accepted but currently unused (see commented-out code)
    :param console: when True (and colors are available) use a colorlog logger
    """
    # Sets up a new logger, and sets it to Null by default
    self.name = name
    self.formatter_default = self.get_formatter_plainmsg()
    self.handler_null = logging.NullHandler()
    # NOTE(review): console_colours presumably flags colorlog availability
    # at import time -- confirm where it is defined.
    if console and console_colours:
        self.logger = colorlog.getLogger(name)
    else:
        self.logger = logging.getLogger(name)
    self.logger.addHandler(self.handler_null)
    self.set_loglevel(log_level)
    # self.filepath = filepath
    # else:
    # self.set_logger_console()
    if self.debug_this_class:
        print("New log controller intiated; name: ", self.name, " level: ", log_level)
def __init__(self, name="root", level="WARNING"):
    """Wrap a colorlog logger with a colored stream handler.

    The handler is attached only when the named logger has none yet, so
    constructing several wrappers does not duplicate output lines.
    """
    self.name = name
    stream_handler = colorlog.StreamHandler()
    stream_handler.setFormatter(colorlog.ColoredFormatter(
        "%(log_color)s%(levelname)-8s[%(name)s]: %(reset)s %(blue)s%(message)s",
        datefmt=None,
        reset=True,
        log_colors=colors,
        secondary_log_colors={},
        style='%'
    ))
    logger = colorlog.getLogger(self.name)
    if not logger.handlers:
        logger.addHandler(stream_handler)
    self._set_level(level)
def main():
    """Compare the .src/import and .src/masks folders and report any car
    whose panorama (.jpg) or mask (.psb) counterpart is missing."""
    parser = argparse.ArgumentParser(
        description='Compares files in .src/panos/ and .src/masks/ to make sure each car has its corresponding mask file.')
    args = parser.parse_args()

    handler = colorlog.StreamHandler()
    handler.setFormatter(colorlog.ColoredFormatter(
        '%(log_color)s%(levelname)s:%(message)s'))
    logger = colorlog.getLogger()
    logger.addHandler(handler)
    logger.setLevel(level=logging.INFO)
    logger.info("Started")

    importFolder = '//media//e//virtual_tours//gforces//cars//.src//import'
    masksFolder = '//media//e//virtual_tours//gforces//cars//.src//masks'
    # base names (without extension) present in each folder
    allimports = [getbasename(item) for item in glob.glob(importFolder + '/*.jpg')]
    allmasks = [getbasename(item) for item in glob.glob(masksFolder + '/*.psb')]

    comparation1 = set(allmasks).difference(allimports)   # masks with no import
    comparation2 = set(allimports).difference(allmasks)   # imports with no mask
    logger.info('No of items in IMPORT folder -------------> ' + str(len(allimports)))
    logger.info('No of items in MASKS folder --------------> ' + str(len(allmasks)))
    for missing1 in comparation1:
        logger.warning('Items missing in IMPORT folder: ' + missing1 + '.jpg')
    # BUG FIX: this loop iterated comparation1 again, so missing masks were
    # never reported; it must iterate comparation2.
    for missing2 in comparation2:
        logger.warning('Items missing in MASKS folder: ' + missing2 + '.psb')
    if not comparation1 and not comparation2:
        logger.info('All OK!!!')
    logger.info('EOL')
def main(args=None, log_level=None):
    """
    main entry point to integron_merge

    :param str args: the arguments passed on the command line
    :param log_level: the output verbosity
    :type log_level: a positive int or a string among
        'DEBUG', 'INFO', 'WARNING', 'ERROR', 'CRITICAL'
    """
    global _log
    args = sys.argv[1:] if args is None else args
    parsed_args = parse_args(args)
    integron_finder.init_logger()
    _log = colorlog.getLogger('integron_finder.merge')
    # BUG FIX: the 'if not log_level:' test was accidentally duplicated, so
    # the else branch attached to the inner if and an explicitly supplied
    # log_level was silently ignored.
    if not log_level:
        # logs are specified from args options
        logger_set_level(utils.log_level(parsed_args.verbose, parsed_args.quiet))
    else:
        # used by unit tests to mute or unmute logs
        logger_set_level(log_level)
    outdir = os.path.realpath(parsed_args.outdir)
    if os.path.exists(outdir):
        if not os.path.isdir(outdir):
            msg = "'{}' already exists and is not a directory".format(outdir)
            _log.critical(msg)
            raise IOError(msg)
    else:
        os.makedirs(parsed_args.outdir)
    integron_file_out = os.path.join(outdir, parsed_args.outfile + ".integrons")
    merge_integrons(integron_file_out, *parsed_args.results)
    summary_file_out = os.path.join(outdir, parsed_args.outfile + ".summary")
    merge_summary(summary_file_out, *parsed_args.results)
    # carry auxiliary outputs over to the merged result directory
    copy_file(outdir, '.gbk', *parsed_args.results)
    copy_file(outdir, '.pdf', *parsed_args.results)
    copy_dir(outdir, 'tmp_*', *parsed_args.results)
def main():
    """Delete a car's virtual tour folder and its panoramas, after an
    interactive confirmation."""
    parser = argparse.ArgumentParser(
        description="Deletes the specified virtual tour folder and the panoramas in the .SRC/PANOS folder"
    )
    parser.add_argument("-c", "--carname", action=readable_dir, dest="carname",
                        help="Car name to be deleted")
    args = parser.parse_args()

    handler = colorlog.StreamHandler()
    handler.setFormatter(colorlog.ColoredFormatter("%(log_color)s%(levelname)s:%(message)s"))
    logger = colorlog.getLogger()
    logger.addHandler(handler)
    logger.setLevel(level=logging.INFO)

    name1 = args.carname
    logger.info("Started")
    message = "Are you sure you want to delete: " + name1 + "?"
    # BUG FIX: the original recorded the answer in an unused variable and
    # deleted the folders regardless; a negative answer must abort.
    if not query_yes_no(message):
        logger.info("EOL")
        return

    tourpath = "//media//e//virtual_tours//gforces//cars//" + name1
    panospath = "//media//e//virtual_tours//gforces//cars//.src//panos//" + name1
    if os.path.exists(tourpath):
        shutil.rmtree(tourpath)
        logger.info(tourpath + " removed")
    if os.path.exists(panospath):
        shutil.rmtree(panospath)
        logger.info(panospath + " removed")
    logger.info("EOL")
def logger(name):
    """Return the colorlog logger registered under *name*."""
    return colorlog.getLogger(name)
def main(args=None, loglevel=None):
    """
    main entry point to integron_finder

    :param str args: the arguments passed on the command line
    :param loglevel: the output verbosity
    :type loglevel: a positive int or a string among
        'DEBUG', 'INFO', 'WARNING', 'ERROR', 'CRITICAL'
    """
    global _log
    args = sys.argv[1:] if args is None else args
    config = parse_args(args)

    ###################################
    # Prepare directories for results #
    ###################################
    # need to create directory before to init logger
    # as we write log in integron_finder.out in this dir
    if not os.path.exists(config.outdir):
        os.mkdir(config.outdir)
    elif not os.path.isdir(config.outdir):
        msg = "outdir '{}' already exists and is not a directory".format(config.outdir)
        # we cannot log it because loggers are not initialized yet
        raise IsADirectoryError(msg)
    if not os.path.exists(config.result_dir):
        os.mkdir(config.result_dir)
    else:
        # BUG FIX: these two messages formatted config.outdir instead of the
        # result dir they are actually complaining about.
        if not os.path.isdir(config.result_dir):
            msg = "result dir '{}' already exists and is not a directory".format(config.result_dir)
            raise IsADirectoryError(msg)
        elif not os.access(config.result_dir, os.W_OK):
            msg = "result dir '{}' already exists and is not writable".format(config.result_dir)
            raise PermissionError(msg)

    ####################
    # init the loggers #
    ####################
    log_file = os.path.join(config.result_dir, 'integron_finder.out')
    integron_finder.init_logger(log_file=log_file, out=not config.mute)
    _log = colorlog.getLogger('integron_finder')
    if not loglevel:
        # logs are specified from args options
        logger_set_level(config.log_level)
    else:
        # used by unit tests to mute or unmute logs
        logger_set_level(loglevel)

    #######################################
    # do last config check before running #
    #######################################
    if config.cmsearch is None:
        msg = """cannot find 'cmsearch' in PATH.
Please install infernal package or setup 'cmsearch' binary path with --cmsearch option"""
        _log.critical(msg)
        raise RuntimeError(msg)
    if config.hmmsearch is None:
        msg = """cannot find 'hmmsearch' in PATH.
Please install hmmer package or setup 'hmmsearch' binary path with --hmmsearch option"""
        _log.critical(msg)
        raise RuntimeError(msg)
    if config.prodigal is None:
        msg = """cannot find 'prodigal' in PATH.
Please install prodigal package or setup 'prodigal' binary path with --prodigal option"""
        _log.critical(msg)
        raise RuntimeError(msg)

    ################
    # print Header #
    ################
    log_header = colorlog.getLogger('integron_finder.header')
    logging = colorlog.logging.logging
    handlers = []
    header_log_file = logging.FileHandler(log_file)
    handlers.append(header_log_file)
    if not config.mute:
        header_stream = colorlog.StreamHandler(sys.stdout)
        handlers.append(header_stream)
    formatter = colorlog.ColoredFormatter("%(message)s")
    for h in handlers:
        h.setFormatter(formatter)
        log_header.addHandler(h)
    log_header.setLevel(colorlog.logging.logging.INFO)
    log_header.propagate = False
    log_header.info(header(args))

    with utils.FastaIterator(config.input_seq_path,
                             dist_threshold=config.distance_threshold) as sequences_db:
        ################
        # set topology #
        ################
        default_topology = 'circ' if len(sequences_db) == 1 else 'lin'
        if config.linear:
            default_topology = 'lin'
        elif config.circular:
            default_topology = 'circ'
        # the both options are mutually exclusive
        topologies = Topology(default_topology, topology_file=config.topology_file)
        # allow sequences_db to inject topology information
        # in seq.topology attribute
        sequences_db.topologies = topologies

        ##############
        # do the job #
        ##############
        sequences_db_len = len(sequences_db)
        all_integrons = []
        all_summaries = []
        for rep_no, replicon in enumerate(sequences_db, 1):
            # if replicon contains illegal characters
            # or replicon is too short < 50 bp
            # then replicon is None
            if replicon is not None:
                _log.info("############ Processing replicon {} ({}/{}) ############\n".format(
                    replicon.id, rep_no, sequences_db_len))
                integron_res, summary = find_integron_in_one_replicon(replicon, config)
                if integron_res:
                    all_integrons.append(integron_res)
                if summary:
                    all_summaries.append(summary)
            else:
                _log.warning("############ Skipping replicon {}/{} ############".format(
                    rep_no, sequences_db_len))

        if not config.split_results:
            _log.info("Merging integrons results.\n")
            agg_integrons = results.merge_results(*all_integrons)
            agg_summary = results.merge_results(*all_summaries)
            outfile_base_name = os.path.join(config.result_dir,
                                             utils.get_name_from_path(config.input_seq_path))
            merged_integron_file = outfile_base_name + ".integrons"
            if not agg_integrons.empty:
                agg_integrons.to_csv(merged_integron_file, sep="\t", index=False, na_rep="NA")
            else:
                with open(merged_integron_file, "w") as out_f:
                    out_f.write("# No Integron found\n")
            merged_summary_file = outfile_base_name + ".summary"
            # BUG FIX: the summary write was gated on agg_integrons.empty, so
            # a summary could be silently dropped (or written when empty).
            if not agg_summary.empty:
                agg_summary.to_csv(merged_summary_file, sep="\t", index=False, na_rep="NA",
                                   columns=['ID_replicon', 'ID_integron', 'complete', 'In0', 'CALIN'])
            for _file in all_integrons + all_summaries:
                if _file != merged_integron_file and _file != merged_summary_file:
                    # in special case where the merged file has the same name
                    # that a replicon result file
                    os.unlink(_file)
import logging
import colorlog
import argparse
from colorama import init
from termcolor import cprint
from datetime import datetime
from pyfiglet import figlet_format

# strip ANSI sequences when stdout is not a terminal
# NOTE(review): 'sys' is used here but not imported in this chunk --
# presumably imported earlier in the file; confirm.
init(autoreset=True, strip=not sys.stdout.isatty())

# logging: colored "[ZEUS] -> message" console output
handler = colorlog.StreamHandler()
handler.setFormatter(colorlog.ColoredFormatter(
    '%(log_color)s[%(name)s] \u2192 %(message)s', datefmt="%d/%m/%Y"))
logger = colorlog.getLogger("ZEUS")
logger.addHandler(handler)
logger.setLevel(logging.INFO)


class CreateFolderException(Exception):
    """Create Folder Module."""
    pass


class DuplicateModuleException(Exception):
    """Duplicate Module Name."""
    pass
from core.input.sp_group import SPGroupAPI
from resin import Resin

# root directory where log files are persisted
PERSISTENCE = '/mnt/data/tobalaba/'

# colored handler for interactive terminal output
tty_handler = colorlog.StreamHandler()
tty_handler.setFormatter(colorlog.ColoredFormatter('%(log_color)s%(message)s'))

if not os.path.exists(PERSISTENCE):
    os.makedirs(PERSISTENCE)

# plain (uncolored) handler writing to bond.log
file_handler = logging.FileHandler(PERSISTENCE + 'bond.log')
formatter = logging.Formatter('%(asctime)s [%(levelname)s]%(message)s')
file_handler.setFormatter(formatter)

# Default color scheme is 'example'
logger = colorlog.getLogger('example')
logger.addHandler(tty_handler)
logger.addHandler(file_handler)
logger.setLevel(logging.DEBUG)

# root logger mirrors everything into a separate error.log file
error_log = logging.getLogger()
error_file_handler = logging.FileHandler(PERSISTENCE + 'error.log')
error_file_handler.setFormatter(formatter)
error_log.addHandler(error_file_handler)
error_log.setLevel(logging.DEBUG)


class AsyncClientError(EnvironmentError):
    # base error for async client failures
    pass
def main(args):
    """Load a checkpoint, build the recognition system and run the test loop,
    saving per-sample results to args.results.

    :param args: parsed CLI namespace; uses checkpoint, n_frames, batch_size,
        datadir, split ('val' or 'test') and results.
    """
    logging.basicConfig(level=logging.INFO)
    handler = colorlog.StreamHandler()
    handler.setFormatter(
        colorlog.ColoredFormatter(
            "%(log_color)s%(levelname)s:%(name)s:%(message)s"))
    logger = colorlog.getLogger("example")
    logger.addHandler(handler)

    # load on CPU regardless of where the checkpoint was saved
    ckpt = torch.load(args.checkpoint, map_location=lambda storage, loc: storage)
    # Publicly released checkpoints use dicts for longevity, so we need to wrap them
    # up in an OmegaConf object as this is what EpicActionRecognitionSystem expects.
    cfg = OmegaConf.create(ckpt["hyper_parameters"])
    OmegaConf.set_struct(cfg, False)  # allow writing arbitrary keys without raising
    # exceptions
    cfg.data._root_gulp_dir = os.getcwd()  # set default root gulp dir to prevent
    # exceptions on instantiating the EpicActionRecognitionSystem
    system = EpicActionRecognitionSystem(cfg)
    system.load_state_dict(ckpt["state_dict"])
    if not cfg.get("log_graph", True):
        # MTRN can't be traced due to the model stochasticity so causes a JIT tracer
        # error, we allow you to prevent the tracer from running to log the graph when
        # the summary writer is created
        try:
            delattr(system, "example_input_array")
        except AttributeError:
            pass
    # CLI overrides take precedence over checkpoint hyper-parameters
    if args.n_frames is not None:
        cfg.data.test_frame_count = args.n_frames
    if args.batch_size is not None:
        cfg.learning.batch_size = args.batch_size
    if args.datadir is not None:
        data_dir_key = f"{args.split}_gulp_dir"
        cfg.data[data_dir_key] = args.datadir
    # Since we don't support writing results when using DP or DDP
    LOG.info("Disabling DP/DDP")
    cfg.trainer.accelerator = None
    n_gpus = 1
    LOG.info(f"Overwriting number of GPUs to {n_gpus}")
    cfg.trainer.gpus = n_gpus
    cfg["test.results_path"] = str(args.results)

    data_module = EpicActionRecogintionDataModule(cfg)
    if args.split == "val":
        dataloader = data_module.val_dataloader()
    elif args.split == "test":
        dataloader = data_module.test_dataloader()
    else:
        raise ValueError(
            f"Split {args.split!r} is not a recognised dataset split to "
            f"test on.")
    saver = ResultsSaver()
    trainer = Trainer(**cfg.trainer, callbacks=[saver])
    trainer.test(system, test_dataloaders=dataloader)
    saver.save_results("test", args.results)
PROJECT_NAME = "pygraph"
LEVEL = "INFO"

try:
    import colorlog

    handler = colorlog.StreamHandler()
    handler.setFormatter(
        colorlog.ColoredFormatter(
            "%(log_color)s%(levelname)-8s: %(message)-60s[%(module)s:%(lineno)d]"
        )
    )
    # NOTE(review): the name 'logging' is deliberately rebound to a Logger
    # instance, shadowing the stdlib module for the rest of this file.
    logging = colorlog.getLogger(PROJECT_NAME)
    logging.setLevel(LEVEL)
    logging.addHandler(handler)
except ModuleNotFoundError:
    # colorlog is optional: fall back to plain stdlib logging
    import logging
    logging.basicConfig(format="%(levelname)-8s: %(message)-60s[%(module)s:%(lineno)d]")
    logging = logging.getLogger(PROJECT_NAME)
    logging.setLevel(LEVEL)
                    action='store_true', default=False,
                    help='Flag, Actually modify database')
PARSER.add_argument('--verbose', dest='VERBOSE',
                    action='store_true', default=False,
                    help='Flag, Chatty')
PARSER.add_argument('--debug', dest='DEBUG',
                    action='store_true', default=False,
                    help='Flag, Very chatty')
ARG = PARSER.parse_args()

LOGGER = colorlog.getLogger()
# verbosity: --debug wins over --verbose; default is warnings only
if ARG.DEBUG:
    LOGGER.setLevel(colorlog.colorlog.logging.DEBUG)
elif ARG.VERBOSE:
    LOGGER.setLevel(colorlog.colorlog.logging.INFO)
else:
    LOGGER.setLevel(colorlog.colorlog.logging.WARNING)
HANDLER = colorlog.StreamHandler()
HANDLER.setFormatter(colorlog.ColoredFormatter())
LOGGER.addHandler(HANDLER)

initialize_program()
update_flyboy()
# COUNT is presumably a module-level counter dict filled by update_flyboy()
print("Duplicate Robot IDs in StockFinder: %d" % COUNT['robot'])
print("Associated KPIDs: %d" % COUNT['kp'])
print("Deleted KPIDs: %d" % COUNT['delete'])
def init(loop, argv):
    """Parse CLI/config options, configure logging and build the App.

    :param loop: event loop parameter; NOTE(review): it is shadowed below by
        asyncio.get_event_loop(), so the argument itself is unused -- confirm.
    :param argv: unused; argparse reads sys.argv directly.
    :return: the configured App instance.
    """
    parser = argparse.ArgumentParser(
        description="Simple processing server v 0.0.1")
    group = parser.add_mutually_exclusive_group()
    group.add_argument("-c", "--config", help="config file",
                       type=str, nargs=1, metavar=('PATH', ))
    group.add_argument("-l", "--log", help="log file",
                       type=str, nargs=1, metavar=('PATH', ))
    parser.add_argument("-v", "--verbose", help="increase output verbosity",
                        action="count", default=0)
    parser.add_argument("--json", help="json formatted logs",
                        action='store_true')
    args = parser.parse_args()

    config_file = "simple-processing.cnf"
    # BUG FIX: default log file name was misspelled 'somple-processing.log'
    log_file = "simple-processing.log"
    log_level = logging.WARNING
    logger = colorlog.getLogger('sp')

    if args.config is not None:
        # BUG FIX: nargs=1 yields a one-element list; take the path itself
        config_file = args.config[0]
    config = configparser.ConfigParser()
    config.read(config_file)

    if args.log is None:
        if "LOG" in config.sections():
            if "log_file" in config['LOG']:
                log_file = config['LOG']["log_file"]
            if "log_level" in config['LOG']:
                # BUG FIX: the original tested the 'info' value twice; the
                # duplicate branch was unreachable and has been removed.
                if config['LOG']["log_level"] == "info":
                    log_level = logging.INFO
                elif config['LOG']["log_level"] == "debug":
                    log_level = logging.DEBUG
    else:
        # BUG FIX: nargs=1 yields a one-element list; passing the list to
        # logging.FileHandler below would raise a TypeError.
        log_file = args.log[0]

    # -v flags override any config-file level
    if args.verbose == 0:
        log_level = logging.WARNING
    elif args.verbose == 1:
        log_level = logging.INFO
    elif args.verbose > 1:
        log_level = logging.DEBUG
    # NOTE(review): connector_debug / connector_debug_full are assigned but
    # never used in this function -- confirm whether they should be globals.
    if args.verbose > 3:
        connector_debug = True
    if args.verbose > 4:
        connector_debug = True
        connector_debug_full = True
    # a config-file level only applies when no -v flag raised the level
    if log_level == logging.WARNING and "LOG" in config.sections():
        if "log_level" in config['LOG']:
            if config['LOG']["log_level"] == "info":
                log_level = logging.INFO
            elif config['LOG']["log_level"] == "debug":
                log_level = logging.DEBUG

    if args.json:
        # machine-readable logs on the root logger
        logger = logging.getLogger()
        logHandler = logging.StreamHandler()
        formatter = jsonlogger.JsonFormatter(
            '%(created)s %(asctime)s %(levelname)s %(message)s %(module)s %(lineno)d)'
        )
        logHandler.setFormatter(formatter)
        logger.addHandler(logHandler)
        logger.setLevel(log_level)
    else:
        # colored console output plus a file copy
        logger.setLevel(log_level)
        logger.debug("test")
        fh = logging.FileHandler(log_file)
        fh.setLevel(log_level)
        ch = logging.StreamHandler()
        ch.setLevel(log_level)
        formatter = colorlog.ColoredFormatter(
            '%(log_color)s%(asctime)s %(levelname)s: %(message)s (%(module)s:%(lineno)d)'
        )
        fh.setFormatter(formatter)
        ch.setFormatter(formatter)
        logger.addHandler(fh)
        logger.addHandler(ch)

    # fail fast when the mandatory bitcoind section is missing
    try:
        config["BITCOIND"]["zeromq"]
        config["BITCOIND"]["rpc"]
    except Exception as err:
        print(traceback.format_exc())
        logger.critical("Bitcoind config failed: %s" % err)
        logger.critical("Shutdown")
        sys.exit(0)

    logger.setLevel(log_level)
    logger.info("Start")
    loop = asyncio.get_event_loop()
    app = App(loop, logger, config)
    return app
import colorlog
import io
import json
import os.path

from . import JanusSink

logger = colorlog.getLogger('Janus.januslib.filesinks')


class JanusFileSink(JanusSink):
    # Sink that writes each pushed post as <cachepath>/<post id>.json

    def __init__(self, cachepath, output):
        super().__init__(output)
        self.cachepath = cachepath

    def __str__(self):
        'return pretty name'
        # NOTE(review): the format string has one placeholder but two
        # arguments, so self.cachepath is silently dropped and self.id is
        # shown -- confirm which was intended.
        return '>>>File({})'.format(self.id, self.cachepath)

    def push(self, post):
        # create the cache directory lazily on first push
        if not os.path.exists(self.cachepath):
            os.makedirs(self.cachepath)
        with io.open('{}/{}.json'.format(self.cachepath, post['id']), 'wb') as f:
            f.write(json.dumps(post).encode())

    def finished(self):
        pass


class JanusCSVSink(JanusSink):
"""
Currently, openai gym offers a great range of environments and we try to test
all kinds of them (not all), however, "robotics" and "mujoco" requires a
license and we cannot test them, but theoretically they should work just fine.

Submit us a issue if you have found any problem.
"""
from machin.env.wrappers import openai_gym
from random import choice, sample
from colorlog import getLogger

import pytest
import gym
import numpy as np

logger = getLogger("default")

# number of parallel environments / drawn samples / workers used in the tests
ENV_NUM = 2
SAMPLE_NUM = 2
WORKER_NUM = 2


def mock_action(action_space: gym.spaces.Space):
    # draw a random valid action from the given space
    return action_space.sample()


def prepare_envs(env_list):
    # reset every environment so it is ready to be stepped
    for env in env_list:
        env.reset()


def should_skip(spec):
import colorlog

from clusterman.args import add_cluster_arg
from clusterman.args import add_cluster_config_directory_arg
from clusterman.args import add_env_config_path_arg
from clusterman.args import add_pool_arg
from clusterman.args import add_scheduler_arg
from clusterman.autoscaler.autoscaler import Autoscaler
from clusterman.autoscaler.pool_manager import PoolManager
from clusterman.config import setup_config
from clusterman.util import setup_logging
from clusterman.util import splay_event_time
from examples.batch.util import BatchRunningSentinelMixin
from examples.batch.util import suppress_request_limit_exceeded

logger = colorlog.getLogger(__name__)
colorlog.getLogger('clusterman_metrics')  # This just adds a handler to the clusterman_metrics logger


class AutoscalerBatch(BatchRunningSentinelMixin):
    def parse_args(self):
        # Build and parse this batch's command-line options.
        parser = argparse.ArgumentParser()
        arg_group = parser.add_argument_group('AutoscalerBatch options')
        add_cluster_arg(arg_group, required=True)
        add_pool_arg(arg_group)
        add_scheduler_arg(arg_group)
        add_cluster_config_directory_arg(arg_group)
        add_env_config_path_arg(arg_group)
        arg_group.add_argument(
            '--dry-run',
            default=False,
import colorlog
import signal

# module-level colored console logging for skplumber
handler = colorlog.StreamHandler()
handler.setFormatter(
    colorlog.ColoredFormatter("%(log_color)s%(levelname)s:%(name)s:%(message)s")
)
logger = colorlog.getLogger("skplumber")
logger.addHandler(handler)
logger.setLevel("INFO")


class EvaluationTimeoutError(Exception):
    # raised when an evaluation exceeds its time budget
    pass


class PipelineRunError(Exception):
    # raised when running a pipeline fails
    pass


class conditional_timeout:
    """
    Can be used to exit a function if it's taking too long. E.g. for a
    function `foo`, this can be done:

    ```
    # Will raise `EvaluationTimeoutError` if `foo` takes longer than 5 seconds.
    with conditional_timeout(5):
        foo()
    ```
import pika
import logging
import colorlog
import time
import warnings

from crawler import Crawler
from cleaner import fetch_and_clean_html

# filter warnings
warnings.filterwarnings("ignore")

# setup logging: colored "[NLPCrawl] message" console output at DEBUG level
logger = colorlog.getLogger('NLPCrawl')
logger.setLevel(logging.DEBUG)
ch = colorlog.StreamHandler()
formatter_ch = colorlog.ColoredFormatter('%(log_color)s[%(name)s] %(message)s')
ch.setFormatter(formatter_ch)
logger.addHandler(ch)

# RabbitMQ settings
RABBITMQ_HOST = 'rabbitmq'

# Queue Settings
HTML_QUEUE = 'HTML_QUEUE'
DOC_QUEUE = 'DOC_QUEUE'
FEATURE_QUEUE = 'FEAT_QUEUE'

# Thread Settings
NUM_CRAWL_THREAD = 8
NUM_PARSE_THREAD = 1
def test_colorlog_basicConfig(test_logger):
    """basicConfig with no arguments must leave the root logger usable."""
    colorlog.basicConfig()
    root = colorlog.getLogger()
    test_logger(root)
        "DEBUG": "cyan",
        "INFO": "green",
        "WARNING": "yellow",
        "ERROR": "red",
        "CRITICAL": "red,bg_white",
    },
    secondary_log_colors={},
    style="%",
)
handler = colorlog.StreamHandler()
handler.setFormatter(formatter)


def title_formatter(r):
    # Treat the text before the first ": " as the logger name and the
    # remainder as the message; mutating the record inside a filter rewrites
    # what the handlers will subsequently format.
    title, *msg = r.msg.split(": ")
    r.msg = ": ".join(msg)
    r.name = title
    return r


logger = colorlog.getLogger("test")
logger.addHandler(handler)
# NOTE(review): addFilter only needs a truthy/falsy result; returning the
# (truthy) record works, but this reads like a formatter -- confirm intent.
logger.addFilter(title_formatter)


def get_logger():
    # accessor for the module-level logger
    return logger


logger.setLevel("DEBUG")
""".. rubric:: Standalone application dedicated to conversion"""
import os
import argparse
import json
import sys

import colorlog

import bioconvert
from bioconvert import ConvBase
from bioconvert.core import graph
from bioconvert.core import utils
from bioconvert.core.base import ConvMeta
from bioconvert.core.converter import Bioconvert
from bioconvert.core.decorators import get_known_dependencies_with_availability
from bioconvert.core.registry import Registry

_log = colorlog.getLogger(__name__)


def main(args=None):
    """Entry point of the bioconvert standalone application."""
    registry = Registry()
    if args is None:
        args = sys.argv[1:]
    if not len(sys.argv) == 1:
        # check that the first argument is not a converter in the registry
        if args[0].lower() not in list(registry.get_converters_names()) \
                and "." in args[0]:
            in_ext = utils.get_extension(args[0], remove_compression=True)
import logging
from colorlog import ColoredFormatter, getLogger

# logger = logging.getLogger('__name__')
# root logger at DEBUG so the individual handlers decide what to emit
logger = getLogger()
logger.setLevel(logging.DEBUG)

# create console handler and set level to debug
ch = logging.StreamHandler()
ch.setLevel(logging.WARN)

# create a file handler
fh = logging.FileHandler('program.log')
fh.setLevel(logging.INFO)

# create formatter
formatter = logging.Formatter('%(asctime)s - %(name)s - %(levelname)s - %(message)s',
                              datefmt='%m/%d/%Y %I:%M:%S %p')

# add formatter to ch
ch.setFormatter(formatter)
fh.setFormatter(formatter)

col_formatter = ColoredFormatter(
    "%(cyan)s%(asctime)s %(log_color)s%(levelname)-8s%(reset)s %(blue)s%(message)s",
    datefmt='%I:%M:%S %p',
    reset=True,
    log_colors={
        'DEBUG': 'cyan',
        'INFO': 'green',
def main():
    """Rename a car tour: source assets, the tour folder, and XML references.

    Parses ``--origin``/``--destination`` from the command line, renames the
    matching files/folders under the virtual-tours root, then rewrites the
    old name inside the HTML listing and the tour XML files.

    Raises:
        SystemExit: via argparse on bad arguments.
    """
    # Use argparse to supply 'origin' and 'destination' of the car to rename
    parser = argparse.ArgumentParser(
        description='Rename a car and all the necessary files in the folders \
import, layers, masks, panos and the virtual tours themselver.')
    parser.add_argument('-o', '--origin', action=readabledir, dest='origin',
                        help='Name of the car to be renamed')
    parser.add_argument('-d', '--destination', dest='destination',
                        help='New name')
    args = parser.parse_args()

    handler = colorlog.StreamHandler()
    handler.setFormatter(
        colorlog.ColoredFormatter('%(log_color)s%(levelname)s:%(message)s'))
    logger = colorlog.getLogger()
    logger.addHandler(handler)
    logger.setLevel(level=logging.INFO)
    logger.info("Started")

    name1 = args.origin
    name2 = args.destination

    # Root differs between the Windows workstation and the mounted drive.
    if os.name == 'nt':
        rootdir = os.path.join('E:\\', 'virtual_tours', 'gforces')
    else:
        rootdir = os.path.join('/media', 'e', 'virtual_tours', 'gforces')

    # Source assets and the tour folder itself:
    # (path parts below rootdir, filename suffix appended to the car name).
    # Replaces five copy-pasted rename stanzas with one data-driven loop.
    renames = [
        (('cars', '.src', 'import'), '.jpg'),   # .src/import/###.jpg
        (('cars', '.src', 'layers'), '.psb'),   # .src/layers/###.psb
        (('cars', '.src', 'masks'), '.psb'),    # .src/masks/###.psb
        (('cars', '.src', 'panos'), ''),        # .src/panos/### (folder)
        (('cars',), ''),                        # ./### (tour folder)
    ]
    for parts, suffix in renames:
        path = os.path.join(rootdir, *parts)
        rename_item(path, name1 + suffix, name2 + suffix)

    # Rewrite the old name inside the HTML listing and the tour XML files.
    replace_str(os.path.join(rootdir, 'html', 'list', 'latest.html'),
                name1, name2)
    replace_str(os.path.join(rootdir, 'cars', name2, 'files', 'tour.xml'),
                name1, name2)
    replace_str(os.path.join(rootdir, 'cars', name2, 'files', 'content',
                             'index.xml'), name1, name2)

    # File ./###/files/scenes/*.xml
    scenes_dir = os.path.join(rootdir, 'cars', name2, 'files', 'scenes')
    # BUG FIX: the original used glob.glob(filepath + '\\*.xml'), which
    # hard-codes the Windows separator and matches nothing on the POSIX
    # branch supported above; os.path.join works on both.
    for xmlfile in glob.glob(os.path.join(scenes_dir, '*.xml')):
        replace_str(xmlfile, name1, name2)

    logger.warning('Open .src/layers/' + name2 +
                   '.psd and manually change the Background Layer name')
    logger.info('EOL')
def info(self, msg):
    """Emit *msg* at INFO level on this object's named colorlog logger."""
    log = colorlog.getLogger(self.name)
    log.info(msg)
import argparse import logging import sys import time import colorlog from faker import Faker from data_generator.kafka_generator import Kafka from data_generator.mssql_generator import MSSQL from data_generator.mysql_generator import MySQL from data_generator.oracle_generator import Oracle from data_generator.postgresql_generator import PostgreSQL logger = colorlog.getLogger('data-generator') def __init_logger(): handler = colorlog.StreamHandler() handler.setFormatter( colorlog.ColoredFormatter( '%(log_color)s%(levelname)s:%(name)s:%(message)s')) logger.addHandler(handler) logger.setLevel(logging.INFO) def main(): __init_logger() parser = argparse.ArgumentParser(description='Incremental Data Generator') parser.add_argument('-H',
def critical(self, msg):
    """Emit *msg* at CRITICAL level on this object's named colorlog logger."""
    log = colorlog.getLogger(self.name)
    log.critical(msg)
def __init__(self, args=None):
    """Build the compiler configuration from parsed command-line arguments.

    Defaults are assigned first, then unconditionally overridden from
    *args* (an argparse-style namespace).

    NOTE(review): despite the ``args=None`` default, the body dereferences
    ``args.debug`` etc. unconditionally, so passing None crashes — a
    namespace appears to be mandatory; confirm intent with callers.
    """
    self.log = colorlog.getLogger(self.__class__.__name__)
    """ General Stuff """
    self.debug = False
    self.epa_defs = '/resources/epa_defs.json'
    self.abstract_interaction = '/resources/abstract-interaction.txt'
    self.input = None
    self.input_file = None
    """ Chemical Stuff """
    # For classification, how big of filters to use.
    self.combine = combiner.CombineMethod.NAIVE
    self.smarts_length = 5
    self.filters = False
    # How to identify a chemical.
    self.identify = 4
    # What level to report things.
    self.error_level = ct.ReportingLevel.ERROR
    self.typecheck = combiner.TypeCheckLevel.NAIVE
    """ Compiler Stuff """
    # What is the target?
    self.target = target.Target.INKWELL
    self.supports_functions = False
    self.supports_recursion = False
    self.supports_nesting = False
    """ Necessary for identify """
    self.db_enabled = False
    # Database stuff.
    self.db = {'name': None, 'pass': None, 'addr': None, 'user': None,
               'driver': None}
    """ Build the config object now. """
    # self.log.warning(args)
    self.debug = args.debug
    # Directory of the script that was launched (the __main__ module).
    self.path = os.path.dirname(sys.modules['__main__'].__file__)
    if args.epa_defs:
        self.epa_defs = args.epa_defs
    if args.abs_int:
        self.abstract_interaction = args.abs_int
    self.input = args.input
    # Converts: /path/to/bioscript.bs => bioscript
    self.input_file = args.input.split("/")[-1].split(".")[0]
    # self.log.info(self.input_file)
    self.db['name'] = args.dbname
    self.db['user'] = args.dbuser
    self.db['pass'] = args.dbpass
    self.db['addr'] = args.dbaddr
    self.db['driver'] = args.dbdriver
    self.smarts_length = args.smarts
    self.filters = not args.no_filters
    self.identify = identifier.IdentifyLevel(args.identify)
    if args.simulate:
        self.combine = combiner.CombineMethod.SIMULATE
    else:
        self.combine = combiner.CombineMethod.NAIVE
    # Reporting level for type-check diagnostics: none / warn / error.
    if args.typechecklevel.lower() == "none":
        self.error_level = ct.ReportingLevel.NONE
    elif args.typechecklevel.lower() == "warn":
        self.error_level = ct.ReportingLevel.WARNING
    else:
        self.error_level = ct.ReportingLevel.ERROR
    # Type-check mode: "d"/"disable", "u"/"union", otherwise naive.
    if args.typecheck.lower() == "d" or args.typecheck.lower(
    ) == "disable":
        self.typecheck = combiner.TypeCheckLevel.DISABLED
    elif args.typecheck.lower() == "union" or args.typecheck.lower(
    ) == 'u':
        self.typecheck = combiner.TypeCheckLevel.UNION
    else:
        self.typecheck = combiner.TypeCheckLevel.NAIVE
    if args.target is not None:
        """ The support_* flags are defaulted above, so we just
        toggle the necessary flags when necessary. """
        if args.target.lower() == "m" or args.target.lower() == "mfsim":
            self.target = target.Target.MFSIM
            self.supports_functions = True
            self.supports_nesting = True
        elif args.target.lower() == 'i' or args.target.lower(
        ) == 'inkwell':
            self.target = target.Target.INKWELL
            self.supports_functions = True
        elif args.target.lower() == "p" or args.target.lower() == "puddle":
            self.target = target.Target.PUDDLE
            self.supports_functions = True
            self.supports_recursion = True
            self.supports_nesting = True
    # Enable the DB only when full credentials were supplied; then fill
    # in defaults for any missing connection details.
    if self.db['name'] and self.db['user'] and self.db['pass']:
        self.db_enabled = True
        if not self.db['addr']:
            self.db['addr'] = 'localhost'
        if not self.db['driver']:
            self.db['driver'] = 'mysql'
# Colourised console formatter for the 'tensorprob' package logger: the
# level name is coloured per log_colors, then reset before the message.
formatter = ColoredFormatter(
    "%(log_color)s%(levelname)s%(reset)s %(message)s",
    datefmt=None,
    reset=True,
    log_colors={
        'DEBUG': 'blue',
        'INFO': 'green',
        'WARNING': 'yellow',
        'ERROR': 'red',
        'CRITICAL': 'red,bg_white',
    },
    secondary_log_colors={},
    style='%'
)

# Package logger: INFO and above, via a plain StreamHandler wearing the
# coloured formatter.
logger = colorlog.getLogger('tensorprob')
logger.setLevel(logging.INFO)
handler = logging.StreamHandler()
handler.setFormatter(formatter)
logger.addHandler(handler)

# Public package surface (re-exports); placed after logging setup so
# submodules can log during their own import.
from . import config
from . import utilities
from . import distributions
from .distribution import Distribution, DistributionError, Region
from .model import Model, ModelError
from .parameter import Parameter
from .distributions import *
from .optimizers import *
from .samplers import *
def main():
    """For each car-tour folder in the CWD, report which language XML files
    exist on the S3 server (one HTTP GET per file checked)."""

    def check_url(carname, filename):
        # Brand is the second underscore-separated token of the car name,
        # e.g. 'xx_brand_model' -> 'brand'.
        underscores = carname.split('_')
        brand = underscores[1]
        server = 'https://s3-eu-west-1.amazonaws.com/autofs/shared/interiors/projects/'
        url = server + brand + '_manufacturer/' + \
            carname + '/files/' + filename + '.xml'
        request = requests.get(url)
        # 200 => file present on the server; anything else is reported
        # as missing.
        if request.status_code == 200:
            logger.info('[ OK ] ' + filename + '.xml')
        else:
            logger.warning('Missing ' + filename + '.xml')

    parser = argparse.ArgumentParser(
        description='For each car folder in the current directory, it \
checks which language XML files are in the server.')
    # Optional positional: a single XML base name, defaulting to 'all'.
    parser.add_argument(
        dest='language', type=str, const='all', nargs='?', default='all',
        help='Enter the name of an specific XML file without the extension')
    args = parser.parse_args()
    handler = colorlog.StreamHandler()
    handler.setFormatter(
        colorlog.ColoredFormatter('%(log_color)s%(levelname)s:%(message)s',
                                  log_colors={
                                      'DEBUG': 'green',
                                      'WARNING': 'red',
                                      'INFO': 'cyan'
                                  }))
    logger = colorlog.getLogger()
    logger.addHandler(handler)
    logger.setLevel(level=logging.INFO)
    logger.info("Started")
    alltours = []
    # Known language file base names to probe when 'all' is requested.
    allfiles = ['ar', 'de', 'en', 'en_us', 'it', 'nl', 'sk']
    # Folder names (substring match) excluded from the scan.
    bad_folders = ['shared']
    # Collect visible, non-hidden, non-excluded directories as tours.
    for tour in os.listdir(os.getcwd()):
        if os.path.isdir(os.path.join(os.getcwd(), tour)):
            if not tour.startswith('.'):
                if not any(bad_folder in tour for bad_folder in bad_folders):
                    alltours.append(tour)
    alltours.sort(reverse=False)
    for tour in alltours:
        carname = os.path.basename(tour)
        logger.info("Tour: " + carname)
        if args.language == 'all':
            for file in allfiles:
                check_url(carname, file)
        else:
            check_url(carname, args.language)
    logger.info("_EOF_")
import os import sys import cv2 import win32con import win32gui from numpy import sum sys.path.append(os.path.join(os.path.dirname(__file__), "..")) from edcm import windows from edcm import screen import colorlog logger = colorlog.getLogger() logger.info(sys.path) hwnd = win32gui.GetForegroundWindow() windows.get_hwnd_info(hwnd) screen_size = screen.get_elite_size() assert screen_size is not None logger.info("screen_size['left'] = %s, screen_size['top'] = %s, screen_size['width'] = %s, screen_size['height'] = %s", screen_size['left'], screen_size['top'], screen_size['width'], screen_size['height']) viewer = cv2.namedWindow("Test Screen", cv2.WINDOW_NORMAL) windows.set_elite_active_window() while True:
def debug(self, msg):
    """Emit *msg* at DEBUG level on this object's named colorlog logger."""
    log = colorlog.getLogger(self.name)
    log.debug(msg)
def warning(self, msg):
    """Emit *msg* at WARNING level on this object's named colorlog logger."""
    log = colorlog.getLogger(self.name)
    log.warning(msg)
def error(self, msg):
    """Emit *msg* at ERROR level on this object's named colorlog logger."""
    log = colorlog.getLogger(self.name)
    log.error(msg)
from _version import __version__

import logging
# BUG FIX: sys and traceback were used below (sys.excepthook,
# traceback.format_exception) but never imported, so the module raised
# NameError — at import time for sys, and on the first uncaught
# exception for traceback.
import sys
import traceback

import colorlog

handler = colorlog.StreamHandler()
handler.setFormatter(colorlog.ColoredFormatter(
    '%(log_color)s[%(levelname)s-%(lineno)d] %(message)s'))

# Global configuration defaults, overridden elsewhere at runtime.
debug = False
debug_path = ''
quality_list = []
use_ffmpeg = 'False'
downloader = 'aria2c'
video_quality = '654321'

logger = colorlog.getLogger('udemy_dl')
logger.addHandler(handler)
logger.setLevel(level=logging.INFO)


def logging_exception(type_, value, tb):
    """Catch Exception message."""
    # Log the full formatted traceback of an uncaught exception through
    # the colourised module logger instead of plain stderr.
    logger.exception(''.join(traceback.format_exception(type_, value, tb)))
    # sys.__excepthook__(type_, value, tb)


# Install exception handler
sys.excepthook = logging_exception
""" integron_finder is a program that looks for integron in DNA sequences. """ import os import sys import argparse import distutils.spawn import shutil import pandas as pd import integron_finder # must be done after import 'integron_finder' import colorlog _log = colorlog.getLogger('integron_finder') from Bio import SeqIO from integron_finder import IntegronError, logger_set_level from integron_finder import utils from integron_finder import results from integron_finder.topology import Topology from integron_finder.config import Config from integron_finder.hmm import scan_hmm_bank from integron_finder.integrase import find_integrase from integron_finder.attc import find_attc_max from integron_finder.infernal import find_attc from integron_finder.integron import find_integron from integron_finder.annotation import func_annot, add_feature from integron_finder.prot_db import GembaseDB, ProdigalDB
# Colourised console formatter for the 'tensorprob' package logger: the
# level name is coloured per log_colors, then reset before the message.
formatter = ColoredFormatter(
    "%(log_color)s%(levelname)s%(reset)s %(message)s",
    datefmt=None,
    reset=True,
    log_colors={
        'DEBUG': 'blue',
        'INFO': 'green',
        'WARNING': 'yellow',
        'ERROR': 'red',
        'CRITICAL': 'red,bg_white',
    },
    secondary_log_colors={},
    style='%'
)

# Package logger: INFO and above, via a plain StreamHandler wearing the
# coloured formatter.
logger = colorlog.getLogger('tensorprob')
logger.setLevel(logging.INFO)
handler = logging.StreamHandler()
handler.setFormatter(formatter)
logger.addHandler(handler)

# Public package surface (re-exports); placed after logging setup so
# submodules can log during their own import.
from . import config
from . import utilities
from . import distributions
from .distribution import Distribution, DistributionError
from .model import Model, ModelError
from .parameter import Parameter
from .stats import fisher
from .distributions import *
from .optimizers import *
from .samplers import *
#!/usr/bin/env python3 # -*- coding: utf-8 -*- """Open KB article on localhost, kb.corp, or JIRA.""" import argparse import sys import webbrowser from os import path, system import colorlog from git import Repo __version__ = "1.0.0" logger = colorlog.getLogger() def parse_keys(keys): """ Parse keys provided by user. Args: keys (str): TSWRITING-### or ### Returns: str: str in `TSWRITING-###` format """ keylist = [] for key in keys: key = key.lower() if key.startswith("tswriting-"):
def main(): """Entry point that parses the argument, and invokes the proper functions.""" parser = argparse.ArgumentParser( description="List and create emulator docker containers ({}).".format( emu.__version__), formatter_class=argparse.ArgumentDefaultsHelpFormatter, ) parser.add_argument("-v", "--verbose", dest="verbose", action="store_true", help="Set verbose logging") subparsers = parser.add_subparsers() list_parser = subparsers.add_parser( "list", help= "list all the available the publicly available emulators and system images." ) list_parser.add_argument( "--arm", action="store_true", help= "Display arm images. Note that arm images are not hardware accelerated and are *extremely* slow.", ) list_parser.set_defaults(func=list_images) license_parser = subparsers.add_parser( "licenses", help= "Lists all licenses and gives you a chance to accept or reject them.") license_parser.add_argument( "--accept", action="store_true", help="Accept all licensens after displaying them.") license_parser.set_defaults(func=accept_licenses) create_parser = subparsers.add_parser( "create", help="Given an emulator and system image zip file, " "generates a Docker image comprising complete environment in which the Android Emulator runs. " "After the Docker image is started up, interaction with the emulator is made possible via port forwarding and ADB, " "or gRPC and WebRTC.", ) create_parser.add_argument( "emuzip", help= "Zipfile containing the a publicly released emulator, or [canary|stable|all] to use the latest canary or stable, or every release.", ) create_parser.add_argument( "imgzip", help= "Zipfile containing a public system image that should be launched, or a regexp matching the image to retrieve. " "All the matching images will be selected when using a regex. " 'Use the list command to show all available images. For example "P google_apis_playstore x86_64".', ) create_parser.add_argument( "--extra", default="", help="Series of additional commands to pass on to the emulator. 
" + 'For example "-turncfg \\"curl -s -X POST https://networktraversal.googleapis.com/v1alpha/iceconfig?key=MySec\\""', ) create_parser.add_argument( "--dest", default=os.path.join(os.getcwd(), "src"), help="Destination for the generated docker files") create_parser.add_argument( "--tag", default="", help="Docker tag, defaults to the emulator build id") create_parser.add_argument( "--repo", default="", help="Repo prefix, for example: us.gcr.io/emu-dev/") create_parser.add_argument( "--push", action="store_true", help= "Push the created image to your repository, as marked by the --repo argument.", ) create_parser.add_argument( "--gpu", action="store_true", help="Build an image with gpu drivers, providing hardware acceleration" ) create_parser.add_argument( "--metrics", action="store_true", help= "When enabled, the emulator will send usage metrics to Google when the container exists gracefully.", ) create_parser.add_argument( "--no-metrics", action="store_true", help="Disables the collection of usage metrics.") create_parser.add_argument( "--start", action="store_true", help="Starts the container after creating it. " "All exposed ports are forwarded, and your private adbkey (if available) is injected but not stored.", ) create_parser.set_defaults(func=create_docker_image) create_inter = subparsers.add_parser( "interactive", help= "Interactively select which system image and emulator binary to use when creating a docker container", ) create_inter.add_argument( "--extra", default="", help="Series of additional commands to pass on to the emulator. 
" 'For example -turncfg \\"curl -s -X POST https://networktraversal.googleapis.com/v1alpha/iceconfig?key=MySec\\"', ) create_inter.add_argument( "--dest", default=os.path.join(os.getcwd(), "src"), help="Destination for the generated docker files") create_inter.add_argument( "--gpu", action="store_true", help="Build an image with gpu drivers, providing hardware acceleration" ) create_inter.add_argument( "--start", action="store_true", help="Starts the container after creating it. " "All exposed ports are forwarded, and your private adbkey (if available) is injected but not stored.", ) create_inter.add_argument( "--arm", action="store_true", help= "Display arm images. Note that arm images are not hardware accelerated and are *extremely* slow.", ) create_inter.set_defaults(func=create_docker_image_interactive) args = parser.parse_args() # Configure logger. lvl = logging.DEBUG if args.verbose else logging.WARNING handler = colorlog.StreamHandler() handler.setFormatter( colorlog.ColoredFormatter("%(log_color)s%(levelname)s:%(message)s")) logging.root = colorlog.getLogger("root") logging.root.addHandler(handler) logging.root.setLevel(lvl) if hasattr(args, "func"): args.func(args) else: parser.print_help()
from __future__ import division

import sys

import pkg_resources

from easydev import CustomConfig

__version__ = "1.6.0"
try:
    version = pkg_resources.require("bioservices")[0].version
    __version__ = version
# BUG FIX: was a bare `except:`, which also swallows SystemExit and
# KeyboardInterrupt; narrowed to Exception. Behaviour on lookup failure
# is unchanged: fall back to the hard-coded version string.
except Exception:
    version = __version__

import colorlog
logger = colorlog.getLogger("bioservices")

# Initialise the config directory if not already done
configuration = CustomConfig("bioservices", verbose=False)
bspath = configuration.user_config_dir

# Add bioservices.uniprot to sys.modules to prevent cycles in our imports
#import bioservices.uniprot
#bioservices.uniprot # Stop flake8 error

from . import settings
from .settings import *
from . import services
from .services import *
#!/usr/bin/env python # -*- coding: utf-8 -*- """Download files for udemy-dl.""" from __future__ import unicode_literals from __future__ import print_function import os import subprocess import sys import colorlog import requests logger = colorlog.getLogger(__name__) # User Agent String USER_AGENT = 'Mozilla/5.0 (Macintosh; Intel Mac OS X 10.10; rv:39.0) Gecko/20100101 Firefox/39.0' class DLException(Exception): """Raise if some lectured failed to download.""" pass def download(link, filename, update_progress, downloader='aria2c'): """Download files to given destination file-name.""" try: downloader_dict = {'aria2c': aria2c_dl, 'axel': axel_dl, 'httpie': httpie_dl,
print('Get all:') # pprint.pprint(data) # return None retval['controller'] = self.controller.get_all(data['settings']) retval['options'] = self.options.get_all(data['options']) retval['stations'] = [x.get_all(data=data['stations']) for x in self._station_list] return retval if __name__ == "__main__": import sys handler = logging.StreamHandler() handler.setFormatter(colorlog.ColoredFormatter( '%(log_color)s%(levelname)s:%(name)s : %(message)s')) log = colorlog.getLogger('Open Sprinkler Example') log.addHandler(handler) log.setLevel(logging.DEBUG) log.info('Open Sprinkler Example') if len(sys.argv) < 3: exit(1) hostname = sys.argv[1] password = sys.argv[2] os_device = OpenSprinkler(hostname, password, log=log) log.info('Get "controller" fields:') for prop in Controller.my_get_args.keys(): log.info('\t%s: %r', prop, getattr(os_device.controller, prop))
'DEBUG': 'cyan', 'INFO': 'green', 'WARNING': 'yellow', 'ERROR': 'red', 'CRITICAL': 'red,bg_white', }, secondary_log_colors={}, style='%') handler.setFormatter(formatter) file_handler = handlers.RotatingFileHandler(logfile, maxBytes=(1048576 * 5), backupCount=7) file_handler.setFormatter(formatter) logger = colorlog.getLogger('ESD') logger.addHandler(handler) logger.addHandler(file_handler) logger.setLevel(logging.INFO) class EnumSubDomain(object): def __init__(self, domain): self.project_directory = os.path.abspath(os.path.dirname(__file__)) logger.info('----------') logger.info('Start domain: {d}'.format(d=domain)) self.data = {} self.domain = domain dns_servers = [] dns_server_config = '{pd}/servers.esd'.format( pd=self.project_directory)
log_colors={ 'DEBUG': 'cyan', 'INFO': 'green', 'WARNING': 'yellow', 'ERROR': 'red', 'CRITICAL': 'red,bg_white', }, secondary_log_colors={}, style='%') handler.setFormatter(formatter) file_handler = handlers.RotatingFileHandler(logfile, maxBytes=(1048576 * 5), backupCount=7) file_handler.setFormatter(formatter) logger = colorlog.getLogger('Hawkeye') logger.addHandler(handler) logger.addHandler(file_handler) logger.setLevel(logging.DEBUG) def hawkeye_conf(): config = configparser.ConfigParser() config.read(conf_path) return config def get_conf(section, option): config = hawkeye_conf() return config.get(section=section, option=option)