Example #1
def main():
    parser = ArgumentParser(description='convert dlt to json')
    parser.add_argument('file', help='dlt file to convert')
    parser.add_argument('--log',
                        '-l',
                        choices=['off', 'dbg', 'info', 'warn', 'err', 'crit'],
                        help='set log level')
    args = parser.parse_args()

    if args.file:
        with open(args.file, 'rb') as file:
            data = file.read()

    if args.log:
        level = {
            'off': logging.NOTSET,
            'dbg': logging.DEBUG,
            'info': logging.INFO,
            'warn': logging.WARNING,
            'err': logging.ERROR,
            'crit': logging.CRITICAL
        }
        loglevel(level[args.log])
    else:
        loglevel(logging.ERROR)

    try:
        try:
            decoded = parse_buffer(data)
            print(json.dumps(decoded, indent=2))
        except TypeError as err:
            logger.error(err)
            exit(1)
    except KeyboardInterrupt:
        exit(0)
Example #2
def main(args):
    loglevel((5 - args.verbose) * 10)  # -v for error+critical, up to -vvvv for debug+

    cloudfare_cookie = get_cloudfare_cookie()
    os.makedirs(args.output_dir, exist_ok=True)
    locations = ["New Zealand", "Australia"]
    resulted_events = get_resulted_event_list(cloudfare_cookie, locations, args.offset_days, args.race_type)
    if args.event_id is None:
        event_ids = [x["id"] for x in resulted_events]
    else:
        event_ids = [args.event_id]
    logger.info("Found {n} events, with IDs {ids}".format(n=len(resulted_events),
                                                          ids=event_ids))
    for i, event_id in enumerate(event_ids):
        try:
            time.sleep(0.5)  # Be a good citizen - avoid throttling by limiting request frequency
            event_info = [get_resulted_event(cloudfare_cookie, event_id,
                                             save_source=args.save_source,
                                             output_dir=args.output_dir,
                                             race_type=args.race_type)]
            filename = f"{RACE_TYPES[args.race_type].lower()}-results-{event_info[0]['meeting number']}-"\
                       f"{event_info[0]['meeting place'].replace(' ', '_')}-R{event_info[0]['race number']}.json"
            filename = os.path.join(args.output_dir, filename)
            with open(filename, "w") as outfile:
                logger.info(f"Result {i + 1} of {len(event_ids)}. Writing file {filename}.")
                json.dump(event_info, outfile, indent=4)
        except Exception as e:
            logger.error(f"Error while importing resulted event id {event_id}: {sys.exc_info()[0]}")
            logger.exception(e)
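The comment at the top of Example #2 encodes a verbosity-count-to-level mapping: each extra -v lowers the numeric level by 10, so a single -v gives 40 (ERROR, which also shows CRITICAL) and -vvvv gives 10 (DEBUG). A minimal sketch of that arithmetic, assuming a standard argparse count flag (the parser setup is illustrative, not taken from the original project):

import logging
from argparse import ArgumentParser

import logzero

parser = ArgumentParser()
parser.add_argument("-v", "--verbose", action="count", default=0)
args = parser.parse_args(["-vvv"])     # three -v flags

level = (5 - args.verbose) * 10        # (5 - 3) * 10 == 20 == logging.INFO
assert level == logging.INFO
logzero.loglevel(level)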
Example #3
    def do_parse(self):
        for dom_key, dom_config in self.mapper.items():
            if dom_key.startswith('__'):
                continue

            if dom_key in self.reserved_yaml_keys:
                zlog.error('[RESERVED-KEYS] ({})'.format(dom_key))
                continue

            if not isinstance(dom_config, dict):
                zlog.error(
                    '[PLAIN-TYPE] {}:{}, please move inside page'.format(
                        dom_key, dom_config))
                continue

            if all([
                    self.is_test_mode,
                    self.test_keys,
                    dom_key not in self.test_keys,
            ]):
                zlog.debug('[SKIPPED-KEYS] ({})'.format(dom_key))
                continue

            if all([self.selected_keys, dom_key not in self.selected_keys]):
                zlog.info(f"[SKIPPED-KEYS] ({dom_key})")
                continue

            self._parse_dom(dom_key, dom_config, self.soup, self._data)

        # reset logzero level back to DEBUG (10)
        logzero.loglevel(10)
Example #4
    def __set_logger(self):
        log_dir_fullpath = os.path.join(os.getcwd(),
                                        constant.CONFIG['log_dir_name'])
        log_file_fullpath = os.path.join(log_dir_fullpath,
                                         constant.CONFIG['log_file_name'])

        if not os.path.exists(log_dir_fullpath):
            os.makedirs(log_dir_fullpath)

        logzero.logfile(log_file_fullpath,
                        maxBytes=1000000,
                        backupCount=7,
                        encoding='utf8')

        logger.info('### logfile_full_path : {0}'.format(log_file_fullpath))
        logger.info('### log level : {0}'.format(constant.CONFIG['log_level']))

        log_level = constant.CONFIG['log_level'].upper()
        if log_level == 'DEBUG':
            logzero.loglevel(level=logging.DEBUG)
        elif log_level == 'INFO':
            logzero.loglevel(level=logging.INFO)
        elif log_level == 'WARN':
            logzero.loglevel(level=logging.WARN)
        elif log_level == 'ERROR':
            logzero.loglevel(level=logging.ERROR)
        elif log_level == 'FATAL':
            logzero.loglevel(level=logging.FATAL)
        else:
            raise Exception(
                'log_level setting Exception : Unknown log level :{}'.format(
                    constant.CONFIG['log_level']))
Example #5
def read_assist(  # pylint: disable=too-many-statements, too-many-branches
    argv: list,
    debug: bool = False,
):
    """Do proc_argv."""
    del argv
    if FLAGS.debug:  # pragma: no cover
        logzero.loglevel(10)  # logging.DEBUG
    else:
        logzero.loglevel(20)  # logging.INFO

    width = FLAGS.width

    # version = "0.0.2"
    if FLAGS.version:  # pragma: no cover
        indent = " " * 10
        msg = indent + "xtl read-assistant tool %s\n\n" % __version__
        msg1 = "Brought to you by mu@qq41947782. Join qq group 316287378 to be kept updated about this tool."
        msg1 = fill(
            msg1,
            width=width,
            replace_whitespace=False,
            initial_indent=indent,
            subsequent_indent=indent,
        )
        print(msg + msg1)
        raise SystemExit(0)
    if debug:
        return FLAGS.m, FLAGS.s, FLAGS.t

    loop.run_until_complete(trans_clipb())
    return None
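Examples #3, #5 and #21 pass the bare integers 10 and 20 rather than the named constants; those numbers are fixed by the standard logging module, so the two spellings are interchangeable:

import logging

import logzero

# Numeric values defined by the logging module:
# DEBUG=10, INFO=20, WARNING=30, ERROR=40, CRITICAL=50
assert logging.DEBUG == 10 and logging.INFO == 20
assert logging.WARNING == 30 and logging.ERROR == 40 and logging.CRITICAL == 50

logzero.loglevel(10)  # equivalent to logzero.loglevel(logging.DEBUG)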
Example #6
def test_api_logfile_custom_loglevel():
    """
    logzero.logfile(..) should be able to use a custom loglevel
    """
    logzero.reset_default_logger()
    temp = tempfile.NamedTemporaryFile()
    try:
        # Set logfile with custom loglevel
        logzero.logfile(temp.name, loglevel=logging.WARNING)
        logzero.logger.info("info1")
        logzero.logger.warning("warning1")

        # If setting a loglevel with logzero.loglevel(..) it will not overwrite
        # the custom loglevel of the file handler
        logzero.loglevel(logging.INFO)
        logzero.logger.info("info2")
        logzero.logger.warning("warning2")

        with open(temp.name) as f:
            content = f.read()
            cases = {
                'ins': {"] warning2", "] warning1"},
                'outs': {"] info2", "] info1"}
            }
            _check_strs_in(cases, content=content)

    finally:
        temp.close()
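The test above depends on logzero.loglevel() leaving the file handler's custom loglevel untouched, which is its default behaviour; the update_custom_handlers flag seen in Example #18 below is the switch that controls this. A short sketch (the True branch is an assumption based on the flag's name, not demonstrated by these examples):

import logging
import tempfile

import logzero

logzero.reset_default_logger()
temp = tempfile.NamedTemporaryFile()
logzero.logfile(temp.name, loglevel=logging.WARNING)

# Default: only the console handler drops to DEBUG, the logfile stays at WARNING.
logzero.loglevel(logging.DEBUG, update_custom_handlers=False)

# Assumed: update_custom_handlers=True would also lower the logfile handler.
# logzero.loglevel(logging.DEBUG, update_custom_handlers=True)

temp.close()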
Example #7
def setup_logzero(level="info", path="logs/clix.log"):
    log_fmt = "%(color)s[%(levelname)s %(asctime)s]%(end_color)s %(message)s"
    if level == "debug":
        level = logging.DEBUG
        log_fmt = (
            "%(color)s[%(levelname)1.1s %(asctime)s %(module)s:%(lineno)d]"
            "%(end_color)s %(message)s")
    elif level == "info":
        level = logging.INFO
    elif level == "warning":
        level = logging.WARNING
    elif level == "error":
        level = logging.ERROR
    elif level == "critical":
        level = logging.CRITICAL

    # create the directory if it doesn't exist
    # https://github.com/metachris/logzero/issues/129
    Path(path).parent.mkdir(parents=True, exist_ok=True)

    formatter = logzero.LogFormatter(fmt=log_fmt)
    logzero.setup_default_logger(formatter=formatter)
    logzero.loglevel(level)
    logzero.logfile(path,
                    loglevel=level,
                    maxBytes=1e9,
                    backupCount=3,
                    formatter=formatter)
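The if/elif ladder above only maps a level name onto the matching logging constant; Example #15 further down does the same thing in one line with getattr. A minimal equivalent sketch for just the name-to-constant part (variable names are illustrative only):

import logging

level_name = "warning"
level = getattr(logging, level_name.upper(), logging.INFO)  # -> logging.WARNING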
Example #8
async def get_pwbrowser(headless: bool = HEADLESS,
                        verbose: Union[bool, int] = DEBUG,
                        proxy: Optional[Union[str, dict]] = PROXY,
                        **kwargs) -> Browser:
    # fmt: on
    """Instantiate a playwright Chromium browser.

    Example usage::

        if isinstance(verbose, bool):
            verbose = 10 if verbose else 20
        logzero.loglevel(verbose)

        browser = await get_browser(headless)
        context = await browser.newContext()
        page = await context.newPage()
        await page.goto('https://httpbin.org/ip')
        # https://getfoxyproxy.org/geoip/
        # http://whatsmyuseragent.org/
        # https://playwright.dev/python/docs/intro/

    Proxy setup (https://playwright.dev/python/docs/network?_highlight=proxy#http-proxy)::

        browser = await chromium.launch(proxy={
            "server": "http://myproxy.com:3128",
            "user": "******",
            "password": "******"
        })

    Proxy via launch args (https://scrapingant.com/blog/how-to-use-a-proxy-in-playwright, Chromium)::

        const launchOptions = {
            args: [ '--proxy-server=http://222.165.235.2:80' ]
        };
        browser = await playwright['chromium'].launch(launchOptions)
    """
    if isinstance(verbose, bool):
        verbose = 10 if verbose else 20
    logzero.loglevel(verbose)

    kwargs.update({
        "headless": headless,
    })

    if proxy:
        proxy = {"server": proxy}
        kwargs.update({
            "proxy": proxy,
        })

    try:
        playwright = await async_playwright().start()
    except Exception as exc:
        logger.error(exc)
        raise

    try:
        browser = await playwright.chromium.launch(**kwargs)
    except Exception as exc:
        logger.error(exc)
        raise

    return browser
Example #9
def main(argv=None):
    """Main entry point before parsing command line arguments."""

    parser = argparse.ArgumentParser()
    parser.add_argument("--verbose", action="store_true", default=False, help="Increase verbosity.")
    parser.add_argument("--version", action="version", version="%%(prog)s %s" % __version__)

    subparsers = parser.add_subparsers(dest="cmd")

    setup_argparse_webserver(subparsers.add_parser("run", help="Run the ExCoVis web server."))

    args = parser.parse_args(argv)

    # Setup logging verbosity.
    if args.verbose:
        level = logging.DEBUG
    else:
        level = logging.INFO
    logzero.loglevel(level=level)

    # Handle the actual command line.
    cmds = {None: run_nocmd, "run": run_webserver}

    # Disable duplicated crypto warnings from paramiko, triggered by fs.sshfs.
    warnings.filterwarnings(
        "once", module="paramiko.ecdsakey", message=".*unsafe construction of public numbers.*"
    )

    return cmds[args.cmd](args, parser)
Example #10
def cli(ctx, debug):
    """A pure-python diffing utility using Google's diff-match-patch.

    \b
    This tool serves exactly two functions:
    1. Create diffs between two versions of the same file
    2. Apply diffs to a file

    Commands can be abbreviated by the shortest unique string.

    \b
    For example:
        diff -> d
        apply -> a

    \b
    Examples of full commands:
        diffusor diff <source_file> <modified_file> -n <target_file>
        diffusor apply <patch_file> -t <target_file>
    """
    ctx.ensure_object(dict)

    logzero.loglevel(logging.DEBUG if debug else logging.INFO)

    ctx.obj["debug"] = debug
Example #11
    def __init__(self,
                 mm_host=None,
                 username=None,
                 password=None,
                 api_version=1,
                 port=4343,
                 verify=False,
                 timeout=10,
                 proxy=str()):
        # Set the default logging level to ERROR
        logzero.loglevel(logging.ERROR)

        self.mm_host = mm_host
        self.username = username
        self.password = password
        self.api_version = api_version
        self.port = port
        self.timeout = abs(timeout)
        self._access_token = ""

        self.proxy = {}
        if proxy:
            self.proxy = {'http': proxy, 'https': proxy}

        self.verify = verify
        if not self.verify:
            # Disable warnings that come up, as we're not checking the cert
            requests.packages.urllib3.disable_warnings()
            logger.info("Not verifying SSL")
Example #12
    def __init__(self, *args, **kwargs):
        super(RestoReviewSpider, self).__init__(*args, **kwargs)

        # Set logging level
        logzero.loglevel(logging.WARNING)

        # Max number of restos and reviews per page
        self.resto_per_page = 30
        self.review_per_page = 10

        max_resto = kwargs.get('max_resto')
        if max_resto:
            self.max_resto_page = math.ceil(
                int(max_resto) / self.resto_per_page)
        else:
            self.max_resto_page = None

        max_review = kwargs.get('max_review')
        if max_review:
            self.max_review_page = math.ceil(
                int(max_review) / self.review_per_page)
        else:
            self.max_review_page = None

        logger.info("max_resto: {}".format(max_resto))
        logger.info("max_resto_page: {}".format(self.max_resto_page))
        logger.info("max_review: {}".format(max_review))
        logger.info("max_review_page: {}".format(self.max_review_page))

        # To track the progress of scraping
        self.main_nb = 0
        self.resto_nb = 0
        self.review_nb = 0
Example #13
def setup_logzero(path, level):
    Path(path).parent.mkdir(parents=True, exist_ok=True)
    log_fmt = '%(color)s[%(levelname)s %(asctime)s]%(end_color)s %(message)s'
    if level == 'debug':
        level = logging.DEBUG
        log_fmt = (
            '%(color)s[%(levelname)1.1s %(asctime)s %(module)s:%(lineno)d]'
            '%(end_color)s %(message)s')
    elif level == 'info':
        level = logging.INFO
    elif level == 'warning':
        level = logging.WARNING
    elif level == 'error':
        level = logging.ERROR
    elif level == 'critical':
        level = logging.CRITICAL

    formatter = logzero.LogFormatter(fmt=log_fmt)
    logzero.setup_default_logger(formatter=formatter)
    logzero.loglevel(level)
    logzero.logfile(path,
                    loglevel=level,
                    maxBytes=1e9,
                    backupCount=3,
                    formatter=formatter)
Example #14
    def __init__(
        self,
        config_path: str,
        signals_len: int,
        overlap_len: int,
        name: str,
        minibatch_size: int,
        beam_width: int,
        num_threads: int,
        gpus: List[int],
        ignore_alignment_history: bool,
        keep_full_alignment: bool,
        verbose: bool = False,
    ) -> None:
        if not verbose:
            tf.compat.v1.logging.set_verbosity(tf.compat.v1.logging.ERROR)
            tf.get_logger().setLevel('ERROR')

        # set attributes
        self._config_path = config_path
        self._signals_len = signals_len
        self._overlap_len = overlap_len
        #  logger.debug('signals_len: {}'.format(self._signals_len))
        #  logger.debug('overlap_len: {}'.format(self._overlap_len))
        self._name = name
        self._minibatch_size = minibatch_size
        self._beam_width = beam_width
        self._num_threads = num_threads
        self._ignore_alignment_history = ignore_alignment_history
        self._keep_full_alignment = keep_full_alignment
        self._verbose = verbose
        if self._verbose:
            logzero.loglevel(logging.DEBUG)
        else:
            logzero.loglevel(logging.ERROR)

        # Load config
        with open(config_path) as json_f:
            cfg = json.load(json_f)
        self._net_cfg = cfg['net']
        self._hp_cfg = cfg['hp']
        self._rt_cfg = cfg['rt']

        self._tf_config = tf.ConfigProto(
            allow_soft_placement=True,
            inter_op_parallelism_threads=self._num_threads,
            intra_op_parallelism_threads=self._num_threads,
            gpu_options=tf.GPUOptions(
                per_process_gpu_memory_fraction=0.8,
                allow_growth=True,
                visible_device_list=','.join(list(map(str, gpus))),
            ))
        self._make_net()
        self._sess = tf.Session(config=self._tf_config)
        self._net.get_ready(
            sess=self._sess,
            ckpt_model_dir_path='',
            max_to_keep=1,
        )
        return
Example #15
def main(args):
    """
    The main entrypoint.
    """
    logzero.loglevel(getattr(logging, args.log_level))
    s = requests.Session()
    prelogin_endpoint = args.prelogin
    resp = make_saml_request(s, prelogin_endpoint)
    resp = authn_user_passwd(s, resp, args.username)
    duo_factor, duo_device = parse_duo_opts(args.duo_mfa)
    if duo_factor is not None:
        resp = authn_duo_mfa(s,
                             resp,
                             duo_device=duo_device,
                             duo_factor=duo_factor)
    resp = send_saml_response_to_globalprotect(s, resp)
    logger.debug("Response:\n{}".format(resp.text))
    logger.debug("Headers: {}".format(resp.headers))
    p = urlparse(prelogin_endpoint)
    host = p.netloc.split(":")[0]
    user = resp.headers["saml-username"]
    cookie = resp.headers["prelogin-cookie"]
    exports = dict(VPN_HOST=host, VPN_USER=user, COOKIE=cookie)
    for key, value in exports.items():
        print("export {}={}".format(key, value))
Example #16
def run_click(debug):
    logzero.loglevel(logging.DEBUG if debug else logging.INFO)
    logzero.formatter(
        logzero.LogFormatter(
            fmt='%(asctime)s %(color)s[%(levelname).1s]%(end_color)s %(message)s',
            datefmt='%Y-%m-%d %H:%M:%S'))
Example #17
    def __init__(self,
                 ip,
                 filename=None,
                 record_type="A",
                 debug=False,
                 scriptOutType=None):
        """Initialize the class.

        Arguments:
            ip {string} -- IP address serving the basis of the search.

        Keyword Arguments:
            filename {str} -- Path to the BIND9 zone file (default: {None})
            record_type {str} -- Type of record to search for (default: {"A"})
            debug {bool} -- Set to True to enable debug messages (default: {False})
            scriptOutType {str} -- Type of script to output (default: {None})

        Raises:
            DnsParserInputError: Exception for errors pertaining to the input received.
        """
        self.ip = ip
        self.filename = filename
        self.debug = debug
        self.scriptOutType = scriptOutType
        self.results = []
        self.domain = None
        self.names = None
        self.zonefile = None
        self.record_type = "A" if self.ip.version == 4 else "AAAA"
        logzero.loglevel(logging.INFO)
        if self.debug:
            logzero.loglevel(logging.DEBUG)
        if self.filename is None:
            raise DnsParserInputError("filename",
                                      "Must include filename to parse")
Example #18
def cli(ctx: click.Context, verbose: bool=False, no_version_check: bool=False,
        change_dir: str=None, no_log_file: bool=False,
        log_file: str="chaostoolkit.log"):
    if verbose:
        logzero.loglevel(logging.DEBUG, update_custom_handlers=False)
        fmt = "%(color)s[%(asctime)s %(levelname)s] "\
              "[%(module)s:%(lineno)d]%(end_color)s %(message)s"
    else:
        logzero.loglevel(logging.INFO, update_custom_handlers=False)
        fmt = "%(color)s[%(asctime)s %(levelname)s]%(end_color)s %(message)s"

    if not no_log_file:
        # let's ensure we log at DEBUG level
        logger.setLevel(logging.DEBUG)
        logzero.logfile(
            click.format_filename(log_file), mode='a',
            loglevel=logging.DEBUG)

    logzero.formatter(
        formatter=logzero.LogFormatter(fmt=fmt, datefmt="%Y-%m-%d %H:%M:%S"),
        update_custom_handlers=False)

    subcommand = ctx.invoked_subcommand

    # make it nicer for going through the log file
    logger.debug("#" * 79)
    logger.debug("Running command '{}'".format(subcommand))

    if not no_version_check:
        check_newer_version(command=subcommand)

    if change_dir:
        logger.warning("Moving to {d}".format(d=change_dir))
        os.chdir(change_dir)
Example #19
def main(args):
    """Engine of the script."""
    logzero.loglevel(logging.INFO)
    if args.verbose >= 1:
        logzero.loglevel(logging.DEBUG)
    logger.debug(args)
    parsedip = parseIp(ip=args.ip)
    records_flat = list(itertools.chain(*args.records))
    parsedRecords = []
    for record in records_flat:
        logger.debug("Parsing Record: %s" % record)
        r = tldextract.extract(record)
        parsedRecords.append(
            DnsRecord(domain=r.registered_domain, entry=r.fqdn))
    if parsedip:
        logger.debug("Parsed IP: %s" % parsedip)
        scriptoutput = ScriptGenerator(records=parsedRecords,
                                       ip=parsedip.exploded,
                                       action=args.scriptouttype)
    else:
        cname = args.ip
        logger.debug("CNAME: %s" % args.ip)
        scriptoutput = ScriptGenerator(records=parsedRecords,
                                       cname=cname,
                                       action=args.scriptouttype)
    print(*scriptoutput.get_script(), sep="\n")
Example #20
def test_api_logfile_custom_loglevel():
    """
    logzero.logfile(..) should be able to use a custom loglevel
    """
    logzero.reset_default_logger()
    temp = tempfile.NamedTemporaryFile()
    try:
        # Set logfile with custom loglevel
        logzero.logfile(temp.name, loglevel=logzero.WARN)
        logzero.logger.info("info1")
        logzero.logger.warning("warn1")

        # If setting a loglevel with logzero.loglevel(..) it will not overwrite
        # the custom loglevel of the file handler
        logzero.loglevel(logzero.INFO)
        logzero.logger.info("info2")
        logzero.logger.warning("warn2")

        with open(temp.name) as f:
            content = f.read()
            assert "] info1" not in content
            assert "] warn1" in content
            assert "] info2" not in content
            assert "] warn2" in content

    finally:
        temp.close()
Example #21
    def __init__(
        self,
        to_lang: str = 'zh',
        from_lang: str = 'en',
        debug: bool = False,
        proxy: Optional[str] = None,
        testurl: str = '',
        retry: int = 2,
    ) -> None:
        '''
        testurl: if not empty, used as dest url (to substitute api_url for testing proxy)
        '''
        self.from_lang = from_lang
        self.to_lang = to_lang
        # self.source_list = ['']

        # for use in _get_json5
        self.from_lang_ = from_lang
        self.to_lang_ = to_lang

        self.proxy = proxy
        self.testurl = testurl
        self.retry = retry

        _ = make_url(proxy)
        proxies = {
            'http': _,
            'https': _,
        }
        self.client = httpx.Client(proxies=proxies)

        if debug:
            logzero.loglevel(10)  # logging.DEBUG
        else:
            logzero.loglevel(20)  # logging.INFO
Example #22
    def _api_call(self, method, url, **kwargs):
        '''The API call handler.

        This method is used by `resource`. kwargs passed in get passed to the
        requests.session instance

        Args:
        -----
        method: `str`
            either `POST` or `GET`

        url: `str`
            URL with the endpoint included

        **kwargs:
        These are passed into the requests and include the `params` and `json`
        attributes which are the exact same ones used by requests.

        Returns:
        --------
        The full response in JSON format including `_global_result` AND
        The error if status string returned is not 0, else `None`.
        '''
        logger.info(f"Method is: {method.upper()}")

        response = getattr(self.session, method.lower())(url,
                                                         verify=self.verify,
                                                         **kwargs)
        logger.debug(f"Full URL: {response.url}")

        # If the response is malformed, for example because the wrong
        # endpoint was passed in, return None for both values
        try:
            jresp = response.json()
            logger.debug(f"Response JSON: {jresp}")
        except json.decoder.JSONDecodeError as e:
            logger.exception(
                f'Got a JSONDecodeError exception. Check the defined endpoint is correct\n'
            )
            logger.exception(f"Response text:\n{response.text}")
            logzero.loglevel(logging.ERROR)
            return None, None

        logzero.loglevel(logging.ERROR)

        # Return proper values depending on the type of HTTP request and
        # the response received from it
        if method.lower() == "get":
            return jresp, None
        else:
            if '_global_result' in jresp:
                if jresp["_global_result"]["status"] == 0 or jresp[
                        "_global_result"]["status"] == '0':
                    return jresp, None
                else:
                    #logger.debug(f'Error is: {jresp["_global_result"]}')
                    return jresp, jresp["_global_result"]
            else:
                return None, logger.error(f"Config not written: {jresp}")
Example #23
    def __init__(self):
        self.logfile = os.path.join(config.logPath, 'core-service.log')
        logzero.logfile(self.logfile, maxBytes=1e6, backupCount=3)
        import logging
        formatter = logging.Formatter(
            '%(asctime)-15s - [%(filename)s: %(lineno)s] -%(levelname)s: %(message)s')
        logzero.formatter(formatter)
        logzero.loglevel(logging.INFO)
        self.logger = logzero.logger
Example #24
    def _get_vcr(self, **kwargs):
        logzero.loglevel(logging.INFO)
        myvcr = super(TestAuthIntegrationMock, self)._get_vcr(**kwargs)
        myvcr.match_on = [
            'method', 'host', 'port', 'path', 'query', 'body', 'headers'
        ]
        myvcr.record_mode = 'none'
        return myvcr
Example #25
    def __init__(self):
        self.logfile = config.logPath + 'flask.log'
        logzero.logfile(self.logfile, maxBytes=1e6, backupCount=3)
        import logging
        formatter = logging.Formatter(
            '%(asctime)-15s - [%(filename)s: %(lineno)s] -%(levelname)s: %(message)s')
        logzero.formatter(formatter)
        logzero.loglevel(logging.ERROR)
        self.logger = logzero.logger
Example #26
def setup_logger(__name__: str,
                 file_path: str = log_location,
                 level: int = 10) -> logzero.logger:
    # todo: this should be able to write to lambda/local logs without code change
    logzero.setup_default_logger()
    logzero.logfile(file_path, maxBytes=int(1e6))
    logzero.loglevel(level)
    return logzero.logger
Example #27
def get_logger(log_dir, loglevel=logging.INFO):
    from logzero import logger
    if not Path(log_dir).exists():
        Path(log_dir).mkdir(parents=True)
    logzero.loglevel(loglevel)
    logzero.logfile(log_dir + '/logfile')

    return logger
Example #28
def main(filename, debug=False):
    if not debug:
        logzero.loglevel(logging.INFO)

    import yaml
    with open(filename, 'rb') as f:
        tc = TestCase(yaml.load(f))
        tc.run()
Example #29
    def _spawn(self, config_file, dat):
        self.cfg = Conf(
            config_file=config_file,
            dat=dat,
        ).cfg
        logzero.formatter(LFormatter(log_pre=self.cfg.get('log.symbol', '')))
        logzero.loglevel(self.cfg.get('log.level', 20))
        self.zlog = logzero.logger
Example #30
    def set_loglevel(self, level):
        """
        Set the minimum loglevel for the default logger

        Args:
            level (int): e.g. logging.DEBUG or logging.ERROR. See also https://docs.python.org/2/library/logging.html#logging-levels
        """
        logzero.loglevel(level)
Example #31
    def set_loglevel(self, level):
        """
        Set the minimum loglevel for the default logger

        Args:
            level (int): e.g. logging.DEBUG or logging.ERROR. See also https://docs.python.org/2/library/logging.html#logging-levels
        """
        self.log_level = level
        logzero.loglevel(level)
Example #32
                        incident.presentation_message,
                        incident.result["actual"]
                    )
                    incident.message = "{}, actual: {}".format(
                        incident.message,
                        incident.result["actual"]
                    )
                else:
                    incident.message = "{}, actual: {}".format(
                        incident.message,
                        incident.result["message"]
                    )

                # incident.message = "{}\n({})".format(
                #     incident.message,
                #     incident.endpoint.url
                # )

            db.save_active(incident)

            deliver_alert_to_groups(incident, alert_groups, alert_definitions)


if __name__ == "__main__":
    if settings.DEBUG:
        logzero.loglevel(logging.DEBUG)
    else:
        logzero.loglevel(logging.INFO)

    main()
Example #33
def write(Message):
    logzero.logfile(log_path)
    logzero.loglevel(logging.INFO)
    logger.info(str(Message))
Example #34
def Error():
    logzero.logfile(log_path)
    logzero.loglevel(logging.ERROR)
    return logger