Code Example #1
File: __init__.py  Project: rastern/arbiter
# Note: umsg and mem_cast are imports from the surrounding project (rastern/arbiter);
# the handler classes come from the standard library.
from logging import StreamHandler
from logging.handlers import RotatingFileHandler

def init_logging(config):
    """
    Initialize logging based on configuration.

    Either the entire config or just its logging subsection may be passed in. If no
    logging `path` parameter is found, a :py:class:`~logging.StreamHandler` is
    initialized instead. If `path` is found, a :py:class:`~logging.handlers.RotatingFileHandler`
    is initialized, with a default rotation size of 10 MB and a `backupCount` of 1.

    Args:
        config (dict): Configuration.
    """
    global LOG_MODE, LOG_MAXSIZE, LOG_PATH, LOG_FILENAME, LOG_FILEMODE, LOG_ENCODING

    hdlr = None
    # Use the 'logging' subsection if present; otherwise treat config itself as the logging section
    logging = config.get('logging', config)

    LOG_MODE = logging.get('mode', 'ERROR')
    LOG_MAXSIZE = logging.get('maxsize', '10M')
    LOG_PATH = logging.get('path', None)
    LOG_FILEMODE = logging.get('filemode', 'a+').lower()
    LOG_ENCODING = logging.get('encoding', None)

    if LOG_PATH:
        hdlr = RotatingFileHandler(LOG_PATH,
                                   mode=LOG_FILEMODE,
                                   maxBytes=mem_cast(LOG_MAXSIZE, 'B'),
                                   backupCount=1,
                                   encoding=LOG_ENCODING)
    if not hdlr:
        hdlr = StreamHandler()

    umsg.init(mode=LOG_MODE)
    umsg.add_handler(hdlr)
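
A minimal usage sketch for init_logging; the keys mirror the defaults the function reads, and the log path is a hypothetical value:

config = {
    'logging': {
        'mode': 'INFO',
        'maxsize': '5M',
        'path': '/var/log/arbiter.log',  # hypothetical; omit to fall back to a StreamHandler
        'filemode': 'a',
        'encoding': 'utf-8',
    }
}
init_logging(config)
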
Code Example #2
File: healthcheck.py  Project: potix/PrimWatch
def init_logger(self):
    # Build a logger/handler pair from the 'logging' section of the config
    logging = self.config.get('logging', {})
    log_type = logging.get('type', 'syslog')
    log_file = logging.get('file', None)
    log_level = logging.get('level', 'DEBUG')
    try:
        self.logger, handler = logger_handler_factory(log_type, log_file, log_level)
    except Exception:
        traceback.print_exc(file=sys.stderr)
        sys.exit(1)
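
A hypothetical shape for the config this initializer reads via self.config; the accepted `type` values depend on logger_handler_factory (not shown), so 'file' here is only an assumption:

config = {
    'logging': {
        'type': 'file',                    # assumed value; 'syslog' is the default
        'file': '/var/log/primwatch.log',  # hypothetical path
        'level': 'INFO',                   # default is 'DEBUG'
    }
}
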
Code Example #3
File: s3.py  Project: ywyt738/cloudaux
def get_logging(bucket_name, **conn):
    """Summarize the S3 bucket's server access logging configuration as a plain dict."""
    result = get_bucket_logging(Bucket=bucket_name, **conn)

    logging_dict = {}
    if result.get('LoggingEnabled'):
        logging = result['LoggingEnabled']
        logging_dict['Enabled'] = True
        logging_dict['Prefix'] = logging['TargetPrefix']
        logging_dict['Target'] = logging['TargetBucket']
        grant_list = []
        if logging.get('TargetGrants'):
            for grant in logging['TargetGrants']:
                grant_dict = {}
                grant_dict['Permission'] = grant['Permission']
                grantee = grant['Grantee']
                grant_dict['Type'] = grantee['Type']
                if grantee['Type'] == 'CanonicalUser':
                    grant_dict['DisplayName'] = grantee['DisplayName']
                elif grantee['Type'] == 'Group':
                    grant_dict['GroupUri'] = grantee['URI']
                else:
                    grant_dict['Email'] = grantee['EmailAddress']
                grant_list.append(grant_dict)  # append inside the loop so every grant is kept

        logging_dict['Grants'] = grant_list

    return logging_dict
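
For reference, an illustrative return value when logging is enabled on the bucket (the bucket, prefix, and grantee names are made up):

{
    'Enabled': True,
    'Prefix': 'access-logs/',
    'Target': 'my-log-bucket',
    'Grants': [
        {'Permission': 'READ', 'Type': 'CanonicalUser', 'DisplayName': 'log-reader'},
    ],
}
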
Code Example #4
 def _get_proxy_config(self, image: str, name: str, logging: dict,
                       mounts: dict, port_mappings: list) -> dict:
     environment = ([] if logging.get("default", "info") in ['', "info"]
                    else [f"ENVOY_LOG_LEVEL={logging['default']}"])
     exposed = {
         f"{internal}/tcp": {}
         for external, internal in port_mappings
     }
     return {
         'Image': image,
         'Cmd': ["python", "/hot-restarter.py", "/start_envoy.sh"],
         "AttachStdin": False,
         "AttachStdout": False,
         "AttachStderr": False,
         "Tty": False,
         "OpenStdin": False,
         "Labels": {
             "envoy.playground.proxy": name,
         },
         "Env": environment,
         "ExposedPorts": exposed,
         "HostConfig": {
             "PortBindings": self._get_port_bindings(port_mappings),
             "Binds": ['%s:%s' % (v, k) for k, v in mounts.items()]
         }
     }
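
A hypothetical call from within the owning class, showing the argument shapes the method expects (image tag, names, paths, and ports are all made up):

proxy_config = self._get_proxy_config(
    image="envoyproxy/envoy:v1.22-latest",       # illustrative image tag
    name="proxy-0",
    logging={"default": "debug"},                # becomes ENVOY_LOG_LEVEL=debug
    mounts={"/etc/envoy": "/tmp/envoy-config"},  # container path -> host path
    port_mappings=[(10000, 10000)],              # (external, internal) pairs
)
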
Code Example #5
    def _get_info(target_endpoint: str,
                  proxy: Optional[dict] = None,
                  verify: bool = True) -> Info:
        info_response = CloudFoundryClient._check_response(
            requests.get("%s/info" % target_endpoint,
                         proxies=proxy
                         if proxy is not None else dict(http="", https=""),
                         verify=verify))
        info = info_response.json()
        root_response = CloudFoundryClient._check_response(
            requests.get("%s/" % target_endpoint,
                         proxies=proxy
                         if proxy is not None else dict(http="", https=""),
                         verify=verify))
        root_info = root_response.json()

        root_links = root_info["links"]
        logging = root_links.get("logging")
        log_stream = root_links.get("log_stream")
        return Info(
            root_links["cloud_controller_v2"]["meta"]["version"],
            info["authorization_endpoint"],
            target_endpoint,
            logging.get("href") if logging is not None else None,
            log_stream.get("href") if log_stream is not None else None,
        )
Code Example #6
 def Print(self):
     logger = logging.getLogger(__name__)  # stdlib logging exposes getLogger, not get
     logger.info("BMP Color Map")
     logger.info("  Blue:           0x%X" % self.Blue)
     logger.info("  Green:          0x%X" % self.Green)
     logger.info("  Red:            0x%X" % self.Red)
     logger.info("  Reserved:       0x%X" % self.Reserved)
Code Example #7
File: logset.py  Project: g842995907/guops-know
def get_log_path():
    """Return the log file path configured for the 'logfile' handler in settings.LOGGING."""
    logging = settings.LOGGING
    handlers = logging.get('handlers')
    if handlers is None:
        return ""

    log_file = handlers.get('logfile')
    if log_file is None:
        return ""

    log_path = log_file.get('filename')
    if log_path is None:
        log_path = DEFAULT_LOG_PATH

    return log_path
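
For reference, a minimal settings.LOGGING shape that this helper can read; the handler name 'logfile' is what the code expects, while the handler class and path shown here are assumptions:

LOGGING = {
    'version': 1,
    'handlers': {
        'logfile': {
            'class': 'logging.FileHandler',
            'filename': '/var/log/app/django.log',  # hypothetical; this is what get_log_path() returns
        },
    },
}
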
Code Example #8
File: allPythonContent.py  Project: Mondego/pyreco
 def get_log_path(self):
     """ Get the log file that should have been written by the parse tests """
     if CSVIMPORT_LOG != 'logger':
         print('''CSVIMPORT_LOG is not set to 'logger' in settings
                  - assume not using csvimport.tests.settings
                  - so cannot test the log''')
         return False
     logging = getattr(settings, 'LOGGING', '')
     if logging:
         handlers = logging.get('handlers', {})
         if handlers:
             logfile = handlers.get('logfile', {})
             if logfile:
                 self.logpath = logfile.get('filename', '')
     if self.logpath.endswith('.log'):
         if os.path.exists(self.logpath):
             print('Found csvimport_test.log')
             return True
     print('''csvimport logging is not set up for %s from
              csvimport.tests.settings so cannot test the log''' % self.logpath)
     return False
Code Example #9
    def _get_info(target_endpoint: str,
                  proxy: Optional[dict] = None,
                  verify: bool = True) -> Info:
        info_response = CloudFoundryClient._check_response(
            requests.get('%s/info' % target_endpoint,
                         proxies=proxy
                         if proxy is not None else dict(http='', https=''),
                         verify=verify))
        info = info_response.json()
        root_response = CloudFoundryClient._check_response(
            requests.get('%s/' % target_endpoint,
                         proxies=proxy
                         if proxy is not None else dict(http='', https=''),
                         verify=verify))
        root_info = root_response.json()

        root_links = root_info['links']
        logging = root_links.get('logging')
        log_stream = root_links.get('log_stream')
        return Info(root_links['cloud_controller_v2']['meta']['version'],
                    info['authorization_endpoint'], target_endpoint,
                    logging.get('href') if logging is not None else None,
                    log_stream.get('href') if log_stream is not None else None)
Code Example #10
import asyncio
import logging

logging.basicConfig(level=logging.INFO)  # so the info message is actually emitted
logger = logging.getLogger("sizebot")    # getLogger, not get


async def main():
    await test()


async def test():
    logger.info("Welcome to the poopview!")


if __name__ == "__main__":
    asyncio.run(main())  # a coroutine must be run on an event loop, not called directly
Code Example #11
from bs4 import BeautifulSoup, SoupStrainer
import requests
import time
import asyncio
import logging

_LOGGER = logging.getLogger(__name__)  # getLogger, not get


def google_address(address, state=' MN'):
    start = time.time()

    URL = f'https://www.google.com/search?q={address + state}'
    USER_AGENT = "Mozilla/5.0 (Macintosh; Intel Mac OS X 10.14; rv:65.0) Gecko/20100101 Firefox/65.0"
    headers = {'user-agent': USER_AGENT}

    strainer = SoupStrainer(attrs={'class': 'vk_sh vk_bk'})  # attrs takes a dict of attribute -> value

    # async with bot.session.get(URL) as response:
    #         if response.status == 200:
    #             text = await response.read()

    resp = requests.get(URL, headers=headers)

    if resp.status_code == 200:
        soup = BeautifulSoup(resp.content, 'lxml', parse_only=strainer)
    else:
        print(f'Status code != 200: {resp.status_code}')