Example #1
def configure_logging(app):
    """
    Sets up application wide logging.

    :param app: the Flask application instance to configure
    """
    handler = RotatingFileHandler(app.config.get("LOG_FILE", "lemur.log"),
                                  maxBytes=10000000,
                                  backupCount=100)

    handler.setFormatter(
        Formatter("%(asctime)s %(levelname)s: %(message)s "
                  "[in %(pathname)s:%(lineno)d]"))

    if app.config.get("LOG_JSON", False):
        handler.setFormatter(
            logmatic.JsonFormatter(extra={"hostname": socket.gethostname()}))

    handler.setLevel(app.config.get("LOG_LEVEL", "DEBUG"))
    app.logger.setLevel(app.config.get("LOG_LEVEL", "DEBUG"))
    app.logger.addHandler(handler)

    stream_handler = StreamHandler()
    stream_handler.setLevel(app.config.get("LOG_LEVEL", "DEBUG"))
    app.logger.addHandler(stream_handler)

    if app.config.get("DEBUG_DUMP", False):
        activate_debug_dump()
Example #2
def main():

    # load config file
    global CONFIG
    with open('/root/ec2_snapshot/config.json') as f:
        CONFIG = json.load(f)

    # set up logging
    log_location = CONFIG['log_location']
    logger = logging.getLogger()
    os_touch(log_location)
    handler = RotatingFileHandler(filename=log_location, maxBytes=8000000)
    handler.setFormatter(logmatic.JsonFormatter())
    logger.addHandler(handler)
    logger.setLevel(logging.INFO)

    # generate snapshot_at timestamp
    snapshot_at = convert_dt(datetime.datetime.now())

    # fire up firehose client
    firehose = boto3.client('firehose', 'us-west-2')

    # for each arn/aws account, create a thread to perform the describe-instances work
    threads = []
    for i in range(len(CONFIG['ec2_arn'])):
        # pass worker as a callable plus args; calling worker(...) here would
        # run it synchronously and hand Thread its return value instead
        t = threading.Thread(target=worker, args=(i, firehose, snapshot_at))
        threads.append(t)
        t.start()
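
The worker() function is not shown in this snippet. A minimal sketch of what the Thread call above assumes, for illustration only (the assume-role/describe-instances body and the 'firehose_stream' config key are assumptions, not the project's actual code):

def worker(i, firehose, snapshot_at):
    # Hypothetical sketch: CONFIG['ec2_arn'] holds one role ARN per AWS account
    arn = CONFIG['ec2_arn'][i]
    logging.info({"message": "snapshotting account", "arn": arn})
    # ... assume-role and describe-instances logic would go here ...
    record = {"snapshot_at": snapshot_at, "arn": arn}
    firehose.put_record(
        DeliveryStreamName=CONFIG.get('firehose_stream', 'ec2-snapshots'),
        Record={"Data": json.dumps(record) + "\n"})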
Example #3
def initialize_logging():
    # --- set up logging

    # remove handlers that Lambda set up, as they interfere with log configuration
    root_logger = logging.getLogger()
    if root_logger.handlers:
        for handler in root_logger.handlers:
            root_logger.removeHandler(handler)

    # add json log formatter
    handler = logging.StreamHandler()
    if os.getenv("LOGFORMAT", "") != "TXT":
        handler.setFormatter(logmatic.JsonFormatter())

    root_logger.addHandler(handler)
    root_logger.setLevel(os.getenv("LOGLEVEL", "DEBUG"))

    # don't allow boto to log at DEBUG level - it is way too chatty
    logging.getLogger("botocore").setLevel(logging.WARN)
    logging.getLogger("boto3").setLevel(logging.WARN)
    logging.getLogger("urllib3").setLevel(logging.WARN)

    LOGGER.info({
        "message": "starting lambda handler {}".format(__name__),
        "stage": os.getenv("STAGE"),
        "log level": os.getenv("LOGLEVEL"),
    })
Example #4
    def loadLoggingConfig(self):
        fmt = ("%(created)f %(msecs)d %(relativeCreated)d %(asctime)s %(levelname)s %(levelno)s %(filename)s "
               "%(args)s %(funcName)s %(lineno)d %(module)s %(name)s %(pathname)s %(process)d %(processName)s "
               "%(thread)d %(threadName)s %(msg)s %(message)s %(exc_info)s")
        handler = logging.FileHandler(self.logPath)
        handler.setFormatter(logmatic.JsonFormatter(fmt=fmt))
        self.logger.addHandler(handler)
Example #5
File: log.py  Project: xorhex/mwdb-core
def setup_logger():
    enable_json_logger = app_config.mwdb.enable_json_logger

    logger = logging.getLogger("mwdb")

    if logger.hasHandlers():
        # If already configured: return
        # Used by 'mwdb configure'
        return

    # Don't propagate to root logger
    logger.propagate = False

    # Setup stream handler for main logger
    handler = logging.StreamHandler()

    if enable_json_logger:
        formatter = logmatic.JsonFormatter(
            fmt="%(filename) %(funcName) %(levelname) "
            "%(lineno) %(module) %(threadName) %(message)"
        )
    else:
        formatter = InlineFormatter(
            fmt="[%(levelname)s] %(threadName)s "
            "- %(module)s.%(funcName)s:%(lineno)s"
            " - %(message)s"
        )
    handler.setFormatter(formatter)
    logger.addFilter(ContextFilter())
    logger.addHandler(handler)
    logger.setLevel(logging.INFO)
Example #6
    def __init__(self,
                 logger=None,
                 date_tag=None,
                 filehandler=None,
                 consolehandler=None,
                 file_id=None):

        if date_tag is None:
            date_tag = datetime.datetime.now()\
                .strftime("%Y-%b-%d-%H-%M-%S")

        if file_id is None:
            # file_id = LOG_ID
            file_id = "test_logs"

        if logger is None:
            logger = logging.getLogger(file_id)

        # Add handlers and set log level

        if filehandler is None:
            # append the extension rather than joining it with '-'
            logname = '-'.join([str(file_id), date_tag]) + '.json'
            if not os.path.exists(LOG_ROOT):
                os.makedirs(LOG_ROOT)
            filehandler = logging.FileHandler(os.path.join(LOG_ROOT, logname))
            filehandler.setFormatter(
                logmatic.JsonFormatter(
                    extra={"hostname": socket.gethostname()}))

        if consolehandler is None:
            consolehandler = logging.StreamHandler()
            consolehandler.setFormatter(
                logmatic.JsonFormatter(
                    extra={"hostname": socket.gethostname()}))

        logger.addHandler(filehandler)
        logger.addHandler(consolehandler)
        logger.setLevel(logging.DEBUG)

        self.logger = logger
        self.info = logger.info
        self.debug = logger.debug
        self.date_tag = date_tag
        self.filehandler = filehandler
        self.consolehandler = consolehandler
        self.file_id = file_id
Example #7
def initialise_logger(target_logger, level, logging_format, reset_handlers=False):
    if target_logger.hasHandlers() and reset_handlers:
        target_logger.handlers = []
    if level is not None:
        target_logger.setLevel(level)
    handler = logging.StreamHandler(sys.stdout)
    handler.setLevel(logging.NOTSET)
    target_logger.addHandler(handler)
    if logging_format == 'json':
        formatter = logmatic.JsonFormatter()
        handler.setFormatter(formatter)
Example #8
def setup_logger(name: str,
                 fmt: str = DEFAULT_FORMAT,
                 level: int = DEFAULT_LEVEL):
    handler = logging.StreamHandler()
    handler.setFormatter(logmatic.JsonFormatter(fmt=fmt))

    logger = logging.getLogger(name)
    logger.setLevel(level)
    logger.addHandler(handler)

    return logger
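
DEFAULT_FORMAT and DEFAULT_LEVEL are defined elsewhere in the source module. Plausible definitions consistent with the signature above (assumed values, not the project's actual constants):

DEFAULT_FORMAT = "%(levelname) %(name) %(message)"  # assumed: field-name style fmt parsed by the JSON formatter
DEFAULT_LEVEL = logging.INFO                        # assumed default level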
Example #9
def define_logger(log_component='no name'):
    """ Define a logger with a JSON stream handler
    :param log_component: name of the component which generates the log
    :return: logger object
    """
    logger = logging.getLogger(log_component)

    handler = logging.StreamHandler()
    handler.setFormatter(logmatic.JsonFormatter())

    logger.addHandler(handler)
    logger.setLevel(logging.DEBUG)

    return logger
Example #10
def init_logging():
    """
    Initialize global LOGGER object with config defined in the global CONFIG object

    Args:
        None

    Returns:
        None
    """

    if CONFIG:
        logging.config.dictConfig(CONFIG["logging"])

    # these loggers are very noisy
    suppressed_loggers = [
        "botocore.vendored.requests.packages.urllib3.connectionpool",
        "urllib3",
    ]

    for logger in suppressed_loggers:
        logging.getLogger(logger).setLevel(logging.ERROR)

    log = logging.getLogger(__name__)
    log.addFilter(ContextFilter())
    extra = {
        "eventTime": datetime.datetime.now(timezone("US/Pacific")).isoformat()
    }
    log.propagate = False
    handler = logging.StreamHandler(sys.stdout)
    handler.setLevel("DEBUG")
    log.addHandler(handler)
    if CONFIG:
        json_logging_file = CONFIG.get("json_logging_file")
        if json_logging_file:
            if "~" in json_logging_file:
                json_logging_file = os.path.expanduser(json_logging_file)
            os.makedirs(os.path.dirname(json_logging_file), exist_ok=True)
            file_handler = logging.FileHandler(json_logging_file)
            file_handler.setFormatter(logmatic.JsonFormatter())
            log.addHandler(file_handler)
    log = logging.LoggerAdapter(log, extra)
    return log
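
The global CONFIG object is defined elsewhere. A minimal sketch of the shape init_logging() assumes, using the standard logging.config.dictConfig schema (all values here are illustrative):

CONFIG = {
    "logging": {
        "version": 1,
        "disable_existing_loggers": False,
        "formatters": {"json": {"()": "logmatic.JsonFormatter"}},
        "handlers": {"console": {"class": "logging.StreamHandler",
                                 "formatter": "json"}},
        "root": {"level": "INFO", "handlers": ["console"]},
    },
    "json_logging_file": "~/logs/app.json",  # hypothetical path
}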
Example #11
def _add_logger_handler(custom_logger,
                        handler,
                        formatter=None,
                        json_formatter=False):
    """
    Add a handler to a logger. If a formatter is not provided, a default logmatic one will be created.
    Args:
        custom_logger: The logger to add the handler to
        handler: The handler to add
        formatter: The formatter for formatting the log. See arg json_formatter for details (Optional)
        json_formatter: If a formatter is not provided, this identifies if the formatter that will be created should be a json formatter (logmatics) or a standard formatter
    """
    if not formatter:
        if json_formatter:
            formatter = logmatic.JsonFormatter()
        else:
            formatter = logging.Formatter(
                '[%(asctime)-15s] [%(module)s] [%(funcName)s] %(levelname)s %(message)s'
            )
    handler.setFormatter(formatter)
    custom_logger.addHandler(handler)
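
A brief usage sketch (the logger name and message are illustrative):

import logging
import sys

service_logger = logging.getLogger("my_service")
_add_logger_handler(service_logger, logging.StreamHandler(sys.stdout),
                    json_formatter=True)
service_logger.warning("emitted as JSON by logmatic")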
Example #12
def define_logger(logger):
    """
    Attach a rotating-file JSON handler to the given logger.
    """
    # create a JSON logging format (works well for RabbitMQ)
    formatter = logmatic.JsonFormatter(fmt="%(levelname) %(name) %(message)",
                                       extra={})

    # Create an object which redirect logs to a text file
    # Keep 10 files of 5MB for history
    # With tuning the logs can be redirected anywhere
    log_dir = ROOT_DIRECTORY / 'logs'
    if not log_dir.exists():
        log_dir.mkdir()
    logfilename = log_dir / 'happytal_libpython.log'
    handler = logging.handlers.RotatingFileHandler(str(logfilename),
                                                   maxBytes=5000000,
                                                   backupCount=10)
    handler.setLevel(logging.DEBUG)
    handler.setFormatter(formatter)

    # Multiple handlers with different types and sensitivity can be added to a unique logger
    logger.addHandler(handler)
Example #13
def setup_logger_stdout(name, level=logging.INFO, additional_logger=(), removed_logger=()):
    logger = logging.getLogger(name)
    handler = logging.StreamHandler(sys.stdout)
    # formatter = logging.Formatter()
    formatter = logmatic.JsonFormatter(extra={"env":os.getenv('RZC_ENV', 'local')})

    handler.setFormatter(formatter)
    handler.setLevel(level)

    logger_file = logging.getLogger('logger_file')
    logger_file.propagate = False

    for n in additional_logger:
        logger_n = logging.getLogger(n)
        logger_n.addHandler(handler)
        logger_n.propagate = False
    for m in removed_logger:
        logger_m = logging.getLogger(m)
        logger_m.propagate = False

    logging.basicConfig(level=level, handlers=[handler])
    return logger
Example #14
def init_app(app):
    # pylint: disable=protected-access
    log_level = logging._nameToLevel[app.config['LOG_LEVEL']]
    log_type = app.config['LOG_FORMATTER']

    # Configure JSON filesystem log handler
    handler = logging.StreamHandler(sys.stdout)

    if log_type.upper() == 'JSON':
        handler.setFormatter(logmatic.JsonFormatter())
    else:
        #pylint: disable=line-too-long
        text_formatter = logging.Formatter(
            '%(asctime)s - %(request_id)s - %(name)s - %(filename)s - %(levelname)s - %(message)s'
        )
        handler.setFormatter(text_formatter)

    handler.addFilter(RequestIdFilter())

    # Configure global logging
    logger = logging.getLogger()
    logger.addHandler(handler)
    logger.setLevel(log_level)
Example #15
File: factory.py  Project: vsnine/lemur
def configure_logging(app):
    """
    Sets up application wide logging.

    :param app: the Flask application instance to configure
    """
    logfile = app.config.get("LOG_FILE", "lemur.log")
    # if the log file is a character special device file (ie. stdout/stderr),
    # file rotation will not work and must be disabled.
    disable_file_rotation = os.path.exists(logfile) and stat.S_ISCHR(
        os.stat(logfile).st_mode)
    if disable_file_rotation:
        handler = StreamHandler(open(logfile, 'a'))
    else:
        handler = RotatingFileHandler(logfile,
                                      maxBytes=10000000,
                                      backupCount=100)

    handler.setFormatter(
        Formatter("%(asctime)s %(levelname)s: %(message)s "
                  "[in %(pathname)s:%(lineno)d]"))

    if app.config.get("LOG_JSON", False):
        handler.setFormatter(
            logmatic.JsonFormatter(extra={"hostname": socket.gethostname()}))

    handler.setLevel(app.config.get("LOG_LEVEL", "DEBUG"))
    app.logger.setLevel(app.config.get("LOG_LEVEL", "DEBUG"))
    app.logger.addHandler(handler)

    stream_handler = StreamHandler()
    stream_handler.setLevel(app.config.get("LOG_LEVEL", "DEBUG"))
    app.logger.addHandler(stream_handler)

    if app.config.get("DEBUG_DUMP", False):
        activate_debug_dump()
Example #16
    def get_logger(self, name: Optional[str] = None) -> LoggerAdapter:
        """Get logger."""
        if self.log:
            return self.log
        if not name:
            name = self.get("application_name", "consoleme")
        level_c = self.get("logging.level", "debug")
        if level_c == "info":
            level = logging.INFO
        elif level_c == "critical":
            level = logging.CRITICAL
        elif level_c == "error":
            level = logging.ERROR
        elif level_c == "warning":
            level = logging.WARNING
        elif level_c == "debug":
            level = logging.DEBUG
        else:
            # default
            level = logging.DEBUG
        filter_c = ContextFilter()
        format_c = self.get(
            "logging.format",
            "%(asctime)s - %(levelname)s - %(name)s - [%(filename)s:%(lineno)s - %(funcName)s() ] - %(message)s",
        )

        logging.basicConfig(level=level, format=format_c)
        logger = logging.getLogger(name)
        logger.addFilter(filter_c)

        extra = {"eventTime": datetime.datetime.now(timezone("US/Pacific")).isoformat()}

        now = datetime.datetime.now()

        # Elasticsearch logging
        if self.get("logging.elasticsearch_enabled", False):
            try:
                es = f"{self.get('logging.elasticsearch.host')}:{self.get('logging.elasticsearch.port')}"
                index_name = (
                    f"{self.get('logging.elasticsearch.index_name', 'consoleme')}-"
                    f"{now.year}{now.month}{now.day}"
                )
                from consoleme.lib.elasticsearch import ESHandler

                handler = ESHandler(es, index_name)
                handler.setFormatter(logmatic.JsonFormatter())
                handler.setLevel(self.get("logging.elasticsearch.level", "INFO"))
                logger.addHandler(handler)
            except Exception:
                logger.error(
                    "Unable to configure Elasticsearch logging.", exc_info=True
                )
        # Log to stdout and disk
        if self.get("logging.stdout_enabled", True):
            logger.propagate = False
            handler = logging.StreamHandler(sys.stdout)
            handler.setFormatter(logmatic.JsonFormatter())
            handler.setLevel(self.get("logging.stdout.level", "DEBUG"))
            logger.addHandler(handler)
            logging_file = self.get("logging.file")
            if logging_file:
                if "~" in logging_file:
                    logging_file = os.path.expanduser(logging_file)
                os.makedirs(os.path.dirname(logging_file), exist_ok=True)
                file_handler = logging.FileHandler(logging_file)
                file_handler.setFormatter(logmatic.JsonFormatter())
                logger.addHandler(file_handler)
        self.log = logging.LoggerAdapter(logger, extra)
        return self.log
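
The if/elif ladder above maps configuration strings onto logging module constants. For the names it handles, it is behaviorally equivalent to this stdlib one-liner (falling back to DEBUG for unknown names):

level = getattr(logging, level_c.upper(), logging.DEBUG)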
Example #17
db.init_app(app)

app.url_map.converters['hash64'] = HashConverter
api, spec = setup_restful_service(app)

logger = log.getLogger()

# Don't propagate to root logger
logger.propagate = False

# Setup JSON stream handler for main logger
handler = logging.StreamHandler()
handler.setFormatter(
    logmatic.JsonFormatter(
        fmt=
        "%(filename) %(funcName) %(levelname) %(lineno) %(module) %(threadName) %(message)"
    ))
logger.addHandler(handler)
logger.addFilter(log.ContextFilter())
logger.setLevel(logging.INFO)


@app.before_request
def assign_request_id():
    g.request_id = token_hex(16)
    g.request_start_time = datetime.utcnow()


@app.after_request
def log_request(response):
    response_time = datetime.utcnow() - g.request_start_time
Example #18
    def setUp(self):

        # access glue service
        self.glue = boto3.client(
            service_name='glue',
            region_name='us-east-1',
            endpoint_url='https://glue.us-east-1.amazonaws.com'
            )

        # Create CloudWatch client
        self.cloudwatch = boto3.client('cloudwatch')

        # configure redshift access

        self.db_conn = get_redshift_connection()

        # access s3 storage
        self.s3 = boto3.resource('s3')

        # get json file for this test suite 
        self.json_results = {}

        # define the jobs list, including initial params
        self.job_list = {
            'EDUDirect_to_parquet_last_N_months': {
                'args': {
                        '--MONTHS': '3',
                        '--ALL_TABLES': 'False'
                },
                'bucket': 'highereducation-dw-transformed-data',
                'date_partition': True,
                'initial_folders': [
                    'EDUDirectDB'
                ],
                'tables': [
                    'cddirect_production_lead',
                    'cddirect_production_visitor'
                ],
                'file_extension': 'parquet',
                'job_type': ['file_creation']
            },
            'EDUDirect_to_parquet_replace': {
                'bucket': 'highereducation-dw-transformed-data',
                'initial_folders': [
                    'EDUDirectDB'
                ],
                'tables': [
                    'cddirect_production_lead_cap',
                    'cddirect_production_migration_versions',
                    'cddirect_production_school_campus_program',
                    'cddirect_production_school_criteria',
                    'cddirect_production_school_criteria_affiliate',
                    'cddirect_production_school_multilead_segment',
                    'cddirect_production_visitor_tag',
                    'cddirect_production_zip_state'
                ],
                'file_extension': 'parquet',
                'job_type': ['file_creation']
            },
            'EDUDirect_to_parquet_new_snapshot': {
                'bucket': 'highereducation-dw-transformed-data',
                'initial_folders': [
                    'EDUDirectDB'
                ],
                'tables': [
                    'cddirect_production_affiliate',
                    'cddirect_production_country',
                    'cddirect_production_education_level',
                    'cddirect_production_publisher',
                    'cddirect_production_school',
                    'cddirect_production_school_alias',
                    'cddirect_production_school_campus',
                    'cddirect_production_school_eligible_country',
                    'cddirect_production_school_eligible_state',
                    'cddirect_production_school_program',
                    'cddirect_production_school_program_ineligible_state',
                    'cddirect_production_school_provider',
                    'cddirect_production_school_provider_campus',
                    'cddirect_production_school_provider_cap',
                    'cddirect_production_school_provider_cap_program',
                    'cddirect_production_school_provider_cap_publisher',
                    'cddirect_production_school_provider_cap_state',
                    'cddirect_production_school_provider_category',
                    'cddirect_production_school_provider_education_level',
                    'cddirect_production_school_provider_leadid_flag',
                    'cddirect_production_school_provider_program',
                    'cddirect_production_school_publisher',
                    'cddirect_production_school_targus_score',
                    'cddirect_production_state',
                    'cddirect_production_tag',
                    'cddirect_production_targus_score',
                    'cddirect_production_user',
                    'cddirect_production_widget_category',
                    'cddirect_production_widget_degree',
                    'cddirect_production_widget_degree_recommendation',
                    'cddirect_production_widget_subject',
                    'cddirect_production_widget_subject_alias',
                    'cddirect_production_widget_subject_recommendation',
                    'form_position_csv'
                ],
                'date_partition': True,
                'file_extension': 'parquet',
                'job_type': ['file_creation']
            },
            'EDUDirect_to_parquet_current_dimensions': {
                'bucket': 'highereducation-dw-transformed-data',
                'initial_folders': ['EDUDirectDB-current'],
                'files': [
                    'cddirect_production_affiliate',
                    'cddirect_production_country',
                    'cddirect_production_lead_cap',
                    'cddirect_production_publisher',
                    'cddirect_production_school',
                    'cddirect_production_school_program',
                    'cddirect_production_school_provider',
                    'cddirect_production_school_provider_cap',
                    'cddirect_production_school_provider_category',
                    'cddirect_production_school_provider_program',
                    'cddirect_production_school_provider_education_level',
                    'cddirect_production_state',
                    'cddirect_production_user',
                    'cddirect_production_widget_category',
                    'cddirect_production_widget_degree',
                    'cddirect_production_widget_subject',
                    'cddirect_production_widget_degree_recommendation',
                    'cddirect_production_widget_subject_recommendation',
                    'form_position_csv',
                ],
                'file_extension': 'parquet',
                'job_type': ['file_creation']
            },
            'EDUDirect_user_agent': {
                'args': {
                    '--TYPE': 'historical',
                },
                'bucket': 'highereducation-dw-transformed-data',
                'tables': ['user_agent'],
                'file_extension': 'parquet',
                'job_type': ['file_creation']
            },
            'EDUDirect_to_staging': {
                'args': {
                    '--TYPE': 'historical',
                    '--ENVIRONMENT': 'dev',
                    '--START_DATE': '000',
                    '--END_DATE': '000',
                },
                'bucket': 'highereducation-dw-staging-data',
                'initial_folders': ['EDUDirectDB', 'tmp'],
                'date_partition': True,
                'file_extension': 'parquet',
                'job_type': ['file_creation']
            },
            'EDUDirect_related_subject': {
                'args': {
                    '--TYPE': 'historical',
                    '--ENVIRONMENT': 'dev',
                    '--DATABASE': 'highereducation-dw-edudirectdb-parquet-current',
                    '--START_DATE': '000',
                    '--END_DATE': '000',
                },
                'bucket': 'highereducation-dw-staging-data',
                'initial_folders': ['EDUDirectDB', 'env'],
                'date_partition': True,
                'file_extension': 'parquet',
                'job_type': ['file_creation']
            },
            # migration to redshift
            'UpdateCrawler': {
                'args': {
                    '--CRAWLER': 'highereducation-dw-edudirectdb-staging'
                },
                'job_type': ['update_db']
            },
            'Migrate_EDUDirect_to_Redshift': {
                'args': {
                    '--ORIGIN': 'stag-platformevents-db-staging.normalized_events_fact_table_stag_current', #database and table to query
                    '--DESTINATION': 'stag_platform_events_staging_internal.normalized_events_fact_table_stag_current',
                },
                'job_type': ['update_db'],
                'output_db': 'stag_platform_events_staging_internal',
                'output_table': 'normalized_events_fact_table_stag_current'
            },
        }

        # initialize logger
        self.logger = logging.getLogger()

        handler = logging.StreamHandler()
        handler.setFormatter(logmatic.JsonFormatter())

        self.logger.addHandler(handler)
        self.logger.setLevel(logging.INFO)
Example #19
import logging
import logging.handlers
import sys
import socket
import logmatic

logger = logging.getLogger()

sh = logging.handlers.SysLogHandler('/dev/log')
sh.setFormatter(logmatic.JsonFormatter(extra={"hello": "world","hostname":socket.gethostname()},prefix="appname: "))
logger.addHandler(sh)

logger.setLevel(logging.INFO)

test_logger = logging.getLogger("test")
test_logger.info({"special": "value", "run": 12})
test_logger.info("classic message", extra={"special": "value", "run": 12})

def exception_test():
    try:
        raise Exception('test')
    except Exception:
        test_logger.exception("This is a fake exception")

exception_test()
Example #20
#!/usr/bin/env python

import logmatic
import logging
import socket
"""
https://github.com/logmatic/logmatic-python
"""
logger = logging.getLogger()

handler = logging.StreamHandler()
handler.setFormatter(
    logmatic.JsonFormatter(extra={"hostname": socket.gethostname()}))

logger.addHandler(handler)
logger.setLevel(logging.INFO)

test_logger = logging.getLogger("test")
test_logger.info("classic message", extra={"special": "value", "run": 12})
logger.warning("No user currently authenticated.",
            extra={
                "customer": "my_beloved_customer",
                "login_name": "*****@*****.**"
            })
Example #21
import logging
import sys
import socket

sys.path.append('logmatic/')
import logmatic

logger = logging.getLogger()

handler = logging.StreamHandler()
handler.setFormatter(logmatic.JsonFormatter(extra={"hello": "world","hostname":socket.gethostname()}))

logger.addHandler(handler)
logger.setLevel(logging.INFO)

test_logger = logging.getLogger("test")
test_logger.info({"special": "value", "run": 12})
test_logger.info("classic message", extra={"special": "value", "run": 12})

def exception_test():
    try:
        raise Exception('test')
    except Exception:
        test_logger.exception("This is a fake exception")

exception_test()
Example #22
os.environ["GOOGLE_APPLICATION_CREDENTIALS"] = get_key()

alert = False

path = os.path.expanduser('~/python-logs')
logfile = os.path.expanduser('~/python-logs/security.log')

if not os.path.isdir(path):
    os.mkdir(path)

logger = logging.getLogger("Rotating Log")
logger.setLevel(logging.INFO)
handler = RotatingFileHandler(logfile, maxBytes=5 * 1024 * 1024, backupCount=5)
handler.setFormatter(logmatic.JsonFormatter())
logger.addHandler(handler)

for project in get_projects():
    try:
        service = discovery.build('compute', 'v1')
        request = service.networks().list(project=project)
        response = request.execute()
        items = response['items']

        for item in items:
            vpc = item['name']
            autocreate = item['autoCreateSubnetworks']

            if vpc == 'default' and autocreate is True:
                alert = True
Example #23
parser.set_defaults(attrs=[])
parser.set_defaults(debug=False)
parser.set_defaults(docker_version="auto")
parser.set_defaults(skip_name=None)
parser.set_defaults(skip_image=None)
parser.set_defaults(match_name=None)
parser.set_defaults(match_image=None)
parser.set_defaults(match_label=None)
parser.set_defaults(timeout=120)

args = parser.parse_args()

# Initialise the logger for Logmatic.io
logmatic_logger = logging.getLogger("docker-logmatic")
handler = logmatic.LogmaticHandler(args.token, host=args.hostname, port=args.port, ssl=args.ssl)
handler.setFormatter(logmatic.JsonFormatter(fmt="%(message)"))
logmatic_logger.addHandler(handler)
logmatic_logger.setLevel(logging.DEBUG)
logmatic_logger.propagate = False

if args.debug is True:
    internal_logger.setLevel(logging.DEBUG)
    sys_handler = logging.StreamHandler(sys.stderr)
    internal_logger.addHandler(sys_handler)
    internal_logger.debug(args)
else:
    internal_logger.disabled = True


# Initialise the connection to the local daemon
base_url = 'unix://var/run/docker.sock'
Example #24
    def setUp(self):

        # access glue service
        self.glue = boto3.client(
            service_name='glue',
            region_name='us-east-1',
            endpoint_url='https://glue.us-east-1.amazonaws.com'
            )

        # Create CloudWatch client
        self.cloudwatch = boto3.client('cloudwatch')

        # access s3 storage
        self.s3 = boto3.resource('s3')

        # get json file for this test suite 
        self.json_results = {}

        # define the jobs list, including initial params
        self.job_list = {
            'EDUDirect_to_parquet_current_day': {
                'bucket': 'highereducation-dw-transformed-data',
                'initial_folders': ['EDUDirectDB-current'],
                'tables': [
                    'cddirect_production_lead',
                    'cddirect_production_visitor'
                ],
                'file_extension': 'parquet'
            },
            'EDUDirect_user_agent': {
                'args': {
                    '--TYPE': 'current_day'
                },
                'bucket': 'highereducation-dw-transformed-data',
                'initial_folders': [
                    'EDUDirectDB-current'
                ],
                'tables': [
                    'user_agent'
                ],
                'file_extension': 'parquet'
            },
            'EDUDirect_to_staging': {
                'args': {
                    '--TYPE': 'current_day',
                    '--ENVIRONMENT': 'dev',
                    '--START_DATE': '000',
                    '--END_DATE': '000',
                },
                'bucket': 'highereducation-dw-staging-data',
                'initial_folders': ['EDUDirectDB', 'tmp'],
                'tables': [
                    'lead_fact_table_dev_current_v1'
                ],
                'date_partition': True,
                'file_extension': 'parquet'
            },
            'EDUDirect_related_subject': {
                'args': {
                    '--TYPE': 'current_day',
                    '--DATABASE': 'highereducation-dw-edudirectdb-parquet-current',
                    '--ENVIRONMENT': 'dev',
                    '--START_DATE': '000',
                    '--END_DATE': '000',
                },
                'bucket': 'highereducation-dw-staging-data',
                'initial_folders': ['EDUDirectDB', 'dev'],
                'tables': [
                    'lead_fact_table_env_current'
                ],
                'date_partition': True
            },
        }

        # initialize logger
        self.logger = logging.getLogger()

        handler = logging.StreamHandler()
        handler.setFormatter(logmatic.JsonFormatter())

        self.logger.addHandler(handler)
        self.logger.setLevel(logging.INFO)
Example #25
unk_vnf_coll = os.environ['UNK_COLL']
log_level = os.environ['LOG_LEVEL']

# cat_url  = "http://pre-int-sp-ath.5gtango.eu:4011/catalogues/api/v2/"
# db_host = "mongo"
# db_port = 27017
# db_name = "tng-sdk-analyze-weight"
# dict_coll = "dictionaries"
# unk_vnf_coll = "unknown_vnfs"
# enc_fig_coll = "encoded_figs"
# log_level = "INFO"

logger = logging.getLogger()
handler = logging.StreamHandler()
handler.setFormatter(
    logmatic.JsonFormatter(extra={"hostname": "tng-sdk-analyze-weight"}))
logger.addHandler(handler)
enc_fig_coll = "encoded_figs"
level = logging.getLevelName(log_level)
logger.setLevel(level)


# Create a URL route in our application for "/"
@app.route('/tng-sdk-analyze-weight/api/weight/v1/home')
def home():
    logger.info("Logging home end point")
    return render_template('index.html')


@app.route('/tng-sdk-analyze-weight/api/weight/v1/mgmt')
def mgmt():
Example #26
    def setUp(self):

        # access glue service
        self.glue = boto3.client(
            service_name='glue',
            region_name='us-east-1',
            endpoint_url='https://glue.us-east-1.amazonaws.com')

        # Create CloudWatch client
        self.cloudwatch = boto3.client('cloudwatch')

        # access s3 storage
        self.s3 = boto3.resource('s3')

        # get json file for this test suite
        self.json_results = {}

        # define the jobs list, including initial params
        self.job_list = {
            'PlatformEvents_cap_info_to_parquet': {
                'args': {
                    '--TYPE': 'current_day'
                },
                'bucket': 'highereducation-dw-transformed-data',
                'initial_folders': ['PlatformEvents-current'],
                'tables': ['cap_info_csv'],
                'file_extension': 'parquet'
            },
            'PlatformEvents_to_parquet': {
                'args': {
                    '--MONTHS': '3',
                },
                'bucket': 'highereducation-dw-transformed-data',
                'initial_folders': ['PlatformEvents'],
                'tables': ['platform_events_public_normalized_events'],
                'date_partition': True,
                'file_extension': 'parquet'
            },
            'PlatformEvents_to_staging': {
                'args': {
                    '--TYPE': 'current_day',
                    '--ENVIRONMENT': 'dev'
                },
                'bucket': 'highereducation-dw-staging-data',
                'initial_folders': ['PlatformEvents', 'tmp'],
                'tables': ['fact_table_dev_current_v1'],
                'date_partition': True,
                'file_extension': 'parquet'
            },
            'PlatformEvents_prices': {
                'args': {
                    '--TYPE': 'current_day',
                    '--ENVIRONMENT': 'dev'
                },
                'bucket': 'highereducation-dw-staging-data',
                'initial_folders': ['PlatformEvents', 'dev'],
                'tables': ['normalized_events_fact_table_dev_current'],
                'date_partition': True,
                'file_extension': 'parquet'
            },
        }

        # initialize logger
        self.logger = logging.getLogger()

        handler = logging.StreamHandler()
        handler.setFormatter(logmatic.JsonFormatter())

        self.logger.addHandler(handler)
        self.logger.setLevel(logging.INFO)