Example No. 1
    def attach_appinsights(logger, instrumentation_key: str):
        if not instrumentation_key:
            logger.warning(
                "appinsights instrumentation key is null; not writing to app insights"
            )
            return

        handler = LoggingHandler(instrumentation_key)

        # TODO: extend this collection to jobid, pipelineid or farmid etc.
        handler.client._context.properties['Collection'] = 'ADF_LOGS'

        # Remove all PII information from the context.
        # Due to a bug in the LoggingHandler constructor, the context passed to it is not
        # honored, so the values are set after the handler is created. The 'None' string
        # is used instead of None because None falls back to the constructor defaults.
        handler.client._context.device = contracts.Device()
        handler.client._context.device.os_version = NONE_STRING
        handler.client._context.device.locale = NONE_STRING
        handler.client._context.device.id = NONE_STRING
        handler.client._context.device.type = NONE_STRING
        handler.client._context.device.oem_name = NONE_STRING
        handler.client._context.device.model = NONE_STRING

        handler.client._context.location = contracts.Location()
        handler.client._context.location.ip = NONE_STRING

        handler.client._context.user = contracts.User()
        handler.client._context.user.id = NONE_STRING
        handler.client._context.user.account_id = NONE_STRING
        handler.client._context.user.auth_user_id = NONE_STRING

        handler.client._context.session = contracts.Session()
        handler.client._context.session.id = NONE_STRING

        handler.client._context.cloud = contracts.Cloud()
        handler.client._context.cloud.role = NONE_STRING

        handler.setLevel(Logger.get_logging_level())

        # The log format mirrors the default Python logging statement.
        handler.setFormatter(
            logging.Formatter(
                '[%(asctime)s %(levelname)s %(filename)s:%(lineno)d %(process)d:%(thread)d]: %(message)s'
            ))

        # Forward unhandled exceptions to App Insights.
        enable(instrumentation_key)

        logger.addHandler(handler)
        logger.info("Attached app insights handler to logger")
Example No. 2
    def InitializeAppInsights(self):
        # AppInsights initialization
        cur_thread = threading.current_thread()
        logFormatter = logging.Formatter("%(asctime)s [%(threadName)-12.12s] [%(levelname)-7.7s]: %(message)s")
        appInsightsKey = os.environ['APPINSIGHTS_KEY']
        self.rootLogger.info("AppInsights key: '" + appInsightsKey + "'")   # log locally

        # Create a child logger per thread so the SessionId can be set without collisions
        # during concurrent execution; by default, records still propagate to the parent rootLogger.
        self.telemetryLogger = self.rootLogger.getChild('AppInsights.{0}'.format(cur_thread))
        telemetryhandler = LoggingHandler(appInsightsKey)
        telemetryhandler.setFormatter(logFormatter)
        telemetryhandler.client.context.application.id = "DiskInspect-Service"
        telemetryhandler.client.context.application.ver = self.containerVersion
        telemetryhandler.client.context.properties['releaseName'] = self.releaseName
        self.telemetryLogger.addHandler(telemetryhandler)

        self.telemetryClient = telemetryhandler.client
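A hedged follow-up sketch (not from the original source) showing how the per-thread child logger and client created above might be used; `ProcessRequest` and `request_id` are hypothetical names.

    def ProcessRequest(self, request_id):
        # Hypothetical worker method: tag this thread's telemetry with its own SessionId.
        import uuid
        self.telemetryClient.context.session.id = str(uuid.uuid4())
        self.telemetryLogger.info("Processing request %s", request_id)
        self.telemetryClient.flush()   # push queued telemetry to Application Insights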
Example No. 3
def setup_logger():
    logging.setLoggerClass(CustomLogger)
    _logger = logging.getLogger()

    _logger.setLevel(logging.DEBUG)
    # Stdout handler
    c_handler = logging.StreamHandler()
    c_handler.setLevel(logging.DEBUG)
    c_handler.setFormatter(logging.Formatter(format_str))
    _logger.addHandler(c_handler)

    # Application Insights handler
    if utils.convert_to_boolean(os.environ["AZURE_USEAPPINSIGHT"]):
        from applicationinsights import TelemetryClient
        from applicationinsights.logging import LoggingHandler
        a_handler = LoggingHandler(os.environ["AZURE_INSTRUMENTATION_KEY"])
        a_handler.setLevel(logging.DEBUG)
        a_handler.setFormatter(logging.Formatter(ai_str))
        _logger.addHandler(a_handler)
        tc = TelemetryClient(os.environ["AZURE_INSTRUMENTATION_KEY"])
        tc.channel.queue.max_queue_length = 2
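Example 3 references `CustomLogger`, `format_str`, `ai_str`, and `utils`, which are not shown. Plausible stand-ins (assumptions, not the project's real definitions) that let `setup_logger()` run on its own:

# Stand-in definitions for the names Example 3 assumes; formats and defaults are illustrative.
import logging
import os

format_str = "%(asctime)s [%(levelname)s] %(name)s: %(message)s"   # assumed console format
ai_str = "%(levelname)s %(name)s: %(message)s"                     # assumed App Insights format

class CustomLogger(logging.Logger):
    pass   # the real project presumably adds custom methods here

class utils:
    @staticmethod
    def convert_to_boolean(value):
        return str(value).strip().lower() in ("1", "true", "yes")

os.environ.setdefault("AZURE_USEAPPINSIGHT", "false")   # skip the App Insights branch by default
setup_logger()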
Example No. 4
enable(instrumentation_key)

# Set up other needed variables
tc = TelemetryClient(instrumentation_key)
tc.context.application.ver = '0.0.1'
tc.context.device.id = 'Sample notebook'

telemetry_channel = channel.TelemetryChannel()
telemetry_channel.context.application.ver = '1.0.0'
telemetry_channel.context.properties['application_name'] = 'sample_notebook'
telemetry_channel.context.properties['application_id'] = sc.applicationId

handler = LoggingHandler(instrumentation_key,
                         telemetry_channel=telemetry_channel)
handler.setLevel(logging.DEBUG)
handler.setFormatter(logging.Formatter('%(levelname)s: %(message)s'))
logger = logging.getLogger('simple_logger')
logger.setLevel(logging.INFO)
logger.addHandler(handler)

logger.info('Starting sample app ... ')


def getReadConfig():
    readConfig = {
        "Endpoint": "https://nomier-test-sql.documents.azure.com:443/",
        "Masterkey": "<MK>",
        "Database": "partitionIdTestDB",
        "Collection": "sourcecollection",
        "SamplingRatio": "1.0"
    }
    return readConfig
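The `readConfig` keys match the configuration expected by the azure-cosmosdb-spark connector. A hedged sketch of how it is typically consumed; the `spark` session and the connector usage are assumptions, not part of the original snippet.

readConfig = getReadConfig()
df = (spark.read
      .format("com.microsoft.azure.cosmosdb.spark")   # azure-cosmosdb-spark connector
      .options(**readConfig)
      .load())
logger.info('Read %d documents from %s', df.count(), readConfig["Collection"])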
Example No. 5
# (truncated ASCII-art banner omitted)
# set up channel with context
telemetry_channel = channel.TelemetryChannel()
telemetry_channel.context.application.ver = '0.0.0.0'
#telemetry_channel.context.properties['my_property'] = 'my_value'

# set up logging
az_handler = LoggingHandler('1bd7b388-4afd-4b58-8b2f-060ac172d00d', 
                            telemetry_channel=telemetry_channel)
az_handler.setLevel(logging.DEBUG)
hash = hashlib.sha1()
hash.update(str(time.time()).encode("utf-8", "strict"))
az_handler.setFormatter(
    logging.Formatter(f'{hash.hexdigest()[:16]} ||'
                       '%(name)s - %(levelname)s: %(message)s')
)

f_handler = logging.FileHandler('output.log')
f_handler.setLevel(logging.DEBUG)
f_handler.setFormatter(
    logging.Formatter('%(name)s - %(levelname)s: %(message)s')
)

c_handler = logging.StreamHandler()
c_handler.setLevel(logging.DEBUG)  # previously logging.ERROR
c_handler.setFormatter(
    logging.Formatter('%(name)s - %(levelname)s: %(message)s')
)
log = logging.getLogger('molgen')
log.setLevel(logging.ERROR)
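The visible part of Example 5 stops before the handlers are attached; below is a hedged continuation sketch, assuming the original goes on to attach all three.

log.addHandler(az_handler)   # Application Insights
log.addHandler(f_handler)    # output.log file
log.addHandler(c_handler)    # console
log.error("molgen logger initialised")   # ERROR-level, so it clears the logger's threshold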
Example No. 6
# Sysout Logging Setup
logger = logging.getLogger("monitofi")
logger.setLevel(logging.INFO)
syshandler = logging.StreamHandler(sys.stdout)
syshandler.setLevel(logging.INFO)
formatter = json_log_formatter.JSONFormatter()
syshandler.setFormatter(formatter)
logger.addHandler(syshandler)

if IKEY != "REPLACE_ME":
    # Logging unhandled exceptions with Appinsights
    enable(IKEY)
    # Applications Insights Logging Setup
    handler = LoggingHandler(IKEY)
    handler.setFormatter(formatter)
    logger.addHandler(handler)

iclient = InfluxDBClient(INFLUXDB_SERVER, INFLUXDB_PORT, INFLUXDB_USERNAME,
                         INFLUXDB_PASSWORD, INFLUXDB_DATABASE)
iclient.create_database(INFLUXDB_DATABASE)


def flattening(nested, prefix, ignore_list):
    field = {}

    flatten(True, nested, field, prefix, ignore_list)

    return field
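A hedged usage sketch for Example 6; the sample reading, the measurement name, and the behaviour of the unshown `flatten` helper (assumed to fill `field` with scalar key/value pairs) are assumptions, not part of the original source.

reading = {"device": {"status": "online", "latency_ms": 12}, "name": "sensor-01"}
fields = flattening(reading, "", [])                    # flatten the nested reading
logger.info("reading", extra={"fields": fields})        # JSON-formatted, also sent to App Insights
iclient.write_points([{"measurement": "monitofi", "fields": fields}])   # store in InfluxDB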