import logging
import os
import sys
from logging.handlers import TimedRotatingFileHandler

import graypy


def init_logging(args, log_file_path):
    """Take an argparse.parse_args() result and set up Python logging."""
    # configure logging
    handlers_ = []
    log_format = logging.Formatter(fmt="[%(asctime)s] [%(levelname)s] - %(message)s")
    if args.log_dir:
        # Daily-rotated log file, keeping one week of backups.
        os.makedirs(args.log_dir, exist_ok=True)
        file_handler = TimedRotatingFileHandler(
            os.path.join(args.log_dir, log_file_path),
            when="d",
            interval=1,
            backupCount=7,
            encoding="UTF-8",
        )
        file_handler.setFormatter(log_format)
        file_handler.setLevel(args.log_level)
        handlers_.append(file_handler)
    if args.verbose:
        # Mirror log output to stderr when verbose mode is enabled.
        stream_handler = logging.StreamHandler(stream=sys.stderr)
        stream_handler.setFormatter(log_format)
        stream_handler.setLevel(args.log_level)
        handlers_.append(stream_handler)
    if args.graylog_address:
        # Ship records to Graylog over TCP (GELF).
        graylog_handler = graypy.GELFTCPHandler(args.graylog_address, args.graylog_port)
        handlers_.append(graylog_handler)
    logging.basicConfig(handlers=handlers_, level=args.log_level)
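# A minimal usage sketch for init_logging above (not from the original source):
# it assumes an argparse parser that yields the attributes the function reads
# (log_dir, verbose, log_level, graylog_address, graylog_port). The flag names
# and defaults below are illustrative assumptions.
import argparse


def parse_example_args(argv=None):
    parser = argparse.ArgumentParser()
    parser.add_argument("--log-dir", dest="log_dir", default=None)
    parser.add_argument("--verbose", action="store_true")
    parser.add_argument("--log-level", dest="log_level", default="INFO")
    parser.add_argument("--graylog-address", dest="graylog_address", default=None)
    parser.add_argument("--graylog-port", dest="graylog_port", type=int, default=12201)
    return parser.parse_args(argv)


# init_logging(parse_example_args(), "service.log")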
def sendTCPLog(message):
    my_logger = logging.getLogger('test_logger')
    my_logger.setLevel(logging.DEBUG)
    handler = graypy.GELFTCPHandler('10.0.75.1', 12201)
    my_logger.addHandler(handler)
    my_logger.debug(message)
def __init__(self, FileName, queue):
    self.my_logger = logging.getLogger('test_logger')
    self.my_logger.setLevel(logging.DEBUG)
    # Pass the port as an int rather than a string.
    self.handler = graypy.GELFTCPHandler('graylog', 12201)
    self.my_logger.addHandler(self.handler)
    self.my_logger.info("launching SendData")
    self.FileName = FileName
    self.queue = queue
    self.SendData()
def __init__(self, queue):
    self.my_logger = logging.getLogger('test_logger')
    self.my_logger.setLevel(logging.DEBUG)
    # Pass the port as an int rather than a string.
    self.handler = graypy.GELFTCPHandler('graylog', 12201)
    self.my_logger.addHandler(self.handler)
    self.my_logger.debug("launching sciflask")
    self.queue = queue
    self.Model = Predictor()
    self.my_logger.info("Consume Predict starting...")
    print("", flush=True)
    self.Consume()
def invalidEmail(user):
    my_logger = logging.getLogger('test_logger')
    my_logger.setLevel(logging.DEBUG)
    handler = graypy.GELFTCPHandler('10.0.75.1', 12201)
    my_logger.addHandler(handler)
    # The adapter attaches this extra dict to every record it emits.
    my_adapter = logging.LoggerAdapter(logging.getLogger('test_logger'), {
        'first': user["first"],
        'last': user["last"]
    })
    my_adapter.debug('Invalid email ' + user["email"])
def setup_logger(
    level: int = logging.DEBUG,
    log_file_name: Optional[str] = None,
    graylog_logger_address: Optional[str] = None,
) -> logging.Logger:
    if log_file_name is not None:
        logging.basicConfig(filename=log_file_name, format="%(asctime)s %(message)s")
    else:
        logging.basicConfig()
    # logger_name is a module-level constant defined alongside this function;
    # it is not shown in this excerpt.
    logger = logging.getLogger(logger_name)
    logger.setLevel(level)
    if graylog_logger_address is not None:
        # The address is given as "host:port".
        host, port = graylog_logger_address.split(":")
        handler = graypy.GELFTCPHandler(host, int(port))
        logger.addHandler(handler)
    return logger
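# A hedged usage sketch for setup_logger above: graylog_logger_address is split
# on ":", so it must be passed as "host:port". The endpoint below is a
# placeholder, and the module-level logger_name constant is assumed to exist.
if __name__ == "__main__":
    log = setup_logger(
        level=logging.INFO,
        log_file_name="app.log",
        graylog_logger_address="graylog.example.org:12201",
    )
    log.info("logging configured")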
def main():
    """Send a message to a given Graylog server and then check to see if the log was saved."""
    args = parse_args()
    graylog_server = "http://" + args.server_ip
    if args.host_port:
        graylog_server += ':' + args.host_port
    graylog_server_api = graylog_server + '/api/'
    connection_type = args.connection_type.lower().strip()
    if connection_type not in ("udp", "tcp"):
        print("Connection type of " + connection_type + " is invalid. Valid options: UDP/TCP")
        exit(1)
    try:
        # Verify the Graylog REST API is reachable before sending anything.
        response_check = requests.get(graylog_server_api,
                                      auth=(args.username, args.password),
                                      headers={"accept": "application/json"},
                                      timeout=2)
        if response_check.status_code != 200:
            logging.error("Received response code: %s", str(response_check.status_code))
            exit(1)
        response_json = response_check.json()
        if not response_json['version']:
            logging.error("Failed to connect to the graylog server at: %s", graylog_server_api)
            exit(1)
        print("Connected to Graylog server at " + graylog_server
              + " with version " + response_json['version'])
    except requests.exceptions.ConnectionError:
        logging.error("Timeout connecting to the graylog server at: %s", graylog_server_api)
        exit(1)
    my_logger = logging.getLogger('graylog_logger')
    my_logger.setLevel(logging.DEBUG)
    if connection_type == "udp":
        graylog = graypy.GELFUDPHandler(args.server_ip)
    else:
        graylog = graypy.GELFTCPHandler(args.server_ip)
    my_logger.addHandler(graylog)
    print("Sending: " + args.message + " to the Graylog server!")
    jsondata = {"message": args.message}
    my_logger.debug(jsondata)
from flask import Flask, jsonify, request
import base64
import numpy as np
import graypy
import logging
import cv2
import pytesseract
import traceback
import sys
import json
import re

app = Flask(__name__)

SERVICE_NAME = "THARACT_API"
handler_1 = graypy.GELFTCPHandler('localhost', 5556)
handler_1.setLevel(logging.INFO)
logger_1 = logging.getLogger("{}".format(SERVICE_NAME))
logger_1.propagate = False
logger_1.setLevel(logging.INFO)
logger_1.addHandler(handler_1)

SERVING_ADDRESS = "localhost"


def base64_to_image(img_b64):
    # Decode a base64-encoded image string into an OpenCV (numpy) array.
    # np.frombuffer replaces the deprecated np.fromstring for binary data.
    img = np.frombuffer(base64.b64decode(img_b64), np.uint8)
    img = cv2.imdecode(img, cv2.IMREAD_UNCHANGED)
    # img[:, :, :3] = cv2.cvtColor(img[:, :, :3], cv2.COLOR_BGR2RGB)
    return img
log_filename = cwd + "/logs/log_" + os.path.splitext(
    os.path.basename(args.config))[0] + ".log"
loghandle = logging.FileHandler(log_filename, 'a')
formatter = logging.Formatter('%(asctime)s - %(levelname)s - %(message)s')
loghandle.setFormatter(formatter)
log.addHandler(loghandle)

# Remote Logging
if args.graylog:
    graylog_config = cwd + "/config/graylog.yml"
    assert path.exists(
        graylog_config), 'Graylog config not found: %s' % graylog_config
    graylog_servers = yaml.load(open(graylog_config), Loader=yaml.FullLoader)
    grayhandler = graypy.GELFTCPHandler(
        graylog_servers['server'][0]['server_address'],
        graylog_servers['server'][0]['port'])
    log.addHandler(grayhandler)
    log.addFilter(LoggingFilter())

# Start
log.info("=== Datalogger Started ===")

# Initialize GracefulKiller to record SIGINTs
killer = GracefulKiller()

# Run the main code
try:
    main(args=args)
except Exception as e:
    log.error("Exception (Main Thread): " + str(e))
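# A hedged sketch of the structure the snippet above expects from
# config/graylog.yml: a top-level "server" list whose first entry carries
# server_address and port. The endpoint values below are placeholders.
import yaml

example_graylog_yaml = """
server:
  - server_address: graylog.example.org
    port: 12201
"""
example_servers = yaml.safe_load(example_graylog_yaml)
assert example_servers['server'][0]['port'] == 12201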
def setup_logging(logfile: str, gelf_configuration: Optional[GelfConfig] = None):
    """
    Sets up the logging system

    Args:
        logfile (str): file path to log into
        gelf_configuration (Optional[GelfConfig]): optional Graylog GELF endpoint settings
    """
    args = cli_manager.GET_ARGUMENTS()

    # check for CLI verbosity flag
    if args.trace:
        loglevel = "TRACE"
    elif args.verbose:
        loglevel = "DEBUG"
    else:
        loglevel = "INFO"

    # check for nocolor flag
    if args.nocolors:
        log_colors = False
    else:
        log_colors = True

    # check for new-log flag, overwriting existing log,
    # otherwise, append to the file per normal.
    if args.clean_log:
        log_filemode = "w"
    else:
        log_filemode = "a"

    handlers = [
        dict(
            sink=sys.stdout,
            format="<b><c><{time}</c></b> [{name}] | {extra} | "
                   "<level>{level.name}</level> > {message}",
            colorize=log_colors,
            backtrace=False,
            diagnose=False,
            level=loglevel,
        ),
        dict(
            sink=logfile,
            level="DEBUG",
            format="< {time} > [ {module} ] {message}",
            rotation="50 MB",
            enqueue=True,
            mode=log_filemode,
        ),
    ]

    if gelf_configuration:
        handlers.append(
            dict(
                sink=graypy.GELFTCPHandler(
                    gelf_configuration.host,
                    gelf_configuration.port,
                ),
                format="<{time}[{name}] {level.name}> {message}",
                colorize=False,
                backtrace=False,
                diagnose=False,
                level=gelf_configuration.log_level,
            ))

    logger.configure(handlers=handlers)
    logger.info("Configuration file loading...")
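# A hedged sketch of the GelfConfig container consumed by setup_logging above;
# its real definition is not part of this excerpt, so the field names (host,
# port, log_level) are inferred from how the object is used.
from dataclasses import dataclass


@dataclass
class GelfConfig:
    host: str
    port: int = 12201
    log_level: str = "INFO"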
log.setLevel({
    'debug': logging.DEBUG,
    'info': logging.INFO,
    'warning': logging.WARNING,
    'error': logging.ERROR,
    'critical': logging.CRITICAL
}[config['Logging']['Level'].lower()])

if config['FileLog']['Enabled'].lower() == 'true':
    _filelogHandler = logging.FileHandler(config['FileLog']['File'])
    _filelogHandler.setFormatter(logging.Formatter(config['FileLog']['Format']))
    log.addHandler(_filelogHandler)

if config['Graylog']['Enabled'].lower() == 'true':
    import graypy
    _graylogHandler = graypy.GELFTCPHandler(
        config['Graylog']['Host'],
        int(config['Graylog']['Port'])  # config values are strings
    )
    _graylogHandler.setFormatter(logging.Formatter(config['Graylog']['Format']))
    log.addHandler(_graylogHandler)

# cmd arguments
cmdParser = argparse.ArgumentParser(description='EvoSC Beginner Classifier.')
cmdSubParsers = cmdParser.add_subparsers(dest='cmd')

datasetCmdParser = cmdSubParsers.add_parser("dataset", help='Create datasets model generator.')
datasetCmdParser.add_argument('--out', dest='dataset_file',
                              help='CSV-file to output the dataset to (will append).', required=True)

modelCmdParser = cmdSubParsers.add_parser("model", help='Generate classifier models.')
modelCmdParser.add_argument('--dataset', dest='dataset_file',
                            help='CSV-file containing data points used for training.', required=True)
modelCmdParser.add_argument('--batch-size', dest='batch_size',
                            help='Size of training batches.', default=100)
modelCmdParser.add_argument('--epochs', dest='epochs',
                            help='Number of training epochs.', default=32)