# Flask extensions bound to the app: SQLAlchemy, cache, and migrations.
db = SQLA(app)
cache = Cache(app, config=app.config.get('CACHE_CONFIG'))
migrate = Migrate(app, db, directory=APP_DIR + "/migrations")

# Logging configuration
logging.basicConfig(format=app.config.get('LOG_FORMAT'))
logging.getLogger().setLevel(app.config.get('LOG_LEVEL'))

if app.config.get('ENABLE_TIME_ROTATE'):
    # Optional time-based rotation of the root log, fully driven by
    # app configuration (filename, rollover unit, interval, backups).
    logging.getLogger().setLevel(app.config.get('TIME_ROTATE_LOG_LEVEL'))
    handler = TimedRotatingFileHandler(
        app.config.get('FILENAME'),
        when=app.config.get('ROLLOVER'),
        interval=app.config.get('INTERVAL'),
        backupCount=app.config.get('BACKUP_COUNT'))
    logging.getLogger().addHandler(handler)


class MyIndexView(IndexView):
    """Landing view: redirects the site root to the welcome page."""

    @expose('/')
    def index(self):
        return redirect('/caravel/welcome')


appbuilder = AppBuilder(
    app,
    db.session,
    base_template='caravel/base.html',
import os
from flask import Flask
from flask_common import Common

# Application object plus the Flask-Common helper bound to it.
app = Flask(__name__)
common = Common(app)

# TODO: better config
# Defaults come from the package; CALENDONATOR_SETTINGS must point at a
# settings file (from_envvar raises if the variable is unset).
app.config.from_object("calendonator.default_settings")
app.config.from_envvar("CALENDONATOR_SETTINGS")

__version__ = "0.0.2"
app.config["VERSION"] = __version__

if not app.debug:
    # Production logging: WARNING+ to a file rotated at midnight.
    import logging
    from logging.handlers import TimedRotatingFileHandler
    # https://docs.python.org/3.6/library/logging.handlers.html#timedrotatingfilehandler
    file_handler = TimedRotatingFileHandler(
        os.path.join(app.config["LOG_DIR"], "calendonator.log"),
        "midnight")
    file_handler.setLevel(logging.WARNING)
    file_handler.setFormatter(
        logging.Formatter("<%(asctime)s> <%(levelname)s> %(message)s"))
    app.logger.addHandler(file_handler)

# Imported for its side effect of registering the routes on `app`.
import calendonator.views
import logging
import os
import re
import time
import json
import unittest
import requests
from logging.handlers import TimedRotatingFileHandler
from apscheduler.schedulers.background import BackgroundScheduler

# Dedicated sync-job logger: DEBUG+ records to a midnight-rotating file
# keeping 30 days of backups.
logger = logging.getLogger('sync job logger')
logger.setLevel(logging.DEBUG)
handler = TimedRotatingFileHandler('../logs/sync_job.log', when='midnight', interval=1, backupCount=30)
handler.setLevel(logging.DEBUG)
handler.setFormatter(
    logging.Formatter(
        fmt='[%(asctime)s.%(msecs)03d] [%(levelname)s]: %(message)s',
        datefmt='%Y-%m-%d %H:%M:%S'))
# Name rotated files sync_job.log.YYYYMMDD; extMatch lets the handler
# recognize (and prune) files carrying that suffix.
handler.suffix = "%Y%m%d"
handler.extMatch = re.compile(r"^\d{8}$")
logger.addHandler(handler)

# Shared HTTP session and default JSON headers for API calls.
rs = requests.session()
_http_headers = {'Content-Type': 'application/json'}

# Admin credentials from the environment; None when unset.
ADMIN_USER = os.getenv('ADMIN_USERNAME')
ADMIN_PASSWORD = os.getenv('ADMIN_PASSWORD')
type=int, help="delay in seconds (default=60)", default=60) parser.add_argument("--debug", help="debug flag", action='store_true') args = parser.parse_args() # Set up Logging logger = logging.getLogger('loggerlog') if args.debug: logger.setLevel(logging.DEBUG) else: logger.setLevel(logging.INFO) # Rotate at midnight logHandler = TimedRotatingFileHandler(filename=args.filename, when="midnight", backupCount=31) logFormatter = logging.Formatter('%(asctime)s %(levelname)s %(message)s') logHandler.setFormatter(logFormatter) logger.addHandler(logHandler) try: # Loop forever while True: try: # Run command cmd_out = subprocess.check_output(args.command, shell=True).decode()
################################################################ ############################ LOGGER ############################ ################################################################ # create logger with 'app_log' logger = logging.getLogger(logconf['name']) logger.setLevel(logging.DEBUG) # create stdout handler with a higher log level sh = logging.StreamHandler(sys.stdout) sh.setLevel(logging.DEBUG) log_path = logconf["folder"] + '/' # create stdout_file handler whiefh logs even debug messages sfh = TimedRotatingFileHandler(log_path + logconf['stdout']['name'], when="W0", interval=1, backupCount=5) sfh.setLevel(logging.DEBUG) # create file handler with a higher log level efh = TimedRotatingFileHandler(log_path + logconf['error']['name'], when="W0", interval=1, backupCount=5) efh.setLevel(logging.ERROR) # create formatter and add it to the handlers formatter = logging.Formatter( '%(asctime)s - %(name)s - %(levelname)s: %(message)s') sh.setFormatter(formatter) sfh.setFormatter(formatter)
def __init__(
    self,
    name: str,
    filename: str = None,
    root: str = None,
    cmd_output: bool = True,
    level: str = "INFO",
    colors=None,
    database=None,
    excludes=None,
    config=None,
):
    """Create a named logger backed by a timed-rotating log file.

    Parameters:
        name: logger name; also the default log file basename.
        filename: log file basename (without extension); defaults to name.
        root: directory for log files; defaults to the alpha logs root.
        cmd_output: also echo records to stdout (overridden by the
            ALPHA_LOG_CMD_OUTPUT environment variable when it is set).
        level: initial log level name, e.g. "INFO".
        colors: optional color configuration for the stdout handler.
        database: name of an associated database (stored only).
        excludes: message patterns filtered out via NoParsingFilter.
        config: optional kwargs passed verbatim to
            TimedRotatingFileHandler; when empty/None, a midnight
            rotation keeping 90 backups is used.
    """
    # BUG FIX: `config` previously defaulted to a mutable `{}`, shared
    # across every call; default to None and normalize instead.
    if config is None:
        config = {}
    self.date_str: str = ""
    self.database_name: str = database
    self.database = None
    self.excludes = excludes
    self.config = config

    # Environment variable wins over the constructor argument.
    if "ALPHA_LOG_CMD_OUTPUT" in os.environ:
        cmd_output = "Y" in os.environ["ALPHA_LOG_CMD_OUTPUT"].upper()

    if filename is None:
        filename = name
    if root is None:
        root = _utils.get_alpha_logs_root()

    self.root = _utils.check_root(root)
    log_path = self.root + os.sep + filename + ".log"

    # Create logger
    self.logger = logging.getLogger(name)
    self.set_level(level)

    # File handler: either fully caller-configured, or the default
    # midnight rotation keeping 90 days of backups.
    if len(config) != 0:
        handler = TimedRotatingFileHandler(log_path, **config)
    else:
        handler = TimedRotatingFileHandler(
            log_path, when="midnight", interval=1, backupCount=90
        )
    if PLATFORM == "windows":
        # On Windows the timed handler is replaced by a size-based,
        # concurrency-safe handler (512 KiB per file, 5 backups).
        handler = ConcurrentRotatingFileHandler(log_path, "a", 512 * 1024, 5)
    self.logger.addHandler(handler)

    # Optional stdout echo, with optional color filtering.
    if cmd_output:
        handler = logging.StreamHandler(sys.stdout)
        if colors:
            handler.addFilter(_colorations.ColorFilter(colors))
        self.logger.addHandler(handler)

    # Drop records matching any configured exclude pattern.
    if self.excludes and len(self.excludes):
        self.logger.addFilter(
            NoParsingFilter(excludes=self.excludes, level=self.level)
        )

    self.pid = os.getpid()
    self.name = name
    self.last_level = None
    self.last_message = None
# Root logging: plain message format at INFO.
logging.basicConfig(level='INFO', format='%(message)s', datefmt='%d/%m/%y %H:%M:%S')

# Separate formats for console vs. file output.
stream_format = logging.Formatter('%(asctime)s - %(message)s', '%Y-%m-%d %H:%M:%S')
file_format = logging.Formatter('%(asctime)s %(levelname)s -: %(message)s', '%Y-%m-%d %H:%M:%S')

stream_handler = logging.StreamHandler()
stream_handler.setLevel(logging.DEBUG)

# General log file: rotates at midnight every 2 days, keeping 2 backups.
generic_file_handler = TimedRotatingFileHandler(os.path.join(
    APP_ROOT_PATH, '..', 'log', 'log.log'),
    when='midnight', interval=2, backupCount=2,
    encoding='utf-8', delay=False)
generic_file_handler.setLevel(logging.DEBUG)

# Errors log file. NOTE(review): despite the name this handler is set
# to DEBUG, so it records everything — confirm whether ERROR was meant.
errors_file_handler = TimedRotatingFileHandler(os.path.join(
    APP_ROOT_PATH, '..', 'log', 'errors.log'),
    when='midnight', interval=2, backupCount=2,
    encoding='utf-8', delay=False)
errors_file_handler.setLevel(logging.DEBUG)

stream_handler.setFormatter(stream_format)
# -*- coding: utf-8 -*- import logging from logging.handlers import TimedRotatingFileHandler from scihub_eva.utils.path_utils import * DEFAULT_LOGGER = logging.getLogger('default') DEFAULT_LOGGER.setLevel(logging.INFO) DEFAULT_LOG_DIRECTORY = logs_dir() DEFAULT_LOG_FILE = DEFAULT_LOG_DIRECTORY / 'SciHubEVA.log' DEFAULT_LOG_HANDLER = TimedRotatingFileHandler( DEFAULT_LOG_FILE.resolve().as_posix(), when='d', encoding='utf-8') DEFAULT_LOG_HANDLER.setLevel(logging.INFO) DEFAULT_LOG_FORMATTER = logging.Formatter( '%(asctime)s - %(levelname)s - %(message)s') DEFAULT_LOG_HANDLER.setFormatter(DEFAULT_LOG_FORMATTER) DEFAULT_LOGGER.addHandler(DEFAULT_LOG_HANDLER) LOGGER_SEP = '-' * 100 def format_log_message(message): return DEFAULT_LOG_FORMATTER.format(message) __all__ = [
from flask_moment import Moment
from werkzeug.http import HTTP_STATUS_CODES
from get_staticfeature import find_doc, preprocess_doc
from data_processor import DataProcess
from model import Model, ModelConfig
from feature_extrator import FeatureExtrator
from test_data_collator import TestDataCollator

app = Flask(__name__)
app.config['SECRET_KEY'] = 'dev'

# When served by gunicorn (module imported, not run directly), funnel
# application logs through gunicorn's error logger, which also gets a
# midnight-rotating file handler (30 backups).
if __name__ != '__main__':
    gunicorn_logger = logging.getLogger('gunicorn.error')
    logHandler = TimedRotatingFileHandler('logs/prod_pipeline.log',
                                          when='midnight', interval=1,
                                          backupCount=30)
    formatter = logging.Formatter(
        '[%(asctime)s] [%(process)d] [%(levelname)s] %(message)s')
    logHandler.setFormatter(formatter)
    gunicorn_logger.addHandler(logHandler)
    # Reuse gunicorn's handlers and level for the Flask app logger.
    app.logger.handlers = gunicorn_logger.handlers
    app.logger.setLevel(gunicorn_logger.level)

bootstrap = Bootstrap(app)
moment = Moment(app)


def error_response(status_code, message=None):
    # Build a JSON-style error payload for the given HTTP status code.
    payload = {'error': HTTP_STATUS_CODES.get(status_code, 'Unknown error')}
    if message:
# coding=utf-8
from logging.handlers import TimedRotatingFileHandler
import logging

# Module-wide log settings.
loglevel = logging.INFO
logfile = "./autodial.log"

# A size-based handler was considered previously:
# logHandler = RotatingFileHandler(logfile, mode='a', maxBytes=50*1024*1024, backupCount=10, encoding=None, delay=0)

# Rotate once per day, keeping the 20 most recent files.
logHandler = TimedRotatingFileHandler(logfile, when='D', interval=1, backupCount=20)

# Record source file, line number and thread id with every message.
logFormatter = logging.Formatter(
    '%(asctime)s %(filename)-12s[line:%(lineno)d] %(thread)d %(levelname)s %(message)s'
)
logHandler.setFormatter(logFormatter)

# Attach the rotating handler to the root logger.
logger = logging.getLogger()
logger.addHandler(logHandler)
logger.setLevel(loglevel)
def get_file_handler(logger_name):
    """Return a midnight-rotating file handler for ``logger_name``.

    Only the 7 most recent daily log files are kept; the module-wide
    FORMATTER is applied to every record.
    """
    handler = TimedRotatingFileHandler(logger_name,
                                       when="midnight",
                                       backupCount=7)
    handler.setFormatter(FORMATTER)
    return handler
def create_logger(
        self,
        log_file_name='{0}/e2etest_running.log'.format(config.BASE_DIR),
        log_level=logging.DEBUG,
        log_date_format='%Y-%m-%d %H:%M:%S%z',
        log_formater='%(asctime)s %(filename)s:%(funcName)s %(levelname)s [line:%(lineno)d] %(message)s',
        max_log_files=3,
        one_day_one_file=True,
        max_log_file_size=10485760,
        log_to_standard_output=False):
    '''
    @summary: create the logger
    @param log_file_name: the log file name, should be absolute path.
                          If the value is None or "", print the log to standard output
    @param log_level: Integer of the log level. default value is logging.DEBUG
    @param max_log_files: the max number of files. It is valid when one_day_one_file equals False. default value is 3
    @param one_day_one_file: whether to create only one file per day. default value is True
    @param max_log_file_size: the max size of the log file in bytes. default value is 10 MB
    @param log_date_format: String of log date format, like 2017-06-01 11:44:06+0000
    @param log_to_standard_output: whether to print logs to standard output only
    @return: the logger
    '''
    # Initialize the log file location, creating parent dirs as needed.
    if log_file_name:
        log_file_name = os.path.abspath(log_file_name)
        if not os.path.exists(os.path.dirname(log_file_name)):
            os.makedirs(os.path.dirname(log_file_name))

    # Write log into file (one logger per log file path).
    if log_file_name and isinstance(log_file_name, str):
        logger = logging.getLogger(log_file_name)
        logger.setLevel(log_level)
        formatter = logging.Formatter(fmt=log_formater,
                                      datefmt=log_date_format)
        # BUG FIX: guard against attaching a fresh file handler on every
        # call, which produced duplicate log lines.
        if not logger.handlers:
            if one_day_one_file:
                # Write a new log file every day.
                Rthandler = TimedRotatingFileHandler(log_file_name,
                                                     when='D',
                                                     backupCount=max_log_files)
            else:
                # Size-based rotation.
                Rthandler = RotatingFileHandler(log_file_name,
                                                maxBytes=max_log_file_size,
                                                backupCount=max_log_files)
            Rthandler.setFormatter(formatter)
            logger.addHandler(Rthandler)
        # Also write log to standard output synchronously if requested.
        if log_to_standard_output:
            console = logging.StreamHandler()
            console.setLevel(log_level)
            console.setFormatter(formatter)
            logger.addHandler(console)
    else:
        # Standard output only.
        logging.basicConfig(level=log_level,
                            format=log_formater,
                            datefmt=log_date_format)
        # BUG FIX: the original returned the `logging` *module* here;
        # return the root logger instead, which supports the full
        # Logger API while behaving the same for .info/.debug/... calls.
        logger = logging.getLogger()
    return logger
from dockerplace import app

if __name__ == '__main__':
    # Outside debug mode, persist WARNING+ records to a daily rotating
    # log file (10 backups kept).
    if not app.debug:
        import logging
        from logging.handlers import TimedRotatingFileHandler
        file_handler = TimedRotatingFileHandler(
            "dockerplace.log", when="D", backupCount=10)
        file_handler.setLevel(logging.WARNING)
        app.logger.addHandler(file_handler)
    # Debug flag and port come from the app configuration.
    app.debug = app.config['DEBUG']
    app.run(
        port=app.config['PORT'])
from logging.handlers import TimedRotatingFileHandler
import logging
import sys
import os

# Make sure the log directory and the log file exist before the
# handler is created.
if not os.path.exists('logs'):
    os.mkdir('logs')
if not os.path.exists('logs/developer_entry_task.log'):
    open("logs/developer_entry_task.log", "w+").close()

# Shared midnight-rotating handler: 20 backups, date-suffixed files.
formater = logging.Formatter(
    '[%(levelname)s] - %(name)s - %(asctime)s - %(message)s')
file_handler = TimedRotatingFileHandler('logs/developer_entry_task.log',
                                        when='midnight', backupCount=20)
file_handler.setFormatter(formater)
file_handler.suffix = "%Y-%m-%d"


def get_logger(name):
    """Return the named logger wired to the shared rotating handler.

    The level is DEBUG when '--debug' was passed on the command line,
    INFO otherwise.
    """
    log = logging.getLogger(name)
    log.setLevel(logging.DEBUG if '--debug' in sys.argv else logging.INFO)
    log.addHandler(file_handler)
    return log
from logging.handlers import TimedRotatingFileHandler
from logging.handlers import RotatingFileHandler
import traceback
import queue
import tkinter as tk
from tkinter.scrolledtext import ScrolledText
from email_crawler import DEFAULT_SITE, crawl, crawler_main, export_emails, OutputUIInterface

# Debugging
# import pdb;pdb.set_trace()

# Logging: INFO+ to a midnight-rotating file. The positional handler
# arguments are when='midnight', interval=1, backupCount=30.
#logging.config.dictConfig(LOGGING)
logger = logging.getLogger("crawler_logger")
logger.setLevel(logging.INFO)
handler = TimedRotatingFileHandler('logs/log','midnight',1,30)
formatter = logging.Formatter('%(asctime)s %(name)-2s %(levelname)-2s %(message)s','%y-%m-%d %H:%M:%S')
handler.setFormatter(formatter)
logger.addHandler(handler)

# NOTE(review): `global` at module scope is a no-op.
global main_window
# Queue used to marshal crawler output back onto the Tk UI thread.
ui_callback_queue = queue.Queue()


class OutputUI(OutputUIInterface):
    """Adapter that writes crawler output lines into a Tk text widget."""

    def __init__(self, ctrl: tk.Text):
        self._ctrl = ctrl

    def append(self, ls):
        # Forward each produced line to the widget.
        for line in ls:
            self.append_line(line)
from lexi.core.util.util import read_blacklist_words
from lexi.server.util import statuscodes
from lexi.server.util.html import process_html
from lexi.server.util.communication import make_response

SCRIPTDIR = os.path.dirname(os.path.realpath(__file__))
# os.makedirs(MODELS_DIR, exist_ok=True)

# LOGGING
# os.makedirs(LOG_DIR, exist_ok=True)
logger = logging.getLogger('lexi')
log_level = logging.DEBUG
# get logging level from CL argument, if set
logger.setLevel(log_level)

# File handler: midnight rotation, UTF-8, rotated files suffixed with
# the date (lexi.log.YYYY-MM-DD).
fh = TimedRotatingFileHandler(LOG_DIR+'/lexi.log', when="midnight",
                              interval=1, encoding="UTF-8")
fh.suffix = "%Y-%m-%d"
fh.setLevel(log_level)

# Console handler (same level as the file handler here).
ch = logging.StreamHandler()
ch.setLevel(log_level)

# Shared formatter, applied to both handlers.
formatter = logging.Formatter('%(asctime)s - %(name)s - '
                              '{%(filename)s:%(lineno)d} '
                              '%(levelname)s - %(message)s')
fh.setFormatter(formatter)
ch.setFormatter(formatter)

# Attach both handlers to the 'lexi' logger.
logger.addHandler(fh)
logger.addHandler(ch)
from logging.handlers import TimedRotatingFileHandler
from apscheduler.schedulers.asyncio import AsyncIOScheduler
from tempfile import NamedTemporaryFile
# NOTE(review): wildcard import of a local `secrets` module — this
# shadows the stdlib `secrets`; it presumably also provides the
# `strftime` and `Formatter` names used below. Verify.
from secrets import *

# Setup logger
logging.basicConfig(
    encoding='utf-8',
    datefmt="%d-%m-%y %H:%M:%S",
    level=logging.INFO,
)
logger = logging.getLogger('moebot')
# Daily-rotating bot log, 5 backups kept.
# NOTE(review): the launch date is baked into the filename while the
# handler also rotates daily, so rotated files get a second date suffix.
handler = TimedRotatingFileHandler(
    filename=f'moebot-{strftime("%d-%m-%y")}.log',
    when="D", interval=1, backupCount=5,
    encoding='utf-8', delay=False)
handler.setFormatter(fmt=Formatter(
    "%(asctime)s %(levelname)-8s [%(filename)s:%(lineno)s] %(message)s"))
logger.addHandler(handler)


async def main() -> None:
    """Main loop of the entire bot"""
    logger.info("Starting main loop...")
    # Avoid rate limiting
    retries = 0
def access_to_website(url): chrome.get(url) #Pour envoyer un mail après l'export from email import encoders from email.mime.base import MIMEBase from email.mime.multipart import MIMEMultipart from email.mime.text import MIMEText # Permet d'enregistrer les logs dans un fichier et de faire La rotation formatter = logging.Formatter( '%(asctime)s - %(name)s - %(levelname)s - %(message)s') rotation_handler = TimedRotatingFileHandler( filename=r"C://Users//alain.singaye//Documents//GLPI//Logs//log.txt", when='H', interval=5) rotation_handler.setFormatter(formatter) logger = logging.getLogger() logger.addHandler(rotation_handler) logger.setLevel(logging.DEBUG) # Le dossier dans lequel sera saugarder l'export. download_dir = r"C:\Users\alain.singaye\Documents\GLPI" chrome_options = webdriver.ChromeOptions() path = r'C:/bin/chromedriver.exe' preferences = { "download.default_directory": download_dir, "directory_upgrade": True, "safebrowsing.enabled": True, "plugins.always_open_pdf_externally": True
""" add some params for honeypot """ msg['machine_id'] = os.getenv("MACHINE_ID", "") msg['honey_name'] = os.getenv("HONEY_NAME", "") msg['honey_type'] = os.getenv("HONEY_TYPE", "") msg['dst_ip'] = os.getenv("DOCKER_HOST", "") msg['dst_port'] = os.getenv("OUT_PORT", "") msg = json.dumps(msg) return msg logger = logging.getLogger() logger.addHandler(logging.StreamHandler()) logger.addHandler( TimedRotatingFileHandler(filename=filename, when='D', backupCount=3)) logging = TophantLoggerAdapter(logger, '') from rdpy.core import rss from rdpy.protocol.rdp import rdp from twisted.internet import reactor class HoneyPotServer(rdp.RDPServerObserver): def __init__(self, controller, rssFileSizeList): """ @param controller: {RDPServerController} @param rssFileSizeList: {Tuple} Tuple(Tuple(width, height), rssFilePath) """ rdp.RDPServerObserver.__init__(self, controller)
def get_file_handler():
    """Build the midnight-rotating file handler for LOG_FILE.

    The handler uses the module-wide FORMATTER and records only
    messages at WARNING severity or above.
    """
    handler = TimedRotatingFileHandler(LOG_FILE, when='midnight')
    handler.setFormatter(FORMATTER)
    handler.setLevel(logging.WARNING)
    return handler
import pickle
import urllib3
from bs4 import BeautifulSoup
import time
import logging
from logging.handlers import TimedRotatingFileHandler
from twilio.rest import Client

# Module logger: DEBUG+ to a midnight-rotating file.
logger = logging.getLogger(__name__)
logger.setLevel(logging.DEBUG)
handler = TimedRotatingFileHandler('########/OINP.log', when="midnight", interval=1)
formatter = logging.Formatter(
    '%(asctime)s - %(name)s - %(levelname)s - %(message)s')
handler.setFormatter(formatter)
logger.addHandler(handler)

# Twilio credentials (redacted placeholders).
account_sid = "########"
auth_token = "########"

# Load the previously-seen state from the pickle archive.
# NOTE(review): variable name 'archieve' (sic) is kept as-is.
pickle_in = open("#########/OINP.pickle", "rb")
archieve = pickle.load(pickle_in)
pickle_in.close()

# Poll the OINP updates page.
for i in range(26):
    try:
        page_url = 'http://www.ontarioimmigration.ca/en/pnp/OI_PNPNEW.html'
import logging
from logging.handlers import TimedRotatingFileHandler
from tools.os import os_tool
"""Logging helper: wraps the standard logging setup for the project."""

logger = logging.getLogger(__name__)
logger.setLevel(level=logging.INFO)

# Ensure the logs/ directory exists under the project root.
root_path = os_tool.get_root_path() + 'logs/'
os_tool.mkdir(root_path)

# info.log: INFO+ records, rotated daily, 30 backups kept.
handler = TimedRotatingFileHandler(root_path + 'info.log', when='d', interval=1, backupCount=30, encoding='utf-8')
handler.setLevel(logging.INFO)
formatter = logging.Formatter(
    '%(asctime)s - %(name)s - %(levelname)s - %(message)s')
handler.setFormatter(formatter)
logger.addHandler(handler)

# error.log: ERROR+ records, rotated daily, 30 backups kept.
# NOTE(review): handler2 is not added to the logger within this chunk —
# presumably attached just below; verify.
handler2 = TimedRotatingFileHandler(root_path + 'error.log', when='d', interval=1, backupCount=30, encoding='utf-8')
handler2.setLevel(logging.ERROR)
formatter2 = logging.Formatter(
    '%(asctime)s - %(name)s - %(levelname)s - %(message)s')
handler2.setFormatter(formatter2)
import logging
from logging.handlers import TimedRotatingFileHandler
import time

from volta_plus.models import VoltaNetwork

# Root logging at WARNING, written to a midnight-rotating (UTC) file
# with 3 backups kept.
# NOTE(review): the logging.debug call below is suppressed at this
# level — confirm whether that is intended.
logging.basicConfig(level=logging.WARNING,
                    format='[%(levelname)s][%(asctime)s] %(message)s',
                    datefmt='%Y-%m-%d %H:%M:%S',
                    handlers=[
                        TimedRotatingFileHandler('volta_plus.log',
                                                 when='midnight',
                                                 backupCount=3,
                                                 utc=True)
                    ])

if __name__ == '__main__':
    volta_network = VoltaNetwork(poor=True)

    # Poll forever: refresh every 15s; back off 30s after any failure.
    while True:
        try:
            volta_network.update()
            logging.debug("updated Volta Network")
            time.sleep(15)
        except Exception as e:
            logging.exception(e)
            time.sleep(30)
mkdir(logsPath) #设置logger模块 log = logging.getLogger('mylogger') if options.env == 'Debug': log.setLevel(logging.DEBUG) else: log.setLevel(logging.INFO) #定义格式 formatter = logging.Formatter('%(asctime)s - %(name)s - %(levelname)s - %(message)s') #定义文件名 loggerFileName = options.base_dirname+os.sep+'logs'+os.sep+options.logger_name if options.sep_logger: #定义根据日期分割日志 fileTimeHandler = TimedRotatingFileHandler(loggerFileName, "h", 1, 30) fileTimeHandler.suffix = "%Y%m%d" fileTimeHandler.setFormatter(formatter) log.addHandler(fileTimeHandler) else: #不根据日期分割日志 fh = logging.FileHandler(loggerFileName) fh.setFormatter(formatter) log.addHandler(fh) log.info('log start in {0}'.format(options.env))
""" Hi, I am ObsPy's docs deploy bot. I request github runs and extract uploaded doc artifacts to the ObsPy server. Outdated PR docs older than 90 days will be deleted. """ import logging from logging.handlers import TimedRotatingFileHandler import sched import signal import sys import requests handlers = [TimedRotatingFileHandler('log.txt', 'D', 30, 5)] format_ = '%(levelname)s:%(name)s:%(asctime)s %(message)s' datefmt = '%Y-%m-%d %H:%M:%S' logging.basicConfig(level=logging.INFO, format=format_, datefmt=datefmt, handlers=handlers) from deploy_docs import deploy from remove_old_pr_docs import remove_old_docs log = logging.getLogger('docsdeploybot') log.info(' '.join(__doc__.strip().splitlines())) T1 = 60 T2 = 24 * 3600 def sdeploy():
import time
import json
import socket
import urllib
import urllib2
import logging
import datetime
from logging.handlers import TimedRotatingFileHandler

# Python 2 idiom: force the default string encoding to UTF-8.
# (urllib2/reload/setdefaultencoding mark this module as Python 2.)
reload(sys)
sys.setdefaultencoding('utf8')

# log: INFO+ to a daily rotating file, 30 backups kept.
LOG_FILE = "/home/opvis/utils/log/pmonitor.log"
logger = logging.getLogger()
logger.setLevel(logging.INFO)
fh = TimedRotatingFileHandler(LOG_FILE, when='D', interval=1, backupCount=30)
datefmt = '%Y-%m-%d %H:%M:%S'
format_str = '%(asctime)s %(levelname)s %(message)s '
formatter = logging.Formatter(format_str, datefmt)
fh.setFormatter(formatter)
logger.addHandler(fh)

# Parse "<name>=<value>" from argv: the last-but-one character selects
# the item number, the last character the time granularity.
# NOTE(review): confirm the expected argv format — this slicing is
# fragile against other shapes of input.
arg = sys.argv[1]
arg_number = arg.split("=")[1].replace("\n", "")[-2:-1]
arg_time = arg.split("=")[1].replace("\n", "")[-1:]

# Data files used by the monitor.
allitems = "/home/opvis/utils/pm/allitems"
resend_datas_m = "/home/opvis/utils/pm/resend_datas_m"
resend_datas_h = "/home/opvis/utils/pm/resend_datas_h"
# Propagate the resolved user identity to the environment.
os.environ['USERNAME'] = user
os.environ['UID'] = "%s" % uid
os.environ['GID'] = "%s" % gid

cnt = 0
url = "http://localhost/go-to-bed/"
pp = pprint.PrettyPrinter(indent=4)
active_crons = {}
testing = False

# Log INFO+ to a midnight-rotating file in the user's home directory,
# keeping 20 backups.
logger = logging.getLogger("go-to-bed")
logger.setLevel(logging.INFO)
formatter = logging.Formatter(
    "%(asctime)s - %(name)s - %(levelname)s - %(message)s")
handler = TimedRotatingFileHandler(os.path.expanduser("~/.go-to-bed.log"),
                                   when="midnight", backupCount=20)
handler.setFormatter(formatter)
logger.addHandler(handler)

# FIX: regex patterns are now raw strings — the originals relied on
# invalid string escapes ("\:", "\s", "\d"), which emit
# DeprecationWarning/SyntaxWarning on modern Python. The compiled
# patterns are unchanged.
session_re = re.compile(r"(Session[0-9]+)\:")
var_val_re = re.compile(r"\t(.*)\s\=\s\'(.*)\'")
var_val_bool_re = re.compile(r"\t(.*)\s\=\s(TRUE|FALSE)")
ps_re = re.compile(r"(\d+)\s(.*?)\s+(.*)")

# Optional test mode: "--test [value]" on the command line.
if "--test" in sys.argv:
    testing = True
    idx = sys.argv.index("--test")
    if len(sys.argv) > idx + 1:
        testing = sys.argv[idx + 1]
logger.info("testing:%s", testing)
format='%(asctime)s %(levelname)s %(module)s - %(funcName)s: %(message)s', datefmt="%Y-%m-%d %H:%M:%S") logger = logging.getLogger() lshandler = None if os.environ["USE_LOGSTASH"] == "true": logger.info("Adding logstash appender") lshandler = AsynchronousLogstashHandler("logstash", 5001, database_path='logstash_test.db') lshandler.setLevel(logging.ERROR) logger.addHandler(lshandler) handler = TimedRotatingFileHandler("logs/" + MODULE + ".log", when="d", interval=1, backupCount=30) logFormatter = logging.Formatter( '%(asctime)s.%(msecs)03d %(levelname)s %(module)s - %(funcName)s: %(message)s' ) handler.setFormatter(logFormatter) logger.addHandler(handler) logger.info("==============================") logger.info("Starting: %s" % MODULE) logger.info("Module: %s" % (VERSION)) logger.info("==============================") #>> AMQC server = {
def get_file_handler(log_file):
    """Return a handler that rotates ``log_file`` at midnight.

    The module-wide FORMATTER is applied to every record.
    """
    handler = TimedRotatingFileHandler(log_file, when='midnight')
    handler.setFormatter(FORMATTER)
    return handler