Exemple #1
0
def create_app(config_name):
    """Application factory for the module-level Flask ``app``.

    config_name -- key into the module-level ``config`` mapping that
    selects which configuration object to load.

    NOTE(review): mutates and returns the shared global ``app`` rather
    than building a fresh instance per call.
    """
    global app
    app.config.from_object(config[config_name])

    # Setup database
    # Currently inits mongoDB
    init_db(app)

    # Todo make intializing blueprints consistent
    app.register_blueprint(bp_index)
    app.register_blueprint(bp_auth)
    app.register_blueprint(bp_timer)
    app.register_blueprint(v1_api)
    init_api(app)

    # Route INFO and above to flask.log.
    file_handler = FileHandler("flask.log")
    file_handler.setLevel(logging.INFO)
    app.logger.addHandler(file_handler)

    # Let the selected configuration run its own app hooks.
    config[config_name].init_app(app)
    # Flask-Security initialisation.
    init_flask_security(app)

    # NOTE(review): Mail(app) is called for its side effect on ``app``;
    # the local binding itself is unused.
    mail = Mail(app)

    return app
Exemple #2
0
def addHandler(handler=None, stream=None, filename=None, filemode='a',
               format=None, datefmt=None, level=None, max_level=None,
               filters=(), logger=None):
    """stream, filename, filemode, format, datefmt: as per logging.basicConfig

       handler: use a precreated handler instead of creating a new one
       logger: logger to add the handler to (uses root logger if none specified)
       filters: an iterable of filters to add to the handler
       level: only messages of this level and above will be processed
       max_level: only messages of this level and below will be processed

       Returns the fully configured handler.
    """
    # Create the handler if one hasn't been passed in.  A filename takes
    # precedence over a stream, mirroring logging.basicConfig.
    if handler is None:
        if filename is not None:
            handler = FileHandler(filename, filemode)
        else:
            handler = StreamHandler(stream)
    # Set up the formatting of the log messages
    # New API, so it can default to str.format instead of %-formatting
    formatter = Formatter(format, datefmt)
    handler.setFormatter(formatter)
    # Set up filtering of which messages to handle
    if level is not None:
        handler.setLevel(level)
    if max_level is not None:
        handler.addFilter(LowPassFilter(max_level))
    # Renamed the loop variable so the builtin ``filter`` is not shadowed.
    for log_filter in filters:
        handler.addFilter(log_filter)
    # Add the fully configured handler to the specified logger
    if logger is None:
        logger = getLogger()
    logger.addHandler(handler)
    return handler
 def close(self):
     """Read back the buffered log summary, optionally e-mail it, and close.

     NOTE(review): Python 2-era str/bytes handling (decode on read,
     best-effort ascii re-encode to keep e-mails simple).
     """
     # Closing twice is a no-op.
     if self.closed:
         return
     # Close the underlying log stream via the parent class first.
     FileHandler.close(self)
     # Re-open the captured file descriptor to read everything logged.
     f = os.fdopen(self.fd)
     summary = f.read().decode(self.charset)
     f.close()
     # try and encode in ascii, to keep emails simpler:
     try:
         summary = summary.encode('ascii')
     except UnicodeEncodeError:
         # unicode it is then
         pass
     # The temporary log file is no longer needed.
     if os.path.exists(self.filename):
         os.remove(self.filename)
     # Only mail the summary when some record reached the send threshold.
     if self.send_level is None or self.maxlevelno >= self.send_level:
         self.mailer.handle(
             LogRecord(
                 name = 'Summary',
                 level = self.maxlevelno,
                 pathname = '',
                 lineno = 0,
                 msg = summary,
                 args = (),
                 exc_info = None
                 )
             )
     self.closed = True
Exemple #4
0
    def __setup_logger(self, name, log_path):
        """
        Configure python logging system for our needs.

        Required arguments:
        name -- name of root python logger which will be
        used as root for our logger object
        log_path -- path to file into which log will be written
        """
        self.__root_logger = getLogger(name)
        # Set level to INFO to enable handling of
        # all messages with level up to info
        self.__root_logger.setLevel(INFO)
        # Clear any handlers this logger already may have.
        # Fixed: iterate over a copy -- removing from the live
        # ``handlers`` list while iterating it skips every other handler.
        for handler in list(self.__root_logger.handlers):
            self.__root_logger.removeHandler(handler)
        # Create folder for logger if it doesn't exist yet.
        # exist_ok avoids the race between an isdir() check and makedirs();
        # the truthiness guard avoids makedirs('') when log_path is bare.
        log_folder = os.path.dirname(log_path)
        if log_folder:
            os.makedirs(log_folder, mode=0o755, exist_ok=True)
        handler = FileHandler(log_path, mode='a', encoding='utf-8', delay=False)
        # Set up formatter options ({}-style formatting)
        msg_format = '{asctime:19.19} | {levelname:7.7} | {name:23.23} | {message}'
        time_format = '%Y-%m-%d %H:%M:%S'  # Must be specified in old style, as of python 3.2
        formatter = Formatter(fmt=msg_format, datefmt=time_format, style='{')
        handler.setFormatter(formatter)
        self.__root_logger.addHandler(handler)
Exemple #5
0
	def setup_logger(self, fname):
		"""Attach an INFO-level file handler writing to *fname* to the root logger."""
		root = logging.getLogger()
		root.setLevel(logging.INFO)
		# Timestamped single-line output.
		file_handler = FileHandler(filename=fname)
		file_handler.setFormatter(logging.Formatter('%(asctime)-15s %(message)s'))
		root.addHandler(file_handler)
		self.logger = root
Exemple #6
0
def get_task_logger(worker, task, subtask=None, workunit=None):
    """
    Initializes a logger for tasks and subtasks.

    Task logs live in separate files (one per task instance, plus one per
    workunit when given) so each workunit can be read on its own instead
    of interleaved with messages from many other workunits.

    @param worker: there may be more than one Worker per Node; logs are
                   stored per worker.
    @param task: ID of the task instance; each instance gets its own log.
    @param subtask: (optional) subtask_key, see workunit_id.
    @param workunit: (optional) ID of the workunit; gets its own log file,
                     separate from the task instance log.
    """
    directory, filename = task_log_path(task, subtask, workunit, worker)
    makedirs(directory)

    # Workunits become child loggers of the task's logger.
    name_parts = ['task.%s' % task]
    if workunit:
        name_parts.append('%s' % workunit)
    task_logger = logging.getLogger('.'.join(name_parts))

    # One file per task instance (or workunit), tagged with the worker.
    file_handler = FileHandler(filename)
    file_handler.setFormatter(logging.Formatter(LOG_FORMAT % ("[%s]" % worker)))

    task_logger.addHandler(file_handler)
    task_logger.setLevel(settings.LOG_LEVEL)

    return task_logger
 def __init__(self, name, level=0):
     """Initialise the logger and route output to umitweb.log.

     Replaces any handlers installed by the ``Log`` base class with a
     single FileHandler in the configuration directory.
     """
     Log.__init__(self, name, level)
     # Drop the base class's handlers before attaching our own.
     del self.handlers[:]
     handler = FileHandler(join(Path.config_dir, "umitweb.log"))
     # Reuse the formatter prepared by the base class.
     handler.setFormatter(self.formatter)

     self.addHandler(handler)
Exemple #8
0
def _setup_task_logger(logger):
    """Configure a task logger to generate site- and task-specific logs."""
    if logger.handlers:
        # Handlers present: this logger was already configured once.
        return

    parts = logger.name.split(".")
    if len(parts) < 4:
        # Malformed name; expected at least "<a>.<b>.<site>.<task>".
        return
    site, task = parts[2], parts[3]

    _ensure_dirs(os.path.join(_log_dir, site))

    log_formatter = Formatter(
        fmt="[%(asctime)s %(levelname)-7s] %(message)s",
        datefmt=_DATE_FORMAT)

    base = os.path.join(_log_dir, site, task)

    # INFO and above rotates at midnight, keeping 30 days of history.
    info_handler = TimedRotatingFileHandler(base + ".log", "midnight", 1, 30)
    info_handler.setLevel("INFO")

    # The verbose DEBUG log is truncated on each run.
    debug_handler = FileHandler(base + ".log.verbose", "w")
    debug_handler.setLevel("DEBUG")

    # Errors rotate by size: ~1 MiB per file, four backups.
    error_handler = RotatingFileHandler(
        base + ".err", maxBytes=1024**2, backupCount=4)
    error_handler.setLevel("WARNING")

    for h in (info_handler, debug_handler, error_handler):
        h.setFormatter(log_formatter)
        logger.addHandler(h)
Exemple #9
0
def setup_logging(verbose_level: int = 0, filename: str = None):
    """Configure root logging: colorized stdout plus a truncating file log.

    verbose_level -- <= -1 logs CRITICAL only, 0 logs INFO, >= 1 logs DEBUG.
    filename -- log file path; defaults to "last.log".
    Returns this module's logger.
    """
    # Map verbosity onto a level with one if/elif chain.  Fixed: the
    # original used ``verbose_level is 0`` (identity comparison with an
    # int literal, implementation-defined) and left ``level`` as None
    # for values below -1, which crashes root.setLevel(None).
    if verbose_level <= -1:
        level = logging.CRITICAL
    elif verbose_level == 0:
        level = logging.INFO
    else:
        level = logging.DEBUG

    formatter = logging.Formatter(fmt="%(asctime)-10s%(message)s", datefmt="%H:%M:%S")

    # Colorized console output, redirected to stdout.
    stdout_handler = ColorizingStreamHandler()
    stdout_handler.setFormatter(formatter)
    stdout_handler.stream = sys.stdout

    if not filename:
        filename = "last.log"
    # mode="w": the log file is truncated on every run.
    file_handler = FileHandler(filename=filename, mode="w")
    file_handler.setFormatter(formatter)

    root = logging.getLogger()
    root.addHandler(stdout_handler)
    root.addHandler(file_handler)

    root.setLevel(level)

    return logging.getLogger(__name__)
Exemple #10
0
def init_webapp(app, db):
    # NOTE: Python 2 module (print statements).
    # Wires the PRAC webapp into an existing Flask ``app``: template
    # loaders, upload/log configuration, a user-statistics logger, and
    # the route modules (imported for registration side effects).
    print 'Initializing PRAC webapp...'

    pracApp.app = app
    # use html templates from prac app
    prac_loader = jinja2.ChoiceLoader([
        pracApp.app.jinja_loader,
        jinja2.FileSystemLoader(['/opt/practools/tools/prac/pracweb/gui/templates']),
    ])
    pracApp.app.jinja_loader = prac_loader
    pracApp.app.config['PRAC_STATIC_PATH'] = '/opt/practools/tools/prac/pracweb/gui/build'

    # settings for fileuploads and logging
    pracApp.app.config['ALLOWED_EXTENSIONS'] = {'mln', 'db', 'pracmln', 'emln'}
    pracApp.app.config['UPLOAD_FOLDER'] = '/home/ros/pracfiles'
    pracApp.app.config['PRAC_ROOT_PATH'] = '/opt/practools/tools/prac'
    pracApp.app.config['LOG_FOLDER'] = os.path.join('/home/ros/pracfiles/prac', 'log')

    if not os.path.exists(pracApp.app.config['LOG_FOLDER']):
        os.mkdir(pracApp.app.config['LOG_FOLDER'])

    # separate logger for user statistics
    root_logger = logging.getLogger('userstats')
    handler = FileHandler(os.path.join(pracApp.app.config['LOG_FOLDER'], "userstats.json"))
    # Trailing comma turns the file into a comma-separated JSON stream.
    formatter = logging.Formatter("%(message)s,")
    handler.setFormatter(formatter)
    root_logger.addHandler(handler)

    print 'Registering PRAC routes...'
    # Imported for their side effects (route registration).
    from pracweb.gui.pages import inference
    from pracweb.gui.pages import views
    from pracweb.gui.pages import utils
Exemple #11
0
def init_app_logger(app):
    """Attach an INFO-level file handler (flask.log) to *app*'s logger."""
    handler = FileHandler('flask.log')
    fmt = Formatter(
        '%(asctime)s|%(levelname)s|%(pathname)s:%(lineno)d|%(message)s'
    )
    handler.setFormatter(fmt)
    handler.setLevel(logging.INFO)
    app.logger.addHandler(handler)
Exemple #12
0
def init_logging():
    """Set up DEBUG-level file logging for telemetry rx/tx traffic.

    Creates two timestamped log files (in-*.log / out-*.log) in the
    current directory, one per direction.
    """
    # Attach a NullHandler to the root logger so records propagating up
    # don't trigger logging's "last resort" stderr output.
    # Fixed: the original bound addHandler()'s return value (always
    # None) to a ``root`` variable, which was misleading dead code.
    getLogger().addHandler(logging.NullHandler())

    # Get the loggers used in pytelemetry.telemetry.telemetry file
    rx = getLogger("telemetry.rx")
    tx = getLogger("telemetry.tx")
    rx.setLevel(logging.DEBUG)
    tx.setLevel(logging.DEBUG)

    # Format how data will be .. formatted
    formatter = logging.Formatter('%(asctime)s | %(levelname)s | %(message)s')

    # One file per direction, named with a startup timestamp.
    dateTag = datetime.datetime.now().strftime("%Y-%b-%d_%H-%M-%S")
    in_handler = FileHandler('in-%s.log' % dateTag)
    in_handler.setLevel(logging.DEBUG) # Also pass all messages
    in_handler.setFormatter(formatter)

    out_handler = FileHandler('out-%s.log' % dateTag)
    out_handler.setLevel(logging.DEBUG) # Also pass all messages
    out_handler.setFormatter(formatter)

    # Attach the handlers to their direction's logger.
    rx.addHandler(in_handler)
    tx.addHandler(out_handler)
 def __init__(self, job, level):
     """Set up queue-based logging for *job*.

     Records produced in worker processes/threads are put on
     ``self.queue``; a QueueListener (created here, started elsewhere)
     forwards them to the job log file, stderr, and a record-counting
     handler.
     """

     self.job = job
     self.level = level

     # Create queue through which log records can be sent from various
     # processes and threads to the logging thread.
     self.queue = Queue()

     formatter = Formatter('%(asctime)s %(levelname)-8s %(message)s')

     # Create handler that writes log messages to the job log file.
     os_utils.create_parent_directory(job.log_file_path)
     file_handler = FileHandler(job.log_file_path, 'w')
     file_handler.setFormatter(formatter)

     # Create handler that writes log messages to stderr.
     stderr_handler = StreamHandler()
     stderr_handler.setFormatter(formatter)

     self._record_counts_handler = _RecordCountsHandler()

     # Create logging listener that will run on its own thread and log
     # messages sent to it via the queue.
     self._listener = QueueListener(
         self.queue, file_handler, stderr_handler,
         self._record_counts_handler)
Exemple #14
0
def _initialize_logging(config):
    """
    Configure logging.

    Two loggers are established: ``tiddlyweb`` and ``tiddlywebplugins``.
    Modules which wish to log should use ``logging.getLogger(__name__)``
    to get a logger in the right part of the logging hierarchy.
    """
    from logging import FileHandler

    # Both top-level loggers share one file handler and are detached
    # from the root logger.
    tiddlyweb_logger = logging.getLogger('tiddlyweb')
    plugin_logger = logging.getLogger('tiddlywebplugins')
    for lgr in (tiddlyweb_logger, plugin_logger):
        lgr.propagate = False
        lgr.setLevel(config['log_level'])

    log_path = os.path.join(config['root_dir'], config['log_file'])
    file_handler = FileHandler(filename=log_path)
    file_handler.setFormatter(logging.Formatter(
        '%(asctime)s %(levelname)s %(name)s: %(message)s'))
    tiddlyweb_logger.addHandler(file_handler)
    plugin_logger.addHandler(file_handler)

    tiddlyweb_logger.debug('TiddlyWeb starting up as %s', sys.argv[0])
def create_app(config_name):
    """Configure the shared ``app``: config, DB, auth, logging, blueprints,
    and upload handling.  Returns the configured app."""
    app.config.from_object(config[config_name])
    db.init_app(app)
    login_manager.init_app(app)
    login_manager.session_protection = 'strong'
    login_manager.login_view = 'admin.login'

    # File logging is only wanted outside of debug mode.
    if not app.debug:
        import logging
        from logging import FileHandler, Formatter

        handler = FileHandler(Constant.LOG_DIR, encoding='utf8')
        handler.setLevel(logging.DEBUG)
        handler.setFormatter(Formatter(
            '[%(asctime)s] %(levelname)s: %(message)s '
            '[in %(pathname)s:%(lineno)d]'))
        app.logger.addHandler(handler)

    # Blueprints are imported lazily to avoid circular imports.
    from main import main as main_blueprint
    from admin import admin as admin_blueprint
    app.register_blueprint(main_blueprint)
    app.register_blueprint(admin_blueprint, url_prefix='/admin')

    patch_request_class(app, size=16*1024*1024)  # cap request size at 16MB
    configure_uploads(app, resource_uploader)

    return app
Exemple #16
0
def setup_logging():
    """Install a MultiHandler on the root logger fanning out to a build
    log file, stderr, and (when available) the local syslog socket."""
    global log

    progname = basename(argv[0])
    log = getLogger()
    log.setLevel(DEBUG)

    handlers = []
    # Persistent build log in the user's home directory.
    buildlog_handler = FileHandler(getenv("HOME") + "/build.log")
    buildlog_handler.setFormatter(
        Log8601Formatter("%(asctime)s " + progname + " %(levelname)s " +
                         "%(filename)s:%(lineno)s: %(message)s"))
    handlers.append(buildlog_handler)

    stderr_handler = StreamHandler(stderr)
    stderr_handler.setFormatter(
        Log8601Formatter("%(asctime)s %(name)s %(levelname)s " +
                         "%(filename)s:%(lineno)s: %(message)s"))
    handlers.append(stderr_handler)

    # Syslog only where the /dev/log socket exists.
    if exists("/dev/log"):
        syslog_handler = SysLogHandler(
            address="/dev/log", facility=LOG_LOCAL1)
        syslog_handler.setFormatter(
            Log8601Formatter(progname +
                             " %(asctime)s %(levelname)s: %(message)s"))
        handlers.append(syslog_handler)


    log.addHandler(MultiHandler(handlers))

    # Quieten the chatty AWS SDK loggers.
    getLogger("boto").setLevel(INFO)
    getLogger("boto3").setLevel(INFO)
    getLogger("botocore").setLevel(INFO)
    return
Exemple #17
0
def start_logging():
    """Initialise LMFDB logging: a WARNING-level file handler, a console
    handler on the root logger, and an optional DEBUG "logfocus" logger."""
    global logfocus, file_handler
    from lmfdb.utils.config import Configuration
    config = Configuration()
    logging_options = config.get_logging()

    file_handler = FileHandler(logging_options['logfile'])
    file_handler.setLevel(WARNING)

    # Optionally crank one named logger up to DEBUG.
    if 'logfocus' in logging_options:
        logfocus = logging_options['logfocus']
        getLogger(logfocus).setLevel(DEBUG)

    root_logger = getLogger()
    root_logger.setLevel(INFO)
    root_logger.name = "LMFDB"

    formatter = Formatter(LmfdbFormatter.fmtString.split(r'[')[0])
    ch = StreamHandler()
    ch.setFormatter(formatter)
    root_logger.addHandler(ch)

    cfg = config.get_all()
    # Mask the DB password before logging the configuration.
    # Fixed: the original tested ``"postgresql_options" and ...`` -- the
    # string literal is always truthy, so a missing section raised
    # KeyError.  Check key membership properly.
    if "postgresql_options" in cfg and "password" in cfg["postgresql_options"]:
        cfg["postgresql_options"]["password"] = "******"
    info("Configuration = {}".format(cfg) )
    check_sage_version()
Exemple #18
0
def get_log(name):
    """Return the shared 'Stream' logger with its file handler attached.

    NOTE(review): the ``name`` argument is currently ignored -- the
    logger is always named 'Stream'.  Confirm whether per-name loggers
    were intended before changing that.
    """
    logger = getLogger('Stream')
    logger.setLevel(ERROR)
    # Attach the handler only once.  Fixed: the original appended a new
    # FileHandler on every call, duplicating each log line per call.
    if not logger.handlers:
        file_handler = FileHandler('var/logs/stream.log')
        file_handler.setFormatter(
            Formatter("%(asctime)-15s %(name)s [%(levelname)s] - %(message)s"))
        logger.addHandler(file_handler)
    return logger
Exemple #19
0
    def __setup(self, name, logFolder):
        """
        Configure python logging system for our needs.

        Positional arguments:
        name -- name of root python logger which will be
        used as root for our logger object
        logFolder -- path to folder for logs
        """
        self.__rootLogger = getLogger(name)
        # Set level to INFO to enable handling of
        # all messages with level up to info
        self.__rootLogger.setLevel(INFO)
        # Clear any handlers this logger already may have.
        # Fixed: iterate over a copy -- removing from the live
        # ``handlers`` list while iterating it skips every other handler.
        for handler in list(self.__rootLogger.handlers):
            self.__rootLogger.removeHandler(handler)
        # Define log storage options
        logPath = os.path.join(logFolder, '{}.log'.format(name))
        os.makedirs(os.path.dirname(logPath), mode=0o755, exist_ok=True)
        handler = FileHandler(logPath, mode='a', encoding='utf-8', delay=False)
        # Set up formatter options ({}-style formatting)
        msgFormat = '{asctime:19.19} | {levelname:7.7} | {name:23.23} | {message}'
        timeFormat = '%Y-%m-%d %H:%M:%S'  # Must be specified in old style, as of python 3.2
        formatter = Formatter(fmt=msgFormat, datefmt=timeFormat, style='{')
        handler.setFormatter(formatter)
        self.__rootLogger.addHandler(handler)
Exemple #20
0
def root_doc(args, l, rc):
    # Serve the Ambry documentation web UI.  NOTE: Python 2 module
    # (print statement below).

    from ambry.ui import app, configure_application, setup_logging
    import ambry.ui.views as views
    import os

    import logging
    from logging import FileHandler
    import webbrowser

    # Default port when none was requested on the command line.
    port = args.port if args.port else 8085

    cache_dir = l._doc_cache.path('', missing_ok=True)

    config = configure_application(dict(port=port))

    # Only warnings and above go to the web log.
    file_handler = FileHandler(os.path.join(cache_dir, "web.log"))
    file_handler.setLevel(logging.WARNING)
    app.logger.addHandler(file_handler)

    print 'Serving documentation for cache: ', cache_dir

    if not args.debug:
        # Don't open the browser on debugging, or it will re-open on every
        # application reload
        webbrowser.open("http://localhost:{}/".format(port))

    app.run(host=config['host'], port=int(port), debug=args.debug)
Exemple #21
0
    def __init__(self, filename):
        # Open (truncate) the log file via the FileHandler base class and
        # emit the static HTML header that styles the per-level <div>
        # entries written by subsequent records.
        FileHandler.__init__(self, filename, mode='w', encoding=None, delay=0)
        self.stream.write('''<html>
    <head>
        <title>WebUI Log</title>
        <script language="javascript">
            function SetPicWidth(obj) {
                iMaxWidth = 800;
                iMinWidth = 100;
                iPicWidth = obj.width;
                if (iPicWidth == iMinWidth) {
                    obj.width = iMaxWidth;
                } else {
                    obj.width = iMinWidth;
                }
            }
        </script>
        <style type="text/css">
            div.debug {background-color:rgb(215,208,183); padding: 5px;}
            div.info {background-color:rgb(149,245,123); padding: 5px;}
            div.warning {background-color:rgb(252,252,142); padding: 5px;}
            div.error {background-color:rgb(253,143,135); padding: 5px;}
        </style>
    </head>
<body>
''')
Exemple #22
0
    def actionWork(self, *args, **kwargs):
        """Performing the set of actions.

        Runs each unit from getWorks() in sequence, feeding every result
        into the next as input, with per-task file logging.
        NOTE: Python 2 module (``except X, y`` syntax).
        """
        nextinput = args

        #set the logger to save the tasklog
        # NOTE(review): ``formatter`` is created but never attached to
        # ``taskhandler`` -- confirm whether that was intended.
        formatter = logging.Formatter("%(asctime)s:%(levelname)s:%(module)s:%(message)s")
        taskdirname = "logs/tasks/%s/" % self._task['tm_username']
        if not os.path.isdir(taskdirname):
            os.mkdir(taskdirname)
        taskhandler = FileHandler(taskdirname + self._task['tm_taskname'] + '.log')
        taskhandler.setLevel(logging.DEBUG)
        self.logger.addHandler(taskhandler)

        for work in self.getWorks():
            self.logger.debug("Starting %s on %s" % (str(work), self._task['tm_taskname']))
            t0 = time.time()
            try:
                output = work.execute(nextinput, task=self._task)
            except StopHandler, sh:
                msg = "Controlled stop of handler for %s on %s " % (self._task, str(sh))
                self.logger.error(msg)
                nextinput = Result(task=self._task, result='StopHandler exception received, controlled stop')
                break #exit normally. Worker will not notice there was an error
            except TaskWorkerException, twe:
                self.logger.debug(str(traceback.format_exc())) #print the stacktrace only in debug mode
                raise WorkerHandlerException(str(twe)) #TaskWorker error, do not add traceback to the error propagated to the REST
Exemple #23
0
def setup_logger(app_name):
    """ Instantiate a logger object

        Usage:
            logger = setup_logger('foo')     # saved as foo.log
            logger.info("Some info message")
            logger.warn("Some warning message")
            logger.error("Some error message")
            ... [for more options see: http://docs.python.org/2/library/logging.html]
    """
    logger = getLogger(app_name)
    logger.setLevel(DEBUG)

    shared_formatter = Formatter('%(asctime)s - %(name)s - %(levelname)s - %(message)s')

    # Everything from DEBUG up goes to "<app_name>.log" ...
    file_handler = FileHandler(app_name + '.log')
    file_handler.setLevel(DEBUG)
    file_handler.setFormatter(shared_formatter)
    logger.addHandler(file_handler)

    # ... while the console only shows errors.
    console_handler = StreamHandler()
    console_handler.setLevel(ERROR)
    console_handler.setFormatter(shared_formatter)
    logger.addHandler(console_handler)

    return logger
Exemple #24
0
def log_server(level, queue, filename, mode='w'):
    """Run the logging server.

    This listens to the queue of log messages, and handles them using Python's
    logging handlers.  It prints to stderr, as well as to a specified file, if
    it is given.

    level -- minimum level applied to both handlers
    queue -- queue instance fed by QueueHandlers in other processes
    filename -- optional log file path; falsy disables file logging
    mode -- open mode for the log file

    Blocks until interrupted (KeyboardInterrupt), then stops the listener.
    """
    formatter = _get_formatter()
    handlers = []

    sh = StreamHandler()
    sh.setFormatter(formatter)
    sh.setLevel(level)
    handlers.append(sh)

    if filename:
        fh = FileHandler(filename, mode)
        fh.setFormatter(formatter)
        fh.setLevel(level)
        handlers.append(fh)

    # The listener drains the queue on its own thread.
    listener = QueueListener(queue, *handlers)
    listener.start()

    # For some reason, queuelisteners run on a separate thread, so now we just
    # "busy wait" until terminated.
    try:
        while True:
            time.sleep(1)
    except KeyboardInterrupt:
        pass
    finally:
        listener.stop()
Exemple #25
0
def add_disk_handler(prefix, level=logging.NOTSET):
    """
    Enable typical logging to disk.

    Picks the first unused "<prefix>.<i>" path, attaches a UTF-8 file
    handler at *level* to the root logger, and returns the handler.
    """
    from os.path import lexists
    from itertools import count

    # Find an unused log file path by counting up from <prefix>.0.
    for suffix in count():
        path = "%s.%i" % (prefix, suffix)
        if not lexists(path):
            break

    from cargo.temporal import utc_now

    handler = FileHandler(path, encoding="utf-8")
    handler.setFormatter(VerboseFileFormatter())
    handler.setLevel(level)

    # Attach to the root logger so every logger inherits it.
    logging.root.addHandler(handler)

    log.debug("added log handler for file %s at %s", path, utc_now())

    return handler
def __setup_logging(app):
    """Attach a file handler to *app*'s logger per LOG_DIR/LOG_FILE config.

    NOTE(review): the log path is built by string concatenation
    (``log_dir_path + LOG_FILE``), which assumes the configured values
    already carry a path separator between them -- confirm, or switch
    to os.path.join.
    """
    log_file_path = ""
    log_dir_path = ""
    log_level = app.config.get('LOG_LEVEL', logging.INFO)

    if os.path.isabs(app.config['LOG_DIR']):
        log_dir_path = app.config['LOG_DIR']
        log_file_path = log_dir_path + app.config['LOG_FILE']

    else:
        # A relative LOG_DIR is resolved against this file's parent directory.
        here = os.path.dirname(os.path.abspath(__file__))
        log_dir_path = os.path.join(
            os.path.dirname(here), app.config['LOG_DIR'])
        log_file_path = log_dir_path + app.config['LOG_FILE']

    if not os.path.isdir(log_dir_path):
        os.makedirs(log_dir_path, mode=app.config['LOG_FILE_MODE'])

    # Touch the log file so the handler can open it.
    if not os.path.isfile(log_file_path):
        open(log_file_path, 'a').close()

    log_file_handler = FileHandler(filename=log_file_path, encoding='utf-8')
    log_file_handler.setLevel(log_level)
    log_file_handler.setFormatter(Formatter(
        '[%(asctime)s] [%(levelname)s] %(message)s %(module)s:%(funcName)s:%(lineno)d'
    ))

    app.logger.addHandler(log_file_handler)
    app.logger.setLevel(log_level)
Exemple #27
0
    def create_logger(self, debug, log_file):
        """Create the per-window ensime logger (file + console handlers).

        debug -- True selects DEBUG level, otherwise INFO.
        log_file -- truncated and stamped with an initialisation line
        before the FileHandler reopens it.
        Returns the configured logger.
        """
        logger = logging.getLogger("ensime-{}".format(self.window))
        file_log_formatter = logging.Formatter(LOG_FORMAT)
        console_log_formatter = logging.Formatter(CONSOLE_LOG_FORMAT)

        # Drop handlers left over from a previous initialisation.
        logger.handlers.clear()
        with open(log_file, "w") as f:
            now = datetime.datetime.now()
            tm = now.strftime("%Y-%m-%d %H:%M:%S.%f")
            f.write("{}: {} - {}\n".format(tm, "Initializing project", self.project_root))
        file_handler = FileHandler(log_file)
        file_handler.setFormatter(file_log_formatter)
        logger.addHandler(file_handler)

        console_handler = logging.StreamHandler()
        console_handler.setFormatter(console_log_formatter)
        logger.addHandler(console_handler)

        if debug:
            logger.setLevel(logging.DEBUG)
        else:
            logger.setLevel(logging.INFO)

        logger.info("Logger initialised.")
        return logger
Exemple #28
0
def configure_loggers(log, verbosity, log_file, log_verbosity):
    """Configure console logging on the root logger and file logging on *log*.

    log -- logger that receives the file handlers
    verbosity -- level for the stdout (root) logger
    log_file -- path for the file logs; falsy disables file logging
    log_verbosity -- level for the plain file handler
    Returns *log*.
    """
    LOGFMT_CONSOLE = (
        "[%(asctime)s] %(name)-10s %(levelname)-7s in %(module)s.%(funcName)s()," " line %(lineno)d\n\t%(message)s"
    )

    LOGFMT_FILE = (
        "[%(asctime)s] [%(process)d]%(name)-10s %(levelname)-7s in %(module)s.%(funcName)s(),"
        " line %(lineno)d\n\t%(message)s"
    )

    # Configure root logger to log to stdout
    logging.basicConfig(level=verbosity, datefmt="%H:%M:%S", format=LOGFMT_CONSOLE)

    # File handlers only make sense when a path was given.  Fixed: the
    # rotating handler was previously created BEFORE the ``if log_file``
    # check, crashing on a falsy path.
    if log_file:
        # Rotate log files (25 backups of ~100 kB each).
        rh = RotatingFileHandler(log_file, maxBytes=100000, backupCount=25)
        log.addHandler(rh)

        # Plain file handler at its own verbosity.
        # NOTE(review): both handlers write to the same file -- confirm
        # the duplication is intended.
        fh = FileHandler(log_file, "w")
        fh.setFormatter(Formatter(LOGFMT_FILE, "%Y-%m-%d %H:%M:%S"))
        fh.setLevel(log_verbosity)
        log.addHandler(fh)

    return log
Exemple #29
0
def setup_logging():
    """Attach JSON-line file logging to the Flask ``application`` and,
    outside debug mode, e-mail ERROR reports to all administrators."""
    log_formatter = Formatter(
        '''{"message_type":"%(levelname)s","location":"%(pathname)s","line_number":%(lineno)d,"module":"%(module)s","function":"%(funcName)s","time":"%(asctime)s","message":"%(message)s"}''')  # pylint: disable=C0301
    fh = FileHandler('flask_logs.log')
    fh.setLevel(INFO)
    fh.setFormatter(log_formatter)
    application.logger.addHandler(fh)
    application.logger.setLevel(INFO)
    if not application.debug:
        # Look up administrator addresses and wire an SMTP handler that
        # mails formatted error reports to all of them.
        from application.models import Administrator
        dbadmins = Administrator.query.all()
        if dbadmins is not None:
            emails = [dbadmin.email for dbadmin in dbadmins]
            # NOTE(review): SMTP credentials are hard-coded here; move
            # them into configuration / secrets management.
            emailErrorHandler = TlsSMTPHandler(
                ('smtp.gmail.com', 587),
                '*****@*****.**',
                emails,
                'Server Error',
                ('*****@*****.**', 'ANRISNTPTV')
            )
            emailErrorHandler.setFormatter(Formatter(
                '''
    Message type:       %(levelname)s
    Location:           %(pathname)s:%(lineno)d
    Module:             %(module)s
    Function:           %(funcName)s
    Time:               %(asctime)s

    Message:

    %(message)s
    '''))
            application.logger.addHandler(emailErrorHandler)
Exemple #30
0
from flask import request, jsonify, make_response, send_from_directory, render_template
from werkzeug.utils import secure_filename
from flask_sqlalchemy import SQLAlchemy
from uuid import uuid4  # for public id
from werkzeug.security import generate_password_hash, check_password_hash
from datetime import datetime, timedelta
from logging import FileHandler, INFO
from os import path, makedirs, listdir
from flask_limiter import Limiter
from flask_limiter.util import get_remote_address

from assignment.database import *
from assignment.models import *

# --- Module-level app configuration -----------------------------------
# NOTE(review): ``app`` is not imported above; it is assumed to come
# from the star imports (assignment.database / assignment.models) --
# confirm.

# Log INFO and above to server.log.
file_handler = FileHandler('server.log')
app.logger.addHandler(file_handler)
app.logger.setLevel(INFO)

PROJECT_HOME = path.dirname(path.realpath(__file__))
UPLOAD_FOLDER = '{}/uploads/'.format(PROJECT_HOME)
app.config['UPLOAD_FOLDER'] = UPLOAD_FOLDER
app.config['MAX_CONTENT_LENGTH'] = 16 * 1024 * 1024  #16MB
ALLOWED_EXTENSIONS = ['png', 'jpg', 'jpeg', 'gif']

# NOTE(review): presumably maps identifiers to uploaded file names --
# verify against the call sites.
uploaded_file_name = {}

# default value
limiter = Limiter(app,
                  key_func=get_remote_address,
                  default_limits=["200 per day", "50 per hour"])
Exemple #31
0
class MainBase(wx.App):
	def __init__(self):
		"""Initialise the application."""
		super().__init__()

		# Detect the execution environment (frozen exe or interpreter).
		self.frozen=hasattr(sys,"frozen")

		# Initial setup steps.
		self.InitLogger()
		self.LoadSettings()
		try:
			if self.config["general"]["locale"]!=None:
				locale.setlocale(locale.LC_TIME,self.config["general"]["locale"])
			else:
				locale.setlocale(locale.LC_TIME)
		except:
			# Fall back to the default locale and clear the bad setting.
			locale.setlocale(locale.LC_TIME)
			self.config["general"]["locale"]=""
		self.SetTimeZone()
		self.InitTranslation()
		self.InitSpeech()
		# Warn if the log file handler is not available.
		if not self.log.hasHandlers():
			simpleDialog.errorDialog(_("ログ機能の初期化に失敗しました。下記のファイルへのアクセスが可能であることを確認してください。") + "\n" + os.path.abspath(constants.LOG_FILE_NAME))

	def InitSpeech(self):
		# Prepare screen-reader speech output; fall back to no speech on error.
		try:
			self._InitSpeech()
		except OutputError as e:
			self.log.error("Failed to initialize speech output.")
			self.log.error(traceback.format_exc())
			simpleDialog.winDialog(_("音声エンジンエラー"), _("音声読み上げ機能の初期化に失敗したため、読み上げ機能を使用できません。出力先の変更をお試しください。"))
			self.speech = accessible_output2.outputs.nospeech.NoSpeech()

	def _InitSpeech(self):
		# Select the speech backend according to the configured reader.
		reader=self.config["speech"]["reader"]
		if(reader=="PCTK"):
			self.log.info("use reader 'PCTalker'")
			self.speech=accessible_output2.outputs.pc_talker.PCTalker()
		elif(reader=="NVDA"):
			self.log.info("use reader 'NVDA'")
			self.speech=accessible_output2.outputs.nvda.NVDA()
		# SAPI4 appears to be buggy, so it is disabled.
#		elif(reader=="SAPI4"):
#			self.log.info("use reader 'SAPI4'")
#			self.speech=accessible_output2.outputs.sapi4.Sapi4()
		elif(reader=="SAPI5"):
			self.log.info("use reader 'SAPI5'")
			self.speech=accessible_output2.outputs.sapi5.SAPI5()
		elif(reader=="AUTO"):
			self.log.info("use reader 'AUTO'")
			self.speech=accessible_output2.outputs.auto.Auto()
		elif(reader=="JAWS"):
			self.log.info("use reader 'JAWS'")
			self.speech=accessible_output2.outputs.jaws.Jaws()
		elif(reader=="CLIPBOARD"):
			self.log.info("use reader 'CLIPBOARD'")
			self.speech=accessible_output2.outputs.clipboard.Clipboard()
		elif(reader=="NOSPEECH"):
			self.log.info("use reader 'NOSPEECH'")
			self.speech=accessible_output2.outputs.nospeech.NoSpeech()
		else:
			# Unknown value: reset the setting and fall back to AUTO.
			self.config.set("speech","reader","AUTO")
			self.log.warning("Setting missed! speech.reader reset to 'AUTO'")
			self.speech=accessible_output2.outputs.auto.Auto()

	def InitLogger(self):
		"""Initialise and prepare the logging facility."""
		try:
			self.hLogHandler=FileHandler(constants.LOG_FILE_NAME, mode="w", encoding="UTF-8")
			self.hLogHandler.setLevel(logging.DEBUG)
			self.hLogFormatter=Formatter("%(name)s - %(levelname)s - %(message)s (%(asctime)s)")
			self.hLogHandler.setFormatter(self.hLogFormatter)
			logger=getLogger(constants.LOG_PREFIX)
			logger.setLevel(logging.DEBUG)
			logger.addHandler(self.hLogHandler)
		except Exception as e:
			traceback.print_exc()
		self.log=getLogger(constants.LOG_PREFIX+".Main")
		r="executable" if self.frozen else "interpreter"
		self.log.info("Starting"+constants.APP_NAME+" "+constants.APP_VERSION+" as %s!" % r)

	def LoadSettings(self):
		"""Read the settings file; on first run apply defaults and write it."""
		self.config = DefaultSettings.DefaultSettings.get()
		if not self.config.read(constants.SETTING_FILE_NAME):
			# First launch: seed the file with default values.
			self.config.read_dict(DefaultSettings.initialValues)
			self.config.write()
		self.hLogHandler.setLevel(self.config.getint("general","log_level",20,0,50))

	def InitTranslation(self):
		"""Initialise gettext translation."""
		loc = locale.getdefaultlocale()[0].replace("_", "-")
		lang=self.config.getstring("general","language","",constants.SUPPORTING_LANGUAGE.keys())
		if lang == "":
			if loc in list(constants.SUPPORTING_LANGUAGE.keys()):
				self.config["general"]["language"] = loc
			else:
				# Show the language selection dialog.
				langSelect = views.langDialog.langDialog()
				langSelect.Initialize()
				langSelect.Show()
				self.config["general"]["language"] = langSelect.GetValue()
			lang = self.config["general"]["language"]
		self.translator=gettext.translation("messages","locale", languages=[lang], fallback=True)
		self.translator.install()

	def GetFrozenStatus(self):
		"""Return True when running as a compiled exe, False under the interpreter."""
		return self.frozen

	def say(self,s,interrupt=False):
		"""Speak (and braille) *s* through the screen reader."""
		self.speech.speak(s, interrupt=interrupt)
		self.speech.braille(s)

	def SetTimeZone(self):
		# Derive the local timezone from the Windows timezone bias.
		bias=win32api.GetTimeZoneInformation(True)[1][0]*-1
		hours=bias//60
		minutes=bias%60
		self.timezone=datetime.timezone(datetime.timedelta(hours=hours,minutes=minutes))

	def getAppPath(self):
		"""Return the absolute path of the application."""
		if self.frozen:
			# Running as a compiled exe.
			return sys.executable
		else:
			# Running as a py file.
			return os.path.join(os.path.dirname(__file__), os.path.basename(sys.argv[0]))
Exemple #32
0
  # TODO: on unsuccessful db insert, flash an error instead.
  # e.g., flash('An error occurred. Show could not be listed.')
  # see: http://flask.pocoo.org/docs/1.0/patterns/flashing/
  return render_template('pages/home.html')

@app.errorhandler(404)
def not_found_error(error):
    """Serve the custom 404 page for unknown URLs."""
    body = render_template('errors/404.html')
    return body, 404

@app.errorhandler(500)
def server_error(error):
    """Serve the custom 500 page on unhandled server errors."""
    body = render_template('errors/500.html')
    return body, 500


# Outside debug mode, mirror INFO-and-above records to a local error.log file.
if not app.debug:
    file_handler = FileHandler('error.log')
    file_handler.setFormatter(
        Formatter('%(asctime)s %(levelname)s: %(message)s [in %(pathname)s:%(lineno)d]')
    )
    # Both the logger and the handler are capped at INFO
    app.logger.setLevel(logging.INFO)
    file_handler.setLevel(logging.INFO)
    app.logger.addHandler(file_handler)
    app.logger.info('errors')

#----------------------------------------------------------------------------#
# Launch.
#----------------------------------------------------------------------------#

# Default port:
if __name__ == '__main__':
    app.run()
Exemple #33
0
 def open_logfile():
     """Attach a FileHandler for the daemon log file to the module logger.

     Falls back to "deluged.log" in the config directory when the
     --logfile option was not supplied.  Previously the handler was only
     created inside that fallback branch, so an explicitly requested log
     file was silently ignored; the handler is now attached in all cases.
     """
     if not options.logfile:
         # No explicit log file requested: default to the config directory
         options.logfile = deluge.configmanager.get_config_dir("deluged.log")
     file_handler = FileHandler(options.logfile)
     log.addHandler(file_handler)
Exemple #34
0
    fpr, tpr, thr = roc_curve(y, pred, pos_label=1)
    g = 2 * auc(fpr, tpr) - 1
    return g


if __name__ == '__main__':

    log_fmt = Formatter(
        '%(asctime)s %(name)s %(lineno)d [%(levelname)s][%(funcName)s] %(message)s '
    )
    # Console handler: INFO and above
    handler = StreamHandler()
    handler.setLevel('INFO')
    handler.setFormatter(log_fmt)
    logger.addHandler(handler)

    # File handler: full DEBUG output appended to DIR + 'train.py.log'
    handler = FileHandler(DIR + 'train.py.log', 'a')
    handler.setLevel(DEBUG)
    handler.setFormatter(log_fmt)
    logger.setLevel(DEBUG)
    logger.addHandler(handler)

    logger.info('start')

    # Split features from the 'target' label column
    df = load_train_data()

    x_train = df.drop('target', axis=1)
    y_train = df['target'].values

    use_cols = x_train.columns.values

    logger.debug('train columns: {} {}'.format(use_cols.shape, use_cols))
Exemple #35
0
# Flask app configured to render Mako templates from ./tpl,
# HTML-escaping ("h") every interpolation by default.
app = Flask(__name__)
app.template_folder = "tpl"
app.config["MAKO_DEFAULT_FILTERS"] = ["h"]
mako.init_app(app)

class RegexConverter(BaseConverter):
    """URL converter that matches a caller-supplied regular expression.

    Used in routes as /<regex("pattern"):name>; the first converter
    argument becomes the pattern applied by the routing map.
    """

    def __init__(self, url_map, *items):
        # Modernized: zero-argument super() (the file already relies on
        # Python-3-only features elsewhere).
        super().__init__(url_map)
        # The first positional argument is the regex for this rule.
        self.regex = items[0]


# Register the regex URL converter and set up the application logger,
# writing INFO-and-above records to data/trackon.log.
app.url_map.converters["regex"] = RegexConverter
logger = getLogger("newtrackon_logger")
logger.setLevel(INFO)
handler = FileHandler("data/trackon.log")
logger_format = Formatter("%(asctime)s - %(message)s")
handler.setFormatter(logger_format)
logger.addHandler(handler)
logger.info("Server started")


@app.route("/")
def main():
    """Render the main page with the full, formatted tracker list."""
    trackers = utils.format_uptime_and_downtime_time(db.get_all_data())
    return render_template("main.mako", trackers=trackers, active="main")


@app.route("/", methods=["POST"])
def new_trackers():
Exemple #36
0
def SetLogFile(LogFile):
    """Point the debug/info/error module loggers at LogFile.

    Any existing file at LogFile is removed first, and each logger gets
    its own FileHandler with its own formatter.
    """
    if os.path.exists(LogFile):
        # Start from an empty log file on every call
        remove(LogFile)

    wiring = (
        (_DEBUG_FORMATTER, _DEBUG_LOGGER),
        (_INFO_FORMATTER, _INFO_LOGGER),
        (_ERROR_FORMATTER, _ERROR_LOGGER),
    )
    for fmt, target_logger in wiring:
        channel = FileHandler(LogFile)
        channel.setFormatter(fmt)
        target_logger.addHandler(channel)
Exemple #37
0
        # Delete the old record
        c.execute("DELETE FROM users WHERE  encoded_user_id = '%s'" %
                  token['encoded_user_id'])
    c.execute("INSERT INTO users VALUES ('%s', '%s', '%s')" %
              (token['encoded_user_id'], token['oauth_token'],
               token['oauth_token_secret']))
    conn.commit()
    conn.close()
    return render_template('callback.html')


if __name__ == '__main__':
    """ Set up file debugging"""
    # Append DEBUG-and-above records to LOG_FILE
    formatter = Formatter(
        '%(asctime)s - %(name)s - %(levelname)s - %(message)s')
    file_handler = FileHandler(LOG_FILE, mode='a')
    file_handler.setFormatter(formatter)
    file_handler.setLevel(logging.DEBUG)
    """ Set add both handlers"""
    app.logger.addHandler(file_handler)
    app.debug = APP_DEBUG
    app.logger.debug('About to call FitBitOauthClient()')
    app.run()


def get_db():
    """Return the request-scoped database connection, creating it on first use."""
    if getattr(g, '_database', None) is None:
        # Lazily open the connection and cache it on the app-context global
        g._database = connect_to_database()
    return g._database
Exemple #38
0
		print(decoded)

	def flush(self):
		# No-op: present only so this object satisfies the file-like
		# interface expected by logging.StreamHandler.
		pass


# Two output channels: 'live' streams JSON-shaped records to the in-memory
# queue consumer, 'file' appends plain text to diary.log.
LOGS = {
	'live': {
		'handler': StreamHandler(LiveStream(LOG_QUEUE)),
		'formatter': Formatter("""{
			"level": "%(levelname)s",
			"message": "%(message)s"
		}"""),
	},
	'file': {
		'handler': FileHandler('diary.log'),
		'formatter': Formatter('%(asctime)s - %(levelname)s - %(message)s'),
	}
}

logger = logging.getLogger('distress')
logger.setLevel(logging.INFO)

# Attach every configured handler with its paired formatter
for info in LOGS.values():
	info['handler'].setFormatter(info['formatter'])
	logger.addHandler(info['handler'])


def debug(message):
	def log(func):
		@wraps(func)
Exemple #39
0
import logging
import sys
from logging import FileHandler
from logging import Formatter

LOG_FORMAT = ("%(message)s")
LOG_LEVEL = logging.INFO

# Results logger: one log file named after the first CLI argument
RESULTADOS_LOG_FILE = str(sys.argv[1]) + ".log"

resultados_logger = logging.getLogger(str(sys.argv[1]))
resultados_logger.setLevel(LOG_LEVEL)
resultados_logger_file_handler = FileHandler(RESULTADOS_LOG_FILE)
resultados_logger_file_handler.setLevel(LOG_LEVEL)
resultados_logger_file_handler.setFormatter(Formatter(LOG_FORMAT))
resultados_logger.addHandler(resultados_logger_file_handler)

# Round-progress logger (original comment said "payments" — appears copy-pasted)
PROGRESSO_LOG_FILE = str(sys.argv[1]) + "_progresso_rodadas.log"
progresso_logger = logging.getLogger(str(sys.argv[1]) + "_progresso_rodadas")

progresso_logger.setLevel(LOG_LEVEL)
progresso_file_handler = FileHandler(PROGRESSO_LOG_FILE)
progresso_file_handler.setLevel(LOG_LEVEL)
progresso_file_handler.setFormatter(Formatter(LOG_FORMAT))
progresso_logger.addHandler(progresso_file_handler)

# Phase 1 results file
RESULTADOS_1FASE_LOG_FILE = str(sys.argv[1]) + "_1fase.log"
Exemple #40
0
        endTime = unix_time_millis(datetime.datetime.now())
        totalTime = endTime - startTime
        response_data['vthResponse']['testDuration'] = totalTime
        response_data['vthResponse']['abstractMessage'] = 'error: ' + str(ex)
        app.logger.error('ERROR:{}'.format(str(ex)))
        return jsonify(response_data)

    #finish up building response
    endTime = unix_time_millis(datetime.datetime.now())
    totalTime = endTime - startTime
    response_data['vthResponse']['testDuration'] = totalTime
    if ret_url is not None:
        sendCallback(ret_url, response_data)
        return '', 200
    return jsonify(response_data), 200


@app.route("/otf/vth/oran/smo/v1/health", methods=['GET'])
def getHealth():
    """Liveness probe endpoint: always reports the service as up."""
    status = 'UP'
    return status


if __name__ == '__main__':
    # Append INFO-and-above records to the VTH log file
    logHandler = FileHandler('smo-o1-vth.log', mode='a')
    logHandler.setLevel(logging.INFO)
    app.logger.setLevel(logging.INFO)
    app.logger.addHandler(logHandler)
    # TLS variant kept for reference:
    # context = ('opt/cert/otf.pem', 'opt/cert/privateKey.pem')
    # app.run(debug = False, host = '0.0.0.0', port = 5000, ssl_context = context)
    app.run(debug=False, host='0.0.0.0', port=5000)
Exemple #41
0
if __name__ == '__main__':
    import pandas as pd
    from functools import partial
    from logging import StreamHandler, DEBUG, Formatter, FileHandler, NullHandler

    log_fmt = Formatter(
        '%(asctime)s %(name)s %(lineno)d [%(levelname)s][%(funcName)s] %(message)s '
    )

    # Console handler at DEBUG
    handler = StreamHandler()
    handler.setLevel('DEBUG')
    handler.setFormatter(log_fmt)
    logger.setLevel('DEBUG')
    logger.addHandler(handler)

    # File handler appending to <script>.log, also at DEBUG
    handler = FileHandler(os.path.basename(__file__) + '.log', 'a')
    handler.setLevel(DEBUG)
    handler.setFormatter(log_fmt)
    logger.setLevel(DEBUG)
    logger.addHandler(handler)

    # Solve the small instance first...
    filepath = 'cities1000.csv'
    df = pd.read_csv(filepath)
    func = partial(score_path, filepath)
    gd = SA(df.shape[0], df[['X', 'Y']].values, filepath, func, init_sol=None)
    gd.solve()

    # ...then the larger one
    filepath = 'cities10000.csv'
    df = pd.read_csv(filepath)
    func = partial(score_path, filepath)
    gd = SA(df.shape[0], df[['X', 'Y']].values, filepath, func, init_sol=None)
Exemple #42
0
    df_que2 = pandas.concat([df1, df2], ignore_index=True)
    df_que2 = df_que2.drop_duplicates().fillna('')
    logger.info('df_que2 {}'.format(df_que2.shape))
    df_que2['qid'] = numpy.arange(df_que2.shape[0])

    map_test = dict(zip(df_que2['question'], range(df_que2.shape[0])))

    return map_train, map_test, train_num


if __name__ == '__main__':
    from logging import StreamHandler, DEBUG, Formatter, FileHandler

    log_fmt = Formatter(
        '%(asctime)s %(name)s %(lineno)d [%(levelname)s][%(funcName)s] %(message)s '
    )
    # File handler: truncate doc2vec.py.log and capture DEBUG output
    handler = FileHandler('doc2vec.py.log', 'w')
    handler.setLevel(DEBUG)
    handler.setFormatter(log_fmt)
    logger.setLevel(DEBUG)
    logger.addHandler(handler)

    # Console handler: INFO and above (note: this also raises the
    # logger's own level from DEBUG to INFO)
    handler = StreamHandler()
    handler.setLevel('INFO')
    handler.setFormatter(log_fmt)
    logger.setLevel('INFO')
    logger.addHandler(handler)

    # load_data()
    train()
Exemple #43
0
def addHandler(*,
               handler=None,
               stream=None,
               filename=None,
               filemode='a',
               format=None,
               datefmt=None,
               style='{',
               level=None,
               max_level=None,
               filters=(),
               logger=None):
    """Create (or reuse) a fully configured handler and attach it to a logger.

    stream, filename, filemode, format, datefmt: as per logging.basicConfig
    handler: use a precreated handler instead of creating a new one
    logger: logger to add the handler to (uses root logger if none specified)
    filters: an iterable of filters to add to the handler
    level: only messages of this level and above will be processed
    max_level: only messages of this level and below will be processed
    style: as per logging.basicConfig, but defaults to '{' (i.e. str.format)

    Returns the attached handler.
    """
    # Build a handler unless the caller supplied one ready-made:
    # file-based when a filename is given, stream-based otherwise.
    if handler is None:
        handler = FileHandler(filename, filemode) if filename is not None else StreamHandler(stream)
    # str.format-style formatting by default (unlike basicConfig's %-style)
    handler.setFormatter(Formatter(format, datefmt, style))
    # Lower bound comes from the handler's own level...
    if level is not None:
        handler.setLevel(level)
    # ...and the upper bound from a filter rejecting higher levels.
    if max_level is not None:
        handler.addFilter(lambda record: record.levelno <= max_level)
    for extra_filter in filters:
        handler.addFilter(extra_filter)
    # Attach to the requested logger (root when unspecified).
    target = logger if logger is not None else getLogger()
    target.addHandler(handler)
    return handler
Exemple #44
0
                body=_body,
                author=config['GENERAL']['acc_name'],
                tags=config['POSTER']['tags'].replace(' ', '').split(','),
                self_vote=config.getboolean('POSTER', 'self_vote'),
                app="https://github.com/PortalMine/portalvotes"))
    except MissingKeyError as err:
        log.exception(err)
        if not config.getboolean('GENERAL', 'testing'):
            exit(1)
    shared_steem_instance().wallet.lock()


# Build the handler list from config: optional file handler (with NAME/PID
# placeholders substituted) plus an optional console handler.
handlers = []
if config.getboolean('LOGGING', 'to_file'):
    handlers.append(
        FileHandler(filename=config['LOGGING']['log_file'].replace(
            'NAME', 'poster').replace('PID', '')))
if config.getboolean('LOGGING', 'to_console'):
    handlers.append(logging.StreamHandler())

logging.basicConfig(
    level=config['LOGGING']['level_main'].upper(),
    format='%(asctime)s | %(name)s -> %(levelname)s: %(message)s',
    handlers=handlers)
del handlers

# The 'poster' logger may use a different level than the root logger
log = logging.getLogger('poster')
log.setLevel(level=config['LOGGING']['level'].upper())
log.info('Try publish.')

t = datetime.now()
date = '{!s}.{!s}.{!s}'.format(t.day, t.month, t.year)
from mysql_connector import MysqlConnector
from datetime import datetime, timedelta
import oanda_wrapper
import re
import traceback
import numpy as np

from logging import getLogger, FileHandler, DEBUG
from send_mail import SendMail

# CLI: first argument selects the run mode; the debug log file name embeds
# mode, script name, and a start timestamp.
mode = sys.argv[1]
filename = sys.argv[0].split(".")[0]
print(filename)
debug_logfilename = "%s-%s-%s.log" % (mode, filename, datetime.now().strftime("%Y%m%d%H%M%S"))
debug_logger = getLogger("debug")
debug_fh = FileHandler(debug_logfilename, "a+")
debug_logger.addHandler(debug_fh)
debug_logger.setLevel(DEBUG)

con = MysqlConnector()

# NOTE(review): the first instrument_list is immediately overwritten by the
# second — presumably an experiment left in place; confirm which is intended.
instrument_list = ["EUR_GBP", "EUR_USD", "EUR_JPY", "GBP_USD", "GBP_JPY", "USD_JPY"]
instrument_list = ["GBP_JPY", "EUR_JPY", "AUD_JPY", "GBP_USD", "EUR_USD", "AUD_USD", "USD_JPY"]
#insert_time = '2019-04-01 07:00:00'
insert_time = '2019-07-10 20:00:00'
insert_time = datetime.strptime(insert_time, "%Y-%m-%d %H:%M:%S")
now = datetime.now()
#end_time = datetime.strptime('2019-07-06 00:00:00', "%Y-%m-%d %H:%M:%S")
end_time = datetime.strptime('2019-07-13 08:00:00', "%Y-%m-%d %H:%M:%S")

def decide_season(base_time):

# a more complex example
@contextmanager
def error_logging(logger, level):
    """Temporarily switch *logger* to *level* for the duration of the block.

    The previous level is always restored, even if the block raises.
    """
    previous = logger.level
    try:
        logger.setLevel(level)
        yield
    finally:
        logger.setLevel(previous)


if __name__ == "__main__":
    # NOTE(review): Logger is instantiated directly rather than via
    # logging.getLogger — unusual; confirm this is intentional for the demo.
    logger = Logger('name', 20)
    handler = FileHandler('flog.log')
    logger.addHandler(handler)
    logger.info('this will get logged')
    # Inside the context the level is raised to 30 (WARNING), so INFO is dropped
    with error_logging(logger, 30):
        logger.info('this will not get logged')
    logger.info('this will get logged because the level is {}'.format(
        logger.level))


class Simple_obj(object):
    """Trivial demo container holding a single value."""
    def __init__(self, arg):
        """Store arg verbatim as some_property."""
        self.some_property = arg


'''
s = Simple_obj(5)
Exemple #47
0
def loop2():
	"""Run the cronjob/runLoop2 console command forever, once a minute."""
	command = 'php -d max_execution_time=120 runconsole.php cronjob/runLoop2'
	while True:
		doCommand(command)
		time.sleep(60)

def appmain():
	"""Run the cronjob/run console command forever, every 90 seconds."""
	command = 'php -d max_execution_time=120 runconsole.php cronjob/run'
	while True:
		doCommand(command)
		time.sleep(90)

def main():
	"""Change into the work directory and launch the three worker threads."""
	os.chdir(WORKDIR)
	workers = [threading.Thread(target=job) for job in (appmain, loop2, blocks)]
	for worker in workers:
		worker.start()


if __name__ == "__main__":

	# Route this module's log records to LOGFILE, then start the workers
	logger = getLogger(__name__)
	logger.addHandler(FileHandler(LOGFILE))
	main()


    app.jinja_env.globals['recent_posts'] = postClass.get_posts(10, 0)['data']
    app.jinja_env.globals['tags'] = postClass.get_tags()['data']


@app.errorhandler(404)
def page_not_found(error):
    """Serve the blog's custom 404 page."""
    rendered = render_template('404.html', meta_title='404')
    return rendered, 404


@app.template_filter('formatdate')
def format_datetime_filter(input_value, format_="%Y %m %d"):
    """Jinja filter: format a date/datetime via strftime (default "%Y %m %d")."""
    formatted = input_value.strftime(format_)
    return formatted


# Wire the data-access helpers to the app configuration
settingsClass = settings.Settings(app.config)
postClass = post.Post(app.config)
userClass = user.User(app.config)

app.jinja_env.globals['url_for_other_page'] = url_for_other_page
app.jinja_env.globals['meta_description'] = app.config['BLOG_DESCRIPTION']

# In production, capture WARNING-and-above records in the configured log file
if not app.config['DEBUG']:
    import logging
    from logging import FileHandler
    file_handler = FileHandler(app.config['LOG_FILE'])
    file_handler.setLevel(logging.WARNING)
    app.logger.addHandler(file_handler)

if __name__ == '__main__':
    app.run(host="0.0.0.0", debug=app.config['DEBUG'])
        logger.info('best score: %s' % min_score)
        logger.info('best_param: %s' % (min_params))

    clf = LogisticRegression(**min_params)
    clf.fit(x, y)

    return clf


if __name__ == "__main__":
    from logging import StreamHandler, DEBUG, Formatter, FileHandler

    log_fmt = Formatter(
        '%(asctime)s %(name)s %(lineno)d [%(levelname)s][%(funcName)s] %(message)s '
    )
    # File handler: truncate mxnet_train.log and capture DEBUG output
    handler = FileHandler('mxnet_train.log', 'w')
    handler.setLevel(DEBUG)
    handler.setFormatter(log_fmt)
    logger.setLevel(DEBUG)
    logger.addHandler(handler)

    # Console handler, also at DEBUG
    handler = StreamHandler()
    handler.setLevel(DEBUG)
    handler.setFormatter(log_fmt)
    logger.setLevel(DEBUG)
    logger.addHandler(handler)

    # clf = train_lightgbm(verbose=False)
    clf = train_xgboost()
    # Persist the fitted model with the highest pickle protocol
    with open('model.pkl', 'wb') as f:
        pickle.dump(clf, f, -1)
Exemple #50
0
def init_logging(log_file=DEFAULT_LOG_FILE,
                 log_level=None,
                 max_bytes=102400,
                 max_files=5,
                 log_format=None):
    ''' Initialise the root logger. If the default log file is used, a
    rotating file-handler is added to the logger, otherwise a standard
    FileHandler. In addition the output is streamed to stdout.
    :param log_file: log file to use
    :param log_level: log level (name or numeric)
    :param max_bytes: maximum bytes for the RotatingFileHandler
    :param max_files: maximum number of files for the RotatingFileHandler
    :param log_format: standard or compact
    :returns: the initialized root logger.
     '''
    from logging import FileHandler
    from logging.handlers import RotatingFileHandler
    logger = logging.getLogger('')
    # Custom level between ERROR (40) and CRITICAL (50)
    logging.addLevelName(65, 'EWRT_INFO')

    if not log_format or log_format not in LOG_FORMAT:
        log_format = 'standard'

    # Drop any handlers installed by earlier calls so we start clean
    if len(logger.handlers):
        logger.handlers = []

    # NOTE(review): basestring exists only on Python 2 — on Python 3 this
    # branch raises NameError for string levels; confirm the target version.
    if log_level and isinstance(log_level, basestring):
        log_level = log_level.upper()
        if log_level in LOG_LEVELS:
            log_level = LOG_LEVELS[log_level]
        else:
            print('log_level %s not found using "ERROR"' % log_level)

    if not log_level:
        log_level = DEFAULT_LOG_LEVEL

    # Console handler is always attached
    hdlr = logging.StreamHandler()

    logger.addHandler(hdlr)
    hdlr.setLevel(log_level)
    logger.setLevel(log_level)
    formatter = logging.Formatter(LOG_FORMAT.get(log_format))
    hdlr.setFormatter(formatter)

    # setting loglevel of tldextract to ERROR to prevent extensive messages
    tld_logger = logging.getLogger('tldextract')
    tld_logger.setLevel(logging.ERROR)

    try:
        file_hdlr = None

        if log_file:
            if log_file == DEFAULT_LOG_FILE:
                # Default location: rotate to bound disk usage
                file_hdlr = RotatingFileHandler(log_file,
                                                maxBytes=max_bytes,
                                                backupCount=max_files,
                                                encoding='utf-8')
            else:
                # Custom location: make sure the directory exists first
                log_dir = dirname(log_file)

                if not exists(log_dir):
                    makedirs(log_dir)

                file_hdlr = FileHandler(filename=log_file, encoding='utf-8')

        if file_hdlr:
            file_hdlr.setLevel(log_level)
            file_hdlr.setFormatter(formatter)
            logger.addHandler(file_hdlr)

    except Exception as e:
        # Best-effort: file logging failures are reported but not fatal
        logger.error('Couldnt create LogHandler %s: %s' % (log_file, e))

    return logger
Exemple #51
0
from logging import getLogger, FileHandler, DEBUG, Formatter
from os import mkdir, listdir
from os.path import exists
import codecs
import sys
import gzip
from tqdm import tqdm
'''
Twitterのスクリーンネーム(@につづく名前),取得したTwitterのつぶやきデータを保存するリスト,リクエストを投げる際に必要なパラメータを
引数にとる.
返り値はつぶやきデータの入ったリストである.
ネットワークに繋がっていなかったり異常がある場合は強制終了する
'''

# Logger settings: shared file handler appending DEBUG-and-above records
# to getTweet.log (UTF-8); attached to the per-call logger in getTweet().
handler = FileHandler('getTweet.log', mode='a', encoding='utf_8')
handler.setLevel(DEBUG)
formatter = Formatter('%(asctime)s-%(name)s-%(levelname)s-%(message)s')
handler.setFormatter(formatter)


def getTweet(screen_name: str, params: dict, twitter_keys: list):
    """
    過去に遡ってツイートを取得する
    """
    # Set Logger
    logger_getTweet = getLogger('getTweet')
    logger_getTweet.setLevel(DEBUG)
    logger_getTweet.addHandler(handler)
    # initialize the list to save tweets
    save_list = []
Exemple #52
0
def note_and_log(cls):
    """
    This will be used as a decorator on class to activate
    logging and store messages in the variable cls._notes.
    This will allow quick access to events in the web app.

    A note can be added to cls._notes without logging if passing
    the argument log=false to function note().
    Something can be logged without adding a note using function log().
    """
    # Resolve per-class levels from an optional DEBUG_LEVEL attribute.
    # NOTE(review): if DEBUG_LEVEL exists but is neither 'debug' nor 'info',
    # file_level/console_level stay unbound and the code below raises
    # UnboundLocalError — confirm allowed values.
    if hasattr(cls, 'DEBUG_LEVEL'):
        if cls.DEBUG_LEVEL == 'debug':
            file_level = logging.DEBUG
            console_level = logging.DEBUG
        elif cls.DEBUG_LEVEL == 'info':
            file_level = logging.INFO
            console_level = logging.INFO
    else:
        file_level = logging.INFO
        console_level = logging.WARNING
    # Notes object: class-level parallel lists of timestamps and note text
    cls._notes = namedtuple('_notes', ['timestamp', 'notes'])
    cls._notes.timestamp = []
    cls._notes.notes = []

    # Defining log object
    cls.logname = '{} | {}'.format(cls.__module__, cls.__name__)
    root_logger = logging.getLogger()
    cls._log = logging.getLogger('BAC0')
    # NOTE(review): addHandler is being given a Logger, not a Handler —
    # looks wrong; confirm against the logging API.
    if not len(root_logger.handlers):
        root_logger.addHandler(cls._log)

    # Console Handler: named stderr/stdout streams; stdout only gets CRITICAL
    ch = logging.StreamHandler()
    ch.set_name('stderr')
    ch2 = logging.StreamHandler(sys.stdout)
    ch2.set_name('stdout')
    ch.setLevel(console_level)
    ch2.setLevel(logging.CRITICAL)

    formatter = logging.Formatter(
        '{asctime} - {levelname:<8}| {message}', style='{')

    # File Handler: ~/.BAC0/BAC0.log, skipped when the directory can't be made
    _PERMISSION_TO_WRITE = True
    logUserPath = expanduser('~')
    logSaveFilePath = join(logUserPath, '.BAC0')

    logFile = join(logSaveFilePath, 'BAC0.log')
    if not os.path.exists(logSaveFilePath):
        try:
            os.makedirs(logSaveFilePath)
        except:
            # Any failure (permissions, race, ...) disables file logging
            _PERMISSION_TO_WRITE = False
    if _PERMISSION_TO_WRITE:
        fh = FileHandler(logFile)
        fh.set_name('file_handler')
        fh.setLevel(file_level)
        fh.setFormatter(formatter)

    ch.setFormatter(formatter)
    ch2.setFormatter(formatter)
    # Add handlers the first time only...
    if not len(cls._log.handlers):
        if _PERMISSION_TO_WRITE:
            cls._log.addHandler(fh)
        cls._log.addHandler(ch)
        cls._log.addHandler(ch2)

#    cls._log.setLevel(logging.CRITICAL)

    def log_title(self, title, args=None, width=35):
        # Banner-style section header in the log
        cls._log.info("")
        cls._log.info("#"*width)
        cls._log.info("# {}".format(title))
        cls._log.info("#"*width)
        if args:
            cls._log.debug("{!r}".format(args))
            cls._log.debug("#"*35)

    def log_subtitle(self, subtitle, args=None, width=35):
        # Lighter-weight section header
        cls._log.info("")
        cls._log.info("="*width)
        cls._log.info("{}".format(subtitle))
        cls._log.info("="*width)
        if args:
            cls._log.debug("{!r}".format(args))
            cls._log.debug("="*width)

    def log(self, note, *, level=logging.DEBUG):
        """
        Add a log entry...no note
        """
        if not note:
            raise ValueError('Provide something to log')
        note = '{} | {}'.format(cls.logname, note)
        cls._log.log(level, note)

    def note(self, note, *, level=logging.INFO, log=True):
        """
        Add note to the object. By default, the note will also
        be logged

        :param note: (str) The note itself
        :param level: (logging.level)
        :param log: (boolean) Enable or disable logging of note
        """
        if not note:
            raise ValueError('Provide something to log')
        note = '{} | {}'.format(cls.logname, note)
        cls._notes.timestamp.append(datetime.now())
        cls._notes.notes.append(note)
        if log:
            # NOTE(review): log's signature is log(self, note, *, level=...),
            # so this passes `level` as self and `note` positionally —
            # looks like it should be cls.log(note, level=level); confirm.
            cls.log(level, note)

    @property
    def notes(self):
        """
        Retrieve notes list as a Pandas Series
        """
        if not _PANDAS:
            return dict(zip(self._notes.timestamp, self._notes.notes))
        return pd.Series(self._notes.notes, index=self._notes.timestamp)

    def clear_notes(self):
        """
        Clear notes object
        """
        cls._notes.timestamp = []
        cls._notes.notes = []

    # Add the functions to the decorated class
    cls.clear_notes = clear_notes
    cls.note = note
    cls.notes = notes
    cls.log = log
    cls.log_title = log_title
    cls.log_subtitle = log_subtitle
    return cls
import cv2
import pytesseract
import numpy

from messages import RequestMessage, AddRequestMessage, QueryRequestMessage, QueryResponseMessage, QueryResponseEntry

LOG = getLogger(__name__)
LOG.setLevel(DEBUG)

# Application constants: local paths and the image-host endpoint
SCREENSHOTS_DIRECTORY = Path('./screenshots')
ENTRY_LIST_PATH = Path('./entry_list.json')
STRING_ENCODING = 'utf-8'
IMG_HOST = 'http://localhost:4545'
LOG_PATH = './ocr_web_native_application.log'

# All module log output goes to LOG_PATH
file_handler = FileHandler(filename=LOG_PATH)
file_handler.setFormatter(
    Formatter('%(asctime)s - %(levelname)s - %(message)s'))
LOG.addHandler(hdlr=file_handler)


@dataclass
class Entry:
    url: str
    title: str
    timestamp_ms: int
    hash_value: bytes
    bloom_filter: BloomFilter

    @property
    def image_path(self) -> Path:
Exemple #54
0
    r = redis.Redis(host='localhost', port=6379, db=0)


# Open the Redis connection, then replace the logger's handlers with a
# file handler plus a stdout stream handler.
connect()

REALTIME_DELAY = timedelta(seconds=60)
MONITORING = False

if MONITORING:
    monitoring = Monitoring("reddit_feed",
                            logger=logger,
                            batch_size=50,
                            flush_on_exit=True)

formatter = logging.Formatter('%(asctime)s %(levelname)-5s %(message)s')
file_handler = FileHandler("reddit_feed.log")
file_handler.setFormatter(formatter)
# Fix: iterate a copy — removing handlers while iterating logger.handlers
# mutates the list under the loop and can skip every other handler.
for h in list(logger.handlers):
    logger.removeHandler(h)
logger.addHandler(file_handler)
logger.addHandler(StreamHandler(sys.stdout))


def serialize(thing):
    if isinstance(thing, Comment) or type(thing).__name__ == "comment":
        return {
            "_id":
            int(thing.id, 36),
            "author":
            str(thing.author) if thing.author is not None else None,
            "author_flair_text":
Exemple #55
0
from sklearn.metrics import mean_squared_error

import catboost as cat

import warnings
warnings.simplefilter('ignore')

utils.start(__file__)
#==============================================================================
# Logger
#==============================================================================
from logging import getLogger, FileHandler, Formatter, DEBUG
logger = getLogger(__name__)
logger.setLevel(DEBUG)

# Log file name embeds today's date as YYYYMMDD, e.g. logs/log_20190101
file_handler = FileHandler(os.path.join('logs', 'log_{}'.format(str(datetime.datetime.today().date()).replace('-', ''))))
formatter = Formatter('%(message)s')
file_handler.setFormatter(formatter)
file_handler.setLevel(DEBUG)

logger.addHandler(file_handler)
# Do not forward records to ancestor loggers
logger.propagate = False

#==============================================================================
PATH = os.path.join('..', 'data')

KEY = 'card_id'

SEED = 18
# SEED = np.random.randint(9999)
Exemple #56
0
from flask import Flask, request, Response
import os
from copr_keygen.exceptions import BadRequestException, \
    KeygenServiceBaseException

# Flask app configured from package defaults, optionally overridden by the
# file named in the COPR_KEYGEN_CONFIG environment variable.
app = Flask(__name__)
app.config.from_object("copr_keygen.default_settings")
app.config.from_envvar("COPR_KEYGEN_CONFIG", silent=True)


# setup logger: file logging is active in production, or in debug mode when
# DEBUG_WITH_LOG is set; skipped entirely when LOG_DIR does not exist.
if not app.config["DEBUG"] or app.config["DEBUG_WITH_LOG"]:
    filename = os.path.join(app.config["LOG_DIR"], "main.log")
    if os.path.exists(app.config["LOG_DIR"]):
        handler = FileHandler(filename)
        handler.setLevel(app.config["LOG_LEVEL"])
        handler.setFormatter(logging.Formatter(
            '%(asctime)s %(levelname)s'
            '[%(module)s:%(pathname)s:%(lineno)d]'
            ': %(message)s '
        ))
        logger = getLogger(__name__)
        logger.addHandler(handler)
        logger.setLevel(app.config["LOG_LEVEL"])

# end setup logger


from .logic import create_new_key, user_exists
## Errors

@app.errorhandler(403)
def not_found_error(error):
    """Serve the custom 403 (forbidden) page."""
    body = render_template('page_403.html')
    return body, 403

@app.errorhandler(404)
def not_found_error(error):
    """Serve the custom 404 page."""
    body = render_template('page_404.html')
    return body, 404

@app.errorhandler(500)
def internal_error(error):
    """Serve the custom 500 page on unhandled server errors."""
    body = render_template('page_500.html')
    return body, 500

## Logs

# Outside debug mode, mirror INFO-and-above records to error.log
if not app.debug:
    file_handler = FileHandler('error.log')
    # NOTE: 'format' shadows the builtin of the same name here
    format = '%(asctime)s %(levelname)s: %(message)s [in %(pathname)s:%(lineno)d]'
    file_handler.setFormatter(Formatter(format))
    app.logger.setLevel(logging.INFO)
    file_handler.setLevel(logging.INFO)
    app.logger.addHandler(file_handler)
    app.logger.info('errors')

if __name__ == '__main__':
    # run on port 5000 by default
    port = int(os.environ.get('PORT', 5000))
    app.run(host='0.0.0.0', port=port)
Exemple #58
0
# デレステのリザルト画面から、データを抜き出しCSVで返すスクリプト
from PIL import Image
from PIL import ImageOps
import json
import numpy as np
import os
from datetime import datetime
from update_tune_info import tune_info
import unicodedata
from sklearn.neighbors import KNeighborsClassifier
from functools import reduce
import glob

from logging import getLogger, FileHandler, DEBUG
# Append all DEBUG-and-above records to extract.log; do not propagate
# to ancestor loggers.
logger = getLogger(__name__)
handler = FileHandler(filename='extract.log', mode="a")
handler.setLevel(DEBUG)
logger.setLevel(DEBUG)
logger.addHandler(handler)
logger.propagate = False

# Stamp each run with its start time
logger.debug(datetime.now().strftime("[%y/%m/%d %H:%M:%S]"))


def yes_or_no(question):
    """
    ユーザーにyes/noを尋ね、結果をTrue/Falseで返す
    """
    while True:
        choice = input(question).lower()
        if choice in ['y', 'ye', 'yes']:
def parse_args():
    """Parse CLI arguments, optionally start an mlflow run logging every
    parameter, and optionally build a file logger.

    Returns a tuple (args, logger); logger is None unless --logging was
    given together with --log_file.

    NOTE(review): several help strings below repeat "whether using inbatch
    negative" / "model save path" verbatim — they appear copy-pasted and
    likely do not describe their options; confirm intended descriptions.
    """
    parser = argparse.ArgumentParser()
    parser.add_argument("--model_name",
                        type=str,
                        help="bert-name used for biencoder")
    parser.add_argument("--model_path", type=str, help="model save path")
    parser.add_argument("--index_path", type=str, help="model save path")
    parser.add_argument("--load_index",
                        action="store_true",
                        help="model save path")
    parser.add_argument("--mention_dataset",
                        type=str,
                        help="mention dataset path")
    parser.add_argument("--category", type=str, help="mention dataset path")
    parser.add_argument("--candidate_dataset",
                        type=str,
                        help="candidate dataset path")
    parser.add_argument("--candidate_preprocessed",
                        action="store_true",
                        help="whether candidate_dataset is preprocessed")
    parser.add_argument("--builder_gpu",
                        action="store_true",
                        help="bert-name used for biencoder")
    parser.add_argument("--max_ctxt_len",
                        type=int,
                        help="maximum context length")
    parser.add_argument("--max_title_len",
                        type=int,
                        help="maximum title length")
    parser.add_argument("--max_desc_len",
                        type=int,
                        help="maximum description length")
    parser.add_argument("--mlflow",
                        action="store_true",
                        help="whether using inbatch negative")
    parser.add_argument("--parallel",
                        action="store_true",
                        help="whether using inbatch negative")
    parser.add_argument("--fp16",
                        action="store_true",
                        help="whether using inbatch negative")
    parser.add_argument('--fp16_opt_level', type=str, default="O1")
    parser.add_argument("--logging",
                        action="store_true",
                        help="whether using inbatch negative")
    parser.add_argument("--log_file",
                        type=str,
                        help="whether using inbatch negative")

    args = parser.parse_args()

    # Record every CLI parameter in mlflow when tracking is enabled
    if args.mlflow:
        mlflow.start_run()
        arg_dict = vars(args)
        for key, value in arg_dict.items():
            mlflow.log_param(key, value)

    logger = None

    if args.logging:
        logger = getLogger(__name__)
        #handler = StreamHandler()

        logger.setLevel(DEBUG)
        #handler.setLevel(DEBUG)
        formatter = Formatter(
            '%(asctime)s - %(name)s - %(levelname)s - %(message)s')
        #handler.setFormatter(formatter)
        #logger.addHandler(handler)

        # File output is only attached when --log_file was supplied
        if args.log_file:
            fh = FileHandler(filename=args.log_file)
            fh.setLevel(DEBUG)
            fh.setFormatter(formatter)
            logger.addHandler(fh)

    return args, logger
Exemple #60
-1
def start(resultdir: str, configfile: str):
    basepath = Path(resultdir)
    print(("storing results in {}".format(basepath)))

    now = datetime.now()
    timestamp = now.strftime("%Y_%m_%d-%H_%M_%S")
    experiment_path = basepath.joinpath(timestamp + "_" + STRATEGY)
    num = 2
    while experiment_path.exists():
        experiment_path = basepath.joinpath(timestamp + "_" + "v" + str(num))
    experiment_path.mkdir()

    MODULE_LOGGER_NAME = 'salma'
    # logging.config.fileConfig("experiment01.logging.conf")
    logging.basicConfig()
    logger = logging.getLogger(MODULE_LOGGER_NAME)
    logger.setLevel(logging.DEBUG)
    fh = FileHandler(str(experiment_path / "experiment.log"))
    fh.setLevel(logging.DEBUG)
    logger.addHandler(fh)

    experiment = Experiment01(experiment_path, None if configfile is None else Path(configfile))
    experiment.initialize()
    runner = SingleProcessExperimentRunner()

    with experiment_path.joinpath("experiment.csv").open("w") as f:
        f.write(create_csv_header() + "\n")
        f.flush()
        experiment.step_listeners.append(create_step_logger(f))
        experiment.step_listeners.append(break_when_all_delivered)
        experiment.step_listeners.append(break_when_all_broken)
        # _, res, trial_infos = runner.run_trials(experiment, number_of_trials=1, max_steps=3000, max_retrials=0)
        experiment.run(max_steps=5000)
    experiment.world.printState()