Example #1
def setup_log():
	# Configure the logging mechanisms
	# Default log level to DEBUG and filter the logs for console output.
	global log_level
	logging.getLogger().setLevel(logging.DEBUG)
	logging.getLogger("cherrypy").setLevel(logging.INFO) # cherrypy must be forced
	svc_log = logging.getLogger(LOG_NAME)
	handler = logging.StreamHandler()
	handler.setFormatter(
	   logging.Formatter(
	       '%(asctime)s %(thread)d %(name)s %(levelname)s - %(message)s'))
	# User-specified log level only controls console output.
	handler.setLevel(log_level)
	svc_log.addHandler(handler)
	log_folder = appdirs.user_log_dir(APP_NAME, APP_AUTHOR)
	if not exists(log_folder):
		os.makedirs(log_folder)
	log_file = join(log_folder, APP_AUTHOR + ".ingest.log")
	print "Log file:", log_file
	handler = logging.handlers.RotatingFileHandler(log_file, backupCount=10)
	handler.setFormatter(
	   logging.Formatter(
	       '%(asctime)s %(thread)d %(name)s %(levelname)s - %(message)s'))
	handler.setLevel(logging.DEBUG)
	handler.doRollover()
	svc_log.addHandler(handler)
Example #2
    def _create_file_handler(self):
        """ Adds a handler to send logs to a file.  This is a rotating file handler,
            so when files reach log_file_size they will get renamed and have a numeric
            suffix added.

            It will attempt to make this directory if it does not exist.
        """
        if not self.log_dir:
            if self.app_name:
                self.log_dir = appdirs.user_log_dir(self.app_name)
            else:
                print('CRITICAL: cannot write logs to files without either log_dir or app_name')
                sys.exit(1)

        try:
            os.makedirs(self.log_dir)
        except OSError as exception:
            if exception.errno != errno.EEXIST:
                self.logger.critical('Log directory creation failed.  Log dir: %s' % self.log_dir)
                raise

        log_fqfn = os.path.join(self.log_dir, self.log_fn)

        file_handler = logging.handlers.RotatingFileHandler(log_fqfn,
                                                            maxBytes=self.log_file_size,
                                                            backupCount=self.log_count)
        file_handler.setFormatter(self.formatter)
        self.logger.addHandler(file_handler)
Example #3
def get_log_folder_path(in_appname, in_appauthor):
    retVal = appdirs.user_log_dir(appname=in_appname, appauthor=in_appauthor)
    try:
        os.makedirs(retVal)
    except OSError:  # os.makedirs raises if the directory already exists
        pass
    return retVal
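
As an aside, on Python 3.2+ the try/except above can be collapsed with exist_ok=True, which suppresses only the "already exists" error; a minimal equivalent sketch (not from the original source):

import os
import appdirs

def get_log_folder_path(in_appname, in_appauthor):
    ret_val = appdirs.user_log_dir(appname=in_appname, appauthor=in_appauthor)
    # exist_ok=True silences only the "directory exists" case; genuine
    # failures (e.g. permission errors) still raise OSError.
    os.makedirs(ret_val, exist_ok=True)
    return ret_val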
Example #4
def setup_logging(verbosity=None, application_path=None):

    if not application_path:
        import appdirs
        application_path = Path(str(appdirs.user_log_dir("pydio", "pydio")))
        if not application_path.exists():
            application_path.mkdir(parents=True)

    general_config = global_config_manager.get_general_config()

    log_file = os.path.join(DEFAULT_DATA_PATH, str(general_config['log_configuration']['log_file_name']))

    log_level_mapping = {'WARNING': logging.WARNING,
                         'INFO': logging.INFO,
                         'DEBUG': logging.DEBUG}

    levels = dict((int(k), log_level_mapping[v]) for k, v in general_config['log_configuration']['log_levels'].items())
    level = levels.get(verbosity, logging.NOTSET)

    general_config['log_configuration']['disable_existing_loggers'] = bool(general_config['log_configuration']['disable_existing_loggers'])
    general_config['log_configuration']['handlers']['file']['filename'] = log_file
    general_config['log_configuration']['handlers']['console']['level'] = level

    configuration = general_config['log_configuration']

    from logging.config import dictConfig

    dictConfig(configuration)
    logging.debug("verbosity: %s" % verbosity)
Example #5
def get_arguments():

    desc = textwrap.dedent(
'''
Most OSs (including Windows and MacOS) only change the modification time of a folder/directory based on its immediate
children.  propmtime analyzes a folder and all of its children, and propagates (updates) the modification times of 
each folder to be the most recent time of all of its children.

Author: James Abel
URL: https://github.com/jamesabel/propmtime
LICENSE: GPLv3''')

    parser = argparse.ArgumentParser(description=desc, formatter_class=argparse.RawDescriptionHelpFormatter)
    parser.add_argument("-p", "--path", default=".", help='Path to folder or directory. (default=".")')
    parser.add_argument('-s', '--silent', action='store_true', default=False,
                        help='Do not produce summary output (default=False)')
    parser.add_argument('--hidden', action='store_true', help='Process hidden files (default=False)', default=False)
    parser.add_argument('--system', action='store_true', help='Process system files (default=False)', default=False)
    parser.add_argument('-v', '--verbose', action='store_true', default=False,
                        help='Increase verbosity (default=False)')
    parser.add_argument("-l", "--logdir", default=appdirs.user_log_dir(__application_name__, __author__, ),
                        help='Set the log directory (default from appdirs)')
    parser.add_argument('-d', '--dellog', action='store_true', default=False,
                        help='Delete existing log files on invocation (default=False)')
    parser.add_argument('-n', '--noupdate', action='store_true', default=False,
                        help='Suppress updating the mtime (do a "dry run") (default=False)')
    args = parser.parse_args()

    return args
Example #6
 def user_log_dir(self):
     """Return ``user_log_dir``."""
     directory = appdirs.user_log_dir(self.appname, self.appauthor,
                         version=self.version)
     if self.create:
         self._ensure_directory_exists(directory)
     return directory
Example #7
 def test_helpers(self):
     self.assertIsInstance(
         appdirs.user_data_dir('MyApp', 'MyCompany'), STRING_TYPE)
     self.assertIsInstance(
         appdirs.site_data_dir('MyApp', 'MyCompany'), STRING_TYPE)
     self.assertIsInstance(
         appdirs.user_cache_dir('MyApp', 'MyCompany'), STRING_TYPE)
     self.assertIsInstance(
         appdirs.user_log_dir('MyApp', 'MyCompany'), STRING_TYPE)
Example #8
 def test_helpers(self):
     self.assertTrue(isinstance(
         appdirs.user_data_dir('MyApp', 'MyCompany'), str))
     self.assertTrue(isinstance(
         appdirs.site_data_dir('MyApp', 'MyCompany'), str))
     self.assertTrue(isinstance(
         appdirs.user_cache_dir('MyApp', 'MyCompany'), str))
     self.assertTrue(isinstance(
         appdirs.user_log_dir('MyApp', 'MyCompany'), str))
Example #9
def setup_logging(verbosity=None, application_path=None):

    if not application_path:
        import appdirs
        application_path = Path(str(appdirs.user_log_dir("pydio", "pydio")))
        if not application_path.exists():
            application_path.mkdir(parents=True)

    log_file = str(application_path / "pydio.log")

    levels = {
        0: logging.WARNING,
        1: logging.INFO,
        2: logging.DEBUG,
    }
    level = levels.get(verbosity, logging.NOTSET)

    configuration = {
        'version': 1,
        'disable_existing_loggers': True,
        'formatters': {
            'short': {
                'format': '%(asctime)s %(levelname)-7s %(thread)-5d %(threadName)-8s %(message)s',
                'datefmt': '%H:%M:%S',
            },
            # this will slow down the app a little, due to the filename/lineno/funcName lookups
            'verbose': {
                'format': '%(asctime)s %(levelname)-7s %(thread)-5d %(threadName)-8s %(filename)s:%(lineno)s | %(funcName)s | %(message)s',
                'datefmt': '%Y-%m-%d %H:%M:%S',
            },
        },
        'handlers': {
            'file': {
                'level': 'INFO',
                'class': 'logging.handlers.RotatingFileHandler',
                'formatter': 'verbose',
                'backupCount': 8,
                'maxBytes': 4194304,  # 4MB
                'filename': log_file
            },
            'console': {
                'level': level,
                'class': 'logging.StreamHandler',
                'formatter': 'short',
            },
        },
        'root': {
            'level': 'DEBUG',
            'handlers': ['console', 'file'],
        }

    }
    from logging.config import dictConfig

    dictConfig(configuration)
    #logging.info("Logging setup changed")
    logging.debug("verbosity: %s" % verbosity)
Example #10
def get_invocations_file_path():
    # if Desktop/Logs exists put the file there, otherwise use the appdirs user log dir
    logs_dir = os.path.join(os.path.expanduser("~"), "Desktop", "Logs")
    if os.path.isdir(logs_dir):
        folder_to_write_in = logs_dir
    else:
        folder_to_write_in = appdirs.user_log_dir("")
    invocations_file_path = os.path.join(folder_to_write_in, "instl_invocations.txt")
    return invocations_file_path
Example #11
def logdir():
  appname = 'ffcleaner'
  appauthor = 'hrrmsn'
  logpath = appdirs.user_log_dir(appname, appauthor)
  try:
    if not os.path.exists(logpath):
      os.makedirs(logpath)
  except OSError:
    inform('Error when creating path to log file: \'' + logpath + '\'.')
    log(msg=traceback.format_exc())
    sys_exit(1, error='OSError', send_log=True)
  return logpath
Example #12
def logger(name, console_loglevel='INFO', file_loglevel='INFO'):
    log = logging.getLogger(name)
    log.setLevel(logging.DEBUG)

    # create null handler if running silent
    if console_loglevel == 'NONE' and file_loglevel == 'NONE':
        nh = logging.NullHandler()
        log.addHandler(nh)

    # set up console logging
    if console_loglevel != 'NONE':
        ch = logging.StreamHandler()
        ch.setFormatter(logging.Formatter('%(levelname)s: %(message)s'))

        if console_loglevel == 'CRITICAL':
            ch.setLevel(logging.CRITICAL)
        elif console_loglevel == 'ERROR':
            ch.setLevel(logging.ERROR)
        elif console_loglevel == 'WARNING':
            ch.setLevel(logging.WARNING)
        elif console_loglevel == 'DEBUG':
            ch.setLevel(logging.DEBUG)
        else: ch.setLevel(logging.INFO)

        log.addHandler(ch)

    # set up file logging
    if file_loglevel != 'NONE':
        log_path = os.path.join(appdirs.user_log_dir(name), name + '.log')
        try:
            os.makedirs(os.path.dirname(log_path), 0o700)
        except OSError as exception:
            if exception.errno != errno.EEXIST:
                raise

        fh = logging.FileHandler(log_path)
        fh.setFormatter(logging.Formatter('%(asctime)s - %(name)s [%(levelname)s]: %(message)s'))

        if file_loglevel == 'CRITICAL':
            fh.setLevel(logging.CRITICAL)
        elif file_loglevel == 'ERROR':
            fh.setLevel(logging.ERROR)
        elif file_loglevel == 'WARNING':
            fh.setLevel(logging.WARNING)
        elif file_loglevel == 'DEBUG':
            fh.setLevel(logging.DEBUG)
        else: fh.setLevel(logging.INFO)

        log.addHandler(fh)

    return log
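
The if/elif ladders above map level names to logging constants by hand; a hedged alternative sketch (a hypothetical helper, not from the original source) using a lookup table:

import logging

_LEVELS = {'CRITICAL': logging.CRITICAL, 'ERROR': logging.ERROR,
           'WARNING': logging.WARNING, 'INFO': logging.INFO,
           'DEBUG': logging.DEBUG}

def to_level(name):
    # Fall back to INFO for unknown names, mirroring the else branches above.
    return _LEVELS.get(name, logging.INFO)

# Usage inside logger(): ch.setLevel(to_level(console_loglevel))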
Example #13
 def __init__(self, app_data_folders, latus_folders, syncs):
     super().__init__()
     self.app_data_folders = app_data_folders
     self.latus_folders = latus_folders
     self.syncs = syncs
     [os.makedirs(d, exist_ok=True) for d in self.latus_folders]
     self.run_latus_test = True
     self.exit_event = threading.Event()
     self.file_count = 0
     # the test latus processes use the default log path
     self.log_file_path = os.path.join(appdirs.user_log_dir(latus.const.NAME, latus.const.COMPANY), latus.const.LOG_FILE)
     try:
         os.remove(self.log_file_path)
     except PermissionError:
         pass
Example #14
def main():
    """
    main test function
    """

    number_of_nodes = 3

    parser = argparse.ArgumentParser(description='latus system test')
    parser.add_argument('-n', '--no_sync', action='store_true', default=False, help="don't do sync (for testing)")
    args = parser.parse_args()

    # set up test folder and logging
    test_root = os.path.join('temp', 'systst')
    try:
        shutil.rmtree(test_root)
    except FileNotFoundError:
        pass

    # backup_count=0 so we don't have log file rollover (the code that looks into the logs can't handle log file rollover)
    latus.logger.init(os.path.join(appdirs.user_log_dir(latus.const.NAME, latus.const.COMPANY), 'systst'),
                      delete_existing_log_files=True, backup_count=0)
    latus.logger.set_console_log_level(logging.WARN)
    latus.logger.set_file_log_level(logging.DEBUG)
    latus.logger.log.info('starting %s' % __name__)
    time.sleep(1)

    # set up the preferences for the nodes we'll run
    app_data_folders = setup_preferences(test_root, number_of_nodes)
    latus_folders = []
    for app_data_folder in app_data_folders:
        pref = latus.preferences.Preferences(app_data_folder)
        latus_folders.append(pref.get_latus_folder())

    # create the nodes and start them
    syncs = []
    if not args.no_sync:
        # start the sync
        syncs = [SyncProc(app_data_folder) for app_data_folder in app_data_folders]
        [sync.start() for sync in syncs]

    # do the tests (and provide the user with a "Stop" button)
    files_test = FilesTest(app_data_folders, latus_folders, syncs)
    files_test.start()
    user_interface()
    files_test.request_exit()
    files_test.join()
    if not args.no_sync:
        [sync.request_exit() for sync in syncs]
Example #15
def init_data_dir(args):
    import os
    import appdirs

    global data_dir
    data_dir = appdirs.user_data_dir(appauthor=XCP_NAME, appname=APP_NAME, roaming=True)
    if not os.path.isdir(data_dir):
        os.makedirs(data_dir)

    global config_dir  
    config_dir = appdirs.user_config_dir(appauthor=XCP_NAME, appname=APP_NAME, roaming=True)
    if not os.path.isdir(config_dir):
        os.makedirs(config_dir)

    global log_dir
    log_dir = appdirs.user_log_dir(appauthor=XCP_NAME, appname=APP_NAME)
    if not os.path.isdir(log_dir):
        os.makedirs(log_dir)
Example #16
    def setUp(self):
        """
            Setting up tests by running the first run setup and setting specific variables
            based on the config file.
        """

        SettingsManager().first_run_test()
        self.app_name = etltest_config['app_name']
        self.app_author = etltest_config['app_author']
        self.data_dir = appdirs.user_data_dir(self.app_name, self.app_author)
        self.log_dir = appdirs.user_log_dir(self.app_name, self.app_author)
        self.settings_file = os.path.join(self.data_dir, etltest_config['settings_file'])
        self.connection_file = os.path.join(self.data_dir, etltest_config['connection_file'])
        self.data_location = SettingsManager().find_setting('Locations', 'data')
        self.tools_file = os.path.join(self.data_dir, etltest_config['tools_file'])
        self.copy_file = os.path.join(self.data_dir, 'copy.test')
        self.tests_location = SettingsManager().find_setting('Locations', 'tests')
        self.output_location = SettingsManager().find_setting('Locations', 'output')

        self.maxDiff = None
Example #17
def main():

    # set up logger like any normal GUI application
    logger = logging.getLogger(APPLICATION_NAME)

    log_folder = appdirs.user_log_dir(APPLICATION_NAME, AUTHOR)
    if not os.path.exists(log_folder):
        os.makedirs(log_folder)
    log_file_path = os.path.join(log_folder, 'launch.log')
    print(log_file_path)

    file_handler = logging.FileHandler(log_file_path)
    formatter = logging.Formatter('%(asctime)s %(name)s %(levelname)s %(message)s')
    file_handler.setFormatter(formatter)
    logger.addHandler(file_handler)
    logger.setLevel(logging.INFO)

    # illustrate what Python we are running
    logger.info('test_example : sys.version : %s' % str(sys.version))
    logger.info('test_example : sys.path : %s' % str(sys.path))

    # run my application
    test_example.mymodule.run()
Example #18
import os
import appdirs
import errno
import sys
import traceback

import log


# make various user data directories
def makedirs(path):
    try:
        os.makedirs(path)
    except OSError as e:
        if e.errno != errno.EEXIST:
            print "Couldn't create directory %s" % path

for path in [appdirs.user_data_dir('nab'), appdirs.user_config_dir('nab'),
             appdirs.user_cache_dir('nab'), appdirs.user_log_dir('nab')]:
    makedirs(path)


# set up logging for uncaught exceptions
_log = log.log.getChild("exception")


def _handle_exception(*exception):
    _log.exception("".join(traceback.format_exception(*exception)))

sys.excepthook = _handle_exception
Example #19
import os
import sys
from appdirs import user_data_dir, user_log_dir

# Information for humans:
# -----------------------------------------------------------------------------
APPNAME = 'Auto-Rental'
AUTHOR = 'hopur-32'

# Information for computers:
# -----------------------------------------------------------------------------
# OS specific directory to store data
DATA_DIR = user_data_dir(APPNAME, AUTHOR)
if not os.path.isdir(DATA_DIR):
    os.makedirs(DATA_DIR)

if sys.platform == 'linux':
    LOGGING_DIR = '/tmp'
else:
    LOGGING_DIR = user_log_dir(APPNAME, AUTHOR)
    if not os.path.isdir(LOGGING_DIR):
        os.makedirs(LOGGING_DIR)
Example #20
 def __init__(self, time):
     logdir = pathlib.Path(appdirs.user_log_dir("fogtools"))
     now = datetime.datetime.now()
     logfile = logdir / f"{now:%Y-%m-%d}" / f"fogdb-{time:%Y%m%d-%H%M}.log"
     logfile.parent.mkdir(parents=True, exist_ok=True)
     super().__init__(logfile)
Example #21
    def init_logger(self):
        """
        Initialize the logger.  Call exactly once.
        """

        assert (self.name is not None)
        assert (self.author is not None)
        self.handlers = {}
        if self.is_root:
            self.log = logging.getLogger()
        else:
            self.log = logging.getLogger(self.name)
        if not self.propagate:
            self.log.propagate = False

        # set the root log level
        if self.verbose:
            self.log.setLevel(logging.DEBUG)
        else:
            self.log.setLevel(logging.INFO)

        if self.log.hasHandlers():
            self.log.info("Logger already initialized.")

        # create file handler
        if self.log_directory is None:
            self.log_directory = appdirs.user_log_dir(self.name, self.author)
        if self.log_directory is not None:
            if self.delete_existing_log_files:
                for file_path in glob(
                        os.path.join(self.log_directory,
                                     "*%s" % self.log_extension)):
                    try:
                        os.remove(file_path)
                    except OSError:
                        pass
            os.makedirs(self.log_directory, exist_ok=True)
            self.log_path = os.path.join(
                self.log_directory, "%s%s" % (self.name, self.log_extension))
            file_handler = logging.handlers.RotatingFileHandler(
                self.log_path,
                maxBytes=self.max_bytes,
                backupCount=self.backup_count)
            file_handler.setFormatter(self.log_formatter)
            if self.verbose:
                file_handler.setLevel(logging.DEBUG)
            else:
                file_handler.setLevel(logging.INFO)
            self.log.addHandler(file_handler)
            self.handlers[HandlerType.File] = file_handler
            self.log.info('log file path : "%s" ("%s")' %
                          (self.log_path, os.path.abspath(self.log_path)))

        if self.gui:
            # GUI will only pop up a dialog box - it's important that GUI not try to output to stdout or stderr
            # since that would likely cause a permissions error.
            dialog_box_handler = DialogBoxHandler(self.rate_limits)
            if self.verbose:
                dialog_box_handler.setLevel(logging.WARNING)
            else:
                dialog_box_handler.setLevel(logging.ERROR)
            self.log.addHandler(dialog_box_handler)
            self.handlers[HandlerType.DialogBox] = dialog_box_handler
        else:
            console_handler = logging.StreamHandler()
            console_handler.setFormatter(self.log_formatter)
            if self.verbose:
                console_handler.setLevel(logging.INFO)
            else:
                console_handler.setLevel(logging.WARNING)
            self.log.addHandler(console_handler)
            self.handlers[HandlerType.Console] = console_handler

        string_list_handler = BalsaStringListHandler(
            self.max_string_list_entries)
        string_list_handler.setFormatter(self.log_formatter)
        string_list_handler.setLevel(logging.INFO)
        self.log.addHandler(string_list_handler)
        self.handlers[HandlerType.StringList] = string_list_handler

        # setting up Sentry error handling
        # For the Client to work you need a SENTRY_DSN environmental variable set, or one must be provided.
        if self.use_sentry:
            sample_rate = 0.0 if self.inhibit_cloud_services else 1.0
            integrations = []
            if self.use_sentry_django:
                from sentry_sdk.integrations.django import DjangoIntegration
                integrations.append(DjangoIntegration())
            if self.use_sentry_flask:
                from sentry_sdk.integrations.flask import FlaskIntegration
                integrations.append(FlaskIntegration())
            if self.use_sentry_lambda:
                from sentry_sdk.integrations.aws_lambda import AwsLambdaIntegration
                integrations.append(AwsLambdaIntegration())
            if self.use_sentry_sqlalchemy:
                from sentry_sdk.integrations.sqlalchemy import SqlalchemyIntegration
                integrations.append(SqlalchemyIntegration())
            if self.use_sentry_celery:
                from sentry_sdk.integrations.celery import CeleryIntegration
                integrations.append(CeleryIntegration())

            if self.sentry_dsn is None:
                if 'SENTRY_DSN' not in os.environ:
                    raise ValueError(f"Missing sentry_dsn")
                else:
                    sentry_sdk.init(
                        dsn=os.environ['SENTRY_DSN'],
                        sample_rate=sample_rate,
                        integrations=integrations,
                    )
            else:
                sentry_sdk.init(
                    dsn=self.sentry_dsn,
                    sample_rate=sample_rate,
                    integrations=integrations,
                )

        # error handler for callback on error or above
        # (this is last since the user may do a sys.exit() in the error callback)
        if self.error_callback is not None:
            error_callback_handler = BalsaNullHandler(self.error_callback)
            error_callback_handler.setLevel(logging.ERROR)
            self.log.addHandler(error_callback_handler)
            self.handlers[HandlerType.Callback] = error_callback_handler
Example #22

    return Binstar(token, domain=url,)

def load_config(config_file):
    if exists(config_file):
        with open(config_file) as fd:
            data = yaml.load(fd)
            if data:
                return data

    return {}

SITE_CONFIG = join(appdirs.site_data_dir('binstar', 'ContinuumIO'), 'config.yaml')
USER_CONFIG = join(appdirs.user_data_dir('binstar', 'ContinuumIO'), 'config.yaml')
USER_LOGDIR = appdirs.user_log_dir('binstar', 'ContinuumIO')

def get_config(user=True, site=True):

    config = {}
    if site:
        config.update(load_config(SITE_CONFIG))
    if user:
        config.update(load_config(USER_CONFIG))

    return config

def set_config(data, user=True):

    config_file = USER_CONFIG if user else SITE_CONFIG
Example #23
def shutdown():
    with open(os.path.join(user_log_dir("reboot_on_connexion_lost", "pawmint"), "shutdown.txt"), "a") as myfile:
        myfile.write(datetime.now().strftime('%Y%m%d_%H%M\n'))
    os.system("shutdown now -r")
Example #24
STATICFILES_DIRS = (
    os.path.join(BASE_DIR, 'static'),
)


# template directories
TEMPLATE_DIRS = (os.path.join(BASE_DIR, 'templates'),)


# test runner
TEST_RUNNER = 'rook.test_runner.TestRunner'


import appdirs

log_dir = appdirs.user_log_dir('rook')

if not os.path.exists(log_dir):
    os.makedirs(log_dir)


LOGGING = {
    'version': 1,
    'handlers': {
        'console': {
            'level': 'INFO',
            'class': 'logging.StreamHandler',
        },
        'file': {
            'level': 'DEBUG',
            'class': 'logging.FileHandler',
Example #25
# Adapted for GSSHApy
#
# License BSD-3 Clause

import logging
import appdirs
import os

from .metadata import version

logger = logging.getLogger('gsshapy')
null_handler = logging.NullHandler()
logger.addHandler(null_handler)
logger.propagate = False

default_log_dir = appdirs.user_log_dir('gsshapy', 'logs')
default_log_file = os.path.join(default_log_dir, 'gsshapy.log')


def log_to_console(status=True, level=None):
    """Log events to  the console.

    Args:
        status (bool, Optional, Default=True)
            whether logging to console should be turned on(True) or off(False)
        level (string, Optional, Default=None) :
            level of logging; whichever level is chosen all higher levels will be logged.
            See: https://docs.python.org/2/library/logging.html#levels
      """

    if status:
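        # --- snippet truncated above; a sketch (an assumption, not the
        # --- original source) of how the body might continue, using the
        # --- module-level 'gsshapy' logger defined earlier:
        console_handler = logging.StreamHandler()
        if level is not None:
            console_handler.setLevel(level)  # setLevel accepts names like 'DEBUG' on Python 3.2+
        logger.addHandler(console_handler)
    else:
        # turning console logging off: drop any StreamHandlers added earlier
        # (exact type check, since FileHandler subclasses StreamHandler)
        for handler in list(logger.handlers):
            if type(handler) is logging.StreamHandler:
                logger.removeHandler(handler)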
Example #26
# Copyright (c) 2013, Mathijs Dumon
# All rights reserved.
# Complete license can be found in the LICENSE file.

import argparse, os, sys
from pyxrd.__version import __version__
from appdirs import user_data_dir, user_log_dir

### General Information ###
VERSION = __version__

DEBUG = False
FINALIZERS = []  # A list of callables that are called before the main function exits
BGSHIFT = True

LOG_FILENAME = os.path.join(user_log_dir('PyXRD'), 'errors.log')

### The URL where PyXRD looks for updates & the online manual ###
UPDATE_URL = 'http://users.ugent.be/~madumon/pyxrd/'
MANUAL_URL = UPDATE_URL

### Factor to multiply the CSDS average with to obtain the maximum CSDS ###
LOG_NORMAL_MAX_CSDS_FACTOR = 2.5

### What type of residual error we use: ###
#   "Rp" = 'standard' pattern Rp factor
#   "Rpw" = 'weighted' pattern Rp factor
#   "Rpder" = Rp factor of first-derivatives
RESIDUAL_METHOD = "Rp"

### Default wavelength if no Goniometer is available ###
Example #27
#
# See the LICENSE file for details. License information is also available
# on the ilastik web site at:
# 		   http://ilastik.org/license.html
###############################################################################

import os
import logging.config
import warnings

import appdirs
from . import loggingHelpers
from ilastik.config import cfg as ilastik_config

DEFAULT_LOGFILE_PATH = os.path.join(
    appdirs.user_log_dir(appname="ilastik", appauthor=False), "log.txt")


class OutputMode(object):
    CONSOLE = 0
    LOGFILE = 1
    BOTH = 2
    LOGFILE_WITH_CONSOLE_ERRORS = 3


def get_logfile_path():
    root_handlers = logging.getLogger().handlers
    for handler in root_handlers:
        if isinstance(handler, logging.FileHandler):
            return handler.baseFilename
    return None
Example #28
import appdirs
import sys
import os

APP_NAME = "Reaper"
APP_AUTHOR = "UQ"

DATA_DIR = appdirs.user_data_dir(APP_NAME, APP_AUTHOR)
LOG_DIR = appdirs.user_log_dir(APP_NAME, APP_AUTHOR)
CACHE_DIR = appdirs.user_cache_dir(APP_NAME, APP_AUTHOR)


def _calc_path(path):
    head, tail = os.path.split(path)
    if tail == 'reaper':
        return path
    else:
        return _calc_path(head)

BUNDLE_DIR = sys._MEIPASS if getattr(sys, "frozen", False) else \
    _calc_path(os.path.dirname(os.path.abspath(__file__)))
Example #29
from appdirs import user_log_dir
import os
import logging.handlers

# Normal base logging directory name
log_directory_name = "irida_uploader"
# When running tests, the Makefile sets the environment variable IRIDA_UPLOADER_TEST to 'True'
# If it exists then we are running a test and should be logging to the test logs directory
if os.environ.get('IRIDA_UPLOADER_TEST'):
    log_directory_name = "irida_uploader_test"
# Use systems default logging path, and append our named directory
log_file_path = os.path.join(user_log_dir(log_directory_name),
                             'irida-uploader.log')

if not os.path.exists(user_log_dir(log_directory_name)):
    os.makedirs(user_log_dir(log_directory_name))

# Looks something like this:
# 2019-02-07 14:50:02 INFO     Log message goes here...
log_format = logging.Formatter('%(asctime)s %(levelname)-8s %(message)s',
                               datefmt='%Y-%m-%d %H:%M:%S')

# setup root logger
root_logger = logging.getLogger()
root_logger.handlers = []
logging.basicConfig(
    level=logging.NOTSET,  # NOTSET is the lowest level, so every message can be logged by handlers
    handlers=[logging.NullHandler()],  # default to a NullHandler, so that we can handle logging manually
)
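
A minimal sketch (an assumption, not part of the original module) of how a rotating file handler could then be attached manually, reusing log_file_path, log_format, and root_logger from above:

file_handler = logging.handlers.RotatingFileHandler(
    log_file_path, maxBytes=1024 * 1024, backupCount=5)  # sizes are illustrative
file_handler.setFormatter(log_format)
file_handler.setLevel(logging.INFO)
root_logger.addHandler(file_handler)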
Example #30
def get_user_log_dir():
    return user_log_dir(log_directory_name)
Example #31
#: The unique USB vendor ID for Adafruit boards.
VENDOR_ID = 9114
#: The regex used to extract ``__version__`` and ``__repo__`` assignments.
DUNDER_ASSIGN_RE = re.compile(r"""^__\w+__\s*=\s*['"].+['"]$""")
#: Flag to indicate if the command is being run in verbose mode.
VERBOSE = False
#: The location of data files used by circup (following OS conventions).
DATA_DIR = appdirs.user_data_dir(appname="circup", appauthor="adafruit")
#: The path to the JSON file containing the metadata about the current bundle.
BUNDLE_DATA = os.path.join(DATA_DIR, "circup.json")
#: The path to the zip file containing the current library bundle.
BUNDLE_ZIP = os.path.join(DATA_DIR, "adafruit-circuitpython-bundle-{}.zip")
#: The path to the directory into which the current bundle is unzipped.
BUNDLE_DIR = os.path.join(DATA_DIR, "adafruit_circuitpython_bundle_{}")
#: The directory containing the utility's log file.
LOG_DIR = appdirs.user_log_dir(appname="circup", appauthor="adafruit")
#: The location of the log file for the utility.
LOGFILE = os.path.join(LOG_DIR, "circup.log")
#: The version of CircuitPython found on the connected device.
CPY_VERSION = ""


# Ensure DATA_DIR / LOG_DIR related directories and files exist.
if not os.path.exists(DATA_DIR):  # pragma: no cover
    os.makedirs(DATA_DIR)
if not os.path.exists(LOG_DIR):  # pragma: no cover
    os.makedirs(LOG_DIR)


# Setup logging.
logger = logging.getLogger(__name__)
Example #32
except OSError:
    if not _os.path.isdir( _os.environ["pydna_config_dir"] ):
        raise

_ini_path = _os.path.join( _os.environ["pydna_config_dir"], "pydna.ini" )

_parser = _SafeConfigParser()

if _os.path.exists(_ini_path):
    _parser.read(_ini_path)
else:
    with open(_ini_path, 'w') as f:
        _parser.add_section('main')
        _parser.set('main','email', "*****@*****.**")
        _parser.set('main','data_dir', _appdirs.user_data_dir("pydna"))
        _parser.set('main','log_dir',  _appdirs.user_log_dir("pydna"))
        _parser.set('main','cache','cached')
        _parser.set('main','ape','')
        _parser.set('main','primers','')
        _parser.write(f)


_os.environ["pydna_email"]    = _os.getenv("pydna_email")    or _parser.get("main", "email")
_os.environ["pydna_data_dir"] = _os.getenv("pydna_data_dir") or _parser.get("main", "data_dir")
_os.environ["pydna_log_dir"]  = _os.getenv("pydna_log_dir")  or _parser.get("main", "log_dir")
_os.environ["pydna_cache"]    = _os.getenv("pydna_cache")    or _parser.get("main", "cache")

_os.environ["pydna_ape"]      = _os.getenv("pydna_ape")      or _parser.get("main", "ape")
_os.environ["pydna_primers"]  = _os.getenv("pydna_primers")  or _parser.get("main", "primers")

#_os.environ["pydna_cache"] = _os.getenv("pydna_cache") or "cached"
Example #33
import shutil
import appdirs
from PyQt5.QtWidgets import QMessageBox
from pyflakes.api import check
from pycodestyle import StyleGuide, Checker
from mu.resources import path
from mu import __version__

# The user's home directory.
HOME_DIRECTORY = os.path.expanduser('~')
# Name of the directory within the home folder to use by default
WORKSPACE_NAME = 'mu_code'
# The default directory for application data (i.e., configuration).
DATA_DIR = appdirs.user_data_dir(appname='mu', appauthor='python')
# The default directory for application logs.
LOG_DIR = appdirs.user_log_dir(appname='mu', appauthor='python')
# The path to the log file for the application.
LOG_FILE = os.path.join(LOG_DIR, 'mu.log')
# Regex to match pycodestyle (PEP8) output.
STYLE_REGEX = re.compile(r'.*:(\d+):(\d+):\s+(.*)')
# Regex to match flake8 output.
FLAKE_REGEX = re.compile(r'.*:(\d+):\s+(.*)')
# Regex to match false positive flake errors if microbit.* is expanded.
EXPAND_FALSE_POSITIVE = re.compile(r"^'microbit\.(\w+)' imported but unused$")
# The text to which "from microbit import \*" should be expanded.
EXPANDED_IMPORT = ("from microbit import pin15, pin2, pin0, pin1, "
                   " pin3, pin6, pin4, i2c, pin5, pin7, pin8, Image, "
                   "pin9, pin14, pin16, reset, pin19, temperature, "
                   "sleep, pin20, button_a, button_b, running_time, "
                   "accelerometer, display, uart, spi, panic, pin13, "
                   "pin12, pin11, pin10, compass")
Example #34
def ensure_shutdown_dir():
    try:
        os.stat(user_log_dir('reboot_on_connexion_lost', 'pawmint'))
    except OSError:
        os.makedirs(user_log_dir('reboot_on_connexion_lost', 'pawmint'))
Example #35
from osfoffline.utils.path import ensure_folders

logger = logging.getLogger(__name__)

try:
    from osfoffline.settings.local import *  # noqa
except ImportError:
    logger.warning('No local.py found. Using default settings.')

# Generated settings

# Variables used to control where application config data is stored
PROJECT_DB_DIR = user_data_dir(appname=PROJECT_NAME, appauthor=PROJECT_AUTHOR)
PROJECT_DB_FILE = os.path.join(PROJECT_DB_DIR, 'osf.db')

PROJECT_LOG_DIR = user_log_dir(appname=PROJECT_NAME, appauthor=PROJECT_AUTHOR)
PROJECT_LOG_FILE = os.path.join(PROJECT_LOG_DIR, 'osfoffline.log')

# Ensure that storage directories are created when application starts
for path in (PROJECT_DB_DIR, PROJECT_LOG_DIR):
    logger.info('Ensuring {} exists'.format(path))
    ensure_folders(path)

# Best for last, the logging configuration

LOGGING_CONFIG = {
    'version': 1,
    'disable_existing_loggers': False,
    'formatters': {
        'console': CONSOLE_FORMATTER,
        'file_log': {
Example #36
def initialise(database_file=None, log_file=None, api_log_file=None,
                testnet=False, testcoin=False,
                backend_name=None, backend_connect=None, backend_port=None,
                backend_user=None, backend_password=None,
                backend_ssl=False, backend_ssl_verify=True,
                backend_poll_interval=None, 
                rpc_host=None, rpc_port=None,
                rpc_user=None, rpc_password=None,
                rpc_allow_cors=None,
                force=False, verbose=False):

    # Data directory
    data_dir = appdirs.user_data_dir(appauthor=config.XMN_NAME, appname=config.APP_NAME, roaming=True)
    if not os.path.isdir(data_dir):
        os.makedirs(data_dir, mode=0o755)

    # testnet
    if testnet:
        config.TESTNET = testnet
    else:
        config.TESTNET = False

    # testcoin
    if testcoin:
        config.TESTCOIN = testcoin
    else:
        config.TESTCOIN = False

    network = ''
    if config.TESTNET:
        network += '.testnet'
    if config.TESTCOIN:
        network += '.testcoin'

    # Database
    if database_file:
        config.DATABASE = database_file
    else:
        filename = '{}{}.db'.format(config.APP_NAME, network)
        config.DATABASE = os.path.join(data_dir, filename)

    # Log directory
    log_dir = appdirs.user_log_dir(appauthor=config.XMN_NAME, appname=config.APP_NAME)
    if not os.path.isdir(log_dir):
        os.makedirs(log_dir, mode=0o755)

    # Log
    if log_file:
        config.LOG = log_file
    else:
        filename = 'server{}.log'.format(network)
        config.LOG = os.path.join(log_dir, filename)
    logger.debug('Writing server log to file: `{}`'.format(config.LOG))

    if api_log_file:
        config.API_LOG = api_log_file
    else:
        filename = 'server{}.api.log'.format(network)
        config.API_LOG = os.path.join(log_dir, filename)
    logger.debug('Writing API log to file: `{}`'.format(config.API_LOG))

    ##############
    # THINGS WE CONNECT TO

    # Backend name
    if backend_name:
        config.BACKEND_NAME = backend_name
    else:
        config.BACKEND_NAME = 'addrindex'
    if config.BACKEND_NAME == 'jmcorgan':
        config.BACKEND_NAME = 'addrindex'

    # Backend RPC host (Bitcoin Core)
    if backend_connect:
        config.BACKEND_CONNECT = backend_connect
    else:
        config.BACKEND_CONNECT = 'localhost'

    # Backend Core RPC port (Bitcoin Core)
    if backend_port:
        config.BACKEND_PORT = backend_port
    else:
        if config.TESTNET:
            config.BACKEND_PORT = config.DEFAULT_BACKEND_PORT_TESTNET
        else:
            config.BACKEND_PORT = config.DEFAULT_BACKEND_PORT
    try:
        config.BACKEND_PORT = int(config.BACKEND_PORT)
        if not (1 < config.BACKEND_PORT < 65535):
            raise ConfigurationError('invalid backend API port number')
    except:
        raise ConfigurationError("Please specific a valid port number backend-port configuration parameter")

    # Backend Core RPC user (Bitcoin Core)
    if backend_user:
        config.BACKEND_USER = backend_user
    else:
        config.BACKEND_USER = '******'

    # Backend Core RPC password (Bitcoin Core)
    if backend_password:
        config.BACKEND_PASSWORD = backend_password
    else:
        raise ConfigurationError('backend RPC password not set. (Use configuration file or --backend-password=PASSWORD)')

    # Backend Core RPC SSL
    if backend_ssl:
        config.BACKEND_SSL = backend_ssl
    else:
        config.BACKEND_SSL = False  # Default to off.

    # Backend Core RPC SSL Verify
    if backend_ssl_verify is not None:
        config.BACKEND_SSL_VERIFY = backend_ssl_verify
    else:
        config.BACKEND_SSL_VERIFY = True  # Default to on (don't support self-signed certificates)

    # Backend Poll Interval
    if backend_poll_interval:
        config.BACKEND_POLL_INTERVAL = backend_poll_interval
    else:
        config.BACKEND_POLL_INTERVAL = 2.0

    # Construct backend URL.
    config.BACKEND_URL = config.BACKEND_USER + ':' + config.BACKEND_PASSWORD + '@' + config.BACKEND_CONNECT + ':' + str(config.BACKEND_PORT)
    if config.BACKEND_SSL:
        config.BACKEND_URL = 'https://' + config.BACKEND_URL
    else:
        config.BACKEND_URL = 'http://' + config.BACKEND_URL


    ##############
    # THINGS WE SERVE

    # metronotesd API RPC host
    if rpc_host:
        config.RPC_HOST = rpc_host
    else:
        config.RPC_HOST = 'localhost'

    # metronotesd API RPC port
    if rpc_port:
        config.RPC_PORT = rpc_port
    else:
        if config.TESTNET:
            if config.TESTCOIN:
                config.RPC_PORT = config.DEFAULT_RPC_PORT_TESTNET + 1
            else:
                config.RPC_PORT = config.DEFAULT_RPC_PORT_TESTNET
        else:
            if config.TESTCOIN:
                config.RPC_PORT = config.DEFAULT_RPC_PORT + 1
            else:
                config.RPC_PORT = config.DEFAULT_RPC_PORT
    try:
        config.RPC_PORT = int(config.RPC_PORT)
        if not (1 < config.RPC_PORT < 65535):
            raise ConfigurationError('invalid metronotesd API port number')
    except:
        raise ConfigurationError("Please specific a valid port number rpc-port configuration parameter")

    #  metronotesd API RPC user
    if rpc_user:
        config.RPC_USER = rpc_user
    else:
        config.RPC_USER = '******'

    #  metronotesd API RPC password
    if rpc_password:
        config.RPC_PASSWORD = rpc_password
    else:
        raise ConfigurationError('RPC password not set. (Use configuration file or --rpc-password=PASSWORD)')

    config.RPC = 'http://' + urlencode(config.RPC_USER) + ':' + urlencode(config.RPC_PASSWORD) + '@' + config.RPC_HOST + ':' + str(config.RPC_PORT)

    # RPC CORS
    if rpc_allow_cors is not None:
        config.RPC_ALLOW_CORS = rpc_allow_cors
    else:
        config.RPC_ALLOW_CORS = True

    ##############
    # OTHER SETTINGS

    # skip checks
    if force:
        config.FORCE = force
    else:
        config.FORCE = False

    # Encoding
    if config.TESTCOIN:
        config.PREFIX = b'XX'                   # 2 bytes (possibly accidentally created)
    else:
        config.PREFIX = b'CNTRPRTY'             # 8 bytes

    # (more) Testnet
    if config.TESTNET:
        config.MAGIC_BYTES = config.MAGIC_BYTES_TESTNET
        if config.TESTCOIN:
            config.ADDRESSVERSION = config.ADDRESSVERSION_TESTNET
            config.BLOCK_FIRST = config.BLOCK_FIRST_TESTNET_TESTCOIN
            config.BURN_START = config.BURN_START_TESTNET_TESTCOIN
            config.BURN_END = config.BURN_END_TESTNET_TESTCOIN
            config.UNSPENDABLE = config.UNSPENDABLE_TESTNET
        else:
            config.ADDRESSVERSION = config.ADDRESSVERSION_TESTNET
            config.BLOCK_FIRST = config.BLOCK_FIRST_TESTNET
            config.BURN_START = config.BURN_START_TESTNET
            config.BURN_END = config.BURN_END_TESTNET
            config.UNSPENDABLE = config.UNSPENDABLE_TESTNET
    else:
        config.MAGIC_BYTES = config.MAGIC_BYTES_MAINNET
        if config.TESTCOIN:
            config.ADDRESSVERSION = config.ADDRESSVERSION_MAINNET
            config.BLOCK_FIRST = config.BLOCK_FIRST_MAINNET_TESTCOIN
            config.BURN_START = config.BURN_START_MAINNET_TESTCOIN
            config.BURN_END = config.BURN_END_MAINNET_TESTCOIN
            config.UNSPENDABLE = config.UNSPENDABLE_MAINNET
        else:
            config.ADDRESSVERSION = config.ADDRESSVERSION_MAINNET
            config.BLOCK_FIRST = config.BLOCK_FIRST_MAINNET
            config.BURN_START = config.BURN_START_MAINNET
            config.BURN_END = config.BURN_END_MAINNET
            config.UNSPENDABLE = config.UNSPENDABLE_MAINNET

    # Set up logging.
    root_logger = logging.getLogger()    # Get root logger.
    log.set_up(root_logger, verbose=verbose, logfile=config.LOG)
    # Log unhandled errors.
    def handle_exception(exc_type, exc_value, exc_traceback):
        logger.error("Unhandled Exception", exc_info=(exc_type, exc_value, exc_traceback))
    sys.excepthook = handle_exception

    logger.info('Running v{} of metronotes-lib.'.format(config.VERSION_STRING))

    if config.FORCE:
        logger.warning('THE OPTION `--force` IS NOT FOR USE ON PRODUCTION SYSTEMS.')

    # Lock
    if not config.FORCE:
        get_lock()

    # Database
    logger.info('Connecting to database.')
    db = database.get_connection(read_only=False)

    util.CURRENT_BLOCK_INDEX = blocks.last_db_index(db)

    return db
Example #37
def test_extend(db, abi, icon, nwcsaf, fake_df, ts, caplog, fakearea):
    # TODO: rewrite test with less mocking
    #
    # function is probably mocking too much, the test passes but it fails in
    # the real world because the preconditions before calling .extract are not
    # met
    import fogtools.isd
    db.sat = abi
    db.sat.load = unittest.mock.MagicMock()
    db.sat.load.return_value = _mk_fakescene_realarea(
            fakearea,
            datetime.datetime(1899, 12, 31, 23, 55),
            "raspberry", "banana")
    db.nwp = icon
    db.nwp.load = unittest.mock.MagicMock()
    db.nwp.load.return_value = _mk_fakescene_realarea(
            fakearea,
            datetime.datetime(1899, 12, 31, 23, 0),
            "apricot", "pineapple")
    db.cmic.load = unittest.mock.MagicMock()
    db.cmic.load.return_value = _mk_fakescene_realarea(
            fakearea,
            datetime.datetime(1899, 12, 31, 23, 55),
            "peach", "redcurrant")
    db.dem.load = unittest.mock.MagicMock()
    db.dem.load.return_value = _mk_fakescene_realarea(
            fakearea,
            None,
            "damson", "prune")
    db.fog.load = unittest.mock.MagicMock()
    db.fog.load.return_value = _mk_fakescene_realarea(
            fakearea,
            datetime.datetime(1899, 12, 31, 23, 55),
            "aubergine", "shallot")
    loc = fogtools.isd.get_db_location()
    loc.parent.mkdir(parents=True)
    fake_df.to_parquet(fogtools.isd.get_db_location())
    db.ground.load(ts)
    with caplog.at_level(logging.DEBUG):
        db.extend(ts)
        assert "Loading data for 1900-01-01 00:00:00" in caplog.text
        # one of the duplicates is outside of the tolerance, so it repeats from
        # 9 to 5 not from 10 to 5
        assert "Reducing from 9 to 5 to avoid repeated lat/lons" in caplog.text
        # assert "Extracting data for [fogdb component ABI]
        # 1900-01-01 00:00:00" in caplog.text
    assert sorted(db.data.columns) == [
            "apricot", "aubergine", "banana", "damson", "date_cmic",
            "date_dem", "date_fog", "date_nwp", "date_synop", "peach",
            "pineapple", "prune", "raspberry", "redcurrant", "shallot",
            "values"]
    assert db.data.shape == (5, 16)
    db.extend(ts)
    assert db.data.shape == (10, 16)
    # check that messages were logged where we expect them
    f = (pathlib.Path(appdirs.user_log_dir("fogtools")) /
         f"{datetime.datetime.now():%Y-%m-%d}" / "fogdb-19000101-0000.log")
    with open(f, mode="r", encoding="utf-8") as fp:
        text = fp.read()
        assert text.split("\n")[0].endswith(f"Opening logfile at {f!s}")
        assert "Loading data for" in text
    db.sat.load.side_effect = fogtools.db.FogDBError
    with pytest.raises(fogtools.db.FogDBError):
        db.extend(ts, onerror="raise")
    with caplog.at_level(logging.ERROR):
        db.extend(ts, onerror="log")
        assert "Failed to extend database with data from 1900" in caplog.text
    with pytest.raises(ValueError):
        db.extend(ts, onerror="semprini")
Example #38
def get_logs_path():
    return _get_path("paths", "logs", user_log_dir(appname, appauthor), True)
Example #39
import appdirs


def mkdir_if_not_there(path):
    try:
        os.makedirs(path)
    except OSError as exc:
        if exc.errno == errno.EEXIST:
            pass
        else:
            raise


program_name = 'consensobot'
org_name = 'noisebridge'
metadata = get_distribution(program_name).metadata

# establish default storage directories
data_dir = appdirs.user_data_dir(program_name, org_name)
corpus_dir = os.path.join(data_dir, 'corpus')
foolscap_dir = os.path.join(data_dir, 'foolscap')
run_dir = os.path.join(data_dir, 'run')  # for pidfiles
log_dir = appdirs.user_log_dir(program_name, org_name)

mkdir_if_not_there(data_dir)
mkdir_if_not_there(corpus_dir)
mkdir_if_not_there(foolscap_dir)
mkdir_if_not_there(run_dir)
mkdir_if_not_there(log_dir)
Example #40
# Logging configuration
FILE_FORMATTER = '[%(levelname)s][%(asctime)s][%(threadName)s][%(name)s]: %(message)s'

IGNORED_PATTERNS = [
    r'\.DS_Store',
    r'lost\+found',
    r'Desktop\.ini',
    r'~\$.*',
    r'.*\.tmp',
    r'\..*\.swp',
]

OSF_STORAGE_FOLDER = 'OSF Storage'
COMPONENTS_FOLDER = 'Components'

# Variables used to control where application config data is stored
PROJECT_DB_DIR = user_data_dir(appname=PROJECT_NAME, appauthor=PROJECT_AUTHOR)
PROJECT_DB_FILE = os.path.join(PROJECT_DB_DIR, 'osf.db')

PROJECT_LOG_DIR = user_log_dir(appname=PROJECT_NAME, appauthor=PROJECT_AUTHOR)
PROJECT_LOG_FILE = os.path.join(PROJECT_LOG_DIR, 'osfoffline.log')

EVENT_DEBOUNCE = 3

# updater
REPO = 'CenterForOpenScience/OSF-Sync'
VERSION = '0.4.4'
NAME = 'OSF-Offline'
MIN_VERSION_URL = 'https://raw.githubusercontent.com/CenterForOpenScience/OSF-Sync/develop/deploy/Offline-version.json'
OFFLINE_PROJECT_ON_OSF = 'https://osf.io/v2y6z/files/'
Example #41
# -*- coding: utf-8 -*-
"""orphan.data -- data files.
"""
import appdirs
from path import path

name = 'Orphan'
author = 'Worlds Enough Studios'

USER_DATA = path(appdirs.user_data_dir(name, author))
SITE_DATA = path(appdirs.site_data_dir(name, author))
CACHE_DATA = path(appdirs.user_cache_dir(name, author))
LOG_DATA = path(appdirs.user_log_dir(name, author))
APP_DATA = path(__file__).abspath().dirname()
Example #42
    fh = logging.FileHandler(os.path.join(CWD, 'pyu.log'))
    fh.setLevel(logging.DEBUG)
    fh.setFormatter(log_formatter())
    log.addHandler(fh)

fmt = logging.Formatter('[%(levelname)s] %(message)s')
sh = logging.StreamHandler()
sh.setFormatter(fmt)

# Used for Development
# sh.setLevel(logging.DEBUG)

sh.setLevel(logging.INFO)
log.addHandler(sh)

LOG_DIR = user_log_dir(settings.APP_NAME, settings.APP_AUTHOR)
log_file = os.path.join(LOG_DIR, settings.LOG_FILENAME_DEBUG)
rfh = logging.handlers.RotatingFileHandler(log_file, maxBytes=35000,
                                           backupCount=2)
rfh.setFormatter(log_formatter())
rfh.setLevel(logging.DEBUG)
log.addHandler(rfh)


def _build(args, pyi_args):
    check_repo()
    builder = Builder(args, pyi_args)
    builder.build()


# Get permission before deleting PyUpdater repo
Example #43
import wx.lib.agw.hyperlink as hl

from os import path, makedirs
from distutils.version import LooseVersion
from github3 import GitHub
from GUI import UploaderAppFrame, SettingsDialog
from appdirs import user_config_dir, user_log_dir
from wx.lib.pubsub import pub

path_to_module = path.dirname(__file__)
app_config = path.join(path_to_module, 'irida-uploader.cfg')

if not path.isfile(app_config):
    app_config = path.join(path_to_module, '..', 'irida-uploader.cfg')

if not path.exists(user_log_dir("iridaUploader")):
    makedirs(user_log_dir("iridaUploader"))

log_format = '%(asctime)s %(levelname)s\t%(filename)s:%(funcName)s:%(lineno)d - %(message)s'

# if any logging gets called before `basicConfig`, our attempts to configure the
# logging here will be clobbered. This removes any existing handlers that might
# have been set up when some other log message was printed, so that we can
# actually configure the logging the way we want.
logging.getLogger().handlers = []
logging.basicConfig(level=logging.DEBUG,
                    filename=path.join(user_log_dir("iridaUploader"), 'irida-uploader.log'),
                    format=log_format,
                    filemode='w')

console = logging.StreamHandler()
Example #44
"""Constants used by OpsDroid."""
import os
from appdirs import user_log_dir, user_config_dir, user_data_dir
import opsdroid
from opsdroid import __version__  # noqa # pylint: disable=unused-import

NAME = "opsdroid"
MODULE_ROOT = os.path.dirname(os.path.abspath(opsdroid.__file__))
DEFAULT_GIT_URL = "https://github.com/opsdroid/"
MODULES_DIRECTORY = "opsdroid_modules"
DEFAULT_ROOT_PATH = user_data_dir(NAME)
DEFAULT_LOG_FILENAME = os.path.join(user_log_dir(NAME, appauthor=False),
                                    "output.log")
DEFAULT_MODULES_PATH = user_data_dir(NAME, MODULES_DIRECTORY)
DEFAULT_MODULE_DEPS_PATH = os.path.join(user_data_dir(NAME, MODULES_DIRECTORY),
                                        "site-packages")
DEFAULT_CONFIG_PATH = os.path.join(user_config_dir(NAME, appauthor=False),
                                   "configuration.yaml")
DEFAULT_CONFIG_LOCATIONS = [
    "configuration.yaml",
    DEFAULT_CONFIG_PATH,
    "/etc/opsdroid/configuration.yaml",
]
DEFAULT_MODULE_BRANCH = "master"
DEFAULT_LANGUAGE = "en"
LOCALE_DIR = os.path.join(MODULE_ROOT, "locale")
EXAMPLE_CONFIG_FILE = os.path.join(
    os.path.dirname(os.path.abspath(__file__)),
    "configuration/example_configuration.yaml",
)
REGEX_PARSE_SCORE_FACTOR = 0.6
Example #45
def main(ctx, log_level, pdb=False):
    """A client to support interactions with DANDI archive (http://dandiarchive.org).

    To see help for a specific command, run

        dandi COMMAND --help

    e.g. dandi upload --help
    """
    logging.basicConfig(format="%(asctime)-15s [%(levelname)8s] %(message)s")
    set_logger_level(get_logger(), log_level)

    # Ensure that certain log messages are only sent to the log file, not the
    # console:
    root = logging.getLogger()
    for h in root.handlers:
        h.addFilter(lambda r: not getattr(r, "file_only", False))

    logdir = appdirs.user_log_dir("dandi-cli", "dandi")
    logfile = os.path.join(
        logdir, "{:%Y%m%d%H%M%SZ}-{}.log".format(datetime.utcnow(),
                                                 os.getpid()))
    os.makedirs(logdir, exist_ok=True)
    handler = logging.FileHandler(logfile, encoding="utf-8")
    fmter = logging.Formatter(
        fmt="%(asctime)s [%(levelname)-8s] %(name)s %(process)d:%(thread)d %(message)s",
        datefmt="%Y-%m-%dT%H:%M:%S%z",
    )
    handler.setFormatter(fmter)
    root.addHandler(handler)

    lgr.info(
        "dandi v%s, hdmf v%s, pynwb v%s, h5py v%s",
        __version__,
        get_module_version("hdmf"),
        get_module_version("pynwb"),
        get_module_version("h5py"),
        extra={"file_only": True},
    )
    lgr.info("sys.argv = %r", sys.argv, extra={"file_only": True})
    lgr.info("os.getcwd() = %s", os.getcwd(), extra={"file_only": True})

    ctx.obj = SimpleNamespace(logfile=logfile)

    if pdb:
        map_to_click_exceptions._do_map = False
        from ..utils import setup_exceptionhook

        setup_exceptionhook()
    try:
        import etelemetry

        etelemetry.check_available_version("dandi/dandi-cli",
                                           __version__,
                                           lgr=lgr)
    except Exception as exc:
        lgr.warning(
            "Failed to check for a more recent version available with etelemetry: %s",
            exc,
        )
Example #46
File: log.py Project: aelred/nab
import logging
import logging.handlers
import appdirs
import os

log_dir = appdirs.user_log_dir('nab')
log_file = os.path.join(log_dir, 'log.txt')

def _init():
    log = logging.getLogger("nab")
    log.setLevel(logging.DEBUG)
    log.propagate = False
    formatter = logging.Formatter('%(asctime)s: %(levelname)s:\t'
                                  '%(name)s:\t%(message)s')

    # create log directory
    try:
        os.makedirs(log_dir)
    except OSError:
        pass

    file_handler = logging.handlers.RotatingFileHandler(
        log_file, maxBytes=1024*1024, backupCount=5)
    file_handler.setFormatter(formatter)
    log.addHandler(file_handler)

    stream_handler = logging.StreamHandler()
    stream_handler.setLevel(logging.INFO)
    stream_handler.setFormatter(formatter)
    log.addHandler(stream_handler)
    return log
Example #47
def get_file_path():
    path = appdirs.user_log_dir('FadedBotApp', 'Hen676')
    if not os.path.isdir(path):
        os.makedirs(path)
    return path
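
For context, a hypothetical caller might combine the returned directory with a file name (bot.log and the logger name here are illustrative, not from the source):

import logging
import os

handler = logging.FileHandler(os.path.join(get_file_path(), 'bot.log'))
logging.getLogger('FadedBotApp').addHandler(handler)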