def configure_logging(config, log_name='transcribersofreddit.log'):
    """Configure root logging for the bot.

    Sets up INFO-level logging to *log_name*, mirrors records to the
    console, and forwards ERROR-and-above records to Bugsnag and/or
    Sentry when the corresponding credentials are present on *config*.

    :param config: object with ``bugsnag_api_key`` and ``sentry_api_url``
        attributes (either may be falsy to disable that integration).
    :param log_name: path of the log file written by ``basicConfig``.
    """
    logging.basicConfig(
        level=logging.INFO,
        format='[%(asctime)s] - [%(levelname)s] - [%(funcName)s] - %(message)s',
        datefmt='%m/%d/%Y %I:%M:%S %p',
        filename=log_name)

    # Console output uses a shorter format than the log file.
    console = logging.StreamHandler()
    console.setLevel(logging.INFO)
    formatter = logging.Formatter(
        '[%(asctime)s] - [%(funcName)s] - %(message)s')
    # tell the handler to use this format
    console.setFormatter(formatter)
    # add the handlers to the root logger
    logging.getLogger('').addHandler(console)

    # will intercept anything error level or above
    if config.bugsnag_api_key:
        bs_handler = BugsnagHandler()
        bs_handler.setLevel(logging.ERROR)
        logging.getLogger('').addHandler(bs_handler)
        logging.info('Bugsnag enabled!')
    else:
        logging.info('Not running with Bugsnag!')

    if config.sentry_api_url:
        sentry_handler = SentryHandler(Client(config.sentry_api_url))
        sentry_handler.setLevel(logging.ERROR)
        # I don't know what this line does but it seems required by raven
        setup_logging(sentry_handler)
        logging.getLogger('').addHandler(sentry_handler)
        logging.info('Sentry enabled!')
    else:
        logging.info('Not running with Sentry!')

    log_header('Starting!')
def main():
    """Configure logging for the file monitor and run the Application.

    Fix: the original logged 'Started'/'Finished' through a name
    ``logger`` that is not defined anywhere in this function, while the
    logger it carefully configures is ``monitor_logger``. Use the
    configured logger so the messages reach the handlers set up here.
    """
    # set up logging
    monitor_logger = logging.getLogger('rfi_file_monitor')
    monitor_logger.setLevel(logging.DEBUG)
    log_fmt_long = logging.Formatter(
        fmt='%(asctime)s %(name)s %(levelname)s: %(message)s',
        datefmt='%Y-%m-%d %H:%M:%S')

    # log to stdout
    log_handler_stream = logging.StreamHandler(sys.stdout)
    log_handler_stream.setFormatter(log_fmt_long)
    log_handler_stream.setLevel(logging.DEBUG)
    monitor_logger.addHandler(log_handler_stream)

    # log to bugsnag
    log_handler_bugsnag = BugsnagHandler()
    log_handler_bugsnag.setLevel(logging.INFO)
    monitor_logger.addHandler(log_handler_bugsnag)

    monitor_logger.info('Started')
    app = Application()
    rv = app.run(sys.argv)
    monitor_logger.info('Finished')
    sys.exit(rv)
def main():
    from .application import Application

    # Dedicated logger for the downloader; capture everything at DEBUG
    # and let the individual handlers filter.
    app_logger = logging.getLogger("rfi_downloader")
    app_logger.setLevel(logging.DEBUG)

    verbose_format = logging.Formatter(
        fmt="%(asctime)s %(name)s %(levelname)s: %(message)s",
        datefmt="%Y-%m-%d %H:%M:%S",
    )

    # Mirror every record to stdout.
    stdout_handler = logging.StreamHandler(sys.stdout)
    stdout_handler.setFormatter(verbose_format)
    stdout_handler.setLevel(logging.DEBUG)
    app_logger.addHandler(stdout_handler)

    # Forward warnings and above to Bugsnag.
    bugsnag_handler = BugsnagHandler()
    bugsnag_handler.setLevel(logging.WARNING)
    app_logger.addHandler(bugsnag_handler)

    exit_code = Application().run(sys.argv)
    sys.exit(exit_code)
def configure_logging(config, log_name='transcribersofreddit.log'):
    """Configure root logging and optional Bugsnag / Sentry forwarding.

    NOTE(review): ``log_name`` is currently unused in this body --
    confirm whether file logging was dropped intentionally.
    """
    logging.basicConfig(
        level=logging.INFO,
        format='%(levelname)s | %(funcName)s | %(message)s',
        datefmt='%Y-%m-%dT%H:%M:%S',
    )

    root_logger = logging.getLogger('')

    # will intercept anything error level or above
    if not config.bugsnag_api_key:
        logging.info('Not running with Bugsnag!')
    else:
        bugsnag_handler = BugsnagHandler()
        bugsnag_handler.setLevel(logging.ERROR)
        root_logger.addHandler(bugsnag_handler)
        logging.info('Bugsnag enabled!')

    if not config.sentry_api_url:
        logging.info('Not running with Sentry!')
    else:
        raven_handler = SentryHandler(Client(config.sentry_api_url))
        raven_handler.setLevel(logging.ERROR)
        # I don't know what this line does but it seems required by raven
        setup_logging(raven_handler)
        root_logger.addHandler(raven_handler)
        logging.info('Sentry enabled!')

    log_header('Starting!')
def configure_logging(config):
    """Configure root file + console logging with optional Bugsnag.

    Fix: the original tested ``config.bs_api_key`` twice -- once to
    attach the handler and again, immediately after, just to pick the
    log message. The two identical checks are merged into one branch.

    :param config: object with a ``bs_api_key`` attribute; a falsy key
        disables Bugsnag reporting.
    """
    logging.basicConfig(
        level=logging.INFO,
        format='[%(asctime)s] - [%(levelname)s] - [%(funcName)s] - %(message)s',
        datefmt='%m/%d/%Y %I:%M:%S %p',
        filename='transcribersofreddit.log'
    )
    console = logging.StreamHandler()
    console.setLevel(logging.INFO)
    formatter = logging.Formatter('[%(asctime)s] - [%(funcName)s] - %(message)s')
    # tell the handler to use this format
    console.setFormatter(formatter)
    # add the handlers to the root logger
    logging.getLogger('').addHandler(console)

    if config.bs_api_key:
        # will intercept anything error level or above
        bs_handler = BugsnagHandler()
        bs_handler.setLevel(logging.ERROR)
        logging.getLogger('').addHandler(bs_handler)
        logging.info('Bugsnag enabled!')
    else:
        logging.info('Not running with Bugsnag!')

    log_header('Starting!')
def configure_bugsnag_error_monitoring(bugsnag_key, bugsnag_release_stage):
    """Configure the Bugsnag client and attach an ERROR-level handler."""
    bugsnag.configure(
        api_key=bugsnag_key,
        project_root="./",
        notify_release_stages=["production", "staging"],
        release_stage=bugsnag_release_stage,
    )
    handler = BugsnagHandler()
    handler.setLevel(logging.ERROR)
    add_handler_to_logger(handler)
def init_bugsnag(app_version: str):
    """Configure Bugsnag for this app version and return an ERROR-level handler."""
    bugsnag.configure(
        api_key=BUGSNAG_ID,
        project_root=Path(__file__).parent / 'pingou',
        notify_release_stages=['production'],
        release_stage=RELEASE_STAGE,
        app_version=app_version,
    )
    handler = BugsnagHandler()
    handler.setLevel(logging.ERROR)
    return handler
def configure_logging(cfg, log_name='transcribersofreddit.log'):
    """Configure root logging and optional Bugsnag error forwarding.

    NOTE(review): ``log_name`` is currently unused in this body --
    confirm whether file logging was dropped intentionally.
    """
    logging.basicConfig(
        level=logging.INFO,
        format='%(levelname)s | %(funcName)s | %(message)s',
        datefmt='%Y-%m-%dT%H:%M:%S',
    )

    if not cfg.bugsnag_api_key:
        logging.info('Not running with Bugsnag!')
    else:
        # will intercept anything error level or above
        handler = BugsnagHandler()
        handler.setLevel(logging.ERROR)
        logging.getLogger('').addHandler(handler)
        logging.info('Bugsnag enabled!')

    log_header('Starting!')
def _setup_logging(self):
    # Wire up all handlers for the 'maestral' logger: a rotating log
    # file, the systemd journal when available, stdout (disabled by
    # default), in-memory caches for GUI/CLI status display, and
    # Bugsnag (disabled by default).
    log_level = self._conf.get("app", "log_level")
    mdbx_logger = logging.getLogger("maestral")
    mdbx_logger.setLevel(logging.DEBUG)

    log_fmt_long = logging.Formatter(
        fmt="%(asctime)s %(name)s %(levelname)s: %(message)s",
        datefmt="%Y-%m-%d %H:%M:%S")
    log_fmt_short = logging.Formatter(fmt="%(message)s")

    # log to file
    rfh_log_file = get_log_path("maestral", self._config_name + ".log")
    self._log_handler_file = logging.handlers.RotatingFileHandler(
        rfh_log_file, maxBytes=10**7, backupCount=1)
    self._log_handler_file.setFormatter(log_fmt_long)
    self._log_handler_file.setLevel(log_level)
    mdbx_logger.addHandler(self._log_handler_file)

    # log to journal when launched from systemd
    if INVOCATION_ID and journal:
        self._log_handler_journal = journal.JournalHandler()
        self._log_handler_journal.setFormatter(log_fmt_short)
        mdbx_logger.addHandler(self._log_handler_journal)

    # log to stdout (disabled by default)
    # level 100 is above CRITICAL (50): the handler stays silent until
    # its level is lowered elsewhere
    self._log_handler_stream = logging.StreamHandler(sys.stdout)
    self._log_handler_stream.setFormatter(log_fmt_long)
    self._log_handler_stream.setLevel(100)
    mdbx_logger.addHandler(self._log_handler_stream)

    # log to cached handlers for GUI and CLI
    self._log_handler_info_cache = CachedHandler(maxlen=1)
    self._log_handler_info_cache.setLevel(logging.INFO)
    self._log_handler_info_cache.setFormatter(log_fmt_short)
    mdbx_logger.addHandler(self._log_handler_info_cache)

    self._log_handler_error_cache = CachedHandler()
    self._log_handler_error_cache.setLevel(logging.ERROR)
    self._log_handler_error_cache.setFormatter(log_fmt_short)
    mdbx_logger.addHandler(self._log_handler_error_cache)

    # log to bugsnag (disabled by default)
    self._log_handler_bugsnag = BugsnagHandler()
    self._log_handler_bugsnag.setLevel(100)
    mdbx_logger.addHandler(self._log_handler_bugsnag)
def test_levelname_message(self):
    """A record whose levelname is nulled by a filter still reports,
    falling back to the 'LogMessage' error class."""
    handler = BugsnagHandler()
    logger = logging.getLogger(__name__)
    logger.addHandler(handler)

    class MessageFilter(logging.Filter):
        def filter(self, record):
            record.levelname = None
            return True

    handler.addFilter(MessageFilter())
    logger.info('The system is down')
    logger.removeHandler(handler)

    self.assertSentReportCount(1)
    payload = self.server.received[0]['json_body']
    first_exception = payload['events'][0]['exceptions'][0]
    self.assertEqual('LogMessage', first_exception['errorClass'])
def _config_bugsnag(self):
    """Attach Bugsnag error reporting unless testing or no API key is set."""
    # Configure Bugsnag
    testing = self.config.get('TESTING')
    api_key = self.config.get('BUGSNAG_API_KEY')
    if testing or not api_key:
        self.log.info('Bugsnag NOT configured.')
        return

    import bugsnag
    from bugsnag.flask import handle_exceptions
    from bugsnag.handlers import BugsnagHandler

    bugsnag.configure(
        api_key=self.config['BUGSNAG_API_KEY'],
        project_root="/data/git/pillar/pillar",
    )
    handle_exceptions(self)

    handler = BugsnagHandler()
    handler.setLevel(logging.ERROR)
    self.log.addHandler(handler)
def configure_logging(cfg: Config, log_name="transcribersofreddit.log") -> None:
    """Configure root file logging and optional Bugsnag forwarding."""
    # Set formatting and logging level.
    logging.basicConfig(
        level=logging.INFO,
        format="%(asctime)s | %(levelname)s | %(funcName)s | %(message)s",
        filename=log_name,
    )

    if not cfg.bugsnag_api_key:
        log.info("No Bugsnag API Key found. Not running with Bugsnag!")
    else:
        # will intercept anything error level or above
        handler = BugsnagHandler()
        handler.setLevel(logging.ERROR)
        logging.getLogger().addHandler(handler)
        log.info("Bugsnag is successfully enabled!")

    banner = "*" * 50
    log.info(banner)
    log.info("Logging configured. Starting program!")
    log.info(banner)
def _initialize(self):
    """Configure bugsnag, apply the dict logging config, silence noisy
    loggers, and attach a BugsnagHandler to 'antbs' exactly once."""
    bugsnag.configure(api_key=self._bugsnag_key, project_root=self._app_dir)
    logging.config.dictConfig(self.get_logging_config())

    logger = logging.getLogger('antbs')

    for noisy_name in self._noisy_loggers:
        logging.getLogger(noisy_name).setLevel(logging.ERROR)

    # Avoid stacking duplicate Bugsnag handlers on repeated calls.
    already_attached = any(
        isinstance(handler, BugsnagHandler) for handler in logger.handlers
    )
    if not already_attached:
        bugsnag_handler = BugsnagHandler()
        bugsnag_handler.setLevel(logging.WARNING)
        logger.addHandler(bugsnag_handler)

    self.logger = logger
def test_custom_level(self):
    """Logging at an unnamed numeric level reports 'LogLevel <n>'."""
    handler = BugsnagHandler()
    logger = logging.getLogger(__name__)
    logger.addHandler(handler)
    logger.log(341, 'The system is down')
    logger.removeHandler(handler)

    self.assertSentReportCount(1)
    payload = self.server.received[0]['json_body']
    first_exception = payload['events'][0]['exceptions'][0]
    self.assertEqual('LogLevel 341', first_exception['errorClass'])
def test_message(self):
    """The log message becomes the reported exception message."""
    handler = BugsnagHandler()
    logger = logging.getLogger(__name__)
    logger.addHandler(handler)
    logger.critical('The system is down')
    logger.removeHandler(handler)

    self.assertSentReportCount(1)
    payload = self.server.received[0]['json_body']
    first_exception = payload['events'][0]['exceptions'][0]
    self.assertEqual('The system is down', first_exception['message'])
def test_extra_fields(self):
    """Only keys listed in extra_fields are copied into metaData."""
    handler = BugsnagHandler(extra_fields={'fruit': ['grapes', 'pears']})
    logger = logging.getLogger(__name__)
    logger.addHandler(handler)
    logger.error(
        'A wild tomato appeared',
        extra={'grapes': 8, 'pears': 2, 'tomatoes': 1},
    )
    logger.removeHandler(handler)

    event = self.server.received[0]['json_body']['events'][0]
    # 'tomatoes' is not whitelisted under 'fruit', so it must be absent.
    self.assertEqual(event['metaData']['fruit'], {'grapes': 8, 'pears': 2})
def test_exc_info(self):
    """logger.exception attaches the active exception to the report."""
    handler = BugsnagHandler()
    logger = logging.getLogger(__name__)
    logger.addHandler(handler)
    try:
        raise ScaryException('Oh no')
    except Exception:
        logger.exception('The system is down')
    logger.removeHandler(handler)

    self.assertSentReportCount(1)
    payload = self.server.received[0]['json_body']
    first_exception = payload['events'][0]['exceptions'][0]
    self.assertEqual(first_exception['errorClass'], 'tests.utils.ScaryException')
def test_severity_warning(self):
    """WARNING records map to 'warning' severity and class 'LogWARNING'."""
    handler = BugsnagHandler()
    logger = logging.getLogger(__name__)
    logger.addHandler(handler)
    logger.warning('The system is down')
    logger.removeHandler(handler)

    self.assertSentReportCount(1)
    event = self.server.received[0]['json_body']['events'][0]
    first_exception = event['exceptions'][0]
    extra = event['metaData']['extra data']
    self.assertEqual('LogWARNING', first_exception['errorClass'])
    self.assertEqual('warning', event['severity'])
    self.assertEqual(logging.WARNING, extra['levelno'])
    self.assertEqual('WARNING', extra['levelname'])
def test_severity_critical(self):
    """CRITICAL records map to 'error' severity and class 'LogCRITICAL'."""
    handler = BugsnagHandler()
    logger = logging.getLogger(__name__)
    logger.addHandler(handler)
    logger.critical('The system is down')
    logger.removeHandler(handler)

    self.assertSentReportCount(1)
    event = self.server.received[0]['json_body']['events'][0]
    first_exception = event['exceptions'][0]
    extra = event['metaData']['extra data']
    self.assertEqual('LogCRITICAL', first_exception['errorClass'])
    self.assertEqual('error', event['severity'])
    self.assertEqual(logging.CRITICAL, extra['levelno'])
    self.assertEqual(u('CRITICAL'), extra['levelname'])
def test_extra_fields(self):
    """Record attributes injected by a filter are gathered into metaData
    according to the extra_fields whitelist."""
    class FruitFilter(logging.Filter):
        def filter(self, record):
            record.grapes = 8
            record.pears = 2
            record.apricots = 90
            return True

    handler = BugsnagHandler(api_key='new news',
                             extra_fields={'fruit': ['grapes', 'pears']})
    logger = logging.getLogger(__name__)
    logger.addHandler(handler)
    logger.addFilter(FruitFilter())
    logger.error('A wild tomato appeared')
    logger.removeHandler(handler)

    event = self.server.received[0]['json_body']['events'][0]
    # 'apricots' is not whitelisted under 'fruit', so it must be absent.
    self.assertEqual(event['metaData']['fruit'], {'grapes': 8, 'pears': 2})
def setup_logging():
    """ Configure our logger

    Sets up the root logger with a file handler, an optional stdout
    handler, and either a Bugsnag or a TCP socket handler depending on
    ``cmd_line.log_server``.

    Fix: the socket branch called ``logging.Formatter(formatter)``,
    passing an existing Formatter *instance* where a format string is
    expected; formatting a record through that would fail at runtime.
    The existing ``formatter`` is now attached directly.
    """
    logger = logging.getLogger()
    logger.handlers = []

    if cmd_line.debug:
        log_level = logging.DEBUG
    else:
        log_level = logging.INFO

    logger.setLevel(log_level)

    context_filter = ContextFilter()
    logger.addFilter(context_filter.filter)

    # Log format
    formatter = logging.Formatter(
        fmt="%(asctime)s [%(levelname)s] %(filename)s(%(lineno)d) %(funcName)s(): %(message)s",
        datefmt="%Y-%m-%d %H:%M:%S")

    # File logger
    try:
        file_handler = logging.FileHandler('/tmp/cnchi.log', mode='w')
        file_handler.setLevel(log_level)
        file_handler.setFormatter(formatter)
        logger.addHandler(file_handler)
    except PermissionError as permission_error:
        print("Can't open /tmp/cnchi.log : ", permission_error)

    # Stdout logger
    if cmd_line.verbose:
        # Show log messages to stdout
        stream_handler = logging.StreamHandler()
        stream_handler.setLevel(log_level)
        stream_handler.setFormatter(formatter)
        logger.addHandler(stream_handler)

    if cmd_line.log_server:
        log_server = cmd_line.log_server

        if log_server == 'bugsnag':
            if not BUGSNAG_ERROR:
                # Bugsnag logger
                bugsnag_api = context_filter.api_key
                if bugsnag_api is not None:
                    bugsnag.configure(
                        api_key=bugsnag_api,
                        app_version=info.CNCHI_VERSION,
                        project_root='/usr/share/cnchi/cnchi',
                        release_stage=info.CNCHI_RELEASE_STAGE)
                    bugsnag_handler = BugsnagHandler(api_key=bugsnag_api)
                    bugsnag_handler.setLevel(logging.WARNING)
                    bugsnag_handler.setFormatter(formatter)
                    bugsnag_handler.addFilter(context_filter.filter)
                    bugsnag.before_notify(
                        context_filter.bugsnag_before_notify_callback)
                    logger.addHandler(bugsnag_handler)
                    logging.info(
                        "Sending Cnchi log messages to bugsnag server (using python-bugsnag).")
                else:
                    logging.warning(
                        "Cannot read the bugsnag api key, logging to bugsnag is not possible.")
            else:
                logging.warning(BUGSNAG_ERROR)
        else:
            # Socket logger
            socket_handler = logging.handlers.SocketHandler(
                log_server, logging.handlers.DEFAULT_TCP_LOGGING_PORT)
            # Reuse the shared formatter directly (see docstring fix note).
            socket_handler.setFormatter(formatter)
            logger.addHandler(socket_handler)

            # Also add uuid filter to requests logs
            logger_requests = logging.getLogger(
                "requests.packages.urllib3.connectionpool")
            logger_requests.addFilter(context_filter.filter)

            uid = str(uuid.uuid1()).split("-")
            myuid = uid[3] + "-" + uid[1] + "-" + uid[2] + "-" + uid[4]
            logging.info("Sending Cnchi logs to {0} with id '{1}'".format(
                log_server, myuid))
def setup_logging():
    """ Configure our logger

    Sets up the root logger with a file handler, an optional stdout
    handler, and either a Bugsnag or a TCP socket handler depending on
    ``cmd_line.log_server``.

    Fix: the socket branch called ``logging.Formatter(formatter)``,
    passing an existing Formatter *instance* where a format string is
    expected; formatting a record through that would fail at runtime.
    The existing ``formatter`` is now attached directly.
    """
    logger = logging.getLogger()
    logger.handlers = []

    if cmd_line.debug:
        log_level = logging.DEBUG
    else:
        log_level = logging.INFO

    logger.setLevel(log_level)

    context_filter = ContextFilter()
    logger.addFilter(context_filter.filter)

    # Log format
    formatter = logging.Formatter(
        fmt="%(asctime)s [%(levelname)s] %(filename)s(%(lineno)d) %(funcName)s(): %(message)s",
        datefmt="%Y-%m-%d %H:%M:%S",
    )

    # File logger
    try:
        file_handler = logging.FileHandler("/tmp/DSGos_Installer.log", mode="w")
        file_handler.setLevel(log_level)
        file_handler.setFormatter(formatter)
        logger.addHandler(file_handler)
    except PermissionError as permission_error:
        print("Can't open /tmp/DSGos_Installer.log : ", permission_error)

    # Stdout logger
    if cmd_line.verbose:
        # Show log messages to stdout
        stream_handler = logging.StreamHandler()
        stream_handler.setLevel(log_level)
        stream_handler.setFormatter(formatter)
        logger.addHandler(stream_handler)

    if cmd_line.log_server:
        log_server = cmd_line.log_server

        if log_server == "bugsnag":
            if not BUGSNAG_ERROR:
                # Bugsnag logger
                bugsnag_api = context_filter.api_key
                if bugsnag_api is not None:
                    bugsnag.configure(
                        api_key=bugsnag_api,
                        app_version=info.DSGos_Installer_VERSION,
                        project_root="/usr/share/DSGos-Installer",
                        release_stage=info.DSGos_Installer_RELEASE_STAGE,
                    )
                    bugsnag_handler = BugsnagHandler(api_key=bugsnag_api)
                    bugsnag_handler.setLevel(logging.WARNING)
                    bugsnag_handler.setFormatter(formatter)
                    bugsnag_handler.addFilter(context_filter.filter)
                    bugsnag.before_notify(context_filter.bugsnag_before_notify_callback)
                    logger.addHandler(bugsnag_handler)
                    logging.info("Sending DSGos_Installer log messages to bugsnag server (using python-bugsnag).")
                else:
                    logging.warning("Cannot read the bugsnag api key, logging to bugsnag is not possible.")
            else:
                logging.warning(BUGSNAG_ERROR)
        else:
            # Socket logger
            socket_handler = logging.handlers.SocketHandler(log_server, logging.handlers.DEFAULT_TCP_LOGGING_PORT)
            # Reuse the shared formatter directly (see docstring fix note).
            socket_handler.setFormatter(formatter)
            logger.addHandler(socket_handler)

            # Also add uuid filter to requests logs
            logger_requests = logging.getLogger("requests.packages.urllib3.connectionpool")
            logger_requests.addFilter(context_filter.filter)

            uid = str(uuid.uuid1()).split("-")
            myuid = uid[3] + "-" + uid[1] + "-" + uid[2] + "-" + uid[4]
            logging.info("Sending DSGos_Installer logs to {0} with id '{1}'".format(log_server, myuid))
def _setup_logging(self):
    """
    Sets up logging to log files, status and error properties, desktop
    notifications, the systemd journal if available, bugsnag if error
    reports are enabled, and to stdout if requested.
    """

    log_level = self._conf.get('app', 'log_level')
    mdbx_logger = logging.getLogger('maestral')
    mdbx_logger.setLevel(logging.DEBUG)

    log_fmt_long = logging.Formatter(
        fmt='%(asctime)s %(name)s %(levelname)s: %(message)s',
        datefmt='%Y-%m-%d %H:%M:%S')
    log_fmt_short = logging.Formatter(fmt='%(message)s')

    # log to file
    rfh_log_file = get_log_path('maestral', self._config_name + '.log')
    self.log_handler_file = logging.handlers.RotatingFileHandler(
        rfh_log_file, maxBytes=10**7, backupCount=1)
    self.log_handler_file.setFormatter(log_fmt_long)
    self.log_handler_file.setLevel(log_level)
    mdbx_logger.addHandler(self.log_handler_file)

    # log to journal when launched from systemd
    if INVOCATION_ID and journal:
        self.log_handler_journal = journal.JournalHandler()
        self.log_handler_journal.setFormatter(log_fmt_short)
        self.log_handler_journal.setLevel(log_level)
        mdbx_logger.addHandler(self.log_handler_journal)
    else:
        self.log_handler_journal = None

    # send systemd notifications when started as 'notify' daemon
    if NOTIFY_SOCKET:
        self.log_handler_sd = SdNotificationHandler()
        self.log_handler_sd.setFormatter(log_fmt_short)
        self.log_handler_sd.setLevel(logging.INFO)
        mdbx_logger.addHandler(self.log_handler_sd)
    else:
        self.log_handler_sd = None

    # log to stdout (disabled by default)
    # level 100 is above CRITICAL (50): the handler stays silent until
    # its level is lowered elsewhere
    level = log_level if self._log_to_stdout else 100
    self.log_handler_stream = logging.StreamHandler(sys.stdout)
    self.log_handler_stream.setFormatter(log_fmt_long)
    self.log_handler_stream.setLevel(level)
    mdbx_logger.addHandler(self.log_handler_stream)

    # log to cached handlers for GUI and CLI
    self._log_handler_info_cache = CachedHandler(maxlen=1)
    self._log_handler_info_cache.setFormatter(log_fmt_short)
    self._log_handler_info_cache.setLevel(logging.INFO)
    mdbx_logger.addHandler(self._log_handler_info_cache)

    self._log_handler_error_cache = CachedHandler()
    self._log_handler_error_cache.setFormatter(log_fmt_short)
    self._log_handler_error_cache.setLevel(logging.ERROR)
    mdbx_logger.addHandler(self._log_handler_error_cache)

    # log to desktop notifications
    # 'file changed' events will be collated and sent as desktop
    # notifications by the monitor directly, we don't handle them here
    self.desktop_notifier = MaestralDesktopNotifier.for_config(
        self.config_name)
    self.desktop_notifier.setLevel(logging.WARNING)
    mdbx_logger.addHandler(self.desktop_notifier)

    # log to bugsnag (disabled by default; level 100 mutes it --
    # presumably re-enabled via the 'analytics' setting read below;
    # confirm against the analytics property setter)
    self._log_handler_bugsnag = BugsnagHandler()
    self._log_handler_bugsnag.setLevel(100)
    mdbx_logger.addHandler(self._log_handler_bugsnag)

    self.analytics = self._conf.get('app', 'analytics')
class Maestral(object): """An open source Dropbox client for macOS and Linux. All methods and properties return objects or raise exceptions which can safely be serialized, i.e., pure Python types. The only exception are MaestralApiErrors which need to be registered explicitly with the serpent serializer used by Pyro5 in order to be transmitted to a frontend. :param str config_name: Name of maestral configuration to run. This will create a new configuration file if none exists. :param bool run: If ``True``, Maestral will start syncing immediately. Defaults to ``True``. """ def __init__(self, config_name='maestral', run=True, log_to_stdout=False): self._daemon_running = True self._log_to_stdout = log_to_stdout self._config_name = config_name self._conf = MaestralConfig(self._config_name) self._state = MaestralState(self._config_name) self._setup_logging() self.client = MaestralApiClient(self._config_name) self.monitor = MaestralMonitor(self.client) self.sync = self.monitor.sync # periodically check for updates and refresh account info self.update_thread = Thread( name='maestral-update-check', target=self._periodic_refresh, daemon=True, ) self.update_thread.start() if run: self.run() def run(self): """ Runs setup if necessary, starts syncing, and starts systemd notifications if run as a systemd notify service. 
""" if self.pending_dropbox_folder: self.monitor.reset_sync_state() self.create_dropbox_directory() # start syncing self.start_sync() if NOTIFY_SOCKET: # notify systemd that we have started logger.debug('Running as systemd notify service') logger.debug('NOTIFY_SOCKET = %s', NOTIFY_SOCKET) sd_notifier.notify('READY=1') if IS_WATCHDOG: # notify systemd periodically if alive logger.debug('Running as systemd watchdog service') logger.debug('WATCHDOG_USEC = %s', WATCHDOG_USEC) logger.debug('WATCHDOG_PID = %s', WATCHDOG_PID) self.watchdog_thread = Thread( name='maestral-watchdog', target=self._periodic_watchdog, daemon=True, ) self.watchdog_thread.start() def _setup_logging(self): """ Sets up logging to log files, status and error properties, desktop notifications, the systemd journal if available, bugsnag if error reports are enabled, and to stdout if requested. """ log_level = self._conf.get('app', 'log_level') mdbx_logger = logging.getLogger('maestral') mdbx_logger.setLevel(logging.DEBUG) log_fmt_long = logging.Formatter( fmt='%(asctime)s %(name)s %(levelname)s: %(message)s', datefmt='%Y-%m-%d %H:%M:%S') log_fmt_short = logging.Formatter(fmt='%(message)s') # log to file rfh_log_file = get_log_path('maestral', self._config_name + '.log') self.log_handler_file = logging.handlers.RotatingFileHandler( rfh_log_file, maxBytes=10**7, backupCount=1) self.log_handler_file.setFormatter(log_fmt_long) self.log_handler_file.setLevel(log_level) mdbx_logger.addHandler(self.log_handler_file) # log to journal when launched from systemd if INVOCATION_ID and journal: self.log_handler_journal = journal.JournalHandler() self.log_handler_journal.setFormatter(log_fmt_short) self.log_handler_journal.setLevel(log_level) mdbx_logger.addHandler(self.log_handler_journal) else: self.log_handler_journal = None # send systemd notifications when started as 'notify' daemon if NOTIFY_SOCKET: self.log_handler_sd = SdNotificationHandler() self.log_handler_sd.setFormatter(log_fmt_short) 
self.log_handler_sd.setLevel(logging.INFO) mdbx_logger.addHandler(self.log_handler_sd) else: self.log_handler_sd = None # log to stdout (disabled by default) level = log_level if self._log_to_stdout else 100 self.log_handler_stream = logging.StreamHandler(sys.stdout) self.log_handler_stream.setFormatter(log_fmt_long) self.log_handler_stream.setLevel(level) mdbx_logger.addHandler(self.log_handler_stream) # log to cached handlers for GUI and CLI self._log_handler_info_cache = CachedHandler(maxlen=1) self._log_handler_info_cache.setFormatter(log_fmt_short) self._log_handler_info_cache.setLevel(logging.INFO) mdbx_logger.addHandler(self._log_handler_info_cache) self._log_handler_error_cache = CachedHandler() self._log_handler_error_cache.setFormatter(log_fmt_short) self._log_handler_error_cache.setLevel(logging.ERROR) mdbx_logger.addHandler(self._log_handler_error_cache) # log to desktop notifications # 'file changed' events will be collated and sent as desktop # notifications by the monitor directly, we don't handle them here self.desktop_notifier = MaestralDesktopNotifier.for_config( self.config_name) self.desktop_notifier.setLevel(logging.WARNING) mdbx_logger.addHandler(self.desktop_notifier) # log to bugsnag (disabled by default) self._log_handler_bugsnag = BugsnagHandler() self._log_handler_bugsnag.setLevel(100) mdbx_logger.addHandler(self._log_handler_bugsnag) self.analytics = self._conf.get('app', 'analytics') # ==== methods to access config and saved state ====================================== @property def config_name(self): """The selected configuration.""" return self._config_name def set_conf(self, section, name, value): """Sets a configuration option.""" self._conf.set(section, name, value) def get_conf(self, section, name): """Gets a configuration option.""" return self._conf.get(section, name) def set_state(self, section, name, value): """Sets a state value.""" self._state.set(section, name, value) def get_state(self, section, name): """Gets a state 
value.""" return self._state.get(section, name) # ==== getters / setters for config with side effects ================================ @property def dropbox_path(self): """Returns the path to the local Dropbox directory. Read only. Use :meth:`create_dropbox_directory` or :meth:`move_dropbox_directory` to set or change the Dropbox directory location instead. """ return self.sync.dropbox_path @property def excluded_items(self): """ Returns a list of excluded folders (read only). Use :meth:`exclude_item`, :meth:`include_item` or :meth:`set_excluded_items` to change which items are excluded from syncing. """ return self.sync.excluded_items @property def log_level(self): """Log level for log files, the stream handler and the systemd journal.""" return self._conf.get('app', 'log_level') @log_level.setter def log_level(self, level_num): """Setter: Log level for log files, the stream handler and the systemd journal.""" self.log_handler_file.setLevel(level_num) if self.log_handler_journal: self.log_handler_journal.setLevel(level_num) if self.log_to_stdout: self.log_handler_stream.setLevel(level_num) self._conf.set('app', 'log_level', level_num) @property def log_to_stdout(self): return self._log_to_stdout @log_to_stdout.setter def log_to_stdout(self, enabled=True): """Enables or disables logging to stdout.""" self._log_to_stdout = enabled level = self.log_level if enabled else 100 self.log_handler_stream.setLevel(level) @property def analytics(self): """Enables or disables logging of errors to bugsnag.""" return self._conf.get('app', 'analytics') @analytics.setter def analytics(self, enabled): """Setter: Enables or disables logging of errors to bugsnag.""" bugsnag.configuration.auto_notify = enabled bugsnag.configuration.auto_capture_sessions = enabled self._log_handler_bugsnag.setLevel(logging.ERROR if enabled else 100) self._conf.set('app', 'analytics', enabled) @property def notification_snooze(self): """Snoozed time for desktop notifications in minutes.""" return 
self.desktop_notifier.snoozed @notification_snooze.setter def notification_snooze(self, minutes): """Setter: Snoozed time for desktop notifications in minutes.""" self.desktop_notifier.snoozed = minutes @property def notification_level(self): """Level for desktop notifications.""" return self.desktop_notifier.notify_level @notification_level.setter def notification_level(self, level): """Setter: Level for desktop notifications.""" self.desktop_notifier.notify_level = level # ==== state information ============================================================ @property def pending_dropbox_folder(self): """Bool indicating if a local Dropbox directory has been created.""" return not osp.isdir(self._conf.get('main', 'path')) @property def pending_first_download(self): """Bool indicating if the initial download has already occurred.""" return (self._state.get('sync', 'lastsync') == 0 or self._state.get('sync', 'cursor') == '') @property def syncing(self): """ Bool indicating if Maestral is syncing. It will be ``True`` if syncing is not paused by the user *and* Maestral is connected to the internet. """ return self.monitor.syncing.is_set() or self.monitor.startup.is_set() @property def paused(self): """Bool indicating if syncing is paused by the user. This is set by calling :meth:`pause`.""" return self.monitor.paused_by_user.is_set( ) and not self.sync.lock.locked() @property def stopped(self): """Bool indicating if syncing is stopped, for instance because of an exception.""" return not self.monitor.running.is_set() and not self.sync.lock.locked( ) @property def connected(self): """Bool indicating if Dropbox servers can be reached.""" return self.monitor.connected.is_set() @property def status(self): """ Returns a string with the last status message. This can be displayed as information to the user but should not be relied on otherwise. 
""" return self._log_handler_info_cache.getLastMessage() @property def sync_errors(self): """Returns list containing the current sync errors as dicts.""" sync_errors = list(self.sync.sync_errors.queue) sync_errors_dicts = [error_to_dict(e) for e in sync_errors] return sync_errors_dicts @property def maestral_errors(self): """ Returns a list of Maestral's errors as dicts. This does not include lost internet connections or file sync errors which only emit warnings and are tracked and cleared separately. Errors listed here must be acted upon for Maestral to continue syncing. """ maestral_errors = [ r.exc_info[1] for r in self._log_handler_error_cache.cached_records ] maestral_errors_dicts = [error_to_dict(e) for e in maestral_errors] return maestral_errors_dicts def clear_maestral_errors(self): """ Manually clears all Maestral errors. This should be used after they have been resolved by the user through the GUI or CLI. """ self._log_handler_error_cache.clear() @property def account_profile_pic_path(self): """ Returns the path of the current account's profile picture. There may not be an actual file at that path, if the user did not set a profile picture or the picture has not yet been downloaded. """ return get_cache_path('maestral', self._config_name + '_profile_pic.jpeg') def get_file_status(self, local_path): """ Returns the sync status of an individual file. :param str local_path: Path to file on the local drive. May be relative to the current working directory. :returns: String indicating the sync status. Can be 'uploading', 'downloading', 'up to date', 'error', or 'unwatched' (for files outside of the Dropbox directory). 
:rtype: str """ if not self.syncing: return FileStatus.Unwatched.value local_path = osp.abspath(local_path) try: dbx_path = self.sync.to_dbx_path(local_path) except ValueError: return FileStatus.Unwatched.value if local_path in self.monitor.queued_for_upload: return FileStatus.Uploading.value elif local_path in self.monitor.queued_for_download: return FileStatus.Downloading.value elif any(local_path == err['local_path'] for err in self.sync_errors): return FileStatus.Error.value elif self.sync.get_local_rev(dbx_path): return FileStatus.Synced.value else: return FileStatus.Unwatched.value def get_activity(self): """ Gets current upload / download activity. :returns: A dictionary with lists of all files currently queued for or being uploaded or downloaded. Paths are given relative to the Dropbox folder. :rtype: dict(list, list) """ PathItem = namedtuple('PathItem', 'path status') uploading = [] downloading = [] for path in self.monitor.uploading: path.lstrip(self.dropbox_path) uploading.append(PathItem(path, 'uploading')) for path in self.monitor.queued_for_upload: path.lstrip(self.dropbox_path) uploading.append(PathItem(path, 'queued')) for path in self.monitor.downloading: path.lstrip(self.dropbox_path) downloading.append(PathItem(path, 'downloading')) for path in self.monitor.queued_for_download: path.lstrip(self.dropbox_path) downloading.append(PathItem(path, 'queued')) return dict(uploading=uploading, downloading=downloading) @handle_disconnect def get_account_info(self): """ Gets account information from Dropbox and returns it as a dictionary. The entries will either be of type ``str`` or ``bool``. :returns: Dropbox account information. :rtype: dict[str, bool] :raises: :class:`MaestralApiError` """ res = self.client.get_account_info() return dropbox_stone_to_dict(res) @handle_disconnect def get_space_usage(self): """ Gets the space usage stored by Dropbox and returns it as a dictionary. The entries will either be of type ``str`` or ``bool``. 
:returns: Dropbox account information. :rtype: dict[str, bool] :raises: :class:`MaestralApiError` """ res = self.client.get_space_usage() return dropbox_stone_to_dict(res) # ==== control methods for front ends ================================================ @handle_disconnect def get_profile_pic(self): """ Attempts to download the user's profile picture from Dropbox. The picture saved in Maestral's cache directory for retrieval when there is no internet connection. This function will fail silently in case of :class:`MaestralApiError`s. :returns: Path to saved profile picture or None if no profile picture is set. """ try: res = self.client.get_account_info() except MaestralApiError: pass else: if res.profile_photo_url: # download current profile pic res = requests.get(res.profile_photo_url) with open(self.account_profile_pic_path, 'wb') as f: f.write(res.content) return self.account_profile_pic_path else: # delete current profile pic self._delete_old_profile_pics() @handle_disconnect def list_folder(self, dbx_path, **kwargs): """ List all items inside the folder given by :param:`dbx_path`. :param dbx_path: Path to folder on Dropbox. :returns: List of Dropbox item metadata as dicts or ``False`` if listing failed due to connection issues. :rtype: list[dict] :raises: :class:`MaestralApiError` """ res = self.client.list_folder(dbx_path, **kwargs) entries = [dropbox_stone_to_dict(e) for e in res.entries] return entries def _delete_old_profile_pics(self): # delete all old pictures for file in os.listdir(get_cache_path('maestral')): if file.startswith(self._config_name + '_profile_pic'): try: os.unlink(osp.join(get_cache_path('maestral'), file)) except OSError: pass def rebuild_index(self): """ Rebuilds the rev file by comparing remote with local files and updating rev numbers from the Dropbox server. Files are compared by their content hashes and conflicting copies are created if the contents differ. 
File changes during the rebuild process will be queued and uploaded once rebuilding has completed. Rebuilding will be performed asynchronously. :raises: :class:`MaestralApiError` """ self.monitor.rebuild_index() def start_sync(self): """ Creates syncing threads and starts syncing. """ self.monitor.start() def resume_sync(self): """ Resumes the syncing threads if paused. """ self.monitor.resume() def pause_sync(self): """ Pauses the syncing threads if running. """ self.monitor.pause() def stop_sync(self): """ Stops the syncing threads if running, destroys observer thread. """ self.monitor.stop() def reset_sync_state(self): """ Resets the sync index and state. Only call this to clean up leftover state information if a Dropbox was improperly unlinked (e.g., auth token has been manually deleted). Otherwise leave state management to Maestral. """ self.monitor.reset_sync_state() def unlink(self): """ Unlinks the configured Dropbox account but leaves all downloaded files in place. All syncing metadata will be removed as well. Connection and API errors will be handled silently but the Dropbox access key will always be removed from the user's PC. """ self.stop_sync() try: self.client.unlink() except (ConnectionError, MaestralApiError): pass try: os.remove(self.sync.rev_file_path) except OSError: pass self.sync.clear_rev_index() delete(self.sync.rev_file_path) self._conf.cleanup() self._state.cleanup() logger.info('Unlinked Dropbox account.') def exclude_item(self, dbx_path): """ Excludes file or folder from sync and deletes it locally. It is safe to call this method with items which have already been excluded. :param str dbx_path: Dropbox path of item to exclude. :raises: :class:`ValueError` if ``dbx_path`` is not on Dropbox. :raises: :class:`ConnectionError` if connection to Dropbox fails. 
""" # input validation md = self.client.get_metadata(dbx_path) if not md: raise ValueError(f'"{dbx_path}" does not exist on Dropbox') dbx_path = dbx_path.lower().rstrip(osp.sep) # add the path to excluded list if self.sync.is_excluded_by_user(dbx_path): logger.info('%s was already excluded', dbx_path) logger.info(IDLE) return excluded_items = self.sync.excluded_items excluded_items.append(dbx_path) self.sync.excluded_items = excluded_items logger.info('Excluded %s', dbx_path) self._remove_after_excluded(dbx_path) logger.info(IDLE) def _remove_after_excluded(self, dbx_path): # book keeping self.sync.clear_sync_error(dbx_path=dbx_path) self.sync.set_local_rev(dbx_path, None) # remove folder from local drive local_path = self.sync.to_local_path(dbx_path) # dbx_path will be lower-case, we there explicitly run `to_cased_path` local_path = to_cased_path(local_path) if local_path: with self.monitor.fs_event_handler.ignore( local_path, recursive=osp.isdir(local_path), event_types=(EVENT_TYPE_DELETED, )): delete(local_path) def include_item(self, dbx_path): """ Includes file or folder in sync and downloads in the background. It is safe to call this method with items which have already been included, they will not be downloaded again. :param str dbx_path: Dropbox path of item to include. :raises: :class:`ValueError` if ``dbx_path`` is not on Dropbox or lies inside another excluded folder. :raises: :class:`ConnectionError` if connection to Dropbox fails. """ # input validation md = self.client.get_metadata(dbx_path) if not md: raise ValueError(f'"{dbx_path}" does not exist on Dropbox') dbx_path = dbx_path.lower().rstrip(osp.sep) old_excluded_items = self.sync.excluded_items for folder in old_excluded_items: if is_child(dbx_path, folder): raise ValueError( f'"{dbx_path}" lies inside the excluded folder ' f'"{folder}". Please include "{folder}" first.') # Get items which will need to be downloaded, do not attempt to download # children of `dbx_path` which were already included. 
# `new_included_items` will either be empty (`dbx_path` was already # included), just contain `dbx_path` itself (the item was fully excluded) or # only contain children of `dbx_path` (`dbx_path` was partially included). new_included_items = tuple(x for x in old_excluded_items if x == dbx_path or is_child(x, dbx_path)) if new_included_items: # remove `dbx_path` or all excluded children from the excluded list excluded_items = list( set(old_excluded_items) - set(new_included_items)) else: logger.info('%s was already included', dbx_path) return self.sync.excluded_items = excluded_items logger.info('Included %s', dbx_path) # download items from Dropbox for folder in new_included_items: self.sync.queued_newly_included_downloads.put(folder) @handle_disconnect def set_excluded_items(self, items=None): """ Sets the list of excluded files or folders. If not given, gets all top level folder paths from Dropbox and asks user to include or exclude. Items which are no in ``items`` but were previously excluded will be downloaded. On initial sync, this does not trigger any downloads. :param list items: If given, list of excluded files or folders to set. 
:raises: :class:`MaestralApiError` """ if items is None: excluded_items = [] # get all top-level Dropbox folders result = self.client.list_folder('/', recursive=False) # paginate through top-level folders, ask to exclude for entry in result.entries: if isinstance(entry, files.FolderMetadata): yes = click.confirm( f'Exclude "{entry.path_display}" from sync?', prompt_suffix='') if yes: excluded_items.append(entry.path_lower) else: excluded_items = self.sync.clean_excluded_items_list(items) old_excluded_items = self.sync.excluded_items added_excluded_items = set(excluded_items) - set(old_excluded_items) added_included_items = set(old_excluded_items) - set(excluded_items) self.sync.excluded_items = excluded_items if not self.pending_first_download: # apply changes for path in added_excluded_items: logger.info('Excluded %s', path) self._remove_after_excluded(path) for path in added_included_items: if not self.sync.is_excluded_by_user(path): logger.info('Included %s', path) self.sync.queued_newly_included_downloads.put(path) logger.info(IDLE) def excluded_status(self, dbx_path): """ Returns 'excluded', 'partially excluded' or 'included'. This function will not check if the item actually exists on Dropbox. :param str dbx_path: Path to item on Dropbox. :returns: Excluded status. :rtype: str """ dbx_path = dbx_path.lower().rstrip(osp.sep) excluded_items = self._conf.get('main', 'excluded_items') if dbx_path in excluded_items: return 'excluded' elif any(is_child(f, dbx_path) for f in excluded_items): return 'partially excluded' else: return 'included' @with_sync_paused def move_dropbox_directory(self, new_path=None): """ Sets the local Dropbox directory. This moves all local files to the new location and resumes syncing afterwards. :param str new_path: Full path to local Dropbox folder. If not given, the user will be prompted to input the path. :raises: ``OSError`` if moving the directory fails. 
""" # get old and new paths old_path = self.sync.dropbox_path new_path = new_path or select_dbx_path_dialog(self._config_name) try: if osp.samefile(old_path, new_path): return except FileNotFoundError: pass if osp.exists(new_path): raise FileExistsError(f'Path "{new_path}" already exists.') # move folder from old location or create a new one if no old folder exists if osp.isdir(old_path): shutil.move(old_path, new_path) else: os.makedirs(new_path) # update config file and client self.sync.dropbox_path = new_path @with_sync_paused def create_dropbox_directory(self, path=None): """ Creates a new Dropbox directory. Only call this during setup. :param str path: Full path to local Dropbox folder. If not given, the user will be prompted to input the path. :raises: ``OSError`` if creation fails """ path = path or select_dbx_path_dialog(self._config_name, allow_merge=True) # create new folder os.makedirs(path, exist_ok=True) # update config file and client self.sync.dropbox_path = path # ==== utility methods for front ends ================================================ def to_local_path(self, dbx_path): """ Converts a path relative to the Dropbox folder to a correctly cased local file system path. :param str dbx_path: Path relative to Dropbox root. :returns: Corresponding path of a location in the local Dropbox folder. :rtype: str """ return self.sync.to_local_path(dbx_path) @staticmethod def check_for_updates(): """ Checks if an update is available. :returns: A dictionary with information about the latest release with the fields ``update_available`` (bool), ``latest_release`` (str), ``release_notes`` (str) and ``error`` (str or None). :rtype: dict """ return check_update_available() def shutdown_pyro_daemon(self): """ Sets the ``_daemon_running`` flag to ``False``. This will be checked by Pyro5 periodically to shut down the daemon when requested. 
""" self._daemon_running = False if NOTIFY_SOCKET: # notify systemd that we are shutting down sd_notifier.notify('STOPPING=1') # ==== private methods =============================================================== def _loop_condition(self): return self._daemon_running def _periodic_refresh(self): while True: # update account info self.get_account_info() self.get_space_usage() self.get_profile_pic() # check for maestral updates res = self.check_for_updates() if not res['error']: self._state.set('app', 'latest_release', res['latest_release']) time.sleep(60 * 60) # 60 min def _periodic_watchdog(self): while self.monitor._threads_alive(): sd_notifier.notify('WATCHDOG=1') time.sleep(int(WATCHDOG_USEC) / (2 * 10**6)) def __del__(self): try: self.monitor.stop() except Exception: pass def __repr__(self): email = self._state.get('account', 'email') account_type = self._state.get('account', 'type') return f'<{self.__class__.__name__}({email}, {account_type})>'
def log_handler(self, extra_fields=None):
    """Return a ``BugsnagHandler`` attached to this client.

    :param extra_fields: forwarded verbatim to ``BugsnagHandler``; names
        log-record attributes to pull off each record and attach to the
        Bugsnag report as metadata — TODO confirm expected shape against
        ``BugsnagHandler``'s signature.
    :returns: a newly created ``BugsnagHandler`` with ``client=self``.
    """
    return BugsnagHandler(client=self, extra_fields=extra_fields)
# Django management command: fetch the latest releases for every user.
# ERROR-and-above records on 'test.logger' are reported to Bugsnag.
from integrations.music_service_fetchers.spotify_fetcher import SpotifyFetcher
from integrations.models import Release
from django.core.mail import send_mail
import datetime
import os
import requests
import bugsnag
import logging
from bugsnag.handlers import BugsnagHandler

# Configure the Bugsnag client once at import time; the API key is read
# from the environment so it is never committed to the repository.
bugsnag.configure(
    api_key=os.environ.get('BUGSNAG_API_KEY'),
    project_root='../../',
)

# Wire a Bugsnag handler into the logger: only ERROR and above are sent.
logger = logging.getLogger('test.logger')
handler = BugsnagHandler()
handler.setLevel(logging.ERROR)
logger.addHandler(handler)


class Command(BaseCommand):
    # NOTE(review): `BaseCommand` and `User` are not imported in this span —
    # presumably imported elsewhere in the file; verify.
    help = 'Fetches latest releases'

    def handle(self, *args, **options):
        # Fetch releases for every registered user, announcing each one
        # on stdout as we start.
        users = User.objects.all()
        for user in users:
            self.stdout.write(
                self.style.SUCCESS(f'starting fetching for {user.email}'))
            integration = None
# Any param key that contains one of these strings will be filtered out of all error reports. params_filters=["credit_card_number", "password", "ssn"], # We mark stacktrace lines as inProject if they come from files inside root: # project_root = "/path/to/your/app", # Useful if you are wrapping bugsnag.notify() in with your own library, to ensure errors group properly. # traceback_exclude_module = [myapp.custom_logging], ) # Create a logger logger = logging.getLogger("test.logger") # Create a Bugsnag handler. # Optionally, add 'extra_fields' which will attach meta_data to every Bugsnag report. The values should be attributes to pull off each log record. handler = BugsnagHandler(extra_fields={"logger": ["__repr__"]}) # Define which level of log you want to report to Bugsnag. Here, warning & above. handler.setLevel(logging.WARNING) # Attach the Bugsnag handler to your logger. logger.addHandler(handler) # You can define a callback function which, when attached to your Bugsnag client, will run right before each and every report is sent to the api. Here you can evaluate and modify the report data. def callback(notification): """This callback will evaluate and modify every exception report, handled and unhandled, that occurs within the app, right before it is sent to Bugsnag. """ # adding user info and metadata to every report: notification.user = { # in your app, you can pull these details from session.
        type(e), e.args, e))

# Configure the Bugsnag client for this module.
# SECURITY(review): the API key is hardcoded below — prefer reading it from
# the environment or a secrets store before this ships beyond development.
bugsnag.configure(
    api_key="e8f8e38698ff222ae1a9060aa443b75e",
    # Note: This is Jurek's private BugSnag account.
    # We might need to upgrade it later.
    project_root=current_dir,
    app_version=git_tag,
    ignore_classes=["django.http.Http404"],
    release_stage="development",
    # ! Note, asynchronous=False might be needed for this to work from inside Matlab. Enable otherwise to improve load performance.
    asynchronous=True)

# INFO-and-above records on the "basic" logger are forwarded to Bugsnag.
logger = logging.getLogger("basic")
logger.setLevel(logging.INFO)
logger.addHandler(BugsnagHandler())

# Optionally collect host identity for the startup notification.
# NOTE(review): os.getlogin() can raise OSError when no controlling
# terminal exists (e.g. under cron/systemd) — confirm this path is only
# taken interactively, or guard it.
login = None
hostname = None
node = None
if enable_hostname_reporting:
    login = os.getlogin()
    hostname = socket.gethostname()
    node = platform.node()

# Fire a synthetic "module imported" event so imports are visible in Bugsnag.
bugsnag.notify(Exception("I have been imported"),
               context="bugcatcher.start",
               meta_data={
                   "startup_data": {
                       "login": login,
                       "hostname": hostname,
# Papertrail logging kept for reference; currently disabled.
# papertrail = logging.getLogger()
# papertrail.addHandler(syslog)
# papertrail.setLevel(logging.DEBUG)
# papertrail.info("This is a message")

####################################################################################################
# BUGSNAG LOGGING
####################################################################################################

# All Bugsnag settings come from the environment so deployments can differ
# without code changes.
# NOTE(review): `use_ssl` is a legacy bugsnag-python option — confirm the
# installed client version still accepts it.
bugsnag.configure(api_key=os.environ.get('BUGSNAG_API_KEY'),
                  project_root=os.environ.get('BUGSNAG_PROJECT_ROOT'),
                  release_stage=os.environ.get('SERVER_ENV'),
                  use_ssl=True)

bslogger = logging.getLogger("test.logger")
handler = BugsnagHandler()
# send only WARN-level logs and above
# (WARNING is the canonical name; WARN is a deprecated alias of the same value)
handler.setLevel(logging.WARNING)
bslogger.addHandler(handler)

####################################################################################################
# CONSOLE LOGGING
####################################################################################################

class Logger(object):
    def __init__(self):
        # spinner / progress-bar bookkeeping for console output
        self.spinning = False
        self.terminal = sys.stdout
        self.writing_progress = False
def log_handler(self, extra_fields: List[str] = None) -> BugsnagHandler:
    """Build and return a ``BugsnagHandler`` tied to this client.

    ``extra_fields`` is passed straight through to ``BugsnagHandler`` so the
    named log-record attributes are attached to every report it sends.
    """
    handler = BugsnagHandler(client=self, extra_fields=extra_fields)
    return handler
params_filters = ["credit_card_number", "password", "ssn"], # We mark stacktrace lines as inProject if they come from files inside root: # project_root = "/path/to/your/app", # Useful if you are wrapping bugsnag.notify() in with your own library, to ensure errors group properly. # traceback_exclude_module = [myapp.custom_logging], ) # Create a logger logger = logging.getLogger("test.logger") # Create a Bugsnag handler. # Optionally, add 'extra_fields' which will attach meta_data to every Bugsnag report. The values should be attributes to pull off each log record. handler = BugsnagHandler(extra_fields={"logger":["__repr__"]}) # Define which level of log you want to report to Bugsnag. Here, warning & above. handler.setLevel(logging.WARNING) # Attach the Bugsnag handler to your logger. logger.addHandler(handler) # You can define a callback function which, when attached to your Bugsnag client, will run right before each and every report is sent to the api. Here you can evaluate and modify the report data. def callback(notification): """This callback will evaluate and modify every exception report, handled and unhandled, that occurs within the app, right before it is sent to Bugsnag. """ # adding user info and metadata to every report: notification.user = { # in your app, you can pull these details from session.
class Maestral(object): """ An open source Dropbox client for macOS and Linux to syncing a local folder with your Dropbox account. All functions and properties return objects or raise exceptions which can safely serialized, i.e., pure Python types. The only exception are MaestralApiErrors which have been registered explicitly with the Pyro5 serializer. """ _daemon_running = True # for integration with Pyro def __init__(self, config_name='maestral', run=True): self._config_name = config_name self._conf = MaestralConfig(self._config_name) self._setup_logging() self.set_share_error_reports(self._conf.get("app", "analytics")) self.client = MaestralApiClient(config_name=self._config_name) self.monitor = MaestralMonitor(self.client, config_name=self._config_name) self.sync = self.monitor.sync # periodically check for updates and refresh account info self.update_thread = Thread( name="Maestral update check", target=self._periodic_refresh, daemon=True, ) self.update_thread.start() if run: self.run() def run(self): if self.pending_dropbox_folder(self._config_name): self.create_dropbox_directory() self.set_excluded_folders() self.sync.last_cursor = "" self.sync.last_sync = 0 # start syncing self.start_sync() if NOTIFY_SOCKET and system_notifier: # notify systemd that we have started logger.debug("Running as systemd notify service") logger.debug(f"NOTIFY_SOCKET = {NOTIFY_SOCKET}") system_notifier.notify("READY=1") if IS_WATCHDOG and system_notifier: # notify systemd periodically if alive logger.debug("Running as systemd watchdog service") logger.debug(f"WATCHDOG_USEC = {WATCHDOG_USEC}") logger.debug(f"WATCHDOG_PID = {WATCHDOG_PID}") self.watchdog_thread = Thread( name="Maestral watchdog", target=self._periodic_watchdog, daemon=True, ) self.watchdog_thread.start() def _setup_logging(self): log_level = self._conf.get("app", "log_level") mdbx_logger = logging.getLogger("maestral") mdbx_logger.setLevel(logging.DEBUG) log_fmt_long = logging.Formatter( fmt="%(asctime)s %(name)s 
%(levelname)s: %(message)s", datefmt="%Y-%m-%d %H:%M:%S") log_fmt_short = logging.Formatter(fmt="%(message)s") # log to file rfh_log_file = get_log_path("maestral", self._config_name + ".log") self._log_handler_file = logging.handlers.RotatingFileHandler( rfh_log_file, maxBytes=10**7, backupCount=1) self._log_handler_file.setFormatter(log_fmt_long) self._log_handler_file.setLevel(log_level) mdbx_logger.addHandler(self._log_handler_file) # log to journal when launched from systemd if INVOCATION_ID and journal: self._log_handler_journal = journal.JournalHandler() self._log_handler_journal.setFormatter(log_fmt_short) mdbx_logger.addHandler(self._log_handler_journal) # log to stdout (disabled by default) self._log_handler_stream = logging.StreamHandler(sys.stdout) self._log_handler_stream.setFormatter(log_fmt_long) self._log_handler_stream.setLevel(100) mdbx_logger.addHandler(self._log_handler_stream) # log to cached handlers for GUI and CLI self._log_handler_info_cache = CachedHandler(maxlen=1) self._log_handler_info_cache.setLevel(logging.INFO) self._log_handler_info_cache.setFormatter(log_fmt_short) mdbx_logger.addHandler(self._log_handler_info_cache) self._log_handler_error_cache = CachedHandler() self._log_handler_error_cache.setLevel(logging.ERROR) self._log_handler_error_cache.setFormatter(log_fmt_short) mdbx_logger.addHandler(self._log_handler_error_cache) # log to bugsnag (disabled by default) self._log_handler_bugsnag = BugsnagHandler() self._log_handler_bugsnag.setLevel(100) mdbx_logger.addHandler(self._log_handler_bugsnag) @property def config_name(self): return self._config_name def set_conf(self, section, name, value): self._conf.set(section, name, value) def get_conf(self, section, name): return self._conf.get(section, name) def set_log_level(self, level_num): self._log_handler_file.setLevel(level_num) self._log_handler_stream.setLevel(level_num) self._conf.set("app", "log_level", level_num) def set_log_to_stdout(self, enabled=True): if enabled: 
log_level = self._conf.get("app", "log_level") self._log_handler_stream.setLevel(log_level) else: self._log_handler_stream.setLevel(100) def set_share_error_reports(self, enabled): bugsnag.configuration.auto_notify = enabled bugsnag.configuration.auto_capture_sessions = enabled self._log_handler_bugsnag.setLevel(logging.ERROR if enabled else 100) self._conf.set("app", "analytics", enabled) @staticmethod def pending_link(config_name): """ Bool indicating if auth tokens are stored in the system's keychain. This may raise a KeyringLocked exception if the user's keychain cannot be accessed. This exception will not be deserialized by Pyro5. You should check if Maestral is linked before instantiating a daemon. :param str config_name: Name of user config to check. :raises: :class:`keyring.errors.KeyringLocked` """ auth_session = OAuth2Session(config_name) return auth_session.load_token() is None @staticmethod def pending_dropbox_folder(config_name): """ Bool indicating if a local Dropbox directory has been set. :param str config_name: Name of user config to check. """ conf = MaestralConfig(config_name) return not osp.isdir(conf.get("main", "path")) def pending_first_download(self): """Bool indicating if the initial download has already occurred.""" return (self._conf.get("internal", "lastsync") == 0 or self._conf.get("internal", "cursor") == "") @property def syncing(self): """Bool indicating if Maestral is syncing. It will be ``True`` if syncing is not paused by the user *and* Maestral is connected to the internet.""" return self.monitor.syncing.is_set() @property def paused(self): """Bool indicating if syncing is paused by the user. 
This is set by calling :meth:`pause`.""" return not self.monitor._auto_resume_on_connect @property def stopped(self): """Bool indicating if syncing is stopped, for instance because of an exception.""" return not self.monitor.running.is_set() @property def connected(self): """Bool indicating if Dropbox servers can be reached.""" return self.monitor.connected.is_set() @property def status(self): """Returns a string with the last status message. This can be displayed as information to the user but should not be relied on otherwise.""" return self._log_handler_info_cache.getLastMessage() @property def notify(self): """Bool indicating if notifications are enabled or disabled.""" return self.sync.notify.enabled @notify.setter def notify(self, boolean): """Setter: Bool indicating if notifications are enabled.""" self.sync.notify.enabled = boolean @property def dropbox_path(self): """Returns the path to the local Dropbox directory. Read only. Use :meth:`create_dropbox_directory` or :meth:`move_dropbox_directory` to set or change the Dropbox directory location instead. """ return self.sync.dropbox_path @property def excluded_folders(self): """Returns a list of excluded folders (read only). Use :meth:`exclude_folder`, :meth:`include_folder` or :meth:`set_excluded_folders` change which folders are excluded from syncing.""" return self.sync.excluded_folders @property def sync_errors(self): """Returns list containing the current sync errors as dicts.""" sync_errors = list(self.sync.sync_errors.queue) sync_errors_dicts = [maestral_error_to_dict(e) for e in sync_errors] return sync_errors_dicts @property def maestral_errors(self): """Returns a list of Maestral's errors as dicts. This does not include lost internet connections or file sync errors which only emit warnings and are tracked and cleared separately. Errors listed here must be acted upon for Maestral to continue syncing. 
""" maestral_errors = [ r.exc_info[1] for r in self._log_handler_error_cache.cached_records ] maestral_errors_dicts = [ maestral_error_to_dict(e) for e in maestral_errors ] return maestral_errors_dicts def clear_maestral_errors(self): """Manually clears all Maestral errors. This should be used after they have been resolved by the user through the GUI or CLI. """ self._log_handler_error_cache.clear() @property def account_profile_pic_path(self): """Returns the path of the current account's profile picture. There may not be an actual file at that path, if the user did not set a profile picture or the picture has not yet been downloaded.""" return get_cache_path("maestral", self._config_name + "_profile_pic.jpeg") def get_file_status(self, local_path): """ Returns the sync status of an individual file. :param local_path: Path to file on the local drive. :return: String indicating the sync status. Can be "uploading", "downloading", "up to date", "error", or "unwatched" (for files outside of the Dropbox directory). :rtype: str """ if not self.syncing: return "unwatched" try: dbx_path = self.sync.to_dbx_path(local_path) except ValueError: return "unwatched" if local_path in self.monitor.queued_for_upload: return "uploading" elif local_path in self.monitor.queued_for_download: return "downloading" elif any(local_path == err["local_path"] for err in self.sync_errors): return "error" elif self.sync.get_local_rev(dbx_path): return "up to date" else: return "unwatched" def get_activity(self): """ Returns a dictionary with lists of all file currently queued for or being synced. 
:rtype: dict(list, list) """ PathItem = namedtuple("PathItem", "local_path status") uploading = [] downloading = [] for path in self.monitor.uploading: uploading.append(PathItem(path, "uploading")) for path in self.monitor.queued_for_upload: uploading.append(PathItem(path, "queued")) for path in self.monitor.downloading: downloading.append(PathItem(path, "downloading")) for path in self.monitor.queued_for_download: downloading.append(PathItem(path, "queued")) return dict(uploading=uploading, downloading=downloading) @handle_disconnect def get_account_info(self): """ Gets account information from Dropbox and returns it as a dictionary. The entries will either be of type ``str`` or ``bool``. :returns: Dropbox account information. :rtype: dict[str, bool] :raises: :class:`MaestralApiError` """ res = self.client.get_account_info() return dropbox_stone_to_dict(res) @handle_disconnect def get_space_usage(self): """ Gets the space usage stored by Dropbox and returns it as a dictionary. The entries will either be of type ``str`` or ``bool``. :returns: Dropbox account information. :rtype: dict[str, bool] """ res = self.client.get_space_usage() return dropbox_stone_to_dict(res) @handle_disconnect def get_profile_pic(self): """ Attempts to download the user's profile picture from Dropbox. The picture saved in Maestral's cache directory for retrieval when there is no internet connection. This function will fail silently in case of :class:`MaestralApiError`s. :returns: Path to saved profile picture or None if no profile picture is set. 
""" try: res = self.client.get_account_info() except MaestralApiError: pass else: if res.profile_photo_url: # download current profile pic res = requests.get(res.profile_photo_url) with open(self.account_profile_pic_path, "wb") as f: f.write(res.content) return self.account_profile_pic_path else: # delete current profile pic self._delete_old_profile_pics() @handle_disconnect def list_folder(self, dbx_path, **kwargs): """ List all items inside the folder given by :param:`dbx_path`. :param dbx_path: Path to folder on Dropbox. :return: List of Dropbox item metadata as dicts or ``False`` if listing failed due to connection issues. :rtype: list[dict] """ dbx_path = "" if dbx_path == "/" else dbx_path res = self.client.list_folder(dbx_path, **kwargs) entries = [dropbox_stone_to_dict(e) for e in res.entries] return entries def _delete_old_profile_pics(self): # delete all old pictures for file in os.listdir(get_cache_path("maestral")): if file.startswith(self._config_name + "_profile_pic"): try: os.unlink(osp.join(get_cache_path("maestral"), file)) except OSError: pass def rebuild_index(self): """ Rebuilds the Maestral index and resumes syncing afterwards if it has been running. :raises: :class:`MaestralApiError` """ self.monitor.rebuild_rev_file() def start_sync(self, overload=None): """ Creates syncing threads and starts syncing. """ self.monitor.start() def resume_sync(self, overload=None): """ Resumes the syncing threads if paused. """ self.monitor.resume() def pause_sync(self, overload=None): """ Pauses the syncing threads if running. """ self.monitor.pause() def stop_sync(self, overload=None): """ Stops the syncing threads if running, destroys observer thread. """ self.monitor.stop() def unlink(self): """ Unlinks the configured Dropbox account but leaves all downloaded files in place. All syncing metadata will be removed as well. Connection and API errors will be handled silently but the Dropbox access key will always be removed from the user's PC. 
""" self.stop_sync() try: self.client.unlink() except (ConnectionError, MaestralApiError): pass try: os.remove(self.sync.rev_file_path) except OSError: pass self.sync.dropbox_path = "" self.sync.last_cursor = "" self.sync.last_sync = 0.0 self._conf.reset_to_defaults() logger.info("Unlinked Dropbox account.") def exclude_folder(self, dbx_path): """ Excludes folder from sync and deletes local files. It is safe to call this method with folders which have already been excluded. :param str dbx_path: Dropbox folder to exclude. :raises: :class:`ValueError` if ``dbx_path`` is not on Dropbox. :raises: :class:`ConnectionError` if connection to Dropbox fails. """ dbx_path = dbx_path.lower().rstrip(osp.sep) md = self.client.get_metadata(dbx_path) if not isinstance(md, files.FolderMetadata): raise ValueError( "No such folder on Dropbox: '{0}'".format(dbx_path)) # add the path to excluded list excluded_folders = self.sync.excluded_folders if dbx_path not in excluded_folders: excluded_folders.append(dbx_path) else: logger.info("Folder was already excluded, nothing to do.") return self.sync.excluded_folders = excluded_folders self.sync.set_local_rev(dbx_path, None) # remove folder from local drive local_path = self.sync.to_local_path(dbx_path) local_path_cased = path_exists_case_insensitive(local_path) logger.info(f"Deleting folder '{local_path_cased}'.") if osp.isdir(local_path_cased): shutil.rmtree(local_path_cased) def include_folder(self, dbx_path): """ Includes folder in sync and downloads in the background. It is safe to call this method with folders which have already been included, they will not be downloaded again. :param str dbx_path: Dropbox folder to include. :raises: :class:`ValueError` if ``dbx_path`` is not on Dropbox or lies inside another excluded folder. :raises: :class:`ConnectionError` if connection to Dropbox fails. 
""" dbx_path = dbx_path.lower().rstrip(osp.sep) md = self.client.get_metadata(dbx_path) old_excluded_folders = self.sync.excluded_folders if not isinstance(md, files.FolderMetadata): raise ValueError( "No such folder on Dropbox: '{0}'".format(dbx_path)) for folder in old_excluded_folders: if is_child(dbx_path, folder): raise ValueError( "'{0}' lies inside the excluded folder '{1}'. " "Please include '{1}' first.".format(dbx_path, folder)) # Get folders which will need to be downloaded, do not attempt to download # subfolders of `dbx_path` which were already included. # `new_included_folders` will either be empty (`dbx_path` was already # included), just contain `dbx_path` itself (the whole folder was excluded) or # only contain subfolders of `dbx_path` (`dbx_path` was partially included). new_included_folders = tuple(x for x in old_excluded_folders if x == dbx_path or is_child(x, dbx_path)) if new_included_folders: # remove `dbx_path` or all excluded children from the excluded list excluded_folders = list( set(old_excluded_folders) - set(new_included_folders)) else: logger.info("Folder was already included, nothing to do.") return self.sync.excluded_folders = excluded_folders # download folder contents from Dropbox logger.info(f"Downloading added folder '{dbx_path}'.") for folder in new_included_folders: self.sync.queued_folder_downloads.put(folder) @handle_disconnect def _include_folder_without_subfolders(self, dbx_path): """Sets a folder to included without explicitly including its subfolders. 
        This is to be used internally, when a folder has been removed from the
        excluded list, but some of its subfolders may have been added."""
        # Dropbox paths are case-insensitive; normalise before comparing.
        dbx_path = dbx_path.lower().rstrip(osp.sep)
        excluded_folders = self.sync.excluded_folders

        # nothing to do if the folder was not excluded
        if dbx_path not in excluded_folders:
            return

        excluded_folders.remove(dbx_path)

        self.sync.excluded_folders = excluded_folders
        # queue only the folder itself for download; previously-excluded
        # subfolders stay excluded
        self.sync.queued_folder_downloads.put(dbx_path)

    @handle_disconnect
    def set_excluded_folders(self, folder_list=None):
        """
        Sets the list of excluded folders to `folder_list`. If not given, gets
        all top level folder paths from Dropbox and asks user to include or
        exclude. Folders which are not in `folder_list` but exist on Dropbox
        will be downloaded.

        On initial sync, this does not trigger any downloads.

        :param list folder_list: If given, list of excluded folders to set.
        :return: List of excluded folders.
        :rtype: list
        :raises: :class:`MaestralApiError`
        """

        if folder_list is None:

            excluded_folders = []

            # get all top-level Dropbox folders
            result = self.client.list_folder("", recursive=False)

            # paginate through top-level folders, ask to exclude
            for entry in result.entries:
                if isinstance(entry, files.FolderMetadata):
                    yes = click.confirm(
                        f"Exclude '{entry.path_display}' from sync?")
                    if yes:
                        excluded_folders.append(entry.path_lower)
        else:
            excluded_folders = self.sync.clean_excluded_folder_list(
                folder_list)

        old_excluded_folders = self.sync.excluded_folders

        # diff against the previous exclusion list to find what changed
        added_excluded_folders = set(excluded_folders) - set(
            old_excluded_folders)
        added_included_folders = set(old_excluded_folders) - set(
            excluded_folders)

        if not self.pending_first_download():
            # apply changes
            for path in added_excluded_folders:
                self.exclude_folder(path)
            for path in added_included_folders:
                self._include_folder_without_subfolders(path)

        self.sync.excluded_folders = excluded_folders

        return excluded_folders

    def excluded_status(self, dbx_path):
        """
        Returns 'excluded', 'partially excluded' or 'included'. This function
        will not check if the item actually exists on Dropbox.
        :param str dbx_path: Path to item on Dropbox.
        :returns: Excluded status.
        :rtype: str
        """
        # Dropbox paths are case-insensitive; normalise before comparing.
        dbx_path = dbx_path.lower().rstrip(osp.sep)

        excluded_items = self._conf.get("main", "excluded_folders") + self._conf.get(
            "main", "excluded_files")

        if dbx_path in excluded_items:
            return "excluded"
        elif any(is_child(f, dbx_path) for f in excluded_items):
            # an ancestor of an excluded item: only part of it is synced
            return "partially excluded"
        else:
            return "included"

    @with_sync_paused
    def move_dropbox_directory(self, new_path=None):
        """
        Change or set local dropbox directory. This moves all local files to
        the new location. If a file or folder already exists at this location,
        it will be overwritten.

        :param str new_path: Full path to local Dropbox folder. If not given,
            the user will be prompted to input the path.
        """

        # get old and new paths
        old_path = self.sync.dropbox_path
        if new_path is None:
            new_path = self._ask_for_path(self._config_name)

        # nothing to do if old and new location are the same
        try:
            if osp.samefile(old_path, new_path):
                return
        except FileNotFoundError:
            pass

        # remove existing items at current location
        try:
            os.unlink(new_path)
        except IsADirectoryError:
            shutil.rmtree(new_path, ignore_errors=True)
        except FileNotFoundError:
            pass

        # move folder from old location or create a new one if no old folder exists
        if osp.isdir(old_path):
            shutil.move(old_path, new_path)
        else:
            os.makedirs(new_path)

        # update config file and client
        self.sync.dropbox_path = new_path

    @with_sync_paused
    def create_dropbox_directory(self, path=None, overwrite=True):
        """
        Set a new local dropbox directory.

        :param str path: Full path to local Dropbox folder. If not given, the
            user will be prompted to input the path.
        :param bool overwrite: If ``True``, any existing file or folder at
            ``new_path`` will be replaced.
""" # ask for new path if path is None: path = self._ask_for_path(self._config_name) if overwrite: # remove any old items at the location try: shutil.rmtree(path) except NotADirectoryError: os.unlink(path) except FileNotFoundError: pass # create new folder os.makedirs(path, exist_ok=True) # update config file and client self.sync.dropbox_path = path @staticmethod def _ask_for_path(config_name): """ Asks for Dropbox path. """ conf = MaestralConfig(config_name) default = osp.join(get_home_dir(), conf.get("main", "default_dir_name")) while True: msg = f"Please give Dropbox folder location or press enter for default ['{default}']:" res = input(msg).strip("'\" ") dropbox_path = osp.expanduser(res or default) old_path = osp.expanduser(conf.get("main", "path")) same_path = False try: if osp.samefile(old_path, dropbox_path): same_path = True except FileNotFoundError: pass if osp.exists(dropbox_path) and not same_path: msg = f"Directory '{dropbox_path}' already exist. Do you want to overwrite it?" yes = click.confirm(msg) if yes: return dropbox_path else: pass else: return dropbox_path def to_local_path(self, dbx_path): return self.sync.to_local_path(dbx_path) @staticmethod def check_for_updates(): return check_update_available() def _periodic_refresh(self): while True: # update account info self.get_account_info() self.get_space_usage() self.get_profile_pic() # check for maestral updates res = self.check_for_updates() if not res["error"]: self._conf.set("app", "latest_release", res["latest_release"]) time.sleep(60 * 60) # 60 min def _periodic_watchdog(self): while self.monitor._threads_alive(): system_notifier.notify("WATCHDOG=1") time.sleep(int(WATCHDOG_USEC) / (2 * 10**6)) def shutdown_pyro_daemon(self): """Does nothing except for setting the _daemon_running flag to ``False``. 
This will be checked by Pyro periodically to shut down the daemon when requested.""" self._daemon_running = False if NOTIFY_SOCKET and system_notifier: # notify systemd that we are shutting down system_notifier.notify("STOPPING=1") def _loop_condition(self): return self._daemon_running def __del__(self): try: self.monitor.stop() except: pass def __repr__(self): email = self._conf.get("account", "email") account_type = self._conf.get("account", "type") return f"<{self.__class__}({email}, {account_type})>"