def get_logs(self):
    """Return the in-memory log buffer rendered as a string.

    Returns None if the buffer cannot be retrieved; retrieval is
    best-effort and must never propagate an exception to the caller.
    """
    try:
        memory_handler = get_handler(get_logger(None), "memory")
        records = memory_handler.get_buffer(MAX_LOG_DISPLAYED)
        return str(records)
    except Exception as exc:
        # Log and swallow: fetching logs is a diagnostic convenience only.
        log.exception(exc)
        return None
def test_logs():
    """Exercise the logging stack against mixed str/unicode messages (Python 2).

    Ensures that byte strings with non-ASCII bytes, unicode messages and a
    crafted exception carrying non-ASCII text can all be logged, and that a
    support report can still be generated afterwards.
    """
    log = get_logger(__name__)
    folder = tempfile.mkdtemp(u'-nxdrive-tests')
    Options.nxdrive_home = folder
    manager = Manager()
    try:
        # A raw byte string with a latin-1 byte vs a proper unicode literal.
        log.debug("Strange encoding \xe9")
        log.debug(u"Unicode encoding \xe8")

        # Crafted problematic logRecord
        try:
            raise ValueError(u'[tests] folder/\xeatre ou ne pas \xeatre.odt')
        except ValueError as e:
            log.exception('Oups!')
            log.exception(repr(e))
            log.exception(unicode(e))  # Works but not recommended

            # str() on a unicode-bearing exception implicitly encodes to
            # ASCII under Python 2, which must fail here.
            with pytest.raises(UnicodeEncodeError):
                log.exception(str(e))

            # Using the syntax below will raise the same UnicodeEncodeError
            # but the logging module takes care of it and just prints out
            # the exception without raising it. So I let it there FI.
            # log.exception(e)

        # The report generation reads back the log buffer, so it must cope
        # with every record logged above.
        report = Report(manager, os.path.join(folder, 'report'))
        report.generate()
    finally:
        manager.dispose_db()
        Manager._singleton = None
def _export_logs(self):
    """Return the contents of the in-memory log handler as one string.

    :return str: every buffered record formatted by the handler, each
        followed by a newline (empty string when the buffer is empty).
    """
    handler = get_handler(get_logger(None), "memory")
    records = handler.get_buffer(MAX_LOG_DISPLAYED)
    # join() is linear; the original repeated `logs = logs + ...` is
    # quadratic in the number of buffered records.
    return "".join(handler.format(record) + "\n" for record in records)
def _get_logs(self, limit=MAX_LOG_DISPLAYED):
    """Return up to *limit* exported records from the memory log handler.

    Each record is converted with ``self._export_log_record`` before being
    returned.
    """
    handler = get_handler(get_logger(None), "memory")
    exported = []
    remaining = limit
    for record in handler.get_buffer(limit):
        exported.append(self._export_log_record(record))
        # Defensive cap in case the buffer yields more than *limit* items.
        remaining -= 1
        if remaining == 0:
            break
    return exported
def _export_logs(self):
    """Return the memory log buffer as a single unicode string.

    Each formatted record is decoded to unicode with undecodable bytes
    replaced, so a single malformed record cannot break the whole export.

    :return unicode: each record followed by a newline (u"" when empty).
    """
    handler = get_handler(get_logger(None), "memory")
    lines = []
    for record in handler.get_buffer(MAX_LOG_DISPLAYED):
        try:
            line = handler.format(record).decode("utf-8", errors="replace")
        except UnicodeEncodeError:
            # Python 2 quirk: calling .decode() on an already-unicode
            # string first implicitly encodes it to ASCII, which raises
            # UnicodeEncodeError; the record is already unicode then.
            line = handler.format(record)
        lines.append(line + u"\n")
    # join() is linear; the original `logs = logs + ...` loop is quadratic.
    return u"".join(lines)
def _export_logs():
    """
    Export all lines from the memory logger.

    :return bytes: bytes needed by zipfile.writestr()
    """
    handler = get_handler(get_logger(None), 'memory')
    for record in handler.get_buffer(MAX_LOG_DISPLAYED):
        formatted = handler.format(record)
        if isinstance(formatted, bytes):
            yield formatted
        else:
            # Normalize text records to bytes for zipfile.writestr().
            yield formatted.encode('utf-8', errors='replace')
def handle(self, argv):
    """ Parse options, setup logs and manager and dispatch execution. """
    options = self.parse_cli(argv)
    if hasattr(options, 'local_folder'):
        options.local_folder = normalized_path(options.local_folder)

    # 'launch' is the default command if None is provided
    command = getattr(options, 'command', 'launch')

    if command != 'uninstall':
        # Configure the logging framework, except for the tests as they
        # configure their own.
        # Don't need uninstall logs either for now.
        self._configure_logger(command, options)

    self.log = get_logger(__name__)
    self.log.debug("Command line: argv=%r, options=%r",
                   ' '.join(argv), options)

    # Update default options
    Options.update(options, setter='cli')

    if command != 'uninstall':
        # Install utility to help debugging segmentation faults
        self._install_faulthandler()
        # Initialize a manager for this process
        self.manager = self.get_manager()

    # Find the command to execute: the handler is the method on self whose
    # name matches the CLI command.
    handler = getattr(self, command, None)
    if not handler:
        raise NotImplementedError('No handler implemented for command ' + command)

    try:
        return handler(options)
    except Exception as e:
        if Options.debug:
            # Make it possible to use the postmortem debugger
            raise
        # Prefer the exception's .msg attribute when present (e.g. some
        # client errors carry it); otherwise log the exception itself.
        msg = e.msg if hasattr(e, 'msg') else e
        self.log.error("Error executing '%s': %s", command, msg,
                       exc_info=True)
def handle(self, argv):
    """Parse options, setup logs and controller and dispatch execution."""
    options = self.parse_cli(argv)

    # 'start' is the default command if None is provided
    command = options.command = getattr(options, 'command', 'launch')

    if command != 'test' and command != 'uninstall':
        # Configure the logging framework, except for the tests as they
        # configure their own.
        # Don't need uninstall logs either for now.
        self._configure_logger(options)

    self.log = get_logger(__name__)
    self.log.debug("Command line: command=%s, argv=%s",
                   command, ' '.join(argv))

    if command != 'test' and command != 'uninstall':
        # Install utility to help debugging segmentation faults
        self._install_faulthandler(options)

    if command != 'test':
        # Initialize a controller for this process, except for the tests
        # as they initialize their own
        self.controller = self.get_controller(options)

    # Find the command to execute: the handler is the method on self whose
    # name matches the CLI command.
    handler = getattr(self, command, None)
    if handler is None:
        raise NotImplementedError(
            'No handler implemented for command ' + options.command)

    if command == 'launch':
        # Registration steps are best-effort: a failure is logged but must
        # not prevent the application from launching.
        try:
            # Ensure that the protocol handler are registered:
            # this is useful for the edit/open link in the Nuxeo interface
            register_protocol_handlers(self.controller)
            # Ensure that ndrive is registered as a startup application
            register_startup()
            # Ensure that ndrive is registered as a contextual menu entry.
            # Only under win32 for now, for OS X Finder implementation see
            # https://jira.nuxeo.com/browse/NXDRIVE-119
            register_contextual_menu()
        except Exception, e:
            self.log.warn(e)
        # NOTE(review): the dispatch to handler(options) presumably follows
        # here — confirm against the full method, this view may be truncated.
def handle(self, argv):
    """ Parse options, setup logs and manager and dispatch execution. """
    options = self.parse_cli(argv)
    if hasattr(options, 'local_folder'):
        options.local_folder = normalized_path(options.local_folder)

    # 'launch' is the default command if None is provided
    command = getattr(options, 'command', 'launch')

    if command != 'uninstall':
        # Configure the logging framework, except for the tests as they
        # configure their own.
        # Don't need uninstall logs either for now.
        self._configure_logger(command, options)

    self.log = get_logger(__name__)
    self.log.debug("Command line: argv=%r, options=%r",
                   ' '.join(argv), options)

    # Update default options
    Options.update(options, setter='cli')

    if command != 'uninstall':
        # Install utility to help debugging segmentation faults
        self._install_faulthandler()
        # Initialize a manager for this process
        self.manager = self.get_manager()

    # Find the command to execute: the handler is the method on self whose
    # name matches the CLI command.
    handler = getattr(self, command, None)
    if not handler:
        raise NotImplementedError(
            'No handler implemented for command ' + command)

    try:
        return handler(options)
    except Exception as e:
        if Options.debug:
            # Make it possible to use the postmortem debugger
            raise
        # Prefer the exception's .msg attribute when present; otherwise log
        # the exception object itself.
        msg = e.msg if hasattr(e, 'msg') else e
        self.log.error("Error executing '%s': %s", command, msg,
                       exc_info=True)
"""API to access a remote file system for synchronization.""" import unicodedata from collections import namedtuple from datetime import datetime import urllib2 import os from nxdrive.logging_config import get_logger from nxdrive.client.common import NotFound from nxdrive.client.common import BUFFER_SIZE from nxdrive.client.base_automation_client import Unauthorized from nxdrive.client.base_automation_client import BaseAutomationClient log = get_logger(__name__) DOWNLOAD_TMP_FILE_PREFIX = '.' DOWNLOAD_TMP_FILE_SUFFIX = '.part' # Data transfer objects BaseRemoteFileInfo = namedtuple('RemoteFileInfo', [ 'name', # title of the file (not guaranteed to be locally unique) 'uid', # id of the file 'parent_uid', # id of the parent file 'path', # abstract file system path: useful for ordering folder trees 'folderish', # True is can host children 'last_modification_time', # last update time 'digest', # digest of the file 'digest_algorithm', # digest algorithm of the file 'download_url', # download URL of the file
# coding: utf-8 """ Console mode application. """ from PyQt4 import QtCore from PyQt4.QtCore import QCoreApplication from nxdrive.logging_config import get_logger from nxdrive.options import Options log = get_logger(__name__) class ConsoleApplication(QCoreApplication): """Console mode Nuxeo Drive application""" def __init__(self, manager, argv=()): super(ConsoleApplication, self).__init__(list(argv)) self.manager = manager self.mainEngine = None for engine in self.manager.get_engines().values(): self.mainEngine = engine break if self.mainEngine is not None and Options.debug: from nxdrive.engine.engine import EngineLogger self.engineLogger = EngineLogger(self.mainEngine) # Make sure manager is stopped before quitting self.aboutToQuit.connect(self.manager.stop) self.quit_if_done = Options.quit_if_done if self.quit_if_done: # Connect engines to a signal allowing to quit application if synchronization is over
def setUp(self):
    """Run the base setUp, then attach a logger named after the test class."""
    super(TestTokenBucket, self).setUp()
    logger_name = self.__class__.__name__
    self.log = get_logger(logger_name)
def __init__(self, server_url, user_id, device_id, client_version,
             proxies=None, proxy_exceptions=None, password=None, token=None,
             repository=DEFAULT_REPOSITORY_NAME, ignored_prefixes=None,
             ignored_suffixes=None, timeout=20, blob_timeout=60,
             cookie_jar=None, upload_tmp_dir=None, check_suspended=None):
    """Initialize the automation client: auth, proxies and URL openers.

    :param server_url: base URL of the Nuxeo server; a trailing '/' is
        appended if missing.
    :param password: password credential; alternatively pass *token*.
    :param timeout: request timeout in seconds; negative/None falls back
        to 20.
    :param blob_timeout: blob transfer timeout; negative/None falls back
        to 60.
    :param check_suspended: callable polled during long-running transfers
        to let the sync thread suspend.
    """
    # Rebind the module-level logger at construction time.
    global log
    log = get_logger(__name__)
    # Function to check during long-running processing like upload /
    # download if the synchronization thread needs to be suspended
    self.check_suspended = check_suspended

    # Sanitize timeouts: never allow None/negative values.
    if timeout is None or timeout < 0:
        timeout = 20
    self.timeout = timeout
    # Dont allow null timeout
    if blob_timeout is None or blob_timeout < 0:
        blob_timeout = 60
    self.blob_timeout = blob_timeout

    # File-name filters: fall back to project-wide defaults when unset.
    if ignored_prefixes is not None:
        self.ignored_prefixes = ignored_prefixes
    else:
        self.ignored_prefixes = DEFAULT_IGNORED_PREFIXES
    if ignored_suffixes is not None:
        self.ignored_suffixes = ignored_suffixes
    else:
        self.ignored_suffixes = DEFAULT_IGNORED_SUFFIXES

    self.upload_tmp_dir = (upload_tmp_dir if upload_tmp_dir is not None
                           else tempfile.gettempdir())

    if not server_url.endswith('/'):
        server_url += '/'
    self.server_url = server_url
    self.repository = repository
    self.user_id = user_id
    self.device_id = device_id
    self.client_version = client_version
    self._update_auth(password=password, token=token)

    self.cookie_jar = cookie_jar
    cookie_processor = urllib2.HTTPCookieProcessor(
        cookiejar=cookie_jar)

    # Get proxy handler
    proxy_handler = get_proxy_handler(proxies,
                                      proxy_exceptions=proxy_exceptions,
                                      url=self.server_url)

    # Build URL openers: a plain one, and a streaming one with the extra
    # handlers needed for blob transfers.
    self.opener = urllib2.build_opener(cookie_processor, proxy_handler)
    self.streaming_opener = urllib2.build_opener(cookie_processor,
                                                 proxy_handler,
                                                 *get_handlers())

    # Set Proxy flag
    self.is_proxy = False
    opener_proxies = get_opener_proxies(self.opener)
    log.trace('Proxy configuration: %s, effective proxy list: %r',
              get_proxy_config(proxies), opener_proxies)
    if opener_proxies:
        self.is_proxy = True

    # Endpoint URLs relative to the server root.
    self.automation_url = server_url + 'site/automation/'
    self.batch_upload_url = 'batch/upload'
    self.batch_execute_url = 'batch/execute'

    # New batch upload API
    self.new_upload_api_available = True
    self.rest_api_url = server_url + 'api/v1/'
    self.batch_upload_path = 'upload'
    # Probe the server to discover available API capabilities.
    self.fetch_api()
def __init__(self, server_url, user_id, device_id, client_version,
             proxies=None, proxy_exceptions=None, password=None, token=None,
             repository=Options.remote_repo, timeout=20, blob_timeout=60,
             cookie_jar=None, upload_tmp_dir=None, check_suspended=None):
    """Initialize the automation client: auth, proxies and URL openers.

    :param server_url: base URL of the Nuxeo server; a trailing '/' is
        appended if missing.
    :param password: password credential; alternatively pass *token*.
    :param timeout: request timeout in seconds; negative/None falls back
        to 20.
    :param blob_timeout: blob transfer timeout; negative/None falls back
        to 60.
    :param check_suspended: callable polled during long-running transfers
        to let the sync thread suspend.
    """
    # Rebind the module-level logger at construction time.
    global log
    log = get_logger(__name__)
    # Function to check during long-running processing like upload /
    # download if the synchronization thread needs to be suspended
    self.check_suspended = check_suspended

    # Sanitize timeouts: never allow None/negative values.
    if timeout is None or timeout < 0:
        timeout = 20
    self.timeout = timeout
    # Dont allow null timeout
    if blob_timeout is None or blob_timeout < 0:
        blob_timeout = 60
    self.blob_timeout = blob_timeout

    self.upload_tmp_dir = (upload_tmp_dir if upload_tmp_dir is not None
                           else tempfile.gettempdir())

    if not server_url.endswith('/'):
        server_url += '/'
    self.server_url = server_url
    self.repository = repository
    self.user_id = user_id
    self.device_id = device_id
    self.client_version = client_version
    self._update_auth(password=password, token=token)

    self.cookie_jar = cookie_jar
    cookie_processor = urllib2.HTTPCookieProcessor(cookiejar=cookie_jar)

    # Get proxy handler
    proxy_handler = get_proxy_handler(proxies,
                                      proxy_exceptions=proxy_exceptions,
                                      url=self.server_url)

    # Build URL openers: a plain one, and a streaming one with the extra
    # handlers needed for blob transfers.
    self.opener = urllib2.build_opener(cookie_processor, proxy_handler)
    self.streaming_opener = urllib2.build_opener(cookie_processor,
                                                 proxy_handler,
                                                 *get_handlers())

    # Set Proxy flag
    self.is_proxy = False
    opener_proxies = get_opener_proxies(self.opener)
    log.trace('Proxy configuration: %s, effective proxy list: %r',
              get_proxy_config(proxies), opener_proxies)
    if opener_proxies:
        self.is_proxy = True

    # Endpoint URLs relative to the server root.
    self.automation_url = server_url + 'site/automation/'
    self.batch_upload_url = 'batch/upload'
    self.batch_execute_url = 'batch/execute'

    # New batch upload API
    self.new_upload_api_available = True
    self.rest_api_url = server_url + 'api/v1/'
    self.batch_upload_path = 'upload'
    self.is_event_log_id = True
    # Verify credentials / connectivity against the server.
    self.check_access()