Example #1
        def set_rate_limit_changed(new_upload_rate, new_download_rate):
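            # Read the rates currently stored in the configuration; missing or
            # non-numeric values fall back to -1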
            try:
                current_upload_rate = int(Manager.get()._dao.get_config('upload_rate', -1))
            except ValueError:
                current_upload_rate = -1

            try:
                current_download_rate = int(Manager.get()._dao.get_config('download_rate', -1))
            except ValueError:
                current_download_rate = -1

            # Nothing to do if neither rate changed
            if new_upload_rate == current_upload_rate and new_download_rate == current_download_rate:
                return

            slower = new_upload_rate < current_upload_rate or new_download_rate < current_download_rate
            if slower:
                # change processors first, then update the rate(s)
                ConfigWatcher.ConfigModifiedEventHandler.change_processors(new_upload_rate, new_download_rate)

            if new_upload_rate != current_upload_rate:
                BaseAutomationClient.set_upload_rate_limit(new_upload_rate)
                Manager.get()._dao.update_config('upload_rate', new_upload_rate)
                log.trace('update upload rate: %s', str(BaseAutomationClient.upload_token_bucket))

            if new_download_rate != current_download_rate:
                BaseAutomationClient.set_download_rate_limit(new_download_rate)
                Manager.get()._dao.update_config('download_rate', new_download_rate)
                log.trace('update download rate: %s', str(BaseAutomationClient.download_token_bucket))

            if not slower:
                # rate(s) increased: update the rate(s) first, then change processors
                ConfigWatcher.ConfigModifiedEventHandler.change_processors(new_upload_rate, new_download_rate)
Example #2
 def load(self):
     if self._engine_definitions is None:
         self._engine_definitions = self._dao.get_engines()
     in_error = dict()
     self._engines = dict()
     for engine in self._engine_definitions:
         if engine.engine not in self._engine_types:
             log.warn("Can't find engine %s anymore", engine.engine)
             if engine.engine not in in_error:
                 in_error[engine.engine] = True
                 self.engineNotFound.emit(engine)
             # Skip this engine: its type cannot be instantiated anymore
             continue
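         # Instantiate the engine; the number of processors is derived from the
         # current upload/download rate limits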
         self._engines[engine.uid] = self._engine_types[engine.engine](
             self, engine,
             remote_watcher_delay=self.remote_watcher_delay,
             processors=get_number_of_processors(BaseAutomationClient.get_upload_rate_limit(),
                                                 BaseAutomationClient.get_download_rate_limit()))
         self._engines[engine.uid].online.connect(self._force_autoupdate)
         self.initEngine.emit(self._engines[engine.uid])
Example #3
 def bind_engine(self, engine_type, local_folder, name, binder, starts=True):
     """Bind a local folder to a remote nuxeo server"""
     if name is None and hasattr(binder, 'url'):
         name = self._get_engine_name(binder.url)
     if hasattr(binder, 'url'):
         url = binder.url
         if '#' in url:
             # Last part of the url is the engine type
             engine_type = url.split('#')[1]
             binder.url = url.split('#')[0]
             log.debug("Engine type has been specified in the url: %s will be used", engine_type)
     if not self.check_local_folder_available(local_folder):
         raise FolderAlreadyUsed()
     if engine_type not in self._engine_types:
         raise EngineTypeMissing()
     if self._engines is None:
         self.load()
     local_folder = normalized_path(local_folder)
     if local_folder == self.get_configuration_folder():
         # Prevent from binding in the configuration folder
         raise FolderAlreadyUsed()
     uid = uuid.uuid1().hex
     # TODO Check that engine is not inside another or same position
     engine_def = self._dao.add_engine(engine_type, local_folder, uid, name)
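      # Create the engine instance; on failure, the DAO entry added above is rolled back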
     try:
         self._engines[uid] = self._engine_types[engine_type](
             self, engine_def, binder=binder,
             remote_watcher_delay=self.remote_watcher_delay,
             processors=get_number_of_processors(BaseAutomationClient.get_upload_rate_limit(),
                                                 BaseAutomationClient.get_download_rate_limit()))
         self._engine_definitions.append(engine_def)
     except Exception as e:
         log.exception(e)
         if uid in self._engines:
             del self._engines[uid]
         self._dao.delete_engine(uid)
         # TODO Remove the db ?
          # Re-raise preserving the original traceback
          raise
     # As new engine was just bound, refresh application update status
     self.refresh_update_status()
     if starts:
         self._engines[uid].start()
     self.newEngine.emit(self._engines[uid])
     return self._engines[uid]
Example #4
    def __init__(self, options):
        '''
        Constructor
        '''
        if Manager._singleton is not None:
            raise Exception("Only one instance of Manager can be create")
        Manager._singleton = self
        super(Manager, self).__init__()

        # Let's bypass HTTPS verification unless --consider-ssl-errors is passed
        # since many servers unfortunately have invalid certificates.
        # See https://www.python.org/dev/peps/pep-0476/
        # and https://jira.nuxeo.com/browse/NXDRIVE-506
        if not options.consider_ssl_errors:
            log.warn("--consider-ssl-errors option is False, won't verify HTTPS certificates")
            import ssl
            try:
                _create_unverified_https_context = ssl._create_unverified_context
            except AttributeError:
                log.info("Legacy Python that doesn't verify HTTPS certificates by default")
            else:
                log.info("Handle target environment that doesn't support HTTPS verification:"
                         " globally disable verification by monkeypatching the ssl module though highly discouraged")
                ssl._create_default_https_context = _create_unverified_https_context
        else:
            log.info("--consider-ssl-errors option is True, will verify HTTPS certificates")

        self._autolock_service = None
        self.nxdrive_home = os.path.expanduser(options.nxdrive_home)
        self.nxdrive_home = os.path.realpath(self.nxdrive_home)
        if not os.path.exists(self.nxdrive_home):
            os.mkdir(self.nxdrive_home)
        self.remote_watcher_delay = options.delay
        self._nofscheck = options.nofscheck
        self._debug = options.debug
        self._engine_definitions = None
        self._engine_types = dict()
        from nxdrive.engine.next.engine_next import EngineNext
        from nxdrive.engine.engine import Engine
        self._engine_types["NXDRIVE"] = Engine
        self._engine_types["NXDRIVENEXT"] = EngineNext
        self._engines = None
        self.proxies = None
        self.proxy_exceptions = None
        self._app_updater = None
        self._dao = None
        self._create_dao()
        if options.proxy_server is not None:
            proxy = ProxySettings()
            proxy.from_url(options.proxy_server)
            proxy.save(self._dao)
        # Now we can update the logger if needed
        if options.log_level_file is not None:
            # Set the log_level_file option
            handler = self._get_file_log_handler()
            if handler is not None:
                handler.setLevel(options.log_level_file)
                # Store it in the database
                self._dao.update_config("log_level_file", str(handler.level))
        else:
            # No log_level provided, use the one from the db (default is INFO)
            self._update_logger(int(self._dao.get_config("log_level_file", "20")))
        # Add auto lock on edit
        res = self._dao.get_config("direct_edit_auto_lock")
        if res is None:
            self._dao.update_config("direct_edit_auto_lock", "1")
        # Persist update URL infos
        self._dao.update_config("update_url", options.update_site_url)
        self._dao.update_config("beta_update_url", options.beta_update_site_url)
        self.refresh_proxies()
        self._os = AbstractOSIntegration.get(self)

        # Set up the bandwidth rate limits: prefer the stored configuration, then the command-line option, else -1
        upload_rate = self._dao.get_config('upload_rate') or options.upload_rate or -1
        BaseAutomationClient.set_upload_rate_limit(upload_rate)
        log.debug('upload rate: %s', str(BaseAutomationClient.upload_token_bucket))

        download_rate = self._dao.get_config('download_rate') or options.download_rate or -1
        BaseAutomationClient.set_download_rate_limit(download_rate)
        log.debug('download rate: %s', str(BaseAutomationClient.download_token_bucket))
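        # Watch the configuration for changes so the rate limits can be adjusted at runtime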
        self.config_watcher = ConfigWatcher()

        # Create DirectEdit
        self._create_autolock_service()
        self._create_direct_edit(options.protocol_url)
        # Create notification service
        self._script_engine = None
        self._script_object = None
        self._create_notification_service()
        self._started = False
        # Pause if in debug
        self._pause = self.is_debug()
        self.device_id = self._dao.get_config("device_id")
        self.updated = False  # self.update_version()
        if self.device_id is None:
            self.generate_device_id()

        self.load()

        # Create the application update verification thread
        self._create_updater(options.update_check_delay)

        # Force language
        if options.force_locale is not None:
            self.set_config("locale", options.force_locale)
        # Setup analytics tracker
        self._tracker = None
        if self.get_tracking():
            self._create_tracker()