class Maestral(object): """An open source Dropbox client for macOS and Linux. All methods and properties return objects or raise exceptions which can safely be serialized, i.e., pure Python types. The only exception are MaestralApiErrors which need to be registered explicitly with the serpent serializer used by Pyro5 in order to be transmitted to a frontend. :param str config_name: Name of maestral configuration to run. This will create a new configuration file if none exists. :param bool run: If ``True``, Maestral will start syncing immediately. Defaults to ``True``. """ def __init__(self, config_name='maestral', run=True, log_to_stdout=False): self._daemon_running = True self._log_to_stdout = log_to_stdout self._config_name = config_name self._conf = MaestralConfig(self._config_name) self._state = MaestralState(self._config_name) self._setup_logging() self.client = MaestralApiClient(self._config_name) self.monitor = MaestralMonitor(self.client) self.sync = self.monitor.sync # periodically check for updates and refresh account info self.update_thread = Thread( name='maestral-update-check', target=self._periodic_refresh, daemon=True, ) self.update_thread.start() if run: self.run() def run(self): """ Runs setup if necessary, starts syncing, and starts systemd notifications if run as a systemd notify service. """ if self.pending_dropbox_folder: self.monitor.reset_sync_state() self.create_dropbox_directory() # start syncing self.start_sync() if NOTIFY_SOCKET: # notify systemd that we have started logger.debug('Running as systemd notify service') logger.debug('NOTIFY_SOCKET = %s', NOTIFY_SOCKET) sd_notifier.notify('READY=1') if IS_WATCHDOG: # notify systemd periodically if alive logger.debug('Running as systemd watchdog service') logger.debug('WATCHDOG_USEC = %s', WATCHDOG_USEC) logger.debug('WATCHDOG_PID = %s', WATCHDOG_PID) self.watchdog_thread = Thread( name='maestral-watchdog', target=self._periodic_watchdog, daemon=True, ) self.watchdog_thread.start() def _setup_logging(self): """ Sets up logging to log files, status and error properties, desktop notifications, the systemd journal if available, bugsnag if error reports are enabled, and to stdout if requested. 
""" log_level = self._conf.get('app', 'log_level') mdbx_logger = logging.getLogger('maestral') mdbx_logger.setLevel(logging.DEBUG) log_fmt_long = logging.Formatter( fmt='%(asctime)s %(name)s %(levelname)s: %(message)s', datefmt='%Y-%m-%d %H:%M:%S') log_fmt_short = logging.Formatter(fmt='%(message)s') # log to file rfh_log_file = get_log_path('maestral', self._config_name + '.log') self.log_handler_file = logging.handlers.RotatingFileHandler( rfh_log_file, maxBytes=10**7, backupCount=1) self.log_handler_file.setFormatter(log_fmt_long) self.log_handler_file.setLevel(log_level) mdbx_logger.addHandler(self.log_handler_file) # log to journal when launched from systemd if INVOCATION_ID and journal: self.log_handler_journal = journal.JournalHandler() self.log_handler_journal.setFormatter(log_fmt_short) self.log_handler_journal.setLevel(log_level) mdbx_logger.addHandler(self.log_handler_journal) else: self.log_handler_journal = None # send systemd notifications when started as 'notify' daemon if NOTIFY_SOCKET: self.log_handler_sd = SdNotificationHandler() self.log_handler_sd.setFormatter(log_fmt_short) self.log_handler_sd.setLevel(logging.INFO) mdbx_logger.addHandler(self.log_handler_sd) else: self.log_handler_sd = None # log to stdout (disabled by default) level = log_level if self._log_to_stdout else 100 self.log_handler_stream = logging.StreamHandler(sys.stdout) self.log_handler_stream.setFormatter(log_fmt_long) self.log_handler_stream.setLevel(level) mdbx_logger.addHandler(self.log_handler_stream) # log to cached handlers for GUI and CLI self._log_handler_info_cache = CachedHandler(maxlen=1) self._log_handler_info_cache.setFormatter(log_fmt_short) self._log_handler_info_cache.setLevel(logging.INFO) mdbx_logger.addHandler(self._log_handler_info_cache) self._log_handler_error_cache = CachedHandler() self._log_handler_error_cache.setFormatter(log_fmt_short) self._log_handler_error_cache.setLevel(logging.ERROR) mdbx_logger.addHandler(self._log_handler_error_cache) # log to desktop notifications # 'file changed' events will be collated and sent as desktop # notifications by the monitor directly, we don't handle them here self.desktop_notifier = MaestralDesktopNotifier.for_config( self.config_name) self.desktop_notifier.setLevel(logging.WARNING) mdbx_logger.addHandler(self.desktop_notifier) # log to bugsnag (disabled by default) self._log_handler_bugsnag = BugsnagHandler() self._log_handler_bugsnag.setLevel(100) mdbx_logger.addHandler(self._log_handler_bugsnag) self.analytics = self._conf.get('app', 'analytics') # ==== methods to access config and saved state ====================================== @property def config_name(self): """The selected configuration.""" return self._config_name def set_conf(self, section, name, value): """Sets a configuration option.""" self._conf.set(section, name, value) def get_conf(self, section, name): """Gets a configuration option.""" return self._conf.get(section, name) def set_state(self, section, name, value): """Sets a state value.""" self._state.set(section, name, value) def get_state(self, section, name): """Gets a state value.""" return self._state.get(section, name) # ==== getters / setters for config with side effects ================================ @property def dropbox_path(self): """Returns the path to the local Dropbox directory. Read only. Use :meth:`create_dropbox_directory` or :meth:`move_dropbox_directory` to set or change the Dropbox directory location instead. 
""" return self.sync.dropbox_path @property def excluded_items(self): """ Returns a list of excluded folders (read only). Use :meth:`exclude_item`, :meth:`include_item` or :meth:`set_excluded_items` to change which items are excluded from syncing. """ return self.sync.excluded_items @property def log_level(self): """Log level for log files, the stream handler and the systemd journal.""" return self._conf.get('app', 'log_level') @log_level.setter def log_level(self, level_num): """Setter: Log level for log files, the stream handler and the systemd journal.""" self.log_handler_file.setLevel(level_num) if self.log_handler_journal: self.log_handler_journal.setLevel(level_num) if self.log_to_stdout: self.log_handler_stream.setLevel(level_num) self._conf.set('app', 'log_level', level_num) @property def log_to_stdout(self): return self._log_to_stdout @log_to_stdout.setter def log_to_stdout(self, enabled=True): """Enables or disables logging to stdout.""" self._log_to_stdout = enabled level = self.log_level if enabled else 100 self.log_handler_stream.setLevel(level) @property def analytics(self): """Enables or disables logging of errors to bugsnag.""" return self._conf.get('app', 'analytics') @analytics.setter def analytics(self, enabled): """Setter: Enables or disables logging of errors to bugsnag.""" bugsnag.configuration.auto_notify = enabled bugsnag.configuration.auto_capture_sessions = enabled self._log_handler_bugsnag.setLevel(logging.ERROR if enabled else 100) self._conf.set('app', 'analytics', enabled) @property def notification_snooze(self): """Snoozed time for desktop notifications in minutes.""" return self.desktop_notifier.snoozed @notification_snooze.setter def notification_snooze(self, minutes): """Setter: Snoozed time for desktop notifications in minutes.""" self.desktop_notifier.snoozed = minutes @property def notification_level(self): """Level for desktop notifications.""" return self.desktop_notifier.notify_level @notification_level.setter def notification_level(self, level): """Setter: Level for desktop notifications.""" self.desktop_notifier.notify_level = level # ==== state information ============================================================ @property def pending_dropbox_folder(self): """Bool indicating if a local Dropbox directory has been created.""" return not osp.isdir(self._conf.get('main', 'path')) @property def pending_first_download(self): """Bool indicating if the initial download has already occurred.""" return (self._state.get('sync', 'lastsync') == 0 or self._state.get('sync', 'cursor') == '') @property def syncing(self): """ Bool indicating if Maestral is syncing. It will be ``True`` if syncing is not paused by the user *and* Maestral is connected to the internet. """ return self.monitor.syncing.is_set() or self.monitor.startup.is_set() @property def paused(self): """Bool indicating if syncing is paused by the user. This is set by calling :meth:`pause`.""" return self.monitor.paused_by_user.is_set( ) and not self.sync.lock.locked() @property def stopped(self): """Bool indicating if syncing is stopped, for instance because of an exception.""" return not self.monitor.running.is_set() and not self.sync.lock.locked( ) @property def connected(self): """Bool indicating if Dropbox servers can be reached.""" return self.monitor.connected.is_set() @property def status(self): """ Returns a string with the last status message. This can be displayed as information to the user but should not be relied on otherwise. 
""" return self._log_handler_info_cache.getLastMessage() @property def sync_errors(self): """Returns list containing the current sync errors as dicts.""" sync_errors = list(self.sync.sync_errors.queue) sync_errors_dicts = [error_to_dict(e) for e in sync_errors] return sync_errors_dicts @property def maestral_errors(self): """ Returns a list of Maestral's errors as dicts. This does not include lost internet connections or file sync errors which only emit warnings and are tracked and cleared separately. Errors listed here must be acted upon for Maestral to continue syncing. """ maestral_errors = [ r.exc_info[1] for r in self._log_handler_error_cache.cached_records ] maestral_errors_dicts = [error_to_dict(e) for e in maestral_errors] return maestral_errors_dicts def clear_maestral_errors(self): """ Manually clears all Maestral errors. This should be used after they have been resolved by the user through the GUI or CLI. """ self._log_handler_error_cache.clear() @property def account_profile_pic_path(self): """ Returns the path of the current account's profile picture. There may not be an actual file at that path, if the user did not set a profile picture or the picture has not yet been downloaded. """ return get_cache_path('maestral', self._config_name + '_profile_pic.jpeg') def get_file_status(self, local_path): """ Returns the sync status of an individual file. :param str local_path: Path to file on the local drive. May be relative to the current working directory. :returns: String indicating the sync status. Can be 'uploading', 'downloading', 'up to date', 'error', or 'unwatched' (for files outside of the Dropbox directory). :rtype: str """ if not self.syncing: return FileStatus.Unwatched.value local_path = osp.abspath(local_path) try: dbx_path = self.sync.to_dbx_path(local_path) except ValueError: return FileStatus.Unwatched.value if local_path in self.monitor.queued_for_upload: return FileStatus.Uploading.value elif local_path in self.monitor.queued_for_download: return FileStatus.Downloading.value elif any(local_path == err['local_path'] for err in self.sync_errors): return FileStatus.Error.value elif self.sync.get_local_rev(dbx_path): return FileStatus.Synced.value else: return FileStatus.Unwatched.value def get_activity(self): """ Gets current upload / download activity. :returns: A dictionary with lists of all files currently queued for or being uploaded or downloaded. Paths are given relative to the Dropbox folder. :rtype: dict(list, list) """ PathItem = namedtuple('PathItem', 'path status') uploading = [] downloading = [] for path in self.monitor.uploading: path.lstrip(self.dropbox_path) uploading.append(PathItem(path, 'uploading')) for path in self.monitor.queued_for_upload: path.lstrip(self.dropbox_path) uploading.append(PathItem(path, 'queued')) for path in self.monitor.downloading: path.lstrip(self.dropbox_path) downloading.append(PathItem(path, 'downloading')) for path in self.monitor.queued_for_download: path.lstrip(self.dropbox_path) downloading.append(PathItem(path, 'queued')) return dict(uploading=uploading, downloading=downloading) @handle_disconnect def get_account_info(self): """ Gets account information from Dropbox and returns it as a dictionary. The entries will either be of type ``str`` or ``bool``. :returns: Dropbox account information. 
:rtype: dict[str, bool] :raises: :class:`MaestralApiError` """ res = self.client.get_account_info() return dropbox_stone_to_dict(res) @handle_disconnect def get_space_usage(self): """ Gets the space usage stored by Dropbox and returns it as a dictionary. The entries will either be of type ``str`` or ``bool``. :returns: Dropbox account information. :rtype: dict[str, bool] :raises: :class:`MaestralApiError` """ res = self.client.get_space_usage() return dropbox_stone_to_dict(res) # ==== control methods for front ends ================================================ @handle_disconnect def get_profile_pic(self): """ Attempts to download the user's profile picture from Dropbox. The picture saved in Maestral's cache directory for retrieval when there is no internet connection. This function will fail silently in case of :class:`MaestralApiError`s. :returns: Path to saved profile picture or None if no profile picture is set. """ try: res = self.client.get_account_info() except MaestralApiError: pass else: if res.profile_photo_url: # download current profile pic res = requests.get(res.profile_photo_url) with open(self.account_profile_pic_path, 'wb') as f: f.write(res.content) return self.account_profile_pic_path else: # delete current profile pic self._delete_old_profile_pics() @handle_disconnect def list_folder(self, dbx_path, **kwargs): """ List all items inside the folder given by :param:`dbx_path`. :param dbx_path: Path to folder on Dropbox. :returns: List of Dropbox item metadata as dicts or ``False`` if listing failed due to connection issues. :rtype: list[dict] :raises: :class:`MaestralApiError` """ res = self.client.list_folder(dbx_path, **kwargs) entries = [dropbox_stone_to_dict(e) for e in res.entries] return entries def _delete_old_profile_pics(self): # delete all old pictures for file in os.listdir(get_cache_path('maestral')): if file.startswith(self._config_name + '_profile_pic'): try: os.unlink(osp.join(get_cache_path('maestral'), file)) except OSError: pass def rebuild_index(self): """ Rebuilds the rev file by comparing remote with local files and updating rev numbers from the Dropbox server. Files are compared by their content hashes and conflicting copies are created if the contents differ. File changes during the rebuild process will be queued and uploaded once rebuilding has completed. Rebuilding will be performed asynchronously. :raises: :class:`MaestralApiError` """ self.monitor.rebuild_index() def start_sync(self): """ Creates syncing threads and starts syncing. """ self.monitor.start() def resume_sync(self): """ Resumes the syncing threads if paused. """ self.monitor.resume() def pause_sync(self): """ Pauses the syncing threads if running. """ self.monitor.pause() def stop_sync(self): """ Stops the syncing threads if running, destroys observer thread. """ self.monitor.stop() def reset_sync_state(self): """ Resets the sync index and state. Only call this to clean up leftover state information if a Dropbox was improperly unlinked (e.g., auth token has been manually deleted). Otherwise leave state management to Maestral. """ self.monitor.reset_sync_state() def unlink(self): """ Unlinks the configured Dropbox account but leaves all downloaded files in place. All syncing metadata will be removed as well. Connection and API errors will be handled silently but the Dropbox access key will always be removed from the user's PC. 
""" self.stop_sync() try: self.client.unlink() except (ConnectionError, MaestralApiError): pass try: os.remove(self.sync.rev_file_path) except OSError: pass self.sync.clear_rev_index() delete(self.sync.rev_file_path) self._conf.cleanup() self._state.cleanup() logger.info('Unlinked Dropbox account.') def exclude_item(self, dbx_path): """ Excludes file or folder from sync and deletes it locally. It is safe to call this method with items which have already been excluded. :param str dbx_path: Dropbox path of item to exclude. :raises: :class:`ValueError` if ``dbx_path`` is not on Dropbox. :raises: :class:`ConnectionError` if connection to Dropbox fails. """ # input validation md = self.client.get_metadata(dbx_path) if not md: raise ValueError(f'"{dbx_path}" does not exist on Dropbox') dbx_path = dbx_path.lower().rstrip(osp.sep) # add the path to excluded list if self.sync.is_excluded_by_user(dbx_path): logger.info('%s was already excluded', dbx_path) logger.info(IDLE) return excluded_items = self.sync.excluded_items excluded_items.append(dbx_path) self.sync.excluded_items = excluded_items logger.info('Excluded %s', dbx_path) self._remove_after_excluded(dbx_path) logger.info(IDLE) def _remove_after_excluded(self, dbx_path): # book keeping self.sync.clear_sync_error(dbx_path=dbx_path) self.sync.set_local_rev(dbx_path, None) # remove folder from local drive local_path = self.sync.to_local_path(dbx_path) # dbx_path will be lower-case, we there explicitly run `to_cased_path` local_path = to_cased_path(local_path) if local_path: with self.monitor.fs_event_handler.ignore( local_path, recursive=osp.isdir(local_path), event_types=(EVENT_TYPE_DELETED, )): delete(local_path) def include_item(self, dbx_path): """ Includes file or folder in sync and downloads in the background. It is safe to call this method with items which have already been included, they will not be downloaded again. :param str dbx_path: Dropbox path of item to include. :raises: :class:`ValueError` if ``dbx_path`` is not on Dropbox or lies inside another excluded folder. :raises: :class:`ConnectionError` if connection to Dropbox fails. """ # input validation md = self.client.get_metadata(dbx_path) if not md: raise ValueError(f'"{dbx_path}" does not exist on Dropbox') dbx_path = dbx_path.lower().rstrip(osp.sep) old_excluded_items = self.sync.excluded_items for folder in old_excluded_items: if is_child(dbx_path, folder): raise ValueError( f'"{dbx_path}" lies inside the excluded folder ' f'"{folder}". Please include "{folder}" first.') # Get items which will need to be downloaded, do not attempt to download # children of `dbx_path` which were already included. # `new_included_items` will either be empty (`dbx_path` was already # included), just contain `dbx_path` itself (the item was fully excluded) or # only contain children of `dbx_path` (`dbx_path` was partially included). new_included_items = tuple(x for x in old_excluded_items if x == dbx_path or is_child(x, dbx_path)) if new_included_items: # remove `dbx_path` or all excluded children from the excluded list excluded_items = list( set(old_excluded_items) - set(new_included_items)) else: logger.info('%s was already included', dbx_path) return self.sync.excluded_items = excluded_items logger.info('Included %s', dbx_path) # download items from Dropbox for folder in new_included_items: self.sync.queued_newly_included_downloads.put(folder) @handle_disconnect def set_excluded_items(self, items=None): """ Sets the list of excluded files or folders. 
If not given, gets all top level folder paths from Dropbox and asks user to include or exclude. Items which are no in ``items`` but were previously excluded will be downloaded. On initial sync, this does not trigger any downloads. :param list items: If given, list of excluded files or folders to set. :raises: :class:`MaestralApiError` """ if items is None: excluded_items = [] # get all top-level Dropbox folders result = self.client.list_folder('/', recursive=False) # paginate through top-level folders, ask to exclude for entry in result.entries: if isinstance(entry, files.FolderMetadata): yes = click.confirm( f'Exclude "{entry.path_display}" from sync?', prompt_suffix='') if yes: excluded_items.append(entry.path_lower) else: excluded_items = self.sync.clean_excluded_items_list(items) old_excluded_items = self.sync.excluded_items added_excluded_items = set(excluded_items) - set(old_excluded_items) added_included_items = set(old_excluded_items) - set(excluded_items) self.sync.excluded_items = excluded_items if not self.pending_first_download: # apply changes for path in added_excluded_items: logger.info('Excluded %s', path) self._remove_after_excluded(path) for path in added_included_items: if not self.sync.is_excluded_by_user(path): logger.info('Included %s', path) self.sync.queued_newly_included_downloads.put(path) logger.info(IDLE) def excluded_status(self, dbx_path): """ Returns 'excluded', 'partially excluded' or 'included'. This function will not check if the item actually exists on Dropbox. :param str dbx_path: Path to item on Dropbox. :returns: Excluded status. :rtype: str """ dbx_path = dbx_path.lower().rstrip(osp.sep) excluded_items = self._conf.get('main', 'excluded_items') if dbx_path in excluded_items: return 'excluded' elif any(is_child(f, dbx_path) for f in excluded_items): return 'partially excluded' else: return 'included' @with_sync_paused def move_dropbox_directory(self, new_path=None): """ Sets the local Dropbox directory. This moves all local files to the new location and resumes syncing afterwards. :param str new_path: Full path to local Dropbox folder. If not given, the user will be prompted to input the path. :raises: ``OSError`` if moving the directory fails. """ # get old and new paths old_path = self.sync.dropbox_path new_path = new_path or select_dbx_path_dialog(self._config_name) try: if osp.samefile(old_path, new_path): return except FileNotFoundError: pass if osp.exists(new_path): raise FileExistsError(f'Path "{new_path}" already exists.') # move folder from old location or create a new one if no old folder exists if osp.isdir(old_path): shutil.move(old_path, new_path) else: os.makedirs(new_path) # update config file and client self.sync.dropbox_path = new_path @with_sync_paused def create_dropbox_directory(self, path=None): """ Creates a new Dropbox directory. Only call this during setup. :param str path: Full path to local Dropbox folder. If not given, the user will be prompted to input the path. :raises: ``OSError`` if creation fails """ path = path or select_dbx_path_dialog(self._config_name, allow_merge=True) # create new folder os.makedirs(path, exist_ok=True) # update config file and client self.sync.dropbox_path = path # ==== utility methods for front ends ================================================ def to_local_path(self, dbx_path): """ Converts a path relative to the Dropbox folder to a correctly cased local file system path. :param str dbx_path: Path relative to Dropbox root. 
:returns: Corresponding path of a location in the local Dropbox folder. :rtype: str """ return self.sync.to_local_path(dbx_path) @staticmethod def check_for_updates(): """ Checks if an update is available. :returns: A dictionary with information about the latest release with the fields ``update_available`` (bool), ``latest_release`` (str), ``release_notes`` (str) and ``error`` (str or None). :rtype: dict """ return check_update_available() def shutdown_pyro_daemon(self): """ Sets the ``_daemon_running`` flag to ``False``. This will be checked by Pyro5 periodically to shut down the daemon when requested. """ self._daemon_running = False if NOTIFY_SOCKET: # notify systemd that we are shutting down sd_notifier.notify('STOPPING=1') # ==== private methods =============================================================== def _loop_condition(self): return self._daemon_running def _periodic_refresh(self): while True: # update account info self.get_account_info() self.get_space_usage() self.get_profile_pic() # check for maestral updates res = self.check_for_updates() if not res['error']: self._state.set('app', 'latest_release', res['latest_release']) time.sleep(60 * 60) # 60 min def _periodic_watchdog(self): while self.monitor._threads_alive(): sd_notifier.notify('WATCHDOG=1') time.sleep(int(WATCHDOG_USEC) / (2 * 10**6)) def __del__(self): try: self.monitor.stop() except Exception: pass def __repr__(self): email = self._state.get('account', 'email') account_type = self._state.get('account', 'type') return f'<{self.__class__.__name__}({email}, {account_type})>'
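
# A minimal usage sketch for the Maestral class above, written as it might appear in
# a frontend or test script. The config name, local paths and excluded folders below
# are placeholders, and the helper name `_example_frontend_usage` is not part of the
# original module.
def _example_frontend_usage():
    m = Maestral(config_name='maestral', run=False)

    # set up the local folder and exclusions before the first sync
    if m.pending_dropbox_folder:
        m.create_dropbox_directory('/home/user/Dropbox')  # placeholder path
    m.set_excluded_items(['/very large folder'])          # placeholder exclusion

    m.start_sync()

    # poll some state for display in a CLI or GUI
    print(m.status)
    print(m.get_activity())
    print(m.get_file_status('/home/user/Dropbox/report.pdf'))

    m.stop_sync()
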
class MaestralApiClient: """Client for the Dropbox SDK. This client defines basic methods to wrap Dropbox Python SDK calls, such as creating, moving, modifying and deleting files and folders on Dropbox and downloading files from Dropbox. All Dropbox SDK exceptions and :class:`OSError`s related to accessing or saving local files will be caught and reraised as :class:`errors.MaestralApiError`s. Connection errors from requests will be caught and reraised as :class:`ConnectionError`. :param str config_name: Name of config file and state file to use. :param int timeout: Timeout for individual requests in sec. Defaults to 60 sec. """ SDK_VERSION = "2.0" _timeout = 60 def __init__(self, config_name='maestral', timeout=_timeout): self.config_name = config_name self._state = MaestralState(config_name) # get Dropbox session self.auth = OAuth2Session(config_name) if not self.auth.load_token(): self.auth.link() self._timeout = timeout self._last_longpoll = None self._backoff = 0 self._retry_count = 0 # initialize API client self.dbx = dropbox.Dropbox( self.auth.access_token, session=SESSION, user_agent=USER_AGENT, timeout=self._timeout ) @to_maestral_error() def get_account_info(self, dbid=None): """ Gets current account information. :param str dbid: Dropbox ID of account. If not given, will get the info of our own account. :returns: :class:`dropbox.users.FullAccount` instance or `None` if failed. :rtype: dropbox.users.FullAccount """ if dbid: res = self.dbx.users_get_account(dbid) else: res = self.dbx.users_get_current_account() if not dbid: # save our own account info to config if res.account_type.is_basic(): account_type = "basic" elif res.account_type.is_business(): account_type = "business" elif res.account_type.is_pro(): account_type = "pro" else: account_type = "" self._state.set("account", "email", res.email) self._state.set("account", "display_name", res.name.display_name) self._state.set("account", "abbreviated_name", res.name.abbreviated_name) self._state.set("account", "type", account_type) return res @to_maestral_error() def get_space_usage(self): """ Gets current account space usage. :returns: :class:`SpaceUsage` instance. :rtype: :class:`SpaceUsage` """ res = self.dbx.users_get_space_usage() # convert from dropbox.users.SpaceUsage to SpaceUsage res.__class__ = SpaceUsage # save results to config self._state.set("account", "usage", str(res)) self._state.set("account", "usage_type", res.allocation_type()) return res @to_maestral_error() def unlink(self): """ Unlinks the Dropbox account and deletes local sync information. """ self.auth.delete_creds() self.dbx.auth_token_revoke() # should only raise auth errors @to_maestral_error(dbx_path_arg=1) def get_metadata(self, dbx_path, **kwargs): """ Gets metadata for an item on Dropbox or returns ``False`` if no metadata is available. Keyword arguments are passed on to Dropbox SDK files_get_metadata call. :param str dbx_path: Path of folder on Dropbox. :param kwargs: Keyword arguments for Dropbox SDK files_download_to_file. :returns: Metadata of item at the given path. 
:rtype: :class:`dropbox.files.FileMetadata` | :class:`dropbox.files.FolderMetadata` | bool """ try: md = self.dbx.files_get_metadata(dbx_path, **kwargs) logger.debug(f"Retrieved metadata for '{md.path_display}'") except dropbox.exceptions.ApiError as exc: # DropboxAPI error is only raised when the item does not exist on Dropbox # this is handled on a DEBUG level since we use call `get_metadata` to check # if a file exists logger.debug(f"Could not get metadata for '{dbx_path}': {exc}") md = False return md @to_maestral_error(dbx_path_arg=1) def list_revisions(self, dbx_path, mode="path", limit=10): """ Lists all file revisions for the given file. :param str dbx_path: Path to file on Dropbox. :param str mode: Must be "path" or "id". If "id", specify the Dropbox file ID instead of the file path to get revisions across move and rename events. :param int limit: Maximum number of revisions to list. Defaults to 10. :returns: File revision history. :rtype: :class:`dropbox.files.ListRevisionsResult` """ mode = dropbox.files.ListRevisionsMode(mode) return self.dbx.files_list_revisions(dbx_path, mode=mode, limit=limit) @to_maestral_error(dbx_path_arg=1) def download(self, dbx_path, dst_path, **kwargs): """ Downloads file from Dropbox to our local folder. :param str dbx_path: Path to file on Dropbox. :param str dst_path: Path to local download destination. :param kwargs: Keyword arguments for Dropbox SDK files_download_to_file. :returns: Metadata of downloaded item. :rtype: :class:`dropbox.files.FileMetadata` """ # create local directory if not present dst_path_directory = osp.dirname(dst_path) try: os.makedirs(dst_path_directory) except FileExistsError: pass md = self.dbx.files_download_to_file(dst_path, dbx_path, **kwargs) logger.debug(f"File '{md.path_display}' (rev {md.rev}) " f"was successfully downloaded as '{dst_path}'") return md @to_maestral_error(dbx_path_arg=2) def upload(self, local_path, dbx_path, chunk_size_mb=5, **kwargs): """ Uploads local file to Dropbox. :param str local_path: Path of local file to upload. :param str dbx_path: Path to save file on Dropbox. :param kwargs: Keyword arguments for Dropbox SDK files_upload. :param int chunk_size_mb: Maximum size for individual uploads in MB. If larger than 150 MB, it will be set to 150 MB. :returns: Metadata of uploaded file. :rtype: :class:`dropbox.files.FileMetadata` """ chunk_size_mb = min(chunk_size_mb, 150) chunk_size = chunk_size_mb * 10**6 # convert to bytes size = osp.getsize(local_path) size_str = bytes_to_str(size) mtime = osp.getmtime(local_path) mtime_dt = datetime.datetime(*time.gmtime(mtime)[:6]) if size <= chunk_size: with open(local_path, "rb") as f: md = self.dbx.files_upload( f.read(), dbx_path, client_modified=mtime_dt, **kwargs ) else: # Note: We currently do not support resuming interrupted uploads. Dropbox # keeps upload sessions open for 48h so this could be done in the future. 
with open(local_path, "rb") as f: session_start = self.dbx.files_upload_session_start(f.read(chunk_size)) cursor = dropbox.files.UploadSessionCursor( session_id=session_start.session_id, offset=f.tell() ) commit = dropbox.files.CommitInfo( path=dbx_path, client_modified=mtime_dt, **kwargs ) while f.tell() < size: try: if size - f.tell() <= chunk_size: md = self.dbx.files_upload_session_finish( f.read(chunk_size), cursor, commit ) else: self.dbx.files_upload_session_append_v2(f.read(chunk_size), cursor) cursor.offset = f.tell() logger.info(f"Uploading {bytes_to_str(f.tell())}/{size_str}...") except dropbox.exceptions.DropboxException as exc: error = exc.error if (isinstance(error, dropbox.files.UploadSessionFinishError) and error.is_lookup_failed()): session_lookup_error = error.get_lookup_failed() elif isinstance(error, dropbox.files.UploadSessionLookupError): session_lookup_error = error else: raise exc if session_lookup_error.is_incorrect_offset(): o = session_lookup_error.get_incorrect_offset().correct_offset # reset position in file f.seek(o) cursor.offset = f.tell() else: raise exc logger.debug(f"File '{md.path_display}' (rev {md.rev}) uploaded to Dropbox") return md @to_maestral_error(dbx_path_arg=1) def remove(self, dbx_path, **kwargs): """ Removes a file / folder from Dropbox. :param str dbx_path: Path to file on Dropbox. :param kwargs: Keyword arguments for Dropbox SDK files_delete_v2. :returns: Metadata of deleted item. :rtype: :class:`dropbox.files.FileMetadata` | :class:`dropbox.files.FolderMetadata` """ # try to move file (response will be metadata, probably) res = self.dbx.files_delete_v2(dbx_path, **kwargs) md = res.metadata logger.debug(f"Item '{dbx_path}' removed from Dropbox") return md @to_maestral_error(dbx_path_arg=2) def move(self, dbx_path, new_path, **kwargs): """ Moves / renames files or folders on Dropbox. :param str dbx_path: Path to file/folder on Dropbox. :param str new_path: New path on Dropbox to move to. :param kwargs: Keyword arguments for Dropbox SDK files_move_v2. :returns: Metadata of moved item. :rtype: :class:`dropbox.files.FileMetadata` | :class:`dropbox.files.FolderMetadata` """ res = self.dbx.files_move_v2( dbx_path, new_path, allow_shared_folder=True, allow_ownership_transfer=True, **kwargs ) md = res.metadata logger.debug(f"Item moved from '{dbx_path}' to '{md.path_display}' on Dropbox") return md @to_maestral_error(dbx_path_arg=1) def make_dir(self, dbx_path, **kwargs): """ Creates a folder on Dropbox. :param str dbx_path: Path o fDropbox folder. :param kwargs: Keyword arguments for Dropbox SDK files_create_folder_v2. :returns: Metadata of created folder. :rtype: :class:`dropbox.files.FolderMetadata` """ res = self.dbx.files_create_folder_v2(dbx_path, **kwargs) md = res.metadata logger.debug(f"Created folder '{md.path_display}' on Dropbox") return md @to_maestral_error(dbx_path_arg=1) def get_latest_cursor(self, dbx_path, include_non_downloadable_files=False, **kwargs): """ Gets the latest cursor for the given folder and subfolders. :param str dbx_path: Path of folder on Dropbox. :param bool include_non_downloadable_files: If ``True``, files that cannot be downloaded (at the moment only G-suite files on Dropbox) will be included. :param kwargs: Other keyword arguments for Dropbox SDK files_list_folder. :returns: The latest cursor representing a state of a folder and its subfolders. 
:rtype: str """ dbx_path = "" if dbx_path == "/" else dbx_path res = self.dbx.files_list_folder_get_latest_cursor( dbx_path, include_non_downloadable_files=include_non_downloadable_files, recursive=True, **kwargs, ) return res.cursor @to_maestral_error(dbx_path_arg=1) def list_folder(self, dbx_path, retry=3, include_non_downloadable_files=False, **kwargs): """ Lists the contents of a folder on Dropbox. :param str dbx_path: Path of folder on Dropbox. :param int retry: Number of times to try again call fails because cursor is reset. :param bool include_non_downloadable_files: If ``True``, files that cannot be downloaded (at the moment only G-suite files on Dropbox) will be included. :param kwargs: Other keyword arguments for Dropbox SDK files_list_folder. :returns: Content of given folder. :rtype: :class:`dropbox.files.ListFolderResult` """ dbx_path = "" if dbx_path == "/" else dbx_path results = [] res = self.dbx.files_list_folder( dbx_path, include_non_downloadable_files=include_non_downloadable_files, **kwargs ) results.append(res) idx = 0 while results[-1].has_more: idx += len(results[-1].entries) logger.info(f"Indexing {idx}...") try: more_results = self.dbx.files_list_folder_continue(results[-1].cursor) results.append(more_results) except dropbox.exceptions.DropboxException as exc: new_exc = api_to_maestral_error(exc, dbx_path) if isinstance(new_exc, CursorResetError) and self._retry_count < retry: # retry up to three times, then raise self._retry_count += 1 self.list_folder(dbx_path, include_non_downloadable_files, **kwargs) else: self._retry_count = 0 raise exc logger.debug(f"Listed contents of folder '{dbx_path}'") self._retry_count = 0 return self.flatten_results(results) @staticmethod def flatten_results(results): """ Flattens a list of :class:`dropbox.files.ListFolderResult` instances to a single instance with the cursor of the last entry in the list. :param list results: List of :class:`dropbox.files.ListFolderResult` instances. :returns: Single :class:`dropbox.files.ListFolderResult` instance. :rtype: :class:`dropbox.files.ListFolderResult` """ entries_all = [] for result in results: entries_all += result.entries results_flattened = dropbox.files.ListFolderResult( entries=entries_all, cursor=results[-1].cursor, has_more=False ) return results_flattened @to_maestral_error() def wait_for_remote_changes(self, last_cursor, timeout=40): """ Waits for remote changes since :param:`last_cursor`. Call this method after starting the Dropbox client and periodically to get the latest updates. :param str last_cursor: Last to cursor to compare for changes. :param int timeout: Seconds to wait until timeout. Must be between 30 and 480. :returns: ``True`` if changes are available, ``False`` otherwise. :rtype: bool """ if not 30 <= timeout <= 480: raise ValueError("Timeout must be in range [30, 480]") logger.debug(f"Waiting for remote changes since cursor:\n{last_cursor}") # honour last request to back off if self._last_longpoll is not None: while time.time() - self._last_longpoll < self._backoff: time.sleep(1) result = self.dbx.files_list_folder_longpoll(last_cursor, timeout=timeout) # keep track of last long poll, back off if requested by SDK if result.backoff: self._backoff = result.backoff + 5 else: self._backoff = 0 logger.debug(f"Detected remote changes: {result.changes}") self._last_longpoll = time.time() return result.changes # will be True or False @to_maestral_error() def list_remote_changes(self, last_cursor): """ Lists changes to remote Dropbox since :param:`last_cursor`. 
        Call this after :meth:`wait_for_remote_changes` returns ``True``.

        :param str last_cursor: Last cursor to compare for changes.
        :returns: Remote changes since given cursor.
        :rtype: :class:`dropbox.files.ListFolderResult`
        """
        results = [self.dbx.files_list_folder_continue(last_cursor)]

        while results[-1].has_more:
            more_results = self.dbx.files_list_folder_continue(results[-1].cursor)
            results.append(more_results)

        # combine all results into one
        results = self.flatten_results(results)

        logger.debug(f"Listed remote changes: {results.entries}")

        return results
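
# A sketch of the long-poll loop that a sync monitor could build on top of the client
# above. `client` is assumed to be a linked MaestralApiClient and `stop_event` a
# threading.Event owned by the caller; both names are illustrative only.
def _example_remote_change_loop(client, stop_event):
    cursor = client.get_latest_cursor('/')

    while not stop_event.is_set():
        # blocks for up to `timeout` seconds and honours Dropbox back-off hints
        if client.wait_for_remote_changes(cursor, timeout=40):
            result = client.list_remote_changes(cursor)
            for entry in result.entries:
                print('changed:', entry.path_display)
            cursor = result.cursor
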
class MaestralApiClient: """Client for the Dropbox SDK. This client defines basic methods to wrap Dropbox Python SDK calls, such as creating, moving, modifying and deleting files and folders on Dropbox and downloading files from Dropbox. All Dropbox SDK exceptions and :class:`OSError`s related to accessing or saving local files will be caught and reraised as :class:`errors.MaestralApiError`s. Connection errors from requests will be caught and reraised as :class:`ConnectionError`. :param str config_name: Name of config file and state file to use. :param int timeout: Timeout for individual requests in sec. Defaults to 60 sec. """ SDK_VERSION = '2.0' _timeout = 60 def __init__(self, config_name='maestral', timeout=_timeout): self.config_name = config_name self._state = MaestralState(config_name) # get Dropbox session self.auth = OAuth2Session(config_name) if not self.auth.load_token(): self.auth.link() self._timeout = timeout self._last_longpoll = None self._backoff = 0 self._retry_count = 0 # initialize API client self.dbx = dropbox.Dropbox(self.auth.access_token, session=SESSION, user_agent=USER_AGENT, timeout=self._timeout) @to_maestral_error() def get_account_info(self, dbid=None): """ Gets current account information. :param str dbid: Dropbox ID of account. If not given, will get the info of the currently linked account. :returns: Account info. :rtype: :class:`dropbox.users.FullAccount` """ if dbid: res = self.dbx.users_get_account(dbid) else: res = self.dbx.users_get_current_account() if not dbid: # save our own account info to config if res.account_type.is_basic(): account_type = 'basic' elif res.account_type.is_business(): account_type = 'business' elif res.account_type.is_pro(): account_type = 'pro' else: account_type = '' self._state.set('account', 'email', res.email) self._state.set('account', 'display_name', res.name.display_name) self._state.set('account', 'abbreviated_name', res.name.abbreviated_name) self._state.set('account', 'type', account_type) return res @to_maestral_error() def get_space_usage(self): """ Gets current account space usage. :returns: :class:`SpaceUsage` instance. :rtype: :class:`SpaceUsage` """ res = self.dbx.users_get_space_usage() # convert from dropbox.users.SpaceUsage to SpaceUsage res.__class__ = SpaceUsage # save results to config self._state.set('account', 'usage', str(res)) self._state.set('account', 'usage_type', res.allocation_type()) return res @to_maestral_error() def unlink(self): """ Unlinks the Dropbox account and deletes local sync information. """ self.auth.delete_creds() self.dbx.auth_token_revoke() # should only raise auth errors @to_maestral_error(dbx_path_arg=1) def get_metadata(self, dbx_path, **kwargs): """ Gets metadata for an item on Dropbox or returns ``False`` if no metadata is available. Keyword arguments are passed on to Dropbox SDK files_get_metadata call. :param str dbx_path: Path of folder on Dropbox. :param kwargs: Keyword arguments for Dropbox SDK files_download_to_file. :returns: Metadata of item at the given path or ``None``. :rtype: :class:`dropbox.files.Metadata` """ try: return self.dbx.files_get_metadata(dbx_path, **kwargs) except dropbox.exceptions.ApiError: # DropboxAPI error is only raised when the item does not exist on Dropbox # this is handled on a DEBUG level since we use call `get_metadata` to check # if a file exists pass @to_maestral_error(dbx_path_arg=1) def list_revisions(self, dbx_path, mode='path', limit=10): """ Lists all file revisions for the given file. :param str dbx_path: Path to file on Dropbox. 
:param str mode: Must be 'path' or 'id'. If 'id', specify the Dropbox file ID instead of the file path to get revisions across move and rename events. :param int limit: Maximum number of revisions to list. Defaults to 10. :returns: File revision history. :rtype: :class:`dropbox.files.ListRevisionsResult` """ mode = dropbox.files.ListRevisionsMode(mode) return self.dbx.files_list_revisions(dbx_path, mode=mode, limit=limit) @to_maestral_error(dbx_path_arg=1) def download(self, dbx_path, dst_path, **kwargs): """ Downloads file from Dropbox to our local folder. :param str dbx_path: Path to file on Dropbox. :param str dst_path: Path to local download destination. :param kwargs: Keyword arguments for Dropbox SDK files_download_to_file. :returns: Metadata of downloaded item. :rtype: :class:`dropbox.files.FileMetadata` """ # create local directory if not present dst_path_directory = osp.dirname(dst_path) try: os.makedirs(dst_path_directory) except FileExistsError: pass md, http_resp = self.dbx.files_download(dbx_path, **kwargs) chunksize = 2**16 size_str = bytes_to_str(md.size) downloaded = 0 with open(dst_path, 'wb') as f: with contextlib.closing(http_resp): for c in http_resp.iter_content(chunksize): if md.size > 5 * 10**6: # 5 MB logger.info( f'Downloading {bytes_to_str(downloaded)}/{size_str}...' ) f.write(c) downloaded += chunksize return md @to_maestral_error(dbx_path_arg=2) def upload(self, local_path, dbx_path, chunk_size_mb=5, **kwargs): """ Uploads local file to Dropbox. :param str local_path: Path of local file to upload. :param str dbx_path: Path to save file on Dropbox. :param kwargs: Keyword arguments for Dropbox SDK files_upload. :param int chunk_size_mb: Maximum size for individual uploads in MB. If larger than 150 MB, it will be set to 150 MB. :returns: Metadata of uploaded file. :rtype: :class:`dropbox.files.FileMetadata` """ chunk_size_mb = clamp(chunk_size_mb, 0.1, 150) chunk_size = chunk_size_mb * 10**6 # convert to bytes size = osp.getsize(local_path) size_str = bytes_to_str(size) mtime = osp.getmtime(local_path) mtime_dt = datetime.datetime(*time.gmtime(mtime)[:6]) if size <= chunk_size: with open(local_path, 'rb') as f: md = self.dbx.files_upload(f.read(), dbx_path, client_modified=mtime_dt, **kwargs) return md else: # Note: We currently do not support resuming interrupted uploads. Dropbox # keeps upload sessions open for 48h so this could be done in the future. with open(local_path, 'rb') as f: session_start = self.dbx.files_upload_session_start( f.read(chunk_size)) cursor = dropbox.files.UploadSessionCursor( session_id=session_start.session_id, offset=f.tell()) commit = dropbox.files.CommitInfo(path=dbx_path, client_modified=mtime_dt, **kwargs) while True: try: if size - f.tell() <= chunk_size: md = self.dbx.files_upload_session_finish( f.read(chunk_size), cursor, commit) return md else: self.dbx.files_upload_session_append_v2( f.read(chunk_size), cursor) cursor.offset = f.tell() logger.info( f'Uploading {bytes_to_str(f.tell())}/{size_str}...' 
) except dropbox.exceptions.DropboxException as exc: error = exc.error if (isinstance(error, dropbox.files.UploadSessionFinishError) and error.is_lookup_failed()): session_lookup_error = error.get_lookup_failed() elif isinstance( error, dropbox.files.UploadSessionLookupError): session_lookup_error = error else: raise exc if session_lookup_error.is_incorrect_offset(): o = session_lookup_error.get_incorrect_offset( ).correct_offset # reset position in file f.seek(o) cursor.offset = f.tell() else: raise exc @to_maestral_error(dbx_path_arg=1) def remove(self, dbx_path, **kwargs): """ Removes a file / folder from Dropbox. :param str dbx_path: Path to file on Dropbox. :param kwargs: Keyword arguments for Dropbox SDK files_delete_v2. :returns: Metadata of deleted item. :rtype: :class:`dropbox.files.Metadata` """ # try to remove file (response will be metadata, probably) res = self.dbx.files_delete_v2(dbx_path, **kwargs) md = res.metadata return md @to_maestral_error() def remove_batch(self, dbx_paths, batch_size=900): """ Delete multiple items on Dropbox in a batch job. :param list[str] dbx_paths: List of dropbox paths to delete. :param int batch_size: Number of folders to create in each batch. Dropbox allows batches of up to 1,000 folders. Larger values will be capped automatically. :returns: List of Metadata for created folders or SyncError for failures. Entries will be in the same order as given paths. :rtype: list """ batch_size = clamp(batch_size, 1, 1000) check_interval = round(0.5 + batch_size / 1000, 2) entries = [] result_list = [] # up two ~ 1,000 entries allowed per batch according to # https://www.dropbox.com/developers/reference/data-ingress-guide for chunk in chunks(dbx_paths, n=batch_size): res = self.dbx.files_delete_batch(chunk) if res.is_complete(): batch_res = res.get_complete() entries.extend(batch_res.entries) elif res.is_async_job_id(): async_job_id = res.get_async_job_id() res = self.dbx.files_delete_batch_check(async_job_id) while res.is_in_progress(): time.sleep(check_interval) res = self.dbx.files_delete_batch_check(async_job_id) if res.is_complete(): batch_res = res.get_complete() entries.extend(batch_res.entries) for i, entry in enumerate(entries): if entry.is_success(): result_list.append(entry.get_success().metadata) elif entry.is_failure(): exc = dropbox.exceptions.ApiError( error=entry.get_failure(), user_message_text=None, user_message_locale=None, request_id=None, ) sync_err = dropbox_to_maestral_error(exc, dbx_path=dbx_paths[i]) result_list.append(sync_err) return result_list @to_maestral_error(dbx_path_arg=2) def move(self, dbx_path, new_path, **kwargs): """ Moves / renames files or folders on Dropbox. :param str dbx_path: Path to file/folder on Dropbox. :param str new_path: New path on Dropbox to move to. :param kwargs: Keyword arguments for Dropbox SDK files_move_v2. :returns: Metadata of moved item. :rtype: :class:`dropbox.files.Metadata` """ res = self.dbx.files_move_v2(dbx_path, new_path, allow_shared_folder=True, allow_ownership_transfer=True, **kwargs) md = res.metadata return md @to_maestral_error(dbx_path_arg=1) def make_dir(self, dbx_path, **kwargs): """ Creates a folder on Dropbox. :param str dbx_path: Path of Dropbox folder. :param kwargs: Keyword arguments for Dropbox SDK files_create_folder_v2. :returns: Metadata of created folder. 
:rtype: :class:`dropbox.files.FolderMetadata` """ res = self.dbx.files_create_folder_v2(dbx_path, **kwargs) md = res.metadata return md @to_maestral_error() def make_dir_batch(self, dbx_paths, batch_size=900, **kwargs): """ Creates multiple folders on Dropbox in a batch job. :param list[str] dbx_paths: List of dropbox folder paths. :param int batch_size: Number of folders to create in each batch. Dropbox allows batches of up to 1,000 folders. Larger values will be capped automatically. :param kwargs: Keyword arguments for Dropbox SDK files_create_folder_batch. :returns: List of Metadata for created folders or SyncError for failures. Entries will be in the same order as given paths. :rtype: list """ batch_size = clamp(batch_size, 1, 1000) check_interval = round(0.5 + batch_size / 1000, 2) entries = [] result_list = [] # up two ~ 1,000 entries allowed per batch according to # https://www.dropbox.com/developers/reference/data-ingress-guide for chunk in chunks(dbx_paths, n=batch_size): res = self.dbx.files_create_folder_batch(chunk, **kwargs) if res.is_complete(): batch_res = res.get_complete() entries.extend(batch_res.entries) elif res.is_async_job_id(): async_job_id = res.get_async_job_id() res = self.dbx.files_create_folder_batch_check(async_job_id) while res.is_in_progress(): time.sleep(check_interval) res = self.dbx.files_create_folder_batch_check( async_job_id) if res.is_complete(): batch_res = res.get_complete() entries.extend(batch_res.entries) elif res.is_failed(): error = res.get_failed() if error.is_too_many_files(): res_list = self.make_dir_batch(chunk, batch_size=round( batch_size / 2), **kwargs) result_list.extend(res_list) for i, entry in enumerate(entries): if entry.is_success(): result_list.append(entry.get_success().metadata) elif entry.is_failure(): exc = dropbox.exceptions.ApiError( error=entry.get_failure(), user_message_text=None, user_message_locale=None, request_id=None, ) sync_err = dropbox_to_maestral_error(exc, dbx_path=dbx_paths[i]) result_list.append(sync_err) return result_list @to_maestral_error(dbx_path_arg=1) def get_latest_cursor(self, dbx_path, include_non_downloadable_files=False, **kwargs): """ Gets the latest cursor for the given folder and subfolders. :param str dbx_path: Path of folder on Dropbox. :param bool include_non_downloadable_files: If ``True``, files that cannot be downloaded (at the moment only G-suite files on Dropbox) will be included. :param kwargs: Other keyword arguments for Dropbox SDK files_list_folder. :returns: The latest cursor representing a state of a folder and its subfolders. :rtype: str """ dbx_path = '' if dbx_path == '/' else dbx_path res = self.dbx.files_list_folder_get_latest_cursor( dbx_path, include_non_downloadable_files=include_non_downloadable_files, recursive=True, **kwargs, ) return res.cursor @to_maestral_error(dbx_path_arg=1) def list_folder(self, dbx_path, retry=3, include_non_downloadable_files=False, **kwargs): """ Lists the contents of a folder on Dropbox. :param str dbx_path: Path of folder on Dropbox. :param int retry: Number of times to try again call fails because cursor is reset. :param bool include_non_downloadable_files: If ``True``, files that cannot be downloaded (at the moment only G-suite files on Dropbox) will be included. :param kwargs: Other keyword arguments for Dropbox SDK files_list_folder. :returns: Content of given folder. 
:rtype: :class:`dropbox.files.ListFolderResult` """ dbx_path = '' if dbx_path == '/' else dbx_path results = [] res = self.dbx.files_list_folder( dbx_path, include_non_downloadable_files=include_non_downloadable_files, **kwargs) results.append(res) idx = 0 while results[-1].has_more: idx += len(results[-1].entries) logger.info(f'Indexing {idx}...') try: more_results = self.dbx.files_list_folder_continue( results[-1].cursor) results.append(more_results) except dropbox.exceptions.DropboxException as exc: new_exc = dropbox_to_maestral_error(exc, dbx_path) if isinstance(new_exc, CursorResetError) and self._retry_count < retry: # retry up to three times, then raise self._retry_count += 1 self.list_folder(dbx_path, include_non_downloadable_files, **kwargs) else: self._retry_count = 0 raise exc self._retry_count = 0 return self.flatten_results(results) @staticmethod def flatten_results(results): """ Flattens a list of :class:`dropbox.files.ListFolderResult` instances to a single instance with the cursor of the last entry in the list. :param list results: List of :class:`dropbox.files.ListFolderResult` instances. :returns: Single :class:`dropbox.files.ListFolderResult` instance. :rtype: :class:`dropbox.files.ListFolderResult` """ entries_all = [] for result in results: entries_all += result.entries results_flattened = dropbox.files.ListFolderResult( entries=entries_all, cursor=results[-1].cursor, has_more=False) return results_flattened @to_maestral_error() def wait_for_remote_changes(self, last_cursor, timeout=40): """ Waits for remote changes since :param:`last_cursor`. Call this method after starting the Dropbox client and periodically to get the latest updates. :param str last_cursor: Last to cursor to compare for changes. :param int timeout: Seconds to wait until timeout. Must be between 30 and 480. :returns: ``True`` if changes are available, ``False`` otherwise. :rtype: bool """ if not 30 <= timeout <= 480: raise ValueError('Timeout must be in range [30, 480]') # honour last request to back off if self._last_longpoll is not None: while time.time() - self._last_longpoll < self._backoff: time.sleep(1) result = self.dbx.files_list_folder_longpoll(last_cursor, timeout=timeout) # keep track of last long poll, back off if requested by SDK if result.backoff: self._backoff = result.backoff + 5 else: self._backoff = 0 self._last_longpoll = time.time() return result.changes # will be True or False @to_maestral_error() def list_remote_changes(self, last_cursor): """ Lists changes to remote Dropbox since :param:`last_cursor`. Call this after :method:`wait_for_remote_changes` returns ``True``. :param str last_cursor: Last to cursor to compare for changes. :returns: Remote changes since given cursor. :rtype: :class:`dropbox.files.ListFolderResult` """ results = [self.dbx.files_list_folder_continue(last_cursor)] while results[-1].has_more: more_results = self.dbx.files_list_folder_continue( results[-1].cursor) results.append(more_results) # combine all results into one results = self.flatten_results(results) return results
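
# A brief sketch of direct file operations with the client above. The local and
# Dropbox paths are placeholders; uploads larger than `chunk_size_mb` automatically
# take the upload-session code path shown in `upload`.
def _example_file_transfer(client):
    md = client.upload('/tmp/report.pdf', '/report.pdf', chunk_size_mb=5)
    print('uploaded rev:', md.rev)

    if client.get_metadata('/report.pdf'):
        client.download('/report.pdf', '/tmp/report-copy.pdf')

    client.move('/report.pdf', '/archive/report.pdf')
    client.remove('/archive/report.pdf')
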
class OAuth2Session: """ OAuth2Session provides OAuth 2 login and token store in the preferred system keyring. To authenticate with Dropbox, run :meth:`get_auth_url` first and direct the user to visit that URL and retrieve an auth token. Verify the provided auth token with :meth:`verify_auth_token` and save it in the system keyring together with the corresponding Dropbox ID by calling :meth:`save_creds`. Supported keyring backends are, in order of preference: * MacOS Keychain * Any keyring implementing the SecretService Dbus specification * KWallet * Gnome Keyring * Plain text storage When the auth flow is completed, a short-lived access token and a long-lived refresh token are generated. Only the long-lived refresh token will be saved in the system keychain for future sessions, it can be used to generate short-lived access tokens as needed. If the auth flow was previously completed before Dropbox migrated to short-lived tokens, the ``token_access_type`` will be 'legacy' and only a long-lived access token will be available. .. warning:: Unlike MacOS Keychain, Gnome Keyring and KWallet do not support app-specific access to passwords. If the user unlocks those keyrings, we and any other application in the same user session get access to *all* saved passwords. :param config_name: Name of maestral config. :cvar int Success: Exit code for successful auth. :cvar int InvalidToken: Exit code for invalid token. :cvar int ConnectionFailed: Exit code for connection errors. """ Success = 0 InvalidToken = 1 ConnectionFailed = 2 default_token_access_type = "offline" _lock = RLock() def __init__(self, config_name: str, app_key: str = DROPBOX_APP_KEY) -> None: self._app_key = app_key self._config_name = config_name self._conf = MaestralConfig(config_name) self._state = MaestralState(config_name) self._auth_flow = DropboxOAuth2FlowNoRedirect( self._app_key, use_pkce=True, token_access_type=self.default_token_access_type, ) self._account_id = self._conf.get("account", "account_id") or None self._token_access_type = (self._state.get( "account", "token_access_type") or None) self.keyring = self._get_keyring_backend() # defer keyring access until token requested by user self.loaded = False self._access_token: Optional[str] = None self._refresh_token: Optional[str] = None self._expires_at: Optional[datetime] = None def _get_keyring_backend(self) -> KeyringBackend: """ Returns the keyring backend currently used. If none is used because we are not yet linked, use the backend specified in the config file (if valid) or choose the most secure of the available and supported keyring backends. 
""" import keyring.backends keyring_class = self._conf.get("app", "keyring").strip() if self._account_id and keyring_class != "automatic": # we are already linked and have a keyring set try: ring = load_keyring(keyring_class) except Exception as exc: # reset the keyring and prompt to relink # them bomb out with an exception self._conf.set("app", "keyring", "automatic") title = f"Cannot load keyring {keyring_class}" message = "Please relink Maestral to get new access token" new_exc = KeyringAccessError(title, message).with_traceback( exc.__traceback__) logger.error(title, exc_info=_exc_info(new_exc)) raise new_exc else: return ring else: try: ring = load_keyring(keyring_class) except Exception: # get preferred keyring backends for platform available_rings = keyring.backend.get_all_keyring() supported_rings = [ k for k in available_rings if isinstance(k, supported_keyring_backends) ] ring = max(supported_rings, key=lambda x: x.priority) self._conf.set( "app", "keyring", f"{ring.__class__.__module__}.{ring.__class__.__name__}", ) return ring @property def linked(self) -> bool: """Returns ``True`` if we have full auth credentials, ``False`` otherwise.""" if self.account_id: legacy = self._token_access_type == "legacy" and self.access_token offline = self._token_access_type == "offline" and self.refresh_token if legacy or offline: return True return False @property def account_id(self) -> Optional[str]: """Returns the account ID (read only). This call may block until the keyring is unlocked.""" return self._account_id @property def token_access_type(self) -> Optional[str]: """Returns the type of access token. If 'legacy', we have a long-lived access token. If 'offline', we have a short-lived access token with an expiry time and a long-lived refresh token to generate new access tokens.""" with self._lock: if not self.loaded: self.load_token() return self._token_access_type @property def access_token(self) -> Optional[str]: """Returns the access token (read only). This will always be set for a 'legacy' token. For an 'offline' token, this will only be set if we completed the auth flow in the current session. In case of an 'offline' token, use the refresh token to retrieve a short-lived access token through the Dropbox API instead. The call may block until the keyring is unlocked.""" with self._lock: if not self.loaded: self.load_token() return self._access_token @property def refresh_token(self) -> Optional[str]: """Returns the refresh token (read only). This will only be set for an 'offline' token. The call may block until the keyring is unlocked.""" with self._lock: if not self.loaded: self.load_token() return self._refresh_token @property def access_token_expiration(self) -> Optional[datetime]: """Returns the expiry time for the short-lived access token. This will only be set for an 'offline' token and if we completed the flow during the current session.""" # this will only be set if we linked in the current session return self._expires_at def load_token(self) -> None: """ Loads auth token from system keyring. This will be called automatically when accessing of the properties :attr:`linked`, :attr:`access_token`, :attr:`refresh_token` or :attr:`token_access_type`. :raises: :class:`keyring.errors.KeyringLocked` if the system keyring is locked. 
""" logger.debug(f"Using keyring: {self.keyring}") if not self._account_id: return try: token = self.keyring.get_password("Maestral", self._account_id) access_type = self._state.get("account", "token_access_type") if not access_type: # if no token type was saved, we linked with a version < 1.2.0 # default to legacy token access type access_type = "legacy" self._state.set("account", "token_access_type", access_type) self.loaded = True if token: if access_type == "legacy": self._access_token = token elif access_type == "offline": self._refresh_token = token else: msg = "Invalid token access type in state file." err = RuntimeError( "Invalid token access type in state file.") logger.error(msg, exc_info=_exc_info(err)) raise err self._token_access_type = access_type except KeyringLocked: title = f"Could not load auth token, {self.keyring.name} is locked" msg = "Please unlock the keyring and try again." exc = KeyringAccessError(title, msg) logger.error(title, exc_info=_exc_info(exc)) raise exc def get_auth_url(self) -> str: """ Gets the auth URL to start the OAuth2 implicit grant flow. :returns: Dropbox auth URL. """ authorize_url = self._auth_flow.start() return authorize_url def verify_auth_token(self, token) -> int: """ Verify the provided authorization token with Dropbox servers. :returns: :attr:`Success`, :attr:`InvalidToken`, or :attr:`ConnectionFailed`. """ with self._lock: try: res = self._auth_flow.finish(token) self._access_token = res.access_token self._refresh_token = res.refresh_token self._expires_at = res.expires_at self._account_id = res.account_id self._token_access_type = self.default_token_access_type self.loaded = True return self.Success except requests.exceptions.HTTPError: return self.InvalidToken except CONNECTION_ERRORS: return self.ConnectionFailed def save_creds(self) -> None: """ Saves the auth token to system keyring. Falls back to plain text storage if the user denies access to keyring. """ with self._lock: self._conf.set("account", "account_id", self._account_id) self._state.set("account", "token_access_type", self._token_access_type) if self._token_access_type == "offline": token = self.refresh_token else: token = self.access_token try: self.keyring.set_password("Maestral", self._account_id, token) click.echo(" > Credentials written.") if isinstance(self.keyring, keyrings.alt.file.PlaintextKeyring): click.echo(" > Warning: No supported keyring found, " "Dropbox credentials stored in plain text.") except KeyringLocked: # switch to plain text keyring if user won't unlock self.keyring = keyrings.alt.file.PlaintextKeyring() self._conf.set("app", "keyring", "keyrings.alt.file.PlaintextKeyring") self.save_creds() def delete_creds(self) -> None: """ Deletes auth token from system keyring. :raises: :class:`keyring.errors.KeyringLocked` if the system keyring is locked. """ with self._lock: if not self._account_id: # when keyring.delete_password is called without a username, # it may delete all passwords stored by Maestral on some backends return try: self.keyring.delete_password("Maestral", self._account_id) click.echo(" > Credentials removed.") except KeyringLocked: title = f"Could not delete auth token, {self.keyring.name} is locked" msg = "Please unlock the keyring and try again." 
                exc = KeyringAccessError(title, msg)
                logger.error(title, exc_info=_exc_info(exc))
                raise exc
            except PasswordDeleteError as exc:
                # password does not exist in keyring
                logger.info(exc.args[0])

            self._conf.set("account", "account_id", "")
            self._state.set("account", "token_access_type", "")
            self._conf.set("app", "keyring", "automatic")

            self._account_id = None
            self._access_token = None
            self._refresh_token = None
            self._token_access_type = None

    def __repr__(self) -> str:
        return (
            f"<{self.__class__.__name__}(config={self._config_name!r}, "
            f"account_id={self._account_id})>"
        )
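
# Illustrative sketch only (not part of Maestral's public API): how the interactive
# auth flow described in the class docstring fits together, from generating the auth
# URL to persisting credentials. The config name 'maestral' and the use of
# input()/print() for the prompt are assumptions made for this example.
def _example_link_flow() -> int:
    auth = OAuth2Session(config_name="maestral")

    if auth.linked:
        print("Already linked.")
        return OAuth2Session.Success

    # 1) generate the Dropbox auth URL and direct the user there
    url = auth.get_auth_url()
    print(f"Please go to {url} and authorize the app.")

    # 2) let the user paste the auth token back
    token = input("Enter the auth token here: ").strip()

    # 3) verify the token with Dropbox; on success, persist the refresh token and
    #    account ID in the system keyring
    res = auth.verify_auth_token(token)

    if res == OAuth2Session.Success:
        auth.save_creds()
    elif res == OAuth2Session.InvalidToken:
        print("Invalid token, please try again.")
    else:
        print("Could not connect to Dropbox, please check your internet connection.")

    return res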