Example #1
class DropboxWriter(FilebaseBaseWriter):
    """
    Writes items to a Dropbox folder.
    Available options:

        - access_token (str)
            OAuth access token for the Dropbox API.

        - filebase (str)
            Base path to store the items in the share.

    """
    supported_options = {
        'access_token': {
            'type': six.string_types,
            'env_fallback': 'EXPORTERS_DROPBOXWRITER_TOKEN'
        },
    }

    def __init__(self, *args, **kw):
        from dropbox import Dropbox
        super(DropboxWriter, self).__init__(*args, **kw)
        access_token = self.read_option('access_token')
        self.set_metadata('files_counter', Counter())
        self.client = Dropbox(access_token)

    def write(self, dump_path, group_key=None, file_name=False):
        if group_key is None:
            group_key = []
        self._write_file(dump_path, group_key, file_name)

    @retry_long
    def _upload_file(self, input_file, filepath):
        from dropbox import files
        session_id = self.client.files_upload_session_start(b'')
        current_offset = 0
        while True:
            data = input_file.read(2**20)
            if not data:
                break
            self.client.files_upload_session_append(data,
                                                    session_id.session_id,
                                                    current_offset)
            current_offset += len(data)
        cursor = files.UploadSessionCursor(session_id.session_id,
                                           current_offset)
        self.client.files_upload_session_finish(
            b'', cursor, files.CommitInfo(path='{}'.format(filepath)))

    def _write_file(self, dump_path, group_key, file_name=None):
        filebase_path, file_name = self.create_filebase_name(
            group_key, file_name=file_name)
        with open(dump_path, 'rb') as f:
            self._upload_file(f, '{}/{}'.format(filebase_path, file_name))
        self.get_metadata('files_counter')[filebase_path] += 1

    def get_file_suffix(self, path, prefix):
        number_of_keys = self.get_metadata('files_counter').get(path, 0)
        suffix = '{}'.format(str(number_of_keys))
        return suffix
Example #2
def _upload_large_file(dbx: dropbox.Dropbox, path: Path,
                       chunk_size: int) -> Iterator[Tuple[int, int]]:
    file_size = path.stat().st_size
    num_chunks = math.ceil(file_size / chunk_size)
    curr_chunk = 0
    yield curr_chunk, num_chunks
    with path.open(mode="rb") as f:
        upload_session_start_result = dbx.files_upload_session_start(
            f.read(chunk_size))
        curr_chunk += 1
        yield curr_chunk, num_chunks
        logger.debug(f"Uploaded chunk {curr_chunk}/{num_chunks} of {path}")
        cursor = dropbox.files.UploadSessionCursor(
            session_id=upload_session_start_result.session_id, offset=f.tell())
        commit = dropbox.files.CommitInfo(
            path=_upload_path(path), mode=dropbox.files.WriteMode("overwrite"))

        while f.tell() < file_size:
            if (file_size - f.tell()) <= chunk_size:
                dbx.files_upload_session_finish(f.read(chunk_size), cursor,
                                                commit)
            else:
                dbx.files_upload_session_append_v2(f.read(chunk_size), cursor)
                cursor.offset = f.tell()
            curr_chunk += 1
            logger.debug(f"Uploaded chunk {curr_chunk}/{num_chunks} of {path}")
            yield curr_chunk, num_chunks
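
A minimal driver for the generator above, shown only as an illustration: the client, chunk size, and file name are placeholders, and the module-level _upload_path helper (not shown) is assumed to build the destination path.

import dropbox
from pathlib import Path

dbx = dropbox.Dropbox("YOUR_ACCESS_TOKEN")  # hypothetical token
for done, total in _upload_large_file(dbx, Path("backup.tar.gz"), 4 * 1024 * 1024):
    print(f"uploaded chunk {done}/{total}")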
Example #3
def upload_dropbox_file_chucks(file_path, file_size):
    dbx = Dropbox(DROPBOX_API_KEY)
    with open(file_path, 'rb') as f:
        # The session is only finished inside the loop below, so this helper
        # assumes file_size is larger than FILE_CHUNK_SIZE.
        session = dbx.files_upload_session_start(f.read(FILE_CHUNK_SIZE))
        cursor = dbx_files.UploadSessionCursor(session_id=session.session_id,
                                               offset=f.tell())
        commit = dbx_files.CommitInfo(path=file_path)
        while f.tell() < file_size:
            if (file_size - f.tell()) <= FILE_CHUNK_SIZE:
                dbx.files_upload_session_finish(f.read(FILE_CHUNK_SIZE),
                                                cursor, commit)
            else:
                dbx.files_upload_session_append(f.read(FILE_CHUNK_SIZE),
                                                cursor.session_id,
                                                cursor.offset)
                cursor.offset = f.tell()
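
A hedged call sketch for the function above; it assumes DROPBOX_API_KEY, FILE_CHUNK_SIZE and the dbx_files alias (dropbox.files) are defined at module level as the snippet implies, and that the file is larger than one chunk.

import os

local_path = "backup.zip"  # hypothetical file
upload_dropbox_file_chucks(local_path, os.path.getsize(local_path))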
Example #4
def upload_chunked(dropbox_client: dropbox.Dropbox, local_file_path: str,
                   remote_file_path: str) -> None:
    """ Uploads a file in chucks to Dropbox, allowing it to resume on (connection) failure. """
    logger.info('Dropbox: Syncing file %s', remote_file_path)

    write_mode = dropbox.files.WriteMode.overwrite

    file_handle = open(local_file_path, 'rb')
    file_size = os.path.getsize(local_file_path)

    # Many thanks to https://stackoverflow.com/documentation/dropbox-api/409/uploading-a-file/1927/uploading-a-file-using-the-dropbox-python-sdk#t=201610181733061624381
    CHUNK_SIZE = 2 * 1024 * 1024

    # Small uploads can be transferred in one go.
    if file_size <= CHUNK_SIZE:
        dropbox_client.files_upload(file_handle.read(),
                                    remote_file_path,
                                    mode=write_mode)

    # Large uploads can be sent in chunks, by creating a session allowing multiple separate uploads.
    else:
        upload_session_start_result = dropbox_client.files_upload_session_start(
            file_handle.read(CHUNK_SIZE))

        cursor = dropbox.files.UploadSessionCursor(
            session_id=upload_session_start_result.session_id,
            offset=file_handle.tell())
        commit = dropbox.files.CommitInfo(path=remote_file_path,
                                          mode=write_mode)

        # We keep sending the data in chunks, until we reach the last one, then we instruct Dropbox to finish the upload
        # by combining all the chunks sent previously.
        while file_handle.tell() < file_size:
            if (file_size - file_handle.tell()) <= CHUNK_SIZE:
                dropbox_client.files_upload_session_finish(
                    file_handle.read(CHUNK_SIZE), cursor, commit)
            else:
                dropbox_client.files_upload_session_append_v2(
                    file_handle.read(CHUNK_SIZE), cursor)
                cursor.offset = file_handle.tell()

    file_handle.close()
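
A hedged usage sketch; the access token and both paths are placeholders, not part of the original function.

import dropbox

dbx = dropbox.Dropbox("YOUR_ACCESS_TOKEN")  # hypothetical token
upload_chunked(dbx, "/tmp/database-backup.sql.gz", "/backups/database-backup.sql.gz")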
Example #5
def upload_to_dropbox(access_token,
                      dropbox_path,
                      file_path,
                      progress_callback=None):
    dbx = Dropbox(access_token)
    with open(file_path, 'rb') as file:
        chunk = file.read(CHUNK_SIZE)
        offset = len(chunk)

        upload_session = dbx.files_upload_session_start(chunk)
        progress_callback and progress_callback(offset)

        while True:
            chunk = file.read(CHUNK_SIZE)
            if not chunk:
                break
            dbx.files_upload_session_append_v2(
                chunk,
                UploadSessionCursor(
                    upload_session.session_id,
                    offset,
                ),
            )
            offset += len(chunk)
            progress_callback and progress_callback(offset)

        file_metadata = dbx.files_upload_session_finish(
            b'',
            UploadSessionCursor(
                upload_session.session_id,
                offset=offset,
            ),
            CommitInfo(
                dropbox_path,
                # When writing the file, Dropbox won't overwrite an existing file;
                # it will just add another file like "filename (2).txt"
                WriteMode('add'),
            ),
        )
        progress_callback and progress_callback(offset)
        return file_metadata.path_display
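
A sketch of how this might be called; it assumes CHUNK_SIZE and the dropbox imports (Dropbox, UploadSessionCursor, CommitInfo, WriteMode) exist at module level, while the callback, token and paths are illustrative only.

def print_progress(bytes_sent):
    print("uploaded {} bytes so far".format(bytes_sent))

remote_path = upload_to_dropbox(
    "YOUR_ACCESS_TOKEN",        # hypothetical token
    "/reports/report.csv",      # destination path in Dropbox
    "report.csv",               # local source file
    progress_callback=print_progress,
)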
Example #6
class DPBXBackend(duplicity.backend.Backend):
    """Connect to remote store using Dr*pB*x service"""
    def __init__(self, parsed_url):
        duplicity.backend.Backend.__init__(self, parsed_url)

        self.api_account = None
        self.api_client = None
        self.auth_flow = None

        self.login()

    def user_authenticated(self):
        try:
            account = self.api_client.users_get_current_account()
            log.Debug("User authenticated as ,%s" % account)
            return True
        except:
            log.Debug('User not authenticated')
            return False

    def load_access_token(self):
        return os.environ.get('DPBX_ACCESS_TOKEN', None)

    def save_access_token(self, access_token):
        raise BackendException(
            'dpbx: Please set DPBX_ACCESS_TOKEN=\"%s\" environment variable' %
            access_token)

    def obtain_access_token(self):
        log.Info("dpbx: trying to obtain access token")
        for env_var in ['DPBX_APP_KEY', 'DPBX_APP_SECRET']:
            if env_var not in os.environ:
                raise BackendException(
                    'dpbx: %s environment variable not set' % env_var)

        app_key = os.environ['DPBX_APP_KEY']
        app_secret = os.environ['DPBX_APP_SECRET']

        if not sys.stdout.isatty() or not sys.stdin.isatty():
            log.FatalError(
                'dpbx error: cannot interact, but need human attention',
                log.ErrorCode.backend_command_error)

        auth_flow = DropboxOAuth2FlowNoRedirect(app_key, app_secret)
        log.Debug('dpbx,auth_flow.start()')
        authorize_url = auth_flow.start()
        print
        print '-' * 72
        print "1. Go to: " + authorize_url
        print "2. Click \"Allow\" (you might have to log in first)."
        print "3. Copy the authorization code."
        print '-' * 72
        auth_code = raw_input("Enter the authorization code here: ").strip()
        try:
            log.Debug('dpbx,auth_flow.finish(%s)' % auth_code)
            authresult = auth_flow.finish(auth_code)
        except Exception as e:
            raise BackendException('dpbx: Unable to obtain access token: %s' %
                                   e)
        log.Info("dpbx: Authentication successfull")
        self.save_access_token(authresult.access_token)

    def login(self):
        if self.load_access_token() is None:
            self.obtain_access_token()

        self.api_client = Dropbox(self.load_access_token())
        self.api_account = None
        try:
            log.Debug('dpbx,users_get_current_account([token])')
            self.api_account = self.api_client.users_get_current_account()
            log.Debug("dpbx,%s" % self.api_account)

        except (BadInputError, AuthError) as e:
            log.Debug('dpbx,exception: %s' % e)
            log.Info(
                "dpbx: Authentication failed. Trying to obtain new access token"
            )

            self.obtain_access_token()

            # We're assuming obtain_access_token will throw exception.
            # So this line should not be reached
            raise BackendException(
                "dpbx: Please update DPBX_ACCESS_TOKEN and try again")

        log.Info("dpbx: Successfully authenticated as %s" %
                 self.api_account.name.display_name)

    def _error_code(self, operation, e):
        if isinstance(e, ApiError):
            err = e.error

            if isinstance(err, GetMetadataError) and err.is_path():
                if err.get_path().is_not_found():
                    return log.ErrorCode.backend_not_found
            elif isinstance(err, DeleteError) and err.is_path_lookup():
                lookup = e.error.get_path_lookup()
                if lookup.is_not_found():
                    return log.ErrorCode.backend_not_found

    @command()
    def _put(self, source_path, remote_filename):
        remote_dir = urllib.unquote(self.parsed_url.path.lstrip('/'))
        remote_path = '/' + os.path.join(remote_dir, remote_filename).rstrip()

        file_size = os.path.getsize(source_path.name)
        progress.report_transfer(0, file_size)

        if file_size < DPBX_UPLOAD_CHUNK_SIZE:
            # Upload whole file at once to avoid extra server request
            res_metadata = self.put_file_small(source_path, remote_path)
        else:
            res_metadata = self.put_file_chunked(source_path, remote_path)

        # A few sanity checks
        if res_metadata.path_display != remote_path:
            raise BackendException(
                'dpbx: result path mismatch: %s (expected: %s)' %
                (res_metadata.path_display, remote_path))
        if res_metadata.size != file_size:
            raise BackendException(
                'dpbx: result size mismatch: %s (expected: %s)' %
                (res_metadata.size, file_size))

    def put_file_small(self, source_path, remote_path):
        if not self.user_authenticated():
            self.login()

        file_size = os.path.getsize(source_path.name)
        f = source_path.open('rb')
        try:
            log.Debug('dpbx,files_upload(%s, [%d bytes])' %
                      (remote_path, file_size))

            res_metadata = self.api_client.files_upload(
                f.read(),
                remote_path,
                mode=WriteMode.overwrite,
                autorename=False,
                client_modified=None,
                mute=True)
            log.Debug('dpbx,files_upload(): %s' % res_metadata)
            progress.report_transfer(file_size, file_size)
            return res_metadata
        finally:
            f.close()

    def put_file_chunked(self, source_path, remote_path):
        if not self.user_authenticated():
            self.login()

        file_size = os.path.getsize(source_path.name)
        f = source_path.open('rb')
        try:
            buf = f.read(DPBX_UPLOAD_CHUNK_SIZE)
            log.Debug(
                'dpbx,files_upload_session_start([%d bytes]), total: %d' %
                (len(buf), file_size))
            upload_sid = self.api_client.files_upload_session_start(buf)
            log.Debug('dpbx,files_upload_session_start(): %s' % upload_sid)
            upload_cursor = UploadSessionCursor(upload_sid.session_id,
                                                f.tell())
            commit_info = CommitInfo(remote_path,
                                     mode=WriteMode.overwrite,
                                     autorename=False,
                                     client_modified=None,
                                     mute=True)
            res_metadata = None
            progress.report_transfer(f.tell(), file_size)

            requested_offset = None
            current_chunk_size = DPBX_UPLOAD_CHUNK_SIZE
            retry_number = globals.num_retries
            is_eof = False

            # We're doing our own error handling and retrying logic because
            # we can benefit from Dpbx chunked upload and retry only failed
            # chunk
            while not is_eof or not res_metadata:
                try:
                    if requested_offset is not None:
                        upload_cursor.offset = requested_offset

                    if f.tell() != upload_cursor.offset:
                        f.seek(upload_cursor.offset)
                    buf = f.read(current_chunk_size)

                    is_eof = f.tell() >= file_size
                    if not is_eof and len(buf) == 0:
                        continue

                    # reset temporary status variables
                    requested_offset = None
                    current_chunk_size = DPBX_UPLOAD_CHUNK_SIZE
                    retry_number = globals.num_retries

                    if not is_eof:
                        assert len(buf) != 0
                        log.Debug(
                            'dpbx,files_upload_session_append([%d bytes], offset=%d)'
                            % (len(buf), upload_cursor.offset))
                        self.api_client.files_upload_session_append(
                            buf, upload_cursor.session_id,
                            upload_cursor.offset)
                    else:
                        log.Debug(
                            'dpbx,files_upload_session_finish([%d bytes], offset=%d)'
                            % (len(buf), upload_cursor.offset))
                        res_metadata = self.api_client.files_upload_session_finish(
                            buf, upload_cursor, commit_info)

                    upload_cursor.offset = f.tell()
                    log.Debug('progress: %d of %d' %
                              (upload_cursor.offset, file_size))
                    progress.report_transfer(upload_cursor.offset, file_size)
                except ApiError as e:
                    error = e.error
                    if isinstance(error, UploadSessionLookupError
                                  ) and error.is_incorrect_offset():
                        # Server reports that we should send another chunk.
                        # Most likely this is caused by network error during
                        # previous upload attempt. In such case we'll get
                        # expected offset from server and it's enough to just
                        # seek() and retry again
                        new_offset = error.get_incorrect_offset(
                        ).correct_offset
                        log.Debug(
                            'dpbx,files_upload_session_append: incorrect offset: %d (expected: %s)'
                            % (upload_cursor.offset, new_offset))
                        if requested_offset is not None:
                            # chunk failed even after seek attempt. Something
                            # strange and no safe way to recover
                            raise BackendException(
                                "dpbx: unable to chunk upload")
                        else:
                            # will seek and retry
                            requested_offset = new_offset
                        continue
                    raise
                except ConnectionError as e:
                    log.Debug('dpbx,files_upload_session_append: %s' % e)

                    retry_number -= 1

                    if not self.user_authenticated():
                        self.login()

                    if retry_number == 0:
                        raise

                    # We don't know for sure whether the partial upload was
                    # successful or not, so it's better to retry a smaller
                    # amount to avoid extra reupload
                    log.Info('dpbx: sleeping a bit before chunk retry')
                    time.sleep(30)
                    current_chunk_size = DPBX_UPLOAD_CHUNK_SIZE / 5
                    requested_offset = None
                    continue

            if f.tell() != file_size:
                raise BackendException('dpbx: something wrong')

            log.Debug('dpbx,files_upload_session_finish(): %s' % res_metadata)
            progress.report_transfer(f.tell(), file_size)

            return res_metadata

        finally:
            f.close()

    @command()
    def _get(self, remote_filename, local_path):
        if not self.user_authenticated():
            self.login()

        remote_dir = urllib.unquote(self.parsed_url.path.lstrip('/'))
        remote_path = '/' + os.path.join(remote_dir, remote_filename).rstrip()

        log.Debug('dpbx,files_download(%s)' % remote_path)
        res_metadata, http_fd = self.api_client.files_download(remote_path)
        log.Debug('dpbx,files_download(%s): %s, %s' %
                  (remote_path, res_metadata, http_fd))
        file_size = res_metadata.size
        to_fd = None
        progress.report_transfer(0, file_size)
        try:
            to_fd = local_path.open('wb')
            for c in http_fd.iter_content(DPBX_DOWNLOAD_BUF_SIZE):
                to_fd.write(c)
                progress.report_transfer(to_fd.tell(), file_size)

        finally:
            if to_fd:
                to_fd.close()
            http_fd.close()

        # It's different from _query() check because we're not querying metadata
        # again. Since this check is free, it's better to have it here
        local_size = os.path.getsize(local_path.name)
        if local_size != file_size:
            raise BackendException("dpbx: wrong file size: %d (expected: %d)" %
                                   (local_size, file_size))

        local_path.setdata()

    @command()
    def _list(self):
        # Do a long listing to avoid connection reset
        if not self.user_authenticated():
            self.login()
        remote_dir = '/' + urllib.unquote(
            self.parsed_url.path.lstrip('/')).rstrip()

        log.Debug('dpbx.files_list_folder(%s)' % remote_dir)
        res = []
        try:
            resp = self.api_client.files_list_folder(remote_dir)
            log.Debug('dpbx.list(%s): %s' % (remote_dir, resp))

            while True:
                res.extend([entry.name for entry in resp.entries])
                if not resp.has_more:
                    break
                resp = self.api_client.files_list_folder_continue(resp.cursor)
        except ApiError as e:
            if (isinstance(e.error, ListFolderError) and e.error.is_path()
                    and e.error.get_path().is_not_found()):
                log.Debug('dpbx.list(%s): ignore missing folder (%s)' %
                          (remote_dir, e))
            else:
                raise

        # Warn users of old version dpbx about automatically renamed files
        self.check_renamed_files(res)

        return res

    @command()
    def _delete(self, filename):
        if not self.user_authenticated():
            self.login()

        remote_dir = urllib.unquote(self.parsed_url.path.lstrip('/'))
        remote_path = '/' + os.path.join(remote_dir, filename).rstrip()

        log.Debug('dpbx.files_delete(%s)' % remote_path)
        self.api_client.files_delete(remote_path)

        # files_permanently_delete seems to be better for backup purpose
        # but it's only available for Business accounts
        # self.api_client.files_permanently_delete(remote_path)

    @command()
    def _close(self):
        """close backend session? no! just "flush" the data"""
        log.Debug('dpbx.close():')

    @command()
    def _query(self, filename):
        if not self.user_authenticated():
            self.login()
        remote_dir = urllib.unquote(self.parsed_url.path.lstrip('/'))
        remote_path = '/' + os.path.join(remote_dir, filename).rstrip()

        log.Debug('dpbx.files_get_metadata(%s)' % remote_path)
        info = self.api_client.files_get_metadata(remote_path)
        log.Debug('dpbx.files_get_metadata(%s): %s' % (remote_path, info))
        return {'size': info.size}

    def check_renamed_files(self, file_list):
        if not self.user_authenticated():
            self.login()
        bad_list = [
            x for x in file_list
            if DPBX_AUTORENAMED_FILE_RE.search(x) is not None
        ]
        if len(bad_list) == 0:
            return
        log.Warn('-' * 72)
        log.Warn(
            'Warning! It looks like there are automatically renamed files on backend'
        )
        log.Warn(
            'They were probably created when using older version of duplicity.'
        )
        log.Warn('')
        log.Warn(
            'Please check your backup consistency. Most likely you will need to choose'
        )
        log.Warn(
            'largest file from duplicity-* (number).gpg and remove brackets from its name.'
        )
        log.Warn('')
        log.Warn(
            'These files are not managed by duplicity at all and will not be')
        log.Warn('removed/rotated automatically.')
        log.Warn('')
        log.Warn('Affected files:')
        for x in bad_list:
            log.Warn('\t%s' % x)
        log.Warn('')
        log.Warn('In any case it\'s better to create full backup.')
        log.Warn('-' * 72)
Example #7
def upload(dropbox_helper_id, access_token, size, max_retries):
    from .models import DropboxUploadHelper
    helper = DropboxUploadHelper.objects.get(id=dropbox_helper_id)
    dbx = Dropbox(access_token)

    try:
        with open(helper.src, 'rb') as f:
            chunk = f.read(CHUNK_SIZE)
            offset = len(chunk)

            upload_session = dbx.files_upload_session_start(chunk)

            while True:
                chunk = f.read(CHUNK_SIZE)
                if not chunk:
                    break
                helper.progress = offset / size
                helper.save()
                dbx.files_upload_session_append_v2(
                    chunk,
                    UploadSessionCursor(
                        upload_session.session_id,
                        offset,
                    ),
                )
                offset += len(chunk)

            file_metadata = dbx.files_upload_session_finish(
                b'',
                UploadSessionCursor(
                    upload_session.session_id,
                    offset=offset,
                ),
                CommitInfo(
                    '/{}'.format(os.path.basename(helper.src)),
                    # When writing the file, Dropbox won't overwrite an existing file;
                    # it will just add another file like "filename (2).txt"
                    WriteMode('add'),
                ),
            )
    except Exception as e:
        helper.failure_reason = str(e)
        helper.save()

    couch_user = CouchUser.get_by_username(helper.user.username)
    if helper.failure_reason is None:
        path_link_metadata = dbx.sharing_create_shared_link_with_settings(
            file_metadata.path_display,
            SharedLinkSettings(
                requested_visibility=RequestedVisibility.team_only, ),
        )
        context = {
            'share_url':
            path_link_metadata.url,
            'path':
            os.path.join(
                u'Apps',
                settings.DROPBOX_APP_NAME,
                path_link_metadata.name,
            )
        }
        with localize(couch_user.get_language_code()):
            subject = _(u'{} has been uploaded to dropbox!'.format(
                helper.dest))
            html_content = render_to_string(
                'dropbox/emails/upload_success.html', context)
            text_content = render_to_string(
                'dropbox/emails/upload_success.txt', context)
    else:
        context = {'reason': helper.failure_reason, 'path': helper.dest}
        with localize(couch_user.get_language_code()):
            subject = _(u'{} has failed to upload to dropbox'.format(
                helper.dest))
            html_content = render_to_string('dropbox/emails/upload_error.html',
                                            context)
            text_content = render_to_string('dropbox/emails/upload_error.txt',
                                            context)

    send_HTML_email(
        subject,
        helper.user.email,
        html_content,
        text_content=text_content,
    )
Example #8
class DPBXBackend(duplicity.backend.Backend):
    """Connect to remote store using Dr*pB*x service"""

    def __init__(self, parsed_url):
        duplicity.backend.Backend.__init__(self, parsed_url)

        self.api_account = None
        self.api_client = None
        self.auth_flow = None

        self.login()

    def load_access_token(self):
        return os.environ.get('DPBX_ACCESS_TOKEN', None)

    def save_access_token(self, access_token):
        raise BackendException('dpbx: Please set DPBX_ACCESS_TOKEN=\"%s\" environment variable' % access_token)

    def obtain_access_token(self):
        log.Info("dpbx: trying to obtain access token")
        for env_var in ['DPBX_APP_KEY', 'DPBX_APP_SECRET']:
            if env_var not in os.environ:
                raise BackendException('dpbx: %s environment variable not set' % env_var)

        app_key = os.environ['DPBX_APP_KEY']
        app_secret = os.environ['DPBX_APP_SECRET']

        if not sys.stdout.isatty() or not sys.stdin.isatty():
            log.FatalError('dpbx error: cannot interact, but need human attention', log.ErrorCode.backend_command_error)

        auth_flow = DropboxOAuth2FlowNoRedirect(app_key, app_secret)
        log.Debug('dpbx,auth_flow.start()')
        authorize_url = auth_flow.start()
        print
        print '-' * 72
        print "1. Go to: " + authorize_url
        print "2. Click \"Allow\" (you might have to log in first)."
        print "3. Copy the authorization code."
        print '-' * 72
        auth_code = raw_input("Enter the authorization code here: ").strip()
        try:
            log.Debug('dpbx,auth_flow.finish(%s)' % auth_code)
            access_token, _ = auth_flow.finish(auth_code)
        except Exception as e:
            raise BackendException('dpbx: Unable to obtain access token: %s' % e)
        log.Info("dpbx: Authentication successfull")
        self.save_access_token(access_token)

    def login(self):
        if self.load_access_token() is None:
            self.obtain_access_token()

        self.api_client = Dropbox(self.load_access_token())
        self.api_account = None
        try:
            log.Debug('dpbx,users_get_current_account([token])')
            self.api_account = self.api_client.users_get_current_account()
            log.Debug("dpbx,%s" % self.api_account)

        except (BadInputError, AuthError) as e:
            log.Debug('dpbx,exception: %s' % e)
            log.Info("dpbx: Authentication failed. Trying to obtain new access token")

            self.obtain_access_token()

            # We're assuming obtain_access_token will throw exception. So this line should not be reached
            raise BackendException("dpbx: Please update DPBX_ACCESS_TOKEN and try again")

        log.Info("dpbx: Successfully authenticated as %s" % self.api_account.name.display_name)

    def _error_code(self, operation, e):
        if isinstance(e, ApiError):
            err = e.error

            if isinstance(err, GetMetadataError) and err.is_path():
                if err.get_path().is_not_found():
                    return log.ErrorCode.backend_not_found
            elif isinstance(err, DeleteError) and err.is_path_lookup():
                lookup = e.error.get_path_lookup()
                if lookup.is_not_found():
                    return log.ErrorCode.backend_not_found

    @command()
    def _put(self, source_path, remote_filename):
        remote_dir = urllib.unquote(self.parsed_url.path.lstrip('/'))
        remote_path = '/' + os.path.join(remote_dir, remote_filename).rstrip()

        file_size = os.path.getsize(source_path.name)
        f = source_path.open('rb')
        try:
            progress.report_transfer(0, file_size)
            buf = f.read(DPBX_UPLOAD_CHUNK_SIZE)
            log.Debug('dpbx,files_upload_session_start([%d bytes]), total: %d' % (len(buf), file_size))
            upload_sid = self.api_client.files_upload_session_start(buf)
            log.Debug('dpbx,files_upload_session_start(): %s' % upload_sid)
            upload_cursor = UploadSessionCursor(upload_sid.session_id, f.tell())
            commit_info = CommitInfo(remote_path, mode=WriteMode.overwrite, autorename=False, client_modified=None, mute=True)
            res_metadata = None
            progress.report_transfer(f.tell(), file_size)

            requested_offset = None
            current_chunk_size = DPBX_UPLOAD_CHUNK_SIZE
            retry_number = globals.num_retries

            # We're doing our own error handling and retrying logic because
            # we can benefit from Dpbx chunked upload and retry only failed chunk
            while (f.tell() < file_size) or not res_metadata:
                try:
                    if requested_offset is not None:
                        upload_cursor.offset = requested_offset

                    if f.tell() != upload_cursor.offset:
                        f.seek(upload_cursor.offset)
                    buf = f.read(current_chunk_size)

                    # reset temporary status variables
                    requested_offset = None
                    current_chunk_size = DPBX_UPLOAD_CHUNK_SIZE
                    retry_number = globals.num_retries

                    if len(buf) != 0:
                        log.Debug('dpbx,files_upload_session_append([%d bytes], offset=%d)' % (len(buf), upload_cursor.offset))
                        self.api_client.files_upload_session_append(buf, upload_cursor.session_id, upload_cursor.offset)
                    else:
                        log.Debug('dpbx,files_upload_session_finish([%d bytes], offset=%d)' % (len(buf), upload_cursor.offset))
                        res_metadata = self.api_client.files_upload_session_finish(buf, upload_cursor, commit_info)

                    upload_cursor.offset = f.tell()
                    log.Debug('progress: %d of %d' % (upload_cursor.offset, file_size))
                    progress.report_transfer(upload_cursor.offset, file_size)
                except ApiError as e:
                    error = e.error
                    if isinstance(error, UploadSessionLookupError) and error.is_incorrect_offset():
                        # Server reports that we should send another chunk. Most likely this is caused by
                        # network error during previous upload attempt. In such case we'll get expected offset
                        # from server and it's enough to just seek() and retry again
                        new_offset = error.get_incorrect_offset().correct_offset
                        log.Debug('dpbx,files_upload_session_append: incorrect offset: %d (expected: %s)' % (upload_cursor.offset, new_offset))
                        if requested_offset is not None:
                            # chunk failed even after seek attempt. Something strange and no safe way to recover
                            raise BackendException("dpbx: unable to chunk upload")
                        else:
                            # will seek and retry
                            requested_offset = new_offset
                        continue
                    raise
                except ConnectionError as e:
                    log.Debug('dpbx,files_upload_session_append: %s' % e)

                    retry_number -= 1
                    if retry_number == 0:
                        raise

                    # We don't know for sure whether the partial upload was successful or not, so it's better to retry a smaller amount to avoid extra reupload
                    log.Info('dpbx: sleeping a bit before chunk retry')
                    time.sleep(30)
                    current_chunk_size = DPBX_UPLOAD_CHUNK_SIZE / 5
                    requested_offset = None
                    continue

            if f.tell() != file_size:
                raise BackendException('dpbx: something wrong')

            log.Debug('dpbx,files_upload_session_finish(): %s' % res_metadata)
            progress.report_transfer(f.tell(), file_size)

            # A few sanity checks
            if res_metadata.path_display != remote_path:
                raise BackendException('dpbx: result path mismatch: %s (expected: %s)' % (res_metadata.path_display, remote_path))
            if res_metadata.size != file_size:
                raise BackendException('dpbx: result size mismatch: %s (expected: %s)' % (res_metadata.size, file_size))

        finally:
            f.close()

    @command()
    def _get(self, remote_filename, local_path):
        remote_dir = urllib.unquote(self.parsed_url.path.lstrip('/'))
        remote_path = '/' + os.path.join(remote_dir, remote_filename).rstrip()

        log.Debug('dpbx,files_download(%s)' % remote_path)
        res_metadata, http_fd = self.api_client.files_download(remote_path)
        log.Debug('dpbx,files_download(%s): %s, %s' % (remote_path, res_metadata, http_fd))
        file_size = res_metadata.size
        to_fd = None
        progress.report_transfer(0, file_size)
        try:
            to_fd = local_path.open('wb')
            for c in http_fd.iter_content(DPBX_DOWNLOAD_BUF_SIZE):
                to_fd.write(c)
                progress.report_transfer(to_fd.tell(), file_size)

        finally:
            if to_fd:
                to_fd.close()
            http_fd.close()

        # It's different from _query() check because we're not querying metadata again.
        # Since this check is free, it's better to have it here
        local_size = os.path.getsize(local_path.name)
        if local_size != file_size:
            raise BackendException("dpbx: wrong file size: %d (expected: %d)" % (local_size, file_size))

        local_path.setdata()

    @command()
    def _list(self):
        # Do a long listing to avoid connection reset
        remote_dir = '/' + urllib.unquote(self.parsed_url.path.lstrip('/')).rstrip()

        log.Debug('dpbx.files_list_folder(%s)' % remote_dir)
        resp = self.api_client.files_list_folder(remote_dir)
        log.Debug('dpbx.list(%s): %s' % (remote_dir, resp))

        res = []
        while True:
            res.extend([entry.name for entry in resp.entries])
            if not resp.has_more:
                break
            resp = self.api_client.files_list_folder_continue(resp.cursor)

        # Warn users of old version dpbx about automatically renamed files
        self.check_renamed_files(res)

        return res

    @command()
    def _delete(self, filename):
        remote_dir = urllib.unquote(self.parsed_url.path.lstrip('/'))
        remote_path = '/' + os.path.join(remote_dir, filename).rstrip()

        log.Debug('dpbx.files_delete(%s)' % remote_path)
        self.api_client.files_delete(remote_path)

        # files_permanently_delete seems to be better for backup purpose
        # but it's only available for Business accounts
        # self.api_client.files_permanently_delete(remote_path)

    @command()
    def _close(self):
        """close backend session? no! just "flush" the data"""
        log.Debug('dpbx.close():')

    @command()
    def _query(self, filename):
        remote_dir = urllib.unquote(self.parsed_url.path.lstrip('/'))
        remote_path = '/' + os.path.join(remote_dir, filename).rstrip()

        log.Debug('dpbx.files_get_metadata(%s)' % remote_path)
        info = self.api_client.files_get_metadata(remote_path)
        log.Debug('dpbx.files_get_metadata(%s): %s' % (remote_path, info))
        return {'size': info.size}

    def check_renamed_files(self, file_list):
        bad_list = [x for x in file_list if DPBX_AUTORENAMED_FILE_RE.search(x) is not None]
        if len(bad_list) == 0:
            return
        log.Warn('-' * 72)
        log.Warn('Warning! It looks like there are automatically renamed files on backend')
        log.Warn('They were probably created when using older version of duplicity.')
        log.Warn('')
        log.Warn('Please check your backup consistency. Most likely you will need to choose')
        log.Warn('largest file from duplicity-* (number).gpg and remove brackets from its name.')
        log.Warn('')
        log.Warn('These files are not managed by duplicity at all and will not be')
        log.Warn('removed/rotated automatically.')
        log.Warn('')
        log.Warn('Affected files:')
        for x in bad_list:
            log.Warn('\t%s' % x)
        log.Warn('')
        log.Warn('In any case it\'s better to create full backup.')
        log.Warn('-' * 72)
Example #9
class DropBoxStorage(Storage):
    """DropBox Storage class for Django pluggable storage system."""
    location = setting('DROPBOX_ROOT_PATH', '/')
    oauth2_access_token = setting('DROPBOX_OAUTH2_TOKEN')
    timeout = setting('DROPBOX_TIMEOUT', _DEFAULT_TIMEOUT)
    write_mode = setting('DROPBOX_WRITE_MODE', _DEFAULT_MODE)

    CHUNK_SIZE = 4 * 1024 * 1024

    def __init__(self, oauth2_access_token=oauth2_access_token, root_path=location, timeout=timeout,
                 write_mode=write_mode):
        if oauth2_access_token is None:
            raise ImproperlyConfigured("You must configure an auth token at"
                                       "'settings.DROPBOX_OAUTH2_TOKEN'.")

        self.root_path = root_path
        self.write_mode = write_mode
        self.client = Dropbox(oauth2_access_token, timeout=timeout)

    def _full_path(self, name):
        if name == '/':
            name = ''
        
        # On Windows, do not append the drive letter to the file path
        if os.name == 'nt':
            final_path = os.path.join(self.root_path, name).replace('\\', '/')
            
            # Separator on linux system
            sep = '//'
            base_path = self.root_path

            if (not os.path.normcase(final_path).startswith(os.path.normcase(base_path + sep)) and
                    os.path.normcase(final_path) != os.path.normcase(base_path) and
                    os.path.dirname(os.path.normcase(base_path)) != os.path.normcase(base_path)):
                raise SuspiciousFileOperation(
                    'The joined path ({}) is located outside of the base path '
                    'component ({})'.format(final_path, base_path))
            
            return final_path
        
        else:
            return safe_join(self.root_path, name).replace('\\', '/')

    def delete(self, name):
        self.client.files_delete(self._full_path(name))

    def exists(self, name):
        try:
            return bool(self.client.files_get_metadata(self._full_path(name)))
        except ApiError:
            return False

    def listdir(self, path):
        directories, files = [], []
        full_path = self._full_path(path)

        if full_path == '/':
            full_path = ''

        metadata = self.client.files_list_folder(full_path)
        for entry in metadata.entries:
            if isinstance(entry, FolderMetadata):
                directories.append(entry.name)
            else:
                files.append(entry.name)
        return directories, files

    def size(self, name):
        metadata = self.client.files_get_metadata(self._full_path(name))
        return metadata.size

    def modified_time(self, name):
        metadata = self.client.files_get_metadata(self._full_path(name))
        return metadata.server_modified

    def accessed_time(self, name):
        metadata = self.client.files_get_metadata(self._full_path(name))
        return metadata.client_modified

    def url(self, name):
        media = self.client.files_get_temporary_link(self._full_path(name))
        return media.link

    def _open(self, name, mode='rb'):
        remote_file = DropBoxFile(self._full_path(name), self)
        return remote_file

    def _save(self, name, content):
        content.open()
        if content.size <= self.CHUNK_SIZE:
            self.client.files_upload(content.read(), self._full_path(name), mode=WriteMode(self.write_mode))
        else:
            self._chunked_upload(content, self._full_path(name))
        content.close()
        return name

    def _chunked_upload(self, content, dest_path):
        upload_session = self.client.files_upload_session_start(
            content.read(self.CHUNK_SIZE)
        )
        cursor = UploadSessionCursor(
            session_id=upload_session.session_id,
            offset=content.tell()
        )
        commit = CommitInfo(path=dest_path, mode=WriteMode(self.write_mode))

        while content.tell() < content.size:
            if (content.size - content.tell()) <= self.CHUNK_SIZE:
                self.client.files_upload_session_finish(
                    content.read(self.CHUNK_SIZE), cursor, commit
                )
            else:
                self.client.files_upload_session_append_v2(
                    content.read(self.CHUNK_SIZE), cursor
                )
                cursor.offset = content.tell()

    def get_available_name(self, name, max_length=None):
        """Overwrite existing file with the same name."""
        name = self._full_path(name)
        if self.write_mode == 'overwrite':
            return get_available_overwrite_name(name, max_length)
        return super().get_available_name(name, max_length)
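
A hedged wiring sketch for this storage class; the setting names mirror the setting() calls above, while the token value, root path and file contents are placeholders.

# settings.py
DROPBOX_OAUTH2_TOKEN = "YOUR_ACCESS_TOKEN"  # hypothetical token
DROPBOX_ROOT_PATH = "/django-media"
DROPBOX_WRITE_MODE = "overwrite"

# elsewhere in the project
from django.core.files.base import ContentFile

storage = DropBoxStorage()
storage.save("reports/report.txt", ContentFile(b"hello"))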
Example #10
class DropBoxStorage(Storage):
    """DropBox Storage class for Django pluggable storage system."""

    CHUNK_SIZE = 4 * 1024 * 1024

    def __init__(self, oauth2_access_token=None, root_path=None):
        oauth2_access_token = oauth2_access_token or setting('DROPBOX_OAUTH2_TOKEN')
        self.root_path = root_path or setting('DROPBOX_ROOT_PATH', '/')
        if oauth2_access_token is None:
            raise ImproperlyConfigured("You must configure a token auth at"
                                       "'settings.DROPBOX_OAUTH2_TOKEN'.")
        self.client = Dropbox(oauth2_access_token)

    def _full_path(self, name):
        if name == '/':
            name = ''
        return safe_join(self.root_path, name).replace('\\', '/')

    def delete(self, name):
        self.client.files_delete(self._full_path(name))

    def exists(self, name):
        try:
            return bool(self.client.files_get_metadata(self._full_path(name)))
        except ApiError:
            return False

    def listdir(self, path):
        directories, files = [], []
        full_path = self._full_path(path)
        metadata = self.client.files_get_metadata(full_path)
        for entry in metadata['contents']:
            entry['path'] = entry['path'].replace(full_path, '', 1)
            entry['path'] = entry['path'].replace('/', '', 1)
            if entry['is_dir']:
                directories.append(entry['path'])
            else:
                files.append(entry['path'])
        return directories, files

    def size(self, name):
        metadata = self.client.files_get_metadata(self._full_path(name))
        return metadata['bytes']

    def modified_time(self, name):
        metadata = self.client.files_get_metadata(self._full_path(name))
        mod_time = datetime.strptime(metadata['modified'], DATE_FORMAT)
        return mod_time

    def accessed_time(self, name):
        metadata = self.client.files_get_metadata(self._full_path(name))
        acc_time = datetime.strptime(metadata['client_mtime'], DATE_FORMAT)
        return acc_time

    def url(self, name):
        media = self.client.files_get_temporary_link(self._full_path(name))
        return media.link

    def _open(self, name, mode='rb'):
        remote_file = DropBoxFile(self._full_path(name), self)
        return remote_file

    def _save(self, name, content):
        content.open()
        if content.size <= self.CHUNK_SIZE:
            self.client.files_upload(content.read(), self._full_path(name))
        else:
            self._chunked_upload(content, self._full_path(name))
        content.close()
        return name

    def _chunked_upload(self, content, dest_path):
        upload_session = self.client.files_upload_session_start(
            content.read(self.CHUNK_SIZE)
        )
        cursor = UploadSessionCursor(
            session_id=upload_session.session_id,
            offset=content.tell()
        )
        commit = CommitInfo(path=dest_path)

        while content.tell() < content.size:
            if (content.size - content.tell()) <= self.CHUNK_SIZE:
                self.client.files_upload_session_finish(
                    content.read(self.CHUNK_SIZE), cursor, commit
                )
            else:
                self.client.files_upload_session_append_v2(
                    content.read(self.CHUNK_SIZE), cursor
                )
                cursor.offset = content.tell()
Example #11
class DropboxConnection(object):
    class Parameters(object):
        def __init__(self, auth_token, remote_dir_path):
            self._auth_token = auth_token
            self._remote_dir_path = remote_dir_path

        @property
        def auth_token(self):
            return self._auth_token

        @property
        def remote_dir_path(self):
            return constants.DROPBOX_APP_PATH_PREFIX / self._remote_dir_path

        def __str__(self):
            return """
                        remote_dir_path: {}
                   """.format(self.remote_dir_path)

    @classmethod
    def get_client_from_params(cls, dropbox_parameters):
        #Dropbox connection placeholder
        dropbox = None

        if dropbox_parameters:
            dropbox_params = DropboxConnection.Parameters(
                dropbox_parameters[0], dropbox_parameters[1])
            dropbox = DropboxConnection(dropbox_params)

        return dropbox

    @classmethod
    def get_client(cls, auth_token, remote_dir_path):
        #Initialize the parameters
        params = DropboxConnection.Parameters(auth_token, remote_dir_path)

        #Create dropbox client
        client = DropboxConnection(params)

        return client

    def __init__(self, params):
        #Required parameters
        self._params = params

        #Derived parameters
        self._client = Dropbox(self._params.auth_token)

        #Logging
        self._logger = logging.get_logger(__name__)

    def upload(self, source_file_path):
        """It upload the source files to the dropbox.

        Arguments:
            source_file_path {string} -- The source file path.

        Raises:
            ValueError -- Raised for an invalid source file path.
        """

        #Validate parameters
        if not source_file_path:
            raise ValueError("Invalid source file path")

        with open(source_file_path, 'rb') as handle:
            #Source file name
            source_file_name = Path(source_file_path).name

            #Remote path
            remote_file_path = (self._params.remote_dir_path /
                                source_file_name).as_posix()

            #File size
            upload_size = path.getsize(source_file_path)

            #Upload the files based on the upload size
            if upload_size <= constants.DROPBOX_CHUNK_SIZE:
                self._logger.info(
                    'Preparing to upload small file: %s with size: %d to: %s',
                    source_file_path, upload_size, remote_file_path)

                self._upload_small_file(handle, remote_file_path)
            else:
                self._logger.info(
                    'Preparing to upload large file: %s with size: %d to: %s',
                    source_file_path, upload_size, remote_file_path)

                self._upload_large_file(handle, upload_size, remote_file_path)

            self._logger.info('Uploaded: %s', source_file_path)

    def _upload_small_file(self, handle, remote_file_path):
        """It uploads a small source files to the dropbox.

        Arguments:
            handle {A File handle} -- The source file handle.
            remote_file_path {string} -- The destination path of the file.
        """
        self._client.files_upload(handle.read(),
                                  remote_file_path,
                                  mode=Dropbox_WriteMode.overwrite)

    def _upload_large_file(self, handle, upload_size, remote_file_path):
        """It uploads a large source files to the dropbox.

        Arguments:
            handle {A File handle} -- The source file handle.
            upload_size {int} -- The number of bytes to be uploaded.
            remote_file_path {string} -- The destination path of the file.
        """
        #Upload session
        session = self._client.files_upload_session_start(
            handle.read(constants.DROPBOX_CHUNK_SIZE))
        cursor = Dropbox_UploadSessionCursor(session_id=session.session_id,
                                             offset=handle.tell())

        #Upload loop
        with tqdm(desc='Uploading: {}'.format(remote_file_path),
                  total=upload_size) as pbar:
            #Update the progress bar for the session start reads
            pbar.update(handle.tell())

            while handle.tell() < upload_size:
                #Calculate remaining bytes
                remaining_bytes = upload_size - handle.tell()

                #If it is the last chunk, finalize the upload
                if remaining_bytes <= constants.DROPBOX_CHUNK_SIZE:
                    #Commit info
                    commit = Dropbox_CommitInfo(
                        path=remote_file_path,
                        mode=Dropbox_WriteMode.overwrite)

                    #Finish upload
                    self._client.files_upload_session_finish(
                        handle.read(remaining_bytes), cursor, commit)

                    #Update progress
                    pbar.update(remaining_bytes)
                #More than chunk size remaining to upload
                else:
                    self._client.files_upload_session_append_v2(
                        handle.read(constants.DROPBOX_CHUNK_SIZE), cursor)

                    #Update the cursor
                    cursor.offset = handle.tell()

                    #Update the progress
                    pbar.update(constants.DROPBOX_CHUNK_SIZE)

                #Refresh the progress bar
                pbar.refresh()

    def download(self, remote_file_path):
        """It downloads the remote files from the dropbox.

        Arguments:
            remote_file_path {Path} -- The path to the remote file.

        Raises:
            ValueError -- Raised for an invalid file name.
        """
        #Validate parameters
        if not remote_file_path:
            raise ValueError("Invalid remote file path")

        #Destination file path
        dest_file_path = remote_file_path

        #Full remote file path
        remote_file_path = self._params.remote_dir_path / remote_file_path

        #Download file size placeholder
        download_size = 0

        try:
            download_size = self._client.files_get_metadata(
                remote_file_path.as_posix()).size
        except ApiError as e:
            raise FileNotFoundError(
                'File: {} is not found'.format(remote_file_path.as_posix()), e)

        self._logger.info('Preparing file download: %s with size: %d to: %s',
                          remote_file_path, download_size, dest_file_path)

        #Download the file
        self._download_file(dest_file_path, remote_file_path, download_size)

        self._logger.info('Completed the file download: %s to: %s',
                          remote_file_path, dest_file_path)

    def _download_file(self, dest_file_path, remote_file_path, download_size):
        """It downloads the remote files from the dropbox.

        Arguments:
            remote_file_path {A Path object} -- The path of the remote file.
            dest_file_path {string} -- The destination file path.
            download_size {int} -- The number of bytes to be downloaded.
        """
        #Download
        _, result = self._client.files_download(remote_file_path.as_posix())

        #Temporary destination file path
        tmp_dest_file_path = "{}.tmp".format(dest_file_path)

        with open(tmp_dest_file_path, 'wb') as handle:
            with tqdm(desc='Downloading: {}'.format(
                    remote_file_path.as_posix()),
                      total=download_size) as pbar:
                for bytes_read in result.iter_content(
                        constants.DROPBOX_CHUNK_SIZE):
                    handle.write(bytes_read)

                    #Update the progress
                    pbar.update(len(bytes_read))

        if Path(tmp_dest_file_path).exists():
            rename(tmp_dest_file_path, dest_file_path)

    def list(self, dir_path=Path(), file_name_prefix=''):
        """It lists the files in the dropbox folder that starts with the given prefix.

        Arguments:
            file_name_prefix {string} -- The prefix to filter the results.
        """
        #Candidate directory whose contents are to be listed
        candidate_dir_path = self._params.remote_dir_path / dir_path
        self._logger.info('Enumerating: %s with file_name_prefix: %s',
                          candidate_dir_path, file_name_prefix)

        #Call the downstream API
        response = self._client.files_list_folder(
            candidate_dir_path.as_posix())

        #Output list placeholder
        files = []
        sizes = []

        if response.entries:
            #Log the response summary
            self._logger.info('Got %d files in: %s', len(response.entries),
                              candidate_dir_path)

            #Extract the names and sizes of the files satisfying the input criteria from the response entries.
            file_infos = [(dir_path / entry.name, entry.size)
                          for entry in response.entries
                          if entry.name.startswith(file_name_prefix)]

            files, sizes = zip(*file_infos)

        return files, sizes
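
Note that the `list` method above fetches only the first page of results, while `files_list_folder` can return a truncated listing with `has_more` set. A minimal, hedged sketch of full pagination using the plain SDK (the client construction and folder path are illustrative assumptions):

import dropbox

def list_all_entries(dbx: dropbox.Dropbox, folder: str):
    """Collect every entry in a folder, following pagination cursors."""
    result = dbx.files_list_folder(folder)
    entries = list(result.entries)
    # Keep requesting pages until the server reports there are no more results.
    while result.has_more:
        result = dbx.files_list_folder_continue(result.cursor)
        entries.extend(result.entries)
    return entries

# Hypothetical usage:
# dbx = dropbox.Dropbox('access-token')
# for entry in list_all_entries(dbx, '/backups'):
#     print(entry.name)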
Example #14
0
class Dropbox(BlackboxStorage):
    """Storage handler that uploads backups to Dropbox."""

    required_fields = ("access_token", )

    def __init__(self, **kwargs):
        super().__init__(**kwargs)

        self.upload_base = self.config.get("upload_directory") or "/"
        self.client = DropboxClient(self.config["access_token"])
        self.valid = self._validate_token()

    def _validate_token(self):
        """Check if dropbox token is valid."""
        try:
            return self.client.check_user("test").result == "test"
        except AuthError:
            return False

    def sync(self, file_path: Path) -> None:
        """Sync a file to Dropbox."""
        # Check if Dropbox token is valid.
        if self.valid is False:
            error = "Dropbox token is invalid!"
            self.success = False
            self.output = error
            log.error(error)
            return None

        # This is the maximum size that can be uploaded as a single chunk.
        # When the file is bigger than that, it will be uploaded
        # in multiple parts.
        chunk_size = 4 * 1024 * 1024

        temp_file, recompressed = self.compress(file_path)
        upload_path = f"{self.upload_base}{file_path.name}{'.gz' if recompressed else ''}"

        try:
            with temp_file as f:
                file_size = os.stat(f.name).st_size
                log.debug(file_size)
                if file_size <= chunk_size:
                    self.client.files_upload(f.read(), upload_path,
                                             WriteMode.overwrite)
                else:
                    session_start = self.client.files_upload_session_start(
                        f.read(chunk_size))
                    cursor = UploadSessionCursor(session_start.session_id,
                                                 offset=f.tell())
                    # The commit carries the destination path in Dropbox and the write mode for the file.
                    commit = CommitInfo(upload_path, WriteMode.overwrite)

                    while f.tell() < file_size:
                        if (file_size - f.tell()) <= chunk_size:
                            self.client.files_upload_session_finish(
                                f.read(chunk_size), cursor, commit)
                        else:
                            self.client.files_upload_session_append(
                                f.read(chunk_size), cursor.session_id,
                                cursor.offset)
                            cursor.offset = f.tell()
            self.success = True
        except (ApiError, HttpError) as e:
            log.error(e)
            self.success = False
            self.output = str(e)

    def rotate(self, database_id: str) -> None:
        """
        Rotate the files in the Dropbox directory.

        All files in the base backup directory will be deleted when they
        are older than `retention_days`; because of this, it's better
        to keep backups in an isolated folder.
        """
        # Check if Dropbox token is valid.
        if self.valid is False:
            log.error("Dropbox token is invalid - Can't delete old backups!")
            return None
        # Let's rotate only this type of database
        db_type_regex = rf"{database_id}_blackbox_\d{{2}}_\d{{2}}_\d{{4}}.+"

        # Fetch the first batch of files.
        files_result = self.client.files_list_folder(
            self.upload_base if self.upload_base != "/" else "")
        entries = [
            entry for entry in files_result.entries
            if self._is_backup_file(entry, db_type_regex)
        ]

        # If there are more files, fetch all of them.
        while files_result.has_more:
            cursor = files_result.cursor
            files_result = self.client.files_list_folder_continue(cursor)
            entries += [
                entry for entry in files_result.entries
                if self._is_backup_file(entry, db_type_regex)
            ]

        retention_days = 7
        if Blackbox.retention_days:
            retention_days = Blackbox.retention_days

        # Find all old files and delete them.
        for item in entries:
            last_modified = item.server_modified
            now = datetime.now(tz=last_modified.tzinfo)
            delta = now - last_modified
            if delta.days >= retention_days:
                self.client.files_delete(item.path_lower)

    @staticmethod
    def _is_backup_file(entry, db_type_regex) -> bool:
        """Check if file is actually this kind of database backup."""
        return isinstance(entry, FileMetadata) and re.match(
            db_type_regex, entry.name)
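
The `rotate` method keys its matching on `db_type_regex`, which expects backup names of the form `<database_id>_blackbox_<2 digits>_<2 digits>_<4 digits>…`. A tiny sketch (the database id and file names are invented for illustration) shows how the pattern behaves:

import re

db_type_regex = r"postgres_blackbox_\d{2}_\d{2}_\d{4}.+"  # hypothetical database_id "postgres"

print(bool(re.match(db_type_regex, "postgres_blackbox_01_02_2021_main.sql.gz")))  # True
print(bool(re.match(db_type_regex, "redis_blackbox_01_02_2021_main.rdb.gz")))     # False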
Example #15
0
def push_to_dropbox(branch_name, symbol, gui):
    global settings  # Read

    saves_path = settings["SAVES_DIR"]
    temp_dir = settings["TEMP_DIR"]

    if settings["OAUTH"] == 'null':
        return "Please type in /login to use this feature"

    # clear temp_dir
    for path_temp in listdir(temp_dir):
        remove(path.join(temp_dir, path_temp))

    # archive worlds starting with 'symbol' to temp_dir
    for path_save in listdir(saves_path):
        file_path = path.join(saves_path, path_save)
        if path.isdir(file_path) and path_save[0] == symbol:
            make_archive(path.join(temp_dir, path_save), 'zip', file_path)

    dbx = Dropbox(settings["OAUTH"].access_token)

    # clear branch_directory in dropbox
    try:
        if settings["CONFIRM"]:
            confirm = simpledialog.askstring(
                "Confirm",
                "Type in 'YES' if you wish to proceed. This will delete the current '{0}'"
                " branch if it already exists in dropbox".format(branch_name))
            if not confirm == "YES":
                return "Action Not "
        dbx.files_delete("/" + branch_name)
    except Exception:
        pass

    println("Starting upload... ", gui)
    println(
        "Do not close the app until 'done uploading' message is shown on the console",
        gui)

    # upload every zip file to dropbox in temp_dir
    for path_temp in listdir(temp_dir):
        zip_file = path.join(temp_dir, path_temp)
        destination = "/" + branch_name + "/" + path_temp

        with open(zip_file, "rb") as f:
            file_size = path.getsize(zip_file)
            if file_size < CHUNK_SIZE:
                dbx.files_upload(f.read(), destination)
            else:
                # upload_session_start_result
                upload_ssr = dbx.files_upload_session_start(f.read(CHUNK_SIZE))

                cursor = files.UploadSessionCursor(
                    session_id=upload_ssr.session_id, offset=f.tell())
                commit = files.CommitInfo(path=destination)

                while f.tell() < file_size:
                    percent = str(f.tell() / file_size * 100) + "%"
                    print(percent)

                    if (file_size - f.tell()) <= CHUNK_SIZE:
                        dbx.files_upload_session_finish(
                            f.read(CHUNK_SIZE), cursor, commit)
                    else:
                        dbx.files_upload_session_append(
                            f.read(CHUNK_SIZE), cursor.session_id,
                            cursor.offset)
                        cursor.offset = f.tell()

    # clear temp_dir
    for path_temp in listdir(temp_dir):
        remove(path.join(temp_dir, path_temp))

    save(settings)

    return "Done Uploading"
Example #16
0
class DropBoxStorage(Storage):
    """DropBox Storage class for Django pluggable storage system."""

    CHUNK_SIZE = 4 * 1024 * 1024

    def __init__(self, oauth2_access_token=None, root_path=None):
        oauth2_access_token = oauth2_access_token or setting(
            'DROPBOX_OAUTH2_TOKEN')
        self.root_path = root_path or setting('DROPBOX_ROOT_PATH', '/')
        if oauth2_access_token is None:
            raise ImproperlyConfigured("You must configure a token auth at"
                                       "'settings.DROPBOX_OAUTH2_TOKEN'.")
        self.client = Dropbox(oauth2_access_token)

    def _full_path(self, path):
        path = PurePosixPath(self.root_path) / path
        path = str(path)

        if path == '/':
            path = ''

        return path

    def delete(self, name):
        self.client.files_delete(self._full_path(name))

    def exists(self, name):
        try:
            return bool(self.client.files_get_metadata(self._full_path(name)))
        except ApiError:
            return False

    def listdir(self, path):
        directories, files = [], []
        full_path = self._full_path(path)
        result = self.client.files_list_folder(full_path)

        for entry in result.entries:
            if isinstance(entry, FolderMetadata):
                directories.append(entry.name)
            else:
                files.append(entry.name)

        assert not result.has_more, "FIXME: Not implemented!"

        return directories, files

    def size(self, name):
        metadata = self.client.files_get_metadata(self._full_path(name))
        return metadata.size

    def modified_time(self, name):
        metadata = self.client.files_get_metadata(self._full_path(name))
        return metadata.server_modified

    def accessed_time(self, name):
        metadata = self.client.files_get_metadata(self._full_path(name))
        # Note to the unwary, this is actually an mtime
        return metadata.client_modified

    def url(self, name):
        try:
            media = self.client.files_get_temporary_link(self._full_path(name))
            return media.link
        except ApiError:
            raise ValueError("This file is not accessible via a URL.")

    def _open(self, name, mode='rb'):
        return DropBoxFile(self._full_path(name), self)

    def _save(self, name, content):
        try:
            content.open()

            if content.size <= self.CHUNK_SIZE:
                self.client.files_upload(content.read(), self._full_path(name))
            else:
                self._chunked_upload(content, self._full_path(name))

        finally:
            content.close()

        return name

    def _chunked_upload(self, content, dest_path):
        upload_session = self.client.files_upload_session_start(
            content.read(self.CHUNK_SIZE))
        cursor = UploadSessionCursor(session_id=upload_session.session_id,
                                     offset=content.tell())
        commit = CommitInfo(path=dest_path)

        while content.tell() < content.size:
            if (content.size - content.tell()) <= self.CHUNK_SIZE:
                self.client.files_upload_session_finish(
                    content.read(self.CHUNK_SIZE), cursor, commit)
            else:
                self.client.files_upload_session_append_v2(
                    content.read(self.CHUNK_SIZE), cursor)
                cursor.offset = content.tell()
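
Both Django storage variants here pull their configuration from project settings via `setting(...)`. A minimal settings sketch, assuming the class is the one shipped by django-storages (the backend module path and token value are placeholder assumptions):

# settings.py -- a hedged sketch; the backend path assumes django-storages' dropbox module
DEFAULT_FILE_STORAGE = 'storages.backends.dropbox.DropBoxStorage'
DROPBOX_OAUTH2_TOKEN = 'your-oauth2-access-token'  # required, otherwise ImproperlyConfigured is raised
DROPBOX_ROOT_PATH = '/django-uploads'              # optional, defaults to '/'
DROPBOX_TIMEOUT = 100                              # only read by the timeout-aware variant in the next example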
Example #17
0
class DropBoxStorage(Storage):
    """DropBox Storage class for Django pluggable storage system."""

    CHUNK_SIZE = 4 * 1024 * 1024

    def __init__(self, oauth2_access_token=None, root_path=None, timeout=None):
        oauth2_access_token = oauth2_access_token or setting(
            'DROPBOX_OAUTH2_TOKEN')
        if oauth2_access_token is None:
            raise ImproperlyConfigured("You must configure an auth token at"
                                       "'settings.DROPBOX_OAUTH2_TOKEN'.")

        self.root_path = root_path or setting('DROPBOX_ROOT_PATH', '/')
        timeout = timeout or setting('DROPBOX_TIMEOUT', _DEFAULT_TIMEOUT)
        self.client = Dropbox(oauth2_access_token, timeout=timeout)

    def _full_path(self, name):
        if name == '/':
            name = ''
        return safe_join(self.root_path, name).replace('\\', '/')

    def delete(self, name):
        self.client.files_delete(self._full_path(name))

    def exists(self, name):
        try:
            return bool(self.client.files_get_metadata(self._full_path(name)))
        except ApiError:
            return False

    def listdir(self, path):
        directories, files = [], []
        full_path = self._full_path(path)

        if full_path == '/':
            full_path = ''

        metadata = self.client.files_list_folder(full_path)
        for entry in metadata.entries:
            if isinstance(entry, FolderMetadata):
                directories.append(entry.name)
            else:
                files.append(entry.name)
        return directories, files

    def size(self, name):
        metadata = self.client.files_get_metadata(self._full_path(name))
        return metadata.size

    def modified_time(self, name):
        metadata = self.client.files_get_metadata(self._full_path(name))
        return metadata.server_modified

    def accessed_time(self, name):
        metadata = self.client.files_get_metadata(self._full_path(name))
        return metadata.client_modified

    def url(self, name):
        media = self.client.files_get_temporary_link(self._full_path(name))
        return media.link

    def _open(self, name, mode='rb'):
        remote_file = DropBoxFile(self._full_path(name), self)
        return remote_file

    def _save(self, name, content):
        content.open()
        if content.size <= self.CHUNK_SIZE:
            self.client.files_upload(content.read(), self._full_path(name))
        else:
            self._chunked_upload(content, self._full_path(name))
        content.close()
        return name

    def _chunked_upload(self, content, dest_path):
        upload_session = self.client.files_upload_session_start(
            content.read(self.CHUNK_SIZE))
        cursor = UploadSessionCursor(session_id=upload_session.session_id,
                                     offset=content.tell())
        commit = CommitInfo(path=dest_path)

        while content.tell() < content.size:
            if (content.size - content.tell()) <= self.CHUNK_SIZE:
                self.client.files_upload_session_finish(
                    content.read(self.CHUNK_SIZE), cursor, commit)
            else:
                self.client.files_upload_session_append_v2(
                    content.read(self.CHUNK_SIZE), cursor)
                cursor.offset = content.tell()
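
Example #17 differs from #16 mainly in its `safe_join`-based path handling and the configurable request timeout. A short, hedged usage sketch of the standard Django `Storage` API against this class (the file name and contents below are made up):

from django.core.files.base import ContentFile

storage = DropBoxStorage()  # token, root path and timeout are read from settings
name = storage.save('reports/example.txt', ContentFile(b'hello dropbox'))
print(storage.size(name))   # byte size via files_get_metadata
print(storage.url(name))    # temporary direct link via files_get_temporary_link
storage.delete(name)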