Example #1
class DropboxHelper(object):
    def __init__(self, access_token):
        self.dropbox = Dropbox(oauth2_access_token=access_token)

    def upload(self, filename, file_path):
        with open(file_path, 'rb') as f:
            try:
                self.dropbox.files_upload(f.read(), '/' + filename)
            except Exception:
                os.remove(file_path)
                raise CommandError(
                    'Unable to upload file to Dropbox. Maybe access token is invalid.'
                )

    def delete_all_files(self):
        for i in self.dropbox.files_list_folder('').entries:
            self.dropbox.files_delete(i.path_lower)

    def download_last_backup(self, dir_path):
        entries = self.dropbox.files_list_folder('').entries

        if len(entries) == 0:
            raise CommandError('We could not find any backup.')

        entry = entries[-1]
        full_path = dir_path + entry.path_lower

        self.dropbox.files_download_to_file(full_path, entry.path_lower)
        return full_path, entry.content_hash
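A minimal usage sketch for the helper above, assuming a valid OAuth2 access token; the token value, file names, and local paths are placeholders:

helper = DropboxHelper(access_token='YOUR_OAUTH2_ACCESS_TOKEN')

# Upload a local file to the root of the app folder.
helper.upload('backup.sql.gz', '/tmp/backup.sql.gz')

# Fetch the most recent backup into a local directory; the returned
# content hash can be used to verify the download.
full_path, content_hash = helper.download_last_backup('/tmp/restore')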
Example #2
class DropboxHelper(object):

    def __init__(self, access_token):
        self.dropbox = Dropbox(oauth2_access_token=access_token)

    def upload(self, filename, file_path):
        with open(file_path, 'rb') as f:
            try:
                self.dropbox.files_upload(f.read(), '/' + filename)
            except Exception:
                os.remove(file_path)
                raise CommandError('Unable to upload file to Dropbox. Maybe access token is invalid.')

    def delete_all_files(self):
        for i in self.dropbox.files_list_folder('').entries:
            self.dropbox.files_delete(i.path_lower)

    def download_last_backup(self, dir_path):
        entries = self.dropbox.files_list_folder('').entries

        if len(entries) == 0:
            raise CommandError('We could not find any backup.')

        entry = entries[-1]
        full_path = dir_path + entry.path_lower

        self.dropbox.files_download_to_file(full_path, entry.path_lower)
        return full_path, entry.content_hash
Example #3
class TestDropbox(unittest.TestCase):
    def setUp(self):
        self.dbx = Dropbox(oauth2_token)

    def test_bad_auth(self):
        # Test malformed token
        malformed_token_dbx = Dropbox(MALFORMED_TOKEN)
        with self.assertRaises(BadInputError) as cm:
            malformed_token_dbx.files_list_folder('')
        self.assertIn('token is malformed', cm.exception.message)

        # Test reasonable-looking invalid token
        invalid_token_dbx = Dropbox(INVALID_TOKEN)
        with self.assertRaises(AuthError) as cm:
            invalid_token_dbx.files_list_folder('')
        self.assertEqual(cm.exception.error['error']['.tag'],
                         'invalid_access_token')

    def test_rpc(self):
        self.dbx.files_list_folder('')

        # Test API error
        random_folder_path = '/' + \
                             ''.join(random.sample(string.ascii_letters, 15))
        with self.assertRaises(ApiError) as cm:
            self.dbx.files_list_folder(random_folder_path)
        self.assertIsInstance(cm.exception.error, ListFolderError)

    def test_upload_download(self):
        # Upload file
        timestamp = str(datetime.datetime.utcnow())
        random_filename = ''.join(random.sample(string.ascii_letters, 15))
        random_path = '/Test/%s/%s' % (timestamp, random_filename)
        test_contents = DUMMY_PAYLOAD
        self.dbx.files_upload(test_contents, random_path)

        # Download file
        metadata, resp = self.dbx.files_download(random_path)
        self.assertEqual(DUMMY_PAYLOAD, resp.content)

        # Cleanup folder
        self.dbx.files_delete('/Test/%s' % timestamp)

    def test_bad_upload_types(self):
        with self.assertRaises(TypeError):
            self.dbx.files_upload(BytesIO(b'test'), '/Test')

    @require_team_token
    def test_team(self, token):
        dbxt = DropboxTeam(token)
        dbxt.team_groups_list()
        r = dbxt.team_members_list()
        if r.members:
            # Only test assuming a member if there is a member
            dbxt.as_user(
                r.members[0].profile.team_member_id).files_list_folder('')
Example #4
class TestDropbox(unittest.TestCase):

    def setUp(self):
        self.dbx = Dropbox(oauth2_token)

    def test_bad_auth(self):
        # Test malformed token
        malformed_token_dbx = Dropbox(MALFORMED_TOKEN)
        with self.assertRaises(BadInputError) as cm:
            malformed_token_dbx.files_list_folder('')
        self.assertIn('token is malformed', cm.exception.message)

        # Test reasonable-looking invalid token
        invalid_token_dbx = Dropbox(INVALID_TOKEN)
        with self.assertRaises(AuthError) as cm:
            invalid_token_dbx.files_list_folder('')
        self.assertEqual(cm.exception.error['error']['.tag'],
                         'invalid_access_token')

    def test_rpc(self):
        self.dbx.files_list_folder('')

        # Test API error
        random_folder_path = '/' + \
                             ''.join(random.sample(string.ascii_letters, 15))
        with self.assertRaises(ApiError) as cm:
            self.dbx.files_list_folder(random_folder_path)
        self.assertIsInstance(cm.exception.error, ListFolderError)

    def test_upload_download(self):
        # Upload file
        timestamp = str(datetime.datetime.utcnow())
        random_filename = ''.join(random.sample(string.ascii_letters, 15))
        random_path = '/Test/%s/%s' % (timestamp, random_filename)
        test_contents = string.ascii_letters
        self.dbx.files_upload(test_contents, random_path)

        # Download file
        metadata, resp = self.dbx.files_download(random_path)
        self.assertEqual(string.ascii_letters, resp.text)

        # Cleanup folder
        self.dbx.files_delete('/Test/%s' % timestamp)

    @require_team_token
    def test_team(self, token):
        dbxt = DropboxTeam(token)
        dbxt.team_groups_list()
        r = dbxt.team_members_list()
        if r.members:
            # Only test assuming a member if there is a member
            dbxt.as_user(r.members[0].profile.team_member_id).files_list_folder('')
Example #5
class TestDropbox(unittest.TestCase):

    def setUp(self):
        self.dbx = Dropbox(oauth2_token)

    def test_bad_auth(self):
        # Test malformed token
        malformed_token_dbx = Dropbox(MALFORMED_TOKEN)
        with self.assertRaises(BadInputError) as cm:
            malformed_token_dbx.files_list_folder('')
        self.assertIn('token is malformed', cm.exception.message)

        # Test reasonable-looking invalid token
        invalid_token_dbx = Dropbox(INVALID_TOKEN)
        with self.assertRaises(AuthError) as cm:
            invalid_token_dbx.files_list_folder('')
        self.assertEqual(cm.exception.reason['error']['.tag'],
                         'invalid_access_token')

    def test_rpc(self):
        self.dbx.files_list_folder('')

        # Test API error
        random_folder_path = '/' + \
                             ''.join(random.sample(string.ascii_letters, 15))
        with self.assertRaises(ApiError) as cm:
            self.dbx.files_list_folder(random_folder_path)
        self.assertIsInstance(cm.exception.reason, ListFolderError)

    def test_upload_download(self):
        # Upload file
        timestamp = str(datetime.datetime.utcnow())
        random_filename = ''.join(random.sample(string.ascii_letters, 15))
        random_path = '/Test/%s/%s' % (timestamp, random_filename)
        test_contents = string.ascii_letters
        self.dbx.files_upload(test_contents, random_path)

        # Download file
        metadata, resp = self.dbx.files_download(random_path)
        self.assertEqual(string.ascii_letters, resp.text)

        # Cleanup folder
        self.dbx.files_delete('/Test/%s' % timestamp)
Example #6
def delete():
    if access_token:
        dbx = Dropbox(access_token)
        path_lower = request.json["path_lower"]
        loop = 0
        result = False
        while not result and loop <= 9:
            try:
                metadata = dbx.files_delete(path_lower)
                result = True
            except ApiError:
                pass
            loop += 1
        if not result:
            return redirect(url_for("files", _external=True, _scheme="https"))
        return json.dumps({"success": True}), 200, {"Content-Type": "application/json"}
    else:
        return redirect(url_for("oauth2_start", _external=True, _scheme="https"))
Example #7
class DropBoxStorage(Storage):
    """DropBox Storage class for Django pluggable storage system."""
    def __init__(self, oauth2_access_token=None, root_path=None):
        oauth2_access_token = oauth2_access_token or setting(
            'DROPBOX_OAUTH2_TOKEN')
        self.root_path = root_path or setting('DROPBOX_ROOT_PATH', '/')
        if oauth2_access_token is None:
            raise ImproperlyConfigured("You must configure a token auth at"
                                       "'settings.DROPBOX_OAUTH2_TOKEN'.")
        self.client = Dropbox(oauth2_access_token)

    def _full_path(self, name):
        if name == '/':
            name = ''
        return safe_join(self.root_path, name).replace('\\', '/')

    def delete(self, name):
        self.client.files_delete(self._full_path(name))

    def exists(self, name):
        try:
            return bool(self.client.files_get_metadata(self._full_path(name)))
        except ApiError:
            return False

    def listdir(self, path):
        directories, files = [], []
        full_path = self._full_path(path)
        metadata = self.client.files_get_metadata(full_path)
        for entry in metadata['contents']:
            entry['path'] = entry['path'].replace(full_path, '', 1)
            entry['path'] = entry['path'].replace('/', '', 1)
            if entry['is_dir']:
                directories.append(entry['path'])
            else:
                files.append(entry['path'])
        return directories, files

    def size(self, name):
        metadata = self.client.files_get_metadata(self._full_path(name))
        return metadata['bytes']

    def modified_time(self, name):
        metadata = self.client.files_get_metadata(self._full_path(name))
        mod_time = datetime.strptime(metadata['modified'], DATE_FORMAT)
        return mod_time

    def accessed_time(self, name):
        metadata = self.client.files_get_metadata(self._full_path(name))
        acc_time = datetime.strptime(metadata['client_mtime'], DATE_FORMAT)
        return acc_time

    def url(self, name):
        media = self.client.files_get_temporary_link(self._full_path(name))
        return media['link']

    def _open(self, name, mode='rb'):
        remote_file = DropBoxFile(self._full_path(name), self)
        return remote_file

    def _save(self, name, content):
        self.client.files_upload(content, self._full_path(name))
        return name
Example #8
class dropboxClient(object):

    ''' a class of methods to manage file storage on Dropbox API '''

    # https://www.dropbox.com/developers/documentation/http/documentation

    _class_fields = {
        'schema': {
            'access_token': '',
            'collection_name': 'labPack',
            'record_key': 'obs/terminal/2016-03-17T17-24-51-687845Z.ogg',
            'record_key_path': '/home/user/.config/collective-acuity-labpack/user-data/obs/terminal',
            'record_key_comp': 'obs',
            'previous_key': 'obs/terminal/2016-03-17T17-24-51-687845Z.yaml',
            'secret_key': '6tZ0rUexOiBcOse2-dgDkbeY',
            'prefix': 'obs/terminal',
            'delimiter': '2016-03-17T17-24-51-687845Z.yaml',
            'max_results': 1
        },
        'components': {
            '.collection_name': {
                'max_length': 255,
                'must_not_contain': ['/', '^\\.']
            },
            '.record_key': {
                'must_not_contain': [ '[^\\w\\-\\./]', '^\\.', '\\.$', '^/', '//' ]
            },
            '.record_key_path': {
                'max_length': 32767
            },
            '.record_key_comp': {
                'max_length': 255
            },
            '.secret_key': {
                'must_not_contain': [ '[\\t\\n\\r]' ]
            },
            '.max_results': {
                'min_value': 1,
                'integer_data': True
            },
            '.previous_key': {
                'must_not_contain': [ '[^\\w\\-\\./]', '^\\.', '\\.$', '^/', '//' ]
            },
            '.prefix': {
                'must_not_contain': [ '[^\\w\\-\\./]', '^\\.', '\\.$', '^/', '//' ]
            }
        },
        'metadata': {
            'record_optimal_bytes': 10000 * 1024,
            'record_max_bytes': 150000 * 1024
        }
    }
    
    def __init__(self, access_token, collection_name=''):
        
        '''
            a method to initialize the dropboxClient class
            
        :param access_token: string with oauth2 access token for users account
        '''    

        title = '%s.__init__' % self.__class__.__name__
    
    # construct input validation model
        self.fields = jsonModel(self._class_fields)
        
    # validate inputs
        input_fields = {
            'access_token': access_token,
            'collection_name': collection_name
        }
        for key, value in input_fields.items():
            object_title = '%s(%s=%s)' % (title, key, str(value))
            self.fields.validate(value, '.%s' % key, object_title)
    
    # workaround for module namespace conflict
        from sys import path as sys_path
        sys_path.append(sys_path.pop(0))
        from dropbox import Dropbox
        from dropbox.files import FileMetadata, WriteMode, DeleteArg
        from dropbox.exceptions import ApiError
        sys_path.insert(0, sys_path.pop())
    
    # construct dropbox client
        from labpack.compilers.objects import _method_constructor
        self.dropbox = Dropbox(oauth2_access_token=access_token)
    
    # construct dropbox objects
        self.objects = _method_constructor({
            'FileMetadata': FileMetadata,
            'ApiError': ApiError,
            'WriteMode': WriteMode,
            'DeleteArg': DeleteArg
        })
    
    # construct collection name
        self.collection_name = collection_name
    
    def _import(self, record_key, record_data, overwrite=True, last_modified=0.0, **kwargs):
        
        '''
            a helper method for other storage clients to import into appdata
            
        :param record_key: string with key for record
        :param record_data: byte data for body of record
        :param overwrite: [optional] boolean to overwrite existing records
        :param last_modified: [optional] float to record last modified date
        :param kwargs: [optional] keyword arguments from other import methods 
        :return: boolean indicating whether record was imported
        '''
        
        title = '%s._import' % self.__class__.__name__
    
    # check overwrite
        if not overwrite:
            if self.exists(record_key):
                return False
    
    # check max size
        import sys
        record_max = self.fields.metadata['record_max_bytes']
        record_size = sys.getsizeof(record_data)
        error_prefix = '%s(record_key="%s", record_data=b"...")' % (title, record_key)
        if record_size > record_max:
            raise ValueError('%s exceeds maximum record data size of %s bytes.' % (error_prefix, record_max))
    
    # TODO: apply session upload for files greater than record_max
            
    # construct upload kwargs
        upload_kwargs = {
            'f': record_data,
            'path': '/%s' % record_key,
            'mute': True,
            'mode': self.objects.WriteMode.overwrite
        }
    
    # modify file time
        import re
        if re.search('\\.drep$', record_key):
            from labpack.records.time import labDT
            drep_time = labDT.fromEpoch(1)
            upload_kwargs['client_modified'] = drep_time
        elif last_modified:
            from labpack.records.time import labDT
            mod_time = labDT.fromEpoch(last_modified)
            upload_kwargs['client_modified'] = mod_time
    
    # send upload request
        try:
            self.dropbox.files_upload(**upload_kwargs)
        except:
            raise DropboxConnectionError(title)
        
        return True
    
    def _walk(self, root_path=''):
        ''' an iterator method which walks the file structure of the dropbox collection '''
        title = '%s._walk' % self.__class__.__name__
        if root_path:
            root_path = '/%s' % root_path
        try:
            response = self.dropbox.files_list_folder(path=root_path, recursive=True)
            for record in response.entries:
                if not isinstance(record, self.objects.FileMetadata):
                    continue
                yield record.path_display[1:]
            if response.has_more:
                while response.has_more:
                    response = self.dropbox.files_list_folder_continue(response.cursor)
                    for record in response.entries:
                        if not isinstance(record, self.objects.FileMetadata):
                            continue
                        yield record.path_display[1:]
        except:
            raise DropboxConnectionError(title)
    
    def exists(self, record_key):
        
        ''' 
            a method to determine if a record exists in collection

        :param record_key: string with key of record
        :return: boolean reporting status
        '''
        
        title = '%s.exists' % self.__class__.__name__
    
    # validate inputs
        input_fields = {
            'record_key': record_key
        }
        for key, value in input_fields.items():
            object_title = '%s(%s=%s)' % (title, key, str(value))
            self.fields.validate(value, '.%s' % key, object_title)
    
    # send get metadata request
        file_path = '/%s' % record_key
        try:
            self.dropbox.files_get_metadata(file_path)
        except Exception as err:
            if str(err).find("LookupError('not_found'") > -1:
                return False
            else:
                raise DropboxConnectionError(title)

        return True
        
    def save(self, record_key, record_data, overwrite=True, secret_key=''):

        ''' 
            a method to create a record in the collection folder

        :param record_key: string with name to assign to record (see NOTES below)
        :param record_data: byte data for record body
        :param overwrite: [optional] boolean to overwrite records with same name
        :param secret_key: [optional] string with key to encrypt data
        :return: string with name of record

        NOTE:   record_key may only contain alphanumeric, /, _, . or -
                characters and may not begin with the . or / character.

        NOTE:   using one or more / characters splits the key into
                separate segments. these segments will appear as
                sub-directories inside the record collection and each
                segment is used as a separate index for that record
                when using the list method
                eg. lab/unittests/1473719695.2165067.json is indexed:
                [ 'lab', 'unittests', '1473719695.2165067', '.json' ]
        '''

        title = '%s.save' % self.__class__.__name__
            
    # validate inputs
        input_fields = {
            'record_key': record_key,
            'secret_key': secret_key
        }
        for key, value in input_fields.items():
            if value:
                object_title = '%s(%s=%s)' % (title, key, str(value))
                self.fields.validate(value, '.%s' % key, object_title)
    
    # validate byte data
        if not isinstance(record_data, bytes):
            raise ValueError('%s(record_data=b"...") must be byte data.' % title)
        
    # construct and validate file path
        file_root, file_name = os.path.split(record_key)
        self.fields.validate(file_name, '.record_key_comp')
        while file_root:
            file_root, path_node = os.path.split(file_root)
            self.fields.validate(path_node, '.record_key_comp')

    # check overwrite exception
        if not overwrite:
            if self.exists(record_key):
                raise Exception('%s(record_key="%s") already exists. To overwrite, set overwrite=True' % (title, record_key))
    
    # check size of file
        import sys
        record_optimal = self.fields.metadata['record_optimal_bytes']
        record_max = self.fields.metadata['record_max_bytes']
        record_size = sys.getsizeof(record_data)
        error_prefix = '%s(record_key="%s", record_data=b"...")' % (title, record_key)
        if record_size > record_max:
            raise ValueError('%s exceeds maximum record data size of %s bytes.' % (error_prefix, record_max))
        elif record_size > record_optimal:
            print('[WARNING] %s exceeds optimal record data size of %s bytes.' % (error_prefix, record_optimal))
    
    # TODO add upload session for support of files over 150MB
    # http://dropbox-sdk-python.readthedocs.io/en/latest/moduledoc.html#dropbox.dropbox.Dropbox.files_upload_session_start
            
    # encrypt data
        if secret_key:
            from labpack.encryption import cryptolab
            record_data, secret_key = cryptolab.encrypt(record_data, secret_key)
    
    # construct upload kwargs
        upload_kwargs = {
            'f': record_data,
            'path': '/%s' % record_key,
            'mute': True,
            'mode': self.objects.WriteMode.overwrite
        }
    
    # modify file time
        import re
        if re.search('\\.drep$', file_name):
            from labpack.records.time import labDT
            drep_time = labDT.fromEpoch(1)
            upload_kwargs['client_modified'] = drep_time
    
    # send upload request
        try:
            self.dropbox.files_upload(**upload_kwargs)
        except:
            raise DropboxConnectionError(title)
        
        return record_key
    
    def load(self, record_key, secret_key=''):

        ''' 
            a method to retrieve byte data of appdata record

        :param record_key: string with name of record
        :param secret_key: [optional] string used to decrypt data
        :return: byte data for record body
        '''

        title = '%s.load' % self.__class__.__name__
    
    # validate inputs
        input_fields = {
            'record_key': record_key,
            'secret_key': secret_key
        }
        for key, value in input_fields.items():
            if value:
                object_title = '%s(%s=%s)' % (title, key, str(value))
                self.fields.validate(value, '.%s' % key, object_title)

    # construct file path
        file_path = '/%s' % record_key
    
    # request file data
        try:
            metadata, response = self.dropbox.files_download(file_path)
        except Exception as err:
            if str(err).find("LookupError('not_found'") > -1:
                raise Exception('%s(record_key=%s) does not exist.' % (title, record_key))
            else:
                raise DropboxConnectionError(title)
        record_data = response.content
    
    # decrypt (if necessary)
        if secret_key:
            from labpack.encryption import cryptolab
            record_data = cryptolab.decrypt(record_data, secret_key)
    
        return record_data
    
    def conditional_filter(self, path_filters):

        ''' a method to construct a conditional filter function for list method

        :param path_filters: dictionary or list of dictionaries with query criteria
        :return: filter_function object

        path_filters:
        [ { 0: { conditional operators }, 1: { conditional_operators }, ... } ]

        conditional operators:
            "byte_data": false,
            "discrete_values": [ "" ],
            "excluded_values": [ "" ],
            "greater_than": "",
            "less_than": "",
            "max_length": 0,
            "max_value": "",
            "min_length": 0,
            "min_value": "",
            "must_contain": [ "" ],
            "must_not_contain": [ "" ],
            "contains_either": [ "" ]
        '''

        title = '%s.conditional_filter' % self.__class__.__name__
        
        from labpack.compilers.filters import positional_filter
        filter_function = positional_filter(path_filters, title)
        
        return filter_function

    def list(self, prefix='', delimiter='', filter_function=None, max_results=1, previous_key=''):
        
        ''' 
            a method to list keys in the dropbox collection

        :param prefix: string with prefix value to filter results
        :param delimiter: string with value which results must not contain (after prefix)
        :param filter_function: (positional arguments) function used to filter results
        :param max_results: integer with maximum number of results to return
        :param previous_key: string with key in collection to begin search after
        :return: list of key strings

            NOTE:   each key string can be divided into one or more segments
                    based upon the / characters which occur in the key string as
                    well as its file extension type. if the key string represents
                    a file path, then each directory in the path, the file name
                    and the file extension are all separate indexed values.

                    eg. lab/unittests/1473719695.2165067.json is indexed:
                    [ 'lab', 'unittests', '1473719695.2165067', '.json' ]

                    it is possible to filter the records in the collection according
                    to one or more of these path segments using a filter_function.

            NOTE:   the filter_function must be able to accept an array of positional
                    arguments and return a value that can evaluate to true or false.
                    while searching the records, list produces an array of strings
                    which represent the directory structure in relative path of each
                    key string. if a filter_function is provided, this list of strings
                    is fed to the filter function. if the function evaluates this input
                    and returns a true value the file will be included in the list
                    results.
        '''
        
        title = '%s.list' % self.__class__.__name__
        
    # validate input
        input_fields = {
            'prefix': prefix,
            'delimiter': delimiter,
            'max_results': max_results,
            'previous_key': previous_key
        }
        for key, value in input_fields.items():
            if value:
                object_title = '%s(%s=%s)' % (title, key, str(value))
                self.fields.validate(value, '.%s' % key, object_title)

    # validate filter function
        if filter_function:
            try:
                path_segments = [ 'lab', 'unittests', '1473719695.2165067', '.json' ]
                filter_function(*path_segments)
            except:
                err_msg = '%s(filter_function=%s)' % (title, filter_function.__class__.__name__)
                raise TypeError('%s must accept positional arguments.' % err_msg)

    # construct empty results list
        results_list = []
        check_key = True
        if previous_key: 
            check_key = False
    
    # determine root path
        root_path = ''
        if prefix:
            from os import path
            root_path, file_name = path.split(prefix)

    # iterate over dropbox files
        for file_path in self._walk(root_path):
            path_segments = file_path.split(os.sep)
            record_key = os.path.join(*path_segments)
            record_key = record_key.replace('\\','/')
            if record_key == previous_key:
                check_key = True
    
    # find starting point
            if not check_key:
                continue
                
    # apply prefix filter
            partial_key = record_key
            if prefix:
                if record_key.find(prefix) == 0:
                    partial_key = record_key[len(prefix):]
                else:
                    continue
    
    # apply delimiter filter
            if delimiter:
                if partial_key.find(delimiter) > -1:
                    continue
    
    # apply filter function
            if filter_function:
                if filter_function(*path_segments):
                    results_list.append(record_key)
            else:
                results_list.append(record_key)

    # return results list
            if len(results_list) == max_results:
                return results_list

        return results_list
    
    def delete(self, record_key):

        ''' a method to delete a file

        :param record_key: string with name of file
        :return: string reporting outcome
        '''

        title = '%s.delete' % self.__class__.__name__

    # validate inputs
        input_fields = {
            'record_key': record_key
        }
        for key, value in input_fields.items():
            object_title = '%s(%s=%s)' % (title, key, str(value))
            self.fields.validate(value, '.%s' % key, object_title)

    # validate existence of file
        if not self.exists(record_key):
            exit_msg = '%s does not exist.' % record_key
            return exit_msg
            
    # remove file
        current_dir = os.path.split(record_key)[0]
        try:
            file_path = '/%s' % record_key
            self.dropbox.files_delete(file_path)
        except:
            raise DropboxConnectionError(title)

    # remove empty directories in path to file
        try:
            while current_dir:
                folder_path = '/%s' % current_dir
                response = self.dropbox.files_list_folder(folder_path)
                if not response.entries:
                    self.dropbox.files_delete(folder_path)
                    current_dir = os.path.split(current_dir)[0]
                else:
                    break
        except:
            raise DropboxConnectionError(title)

        exit_msg = '%s has been deleted.' % record_key
        return exit_msg
    
    def remove(self):
        
        ''' 
            a method to remove all records in the collection

        NOTE:   this method removes all the files in the collection, but the
                collection folder itself created by oauth2 cannot be removed.
                only the user can remove the app folder
                
        :return: string with confirmation of deletion
        '''

        title = '%s.remove' % self.__class__.__name__
    
    # get contents in root
        try:
            response = self.dropbox.files_list_folder(path='')
        except:
            raise DropboxConnectionError(title)

    # populate delete list
        delete_list = []
        for file in response.entries:
            delete_list.append(self.objects.DeleteArg(path=file.path_display))

    # continue retrieval if folder is large
        if response.has_more:
            try:
                while response.has_more:
                    response = self.dropbox.files_list_folder_continue(response.cursor)
                    for file in response.entries:
                        delete_list.append(self.objects.DeleteArg(path=file.path_display))
            except:
                raise DropboxConnectionError(title)

    # send batch delete request
        try:
            self.dropbox.files_delete_batch(delete_list)
        except:
            raise DropboxConnectionError(title)
    
    # return outcome
        insert = 'collection'
        if self.collection_name:
            insert = self.collection_name
        exit_msg = 'Contents of %s will be removed from Dropbox.' % insert
        return exit_msg

    def export(self, storage_client, overwrite=True):
        
        '''
            a method to export all the records in collection to another platform
            
        :param storage_client: class object with storage client methods
        :return: string with exit message
        '''
        
        title = '%s.export' % self.__class__.__name__
        
    # validate storage client
        method_list = [ 'save', 'load', 'list', 'export', 'delete', 'remove', '_import', 'collection_name' ]
        for method in method_list:
            if not getattr(storage_client, method, None):
                from labpack.parsing.grammar import join_words
                raise ValueError('%s(storage_client=...) must be a client object with %s methods.' % (title, join_words(method_list)))
            
    # walk collection folder to find files
        import os
        count = 0
        skipped = 0
        for file_path in self._walk():
            path_segments = file_path.split(os.sep)
            record_key = os.path.join(*path_segments)
            record_key = record_key.replace('\\','/')
            file_path = '/%s' % file_path
            
    # retrieve data and metadata
            try:
                metadata, response = self.dropbox.files_download(file_path)
            except:
                raise DropboxConnectionError(title)
            record_data = response.content
            client_modified = metadata.client_modified
            
    # import record into storage client
            last_modified = 0.0
            if client_modified:
                from dateutil.tz import tzutc
                from labpack.records.time import labDT
                last_modified = labDT.fromPython(client_modified.replace(tzinfo=tzutc())).epoch()
            outcome = storage_client._import(record_key, record_data, overwrite=overwrite, last_modified=last_modified)
            if outcome:
                count += 1
            else:
                skipped += 1
            
    # report outcome
        plural = ''
        skip_insert = ''
        new_folder = storage_client.collection_name
        if count != 1:
            plural = 's'
        if skipped > 0:
            skip_plural = ''
            if skipped > 1:
                skip_plural = 's'
            skip_insert = ' %s record%s skipped to avoid overwrite.' % (str(skipped), skip_plural)
        exit_msg = '%s record%s exported to %s.%s' % (str(count), plural, new_folder, skip_insert)
        return exit_msg
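A short usage sketch for the client above; the import path is hypothetical and the access token and record keys are placeholders:

# hypothetical import path for the class defined above
from labpack.storage.dropbox import dropboxClient

client = dropboxClient(access_token='YOUR_OAUTH2_ACCESS_TOKEN', collection_name='labPack')

# save byte data under a record key, then read it back
client.save('obs/terminal/readme.txt', b'hello dropbox')
record_data = client.load('obs/terminal/readme.txt')

# list keys under a prefix; a filter_function receives the path
# segments of each key as positional arguments
keys = client.list(prefix='obs/terminal', max_results=10)
keys = client.list(filter_function=lambda *segments: segments[0] == 'obs', max_results=10)

# clean up
client.delete('obs/terminal/readme.txt')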
Example #9
class DropboxNoteProvider(RemoteNoteProvider):

    __DAY_ONE_EXTENSION = ".doentry"

    def __init__(self, accessToken, folder, proxyHost=None, proxyPort=None, proxyUser=None, proxyPassword=None):
        proxies = _proxies(proxyHost, proxyPort, proxyUser, proxyPassword)
        self.__token = accessToken
        self.__basePath = folder
        self.__notesPath = folder + "/entries"
        self.__removedNotesPath = self.__notesPath + "/deleted"
        self.__photosPath = folder + "/photos"
        self.__client = Dropbox(self.__token, session=create_session(proxies=proxies))
        self.__notesCache = {}
        self.__dayOneFlavor = folder == SyncFolder.DayOne

    @online
    @expires
    def sync(self):
        _LOG.info("Listing all notes and photos")
        folder = self.__client.files_list_folder(self.__basePath, recursive=True)
        files = list(filter(lambda e: e is not None, map(FileEntry.fromMetadata, folder.entries)))
        while folder.has_more:
            folder = self.__client.files_list_folder_continue(folder.cursor)
            files.extend(filter(lambda e: e is not None, map(FileEntry.fromMetadata, folder.entries)))

        notes = {}
        for file in filter(lambda f: f.folder == self.__notesPath and isUuid(self.__normalizeNoteName(f.name)), files):
            uuid = self.__normalizeNoteName(file.name)
            notes[uuid] = NoteStatus(uuid, file.lastModified)

        for file in filter(lambda f: f.folder == self.__photosPath and f.name.endswith(".jpg"), files):
            uuid = file.name[:-4]
            if uuid in notes:
                notes[uuid].hasPhoto = True

        for file in filter(lambda f: f.folder == self.__removedNotesPath and isUuid(self.__normalizeNoteName(f.name)),
                           files):
            uuid = self.__normalizeNoteName(file.name)
            if uuid in notes:
                if file.lastModified >= notes[uuid].lastModified:
                    _LOG.warning("Sync integrity check deleting note: %s", uuid)
                    try:
                        self.__client.files_delete(self.__notePath(uuid))
                    except ApiError:
                        _LOG.warning("Note %s not found", uuid)
                    if notes[uuid].hasPhoto:
                        _LOG.warning("Sync integrity check deleting photo: %s", uuid)
                        try:
                            self.__client.files_delete(self.__photoPath(uuid))
                        except ApiError:
                            _LOG.warning("Photo %s not found", uuid)
                    del notes[uuid]
                else:
                    _LOG.warning("Sync integrity check deleting REMOVED note: %s", uuid)
                    try:
                        self.__client.files_delete(self.__removedNotePath(uuid))
                    except ApiError:
                        _LOG.warning("REMOVED note %s not found", uuid)
                    continue
            notes[uuid] = NoteStatus(uuid, file.lastModified, True)

        self.__notesCache = notes
        return notes

    @online
    @expires
    def get(self, uuid):
        _LOG.info("Getting note: %s", uuid)
        metadata, response = self.__client.files_download(self.__notePath(uuid))
        with response:
            note = unmarshalNote(response.content, metadata.client_modified)
        if uuid not in self.__notesCache or self.__notesCache[uuid].hasPhoto:
            _LOG.info("Getting photo: %s", uuid)
            try:
                with self.__client.files_download(self.__photoPath(uuid))[1] as response:
                    note.photo = response.content
            except ApiError as e:
                if e.error.is_path() and e.error.get_path().is_not_found():
                    _LOG.warning("Photo %s does not exist", uuid)
                else:
                    raise e
        return renderHtml(note)

    @online
    @expires
    def add(self, note):
        uuid = note.uuid
        _LOG.info("Adding note: %s", uuid)
        self.__uploadFile(self.__notePath(uuid), note.lastModified, marshalNote(note), overwrite=False)

        if note.photo:
            _LOG.info("Adding photo: %s", uuid)
            self.__uploadFile(self.__photoPath(uuid), note.lastModified, note.photo)
        elif uuid in self.__notesCache and self.__notesCache[uuid].hasPhoto:
            _LOG.info("Deleting photo: %s", uuid)
            try:
                self.__client.files_delete(self.__photoPath(uuid))
            except ApiError:
                _LOG.warning("Photo %s not found", uuid)

        # Clean removed note if exists
        if uuid in self.__notesCache and self.__notesCache[uuid].removed:
            _LOG.info("Deleting REMOVED note: %s", uuid)
            try:
                self.__client.files_delete(self.__removedNotePath(uuid))
            except ApiError:
                _LOG.warning("REMOVED note %s not found", uuid)

    @online
    @expires
    def update(self, note):
        uuid = note.uuid
        # Check if note exists
        if self.__notesCache and (uuid not in self.__notesCache or self.__notesCache[uuid].removed):
            raise RuntimeError("Note[uuid=%s] does not exist" % uuid)

        _LOG.info("Updating note: %s", uuid)
        self.__uploadFile(self.__notePath(uuid), note.lastModified, marshalNote(note))

        if note.photo:
            _LOG.info("Updating photo: %s", uuid)
            self.__uploadFile(self.__photoPath(uuid), note.lastModified, note.photo)
        elif uuid not in self.__notesCache or self.__notesCache[uuid].hasPhoto:
            _LOG.info("Deleting photo: %s", uuid)
            try:
                self.__client.files_delete(self.__photoPath(uuid))
            except ApiError:
                _LOG.warning("Photo %s not found", uuid)

    @online
    @expires
    def remove(self, note):
        uuid = note.uuid

        # Remove note if exists
        if uuid in self.__notesCache and not self.__notesCache[uuid].removed:
            _LOG.info("Deleting note: %s", uuid)
            try:
                self.__client.files_delete(self.__notePath(uuid))
            except ApiError:
                _LOG.warning("Note %s not found", uuid)

        # Remove photo if exists
        if uuid in self.__notesCache and self.__notesCache[uuid].hasPhoto:
            _LOG.info("Deleting photo: %s", uuid)
            try:
                self.__client.files_delete(self.__photoPath(uuid))
            except ApiError:
                _LOG.warning("Photo %s not found", uuid)

        _LOG.info("Adding REMOVED note: %s", uuid)
        self.__uploadFile(self.__removedNotePath(uuid), note.lastModified, b"")

    def __uploadFile(self, path, lastModified, content, overwrite=True):
        mode = WriteMode.overwrite if overwrite else WriteMode.add
        self.__client.files_upload(content, path, mode=mode, client_modified=lastModified)

    def __normalizeNoteName(self, name):
        if self.__dayOneFlavor and name.endswith(self.__DAY_ONE_EXTENSION):
            name = name[:-(len(self.__DAY_ONE_EXTENSION))]
        return name

    def __buildNotePath(self, parentPath, uuid):
        path = parentPath + "/" + uuid
        if self.__dayOneFlavor:
            path += self.__DAY_ONE_EXTENSION
        return path

    def __notePath(self, uuid):
        return self.__buildNotePath(self.__notesPath, uuid)

    def __removedNotePath(self, uuid):
        return self.__buildNotePath(self.__removedNotesPath, uuid)

    def __photoPath(self, uuid):
        return self.__photosPath + "/" + uuid + ".jpg"
Example #10
class DropBoxStorage(Storage):
    """DropBox Storage class for Django pluggable storage system."""
    location = setting('DROPBOX_ROOT_PATH', '/')
    oauth2_access_token = setting('DROPBOX_OAUTH2_TOKEN')
    timeout = setting('DROPBOX_TIMEOUT', _DEFAULT_TIMEOUT)
    write_mode = setting('DROPBOX_WRITE_MODE', _DEFAULT_MODE)

    CHUNK_SIZE = 4 * 1024 * 1024

    def __init__(self, oauth2_access_token=oauth2_access_token, root_path=location, timeout=timeout,
                 write_mode=write_mode):
        if oauth2_access_token is None:
            raise ImproperlyConfigured("You must configure an auth token at"
                                       "'settings.DROPBOX_OAUTH2_TOKEN'.")

        self.root_path = root_path
        self.write_mode = write_mode
        self.client = Dropbox(oauth2_access_token, timeout=timeout)

    def _full_path(self, name):
        if name == '/':
            name = ''
        
        # If the machine is windows do not append the drive letter to file path
        if os.name == 'nt':
            final_path = os.path.join(self.root_path, name).replace('\\', '/')
            
            # Separator on linux system
            sep = '//'
            base_path = self.root_path

            if (not os.path.normcase(final_path).startswith(os.path.normcase(base_path + sep)) and
                    os.path.normcase(final_path) != os.path.normcase(base_path) and
                    os.path.dirname(os.path.normcase(base_path)) != os.path.normcase(base_path)):
                raise SuspiciousFileOperation(
                    'The joined path ({}) is located outside of the base path '
                    'component ({})'.format(final_path, base_path))
            
            return final_path
        
        else:
            return safe_join(self.root_path, name).replace('\\', '/')

    def delete(self, name):
        self.client.files_delete(self._full_path(name))

    def exists(self, name):
        try:
            return bool(self.client.files_get_metadata(self._full_path(name)))
        except ApiError:
            return False

    def listdir(self, path):
        directories, files = [], []
        full_path = self._full_path(path)

        if full_path == '/':
            full_path = ''

        metadata = self.client.files_list_folder(full_path)
        for entry in metadata.entries:
            if isinstance(entry, FolderMetadata):
                directories.append(entry.name)
            else:
                files.append(entry.name)
        return directories, files

    def size(self, name):
        metadata = self.client.files_get_metadata(self._full_path(name))
        return metadata.size

    def modified_time(self, name):
        metadata = self.client.files_get_metadata(self._full_path(name))
        return metadata.server_modified

    def accessed_time(self, name):
        metadata = self.client.files_get_metadata(self._full_path(name))
        return metadata.client_modified

    def url(self, name):
        media = self.client.files_get_temporary_link(self._full_path(name))
        return media.link

    def _open(self, name, mode='rb'):
        remote_file = DropBoxFile(self._full_path(name), self)
        return remote_file

    def _save(self, name, content):
        content.open()
        if content.size <= self.CHUNK_SIZE:
            self.client.files_upload(content.read(), self._full_path(name), mode=WriteMode(self.write_mode))
        else:
            self._chunked_upload(content, self._full_path(name))
        content.close()
        return name

    def _chunked_upload(self, content, dest_path):
        upload_session = self.client.files_upload_session_start(
            content.read(self.CHUNK_SIZE)
        )
        cursor = UploadSessionCursor(
            session_id=upload_session.session_id,
            offset=content.tell()
        )
        commit = CommitInfo(path=dest_path, mode=WriteMode(self.write_mode))

        while content.tell() < content.size:
            if (content.size - content.tell()) <= self.CHUNK_SIZE:
                self.client.files_upload_session_finish(
                    content.read(self.CHUNK_SIZE), cursor, commit
                )
            else:
                self.client.files_upload_session_append_v2(
                    content.read(self.CHUNK_SIZE), cursor
                )
                cursor.offset = content.tell()

    def get_available_name(self, name, max_length=None):
        """Overwrite existing file with the same name."""
        name = self._full_path(name)
        if self.write_mode == 'overwrite':
            return get_available_overwrite_name(name, max_length)
        return super().get_available_name(name, max_length)
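A brief sketch of wiring this storage class into a Django project. The DROPBOX_* settings names are the ones the class above reads via setting(); the token value and the dotted import path are placeholders:

# settings.py
DROPBOX_OAUTH2_TOKEN = 'YOUR_OAUTH2_ACCESS_TOKEN'
DROPBOX_ROOT_PATH = '/django-media'
# dotted path depends on where DropBoxStorage lives in your code base
DEFAULT_FILE_STORAGE = 'myproject.storage.DropBoxStorage'

# application code: use it like any other Django storage backend
from django.core.files.base import ContentFile
from django.core.files.storage import default_storage

name = default_storage.save('notes/hello.txt', ContentFile(b'hello'))
url = default_storage.url(name)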
Example #11
class DPBXBackend(duplicity.backend.Backend):
    """Connect to remote store using Dr*pB*x service"""
    def __init__(self, parsed_url):
        duplicity.backend.Backend.__init__(self, parsed_url)

        self.api_account = None
        self.api_client = None
        self.auth_flow = None

        self.login()

    def user_authenticated(self):
        try:
            account = self.api_client.users_get_current_account()
            log.Debug("User authenticated as ,%s" % account)
            return True
        except:
            log.Debug('User not authenticated')
            return False

    def load_access_token(self):
        return os.environ.get('DPBX_ACCESS_TOKEN', None)

    def save_access_token(self, access_token):
        raise BackendException(
            'dpbx: Please set DPBX_ACCESS_TOKEN=\"%s\" environment variable' %
            access_token)

    def obtain_access_token(self):
        log.Info("dpbx: trying to obtain access token")
        for env_var in ['DPBX_APP_KEY', 'DPBX_APP_SECRET']:
            if env_var not in os.environ:
                raise BackendException(
                    'dpbx: %s environment variable not set' % env_var)

        app_key = os.environ['DPBX_APP_KEY']
        app_secret = os.environ['DPBX_APP_SECRET']

        if not sys.stdout.isatty() or not sys.stdin.isatty():
            log.FatalError(
                'dpbx error: cannot interact, but need human attention',
                log.ErrorCode.backend_command_error)

        auth_flow = DropboxOAuth2FlowNoRedirect(app_key, app_secret)
        log.Debug('dpbx,auth_flow.start()')
        authorize_url = auth_flow.start()
        print
        print '-' * 72
        print "1. Go to: " + authorize_url
        print "2. Click \"Allow\" (you might have to log in first)."
        print "3. Copy the authorization code."
        print '-' * 72
        auth_code = raw_input("Enter the authorization code here: ").strip()
        try:
            log.Debug('dpbx,auth_flow.finish(%s)' % auth_code)
            authresult = auth_flow.finish(auth_code)
        except Exception as e:
            raise BackendException('dpbx: Unable to obtain access token: %s' %
                                   e)
        log.Info("dpbx: Authentication successfull")
        self.save_access_token(authresult.access_token)

    def login(self):
        if self.load_access_token() is None:
            self.obtain_access_token()

        self.api_client = Dropbox(self.load_access_token())
        self.api_account = None
        try:
            log.Debug('dpbx,users_get_current_account([token])')
            self.api_account = self.api_client.users_get_current_account()
            log.Debug("dpbx,%s" % self.api_account)

        except (BadInputError, AuthError) as e:
            log.Debug('dpbx,exception: %s' % e)
            log.Info(
                "dpbx: Authentication failed. Trying to obtain new access token"
            )

            self.obtain_access_token()

            # We're assuming obtain_access_token will throw exception.
            # So this line should not be reached
            raise BackendException(
                "dpbx: Please update DPBX_ACCESS_TOKEN and try again")

        log.Info("dpbx: Successfully authenticated as %s" %
                 self.api_account.name.display_name)

    def _error_code(self, operation, e):
        if isinstance(e, ApiError):
            err = e.error

            if isinstance(err, GetMetadataError) and err.is_path():
                if err.get_path().is_not_found():
                    return log.ErrorCode.backend_not_found
            elif isinstance(err, DeleteError) and err.is_path_lookup():
                lookup = e.error.get_path_lookup()
                if lookup.is_not_found():
                    return log.ErrorCode.backend_not_found

    @command()
    def _put(self, source_path, remote_filename):
        remote_dir = urllib.unquote(self.parsed_url.path.lstrip('/'))
        remote_path = '/' + os.path.join(remote_dir, remote_filename).rstrip()

        file_size = os.path.getsize(source_path.name)
        progress.report_transfer(0, file_size)

        if file_size < DPBX_UPLOAD_CHUNK_SIZE:
            # Upload whole file at once to avoid extra server request
            res_metadata = self.put_file_small(source_path, remote_path)
        else:
            res_metadata = self.put_file_chunked(source_path, remote_path)

        # A few sanity checks
        if res_metadata.path_display != remote_path:
            raise BackendException(
                'dpbx: result path mismatch: %s (expected: %s)' %
                (res_metadata.path_display, remote_path))
        if res_metadata.size != file_size:
            raise BackendException(
                'dpbx: result size mismatch: %s (expected: %s)' %
                (res_metadata.size, file_size))

    def put_file_small(self, source_path, remote_path):
        if not self.user_authenticated():
            self.login()

        file_size = os.path.getsize(source_path.name)
        f = source_path.open('rb')
        try:
            log.Debug('dpbx,files_upload(%s, [%d bytes])' %
                      (remote_path, file_size))

            res_metadata = self.api_client.files_upload(
                f.read(),
                remote_path,
                mode=WriteMode.overwrite,
                autorename=False,
                client_modified=None,
                mute=True)
            log.Debug('dpbx,files_upload(): %s' % res_metadata)
            progress.report_transfer(file_size, file_size)
            return res_metadata
        finally:
            f.close()

    def put_file_chunked(self, source_path, remote_path):
        if not self.user_authenticated():
            self.login()

        file_size = os.path.getsize(source_path.name)
        f = source_path.open('rb')
        try:
            buf = f.read(DPBX_UPLOAD_CHUNK_SIZE)
            log.Debug(
                'dpbx,files_upload_session_start([%d bytes]), total: %d' %
                (len(buf), file_size))
            upload_sid = self.api_client.files_upload_session_start(buf)
            log.Debug('dpbx,files_upload_session_start(): %s' % upload_sid)
            upload_cursor = UploadSessionCursor(upload_sid.session_id,
                                                f.tell())
            commit_info = CommitInfo(remote_path,
                                     mode=WriteMode.overwrite,
                                     autorename=False,
                                     client_modified=None,
                                     mute=True)
            res_metadata = None
            progress.report_transfer(f.tell(), file_size)

            requested_offset = None
            current_chunk_size = DPBX_UPLOAD_CHUNK_SIZE
            retry_number = globals.num_retries
            is_eof = False

            # We're doing our own error handling and retry logic because we
            # can benefit from the Dpbx chunked upload and retry only the
            # failed chunk
            while not is_eof or not res_metadata:
                try:
                    if requested_offset is not None:
                        upload_cursor.offset = requested_offset

                    if f.tell() != upload_cursor.offset:
                        f.seek(upload_cursor.offset)
                    buf = f.read(current_chunk_size)

                    is_eof = f.tell() >= file_size
                    if not is_eof and len(buf) == 0:
                        continue

                    # reset temporary status variables
                    requested_offset = None
                    current_chunk_size = DPBX_UPLOAD_CHUNK_SIZE
                    retry_number = globals.num_retries

                    if not is_eof:
                        assert len(buf) != 0
                        log.Debug(
                            'dpbx,files_upload_session_append([%d bytes], offset=%d)'
                            % (len(buf), upload_cursor.offset))
                        self.api_client.files_upload_session_append(
                            buf, upload_cursor.session_id,
                            upload_cursor.offset)
                    else:
                        log.Debug(
                            'dpbx,files_upload_session_finish([%d bytes], offset=%d)'
                            % (len(buf), upload_cursor.offset))
                        res_metadata = self.api_client.files_upload_session_finish(
                            buf, upload_cursor, commit_info)

                    upload_cursor.offset = f.tell()
                    log.Debug('progress: %d of %d' %
                              (upload_cursor.offset, file_size))
                    progress.report_transfer(upload_cursor.offset, file_size)
                except ApiError as e:
                    error = e.error
                    if isinstance(error, UploadSessionLookupError
                                  ) and error.is_incorrect_offset():
                        # The server reports that we should send another chunk.
                        # Most likely this is caused by a network error during
                        # the previous upload attempt. In that case we'll get
                        # the expected offset from the server and it's enough
                        # to just seek() and retry again
                        new_offset = error.get_incorrect_offset(
                        ).correct_offset
                        log.Debug(
                            'dpbx,files_upload_session_append: incorrect offset: %d (expected: %s)'
                            % (upload_cursor.offset, new_offset))
                        if requested_offset is not None:
                            # chunk failed even after the seek attempt. Something
                            # strange happened and there is no safe way to recover
                            raise BackendException(
                                "dpbx: unable to chunk upload")
                        else:
                            # will seek and retry
                            requested_offset = new_offset
                        continue
                    raise
                except ConnectionError as e:
                    log.Debug('dpbx,files_upload_session_append: %s' % e)

                    retry_number -= 1

                    if not self.user_authenticated():
                        self.login()

                    if retry_number == 0:
                        raise

                    # We don't know for sure whether the partial upload
                    # succeeded, so it's better to retry with a smaller chunk
                    # to avoid extra re-uploading
                    log.Info('dpbx: sleeping a bit before chunk retry')
                    time.sleep(30)
                    current_chunk_size = DPBX_UPLOAD_CHUNK_SIZE // 5
                    requested_offset = None
                    continue

            if f.tell() != file_size:
                raise BackendException('dpbx: something wrong')

            log.Debug('dpbx,files_upload_session_finish(): %s' % res_metadata)
            progress.report_transfer(f.tell(), file_size)

            return res_metadata

        finally:
            f.close()

    @command()
    def _get(self, remote_filename, local_path):
        if not self.user_authenticated():
            self.login()

        remote_dir = urllib.unquote(self.parsed_url.path.lstrip('/'))
        remote_path = '/' + os.path.join(remote_dir, remote_filename).rstrip()

        log.Debug('dpbx,files_download(%s)' % remote_path)
        res_metadata, http_fd = self.api_client.files_download(remote_path)
        log.Debug('dpbx,files_download(%s): %s, %s' %
                  (remote_path, res_metadata, http_fd))
        file_size = res_metadata.size
        to_fd = None
        progress.report_transfer(0, file_size)
        try:
            to_fd = local_path.open('wb')
            for c in http_fd.iter_content(DPBX_DOWNLOAD_BUF_SIZE):
                to_fd.write(c)
                progress.report_transfer(to_fd.tell(), file_size)

        finally:
            if to_fd:
                to_fd.close()
            http_fd.close()

        # This differs from the _query() check because we don't query the
        # metadata again. Since the check is free, it's worth having here
        local_size = os.path.getsize(local_path.name)
        if local_size != file_size:
            raise BackendException("dpbx: wrong file size: %d (expected: %d)" %
                                   (local_size, file_size))

        local_path.setdata()

    @command()
    def _list(self):
        # Do a long listing to avoid connection reset
        if not self.user_authenticated():
            self.login()
        remote_dir = '/' + urllib.unquote(
            self.parsed_url.path.lstrip('/')).rstrip()

        log.Debug('dpbx.files_list_folder(%s)' % remote_dir)
        res = []
        try:
            resp = self.api_client.files_list_folder(remote_dir)
            log.Debug('dpbx.list(%s): %s' % (remote_dir, resp))

            while True:
                res.extend([entry.name for entry in resp.entries])
                if not resp.has_more:
                    break
                resp = self.api_client.files_list_folder_continue(resp.cursor)
        except ApiError as e:
            if (isinstance(e.error, ListFolderError) and e.error.is_path()
                    and e.error.get_path().is_not_found()):
                log.Debug('dpbx.list(%s): ignore missing folder (%s)' %
                          (remote_dir, e))
            else:
                raise

        # Warn users of older dpbx versions about automatically renamed files
        self.check_renamed_files(res)

        return res

    @command()
    def _delete(self, filename):
        if not self.user_authenticated():
            self.login()

        remote_dir = urllib.unquote(self.parsed_url.path.lstrip('/'))
        remote_path = '/' + os.path.join(remote_dir, filename).rstrip()

        log.Debug('dpbx.files_delete(%s)' % remote_path)
        self.api_client.files_delete(remote_path)

        # files_permanently_delete seems to be better for backup purpose
        # but it's only available for Business accounts
        # self.api_client.files_permanently_delete(remote_path)

    @command()
    def _close(self):
        """close backend session? no! just "flush" the data"""
        log.Debug('dpbx.close():')

    @command()
    def _query(self, filename):
        if not self.user_authenticated():
            self.login()
        remote_dir = urllib.unquote(self.parsed_url.path.lstrip('/'))
        remote_path = '/' + os.path.join(remote_dir, filename).rstrip()

        log.Debug('dpbx.files_get_metadata(%s)' % remote_path)
        info = self.api_client.files_get_metadata(remote_path)
        log.Debug('dpbx.files_get_metadata(%s): %s' % (remote_path, info))
        return {'size': info.size}

    def check_renamed_files(self, file_list):
        if not self.user_authenticated():
            self.login()
        bad_list = [
            x for x in file_list
            if DPBX_AUTORENAMED_FILE_RE.search(x) is not None
        ]
        if len(bad_list) == 0:
            return
        log.Warn('-' * 72)
        log.Warn(
            'Warning! It looks like there are automatically renamed files on the backend'
        )
        log.Warn(
            'They were probably created when using an older version of duplicity.'
        )
        log.Warn('')
        log.Warn(
            'Please check your backup consistency. Most likely you will need to choose'
        )
        log.Warn(
            'the largest file from duplicity-* (number).gpg and remove the brackets from its name.'
        )
        log.Warn('')
        log.Warn(
            'These files are not managed by duplicity at all and will not be')
        log.Warn('removed/rotated automatically.')
        log.Warn('')
        log.Warn('Affected files:')
        for x in bad_list:
            log.Warn('\t%s' % x)
        log.Warn('')
        log.Warn('In any case it\'s better to create a full backup.')
        log.Warn('-' * 72)
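
The retry loop above wraps Dropbox's three-step upload-session protocol (start, append, finish). For reference, here is a minimal sketch of that sequence without any retry handling, assuming a token in the DPBX_ACCESS_TOKEN environment variable and a hypothetical chunked_upload helper:

import os

from dropbox import Dropbox
from dropbox.files import CommitInfo, UploadSessionCursor, WriteMode

CHUNK_SIZE = 4 * 1024 * 1024  # assumption: the usual 4 MiB chunk size


def chunked_upload(dbx, local_path, remote_path):
    """Upload local_path to remote_path through a Dropbox upload session."""
    file_size = os.path.getsize(local_path)
    with open(local_path, 'rb') as f:
        if file_size <= CHUNK_SIZE:
            # Small files fit into a single files_upload call
            return dbx.files_upload(f.read(), remote_path, mode=WriteMode.overwrite)

        # 1. start the session with the first chunk
        session = dbx.files_upload_session_start(f.read(CHUNK_SIZE))
        cursor = UploadSessionCursor(session_id=session.session_id, offset=f.tell())
        commit = CommitInfo(path=remote_path, mode=WriteMode.overwrite)

        # 2. append full chunks until only the last one remains
        while file_size - f.tell() > CHUNK_SIZE:
            dbx.files_upload_session_append_v2(f.read(CHUNK_SIZE), cursor)
            cursor.offset = f.tell()

        # 3. finish with the final chunk and the commit info
        return dbx.files_upload_session_finish(f.read(CHUNK_SIZE), cursor, commit)

# dbx = Dropbox(os.environ['DPBX_ACCESS_TOKEN'])
# chunked_upload(dbx, 'backup.tar.gz', '/backup.tar.gz')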
Example #12
class DropboxStorage(Storage):
    """
    A storage class providing access to resources in a Dropbox Public folder.
    """
    def __init__(self, location='/Public'):
        self.client = Dropbox(ACCESS_TOKEN)
        self.account_info = self.client.users_get_current_account()
        self.location = location
        self.base_url = 'https://dl.dropboxusercontent.com/'

    def _get_abs_path(self, name):
        return os.path.realpath(os.path.join(self.location, name))

    def _open(self, name, mode='rb'):
        name = self._get_abs_path(name)
        remote_file = DropboxFile(name, self, mode=mode)
        return remote_file

    def _save(self, name, content):
        name = self._get_abs_path(name)
        directory = os.path.dirname(name)
        if not self.exists(directory) and directory:
            self.client.files_create_folder(directory)
        # response = self.client.files_get_metadata(directory)
        # if not response['is_dir']:
        #     raise IOError("%s exists and is not a directory." % directory)
        abs_name = os.path.realpath(os.path.join(self.location, name))
        self.client.files_upload(content.read(), abs_name)
        return name

    def delete(self, name):
        name = self._get_abs_path(name)
        self.client.files_delete(name)

    def exists(self, name):
        name = self._get_abs_path(name)
        try:
            self.client.files_get_metadata(name)
        except ApiError as e:
            if e.error.is_path() and e.error.get_path().is_not_found():
                # not found
                return False
            raise e
        return True

    def listdir(self, path):
        path = self._get_abs_path(path)
        response = self.client.files_list_folder(path)
        directories = []
        files = []
        for entry in response.entries:
            if isinstance(entry, FolderMetadata):
                directories.append(os.path.basename(entry.path_display))
            elif isinstance(entry, FileMetadata):
                files.append(os.path.basename(entry.path_display))
        return directories, files

    def size(self, name):
        cache_key = 'django-dropbox-size:{}'.format(filepath_to_uri(name))
        size = cache.get(cache_key)

        if not size:
            size = self.client.files_get_metadata(name).size
            cache.set(cache_key, size, CACHE_TIMEOUT)
        return size

    def url(self, name):
        cache_key = 'django-dropbox-size:{}'.format(filepath_to_uri(name))
        url = cache.get(cache_key)

        if not url:
            url = self.client.files_get_temporary_link(name).link
            cache.set(cache_key, url, SHARE_LINK_CACHE_TIMEOUT)

        return url

    def get_available_name(self, name):
        """
        Returns a filename that's free on the target storage system, and
        available for new content to be written to.
        """
        name = self._get_abs_path(name)
        dir_name, file_name = os.path.split(name)
        file_root, file_ext = os.path.splitext(file_name)
        # If the filename already exists, add an underscore and a number (before
        # the file extension, if one exists) to the filename until the generated
        # filename doesn't exist.
        count = itertools.count(1)
        while self.exists(name):
            # file_ext includes the dot.
            name = os.path.join(
                dir_name, "%s_%s%s" % (file_root, next(count), file_ext))

        return name
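
get_available_name above keeps appending an underscore and a counter before the extension until the name is free. The same idea as a standalone sketch, with a hypothetical taken set standing in for exists():

import itertools
import os


def available_name(name, exists):
    """Return name, or name with an _N suffix, for which exists(name) is False."""
    dir_name, file_name = os.path.split(name)
    file_root, file_ext = os.path.splitext(file_name)
    counter = itertools.count(1)
    while exists(name):
        # file_ext includes the dot, so "report.txt" becomes "report_1.txt"
        name = os.path.join(dir_name, "%s_%s%s" % (file_root, next(counter), file_ext))
    return name

taken = {"/Public/report.txt", "/Public/report_1.txt"}
print(available_name("/Public/report.txt", taken.__contains__))  # /Public/report_2.txt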
Example #13
class DPBXBackend(duplicity.backend.Backend):
    """Connect to remote store using Dr*pB*x service"""

    def __init__(self, parsed_url):
        duplicity.backend.Backend.__init__(self, parsed_url)

        self.api_account = None
        self.api_client = None
        self.auth_flow = None

        self.login()

    def load_access_token(self):
        return os.environ.get('DPBX_ACCESS_TOKEN', None)

    def save_access_token(self, access_token):
        raise BackendException('dpbx: Please set DPBX_ACCESS_TOKEN=\"%s\" environment variable' % access_token)

    def obtain_access_token(self):
        log.Info("dpbx: trying to obtain access token")
        for env_var in ['DPBX_APP_KEY', 'DPBX_APP_SECRET']:
            if env_var not in os.environ:
                raise BackendException('dpbx: %s environment variable not set' % env_var)

        app_key = os.environ['DPBX_APP_KEY']
        app_secret = os.environ['DPBX_APP_SECRET']

        if not sys.stdout.isatty() or not sys.stdin.isatty():
            log.FatalError('dpbx error: cannot interact, but need human attention', log.ErrorCode.backend_command_error)

        auth_flow = DropboxOAuth2FlowNoRedirect(app_key, app_secret)
        log.Debug('dpbx,auth_flow.start()')
        authorize_url = auth_flow.start()
        print
        print '-' * 72
        print "1. Go to: " + authorize_url
        print "2. Click \"Allow\" (you might have to log in first)."
        print "3. Copy the authorization code."
        print '-' * 72
        auth_code = raw_input("Enter the authorization code here: ").strip()
        try:
            log.Debug('dpbx,auth_flow.finish(%s)' % auth_code)
            access_token, _ = auth_flow.finish(auth_code)
        except Exception as e:
            raise BackendException('dpbx: Unable to obtain access token: %s' % e)
        log.Info("dpbx: Authentication successfull")
        self.save_access_token(access_token)

    def login(self):
        if self.load_access_token() is None:
            self.obtain_access_token()

        self.api_client = Dropbox(self.load_access_token())
        self.api_account = None
        try:
            log.Debug('dpbx,users_get_current_account([token])')
            self.api_account = self.api_client.users_get_current_account()
            log.Debug("dpbx,%s" % self.api_account)

        except (BadInputError, AuthError) as e:
            log.Debug('dpbx,exception: %s' % e)
            log.Info("dpbx: Authentication failed. Trying to obtain new access token")

            self.obtain_access_token()

            # We're assuming obtain_access_token will throw an exception, so this line should not be reached
            raise BackendException("dpbx: Please update DPBX_ACCESS_TOKEN and try again")

        log.Info("dpbx: Successfully authenticated as %s" % self.api_account.name.display_name)

    def _error_code(self, operation, e):
        if isinstance(e, ApiError):
            err = e.error

            if isinstance(err, GetMetadataError) and err.is_path():
                if err.get_path().is_not_found():
                    return log.ErrorCode.backend_not_found
            elif isinstance(err, DeleteError) and err.is_path_lookup():
                lookup = e.error.get_path_lookup()
                if lookup.is_not_found():
                    return log.ErrorCode.backend_not_found

    @command()
    def _put(self, source_path, remote_filename):
        remote_dir = urllib.unquote(self.parsed_url.path.lstrip('/'))
        remote_path = '/' + os.path.join(remote_dir, remote_filename).rstrip()

        file_size = os.path.getsize(source_path.name)
        f = source_path.open('rb')
        try:
            progress.report_transfer(0, file_size)
            buf = f.read(DPBX_UPLOAD_CHUNK_SIZE)
            log.Debug('dpbx,files_upload_session_start([%d bytes]), total: %d' % (len(buf), file_size))
            upload_sid = self.api_client.files_upload_session_start(buf)
            log.Debug('dpbx,files_upload_session_start(): %s' % upload_sid)
            upload_cursor = UploadSessionCursor(upload_sid.session_id, f.tell())
            commit_info = CommitInfo(remote_path, mode=WriteMode.overwrite, autorename=False, client_modified=None, mute=True)
            res_metadata = None
            progress.report_transfer(f.tell(), file_size)

            requested_offset = None
            current_chunk_size = DPBX_UPLOAD_CHUNK_SIZE
            retry_number = globals.num_retries

            # We're doing our own error handling and retry logic because
            # we can take advantage of Dropbox chunked uploads and retry only the failed chunk
            while (f.tell() < file_size) or not res_metadata:
                try:
                    if requested_offset is not None:
                        upload_cursor.offset = requested_offset

                    if f.tell() != upload_cursor.offset:
                        f.seek(upload_cursor.offset)
                    buf = f.read(current_chunk_size)

                    # reset temporary status variables
                    requested_offset = None
                    current_chunk_size = DPBX_UPLOAD_CHUNK_SIZE
                    retry_number = globals.num_retries

                    if len(buf) != 0:
                        log.Debug('dpbx,files_upload_session_append([%d bytes], offset=%d)' % (len(buf), upload_cursor.offset))
                        self.api_client.files_upload_session_append(buf, upload_cursor.session_id, upload_cursor.offset)
                    else:
                        log.Debug('dpbx,files_upload_session_finish([%d bytes], offset=%d)' % (len(buf), upload_cursor.offset))
                        res_metadata = self.api_client.files_upload_session_finish(buf, upload_cursor, commit_info)

                    upload_cursor.offset = f.tell()
                    log.Debug('progress: %d of %d' % (upload_cursor.offset, file_size))
                    progress.report_transfer(upload_cursor.offset, file_size)
                except ApiError as e:
                    error = e.error
                    if isinstance(error, UploadSessionLookupError) and error.is_incorrect_offset():
                        # Server reports that we should send another chunk. Most likely this is caused by
                        # a network error during the previous upload attempt. In that case the server tells us
                        # the expected offset, so it's enough to just seek() there and retry
                        new_offset = error.get_incorrect_offset().correct_offset
                        log.Debug('dpbx,files_upload_session_append: incorrect offset: %d (expected: %s)' % (upload_cursor.offset, new_offset))
                        if requested_offset is not None:
                            # chunk failed even after the seek attempt. Something strange happened and there is no safe way to recover
                            raise BackendException("dpbx: unable to chunk upload")
                        else:
                            # will seek and retry
                            requested_offset = new_offset
                        continue
                    raise
                except ConnectionError as e:
                    log.Debug('dpbx,files_upload_session_append: %s' % e)

                    retry_number -= 1
                    if retry_number == 0:
                        raise

                    # We don't know for sure whether the partial upload succeeded, so it's better to retry a smaller amount to avoid extra re-uploading
                    log.Info('dpbx: sleeping a bit before chunk retry')
                    time.sleep(30)
                    current_chunk_size = DPBX_UPLOAD_CHUNK_SIZE // 5
                    requested_offset = None
                    continue

            if f.tell() != file_size:
                raise BackendException('dpbx: something wrong')

            log.Debug('dpbx,files_upload_session_finish(): %s' % res_metadata)
            progress.report_transfer(f.tell(), file_size)

            # A few sanity checks
            if res_metadata.path_display != remote_path:
                raise BackendException('dpbx: result path mismatch: %s (expected: %s)' % (res_metadata.path_display, remote_path))
            if res_metadata.size != file_size:
                raise BackendException('dpbx: result size mismatch: %s (expected: %s)' % (res_metadata.size, file_size))

        finally:
            f.close()

    @command()
    def _get(self, remote_filename, local_path):
        remote_dir = urllib.unquote(self.parsed_url.path.lstrip('/'))
        remote_path = '/' + os.path.join(remote_dir, remote_filename).rstrip()

        log.Debug('dpbx,files_download(%s)' % remote_path)
        res_metadata, http_fd = self.api_client.files_download(remote_path)
        log.Debug('dpbx,files_download(%s): %s, %s' % (remote_path, res_metadata, http_fd))
        file_size = res_metadata.size
        to_fd = None
        progress.report_transfer(0, file_size)
        try:
            to_fd = local_path.open('wb')
            for c in http_fd.iter_content(DPBX_DOWNLOAD_BUF_SIZE):
                to_fd.write(c)
                progress.report_transfer(to_fd.tell(), file_size)

        finally:
            if to_fd:
                to_fd.close()
            http_fd.close()

        # This differs from the _query() check because we don't query the metadata again.
        # Since the check is free, it's worth having here
        local_size = os.path.getsize(local_path.name)
        if local_size != file_size:
            raise BackendException("dpbx: wrong file size: %d (expected: %d)" % (local_size, file_size))

        local_path.setdata()

    @command()
    def _list(self):
        # Do a long listing to avoid connection reset
        remote_dir = '/' + urllib.unquote(self.parsed_url.path.lstrip('/')).rstrip()

        log.Debug('dpbx.files_list_folder(%s)' % remote_dir)
        resp = self.api_client.files_list_folder(remote_dir)
        log.Debug('dpbx.list(%s): %s' % (remote_dir, resp))

        res = []
        while True:
            res.extend([entry.name for entry in resp.entries])
            if not resp.has_more:
                break
            resp = self.api_client.files_list_folder_continue(resp.cursor)

        # Warn users of older dpbx versions about automatically renamed files
        self.check_renamed_files(res)

        return res

    @command()
    def _delete(self, filename):
        remote_dir = urllib.unquote(self.parsed_url.path.lstrip('/'))
        remote_path = '/' + os.path.join(remote_dir, filename).rstrip()

        log.Debug('dpbx.files_delete(%s)' % remote_path)
        self.api_client.files_delete(remote_path)

        # files_permanently_delete seems to be better for backup purpose
        # but it's only available for Business accounts
        # self.api_client.files_permanently_delete(remote_path)

    @command()
    def _close(self):
        """close backend session? no! just "flush" the data"""
        log.Debug('dpbx.close():')

    @command()
    def _query(self, filename):
        remote_dir = urllib.unquote(self.parsed_url.path.lstrip('/'))
        remote_path = '/' + os.path.join(remote_dir, filename).rstrip()

        log.Debug('dpbx.files_get_metadata(%s)' % remote_path)
        info = self.api_client.files_get_metadata(remote_path)
        log.Debug('dpbx.files_get_metadata(%s): %s' % (remote_path, info))
        return {'size': info.size}

    def check_renamed_files(self, file_list):
        bad_list = [x for x in file_list if DPBX_AUTORENAMED_FILE_RE.search(x) is not None]
        if len(bad_list) == 0:
            return
        log.Warn('-' * 72)
        log.Warn('Warning! It looks like there are automatically renamed files on the backend')
        log.Warn('They were probably created when using an older version of duplicity.')
        log.Warn('')
        log.Warn('Please check your backup consistency. Most likely you will need to choose')
        log.Warn('the largest file from duplicity-* (number).gpg and remove the brackets from its name.')
        log.Warn('')
        log.Warn('These files are not managed by duplicity at all and will not be')
        log.Warn('removed/rotated automatically.')
        log.Warn('')
        log.Warn('Affected files:')
        for x in bad_list:
            log.Warn('\t%s' % x)
        log.Warn('')
        log.Warn('In any case it\'s better to create a full backup.')
        log.Warn('-' * 72)
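
login() above treats BadInputError and AuthError raised by users_get_current_account() as a signal that a new token is needed. A minimal sketch of that probe on its own, with a hypothetical validate_token helper:

import os

from dropbox import Dropbox
from dropbox.exceptions import AuthError, BadInputError


def validate_token(access_token):
    """Return the account's display name if the token works, otherwise None."""
    dbx = Dropbox(access_token)
    try:
        account = dbx.users_get_current_account()
    except (BadInputError, AuthError):
        return None
    return account.name.display_name

# name = validate_token(os.environ.get('DPBX_ACCESS_TOKEN', ''))
# print(name or 'token invalid - obtain a new one')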
Example #14
class DropBoxStorage(Storage):
    """DropBox Storage class for Django pluggable storage system."""

    CHUNK_SIZE = 4 * 1024 * 1024

    def __init__(self, oauth2_access_token=None, root_path=None):
        oauth2_access_token = oauth2_access_token or setting('DROPBOX_OAUTH2_TOKEN')
        self.root_path = root_path or setting('DROPBOX_ROOT_PATH', '/')
        if oauth2_access_token is None:
            raise ImproperlyConfigured("You must configure a token auth at"
                                       "'settings.DROPBOX_OAUTH2_TOKEN'.")
        self.client = Dropbox(oauth2_access_token)

    def _full_path(self, name):
        if name == '/':
            name = ''
        return safe_join(self.root_path, name).replace('\\', '/')

    def delete(self, name):
        self.client.files_delete(self._full_path(name))

    def exists(self, name):
        try:
            return bool(self.client.files_get_metadata(self._full_path(name)))
        except ApiError:
            return False

    def listdir(self, path):
        directories, files = [], []
        full_path = self._full_path(path)

        if full_path == '/':
            full_path = ''

        # files_list_folder returns SDK v2 metadata objects, not the old v1 dicts
        metadata = self.client.files_list_folder(full_path)
        for entry in metadata.entries:
            if isinstance(entry, FolderMetadata):
                directories.append(entry.name)
            else:
                files.append(entry.name)
        return directories, files

    def size(self, name):
        metadata = self.client.files_get_metadata(self._full_path(name))
        return metadata.size

    def modified_time(self, name):
        metadata = self.client.files_get_metadata(self._full_path(name))
        return metadata.server_modified

    def accessed_time(self, name):
        metadata = self.client.files_get_metadata(self._full_path(name))
        # Note to the unwary, this is actually an mtime
        return metadata.client_modified

    def url(self, name):
        media = self.client.files_get_temporary_link(self._full_path(name))
        return media.link

    def _open(self, name, mode='rb'):
        remote_file = DropBoxFile(self._full_path(name), self)
        return remote_file

    def _save(self, name, content):
        content.open()
        if content.size <= self.CHUNK_SIZE:
            self.client.files_upload(content.read(), self._full_path(name))
        else:
            self._chunked_upload(content, self._full_path(name))
        content.close()
        return name

    def _chunked_upload(self, content, dest_path):
        upload_session = self.client.files_upload_session_start(
            content.read(self.CHUNK_SIZE)
        )
        cursor = UploadSessionCursor(
            session_id=upload_session.session_id,
            offset=content.tell()
        )
        commit = CommitInfo(path=dest_path)

        while content.tell() < content.size:
            if (content.size - content.tell()) <= self.CHUNK_SIZE:
                self.client.files_upload_session_finish(
                    content.read(self.CHUNK_SIZE), cursor, commit
                )
            else:
                self.client.files_upload_session_append_v2(
                    content.read(self.CHUNK_SIZE), cursor
                )
                cursor.offset = content.tell()
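
Since DropBoxStorage implements Django's standard Storage interface, the chunked upload above is reached simply through save(). A small usage sketch, assuming DROPBOX_OAUTH2_TOKEN and DROPBOX_ROOT_PATH are configured in settings and using a hypothetical file name:

from django.core.files.base import ContentFile

storage = DropBoxStorage()  # reads the token and root path from settings
saved_name = storage.save('backups/db.dump', ContentFile(b'pg_dump output ...'))
print(storage.size(saved_name), storage.url(saved_name))
storage.delete(saved_name)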
Example #15
def push_to_dropbox(branch_name, symbol, gui):
    global settings  # Read

    saves_path = settings["SAVES_DIR"]
    temp_dir = settings["TEMP_DIR"]

    if settings["OAUTH"] == 'null':
        return "Please type in /login to use this feature"

    # clear temp_dir
    for path_temp in listdir(temp_dir):
        remove(path.join(temp_dir, path_temp))

    # archive worlds starting with 'symbol' to temp_dir
    for path_save in listdir(saves_path):
        file_path = path.join(saves_path, path_save)
        if path.isdir(file_path) and path_save[0] == symbol:
            make_archive(path.join(temp_dir, path_save), 'zip', file_path)

    dbx = Dropbox(settings["OAUTH"].access_token)

    # clear branch_directory in dropbox
    try:
        if settings["CONFIRM"]:
            confirm = simpledialog.askstring(
                "Confirm",
                "Type in 'YES' if you wish to proceed. This will delete the current '{0}'"
                " branch if it already exists in dropbox".format(branch_name))
            if not confirm == "YES":
                return "Action Not "
        dbx.files_delete("/" + branch_name)
    except Exception:
        pass

    println("Starting upload... ", gui)
    println(
        "Do not close the app until 'done uploading' message is shown on the console",
        gui)

    # upload every zip file to dropbox in temp_dir
    for path_temp in listdir(temp_dir):
        zip_file = path.join(temp_dir, path_temp)
        destination = "/" + branch_name + "/" + path_temp

        with open(zip_file, "rb") as f:
            file_size = path.getsize(zip_file)
            if file_size < CHUNK_SIZE:
                dbx.files_upload(f.read(), destination)
            else:
                # upload_session_start_result
                upload_ssr = dbx.files_upload_session_start(f.read(CHUNK_SIZE))

                cursor = files.UploadSessionCursor(
                    session_id=upload_ssr.session_id, offset=f.tell())
                commit = files.CommitInfo(path=destination)

                while f.tell() < file_size:
                    percent = str(f.tell() / file_size * 100) + "%"
                    print(percent)

                    if (file_size - f.tell()) <= CHUNK_SIZE:
                        dbx.files_upload_session_finish(
                            f.read(CHUNK_SIZE), cursor, commit)
                    else:
                        dbx.files_upload_session_append(
                            f.read(CHUNK_SIZE), cursor.session_id,
                            cursor.offset)
                        cursor.offset = f.tell()

    # clear temp_dir
    for path_temp in listdir(temp_dir):
        remove(path.join(temp_dir, path_temp))

    save(settings)

    return "Done Uploading"
Example #16
class DropboxUploader(BaseUploader):
    def __init__(self):
        super(DropboxUploader, self).__init__()
        self.oauth2_access_token = os.getenv(
            "DROPBOX_OAUTH2_TOKEN") or get_app_config("DROPBOX_OAUTH2_TOKEN")
        self.root_path = (os.getenv("DROPBOX_ROOT_PATH")
                          or get_app_config("DROPBOX_ROOT_PATH") or "/CTFd")
        self.client = Dropbox(self.oauth2_access_token, timeout=100)
        self.write_mode = "add"  # can be set to overwrite

    def _clean_filename(self, c):
        if c in string.ascii_letters + string.digits + "-" + "_" + ".":
            return True

    def _full_path(self, name):
        return safe_join(self.root_path, name).replace("\\", "/")

    def store(self, fileobj, filename):
        self.client.files_upload(fileobj.read(),
                                 self._full_path(filename),
                                 mode=WriteMode(self.write_mode))
        return filename

    def upload(self, file_obj, filename):
        filename = filter(self._clean_filename,
                          secure_filename(filename).replace(" ", "_"))
        filename = "".join(filename)
        if len(filename) <= 0:
            return False

        md5hash = hexencode(os.urandom(16))

        dst = md5hash + "/" + filename
        self.store(file_obj, dst)
        return dst

    def download(self, filename):
        media = self.client.files_get_temporary_link(self._full_path(filename))
        print(media.link)
        return redirect(media.link)

    def delete(self, filename):
        directory = os.path.dirname(self._full_path(filename))
        self.client.files_delete(directory)
        return True

    def sync(self):
        local_folder = current_app.config.get("UPLOAD_FOLDER")

        root_metadata = self.client.files_list_folder(self.root_path)

        for folder_entry in root_metadata.entries:
            if isinstance(folder_entry, FolderMetadata):
                filemetadata = self.client.files_list_folder(
                    folder_entry.path_lower)
                for file_entry in filemetadata.entries:
                    if not isinstance(file_entry, FolderMetadata):

                        dropbox_path = file_entry.path_lower.replace(
                            self.root_path.lower() + "/", "")
                        local_path = os.path.join(local_folder, dropbox_path)
                        directory = os.path.dirname(local_path)
                        if not os.path.exists(directory):
                            os.makedirs(directory)

                        self.client.files_download_to_file(
                            local_path, file_entry.path_lower)
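
sync() above mirrors the remote tree with files_download_to_file, creating local directories as needed. A minimal sketch of that call for a single file, with hypothetical local and remote paths:

import os

from dropbox import Dropbox

client = Dropbox(os.getenv("DROPBOX_OAUTH2_TOKEN"))           # assumes the token env var is set
local_path = "uploads/abc123/flag.txt"                        # hypothetical local target
os.makedirs(os.path.dirname(local_path), exist_ok=True)
client.files_download_to_file(local_path, "/CTFd/abc123/flag.txt")  # hypothetical remote path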
Example #17
class Dropbox(BlackboxStorage):
    """Storage handler that uploads backups to Dropbox."""

    required_fields = ("access_token", )

    def __init__(self, **kwargs):
        super().__init__(**kwargs)

        self.upload_base = self.config.get("upload_directory") or "/"
        self.client = DropboxClient(self.config["access_token"])
        self.valid = self._validate_token()

    def _validate_token(self):
        """Check if dropbox token is valid."""
        try:
            return self.client.check_user("test").result == "test"
        except AuthError:
            return False

    def sync(self, file_path: Path) -> None:
        """Sync a file to Dropbox."""
        # Check if Dropbox token is valid.
        if self.valid is False:
            error = "Dropbox token is invalid!"
            self.success = False
            self.output = error
            log.error(error)
            return None

        # This is the size that can be uploaded as one chunk.
        # When the file is bigger than that, it will be uploaded
        # in multiple parts.
        chunk_size = 4 * 1024 * 1024

        temp_file, recompressed = self.compress(file_path)
        upload_path = f"{self.upload_base}{file_path.name}{'.gz' if recompressed else ''}"

        try:
            with temp_file as f:
                file_size = os.stat(f.name).st_size
                log.debug(file_size)
                if file_size <= chunk_size:
                    self.client.files_upload(f.read(), upload_path,
                                             WriteMode.overwrite)
                else:
                    session_start = self.client.files_upload_session_start(
                        f.read(chunk_size))
                    cursor = UploadSessionCursor(session_start.session_id,
                                                 offset=f.tell())
                    # The commit holds the destination path in Dropbox and the write mode for the file
                    commit = CommitInfo(upload_path, WriteMode.overwrite)

                    while f.tell() < file_size:
                        if (file_size - f.tell()) <= chunk_size:
                            self.client.files_upload_session_finish(
                                f.read(chunk_size), cursor, commit)
                        else:
                            self.client.files_upload_session_append(
                                f.read(chunk_size), cursor.session_id,
                                cursor.offset)
                            cursor.offset = f.tell()
            self.success = True
        except (ApiError, HttpError) as e:
            log.error(e)
            self.success = False
            self.output = str(e)

    def rotate(self, database_id: str) -> None:
        """
        Rotate the files in the Dropbox directory.

        All files in base directory of backups will be deleted when they
        are older than `retention_days`, and because of this,
        it's better to have backups in isolated folder.
        """
        # Check if Dropbox token is valid.
        if self.valid is False:
            log.error("Dropbox token is invalid - Can't delete old backups!")
            return None
        # Let's rotate only this type of database
        db_type_regex = rf"{database_id}_blackbox_\d{{2}}_\d{{2}}_\d{{4}}.+"

        # Receive first batch of files.
        files_result = self.client.files_list_folder(
            self.upload_base if self.upload_base != "/" else "")
        entries = [
            entry for entry in files_result.entries
            if self._is_backup_file(entry, db_type_regex)
        ]

        # If there are more files, receive all of them.
        while files_result.has_more:
            cursor = files_result.cursor
            files_result = self.client.files_list_folder_continue(cursor)
            entries += [
                entry for entry in files_result.entries
                if self._is_backup_file(entry, db_type_regex)
            ]

        retention_days = 7
        if Blackbox.retention_days:
            retention_days = Blackbox.retention_days

        # Find all old files and delete them.
        for item in entries:
            last_modified = item.server_modified
            now = datetime.now(tz=last_modified.tzinfo)
            delta = now - last_modified
            if delta.days >= retention_days:
                self.client.files_delete(item.path_lower)

    @staticmethod
    def _is_backup_file(entry, db_type_regex) -> bool:
        """Check if file is actually this kind of database backup."""
        return isinstance(entry, FileMetadata) and re.match(
            db_type_regex, entry.name)
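
The retention check in rotate() compares each entry's server_modified timestamp against the configured number of days. The same check as a small sketch, with a hypothetical is_expired helper over FileMetadata entries:

from datetime import datetime


def is_expired(entry, retention_days=7):
    # entry is a dropbox.files.FileMetadata; server_modified is what rotate() relies on
    last_modified = entry.server_modified
    now = datetime.now(tz=last_modified.tzinfo)
    return (now - last_modified).days >= retention_days

# for entry in entries:
#     if is_expired(entry, retention_days):
#         client.files_delete(entry.path_lower)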
Example #18
class DropboxStorage(Storage):
    """
    A storage class providing access to resources in a Dropbox folder.
    """
    def __init__(self, token=ACCESS_TOKEN, location=ROOT_FOLDER):
        if not token:
            raise ImproperlyConfigured("You must configure an access token at "
                                       "'settings.DROPBOX_ACCESS_TOKEN'.")

        self.client = Dropbox(token)
        self.account_info = self.client.users_get_current_account()
        self.location = location or DEFAULT_ROOT_FOLDER
        self.base_url = 'https://dl.dropboxusercontent.com/'

    def _get_abs_path(self, name):
        return os.path.realpath(os.path.join(self.location, name))

    def _open(self, name, mode='rb'):
        name = self._get_abs_path(name)
        remote_file = DropboxFile(name, self, mode=mode)
        return remote_file

    def _save(self, name, content):
        name = self._get_abs_path(name)
        directory = os.path.dirname(name)
        if not self.exists(directory) and directory:
            self.client.files_create_folder(directory)
        # response = self.client.files_get_metadata(directory)
        # if not response['is_dir']:
        #     raise IOError("%s exists and is not a directory." % directory)
        abs_name = os.path.realpath(os.path.join(self.location, name))
        self.client.files_upload(content.read(), abs_name)
        return name

    def delete(self, name):
        name = self._get_abs_path(name)
        try:
            self.client.files_delete(name)
        except ApiError as e:
            if isinstance(e.error, DeleteError)\
                    and e.error.is_path_lookup()\
                    and e.error.get_path_lookup().is_not_found():
                # not found
                return False
            # error
            raise e
        # deleted
        return True

    def exists(self, name):
        name = self._get_abs_path(name)
        try:
            self.client.files_get_metadata(name)
        except ApiError as e:
            if hasattr(e.error, 'is_path')\
                    and e.error.is_path()\
                    and e.error.get_path().is_not_found():
                # not found
                return False
            # error
            raise e
        # found
        return True

    def listdir(self, path):
        path = self._get_abs_path(path)
        response = self.client.files_list_folder(path)
        directories = []
        files = []
        for entry in response.entries:
            if isinstance(entry, FolderMetadata):
                directories.append(os.path.basename(entry.path_display))
            elif isinstance(entry, FileMetadata):
                files.append(os.path.basename(entry.path_display))
        return directories, files

    def size(self, name):
        name = self._get_abs_path(name)
        return self.client.files_get_metadata(name).size

    def url(self, name):
        name = self._get_abs_path(name)
        return self.client.files_get_temporary_link(name).link

    def modified_time(self, name):
        name = self._get_abs_path(name)
        return self.client.files_get_metadata(name).server_modified

    def accessed_time(self, name):
        name = self._get_abs_path(name)
        # Note to the unwary, this is actually an mtime
        return self.client.files_get_metadata(name).client_modified

    def get_available_name(self, name, max_length=None):
        """
        Returns a filename that's free on the target storage system, and
        available for new content to be written to.
        """
        name = self._get_abs_path(name)
        dir_name, file_name = os.path.split(name)
        file_root, file_ext = os.path.splitext(file_name)
        # If the filename already exists, add an underscore and a number (before
        # the file extension, if one exists) to the filename until the generated
        # filename doesn't exist.
        count = itertools.count(1)
        while self.exists(name):
            # file_ext includes the dot.
            _fn = "%s_%s%s" % (file_root, count.next(), file_ext)
            name = os.path.join(dir_name, _fn)

        return name
Example #19
class DropBoxStorage(Storage):
    """DropBox Storage class for Django pluggable storage system."""

    CHUNK_SIZE = 4 * 1024 * 1024

    def __init__(self, oauth2_access_token=None, root_path=None, timeout=None):
        oauth2_access_token = oauth2_access_token or setting(
            'DROPBOX_OAUTH2_TOKEN')
        if oauth2_access_token is None:
            raise ImproperlyConfigured("You must configure an auth token at"
                                       "'settings.DROPBOX_OAUTH2_TOKEN'.")

        self.root_path = root_path or setting('DROPBOX_ROOT_PATH', '/')
        timeout = timeout or setting('DROPBOX_TIMEOUT', _DEFAULT_TIMEOUT)
        self.client = Dropbox(oauth2_access_token, timeout=timeout)

    def _full_path(self, name):
        if name == '/':
            name = ''
        return safe_join(self.root_path, name).replace('\\', '/')

    def delete(self, name):
        self.client.files_delete(self._full_path(name))

    def exists(self, name):
        try:
            return bool(self.client.files_get_metadata(self._full_path(name)))
        except ApiError:
            return False

    def listdir(self, path):
        directories, files = [], []
        full_path = self._full_path(path)

        if full_path == '/':
            full_path = ''

        metadata = self.client.files_list_folder(full_path)
        for entry in metadata.entries:
            if isinstance(entry, FolderMetadata):
                directories.append(entry.name)
            else:
                files.append(entry.name)
        return directories, files

    def size(self, name):
        metadata = self.client.files_get_metadata(self._full_path(name))
        return metadata.size

    def modified_time(self, name):
        metadata = self.client.files_get_metadata(self._full_path(name))
        return metadata.server_modified

    def accessed_time(self, name):
        metadata = self.client.files_get_metadata(self._full_path(name))
        return metadata.client_modified

    def url(self, name):
        media = self.client.files_get_temporary_link(self._full_path(name))
        return media.link

    def _open(self, name, mode='rb'):
        remote_file = DropBoxFile(self._full_path(name), self)
        return remote_file

    def _save(self, name, content):
        content.open()
        if content.size <= self.CHUNK_SIZE:
            self.client.files_upload(content.read(), self._full_path(name))
        else:
            self._chunked_upload(content, self._full_path(name))
        content.close()
        return name

    def _chunked_upload(self, content, dest_path):
        upload_session = self.client.files_upload_session_start(
            content.read(self.CHUNK_SIZE))
        cursor = UploadSessionCursor(session_id=upload_session.session_id,
                                     offset=content.tell())
        commit = CommitInfo(path=dest_path)

        while content.tell() < content.size:
            if (content.size - content.tell()) <= self.CHUNK_SIZE:
                self.client.files_upload_session_finish(
                    content.read(self.CHUNK_SIZE), cursor, commit)
            else:
                self.client.files_upload_session_append_v2(
                    content.read(self.CHUNK_SIZE), cursor)
                cursor.offset = content.tell()
Example #20
class DropboxFS(FS):
    def __init__(self, accessToken):
        super().__init__()
        self.dropbox = Dropbox(accessToken)
        _meta = self._meta = {
            "case_insensitive": False,  # I think?
            "invalid_path_chars": ":",  # not sure what else
            "max_path_length": None,  # don't know what the limit is
            "max_sys_path_length": None,  # there's no syspath
            "network": True,
            "read_only": False,
            "supports_rename": False  # since we don't have a syspath...
        }

    def __repr__(self):
        return "<DropboxDriveFS>"

    def _infoFromMetadata(self, metadata):  # pylint: disable=no-self-use
        rawInfo = {
            "basic": {
                "name": metadata.name,
                "is_dir": isinstance(metadata, FolderMetadata),
            }
        }
        if isinstance(metadata, FileMetadata):
            rawInfo.update({
                "details": {
                    "accessed": None,  # not supported by Dropbox API
                    "created": None,  # not supported by Dropbox API?,
                    "metadata_changed": None,  # not supported by Dropbox
                    "modified": datetime_to_epoch(
                        metadata.server_modified
                    ),  # API documentation says that this is reliable
                    "size": metadata.size,
                    "type": 0
                },
                "dropbox": {
                    "content_hash": metadata.
                    content_hash,  # see https://www.dropbox.com/developers/reference/content-hash
                    "rev": metadata.rev,
                    "client_modified": metadata.
                    client_modified  # unverified value coming from dropbox clients
                }
            })
            if metadata.media_info is not None and metadata.media_info.is_metadata():
                media_info_metadata = metadata.media_info.get_metadata()
                if media_info_metadata.time_taken is not None:
                    rawInfo.update({
                        "media_info": {
                            "taken_date_time":
                            datetime_to_epoch(media_info_metadata.time_taken)
                        }
                    })
                if media_info_metadata.location is not None:
                    rawInfo.update({
                        "media_info": {
                            "location_latitude":
                            media_info_metadata.location.latitude,
                            "location_longitude":
                            media_info_metadata.location.longitude
                        }
                    })
                # Dropbox doesn't parse some jpgs properly
                if media_info_metadata.dimensions is not None:
                    rawInfo.update({
                        "media_info": {
                            "dimensions_height":
                            media_info_metadata.dimensions.height,
                            "dimensions_width":
                            media_info_metadata.dimensions.width
                        }
                    })
        elif isinstance(metadata, FolderMetadata):
            rawInfo.update({
                "details": {
                    "accessed": None,  # not supported by Dropbox API
                    "created": None,  # not supported by Dropbox API,
                    "metadata_changed": None,  # not supported by Dropbox
                    "modified": None,  # not supported for folders
                    "size": None,  # not supported for folders
                    "type": 1
                }
            })
        else:
            assert False, f"{metadata.name}, {metadata}, {type(metadata)}"
        return Info(rawInfo)

    def getinfo(self, path, namespaces=None):
        if path == "/":
            return Info({"basic": {"name": "", "is_dir": True}})
        try:
            if not path.startswith("/"):
                path = "/" + path
            metadata = self.dropbox.files_get_metadata(path,
                                                       include_media_info=True)
        except ApiError as e:
            raise ResourceNotFound(path=path, exc=e)
        return self._infoFromMetadata(metadata)

    def setinfo(self, path, info):  # pylint: disable=too-many-branches
        # dropbox doesn't support changing any of the metadata values
        pass

    def listdir(self, path):
        return [x.name for x in self.scandir(path)]

    def makedir(self, path, permissions=None, recreate=False):
        try:
            folderMetadata = self.dropbox.files_create_folder(path)
        except ApiError as e:
            assert isinstance(e.error, CreateFolderError)
            # TODO - there are other possibilities
            raise DirectoryExpected(path=path)
        # don't need to close this filesystem so we return the non-closing version
        return SubFS(self, path)

    def openbin(self, path, mode="r", buffering=-1, **options):
        mode = Mode(mode)
        exists = True
        isDir = False
        try:
            isDir = self.getinfo(path).is_dir
        except ResourceNotFound:
            exists = False
        if mode.exclusive and exists:
            raise FileExists(path)
        elif mode.reading and not mode.create and not exists:
            raise ResourceNotFound(path)
        elif isDir:
            raise FileExpected(path)
        return DropboxFile(self.dropbox, path, mode)

    def remove(self, path):
        try:
            self.dropbox.files_delete(path)
        except ApiError as e:
            raise FileExpected(path=path, exc=e)

    def removedir(self, path):
        try:
            self.dropbox.files_delete(path)
        except ApiError as e:
            assert isinstance(e.error, DeleteError)
            raise DirectoryExpected(path=path, exc=e)

    # non-essential method - for speeding up walk
    def scandir(self, path, namespaces=None, page=None):
        #
        if path == "/":
            path = ""
        # get all the available metadata since it's cheap
        # TODO - this call has a recursive flag so we can either use that and cache OR override walk
        result = self.dropbox.files_list_folder(path, include_media_info=True)
        allEntries = result.entries
        while result.has_more:
            result = self.dropbox.files_list_folder_continue(result.cursor)
            allEntries += result.entries
        return [self._infoFromMetadata(x) for x in allEntries]
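
With getinfo() and scandir() in place, the filesystem can be used through the normal PyFilesystem calls. A small usage sketch, assuming a valid token in DROPBOX_ACCESS_TOKEN and that /notes.txt exists remotely:

import os

fs = DropboxFS(os.environ["DROPBOX_ACCESS_TOKEN"])
print(fs.listdir("/"))            # names of entries in the root folder
info = fs.getinfo("/notes.txt")   # Info built from FileMetadata by _infoFromMetadata
print(info.name, info.is_dir)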
Example #21
class DropBoxStorage(Storage):
    """DropBox Storage class for Django pluggable storage system."""

    CHUNK_SIZE = 4 * 1024 * 1024

    def __init__(self, oauth2_access_token=None, root_path=None):
        oauth2_access_token = oauth2_access_token or setting(
            'DROPBOX_OAUTH2_TOKEN')
        self.root_path = root_path or setting('DROPBOX_ROOT_PATH', '/')
        if oauth2_access_token is None:
            raise ImproperlyConfigured("You must configure a token auth at"
                                       "'settings.DROPBOX_OAUTH2_TOKEN'.")
        self.client = Dropbox(oauth2_access_token)

    def _full_path(self, path):
        path = PurePosixPath(self.root_path) / path
        path = str(path)

        if path == '/':
            path = ''

        return path

    def delete(self, name):
        self.client.files_delete(self._full_path(name))

    def exists(self, name):
        try:
            return bool(self.client.files_get_metadata(self._full_path(name)))
        except ApiError:
            return False

    def listdir(self, path):
        directories, files = [], []
        full_path = self._full_path(path)
        result = self.client.files_list_folder(full_path)

        for entry in result.entries:
            if isinstance(entry, FolderMetadata):
                directories.append(entry.name)
            else:
                files.append(entry.name)

        assert not result.has_more, "FIXME: Not implemented!"

        return directories, files

    def size(self, name):
        metadata = self.client.files_get_metadata(self._full_path(name))
        return metadata.size

    def modified_time(self, name):
        metadata = self.client.files_get_metadata(self._full_path(name))
        return metadata.server_modified

    def accessed_time(self, name):
        metadata = self.client.files_get_metadata(self._full_path(name))
        # Note to the unwary, this is actually an mtime
        return metadata.client_modified

    def url(self, name):
        try:
            media = self.client.files_get_temporary_link(self._full_path(name))
            return media.link
        except ApiError:
            raise ValueError("This file is not accessible via a URL.")

    def _open(self, name, mode='rb'):
        return DropBoxFile(self._full_path(name), self)

    def _save(self, name, content):
        try:
            content.open()

            if content.size <= self.CHUNK_SIZE:
                self.client.files_upload(content.read(), self._full_path(name))
            else:
                self._chunked_upload(content, self._full_path(name))

        finally:
            content.close()

        return name

    def _chunked_upload(self, content, dest_path):
        upload_session = self.client.files_upload_session_start(
            content.read(self.CHUNK_SIZE))
        cursor = UploadSessionCursor(session_id=upload_session.session_id,
                                     offset=content.tell())
        commit = CommitInfo(path=dest_path)

        while content.tell() < content.size:
            if (content.size - content.tell()) <= self.CHUNK_SIZE:
                self.client.files_upload_session_finish(
                    content.read(self.CHUNK_SIZE), cursor, commit)
            else:
                self.client.files_upload_session_append_v2(
                    content.read(self.CHUNK_SIZE), cursor)
                cursor.offset = content.tell()
Example #22
class DropboxStorage(Storage):
    """
    A storage class providing access to resources in a Dropbox Public folder.
    """

    def __init__(self, location='/Public'):
        self.client = Dropbox(ACCESS_TOKEN)
        self.account_info = self.client.users_get_current_account()
        self.location = location
        self.base_url = 'https://dl.dropboxusercontent.com/'

    def _get_abs_path(self, name):
        return os.path.realpath(os.path.join(self.location, name))

    def _open(self, name, mode='rb'):
        name = self._get_abs_path(name)
        remote_file = DropboxFile(name, self, mode=mode)
        return remote_file

    def _save(self, name, content):
        name = self._get_abs_path(name)
        directory = os.path.dirname(name)
        if not self.exists(directory) and directory:
            self.client.files_create_folder(directory)
        # response = self.client.files_get_metadata(directory)
        # if not response['is_dir']:
        #     raise IOError("%s exists and is not a directory." % directory)
        abs_name = os.path.realpath(os.path.join(self.location, name))
        self.client.files_upload(content.read(), abs_name)
        return name

    def delete(self, name):
        name = self._get_abs_path(name)
        self.client.files_delete(name)

    def exists(self, name):
        name = self._get_abs_path(name)
        try:
            self.client.files_get_metadata(name)
        except ApiError as e:
            if e.error.is_path() and e.error.get_path().is_not_found():  # not found
                return False
            raise e
        return True

    def listdir(self, path):
        path = self._get_abs_path(path)
        response = self.client.files_list_folder(path)
        directories = []
        files = []
        for entry in response.entries:
            if isinstance(entry, FolderMetadata):
                directories.append(os.path.basename(entry.path_display))
            elif isinstance(entry, FileMetadata):
                files.append(os.path.basename(entry.path_display))
        return directories, files

    def size(self, name):
        cache_key = 'django-dropbox-size:{}'.format(filepath_to_uri(name))
        size = cache.get(cache_key)

        if not size:
            size = self.client.files_get_metadata(name).size
            cache.set(cache_key, size, CACHE_TIMEOUT)
        return size

    def url(self, name):
        if name.startswith(self.location):
            name = name[len(self.location) + 1:]

        name = os.path.basename(self.location) + "/" + name

        if self.base_url is None:
            raise ValueError("This file is not accessible via a URL.")

        myurl = urlparse.urljoin(self.base_url, filepath_to_uri(name))

        if "static" not in self.location:
            # Use a dynamic URL for "non-static" files.
            try:
                new_name = os.path.dirname(self.location) + "/" + name
                fp = filepath_to_uri(new_name)
                cache_key = 'django-dropbox-size:{}'.format(fp)
                myurl = cache.get(cache_key)
                if not myurl:
                    try:
                        shared_link = self.client.sharing_create_shared_link(fp)
                        myurl = shared_link.url + '&raw=1'
                        logger.debug("shared link: {0}, myurl: {1}".format(shared_link, myurl))
                    except Exception as e:
                        logger.exception(e)
                    if myurl is None:
                        temp_link = self.client.files_get_temporary_link(fp)
                        myurl = temp_link.link
                        logger.debug("temp link: {0}, myurl: {1}".format(temp_link, myurl))
                    cache.set(cache_key, myurl, SHARE_LINK_CACHE_TIMEOUT)
            except Exception as e:
                logger.exception(e)

        return myurl

        """
Example #23
class DropBoxStorage(Storage):
    """DropBox Storage class for Django pluggable storage system."""

    def __init__(self, oauth2_access_token=None, root_path=None):
        oauth2_access_token = oauth2_access_token or setting('DROPBOX_OAUTH2_TOKEN')
        self.root_path = root_path or setting('DROPBOX_ROOT_PATH', '/')
        if oauth2_access_token is None:
            raise ImproperlyConfigured("You must configure a token auth at"
                                       "'settings.DROPBOX_OAUTH2_TOKEN'.")
        self.client = Dropbox(oauth2_access_token)

    def _full_path(self, name):
        if name == '/':
            name = ''
        return safe_join(self.root_path, name).replace('\\', '/')

    def delete(self, name):
        self.client.files_delete(self._full_path(name))

    def exists(self, name):
        try:
            return bool(self.client.files_get_metadata(self._full_path(name)))
        except ApiError:
            return False

    def listdir(self, path):
        directories, files = [], []
        full_path = self._full_path(path)

        if full_path == '/':
            full_path = ''

        # files_list_folder returns SDK v2 metadata objects, not the old v1 dicts
        metadata = self.client.files_list_folder(full_path)
        for entry in metadata.entries:
            if isinstance(entry, FolderMetadata):
                directories.append(entry.name)
            else:
                files.append(entry.name)
        return directories, files

    def size(self, name):
        metadata = self.client.files_get_metadata(self._full_path(name))
        return metadata.size

    def modified_time(self, name):
        metadata = self.client.files_get_metadata(self._full_path(name))
        return metadata.server_modified

    def accessed_time(self, name):
        metadata = self.client.files_get_metadata(self._full_path(name))
        # Note to the unwary, this is actually an mtime
        return metadata.client_modified

    def url(self, name):
        media = self.client.files_get_temporary_link(self._full_path(name))
        return media.link

    def _open(self, name, mode='rb'):
        remote_file = DropBoxFile(self._full_path(name), self)
        return remote_file

    def _save(self, name, content):
        # files_upload expects bytes, so read the content before uploading
        self.client.files_upload(content.read(), self._full_path(name))
        return name