    def rename_file(self, old_path, new_path):
        self.log.debug('[rename_file] Renaming {} to {}'.format(
            old_path, new_path))

        if old_path == '':
            http_400('Cannot rename root.')

        if self.exists(new_path):
            http_409('File already exists ({}).'.format(new_path))

        owner, dataset_id, file_path = self._to_dw_path(new_path)

        if self.dir_exists(old_path):
            # This is an account, dataset/project or subdirectory
            if self.compatibility_mode:
                dataset = self.api.get_dataset(owner, dataset_id)
                parent = directory_path(old_path)
                for f in dataset.get('files', []):
                    if f['name'].startswith(parent):
                        self.rename_file(
                            f['name'],
                            normalize_path(new_path, f['name'][len(parent):]))
                # The directory itself has no representation of its own;
                # once its files are renamed there is nothing left to move.
                return
            else:
                http_400('Only files can be renamed.')

        if file_path is None:
            http_400('Invalid path ({}). Files can only be created within '
                     'datasets or data projects.'.format(new_path))

        old_file = self.get(old_path, content=True)
        self.save(old_file, new_path)
        self.delete_file(old_path)
Example #2
    def get(self, path, content=True, type=None, format=None):
        """
        Special case handling for listing root dir.
        """
        path = normalize_path(path)
        if path:
            return self.__get(path, content=content, type=type, format=format)
        if not content:
            return base_directory_model('')

        extra_content = self._extra_root_dirs()
        rm = self.root_manager
        if rm is None:
            root_model = base_directory_model('')
            root_model.update(
                format='json',
                content=extra_content,
            )
        else:
            root_model = rm.get(
                path,
                content=content,
                type=type,
                format=format,
            )
            # Append the extra directories.
            root_model['content'].extend(extra_content)
        return root_model
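
# `base_directory_model` is not shown in these examples. A minimal sketch of
# what such a helper could look like, assuming it follows Jupyter's standard
# directory-model shape (the helper that actually ships with the package may
# differ):
def base_directory_model(name):
    # Hypothetical stand-in: a bare directory model whose 'content' and
    # 'format' stay unset until the caller fills them in, as `get` does above.
    return {
        'type': 'directory',
        'name': name,
        'path': name,
        'writable': True,
        'created': None,
        'last_modified': None,
        'content': None,
        'format': None,
        'mimetype': None,
    }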
def test_normalize_path():
    assert_that(normalize_path(''), equal_to(''))
    assert_that(normalize_path('/'), equal_to(''))
    assert_that(normalize_path('', ''), equal_to(''))
    assert_that(normalize_path('path'), equal_to('path'))
    assert_that(normalize_path('/path'), equal_to('path'))
    assert_that(normalize_path('path/path/'), equal_to('path/path'))
    assert_that(normalize_path('path/', '/path'), equal_to('path/path'))
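
# The assertions above pin down normalize_path's contract: segments are joined
# with single '/' separators, surrounding slashes are stripped, and empty
# segments collapse to ''. A minimal sketch consistent with those assertions
# (the packaged implementation may differ in details):
def normalize_path(*parts):
    # Strip surrounding slashes from each part, drop empty segments, and
    # re-join, so '' and '/' normalize to '' and ('path/', '/path') becomes
    # 'path/path'.
    segments = [p.strip('/') for p in parts if p and p.strip('/')]
    return '/'.join(segments)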
Example #4
    def delete(self, path):
        """
        Ensure that roots of our managers can't be deleted.  This should be
        enforced by https://github.com/ipython/ipython/pull/8168, but rogue
        implementations might override this behavior.
        """
        path = normalize_path(path)
        if path in self.managers:
            raise HTTPError(
                400, 'Can\'t delete root of %s' % self.managers[path]
            )
        return self.__delete(path)
    def map_subdir(self, subdir, parent, dataset_obj, include_content=False):
        self.log.debug('[map_subdir] s:{} p:{} d:{} c:{}'.format(
            subdir, parent, dataset_obj['id'], include_content))
        subdir_model = create_model({
            'type': 'directory',
            'name': subdir,
            'path': self._api_path(
                normalize_path(
                    dataset_obj['owner'], dataset_obj['id'],
                    subdir if parent == '' else normalize_path(parent,
                                                               subdir))),
            'writable': dataset_obj.get('accessLevel') in ['WRITE', 'ADMIN'],
            'created': dataset_obj['created'],
            'last_modified': dataset_obj['updated']
        })

        if include_content:
            subdir_model['content'] = self.map_items(
                dataset_obj, parent=normalize_path(parent, subdir))
            subdir_model['format'] = 'json'

        return subdir_model
    def map_dataset(self, dataset, include_content=False):
        self.log.debug('[map_dataset] d:{} c:{}'.format(dataset['id'],
                                                        include_content))
        dataset_dir_model = create_model({
            'type': 'directory',
            'name': dataset['id'],
            'path': self._api_path(
                normalize_path(dataset['owner'], dataset['id'])),
            'writable': dataset.get('accessLevel') in ['WRITE', 'ADMIN'],
            'created': dataset['created'],
            'last_modified': dataset['updated']
        })

        if include_content:
            dataset_dir_model['content'] = self.map_items(dataset)
            dataset_dir_model['format'] = 'json'

        return dataset_dir_model
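
# A rough usage sketch for the mapper above. The metadata keys ('id', 'owner',
# 'accessLevel', 'created', 'updated', 'files') are the ones map_dataset
# reads; the values below are invented for illustration, and real dataset
# objects come from the data.world API.
mapper = DwMapper(prefix='', root_dir='')
sample_dataset = {
    'id': 'my-dataset',
    'owner': 'jane',
    'accessLevel': 'WRITE',
    'created': '2018-01-01T00:00:00.000Z',
    'updated': '2018-01-02T00:00:00.000Z',
    'files': [],
}
model = mapper.map_dataset(sample_dataset)
# Expect a writable directory model for 'jane/my-dataset'; it carries no
# 'content' because include_content defaults to False.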
    def __init__(self, **kwargs):
        super(DwContents, self).__init__(**kwargs)

        # Configuration
        token = self.dw_auth_token
        root_dir = getattr(self, 'root_dir', '/')
        logger = self.log

        # Testing options
        self.api = kwargs.get('api', DwContentsApi(token))
        self.compatibility_mode = kwargs.get('compatibility_mode', False)

        # Final setup
        self.root_dir = normalize_path(root_dir)
        self.mapper = DwMapper(root_dir=root_dir, logger=logger)

        # Share token with datadotworld package
        os.environ['DW_AUTH_TOKEN'] = token
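
    # How this manager is typically wired into Jupyter (sketch only): the
    # trait names below are inferred from the attributes read in __init__
    # above and the import path is an assumption, so check the package's
    # documentation before relying on them.
    #
    #   # jupyter_notebook_config.py
    #   c.NotebookApp.contents_manager_class = 'dwcontents.DwContents'
    #   c.DwContents.dw_auth_token = '<your data.world API token>'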
    def map_file(self, file_obj, parent, dataset_obj,
                 content_type=None, content_format=None,
                 content_func=None):
        self.log.debug('[map_file] f:{} p:{} d:{} t:{} c:{}'.format(
            file_obj.get('name'), parent, dataset_obj.get('id'),
            content_type, content_func is not None))

        file_name = relative_path(file_obj['name'], parent)

        gtype = guess_type(file_obj['name'])
        content_type = content_type if content_type is not None else gtype

        file_model = create_model({
            'type': content_type,
            'name': file_name,
            'path': self._api_path(
                normalize_path(
                    dataset_obj['owner'], dataset_obj['id'],
                    file_obj['name'])),
            'writable': dataset_obj.get('accessLevel') in ['WRITE', 'ADMIN'],
            'created': file_obj['created'],
            'last_modified': file_obj['updated']
        })

        if content_func is not None:
            content = content_func()

            gformat = guess_format(file_obj['name'], content_type)
            content_format = (content_format
                              if content_format is not None else gformat)
            content_mimetype = {
                'text': 'text/plain',
                'base64': 'application/octet-stream'
            }.get(content_format)

            file_model['content'] = content
            file_model['format'] = content_format
            file_model['mimetype'] = content_mimetype

        return file_model
Example #9
def _resolve_path(path, manager_dict):
    """
    Resolve a path based on a dictionary of manager prefixes.

    Returns a triple of (prefix, manager, manager_relative_path).
    """
    path = normalize_path(path)
    parts = path.split('/')

    # Try to find a sub-manager for the first subdirectory.
    mgr = manager_dict.get(parts[0])
    if mgr is not None:
        return parts[0], mgr, '/'.join(parts[1:])

    # Otherwise, fall back to the root manager, if one was supplied.
    mgr = manager_dict.get('')
    if mgr is not None:
        return '', mgr, path

    raise HTTPError(
        404,
        'Couldn\'t resolve path [{path}] and '
        'no root manager supplied!'.format(path=path)
    )
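
# A quick illustration of the dispatch rule above, using throwaway stand-ins
# for the managers (only the dictionary keys matter for resolution; real
# callers pass ContentsManager instances).
managers = {'': 'root_manager', 'dw': 'dw_manager'}
assert _resolve_path('dw/owner/dataset/file.csv', managers) == (
    'dw', 'dw_manager', 'owner/dataset/file.csv')
assert _resolve_path('notebooks/a.ipynb', managers) == (
    '', 'root_manager', 'notebooks/a.ipynb')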
Example #10
    def __init__(self, prefix='', root_dir='', logger=None):
        self.root_dir = normalize_path(root_dir)
        self.prefix = normalize_path(prefix)
        self.log = (logger
                    if logger is not None else logging.getLogger('dwcontents'))
Example #11
    def make_dir(self, api_path):
        # Directories have no first-class representation on data.world, so
        # one is emulated by uploading an empty 'dummy' file under the path.
        dummy_file = normalize_path(api_path, 'dummy')
        self.api.upload_file('testy-tester', 'jupyter', dummy_file, '')
    @staticmethod
    def _get_file(dataset, file_path):
        # Look up a file entry by its normalized path within the dataset.
        file_path = normalize_path(file_path)
        return next(
            (f for f in dataset.get('files', []) if f['name'] == file_path),
            None)
    def save(self, model, path):
        self.log.debug('[save] Saving {} ({})'.format(path, model))
        self.run_pre_save_hook(model, path)

        owner, dataset_id, file_path = self._to_dw_path(path)

        if model['type'] == 'directory':
            if self.compatibility_mode:
                self.api.upload_file(owner, dataset_id,
                                     normalize_path(file_path, 'dummy'), '')
                return self.mapper.map_subdir(
                    file_path, '', self.api.get_dataset(owner, dataset_id))
            else:
                if file_path is not None:
                    http_400('Unable to create directory ({}). Only '
                             'files can be created within datasets '
                             'or data projects.'.format(path))
                elif dataset_id is not None:
                    # This should be possible, however, Jupyter doesn't prompt
                    # users to name the directory and instead creates an
                    # untitled directory.
                    # Until data.world supports moving datasets, users wouldn't
                    # be able to give them proper names.
                    # TODO Fix API (support moving datasets)
                    http_400('Unable to create directory ({}). This path is '
                             'reserved for datasets or data projects '
                             'that must be managed via data.world\'s '
                             'website. Visit https://data.world/'
                             'create-a-project'.format(path))
                else:
                    http_400('Unable to create directory ({}). This path is '
                             'reserved for data.world accounts that '
                             'must be created via data.world\'s '
                             'website.'.format(path))
        else:
            if self.dir_exists(path):
                http_400('Wrong type. {} is not a file.'.format(path))

            if file_path is None:
                http_400('Invalid path ({}). Files can only be created '
                         'within datasets or data projects.'.format(path))

            if model['type'] == 'notebook':
                self.check_and_sign(to_nb_json(model['content']), path)
                content = json.dumps(model['content']).encode('utf-8')
            else:
                model_format = model['format']
                if model_format == 'base64':
                    content = (base64.b64decode(
                        model['content'].encode('ascii')))
                else:
                    content = model['content'].encode('utf-8')

            updated_dataset = self.api.upload_file(owner, dataset_id,
                                                   file_path, content)

            file_dir, _ = split_parent(file_path)
            return self.mapper.map_file(self._get_file(updated_dataset,
                                                       file_path),
                                        file_dir,
                                        updated_dataset,
                                        content_type=(model['type']),
                                        content_format=model.get('format'))
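
# To make the branching in save() concrete, here are the two model shapes it
# distinguishes. These are illustrative stand-ins that mirror Jupyter's
# standard contents model; real models come from the Jupyter frontend, and
# the target path below is hypothetical.
notebook_model = {
    'type': 'notebook',
    # nbformat JSON; signed via check_and_sign before upload
    'content': {'cells': [], 'metadata': {}, 'nbformat': 4,
                'nbformat_minor': 2},
}
binary_file_model = {
    'type': 'file',
    'format': 'base64',
    'content': 'aGVsbG8=',  # base64 for b'hello'; decoded before upload
}
# contents_manager.save(notebook_model, 'jane/my-dataset/Untitled.ipynb')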