Example 1
def _validate_paths(root, from_path, to_path, target_node):
    """Validate the source and destination paths of a copy operation.

    Args:
        root: the storage root the paths live under.
        from_path: path of the item being copied.
        to_path: destination path for the copy.
        target_node: optional search starting point, forwarded to the
            source-object lookup.

    Raises:
        BasicError: HTTP 403 when either path is empty, the source path is
            a parent of the destination, the destination's parent does not
            exist or is not a directory, or an undeleted item already
            occupies the destination path.
    """
    # The source path should not be a parent of the destination (that would
    # copy a directory into itself), and neither path may be empty.
    p_to = tools.split_path(to_path)
    p_from = tools.split_path(from_path)
    if not (p_to and p_from) or (p_from == p_to[:len(p_from)]):
        raise BasicError(403, E_CPY_BAD_PATHS)

    # Attempt to retrieve the parent directory for the destination file, and
    # fail with a 403 if it doesn't exist.
    try:
        obj = database.get_stored_object(root, '/'.join(p_to[:-1]))
    except database.MissingNodeException:
        raise BasicError(403, E_DEST_DOES_NOT_EXIST)

    # The destination path should start with an existing object, which must
    # be a directory. The destination path should always end with a new path
    # element (copy cannot overwrite). isinstance() is preferred over a
    # direct type comparison.
    is_directory = obj and isinstance(obj, models.TreeLink)
    if not is_directory:
        raise BasicError(403, E_BAD_DESTINATION)

    # Attempt to retrieve the original item to copy.
    source = _get_source_object(root, from_path, target_node)
    source_is_dir = source and isinstance(source, models.TreeLink)

    # We verify that there is no (undeleted) item at the destination path.
    target = obj.tree.sub_trees if source_is_dir else obj.tree.sub_files
    if p_to[-1] in target and not target[p_to[-1]].is_deleted:
        raise BasicError(403, E_DEST_EXISTS)
Example 2
def fileops_createfolder():
    """Create a new directory at the POSTed path, Dropbox-style."""
    # Root and path are provided as POST form fields rather than URL args.
    root = request.form['root']
    path_ = request.form['path']
    tools.validate_root_or_abort(root)
    tools.validate_path_or_abort(path_)

    # Every file operation is recorded through a new Commit object; a broken
    # destination hierarchy aborts with a 404.
    try:
        commit = database.create_commit(root)
        ref_node, new_node = database.copy_hierarchy(root, path_, commit.root)
    except database.MissingNodeException:
        raise BasicError(404, E_NON_EXISTING_DESTINATION_PATH)

    # Mirror Dropbox behaviour: creating an already existing (live)
    # directory is rejected with a 403.
    dirname = tools.split_path(path_)[-1]
    current = ref_node.sub_trees.get(dirname) if ref_node else ref_node
    if current and not current.is_deleted:
        raise BasicError(403, E_DIR_ALREADY_EXISTS)

    # Register the new directory in the copied hierarchy, then persist the
    # commit and return the resulting metadata.
    created = models.TreeLink(tree=models.Tree(), path=dirname)
    new_node.sub_trees[dirname] = created
    database.store_commit(root, commit)
    return metadata.make_metadata(root, path_, created)
Example 3
def copy_hierarchy(root, path, destination, source=None):
    """Mirror the directory hierarchy leading to *path* into *destination*.

    Walks every path element except the last, descending the existing
    (source) tree in lockstep with the new (destination) tree.

    Args:
        root: the storage root identifier (used to locate the user's head
            when *source* is not given).
        path: the path whose parent hierarchy must be reproduced.
        destination: the root Tree of the new commit being built.
        source: the Tree to copy from; defaults to the logged-in user's
            head for *root*.

    Returns:
        A ``(source, destination)`` tuple of Tree nodes for the innermost
        walked directory (the reference node and its new counterpart).

    Raises:
        MissingNodeException: if an intermediate directory is missing from
            the source tree.
    """
    # If source node is not specified, use latest logged user's head.
    if not source:
        r_node = g.user.dbuser.nodes.get(root)
        source = r_node.head.root

    # Create current and reference nodes and successively clone as required. We
    # try to preserve as much hierarchy as possible.
    path_elements = tools.split_path(path)
    for directory in path_elements[:-1]:
        # The impacted path is always composed of new tree objects. We don't
        # create nodes which didn't exist previously (requires create_folder).
        if not source or (directory not in source.sub_trees):
            raise MissingNodeException()

        if not config.enable_full_history:
            # Without full history, the destination simply aliases the
            # existing subtree — no per-commit copy is made.
            destination = source.sub_trees[directory].tree
        else:  # pragma: no cover
            # Copy the previous node's content to a newly created one.
            new_node = TreeLink(tree=Tree(), path=directory)
            source.sub_trees[directory].tree.copy_to(new_node.tree)
            destination.sub_trees[directory] = new_node
            destination = new_node.tree

        # Move on to the next nodes.
        source = source.sub_trees[directory].tree

    # Return both the reference node and its new instance.
    return source, destination
Example 4
def _send_file(stream, path, metadata, encryption_iv):
    """Stream a stored file back to the client, decrypting on the fly."""
    # A missing stream means the requested file could not be located.
    if not stream:
        raise BasicError(404, E_FILE_NOT_FOUND)

    # Wrap the raw stream so content is decrypted as it is read, keyed on
    # the first 32 characters of the user's password.
    crypt_key = g.user.dbuser.password[:32]
    decrypted = AESDecryptionStream(stream, crypt_key, encryption_iv)

    attachment_name = tools.split_path(path)[-1]
    response = send_file(
        decrypted, add_etags=False, as_attachment=True,
        attachment_filename=attachment_name)
    response.headers['x-datastore-metadata'] = json.dumps(metadata)
    return response
Example 5
def get_stored_object(root, path, search_node=None):
    """Retrieve a database stored object.

    Args:
        root: the storage node
        path: the storage path (down the provided root)
        search_node: the starting point for the search (defaults to the user's
            root if not provided)

    Returns:
        The requested node as a BlobLink, or a TreeLink for a directory.

    Raises:
        MissingNodeException: if an intermediate directory or the requested
            node itself cannot be found (callers typically translate this
            into an HTTP 404).
    """
    # Retrieve the search node (either as specified, or the user's head).
    search_node = search_node or _get_default_search_node(root)

    # Retrieving the root is a special case as the path is empty. However, the
    # root is a Tree rather than a TreeLink.
    path_elements = tools.split_path(path)
    if not path_elements:
        return _as_tree_link(search_node)

    # Walk the user commit tree to find the requested path. We assume that any
    # part of the path but the last is necessarily a directory.
    for directory in path_elements[:-1]:
        if directory not in search_node.sub_trees:
            raise MissingNodeException()
        search_node = search_node.sub_trees[directory].tree

    # The last path element can either be a directory or a file.
    path_end = path_elements[-1].rstrip('/')

    # If the path has a trailing slash, we only allow it to identify a
    # directory. If it doesn't, it may identify either a file (in priority) or
    # a directory (if no matching file is found).
    if path.endswith('/'):
        output = search_node.sub_trees.get(path_end)
    else:
        output = (search_node.sub_files.get(path_end) or
                  search_node.sub_trees.get(path_end))

    # If it is neither found as a directory nor a file, raise a MissingNode.
    # Note that we return the BlobLink rather than the Blob itself in order to
    # retrieve the revision (which is an attribute of the link).
    if not output:
        raise MissingNodeException()
    return output
Example 6
def do_put(root, path_, stream, hasher, encryption_iv):
    """Store uploaded file content at *path_* under *root* and commit it.

    Args:
        root: the storage root identifier.
        path_: destination path for the uploaded file (may be rewritten on
            naming conflicts).
        hasher: hashing object forwarded to the blob store to fingerprint
            the content.
        stream: the content stream to persist.
        encryption_iv: initialization vector recorded alongside the blob.

    Returns:
        The result of ``metadata.make_metadata`` for the stored file.

    Raises:
        BasicError: 404 if the destination hierarchy does not exist, 403 if
            an undeleted directory already uses the target filename.
    """
    # A file operation is always recorded by a new Commit object in order to
    # track the changes. If copying fails because of an incomplete source
    # hierarchy we abort with a 404.
    try:
        commit = database.create_commit(root)
        ref_node, new_node = database.copy_hierarchy(root, path_, commit.root)
    except database.MissingNodeException:
        raise BasicError(404, E_NON_EXISTING_DESTINATION_PATH)

    # Handle the case where the file already exists. In the general case, if
    # the filename already exists and 'overwrite' is set to False, we put
    # to a new filename such that 'test.txt' becomes 'test (1).txt'. Also, when
    # content is put to an older revision (identified by 'parent_rev'), then
    # the filename 'test.txt' becomes 'test (conflicted copy).txt'.
    split_pt = tools.split_path(path_)
    filename = split_pt[-1]

    # It is an error to post a file named like a (non deleted) directory.
    existing_dir = ref_node.sub_trees.get(filename)
    if existing_dir and not existing_dir.is_deleted:
        raise BasicError(403, E_DIR_ALREADY_EXISTS)

    # On conflict, both the filename and the full path are rewritten before
    # the content is registered below.
    if filename in ref_node.sub_files:
        filename = _handle_conflict(ref_node, filename, **_get_url_params())
        path_ = '/'.join(['/'.join(split_pt[:-1]), filename])

    # We start by storing the provided content, and then we try and make the
    # database structure reflect the requested change.
    filehash = file_store.register_blob(root, path_, stream, hasher)
    fileblob = _find_or_create_blob(filehash, encryption_iv)

    # Update the blob entry if it's actually different from the previous one,
    # and commit to the database. Considering that the on disk blobs are
    # encrypted with a randomly generated IV, a hash collision with the
    # previous revision is more than unlikely.
    old_blob = ref_node and ref_node.sub_files.get(filename)
    if old_blob and (old_blob.hash == fileblob.hash):  # pragma: no cover
        output = old_blob
        old_blob.is_deleted = False  # Restore the file if it was deleted
    else:
        output = models.BlobLink(blob=fileblob, path=filename, parent=old_blob)
        new_node.sub_files[filename] = output
        database.store_commit(root, commit)
    return metadata.make_metadata(root, path_, output)