Code example #1
File: search.py Project: icecrime/datastore
def _gen_metadata(obj, root, path, query, **kwargs):
    # Skip deleted entries unless the caller passed 'include_deleted'.
    accept = lambda o: kwargs.get('include_deleted', False) or not o.is_deleted
    if query in obj.path and accept(obj):
        yield metadata.make_metadata(root, path, obj, **kwargs)
    # Recurse depth-first into sub-directories, then match the files at this
    # level against the query.
    for _, subd in sorted(obj.tree.sub_trees.items()):
        sub_path = os.path.join(path, subd.path)
        for m in _gen_metadata(subd, root, sub_path, query, **kwargs):
            yield m
    for _, subf in sorted(obj.tree.sub_files.items()):
        if query in subf.path and accept(subf):
            sub_path = os.path.join(path, subf.path)
            yield metadata.make_metadata(root, sub_path, subf, **kwargs)
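
The generator above leans on the project's tree models; as a point of reference, here is a minimal, self-contained sketch of the same recursive-generator pattern over a toy tree (Node and gen_paths are illustrative stand-ins, not part of the datastore API):

import os

class Node(object):
    """Illustrative stand-in for the datastore's tree/link models."""
    def __init__(self, path, sub_trees=None, sub_files=None, is_deleted=False):
        self.path = path
        self.is_deleted = is_deleted
        self.sub_trees = sub_trees or {}
        self.sub_files = sub_files or {}

def gen_paths(node, path, query, include_deleted=False):
    # Same shape as _gen_metadata: match the current node, recurse into
    # sub-trees, then match the files at this level.
    accept = lambda n: include_deleted or not n.is_deleted
    if query in node.path and accept(node):
        yield path
    for _, subd in sorted(node.sub_trees.items()):
        for p in gen_paths(subd, os.path.join(path, subd.path), query, include_deleted):
            yield p
    for _, subf in sorted(node.sub_files.items()):
        if query in subf.path and accept(subf):
            yield os.path.join(path, subf.path)

docs = Node('docs', sub_files={'notes.txt': Node('notes.txt')})
print(list(gen_paths(Node('', sub_trees={'docs': docs}), '/', 'notes')))
# prints ['/docs/notes.txt']
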
Code example #2
File: create_folder.py Project: icecrime/datastore
def fileops_createfolder():
    # Root and path data are passed as POST data rather than URL args.
    root = request.form['root']
    path_ = request.form['path']
    tools.validate_root_or_abort(root)
    tools.validate_path_or_abort(path_)

    # A file operation is always reflected by a new Commit object in order to
    # track the changes.
    try:
        commit = database.create_commit(root)
        ref_node, new_node = database.copy_hierarchy(root, path_, commit.root)
    except database.MissingNodeException:
        raise BasicError(404, E_NON_EXISTING_DESTINATION_PATH)

    # To stick with Dropbox behaviour, we raise a 403 if the directory already
    # exists.
    treename = tools.split_path(path_)[-1]
    existing = ref_node and ref_node.sub_trees.get(treename)
    if existing and not existing.is_deleted:
        raise BasicError(403, E_DIR_ALREADY_EXISTS)

    # Create the new directory and commit the change.
    output = models.TreeLink(tree=models.Tree(), path=treename)
    new_node.sub_trees[treename] = output
    database.store_commit(root, commit)
    return metadata.make_metadata(root, path_, output)
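
Since the route names mirror the Dropbox Core API, a client call against a running instance presumably looks like the following (the URL, port, and 'sandbox' root are assumptions for illustration only):

import requests

resp = requests.post(
    'http://localhost:5000/fileops/create_folder',  # assumed route for fileops_createfolder
    data={'root': 'sandbox', 'path': '/photos/2015'},
)
print(resp.status_code)  # 403 if the directory already exists
print(resp.json())       # metadata of the newly created directory
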
Code example #3
File: revisions.py Project: icecrime/datastore
def files_revisions(root, path_):
    tools.validate_root_or_abort(root)

    try:
        stored_object = database.get_stored_object(root, path_)
    except database.MissingNodeException:
        raise BasicError(404, E_FILE_NOT_FOUND)
    else:
        if not isinstance(stored_object, models.BlobLink):
            raise BasicError(404, E_FILE_NOT_FOUND)

    # Maximum number of file revisions to fetch (up to 1000).
    rev_limit = int(request.args.get('rev_limit', '10'))
    if not (0 < rev_limit <= 1000):
        raise BasicError(406, E_REV_LIMIT(0, 1000))

    def _revisions(stored_object):
        while stored_object:
            yield stored_object
            stored_object = stored_object.parent

    # Remark: we cannot use flask.jsonify here (through our usual api_endpoint
    # decorator), see http://flask.pocoo.org/docs/security/#json-security.
    return json.dumps([
        metadata.make_metadata(root, path_, obj)
        for obj in itertools.islice(_revisions(stored_object), rev_limit)
    ]), 200, {'content-type': 'application/json'}
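
The revision history is a plain parent-linked chain, so the inner _revisions generator combined with itertools.islice bounds the walk at rev_limit without materialising the whole history. A standalone illustration of the pattern, with Rev as a hypothetical stand-in for models.BlobLink:

import itertools

class Rev(object):
    def __init__(self, name, parent=None):
        self.name, self.parent = name, parent

def revisions(rev):
    while rev:
        yield rev
        rev = rev.parent

head = Rev('v3', Rev('v2', Rev('v1')))
print([r.name for r in itertools.islice(revisions(head), 2)])
# prints ['v3', 'v2']
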
Code example #4
File: copy.py Project: icecrime/datastore
def fileops_copy():
    root, from_path, to_path = _get_params()
    tools.validate_root_or_abort(root)

    # A copy operation is always reflected by a new Commit object in order to
    # track the changes.
    commit = database.create_commit(root)
    obj, obj_copy = do_copy(commit.root, root, from_path, to_path)

    # Store the commit, and return the metadata for the new object.
    database.store_commit(root, commit)
    return metadata.make_metadata(root, to_path, obj_copy)
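
The _get_params helper is not part of this listing; judging from fileops_createfolder above, it plausibly pulls the operation's parameters out of the POST data along these lines (a hypothetical reconstruction, not the project's actual code):

from flask import request

def _get_params():
    # Assumed: root plus source and destination paths, as form fields.
    return (request.form['root'],
            request.form['from_path'],
            request.form['to_path'])
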
Code example #5
File: get.py Project: icecrime/datastore
def files_get(root, path_):
    tools.validate_root_or_abort(root)

    # Attempt to retrieve the database representation of the requested file
    # from the database, and raise a 404 if we failed in doing so.
    try:
        dbobject = database.get_stored_object(root, path_)
    except database.MissingNodeException:
        raise BasicError(404, E_FILE_NOT_FOUND)

    # Request the actual disk object from the file_store, and send the result
    # as a file to the client.
    fmdata = metadata.make_metadata(root, path_, dbobject)
    stream = file_store.retrieve_blob_stream(root, dbobject.hash)
    return _send_file(stream, path_, fmdata, dbobject.iv)
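
Reading a file back is then a plain GET; the route shape below is an assumption mirroring the Dropbox-style endpoints this project emulates:

import requests

resp = requests.get('http://localhost:5000/files/sandbox/photos/cat.jpg',
                    stream=True)
with open('cat.jpg', 'wb') as out:
    for chunk in resp.iter_content(8192):  # consume the stream sent by _send_file
        out.write(chunk)
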
Code example #6
File: move.py Project: icecrime/datastore
def fileops_move():
    root, from_path, to_path = _get_params()
    tools.validate_root_or_abort(root)

    # Move is implemented in terms of copy.
    commit = database.create_commit(root)
    obj, obj_copy = copy.do_copy(commit.root, root, from_path, to_path)

    # Delete the source object.
    source = database.get_stored_object(root, from_path, commit.root)
    delete.recursive_delete(source)

    # Store the commit, and return the metadata for the new object.
    database.store_commit(root, commit)
    return metadata.make_metadata(root, to_path, obj_copy)
Code example #7
File: put.py Project: icecrime/datastore
def do_put(root, path_, stream, hasher, encryption_iv):
    # A file operation is always reflected by a new Commit object in order to
    # track the changes. If copying fails because of an incomplete source
    # hierarchy, we abort with a 404.
    try:
        commit = database.create_commit(root)
        ref_node, new_node = database.copy_hierarchy(root, path_, commit.root)
    except database.MissingNodeException:
        raise BasicError(404, E_NON_EXISTING_DESTINATION_PATH)

    # Handle the case where the file already exists. In the general case, if
    # the filename already exists and 'overwrite' is set to False, we put to a
    # new filename such that 'test.txt' becomes 'test (1).txt'. Also, when
    # content is put to an older revision (identified by 'parent_rev'), the
    # filename 'test.txt' becomes 'test (conflicted copy).txt'.
    split_pt = tools.split_path(path_)
    filename = split_pt[-1]

    # It is an error to put a file with the same name as a (non-deleted) directory.
    existing_dir = ref_node.sub_trees.get(filename)
    if existing_dir and not existing_dir.is_deleted:
        raise BasicError(403, E_DIR_ALREADY_EXISTS)

    if filename in ref_node.sub_files:
        filename = _handle_conflict(ref_node, filename, **_get_url_params())
        path_ = '/'.join(['/'.join(split_pt[:-1]), filename])

    # We start by storing the provided content, and then we try and make the
    # database structure reflect the requested change.
    filehash = file_store.register_blob(root, path_, stream, hasher)
    fileblob = _find_or_create_blob(filehash, encryption_iv)

    # Only link a new blob revision if the content actually differs from the
    # previous one, then commit to the database. Considering that on-disk
    # blobs are encrypted with a randomly generated IV, identical hashes are
    # highly unlikely (hence the no-cover pragma).
    old_blob = ref_node and ref_node.sub_files.get(filename)
    if old_blob and (old_blob.hash == fileblob.hash):  # pragma: no cover
        output = old_blob
        old_blob.is_deleted = False  # Restore the file if it was deleted
    else:
        output = models.BlobLink(blob=fileblob, path=filename, parent=old_blob)
        new_node.sub_files[filename] = output
        database.store_commit(root, commit)
    return metadata.make_metadata(root, path_, output)
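
The renaming policy described in the comments lives in _handle_conflict, which is not shown in this listing; a minimal illustrative stand-in covering both cases could look like this:

import os.path

def conflict_name(filename, existing, suffix=None):
    # 'test.txt' -> 'test (conflicted copy).txt' when putting to an old
    # parent_rev, otherwise 'test.txt' -> 'test (1).txt', 'test (2).txt', ...
    stem, ext = os.path.splitext(filename)
    if suffix:
        return '%s (%s)%s' % (stem, suffix, ext)
    n = 1
    while '%s (%d)%s' % (stem, n, ext) in existing:
        n += 1
    return '%s (%d)%s' % (stem, n, ext)

print(conflict_name('test.txt', {'test.txt', 'test (1).txt'}))
# prints 'test (2).txt'
print(conflict_name('test.txt', set(), suffix='conflicted copy'))
# prints 'test (conflicted copy).txt'
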
Code example #8
File: delete.py Project: icecrime/datastore
def fileops_delete():
    root, path_ = _get_params()
    tools.validate_root_or_abort(root)
    commit = database.create_commit(root)

    # Retrieve the stored object (could be a blob or tree link) or abort with
    # a 404 if we fail.
    try:
        stored_object = database.get_stored_object(root, path_, commit.root)
    except database.MissingNodeException:
        raise BasicError(404, E_FILE_NOT_FOUND)

    # Abort with a 404 if the object has already been deleted.
    if stored_object.is_deleted:
        raise BasicError(404, E_ALREADY_DELETED)

    # Recursively delete the object (if necessary), and commit the transaction.
    recursive_delete(stored_object)
    database.store_commit(root, commit)
    return metadata.make_metadata(root, path_, stored_object)
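
recursive_delete is referenced here and in fileops_move but not listed; given the TreeLink/BlobLink models seen in the other examples, it plausibly marks the link deleted and walks any sub-tree, roughly along these lines (an assumption, not the project's actual code):

def recursive_delete(obj):
    obj.is_deleted = True
    # TreeLinks carry a tree of children; BlobLinks do not.
    tree = getattr(obj, 'tree', None)
    if tree is not None:
        for link in list(tree.sub_trees.values()) + list(tree.sub_files.values()):
            recursive_delete(link)
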
Code example #9
File: get.py Project: icecrime/datastore
def files_get_key(key):
    # Resolve the (URL-quoted) share key to its database reference.
    db_ref = shares.retrieve_ref(urllib2.unquote(key))
    # Build the metadata and stream the blob back to the client, exactly as
    # files_get does above.
    fmdata = metadata.make_metadata(db_ref.root, db_ref.path, db_ref.blob)
    stream = file_store.retrieve_blob_stream(db_ref.root, db_ref.blob.hash)
    return _send_file(stream, db_ref.path, fmdata, db_ref.blob.iv)