Example #1
0
def files_shares(root, path_):
    """Create (or refresh) a share link for the file at ``path_``.

    Raises:
        BasicError: 404 if the path does not resolve to an existing file
            (blob), 406 if the requested expiry is out of bounds.
    """
    tools.validate_root_or_abort(root)

    try:
        stored_object = database.get_stored_object(root, path_)
    except database.MissingNodeException:
        raise BasicError(404, E_FILE_NOT_FOUND)
    else:
        # Shares only apply to files (blobs), not directories; isinstance
        # also handles a None result.
        if not isinstance(stored_object, models.BlobLink):
            raise BasicError(404, E_FILE_NOT_FOUND)

    # Number of days to expiration. Validate it before touching the
    # database so a 406 cannot abort the request after a reference has
    # already been created.
    expire_days = int(request.args.get('expire_days', '1'))
    if not (0 < expire_days <= 10000):
        raise BasicError(406, E_EXPIRE_DAYS(0, 10000))

    # Start by purging the table of all expired uploads.
    _purge_expired_links()

    # If we already have a reference to this file, no need to recreate one.
    db_ref = _find_ref_by_key(_make_file_ref(stored_object))
    if not db_ref:
        db_ref = _create_blob_ref(root, path_, stored_object)

    # Regardless of the fact that the reference previously existed, its expiry
    # is reset.
    db_ref.blob = stored_object
    db_ref.expires = datetime.utcnow() + timedelta(days=expire_days)
    g.db_session.commit()

    # Return the reference for this file, used for retrieval.
    return make_link_data(db_ref)
Example #2
0
def _validate_paths(root, from_path, to_path, target_node):
    """Validate source and destination paths for a copy operation.

    Raises:
        BasicError: 403 when the paths are malformed, when the source
            contains the destination, when the destination parent is
            missing or not a directory, or when the destination already
            holds a live item; 404 (via _get_source_object) when the
            source is missing or deleted.
    """
    # The source path should not be a parent of the destination.
    p_to = tools.split_path(to_path)
    p_from = tools.split_path(from_path)
    if not (p_to and p_from) or (p_from == p_to[:len(p_from)]):
        raise BasicError(403, E_CPY_BAD_PATHS)

    # Attempt to retrieve the parent directory for the destination file, and
    # fail with a 403 if it doesn't exist.
    try:
        obj = database.get_stored_object(root, '/'.join(p_to[:-1]))
    except database.MissingNodeException:
        raise BasicError(403, E_DEST_DOES_NOT_EXIST)

    # The destination path should start with an existing object, which type
    # must be a directory. The destination path should always end with a new
    # path element (copy cannot overwrite). isinstance replaces the exact
    # type comparison for consistency with the rest of the module.
    is_directory = obj and isinstance(obj, models.TreeLink)
    if not is_directory:
        raise BasicError(403, E_BAD_DESTINATION)

    # Attempt to retrieve the original item to copy.
    source = _get_source_object(root, from_path, target_node)
    source_is_dir = source and isinstance(source, models.TreeLink)

    # We verify that there is no (undeleted) item at the destination path.
    target = obj.tree.sub_trees if source_is_dir else obj.tree.sub_files
    if p_to[-1] in target and not target[p_to[-1]].is_deleted:
        raise BasicError(403, E_DEST_EXISTS)
Example #3
0
def files_search(root, path_):
    """Search for files matching the request's query under ``path_``."""
    tools.validate_root_or_abort(root)

    # Refuse obviously useless queries up-front: at least three characters
    # are required before we bother querying the database.
    query = request.args['query']
    if len(query) < 3:
        raise BasicError(400, E_QUERY_LEN(3))

    # Resolve the search root; it must exist and be a directory.
    try:
        stored_object = database.get_stored_object(root, path_)
    except database.MissingNodeException:
        raise BasicError(404, E_SEARCH_DIR_NOT_FOUND)
    if not isinstance(stored_object, models.TreeLink):
        raise BasicError(404, E_SEARCH_PATH_NOT_A_DIR)

    # Cap the number of results to fetch (up to 1000).
    file_limit = int(request.args.get('file_limit', '1000'))
    if not (0 < file_limit <= 1000):
        raise BasicError(406, E_FILE_LIMIT(0, 1000))

    # Same URL parameters as the metadata call, with listing explicitly
    # disabled.
    options = {
        'list': False,
        'include_deleted': get_boolean_arg(request.args, 'include_deleted'),
    }

    # Remark: we cannot use flask.jsonify here (through our usual api_endpoint
    # decorator), see http://flask.pocoo.org/docs/security/#json-security.
    results = list(_gen_metadata(stored_object, root, path_, query, **options))
    return json.dumps(results), 200, {'content-type': 'application/json'}
Example #4
0
def files_revisions(root, path_):
    """Return metadata for up to ``rev_limit`` revisions of a file."""
    tools.validate_root_or_abort(root)

    try:
        stored_object = database.get_stored_object(root, path_)
    except database.MissingNodeException:
        raise BasicError(404, E_FILE_NOT_FOUND)
    if not isinstance(stored_object, models.BlobLink):
        raise BasicError(404, E_FILE_NOT_FOUND)

    # Maximum number of file revision to fetch (up to 1000).
    rev_limit = int(request.args.get('rev_limit', '10'))
    if not (0 < rev_limit <= 1000):
        raise BasicError(406, E_REV_LIMIT(0, 1000))

    def _ancestry(node):
        # Walk the parent chain, most recent revision first.
        while node:
            yield node
            node = node.parent

    # Remark: we cannot use flask.jsonify here (through our usual api_endpoint
    # decorator), see http://flask.pocoo.org/docs/security/#json-security.
    revisions = itertools.islice(_ancestry(stored_object), rev_limit)
    payload = [metadata.make_metadata(root, path_, node) for node in revisions]
    return json.dumps(payload), 200, {'content-type': 'application/json'}
Example #5
0
def _get_source_object(root, from_path, target_node):
    """Retrieve the object to copy, aborting with a 404 on failure.

    A 404 is raised both when the source path does not resolve and when
    it resolves to a deleted node.
    """
    try:
        node = database.get_stored_object(root, from_path, target_node)
    except database.MissingNodeException:
        raise BasicError(404, E_SOURCE_NOT_FOUND)
    else:
        if node.is_deleted:
            raise BasicError(404, E_SOURCE_DELETED)
        return node
Example #6
0
File: get.py  Project: icecrime/datastore
def files_get(root, path_):
    """Stream the file content stored at ``path_`` back to the client."""
    tools.validate_root_or_abort(root)

    # Resolve the database representation of the requested file; a path
    # that does not resolve is a plain 404.
    try:
        dbobject = database.get_stored_object(root, path_)
    except database.MissingNodeException:
        raise BasicError(404, E_FILE_NOT_FOUND)

    # Fetch the blob stream from the file store and ship it to the client
    # together with the file's metadata.
    fmdata = metadata.make_metadata(root, path_, dbobject)
    stream = file_store.retrieve_blob_stream(root, dbobject.hash)
    return _send_file(stream, path_, fmdata, dbobject.iv)
Example #7
0
def fileops_move():
    """Move a file or directory, implemented as copy followed by delete.

    Returns the metadata of the object at its new location.
    """
    root, from_path, to_path = _get_params()
    tools.validate_root_or_abort(root)

    # Move is implemented in terms of copy. The first element returned by
    # do_copy (the source object) is unused here.
    commit = database.create_commit(root)
    _, obj_copy = copy.do_copy(commit.root, root, from_path, to_path)

    # Delete the source object.
    source = database.get_stored_object(root, from_path, commit.root)
    delete.recursive_delete(source)

    # Store the commit, and return the metadata for the new object.
    database.store_commit(root, commit)
    return metadata.make_metadata(root, to_path, obj_copy)
Example #8
0
def fileops_delete():
    """Delete the object named by the request parameters.

    Returns the metadata of the (now deleted) object.
    """
    root, path_ = _get_params()
    tools.validate_root_or_abort(root)
    commit = database.create_commit(root)

    # Retrieve the stored object (could be a blob or tree link) or abort with
    # a 404 if we fail.
    try:
        stored_object = database.get_stored_object(root, path_, commit.root)
    except database.MissingNodeException:
        raise BasicError(404, E_FILE_NOT_FOUND)

    # Deleting an already-deleted object is an error.
    if stored_object.is_deleted:
        raise BasicError(404, E_ALREADY_DELETED)

    # Recursively delete the object (if necessary), and commit the transaction.
    recursive_delete(stored_object)
    database.store_commit(root, commit)
    return metadata.make_metadata(root, path_, stored_object)
Example #9
0
def files_metadata(root, path_):
    """Return JSON metadata for ``path_``, or 304 if the client is current.

    Raises:
        BasicError: 404 if the path does not resolve to a stored object.
    """
    tools.validate_root_or_abort(root)

    try:
        stored_object = database.get_stored_object(root, path_)
    except database.MissingNodeException:
        raise BasicError(404, E_FILE_NOT_FOUND)

    # If the client has provided a hash value and it compares equal to the one
    # we have just generated, return a 304 (Not Modified). The local is named
    # mdata to avoid shadowing the metadata module used elsewhere in the file.
    params = _get_url_params()
    mdata = make_metadata(root, path_, stored_object, **params)
    if request.args.get("hash") == mdata.get("hash", ""):
        return Response(status=304)

    # Little hack here: we cannot decorate files_metadata function as a json
    # api endpoint because it may return a 304 without data. We use this tiny
    # internal decorated function to do the job when there is data to return.
    @decorators.api_endpoint
    def _json_metadata_return(payload):
        return payload

    return _json_metadata_return(mdata)