def _make_file_metadata(root, path_, blob, **kwargs):
    """Build the metadata dictionary for a single file blob.

    Starts from the shared base metadata, adds size/date/revision fields,
    guesses a mime type from the filename, and — when ``include_links``
    is passed truthy — attaches the share-link data for the blob.
    """
    # Ask the file store for the blob's modification date and byte size.
    mtime, nbytes = file_store.stat_blob(root, path_, blob.hash)

    meta = _make_base_metadata(root, path_, blob)
    meta["bytes"] = nbytes
    meta["is_dir"] = False
    meta["modified"] = display.human_timestamp(mtime or 0)
    meta["rev"] = blob.hash[:hash_size]
    meta["size"] = display.human_size(nbytes or 0)

    # A mime type is only added when it can be deduced from the filename.
    guessed, _encoding = mimetypes.guess_type(path_)
    if guessed:
        meta["mime_type"] = guessed

    # Optionally attach share-link information for this blob.
    if kwargs.get("include_links", False):
        ref = shares.find_obj_ref(blob)
        if ref:
            meta["link"] = shares.make_link_data(ref)

    return meta
def make_link_data(db_ref):
    """Return the public link description for a shared object reference.

    Includes the share key, the download and metadata URLs for that key,
    and a human-readable expiry timestamp.
    """
    key = db_ref.key
    download_url = url_for('files_get_key', key=key)
    metadata_url = url_for('files_metadata_key', key=key)
    return {
        'key': key,
        'link': download_url,
        'metadata': metadata_url,
        'expires': display.human_timestamp(db_ref.expires),
    }
def _make_dir_metadata(root, path_, tree_link, **kwargs):
    """Build the metadata dictionary for a directory.

    Directories always report zero bytes; the revision is derived from
    the tree hash. When ``list`` (default True) is set, a ``contents``
    entry with per-file metadata and a ``hash`` entry are added —
    child directories are never listed recursively.
    """
    tree = tree_link.tree

    meta = _make_base_metadata(root, path_.rstrip("/"), tree_link)
    meta["bytes"] = 0
    meta["is_dir"] = True
    meta["modified"] = display.human_timestamp(tree.created)
    meta["rev"] = _make_tree_rev(tree)[:hash_size]
    meta["size"] = display.human_size(0)

    # Default list mode: include a 'contents' entry with the metadata of
    # each file in this directory (one level only).
    if kwargs.get("list", True):
        child_options = dict(kwargs)
        child_options["list"] = False  # Don't list recursively
        entries, tree_hash = _make_dir_content(root, path_, tree, **child_options)
        meta["hash"] = tree_hash
        meta["contents"] = entries

    return meta
def _make_response(db_upload):
    """Return the response payload describing an in-progress upload.

    Reports the upload identifier, the current byte offset, and a
    human-readable expiry timestamp.
    """
    payload = {}
    payload['expires'] = display.human_timestamp(db_upload.expires)
    payload['offset'] = db_upload.offset
    payload['upload_id'] = db_upload.upload_id
    return payload