Example #1
    def getPiecemap(self, inner_path):
        file_info = self.site.content_manager.getFileInfo(inner_path)
        piecemap_inner_path = helper.getDirname(file_info["content_inner_path"]) + file_info["piecemap"]
        self.site.needFile(piecemap_inner_path, priority=20)
        piecemap = msgpack.unpack(self.site.storage.open(piecemap_inner_path))[helper.getFilename(inner_path)]
        piecemap["piece_size"] = file_info["piece_size"]
        return piecemap
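
For orientation, the piecemap returned here has roughly the following shape. This is an illustrative sketch inferred from the Bigfile examples further down, which pack the piecemap as {file_name: piecemap_info} with a "sha512_pieces" list and attach "piece_size" from content.json; the number of pieces and the digest bytes below are placeholders, not real values.

piecemap = {
    "sha512_pieces": [
        b"\x00" * 32,  # raw digest of piece 0 (placeholder bytes; length assumed)
        b"\x00" * 32,  # raw digest of piece 1 (placeholder bytes; length assumed)
    ],
    "piece_size": 1024 * 1024  # attached from file_info["piece_size"] above
}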
Example #2
    def sidebarRenderBadFiles(self, body, site):
        body.append(
            _(u"""
            <li>
             <label>{_[Needs to be updated]}:</label>
             <ul class='filelist'>
        """))

        i = 0
        for bad_file, tries in site.bad_files.iteritems():
            i += 1
            body.append(
                _(
                    u"""<li class='color-red' title="{bad_file_path} ({tries})">{bad_filename}</li>""",
                    {
                        "bad_file_path": bad_file,
                        "bad_filename": helper.getFilename(bad_file),
                        "tries": _.pluralize(tries, "{} try", "{} tries")
                    }))
            if i > 30:
                break

        if len(site.bad_files) > 30:
            num_bad_files = len(site.bad_files) - 30
            body.append(
                _(u"""<li class='color-red'>{_[+ {num_bad_files} more]}</li>""",
                  nested=True))

        body.append("""
             </ul>
            </li>
        """)
Example #3
    def hashFiles(self, dir_inner_path, ignore_pattern=None, optional_pattern=None):
        files_node = {}
        files_optional_node = {}

        for file_relative_path in self.site.storage.list(dir_inner_path):
            file_name = helper.getFilename(file_relative_path)

            ignored = optional = False
            if file_name == "content.json":
                ignored = True
            elif ignore_pattern and re.match(ignore_pattern, file_relative_path):
                ignored = True
            elif file_name.startswith("."):
                ignored = True
            elif optional_pattern and re.match(optional_pattern, file_relative_path):
                optional = True

            if ignored:  # Ignore content.json, defined regexp and files starting with .
                self.log.info("- [SKIPPED] %s" % file_relative_path)
            else:
                file_path = self.site.storage.getPath(dir_inner_path + "/" + file_relative_path)
                sha512sum = CryptHash.sha512sum(file_path)  # Calculate sha512 sum of file
                if optional:
                    self.log.info("- [OPTIONAL] %s (SHA512: %s)" % (file_relative_path, sha512sum))
                    files_optional_node[file_relative_path] = {"sha512": sha512sum, "size": os.path.getsize(file_path)}
                else:
                    self.log.info("- %s (SHA512: %s)" % (file_relative_path, sha512sum))
                    files_node[file_relative_path] = {"sha512": sha512sum, "size": os.path.getsize(file_path)}
        return files_node, files_optional_node
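
For reference, the two dictionaries returned above map file paths (relative to the content.json directory) to their hash and size. A minimal illustrative sketch of that shape, with made-up paths, digests and sizes:

files_node = {
    "index.html": {"sha512": "<hex digest>", "size": 1234},
    "js/all.js": {"sha512": "<hex digest>", "size": 56789}
}
files_optional_node = {
    "data/video.mp4": {"sha512": "<hex digest>", "size": 10 * 1024 * 1024}
}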
Example #5
    def sidebarRenderBadFiles(self, body, site):
        body.append(_(u"""
            <li>
             <label>{_[Needs to be updated]}:</label>
             <ul class='filelist'>
        """))

        i = 0
        for bad_file, tries in site.bad_files.iteritems():
            i += 1
            body.append(_(u"""<li class='color-red' title="{bad_file_path} ({tries})">{bad_filename}</li>""", {
                "bad_file_path": cgi.escape(bad_file, True),
                "bad_filename": cgi.escape(helper.getFilename(bad_file), True),
                "tries": _.pluralize(tries, "{} try", "{} tries")
            }))
            if i > 30:
                break

        if len(site.bad_files) > 30:
            num_bad_files = len(site.bad_files) - 30
            body.append(_(u"""<li class='color-red'>{_[+ {num_bad_files} more]}</li>""", nested=True))

        body.append("""
             </ul>
            </li>
        """)
Example #6
    def hashFiles(self,
                  dir_inner_path,
                  ignore_pattern=None,
                  optional_pattern=None):
        files_node = {}
        files_optional_node = {}
        if not re.match("^[a-zA-Z0-9_@=\.\+-/]*$", dir_inner_path):
            ignored = True
            self.log.error(
                "- [ERROR] Only ascii encoded directories allowed: %s" %
                dir_inner_path)

        for file_relative_path in self.site.storage.list(dir_inner_path):
            file_name = helper.getFilename(file_relative_path)

            ignored = optional = False
            if file_name == "content.json":
                ignored = True
            elif ignore_pattern and re.match(ignore_pattern,
                                             file_relative_path):
                ignored = True
            elif file_name.startswith(".") or file_name.endswith(
                    "-old") or file_name.endswith("-new"):
                ignored = True
            elif not re.match("^[a-zA-Z0-9_@=\.\+\-/]+$", file_relative_path):
                ignored = True
                self.log.error(
                    "- [ERROR] Only ascii encoded filenames allowed: %s" %
                    file_relative_path)
            elif optional_pattern and re.match(optional_pattern,
                                               file_relative_path):
                optional = True

            if ignored:  # Ignore content.json, defined regexp and files starting with .
                self.log.info("- [SKIPPED] %s" % file_relative_path)
            else:
                file_path = self.site.storage.getPath(dir_inner_path + "/" +
                                                      file_relative_path)
                sha512sum = CryptHash.sha512sum(
                    file_path)  # Calculate sha512 sum of file
                if optional:
                    self.log.info("- [OPTIONAL] %s (SHA512: %s)" %
                                  (file_relative_path, sha512sum))
                    files_optional_node[file_relative_path] = {
                        "sha512": sha512sum,
                        "size": os.path.getsize(file_path)
                    }
                    self.hashfield.appendHash(sha512sum)
                else:
                    self.log.info("- %s (SHA512: %s)" %
                                  (file_relative_path, sha512sum))
                    files_node[file_relative_path] = {
                        "sha512": sha512sum,
                        "size": os.path.getsize(file_path)
                    }
        return files_node, files_optional_node
Example #7
    def actionOptionalFilePin(self, to, inner_path, address=None):
        if type(inner_path) is not list:
            inner_path = [inner_path]
        back = self.setPin(inner_path, 1, address)
        num_file = len(inner_path)
        if back == "ok":
            if num_file == 1:
                self.cmd("notification", ["done", _["Pinned %s"] % html.escape(helper.getFilename(inner_path[0])), 5000])
            else:
                self.cmd("notification", ["done", _["Pinned %s files"] % num_file, 5000])
        self.response(to, back)

    def actionOptionalFileUnpin(self, to, inner_path, address=None):
        if type(inner_path) is not list:
            inner_path = [inner_path]
        back = self.setPin(inner_path, 0, address)
        num_file = len(inner_path)
        if back == "ok":
            if num_file == 1:
                self.cmd("notification", ["done", _["Removed pin from %s"] % helper.getFilename(inner_path[0]), 5000])
            else:
                self.cmd("notification", ["done", _["Removed pin from %s files"] % num_file, 5000])
        self.response(to, back)
Example #9
    def testGetFilename(self):
        assert helper.getFilename("data/users/content.json") == "content.json"
        assert helper.getFilename("data/users") == "users"
        assert helper.getFilename("") == ""
        assert helper.getFilename("content.json") == "content.json"
        assert helper.getFilename("data/users/") == ""
        assert helper.getFilename("/data/users/content.json") == "content.json"
Example #11
    def hashFile(self, dir_inner_path, file_relative_path, optional=False):
        inner_path = dir_inner_path + file_relative_path

        file_size = self.site.storage.getSize(inner_path)
        # Only care about optional files >1MB
        if not optional or file_size < 1 * 1024 * 1024:
            return super(ContentManagerPlugin, self).hashFile(dir_inner_path, file_relative_path, optional)

        back = {}
        content = self.contents.get(dir_inner_path + "content.json")

        hash = None
        piecemap_relative_path = None
        piece_size = None

        # Don't re-hash if it's already in content.json
        if content and file_relative_path in content.get("files_optional", {}):
            file_node = content["files_optional"][file_relative_path]
            if file_node["size"] == file_size:
                self.log.info("- [SAME SIZE] %s" % file_relative_path)
                hash = file_node.get("sha512")
                piecemap_relative_path = file_node.get("piecemap")
                piece_size = file_node.get("piece_size")

        if not hash or not piecemap_relative_path:  # Not in content.json yet
            if file_size < 5 * 1024 * 1024:  # Don't create piecemap automatically for files smaller than 5MB
                return super(ContentManagerPlugin, self).hashFile(dir_inner_path, file_relative_path, optional)

            self.log.info("- [HASHING] %s" % file_relative_path)
            merkle_root, piece_size, piecemap_info = self.hashBigfile(self.site.storage.open(inner_path, "rb"), file_size)
            if not hash:
                hash = merkle_root

            if not piecemap_relative_path:
                file_name = helper.getFilename(file_relative_path)
                piecemap_relative_path = file_relative_path + ".piecemap.msgpack"
                piecemap_inner_path = inner_path + ".piecemap.msgpack"

                msgpack.pack({file_name: piecemap_info}, self.site.storage.open(piecemap_inner_path, "wb"))

                back.update(super(ContentManagerPlugin, self).hashFile(dir_inner_path, piecemap_relative_path, optional=True))

        piece_num = int(math.ceil(float(file_size) / piece_size))

        # Add the merkle root to hashfield
        hash_id = self.site.content_manager.hashfield.getHashId(hash)
        self.optionalDownloaded(inner_path, hash_id, file_size, own=True)
        self.site.storage.piecefields[hash].fromstring("1" * piece_num)

        back[file_relative_path] = {"sha512": hash, "size": file_size, "piecemap": piecemap_relative_path, "piece_size": piece_size}
        return back
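
A small worked example of the piece-count arithmetic used above, with illustrative sizes:

import math

file_size = 42 * 1024 * 1024 + 1   # 42 MB plus one byte (made-up value)
piece_size = 1024 * 1024           # 1 MB pieces (made-up value)
piece_num = int(math.ceil(float(file_size) / piece_size))
assert piece_num == 43             # the trailing partial piece still gets its own entry
# The piecefield is then filled with "1" * piece_num, marking every piece as locally available.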
Example #13
    def hashFiles(self,
                  dir_inner_path,
                  ignore_pattern=None,
                  optional_pattern=None):
        files_node = {}
        files_optional_node = {}
        if dir_inner_path and not self.isValidRelativePath(dir_inner_path):
            ignored = True
            self.log.error(
                "- [ERROR] Only ascii encoded directories allowed: %s" %
                dir_inner_path)

        for file_relative_path in self.site.storage.walk(dir_inner_path):
            file_name = helper.getFilename(file_relative_path)

            ignored = optional = False
            if file_name == "content.json":
                ignored = True
            elif ignore_pattern and SafeRe.match(ignore_pattern,
                                                 file_relative_path):
                ignored = True
            elif file_name.startswith(".") or file_name.endswith(
                    "-old") or file_name.endswith("-new"):
                ignored = True
            elif not self.isValidRelativePath(file_relative_path):
                ignored = True
                self.log.error("- [ERROR] Invalid filename: %s" %
                               file_relative_path)
            elif dir_inner_path == "" and file_relative_path == self.site.storage.getDbFile(
            ):
                ignored = True
            elif optional_pattern and SafeRe.match(optional_pattern,
                                                   file_relative_path):
                optional = True

            if ignored:  # Ignore content.json, defined regexp and files starting with .
                self.log.info("- [SKIPPED] %s" % file_relative_path)
            else:
                if optional:
                    self.log.info("- [OPTIONAL] %s" % file_relative_path)
                    files_optional_node.update(
                        self.hashFile(dir_inner_path,
                                      file_relative_path,
                                      optional=True))
                else:
                    self.log.info("- %s" % file_relative_path)
                    files_node.update(
                        self.hashFile(dir_inner_path, file_relative_path))
        return files_node, files_optional_node
Example #14
    def hashFiles(self,
                  dir_inner_path,
                  ignore_pattern=None,
                  optional_pattern=None):
        files_node = {}
        files_optional_node = {}

        for file_relative_path in self.site.storage.list(dir_inner_path):
            file_name = helper.getFilename(file_relative_path)

            ignored = optional = False
            if file_name == "content.json":
                ignored = True
            elif ignore_pattern and re.match(ignore_pattern,
                                             file_relative_path):
                ignored = True
            elif file_name.startswith("."):
                ignored = True
            elif optional_pattern and re.match(optional_pattern,
                                               file_relative_path):
                optional = True

            if ignored:  # Ignore content.json, defined regexp and files starting with .
                self.log.info("- [SKIPPED] %s" % file_relative_path)
            else:
                file_path = self.site.storage.getPath(dir_inner_path + "/" +
                                                      file_relative_path)
                sha512sum = CryptHash.sha512sum(
                    file_path)  # Calculate sha512 sum of file
                if optional:
                    self.log.info("- [OPTIONAL] %s (SHA512: %s)" %
                                  (file_relative_path, sha512sum))
                    files_optional_node[file_relative_path] = {
                        "sha512": sha512sum,
                        "size": os.path.getsize(file_path)
                    }
                else:
                    self.log.info("- %s (SHA512: %s)" %
                                  (file_relative_path, sha512sum))
                    files_node[file_relative_path] = {
                        "sha512": sha512sum,
                        "size": os.path.getsize(file_path)
                    }
        return files_node, files_optional_node
Example #15
    def hashFiles(self, dir_inner_path, ignore_pattern=None, optional_pattern=None):
        files_node = {}
        files_optional_node = {}
        if not re.match("^[a-zA-Z0-9_@=\.\+-/]*$", dir_inner_path):
            ignored = True
            self.log.error("- [ERROR] Only ascii encoded directories allowed: %s" % dir_inner_path)

        for file_relative_path in self.site.storage.list(dir_inner_path):
            file_name = helper.getFilename(file_relative_path)

            ignored = optional = False
            if file_name == "content.json":
                ignored = True
            elif ignore_pattern and re.match(ignore_pattern, file_relative_path):
                ignored = True
            elif file_name.startswith(".") or file_name.endswith("-old") or file_name.endswith("-new"):
                ignored = True
            elif not re.match("^[a-zA-Z0-9_@=\.\+\-/]+$", file_relative_path):
                ignored = True
                self.log.error("- [ERROR] Only ascii encoded filenames allowed: %s" % file_relative_path)
            elif optional_pattern and re.match(optional_pattern, file_relative_path):
                optional = True

            if ignored:  # Ignore content.json, defined regexp and files starting with .
                self.log.info("- [SKIPPED] %s" % file_relative_path)
            else:
                file_inner_path = dir_inner_path + "/" + file_relative_path
                file_path = self.site.storage.getPath(file_inner_path)
                sha512sum = CryptHash.sha512sum(file_path)  # Calculate sha512 sum of file
                if optional:
                    self.log.info("- [OPTIONAL] %s (SHA512: %s)" % (file_relative_path, sha512sum))
                    file_size = os.path.getsize(file_path)
                    files_optional_node[file_relative_path] = {"sha512": sha512sum, "size": file_size}
                    if not self.hashfield.hasHash(sha512sum):
                        self.optionalDownloaded(file_inner_path, sha512sum, file_size, own=True)
                else:
                    self.log.info("- %s (SHA512: %s)" % (file_relative_path, sha512sum))
                    files_node[file_relative_path] = {"sha512": sha512sum, "size": os.path.getsize(file_path)}
        return files_node, files_optional_node
Example #16
    def hashFiles(self, dir_inner_path, ignore_pattern=None, optional_pattern=None):
        files_node = {}
        files_optional_node = {}
        if dir_inner_path and not self.isValidRelativePath(dir_inner_path):
            ignored = True
            self.log.error("- [ERROR] Only ascii encoded directories allowed: %s" % dir_inner_path)

        for file_relative_path in self.site.storage.walk(dir_inner_path, ignore_pattern):
            file_name = helper.getFilename(file_relative_path)

            ignored = optional = False
            if file_name == "content.json":
                ignored = True
            elif file_name.startswith(".") or file_name.endswith("-old") or file_name.endswith("-new"):
                ignored = True
            elif not self.isValidRelativePath(file_relative_path):
                ignored = True
                self.log.error("- [ERROR] Invalid filename: %s" % file_relative_path)
            elif dir_inner_path == "" and file_relative_path == self.site.storage.getDbFile():
                ignored = True
            elif optional_pattern and SafeRe.match(optional_pattern, file_relative_path):
                optional = True

            if ignored:  # Ignore content.json, defined regexp and files starting with .
                self.log.info("- [SKIPPED] %s" % file_relative_path)
            else:
                if optional:
                    self.log.info("- [OPTIONAL] %s" % file_relative_path)
                    files_optional_node.update(
                        self.hashFile(dir_inner_path, file_relative_path, optional=True)
                    )
                else:
                    self.log.info("- %s" % file_relative_path)
                    files_node.update(
                        self.hashFile(dir_inner_path, file_relative_path)
                    )
        return files_node, files_optional_node
Example #17
    def actionBigfileUpload(self):
        nonce = self.get.get("upload_nonce")
        if nonce not in upload_nonces:
            return self.error403("Upload nonce error.")

        upload_info = upload_nonces[nonce]
        del upload_nonces[nonce]

        self.sendHeader(200,
                        "text/html",
                        noscript=True,
                        extra_headers={
                            "Access-Control-Allow-Origin": "null",
                            "Access-Control-Allow-Credentials": "true"
                        })

        self.readMultipartHeaders(self.env['wsgi.input'])  # Skip http headers

        site = upload_info["site"]
        inner_path = upload_info["inner_path"]

        with site.storage.open(inner_path, "wb", create_dirs=True) as out_file:
            merkle_root, piece_size, piecemap_info = site.content_manager.hashBigfile(
                self.env['wsgi.input'], upload_info["size"],
                upload_info["piece_size"], out_file)

        if len(piecemap_info["sha512_pieces"]) == 1:  # Small file, don't split
            hash = piecemap_info["sha512_pieces"][0].encode("hex")
            hash_id = site.content_manager.hashfield.getHashId(hash)
            site.content_manager.optionalDownloaded(inner_path,
                                                    hash_id,
                                                    upload_info["size"],
                                                    own=True)

        else:  # Big file
            file_name = helper.getFilename(inner_path)
            msgpack.pack({file_name: piecemap_info},
                         site.storage.open(upload_info["piecemap"], "wb"))

            # Find piecemap and file relative path to content.json
            file_info = site.content_manager.getFileInfo(inner_path,
                                                         new_file=True)
            content_inner_path_dir = helper.getDirname(
                file_info["content_inner_path"])
            piecemap_relative_path = upload_info["piecemap"][
                len(content_inner_path_dir):]
            file_relative_path = inner_path[len(content_inner_path_dir):]

            # Add file to content.json
            if site.storage.isFile(file_info["content_inner_path"]):
                content = site.storage.loadJson(
                    file_info["content_inner_path"])
            else:
                content = {}
            if "files_optional" not in content:
                content["files_optional"] = {}

            content["files_optional"][file_relative_path] = {
                "sha512": merkle_root,
                "size": upload_info["size"],
                "piecemap": piecemap_relative_path,
                "piece_size": piece_size
            }

            merkle_root_hash_id = site.content_manager.hashfield.getHashId(
                merkle_root)
            site.content_manager.optionalDownloaded(inner_path,
                                                    merkle_root_hash_id,
                                                    upload_info["size"],
                                                    own=True)
            site.storage.writeJson(file_info["content_inner_path"], content)

            site.content_manager.contents.loadItem(
                file_info["content_inner_path"])  # reload cache

        return {
            "merkle_root": merkle_root,
            "piece_num": len(piecemap_info["sha512_pieces"]),
            "piece_size": piece_size,
            "inner_path": inner_path
        }
Example #18
    def actionBigfileUpload(self):
        nonce = self.get.get("upload_nonce")
        if nonce not in upload_nonces:
            return self.error403("Upload nonce error.")

        upload_info = upload_nonces[nonce]
        del upload_nonces[nonce]

        self.sendHeader(200, "text/html", noscript=True, extra_headers=[
            ("Access-Control-Allow-Origin", "null"),
            ("Access-Control-Allow-Credentials", "true")
        ])

        self.readMultipartHeaders(self.env['wsgi.input'])  # Skip http headers

        site = upload_info["site"]
        inner_path = upload_info["inner_path"]

        with site.storage.open(inner_path, "wb", create_dirs=True) as out_file:
            merkle_root, piece_size, piecemap_info = site.content_manager.hashBigfile(
                self.env['wsgi.input'], upload_info["size"], upload_info["piece_size"], out_file
            )

        if len(piecemap_info["sha512_pieces"]) == 1:  # Small file, don't split
            hash = piecemap_info["sha512_pieces"][0].encode("hex")
            site.content_manager.optionalDownloaded(inner_path, hash, upload_info["size"], own=True)

        else:  # Big file
            file_name = helper.getFilename(inner_path)
            msgpack.pack({file_name: piecemap_info}, site.storage.open(upload_info["piecemap"], "wb"))

            # Find piecemap and file relative path to content.json
            file_info = site.content_manager.getFileInfo(inner_path, new_file=True)
            content_inner_path_dir = helper.getDirname(file_info["content_inner_path"])
            piecemap_relative_path = upload_info["piecemap"][len(content_inner_path_dir):]
            file_relative_path = inner_path[len(content_inner_path_dir):]

            # Add file to content.json
            if site.storage.isFile(file_info["content_inner_path"]):
                content = site.storage.loadJson(file_info["content_inner_path"])
            else:
                content = {}
            if "files_optional" not in content:
                content["files_optional"] = {}

            content["files_optional"][file_relative_path] = {
                "sha512": merkle_root,
                "size": upload_info["size"],
                "piecemap": piecemap_relative_path,
                "piece_size": piece_size
            }

            site.content_manager.optionalDownloaded(inner_path, merkle_root, upload_info["size"], own=True)
            site.storage.writeJson(file_info["content_inner_path"], content)

            site.content_manager.contents.loadItem(file_info["content_inner_path"])  # reload cache

        return {
            "merkle_root": merkle_root,
            "piece_num": len(piecemap_info["sha512_pieces"]),
            "piece_size": piece_size,
            "inner_path": inner_path
        }
Example #19
    def handleBigfileUpload(self, upload_info, read):
        site = upload_info["site"]
        inner_path = upload_info["inner_path"]

        with site.storage.open(inner_path, "wb", create_dirs=True) as out_file:
            merkle_root, piece_size, piecemap_info = site.content_manager.hashBigfile(
                read, upload_info["size"], upload_info["piece_size"], out_file)

        if len(piecemap_info["sha512_pieces"]) == 1:  # Small file, don't split
            hash = binascii.hexlify(piecemap_info["sha512_pieces"][0])
            hash_id = site.content_manager.hashfield.getHashId(hash)
            site.content_manager.optionalDownloaded(inner_path,
                                                    hash_id,
                                                    upload_info["size"],
                                                    own=True)

        else:  # Big file
            file_name = helper.getFilename(inner_path)
            site.storage.open(upload_info["piecemap"], "wb").write(
                Msgpack.pack({file_name: piecemap_info}))

            # Find piecemap and file relative path to content.json
            file_info = site.content_manager.getFileInfo(inner_path,
                                                         new_file=True)
            content_inner_path_dir = helper.getDirname(
                file_info["content_inner_path"])
            piecemap_relative_path = upload_info["piecemap"][
                len(content_inner_path_dir):]
            file_relative_path = inner_path[len(content_inner_path_dir):]

            # Add file to content.json
            if site.storage.isFile(file_info["content_inner_path"]):
                content = site.storage.loadJson(
                    file_info["content_inner_path"])
            else:
                content = {}
            if "files_optional" not in content:
                content["files_optional"] = {}

            content["files_optional"][file_relative_path] = {
                "sha512": merkle_root,
                "size": upload_info["size"],
                "piecemap": piecemap_relative_path,
                "piece_size": piece_size
            }

            merkle_root_hash_id = site.content_manager.hashfield.getHashId(
                merkle_root)
            site.content_manager.optionalDownloaded(inner_path,
                                                    merkle_root_hash_id,
                                                    upload_info["size"],
                                                    own=True)
            site.storage.writeJson(file_info["content_inner_path"], content)

            site.content_manager.contents.loadItem(
                file_info["content_inner_path"])  # reload cache

        return json.dumps({
            "merkle_root": merkle_root,
            "piece_num": len(piecemap_info["sha512_pieces"]),
            "piece_size": piece_size,
            "inner_path": inner_path
        })
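
To make the path arithmetic in these upload handlers concrete, here is a small sketch of the relative-path slicing against the content.json directory, using the "<inner_path>.piecemap.msgpack" naming convention from Example #11; the site address and file name are hypothetical:

content_inner_path_dir = "data/users/1ExampleAddress/"   # helper.getDirname(file_info["content_inner_path"])
inner_path = "data/users/1ExampleAddress/video.mp4"
piecemap_inner_path = inner_path + ".piecemap.msgpack"

file_relative_path = inner_path[len(content_inner_path_dir):]
piecemap_relative_path = piecemap_inner_path[len(content_inner_path_dir):]

assert file_relative_path == "video.mp4"
assert piecemap_relative_path == "video.mp4.piecemap.msgpack"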