Example #1
    def loadContent(self,
                    content_inner_path="content.json",
                    add_bad_files=True,
                    delete_removed_files=True,
                    load_includes=True,
                    force=False):
        content_inner_path = content_inner_path.strip("/")  # Remove / from beginning
        old_content = self.contents.get(content_inner_path)
        content_path = self.site.storage.getPath(content_inner_path)
        content_dir = helper.getDirname(
            self.site.storage.getPath(content_inner_path))
        content_inner_dir = helper.getDirname(content_inner_path)

        if os.path.isfile(content_path):
            try:
                # Check if file is newer than what we have
                if not force and old_content and not self.site.settings.get(
                        "own"):
                    for line in open(content_path):
                        if '"modified"' not in line:
                            continue
                        match = re.search("([0-9\.]+),$", line.strip(" \r\n"))
                        if match and float(match.group(1)) <= old_content.get(
                                "modified", 0):
                            self.log.debug(
                                "%s loadContent same json file, skipping" %
                                content_inner_path)
                            return [], []

                new_content = json.load(open(content_path))
            except Exception, err:
                self.log.warning("%s load error: %s" %
                                 (content_path, Debug.formatException(err)))
                return [], []
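A pattern that recurs throughout these examples: paths listed under "files", "files_optional" and "includes" in a content.json are relative to that content.json's own directory, so they are resolved by prefixing them with helper.getDirname(content_inner_path). Below is a minimal sketch of that resolution step; resolve_file_paths is an illustrative helper (not part of ZeroNet), and it assumes ZeroNet's helper module is importable as in the original codebase.

    from util import helper  # assumption: ZeroNet's helper module

    def resolve_file_paths(content_inner_path, content):
        # Directory of the content.json, e.g. "data/users/" for "data/users/content.json"
        content_inner_dir = helper.getDirname(content_inner_path)
        for file_relative_path in content.get("files", {}).keys():
            # Site-relative path of the listed file, with any leading "/" stripped
            yield (content_inner_dir + file_relative_path).strip("/")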
Example #2
    def deleteFiles(self):
        self.log.debug("Deleting files from content.json...")
        files = []  # Get filenames
        for content_inner_path in self.site.content_manager.contents.keys():
            content = self.site.content_manager.contents[content_inner_path]
            files.append(content_inner_path)
            # Add normal files
            for file_relative_path in content.get("files", {}).keys():
                file_inner_path = helper.getDirname(content_inner_path) + file_relative_path  # Relative to site dir
                files.append(file_inner_path)
            # Add optional files
            for file_relative_path in content.get("files_optional", {}).keys():
                file_inner_path = helper.getDirname(content_inner_path) + file_relative_path  # Relative to site dir
                files.append(file_inner_path)

        if self.isFile("dbschema.json"):
            self.log.debug("Deleting db file...")
            self.closeDb()
            self.has_db = False
            try:
                schema = self.loadJson("dbschema.json")
                db_path = self.getPath(schema["db_file"])
                if os.path.isfile(db_path):
                    os.unlink(db_path)
            except Exception, err:
                self.log.error("Db file delete error: %s" % err)
Example #3
    def deleteFiles(self):
        self.log.debug("Deleting files from content.json...")
        files = []  # Get filenames
        for content_inner_path in self.site.content_manager.contents.keys():
            content = self.site.content_manager.contents[content_inner_path]
            files.append(content_inner_path)
            # Add normal files
            for file_relative_path in content.get("files", {}).keys():
                file_inner_path = helper.getDirname(content_inner_path) + file_relative_path  # Relative to site dir
                files.append(file_inner_path)
            # Add optional files
            for file_relative_path in content.get("files_optional", {}).keys():
                file_inner_path = helper.getDirname(content_inner_path) + file_relative_path  # Relative to site dir
                files.append(file_inner_path)

        if self.isFile("dbschema.json"):
            self.log.debug("Deleting db file...")
            self.closeDb()
            self.has_db = False
            try:
                schema = self.loadJson("dbschema.json")
                db_path = self.getPath(schema["db_file"])
                if os.path.isfile(db_path):
                    os.unlink(db_path)
            except Exception, err:
                self.log.error("Db file delete error: %s" % err)
Example #4
    def verifyFiles(self, quick_check=False):  # Fast = using file size
        bad_files = []

        if not self.site.content_manager.contents.get("content.json"):  # No content.json, download it first
            self.site.needFile("content.json", update=True)  # Force update to fix corrupt file
            self.site.content_manager.loadContent()  # Reload content.json
        for content_inner_path, content in self.site.content_manager.contents.items():
            if not os.path.isfile(self.getPath(content_inner_path)):  # Missing content.json file
                self.log.debug("[MISSING] %s" % content_inner_path)
                bad_files.append(content_inner_path)

            for file_relative_path in content.get("files", {}).keys():
                file_inner_path = helper.getDirname(content_inner_path) + file_relative_path  # Relative to site dir
                file_inner_path = file_inner_path.strip("/")  # Strip leading /
                file_path = self.getPath(file_inner_path)
                if not os.path.isfile(file_path):
                    self.log.debug("[MISSING] %s" % file_inner_path)
                    bad_files.append(file_inner_path)
                    continue

                if quick_check:
                    ok = os.path.getsize(file_path) == content["files"][file_relative_path]["size"]
                else:
                    ok = self.site.content_manager.verifyFile(file_inner_path, open(file_path, "rb"))

                if not ok:
                    self.log.debug("[CHANGED] %s" % file_inner_path)
                    bad_files.append(file_inner_path)

            # Optional files
            optional_added = 0
            optional_removed = 0
            for file_relative_path in content.get("files_optional", {}).keys():
                file_inner_path = helper.getDirname(content_inner_path) + file_relative_path  # Relative to site dir
                file_inner_path = file_inner_path.strip("/")  # Strip leading /
                file_path = self.getPath(file_inner_path)
                if not os.path.isfile(file_path):
                    self.site.content_manager.hashfield.removeHash(content["files_optional"][file_relative_path]["sha512"])
                    continue

                if quick_check:
                    ok = os.path.getsize(file_path) == content["files_optional"][file_relative_path]["size"]
                else:
                    ok = self.site.content_manager.verifyFile(file_inner_path, open(file_path, "rb"))

                if ok:
                    self.site.content_manager.hashfield.appendHash(content["files_optional"][file_relative_path]["sha512"])
                    optional_added += 1
                else:
                    self.site.content_manager.hashfield.removeHash(content["files_optional"][file_relative_path]["sha512"])
                    optional_removed += 1
                    self.log.debug("[OPTIONAL CHANGED] %s" % file_inner_path)

            self.log.debug(
                "%s verified: %s, quick: %s, bad: %s, optionals: +%s -%s" %
                (content_inner_path, len(content["files"]), quick_check, bad_files, optional_added, optional_removed)
            )

        return bad_files
Example #5
    def loadContent(self, content_inner_path="content.json", add_bad_files=True, delete_removed_files=True, load_includes=True):
        content_inner_path = content_inner_path.strip("/")  # Remove / from beginning
        old_content = self.contents.get(content_inner_path)
        content_path = self.site.storage.getPath(content_inner_path)
        content_dir = helper.getDirname(self.site.storage.getPath(content_inner_path))
        content_inner_dir = helper.getDirname(content_inner_path)

        if os.path.isfile(content_path):
            try:
                new_content = json.load(open(content_path))
            except Exception, err:
                self.log.error("%s load error: %s" % (content_path, Debug.formatException(err)))
                return [], []
Example #6
    def getUserContentRules(self, parent_content, inner_path, content):
        user_contents = parent_content["user_contents"]

        # Delivered for directory
        if "inner_path" in parent_content:
            parent_content_dir = helper.getDirname(parent_content["inner_path"])
            user_address = re.match("([A-Za-z0-9]*?)/", inner_path[len(parent_content_dir):]).group(1)
        else:
            user_address = re.match(".*/([A-Za-z0-9]*?)/.*?$", inner_path).group(1)

        try:
            if not content:
                content = self.site.storage.loadJson(inner_path)  # Read the file if no content specified
            user_urn = "%s/%s" % (content["cert_auth_type"], content["cert_user_id"])  # Format: cert_auth_type/cert_user_id
            cert_user_id = content["cert_user_id"]
        except Exception:  # content.json does not exist
            user_urn = "n-a/n-a"
            cert_user_id = "n-a"

        if user_address in user_contents["permissions"]:
            rules = copy.copy(user_contents["permissions"].get(user_address, {}))  # Default rules based on address
        else:
            rules = copy.copy(user_contents["permissions"].get(cert_user_id, {}))  # Default rules based on username

        if rules is False:
            banned = True
            rules = {}
        else:
            banned = False
        if "signers" in rules:
            rules["signers"] = rules["signers"][:]  # Make copy of the signers
        for permission_pattern, permission_rules in user_contents["permission_rules"].items():  # Regexp rules
            if not SafeRe.match(permission_pattern, user_urn):
                continue  # Rule is not valid for user
            # Update rules if it's better than the currently recorded ones
            for key, val in permission_rules.iteritems():
                if key not in rules:
                    if type(val) is list:
                        rules[key] = val[:]  # Make copy
                    else:
                        rules[key] = val
                elif type(val) is int:  # Int, update if larger
                    if val > rules[key]:
                        rules[key] = val
                elif hasattr(val, "startswith"):  # String, update if longer
                    if len(val) > len(rules[key]):
                        rules[key] = val
                elif type(val) is list:  # List, append
                    rules[key] += val

        rules["cert_signers"] = user_contents["cert_signers"]  # Add valid cert signers
        if "signers" not in rules:
            rules["signers"] = []

        if not banned:
            rules["signers"].append(user_address)  # Add user as valid signer
        rules["user_address"] = user_address
        rules["includes_allowed"] = False

        return rules
Example #7
    def actionBigfileUploadInit(self, to, inner_path, size):
        valid_signers = self.site.content_manager.getValidSigners(inner_path)
        auth_address = self.user.getAuthAddress(self.site.address)
        if not self.site.settings["own"] and auth_address not in valid_signers:
            self.log.error("FileWrite forbidden %s not in valid_signers %s" %
                           (auth_address, valid_signers))
            return self.response(
                to, {"error": "Forbidden, you can only modify your own files"})

        nonce = CryptHash.random()
        piece_size = 1024 * 1024
        inner_path = self.site.content_manager.sanitizePath(inner_path)
        file_info = self.site.content_manager.getFileInfo(inner_path,
                                                          new_file=True)

        content_inner_path_dir = helper.getDirname(
            file_info["content_inner_path"])
        file_relative_path = inner_path[len(content_inner_path_dir):]

        upload_nonces[nonce] = {
            "added": time.time(),
            "site": self.site,
            "inner_path": inner_path,
            "websocket_client": self,
            "size": size,
            "piece_size": piece_size,
            "piecemap": inner_path + ".piecemap.msgpack"
        }
        return {
            "url": "/ZeroNet-Internal/BigfileUpload?upload_nonce=" + nonce,
            "piece_size": piece_size,
            "inner_path": inner_path,
            "file_relative_path": file_relative_path
        }
Example #8
 def setContent(self, site, inner_path, content, size=0):
     super(ContentDbPlugin, self).setContent(site,
                                             inner_path,
                                             content,
                                             size=size)
     old_content = site.content_manager.contents.get(inner_path, {})
     if (
             not self.need_filling or self.filled.get(site.address)
     ) and "files_optional" in content or "files_optional" in old_content:
         self.setContentFilesOptional(site, inner_path, content)
         # Check deleted files
         if old_content:
             old_files = old_content.get("files_optional", {}).keys()
             new_files = content.get("files_optional", {}).keys()
             content_inner_dir = helper.getDirname(inner_path)
             deleted = [
                 content_inner_dir + key for key in old_files
                 if key not in new_files
             ]
             if deleted:
                 site_id = self.site_ids[site.address]
                 self.execute("DELETE FROM file_optional WHERE ?", {
                     "site_id": site_id,
                     "inner_path": deleted
                 })
Example #9
    def getUserContentRules(self, parent_content, inner_path, content):
        user_contents = parent_content["user_contents"]

        # Delivered for directory
        if "inner_path" in parent_content:
            parent_content_dir = helper.getDirname(parent_content["inner_path"])
            user_address = re.match("([A-Za-z0-9]*?)/", inner_path[len(parent_content_dir):]).group(1)
        else:
            user_address = re.match(".*/([A-Za-z0-9]*?)/.*?$", inner_path).group(1)

        try:
            if not content:
                content = self.site.storage.loadJson(inner_path)  # Read the file if no content specified
            user_urn = "%s/%s" % (content["cert_auth_type"], content["cert_user_id"])  # Format: cert_auth_type/cert_user_id
            cert_user_id = content["cert_user_id"]
        except Exception:  # content.json does not exist
            user_urn = "n-a/n-a"
            cert_user_id = "n-a"

        if user_address in user_contents["permissions"]:
            rules = copy.copy(user_contents["permissions"].get(user_address, {}))  # Default rules based on address
        else:
            rules = copy.copy(user_contents["permissions"].get(cert_user_id, {}))  # Default rules based on username

        if rules is False:
            banned = True
            rules = {}
        else:
            banned = False
        if "signers" in rules:
            rules["signers"] = rules["signers"][:]  # Make copy of the signers
        for permission_pattern, permission_rules in user_contents["permission_rules"].items():  # Regexp rules
            if not SafeRe.match(permission_pattern, user_urn):
                continue  # Rule is not valid for user
            # Update rules if it's better than the currently recorded ones
            for key, val in permission_rules.iteritems():
                if key not in rules:
                    if type(val) is list:
                        rules[key] = val[:]  # Make copy
                    else:
                        rules[key] = val
                elif type(val) is int:  # Int, update if larger
                    if val > rules[key]:
                        rules[key] = val
                elif hasattr(val, "startswith"):  # String, update if longer
                    if len(val) > len(rules[key]):
                        rules[key] = val
                elif type(val) is list:  # List, append
                    rules[key] += val

        rules["cert_signers"] = user_contents["cert_signers"]  # Add valid cert signers
        if "signers" not in rules:
            rules["signers"] = []

        if not banned:
            rules["signers"].append(user_address)  # Add user as valid signer
        rules["user_address"] = user_address
        rules["includes_allowed"] = False

        return rules
Example #10
    def actionBigfileUploadInit(self, to, inner_path, size):
        valid_signers = self.site.content_manager.getValidSigners(inner_path)
        auth_address = self.user.getAuthAddress(self.site.address)
        if not self.site.settings["own"] and auth_address not in valid_signers:
            self.log.error("FileWrite forbidden %s not in valid_signers %s" % (auth_address, valid_signers))
            return self.response(to, {"error": "Forbidden, you can only modify your own files"})

        nonce = CryptHash.random()
        piece_size = 1024 * 1024
        inner_path = self.site.content_manager.sanitizePath(inner_path)
        file_info = self.site.content_manager.getFileInfo(inner_path, new_file=True)

        content_inner_path_dir = helper.getDirname(file_info["content_inner_path"])
        file_relative_path = inner_path[len(content_inner_path_dir):]

        upload_nonces[nonce] = {
            "added": time.time(),
            "site": self.site,
            "inner_path": inner_path,
            "websocket_client": self,
            "size": size,
            "piece_size": piece_size,
            "piecemap": inner_path + ".piecemap.msgpack"
        }
        return {
            "url": "/ZeroNet-Internal/BigfileUpload?upload_nonce=" + nonce,
            "piece_size": piece_size,
            "inner_path": inner_path,
            "file_relative_path": file_relative_path
        }
Example #11
 def getDiffs(self, inner_path, limit=30 * 1024, update_files=True):
     if inner_path not in self.contents:
         return {}
     diffs = {}
     content_inner_path_dir = helper.getDirname(inner_path)
     for file_relative_path in self.contents[inner_path].get("files", {}):
         file_inner_path = content_inner_path_dir + file_relative_path
         if self.site.storage.isFile(file_inner_path + "-new"):  # New version present
             diffs[file_relative_path] = Diff.diff(
                 list(self.site.storage.open(file_inner_path)),
                 list(self.site.storage.open(file_inner_path + "-new")),
                 limit=limit
             )
             if update_files:
                 self.site.storage.delete(file_inner_path)
                 self.site.storage.rename(file_inner_path + "-new", file_inner_path)
         if self.site.storage.isFile(file_inner_path + "-old"):  # Old version present
             diffs[file_relative_path] = Diff.diff(
                 list(self.site.storage.open(file_inner_path + "-old")),
                 list(self.site.storage.open(file_inner_path)),
                 limit=limit
             )
             if update_files:
                 self.site.storage.delete(file_inner_path + "-old")
     return diffs
Example #12
    def getFileInfo(self, inner_path, new_file=False):
        dirs = inner_path.split("/")  # Parent dirs of content.json
        inner_path_parts = [dirs.pop()]  # Filename relative to content.json
        while True:
            content_inner_path = "%s/content.json" % "/".join(dirs)
            content_inner_path = content_inner_path.strip("/")
            content = self.contents.get(content_inner_path)

            # Check in files
            if content and "files" in content:
                back = content["files"].get("/".join(inner_path_parts))
                if back:
                    back["content_inner_path"] = content_inner_path
                    back["optional"] = False
                    back["relative_path"] = "/".join(inner_path_parts)
                    return back

            # Check in optional files
            if content and "files_optional" in content:  # Check if file in this content.json
                back = content["files_optional"].get(
                    "/".join(inner_path_parts))
                if back:
                    back["content_inner_path"] = content_inner_path
                    back["optional"] = True
                    back["relative_path"] = "/".join(inner_path_parts)
                    return back

            # Return the rules if user dir
            if content and "user_contents" in content:
                back = content["user_contents"]
                content_inner_path_dir = helper.getDirname(content_inner_path)
                relative_content_path = inner_path[len(content_inner_path_dir):]
                if "/" in relative_content_path:
                    user_auth_address = re.match("([A-Za-z0-9]+)/.*", relative_content_path).group(1)
                    back["content_inner_path"] = "%s%s/content.json" % (content_inner_path_dir, user_auth_address)
                else:
                    back["content_inner_path"] = content_inner_path_dir + "content.json"
                back["optional"] = None
                back["relative_path"] = "/".join(inner_path_parts)
                return back

            if new_file and content:
                back = {}
                back["content_inner_path"] = content_inner_path
                back["relative_path"] = "/".join(inner_path_parts)
                back["optional"] = None
                return back

            # No inner path in this dir, lets try the parent dir
            if dirs:
                inner_path_parts.insert(0, dirs.pop())
            else:  # No more parent dirs
                break

        # Not found
        return False
Example #13
 def getPiecemap(self, inner_path):
     file_info = self.site.content_manager.getFileInfo(inner_path)
     piecemap_inner_path = helper.getDirname(file_info["content_inner_path"]) + file_info["piecemap"]
     self.site.needFile(piecemap_inner_path, priority=20)
     piecemap = msgpack.unpack(self.site.storage.open(piecemap_inner_path))[helper.getFilename(inner_path)]
     piecemap["piece_size"] = file_info["piece_size"]
     return piecemap
Example #14
 def listContents(self, inner_path="content.json", user_files=False):
     back = [inner_path]
     content_inner_dir = helper.getDirname(inner_path)
     for relative_path in self.contents[inner_path].get("includes", {}).keys():
         include_inner_path = content_inner_dir + relative_path
         back += self.listContents(include_inner_path)
     return back
Example #15
 def getPiecemap(self, inner_path):
     file_info = self.site.content_manager.getFileInfo(inner_path)
     piecemap_inner_path = helper.getDirname(file_info["content_inner_path"]) + file_info["piecemap"]
     self.site.needFile(piecemap_inner_path, priority=20)
     piecemap = msgpack.unpack(self.site.storage.open(piecemap_inner_path))[helper.getFilename(inner_path)]
     piecemap["piece_size"] = file_info["piece_size"]
     return piecemap
Example #16
 def getDiffs(self, inner_path, limit=30 * 1024, update_files=True):
     if inner_path not in self.contents:
         return None
     diffs = {}
     content_inner_path_dir = helper.getDirname(inner_path)
     for file_relative_path in self.contents[inner_path].get("files", {}):
         file_inner_path = content_inner_path_dir + file_relative_path
         if self.site.storage.isFile(file_inner_path + "-new"):  # New version present
             diffs[file_relative_path] = Diff.diff(
                 list(self.site.storage.open(file_inner_path)),
                 list(self.site.storage.open(file_inner_path + "-new")),
                 limit=limit
             )
             if update_files:
                 self.site.storage.delete(file_inner_path)
                 self.site.storage.rename(file_inner_path + "-new", file_inner_path)
         if self.site.storage.isFile(file_inner_path + "-old"):  # Old version present
             diffs[file_relative_path] = Diff.diff(
                 list(self.site.storage.open(file_inner_path + "-old")),
                 list(self.site.storage.open(file_inner_path)),
                 limit=limit
             )
             if update_files:
                 self.site.storage.delete(file_inner_path + "-old")
     return diffs
Example #17
 def getDbFiles(self):
     found = 0
     for content_inner_path, content in self.site.content_manager.contents.iteritems():
         # content.json file itself
         if self.isFile(content_inner_path):
             yield content_inner_path, self.getPath(content_inner_path)
         else:
             self.log.error("[MISSING] %s" % content_inner_path)
         # Data files in content.json
         content_inner_path_dir = helper.getDirname(content_inner_path)  # Content.json dir relative to site
         for file_relative_path in content.get("files", {}).keys() + content.get("files_optional", {}).keys():
             if not file_relative_path.endswith(".json") and not file_relative_path.endswith("json.gz"):
                 continue  # We are only interested in json files
             file_inner_path = content_inner_path_dir + file_relative_path  # File Relative to site dir
             file_inner_path = file_inner_path.strip("/")  # Strip leading /
             if self.isFile(file_inner_path):
                 yield file_inner_path, self.getPath(file_inner_path)
             else:
                 self.log.error("[MISSING] %s" % file_inner_path)
             found += 1
             if found % 100 == 0:
                 time.sleep(0.000001)  # Context switch to avoid UI block
Example #18
    def downloadContent(self, inner_path, download_files=True, peer=None, check_modifications=False, diffs={}):
        s = time.time()
        if config.verbose:
            self.log.debug("Downloading %s..." % inner_path)
        found = self.needFile(inner_path, update=self.bad_files.get(inner_path))
        content_inner_dir = helper.getDirname(inner_path)
        if not found:
            self.log.debug("Download %s failed, check_modifications: %s" % (inner_path, check_modifications))
            if check_modifications:  # Download failed, but check modifications anyway if it succeeds later
                self.onFileDone.once(lambda file_name: self.checkModifications(0), "check_modifications")
            return False  # Could not download content.json

        if config.verbose:
            self.log.debug("Got %s" % inner_path)
        changed, deleted = self.content_manager.loadContent(inner_path, load_includes=False)

        if peer:  # Update last received update from peer to prevent re-sending the same update to it
            peer.last_content_json_update = self.content_manager.contents[inner_path]["modified"]

        # Start download files
        file_threads = []
        if download_files:
            for file_relative_path in self.content_manager.contents[inner_path].get("files", {}).keys():
                file_inner_path = content_inner_dir + file_relative_path

                # Try to diff first
                diff_success = False
                diff_actions = diffs.get(file_relative_path)
                if diff_actions and self.bad_files.get(file_inner_path):
                    try:
                        new_file = Diff.patch(self.storage.open(file_inner_path, "rb"), diff_actions)
                        new_file.seek(0)
                        diff_success = self.content_manager.verifyFile(file_inner_path, new_file)
                        if diff_success:
                            self.log.debug("Patched successfully: %s" % file_inner_path)
                            new_file.seek(0)
                            self.storage.write(file_inner_path, new_file)
                            self.onFileDone(file_inner_path)
                    except Exception, err:
                        self.log.debug("Failed to patch %s: %s" % (file_inner_path, err))
                        diff_success = False

                if not diff_success:
                    # Start download and don't wait for it to finish; return the event
                    res = self.needFile(file_inner_path, blocking=False, update=self.bad_files.get(file_inner_path), peer=peer)
                    if res is not True and res is not False:  # Need downloading and file is allowed
                        file_threads.append(res)  # Append evt

            # Optional files
            if inner_path == "content.json":
                gevent.spawn(self.updateHashfield)

            if self.settings.get("autodownloadoptional"):
                for file_relative_path in self.content_manager.contents[inner_path].get("files_optional", {}).keys():
                    file_inner_path = content_inner_dir + file_relative_path
                    # Start download and don't wait for it to finish; return the event
                    res = self.needFile(file_inner_path, blocking=False, update=self.bad_files.get(file_inner_path), peer=peer)
                    if res is not True and res is not False:  # Need downloading and file is allowed
                        file_threads.append(res)  # Append evt
Example #19
    def setContentFilesOptional(self,
                                site,
                                content_inner_path,
                                content,
                                cur=None):
        if not cur:
            cur = self
            try:
                cur.execute("BEGIN")
            except Exception as err:
                self.log.warning(
                    "Transaction begin error %s %s: %s" %
                    (site, content_inner_path, Debug.formatException(err)))

        num = 0
        site_id = self.site_ids[site.address]
        content_inner_dir = helper.getDirname(content_inner_path)
        for relative_inner_path, file in content.get("files_optional", {}).iteritems():
            file_inner_path = content_inner_dir + relative_inner_path
            hash_id = int(file["sha512"][0:4], 16)
            if hash_id in site.content_manager.hashfield:
                is_downloaded = 1
            else:
                is_downloaded = 0
            if site.address + "/" + content_inner_dir in self.my_optional_files:
                is_pinned = 1
            else:
                is_pinned = 0
            cur.insertOrUpdate(
                "file_optional",
                {"hash_id": hash_id, "size": int(file["size"])},
                {"site_id": site_id, "inner_path": file_inner_path},
                oninsert={
                    "time_added": int(time.time()),
                    "time_downloaded": int(time.time()) if is_downloaded else 0,
                    "is_downloaded": is_downloaded,
                    "peer": is_downloaded,
                    "is_pinned": is_pinned
                }
            )
            self.optional_files[site_id][file_inner_path[-8:]] = 1
            num += 1

        if cur == self:
            try:
                cur.execute("END")
            except Exception as err:
                self.log.warning(
                    "Transaction end error %s %s: %s" %
                    (site, content_inner_path, Debug.formatException(err)))
        return num
Example #20
    def downloadContent(self, inner_path, download_files=True, peer=None, check_modifications=False, diffs={}):
        s = time.time()
        if config.verbose:
            self.log.debug("Downloading %s..." % inner_path)
        found = self.needFile(inner_path, update=self.bad_files.get(inner_path))
        content_inner_dir = helper.getDirname(inner_path)
        if not found:
            self.log.debug("Download %s failed, check_modifications: %s" % (inner_path, check_modifications))
            if check_modifications:  # Download failed, but check modifications anyway if it succeeds later
                self.onFileDone.once(lambda file_name: self.checkModifications(0), "check_modifications")
            return False  # Could not download content.json

        if config.verbose:
            self.log.debug("Got %s" % inner_path)
        changed, deleted = self.content_manager.loadContent(inner_path, load_includes=False)

        if peer:  # Update last received update from peer to prevent re-sending the same update to it
            peer.last_content_json_update = self.content_manager.contents[inner_path]["modified"]

        # Start download files
        file_threads = []
        if download_files:
            for file_relative_path in self.content_manager.contents[inner_path].get("files", {}).keys():
                file_inner_path = content_inner_dir + file_relative_path

                # Try to diff first
                diff_success = False
                diff_actions = diffs.get(file_relative_path)
                if diff_actions and self.bad_files.get(file_inner_path):
                    try:
                        new_file = Diff.patch(self.storage.open(file_inner_path, "rb"), diff_actions)
                        new_file.seek(0)
                        diff_success = self.content_manager.verifyFile(file_inner_path, new_file)
                        if diff_success:
                            self.log.debug("Patched successfully: %s" % file_inner_path)
                            new_file.seek(0)
                            self.storage.write(file_inner_path, new_file)
                            self.onFileDone(file_inner_path)
                    except Exception, err:
                        self.log.debug("Failed to patch %s: %s" % (file_inner_path, err))
                        diff_success = False

                if not diff_success:
                    # Start download and don't wait for it to finish; return the event
                    res = self.needFile(file_inner_path, blocking=False, update=self.bad_files.get(file_inner_path), peer=peer)
                    if res is not True and res is not False:  # Need downloading and file is allowed
                        file_threads.append(res)  # Append evt

            # Optional files
            if inner_path == "content.json":
                gevent.spawn(self.updateHashfield)

            if self.settings.get("autodownloadoptional"):
                for file_relative_path in self.content_manager.contents[inner_path].get("files_optional", {}).keys():
                    file_inner_path = content_inner_dir + file_relative_path
                    # Start download and don't wait for it to finish; return the event
                    res = self.needFile(file_inner_path, blocking=False, update=self.bad_files.get(file_inner_path), peer=peer)
                    if res is not True and res is not False:  # Need downloading and file is allowed
                        file_threads.append(res)  # Append evt
Example #21
    def downloadContent(self, inner_path, download_files=True, peer=None, check_modifications=False):
        s = time.time()
        self.log.debug("Downloading %s..." % inner_path)
        found = self.needFile(inner_path, update=self.bad_files.get(inner_path))
        content_inner_dir = helper.getDirname(inner_path)
        if not found:
            self.log.debug("Download %s failed, check_modifications: %s" % (inner_path, check_modifications))
            if check_modifications:  # Download failed, but check modifications anyway if it succeeds later
                self.onFileDone.once(lambda file_name: self.checkModifications(0), "check_modifications")
            return False  # Could not download content.json

        self.log.debug("Got %s" % inner_path)
        changed, deleted = self.content_manager.loadContent(inner_path, load_includes=False)

        # Start download files
        file_threads = []
        if download_files:
            for file_relative_path in self.content_manager.contents[inner_path].get("files", {}).keys():
                file_inner_path = content_inner_dir + file_relative_path
                # Start download and don't wait for it to finish; return the event
                res = self.needFile(file_inner_path, blocking=False, update=self.bad_files.get(file_inner_path), peer=peer)
                if res is not True and res is not False:  # Need downloading and file is allowed
                    file_threads.append(res)  # Append evt

            # Optional files
            if inner_path == "content.json":
                gevent.spawn(self.updateHashfield)

            if self.settings.get("autodownloadoptional"):
                for file_relative_path in self.content_manager.contents[inner_path].get("files_optional", {}).keys():
                    file_inner_path = content_inner_dir + file_relative_path
                    # Start download and don't wait for it to finish; return the event
                    res = self.needFile(file_inner_path, blocking=False, update=self.bad_files.get(file_inner_path), peer=peer)
                    if res is not True and res is not False:  # Need downloading and file is allowed
                        file_threads.append(res)  # Append evt

        # Wait for includes download
        include_threads = []
        for file_relative_path in self.content_manager.contents[inner_path].get("includes", {}).keys():
            file_inner_path = content_inner_dir + file_relative_path
            include_thread = gevent.spawn(self.downloadContent, file_inner_path, download_files=download_files, peer=peer)
            include_threads.append(include_thread)

        self.log.debug("%s: Downloading %s includes..." % (inner_path, len(include_threads)))
        gevent.joinall(include_threads)
        self.log.debug("%s: Includes download ended" % inner_path)

        if check_modifications:  # Check if every file is up-to-date
            self.checkModifications(0)

        self.log.debug("%s: Downloading %s files, changed: %s..." % (inner_path, len(file_threads), len(changed)))
        gevent.joinall(file_threads)
        self.log.debug("%s: DownloadContent ended in %.2fs" % (inner_path, time.time() - s))

        if not self.worker_manager.tasks:
            self.onComplete()  # No more task trigger site complete

        return True
Example #22
    def actionBigfileUploadInit(self, to, inner_path, size, protocol="xhr"):
        valid_signers = self.site.content_manager.getValidSigners(inner_path)
        auth_address = self.user.getAuthAddress(self.site.address)
        if not self.site.settings["own"] and auth_address not in valid_signers:
            self.log.error("FileWrite forbidden %s not in valid_signers %s" %
                           (auth_address, valid_signers))
            return self.response(
                to, {"error": "Forbidden, you can only modify your own files"})

        nonce = CryptHash.random()
        piece_size = 1024 * 1024
        inner_path = self.site.content_manager.sanitizePath(inner_path)
        file_info = self.site.content_manager.getFileInfo(inner_path,
                                                          new_file=True)

        content_inner_path_dir = helper.getDirname(
            file_info["content_inner_path"])
        file_relative_path = inner_path[len(content_inner_path_dir):]

        upload_nonces[nonce] = {
            "added": time.time(),
            "site": self.site,
            "inner_path": inner_path,
            "websocket_client": self,
            "size": size,
            "piece_size": piece_size,
            "piecemap": inner_path + ".piecemap.msgpack"
        }

        if protocol == "xhr":
            return {
                "url": "/ZeroNet-Internal/BigfileUpload?upload_nonce=" + nonce,
                "piece_size": piece_size,
                "inner_path": inner_path,
                "file_relative_path": file_relative_path
            }
        elif protocol == "websocket":
            server_url = self.request.getWsServerUrl()
            if server_url:
                proto, host = server_url.split("://")
                origin = proto.replace("http", "ws") + "://" + host
            else:
                origin = "{origin}"
            return {
                "url": origin + "/ZeroNet-Internal/BigfileUploadWebsocket?upload_nonce=" + nonce,
                "piece_size": piece_size,
                "inner_path": inner_path,
                "file_relative_path": file_relative_path
            }
        else:
            return {"error": "Unknown protocol"}
Example #23
 def listContents(self, inner_path="content.json", user_files=False):
     if inner_path not in self.contents:
         return []
     back = [inner_path]
     content_inner_dir = helper.getDirname(inner_path)
     for relative_path in self.contents[inner_path].get("includes", {}).keys():
         include_inner_path = content_inner_dir + relative_path
         back += self.listContents(include_inner_path)
     return back
Example #24
    def getFileInfo(self, inner_path, new_file=False):
        dirs = inner_path.split("/")  # Parent dirs of content.json
        inner_path_parts = [dirs.pop()]  # Filename relative to content.json
        while True:
            content_inner_path = "%s/content.json" % "/".join(dirs)
            content_inner_path = content_inner_path.strip("/")
            content = self.contents.get(content_inner_path)

            # Check in files
            if content and "files" in content:
                back = content["files"].get("/".join(inner_path_parts))
                if back:
                    back["content_inner_path"] = content_inner_path
                    back["optional"] = False
                    back["relative_path"] = "/".join(inner_path_parts)
                    return back

            # Check in optional files
            if content and "files_optional" in content:  # Check if file in this content.json
                back = content["files_optional"].get("/".join(inner_path_parts))
                if back:
                    back["content_inner_path"] = content_inner_path
                    back["optional"] = True
                    back["relative_path"] = "/".join(inner_path_parts)
                    return back

            # Return the rules if user dir
            if content and "user_contents" in content:
                back = content["user_contents"]
                content_inner_path_dir = helper.getDirname(content_inner_path)
                relative_content_path = inner_path[len(content_inner_path_dir):]
                user_auth_address_match = re.match("([A-Za-z0-9]+)/.*", relative_content_path)
                if user_auth_address_match:
                    user_auth_address = user_auth_address_match.group(1)
                    back["content_inner_path"] = "%s%s/content.json" % (content_inner_path_dir, user_auth_address)
                else:
                    back["content_inner_path"] = content_inner_path_dir + "content.json"
                back["optional"] = None
                back["relative_path"] = "/".join(inner_path_parts)
                return back

            if new_file and content:
                back = {}
                back["content_inner_path"] = content_inner_path
                back["relative_path"] = "/".join(inner_path_parts)
                back["optional"] = None
                return back

            # No inner path in this dir, lets try the parent dir
            if dirs:
                inner_path_parts.insert(0, dirs.pop())
            else:  # No more parent dirs
                break

        # Not found
        return False
Example #25
    def addTask(self, inner_path, *args, **kwargs):
        file_info = kwargs.get("file_info")
        if file_info and "piecemap" in file_info:  # Bigfile
            self.site.settings["has_bigfile"] = True

            piecemap_inner_path = helper.getDirname(
                file_info["content_inner_path"]) + file_info["piecemap"]
            piecemap_task = None
            if not self.site.storage.isFile(piecemap_inner_path):
                # Start download piecemap
                piecemap_task = super(WorkerManagerPlugin, self).addTask(piecemap_inner_path, priority=30)
                autodownload_bigfile_size_limit = self.site.settings.get(
                    "autodownload_bigfile_size_limit",
                    config.autodownload_bigfile_size_limit)
                if "|" not in inner_path and self.site.isDownloadable(
                        inner_path
                ) and file_info[
                        "size"] / 1024 / 1024 <= autodownload_bigfile_size_limit:
                    gevent.spawn_later(0.1, self.site.needFile, inner_path +
                                       "|all")  # Download all pieces

            if "|" in inner_path:
                # Start download piece
                task = super(WorkerManagerPlugin, self).addTask(inner_path, *args, **kwargs)

                inner_path, file_range = inner_path.split("|")
                pos_from, pos_to = map(int, file_range.split("-"))
                task["piece_i"] = pos_from / file_info["piece_size"]
                task["sha512"] = file_info["sha512"]
            else:
                if inner_path in self.site.bad_files:
                    del self.site.bad_files[inner_path]
                if piecemap_task:
                    task = piecemap_task
                else:
                    fake_evt = gevent.event.AsyncResult()  # Don't download anything if no range specified
                    fake_evt.set(True)
                    task = {"evt": fake_evt}

            if not self.site.storage.isFile(inner_path):
                self.site.storage.createSparseFile(inner_path, file_info["size"], file_info["sha512"])
                piece_num = int(math.ceil(float(file_info["size"]) / file_info["piece_size"]))
                self.site.storage.piecefields[file_info["sha512"]].fromstring("0" * piece_num)
        else:
            task = super(WorkerManagerPlugin, self).addTask(inner_path, *args, **kwargs)
        return task
Example #26
 def removeContent(self, inner_path):
     inner_dir = helper.getDirname(inner_path)
     try:
         content = self.contents[inner_path]
         files = dict(content.get("files", {}),
                      **content.get("files_optional", {}))
     except Exception, err:
         self.log.debug("Error loading %s for removeContent: %s" %
                        (inner_path, Debug.formatException(err)))
         files = {}
Example #27
    def downloadContent(self, inner_path, download_files=True, peer=None, check_modifications=False):
        s = time.time()
        self.log.debug("Downloading %s..." % inner_path)
        found = self.needFile(inner_path, update=self.bad_files.get(inner_path))
        content_inner_dir = helper.getDirname(inner_path)
        if not found:
            self.log.debug("Download %s failed, check_modifications: %s" % (inner_path, check_modifications))
            if check_modifications:  # Download failed, but check modifications anyway if it succeeds later
                self.onFileDone.once(lambda file_name: self.checkModifications(0), "check_modifications")
            return False  # Could not download content.json

        self.log.debug("Got %s" % inner_path)
        changed, deleted = self.content_manager.loadContent(inner_path, load_includes=False)

        # Start download files
        file_threads = []
        if download_files:
            for file_relative_path in self.content_manager.contents[inner_path].get("files", {}).keys():
                file_inner_path = content_inner_dir + file_relative_path
                # Start download and don't wait for it to finish; return the event
                res = self.needFile(file_inner_path, blocking=False, update=self.bad_files.get(file_inner_path), peer=peer)
                if res is not True and res is not False:  # Need downloading and file is allowed
                    file_threads.append(res)  # Append evt

            # Optional files
            if inner_path == "content.json":
                gevent.spawn(self.updateHashfield)

            if self.settings.get("autodownloadoptional"):
                for file_relative_path in self.content_manager.contents[inner_path].get("files_optional", {}).keys():
                    file_inner_path = content_inner_dir + file_relative_path
                    # Start download and don't wait for it to finish; return the event
                    res = self.needFile(file_inner_path, blocking=False, update=self.bad_files.get(file_inner_path), peer=peer)
                    if res is not True and res is not False:  # Need downloading and file is allowed
                        file_threads.append(res)  # Append evt

        # Wait for includes download
        include_threads = []
        for file_relative_path in self.content_manager.contents[inner_path].get("includes", {}).keys():
            file_inner_path = content_inner_dir + file_relative_path
            include_thread = gevent.spawn(self.downloadContent, file_inner_path, download_files=download_files, peer=peer)
            include_threads.append(include_thread)

        self.log.debug("%s: Downloading %s includes..." % (inner_path, len(include_threads)))
        gevent.joinall(include_threads)
        self.log.debug("%s: Includes download ended" % inner_path)

        if check_modifications:  # Check if every file is up-to-date
            self.checkModifications(0)

        self.log.debug("%s: Downloading %s files, changed: %s..." % (inner_path, len(file_threads), len(changed)))
        gevent.joinall(file_threads)
        self.log.debug("%s: DownloadContent ended in %.2fs" % (inner_path, time.time() - s))

        return True
Example #28
 def removeContent(self, inner_path):
     inner_dir = helper.getDirname(inner_path)
     try:
         content = self.contents[inner_path]
         files = dict(
             content.get("files", {}),
             **content.get("files_optional", {})
         )
     except Exception, err:
         self.log.debug("Error loading %s for removeContent: %s" % (inner_path, Debug.formatException(err)))
         files = {}
Example #29
    def loadContent(self,
                    content_inner_path="content.json",
                    add_bad_files=True,
                    delete_removed_files=True,
                    load_includes=True):
        content_inner_path = content_inner_path.strip("/")  # Remove / from beginning
        old_content = self.contents.get(content_inner_path)
        content_path = self.site.storage.getPath(content_inner_path)
        content_dir = helper.getDirname(
            self.site.storage.getPath(content_inner_path))
        content_inner_dir = helper.getDirname(content_inner_path)

        if os.path.isfile(content_path):
            try:
                new_content = json.load(open(content_path))
            except Exception, err:
                self.log.error("%s load error: %s" %
                               (content_path, Debug.formatException(err)))
                return [], []
Example #30
 def deleteContent(self, site, inner_path):
     content = site.content_manager.contents.get(inner_path)
     if content and "files_optional" in content:
         site_id = self.site_ids[site.address]
         content_inner_dir = helper.getDirname(inner_path)
         optional_inner_paths = [
             content_inner_dir + relative_inner_path
             for relative_inner_path in content.get("files_optional", {}).keys()
         ]
         self.execute("DELETE FROM file_optional WHERE ?", {"site_id": site_id, "inner_path": optional_inner_paths})
     super(ContentDbPlugin, self).deleteContent(site, inner_path)
Example #31
    def getDbFiles(self):
        merger_types = merger_db.get(self.site.address)

        # First return the site's own db files
        for item in super(SiteStoragePlugin, self).getDbFiles():
            yield item

        # Not a merger site, that's all
        if not merger_types:
            raise StopIteration

        merged_sites = [
            site_manager.sites[address]
            for address, merged_type in merged_db.iteritems()
            if merged_type in merger_types
        ]
        found = 0
        for merged_site in merged_sites:
            self.log.debug("Loading merged site: %s" % merged_site)
            merged_type = merged_db[merged_site.address]
            for content_inner_path, content in merged_site.content_manager.contents.iteritems():
                # content.json file itself
                if merged_site.storage.isFile(content_inner_path):
                    merged_inner_path = "merged-%s/%s/%s" % (merged_type, merged_site.address, content_inner_path)
                    yield merged_inner_path, merged_site.storage.getPath(content_inner_path)
                else:
                    merged_site.log.error("[MISSING] %s" % content_inner_path)
                # Data files in content.json
                content_inner_path_dir = helper.getDirname(content_inner_path)  # Content.json dir relative to site
                for file_relative_path in content.get("files", {}).keys() + content.get("files_optional", {}).keys():
                    if not file_relative_path.endswith(".json"):
                        continue  # We are only interested in json files
                    file_inner_path = content_inner_path_dir + file_relative_path  # File relative to site dir
                    file_inner_path = file_inner_path.strip("/")  # Strip leading /
                    if merged_site.storage.isFile(file_inner_path):
                        merged_inner_path = "merged-%s/%s/%s" % (merged_type, merged_site.address, file_inner_path)
                        yield merged_inner_path, merged_site.storage.getPath(file_inner_path)
                    else:
                        merged_site.log.error("[MISSING] %s" % file_inner_path)
                    found += 1
                    if found % 100 == 0:
                        time.sleep(0.000001)  # Context switch to avoid UI block
Example #32
 def testGetDirname(self):
     assert helper.getDirname("data/users/content.json") == "data/users/"
     assert helper.getDirname("data/users") == "data/"
     assert helper.getDirname("") == ""
     assert helper.getDirname("content.json") == ""
     assert helper.getDirname("data/users/") == "data/users/"
     assert helper.getDirname("/data/users/content.json") == "/data/users/"
Example #33
 def testGetDirname(self):
     assert helper.getDirname("data/users/content.json") == "data/users/"
     assert helper.getDirname("data/users") == "data/"
     assert helper.getDirname("") == ""
     assert helper.getDirname("content.json") == ""
     assert helper.getDirname("data/users/") == "data/users/"
     assert helper.getDirname("/data/users/content.json") == "data/users/"
Example #34
    def actionSiteSign(self, to, privatekey=None, inner_path="content.json", *args, **kwargs):
        # Add file to content.db and set it as pinned
        content_db = self.site.content_manager.contents.db
        content_inner_dir = helper.getDirname(inner_path)
        content_db.my_optional_files[self.site.address + "/" + content_inner_dir] = time.time()
        if len(content_db.my_optional_files) > 50:  # Keep only last 50
            oldest_key = min(
                content_db.my_optional_files.iterkeys(),
                key=(lambda key: content_db.my_optional_files[key])
            )
            del content_db.my_optional_files[oldest_key]

        return super(UiWebsocketPlugin, self).actionSiteSign(to, privatekey, inner_path, *args, **kwargs)
Example #35
0
    def actionSiteSign(self, to, privatekey=None, inner_path="content.json", *args, **kwargs):
        # Add file to content.db and set it as pinned
        content_db = self.site.content_manager.contents.db
        content_inner_dir = helper.getDirname(inner_path)
        content_db.my_optional_files[self.site.address + "/" + content_inner_dir] = time.time()
        if len(content_db.my_optional_files) > 50:  # Keep only last 50
            oldest_key = min(
                iter(content_db.my_optional_files.keys()),
                key=(lambda key: content_db.my_optional_files[key])
            )
            del content_db.my_optional_files[oldest_key]

        return super(UiWebsocketPlugin, self).actionSiteSign(to, privatekey, inner_path, *args, **kwargs)
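Both actionSiteSign variants above cap content_db.my_optional_files at 50 entries by evicting the entry with the smallest timestamp, i.e. the least recently pinned directory. The eviction step in isolation, with made-up data (names here are illustrative, not ZeroNet's API):

    import time

    pinned = {
        "1SiteAddrExample/data/users/a/": time.time() - 60,  # pinned a minute ago
        "1SiteAddrExample/data/users/b/": time.time(),       # pinned just now
    }
    MAX_PINNED = 50  # same cap as the plugin above

    if len(pinned) > MAX_PINNED:
        oldest_key = min(pinned, key=pinned.get)  # key with the smallest timestamp
        del pinned[oldest_key]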
Example #36
0
    def loadContent(self, content_inner_path="content.json", add_bad_files=True, delete_removed_files=True, load_includes=True, force=False):
        content_inner_path = content_inner_path.strip("/")  # Remove / from beginning
        old_content = self.contents.get(content_inner_path)
        content_path = self.site.storage.getPath(content_inner_path)
        content_dir = helper.getDirname(self.site.storage.getPath(content_inner_path))
        content_inner_dir = helper.getDirname(content_inner_path)

        if os.path.isfile(content_path):
            try:
                # Check if file is newer than what we have
                if not force and old_content and not self.site.settings.get("own"):
                    for line in open(content_path):
                        if '"modified"' not in line:
                            continue
                        match = re.search("([0-9\.]+),$", line.strip(" \r\n"))
                        if match and float(match.group(1)) <= old_content.get("modified", 0):
                            self.log.debug("%s loadContent same json file, skipping" % content_inner_path)
                            return [], []

                new_content = json.load(open(content_path))
            except Exception, err:
                self.log.error("%s load error: %s" % (content_path, Debug.formatException(err)))
                return [], []
Example #37
0
 def setContent(self, site, inner_path, content, size=0):
     super(ContentDbPlugin, self).setContent(site, inner_path, content, size=size)
     old_content = site.content_manager.contents.get(inner_path, {})
     if (not self.need_filling or self.filled.get(site.address)) and ("files_optional" in content or "files_optional" in old_content):
         self.setContentFilesOptional(site, inner_path, content)
         # Check deleted files
         if old_content:
             old_files = old_content.get("files_optional", {}).keys()
             new_files = content.get("files_optional", {}).keys()
             content_inner_dir = helper.getDirname(inner_path)
             deleted = [content_inner_dir + key for key in old_files if key not in new_files]
             if deleted:
                 site_id = self.site_ids[site.address]
                 self.execute("DELETE FROM file_optional WHERE ?", {"site_id": site_id, "inner_path": deleted})
Example #38
0
 def removeContent(self, inner_path):
     inner_dir = helper.getDirname(inner_path)
     content = self.contents[inner_path]
     files = dict(content.get("files", {}),
                  **content.get("files_optional", {}))
     files["content.json"] = True
     # Delete files that are no longer in content.json
     for file_relative_path in files:
         file_inner_path = inner_dir + file_relative_path
         try:
             self.site.storage.delete(file_inner_path)
             self.log.debug("Deleted file: %s" % file_inner_path)
         except Exception, err:
             self.log.debug("Error deleting file %s: %s" %
                            (file_inner_path, err))
Example #39
0
 def changeDb(self, auth_address, action):
     res = self.site.content_manager.contents.db.execute(
         "SELECT * FROM content LEFT JOIN site USING (site_id) WHERE inner_path LIKE :inner_path",
         {"inner_path": "%%/%s/%%" % auth_address})
     for row in res:
         site = self.server.sites.get(row["address"])
         if not site:
             continue
         dir_inner_path = helper.getDirname(row["inner_path"])
         for file_name in site.storage.list(dir_inner_path):
             if action == "remove":
                 site.storage.onUpdated(dir_inner_path + file_name, False)
             else:
                 site.storage.onUpdated(dir_inner_path + file_name)
             site.onFileDone(dir_inner_path + file_name)
Example #40
0
    def getDbFiles(self):
        merger_types = merger_db.get(self.site.address)

        # First return the site's own db files
        for item in super(SiteStoragePlugin, self).getDbFiles():
            yield item

        # Not a merger site, that's all
        if not merger_types:
            return  # End the generator here; raise StopIteration breaks under PEP 479

        merged_sites = [
            site_manager.sites[address]
            for address, merged_type in merged_db.iteritems()
            if merged_type in merger_types
        ]
        for merged_site in merged_sites:
            merged_type = merged_db[merged_site.address]
            for content_inner_path, content in merged_site.content_manager.contents.iteritems():
                # content.json file itself
                if merged_site.storage.isFile(content_inner_path):  # content.json exists on disk
                    content_path = self.getPath(
                        "merged-%s/%s/%s" %
                        (merged_type, merged_site.address, content_inner_path))
                    yield content_path, merged_site.storage.open(
                        content_inner_path)
                else:
                    merged_site.log.error("[MISSING] %s" % content_inner_path)
                # Data files in content.json
                content_inner_path_dir = helper.getDirname(
                    content_inner_path)  # Content.json dir relative to site
                for file_relative_path in content["files"].keys():
                    if not file_relative_path.endswith(".json"):
                        continue  # We are only interested in json files
                    file_inner_path = content_inner_path_dir + file_relative_path  # File Relative to site dir
                    file_inner_path = file_inner_path.strip(
                        "/")  # Strip leading /
                    if merged_site.storage.isFile(file_inner_path):
                        file_path = self.getPath(
                            "merged-%s/%s/%s" %
                            (merged_type, merged_site.address,
                             file_inner_path))
                        yield file_path, merged_site.storage.open(
                            file_inner_path)
                    else:
                        merged_site.log.error("[MISSING] %s" % file_inner_path)
Example #41
0
 def removeContent(self, inner_path):
     inner_dir = helper.getDirname(inner_path)
     content = self.contents[inner_path]
     files = dict(
         content.get("files", {}),
         **content.get("files_optional", {})
     )
     files["content.json"] = True
     # Delete files that are no longer in content.json
     for file_relative_path in files:
         file_inner_path = inner_dir + file_relative_path
         try:
             self.site.storage.delete(file_inner_path)
             self.log.debug("Deleted file: %s" % file_inner_path)
         except Exception, err:
             self.log.debug("Error deleting file %s: %s" % (file_inner_path, err))
Example #42
0
    def setContentFilesOptional(self,
                                site,
                                content_inner_path,
                                content,
                                cur=None):
        if not cur:
            cur = self

        num = 0
        site_id = self.site_ids[site.address]
        content_inner_dir = helper.getDirname(content_inner_path)
        for relative_inner_path, file in content.get("files_optional",
                                                     {}).items():
            file_inner_path = content_inner_dir + relative_inner_path
            hash_id = int(file["sha512"][0:4], 16)
            if hash_id in site.content_manager.hashfield:
                is_downloaded = 1
            else:
                is_downloaded = 0
            if site.address + "/" + content_inner_dir in self.my_optional_files:
                is_pinned = 1
            else:
                is_pinned = 0
            cur.insertOrUpdate("file_optional", {
                "hash_id": hash_id,
                "size": int(file["size"])
            }, {
                "site_id": site_id,
                "inner_path": file_inner_path
            },
                               oninsert={
                                   "time_added":
                                   int(time.time()),
                                   "time_downloaded":
                                   int(time.time()) if is_downloaded else 0,
                                   "is_downloaded":
                                   is_downloaded,
                                   "peer":
                                   is_downloaded,
                                   "is_pinned":
                                   is_pinned
                               })
            self.optional_files[site_id][file_inner_path[-8:]] = 1
            num += 1

        return num
Example #43
0
 def changeDbs(self, auth_address, action):
     self.log.debug("Mute action %s on user %s" % (action, auth_address))
     res = list(self.site_manager.list().values())[0].content_manager.contents.db.execute(
         "SELECT * FROM content LEFT JOIN site USING (site_id) WHERE inner_path LIKE :inner_path",
         {"inner_path": "%%/%s/%%" % auth_address})
     for row in res:
         site = self.site_manager.sites.get(row["address"])
         if not site:
             continue
         dir_inner_path = helper.getDirname(row["inner_path"])
         for file_name in site.storage.walk(dir_inner_path):
             if action == "remove":
                 site.storage.onUpdated(dir_inner_path + file_name, False)
             else:
                 site.storage.onUpdated(dir_inner_path + file_name)
             site.onFileDone(dir_inner_path + file_name)
Example #44
0
 def changeDb(self, auth_address, action):
     self.log.debug("Mute action %s on user %s" % (action, auth_address))
     res = self.site.content_manager.contents.db.execute(
         "SELECT * FROM content LEFT JOIN site USING (site_id) WHERE inner_path LIKE :inner_path",
         {"inner_path": "%%/%s/%%" % auth_address}
     )
     for row in res:
         site = self.server.sites.get(row["address"])
         if not site:
             continue
         dir_inner_path = helper.getDirname(row["inner_path"])
         for file_name in site.storage.walk(dir_inner_path):
             if action == "remove":
                 site.storage.onUpdated(dir_inner_path + file_name, False)
             else:
                 site.storage.onUpdated(dir_inner_path + file_name)
             site.onFileDone(dir_inner_path + file_name)
Example #45
0
 def getDbFiles(self):
     for content_inner_path, content in self.site.content_manager.contents.iteritems():
         # content.json file itself
         if self.isFile(content_inner_path):  # content.json exists on disk
             yield self.getPath(content_inner_path), self.open(content_inner_path)
         else:
             self.log.error("[MISSING] %s" % content_inner_path)
         # Data files in content.json
         content_inner_path_dir = helper.getDirname(content_inner_path)  # Content.json dir relative to site
         for file_relative_path in content["files"].keys():
             if not file_relative_path.endswith(".json"):
                 continue  # We are only interested in json files
             file_inner_path = content_inner_path_dir + file_relative_path  # File Relative to site dir
             file_inner_path = file_inner_path.strip("/")  # Strip leading /
             if self.isFile(file_inner_path):
                 yield self.getPath(file_inner_path), self.open(file_inner_path)
             else:
                 self.log.error("[MISSING] %s" % file_inner_path)
Example #46
0
    def actionServerShowdirectory(self, to, directory="backup", inner_path=""):
        if self.request.env["REMOTE_ADDR"] != "127.0.0.1":
            return self.response(to, {"error": "Only clients from 127.0.0.1 allowed to run this command"})

        import webbrowser
        if directory == "backup":
            path = os.path.abspath(config.data_dir)
        elif directory == "log":
            path = os.path.abspath(config.log_dir)
        elif directory == "site":
            path = os.path.abspath(self.site.storage.getPath(helper.getDirname(inner_path)))

        if os.path.isdir(path):
            self.log.debug("Opening: %s" % path)
            webbrowser.open('file://' + path)
            return self.response(to, "ok")
        else:
            return self.response(to, {"error": "Not a directory"})
Example #48
0
 def getDbFiles(self):
     for content_inner_path, content in self.site.content_manager.contents.iteritems():
         # content.json file itself
         if self.isFile(content_inner_path):
             yield content_inner_path, self.getPath(content_inner_path)
         else:
             self.log.error("[MISSING] %s" % content_inner_path)
         # Data files in content.json
         content_inner_path_dir = helper.getDirname(content_inner_path)  # Content.json dir relative to site
         for file_relative_path in content.get("files", {}).keys() + content.get("files_optional", {}).keys():
             if not file_relative_path.endswith(".json"):
                 continue  # We are only interested in json files
             file_inner_path = content_inner_path_dir + file_relative_path  # File Relative to site dir
             file_inner_path = file_inner_path.strip("/")  # Strip leading /
             if self.isFile(file_inner_path):
                 yield file_inner_path, self.getPath(file_inner_path)
             else:
                 self.log.error("[MISSING] %s" % file_inner_path)
Example #49
0
    def setContentFilesOptional(self, site, content_inner_path, content, cur=None):
        if not cur:
            cur = self
            try:
                cur.execute("BEGIN")
            except Exception as err:
                self.log.warning("Transaction begin error %s %s: %s" % (site, content_inner_path, Debug.formatException(err)))

        num = 0
        site_id = self.site_ids[site.address]
        content_inner_dir = helper.getDirname(content_inner_path)
        for relative_inner_path, file in content.get("files_optional", {}).iteritems():
            file_inner_path = content_inner_dir + relative_inner_path
            hash_id = int(file["sha512"][0:4], 16)
            if hash_id in site.content_manager.hashfield:
                is_downloaded = 1
            else:
                is_downloaded = 0
            if site.address + "/" + content_inner_dir in self.my_optional_files:
                is_pinned = 1
            else:
                is_pinned = 0
            cur.insertOrUpdate("file_optional", {
                "hash_id": hash_id,
                "size": int(file["size"])
            }, {
                "site_id": site_id,
                "inner_path": file_inner_path
            }, oninsert={
                "time_added": int(time.time()),
                "time_downloaded": int(time.time()) if is_downloaded else 0,
                "is_downloaded": is_downloaded,
                "peer": is_downloaded,
                "is_pinned": is_pinned
            })
            self.optional_files[site_id][file_inner_path[-8:]] = 1
            num += 1

        if cur == self:
            try:
                cur.execute("END")
            except Exception as err:
                self.log.warning("Transaction end error %s %s: %s" % (site, content_inner_path, Debug.formatException(err)))
        return num
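Both setContentFilesOptional variants derive hash_id from the first four hex digits of the optional file's sha512 digest, which maps every file into the 0-65535 range tracked by the hashfield. A quick check with a made-up digest prefix:

    sha512 = "d131dd02c5e6eec4..."  # hypothetical digest; only the first 4 hex chars matter here
    hash_id = int(sha512[0:4], 16)  # "d131" -> 53553, always within 0..65535
    assert hash_id == 0xd131 == 53553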
Example #50
0
    def verifyFiles(self, quick_check=False):  # Fast = using file size
        bad_files = []
        if not self.site.content_manager.contents.get(
                "content.json"):  # No content.json, download it first
            self.site.needFile("content.json",
                               update=True)  # Force update to fix corrupt file
            self.site.content_manager.loadContent()  # Reload content.json
        for content_inner_path, content in self.site.content_manager.contents.items():
            if not os.path.isfile(self.getPath(
                    content_inner_path)):  # Missing content.json file
                self.log.debug("[MISSING] %s" % content_inner_path)
                bad_files.append(content_inner_path)
            for file_relative_path in content["files"].keys():
                file_inner_path = helper.getDirname(
                    content_inner_path
                ) + file_relative_path  # Relative to site dir
                file_inner_path = file_inner_path.strip("/")  # Strip leading /
                file_path = self.getPath(file_inner_path)
                if not os.path.isfile(file_path):
                    self.log.debug("[MISSING] %s" % file_inner_path)
                    bad_files.append(file_inner_path)
                    continue

                if quick_check:
                    ok = os.path.getsize(file_path) == content["files"][
                        file_relative_path]["size"]
                else:
                    ok = self.site.content_manager.verifyFile(
                        file_inner_path, open(file_path, "rb"))

                if not ok:
                    self.log.debug("[CHANGED] %s" % file_inner_path)
                    bad_files.append(file_inner_path)
            self.log.debug(
                "%s verified: %s files, quick_check: %s, bad files: %s" %
                (content_inner_path, len(
                    content["files"]), quick_check, bad_files))

        return bad_files
Example #51
0
    def addTask(self, inner_path, *args, **kwargs):
        file_info = kwargs.get("file_info")
        if file_info and "piecemap" in file_info:  # Bigfile
            self.site.settings["has_bigfile"] = True

            piecemap_inner_path = helper.getDirname(file_info["content_inner_path"]) + file_info["piecemap"]
            piecemap_task = None
            if not self.site.storage.isFile(piecemap_inner_path):
                # Start download piecemap
                piecemap_task = super(WorkerManagerPlugin, self).addTask(piecemap_inner_path, priority=30)
                autodownload_bigfile_size_limit = self.site.settings.get("autodownload_bigfile_size_limit", config.autodownload_bigfile_size_limit)
                if "|" not in inner_path and self.site.isDownloadable(inner_path) and file_info["size"] / 1024 / 1024 <= autodownload_bigfile_size_limit:
                    gevent.spawn_later(0.1, self.site.needFile, inner_path + "|all")  # Download all pieces

            if "|" in inner_path:
                # Start download piece
                task = super(WorkerManagerPlugin, self).addTask(inner_path, *args, **kwargs)

                inner_path, file_range = inner_path.split("|")
                pos_from, pos_to = map(int, file_range.split("-"))
                task["piece_i"] = pos_from / file_info["piece_size"]
                task["sha512"] = file_info["sha512"]
            else:
                if inner_path in self.site.bad_files:
                    del self.site.bad_files[inner_path]
                if piecemap_task:
                    task = piecemap_task
                else:
                    fake_evt = gevent.event.AsyncResult()  # Don't download anything if no range specified
                    fake_evt.set(True)
                    task = {"evt": fake_evt}

            if not self.site.storage.isFile(inner_path):
                self.site.storage.createSparseFile(inner_path, file_info["size"], file_info["sha512"])
                piece_num = int(math.ceil(float(file_info["size"]) / file_info["piece_size"]))
                self.site.storage.piecefields[file_info["sha512"]].fromstring("0" * piece_num)
        else:
            task = super(WorkerManagerPlugin, self).addTask(inner_path, *args, **kwargs)
        return task
Example #52
0
    def getDbFiles(self):
        merger_types = merger_db.get(self.site.address)

        # First return the site's own db files
        for item in super(SiteStoragePlugin, self).getDbFiles():
            yield item

        # Not a merger site, that's all
        if not merger_types:
            return  # End the generator here; raise StopIteration breaks under PEP 479

        merged_sites = [
            site_manager.sites[address]
            for address, merged_type in merged_db.iteritems()
            if merged_type in merger_types
        ]
        for merged_site in merged_sites:
            self.log.debug("Loading merged site: %s" % merged_site)
            merged_type = merged_db[merged_site.address]
            for content_inner_path, content in merged_site.content_manager.contents.iteritems():
                # content.json file itself
                if merged_site.storage.isFile(content_inner_path):  # content.json exists on disk
                    merged_inner_path = "merged-%s/%s/%s" % (merged_type, merged_site.address, content_inner_path)
                    yield merged_inner_path, merged_site.storage.open(content_inner_path)
                else:
                    merged_site.log.error("[MISSING] %s" % content_inner_path)
                # Data files in content.json
                content_inner_path_dir = helper.getDirname(content_inner_path)  # Content.json dir relative to site
                for file_relative_path in content["files"].keys():
                    if not file_relative_path.endswith(".json"):
                        continue  # We are only interested in json files
                    file_inner_path = content_inner_path_dir + file_relative_path  # File Relative to site dir
                    file_inner_path = file_inner_path.strip("/")  # Strip leading /
                    if merged_site.storage.isFile(file_inner_path):
                        merged_inner_path = "merged-%s/%s/%s" % (merged_type, merged_site.address, file_inner_path)
                        yield merged_inner_path, merged_site.storage.open(file_inner_path)
                    else:
                        merged_site.log.error("[MISSING] %s" % file_inner_path)
Example #53
0
    def clone(self, address, privatekey=None, address_index=None, overwrite=False):
        import shutil
        new_site = SiteManager.site_manager.need(address, all_file=False)
        default_dirs = []  # Don't copy these directories (they have a -default version)
        for dir_name in os.listdir(self.storage.directory):
            if "-default" in dir_name:
                default_dirs.append(dir_name.replace("-default", ""))

        self.log.debug("Cloning to %s, ignore dirs: %s" % (address, default_dirs))

        # Copy root content.json
        if not new_site.storage.isFile("content.json") and not overwrite:
            # content.json does not exist yet, create a new one from the source site
            content_json = self.storage.loadJson("content.json")
            if "domain" in content_json:
                del content_json["domain"]
            content_json["title"] = "my" + content_json["title"]
            content_json["cloned_from"] = self.address
            if address_index:
                content_json["address_index"] = address_index  # Site owner's BIP32 index
            new_site.storage.writeJson("content.json", content_json)
            new_site.content_manager.loadContent(
                "content.json", add_bad_files=False, delete_removed_files=False, load_includes=False
            )

        # Copy files
        for content_inner_path, content in self.content_manager.contents.items():
            for file_relative_path in sorted(content["files"].keys()):
                file_inner_path = helper.getDirname(content_inner_path) + file_relative_path  # Relative to site dir
                file_inner_path = file_inner_path.strip("/")  # Strip leading /
                if file_inner_path.split("/")[0] in default_dirs:  # Dont copy directories that has -default postfixed alternative
                    self.log.debug("[SKIP] %s (has default alternative)" % file_inner_path)
                    continue
                file_path = self.storage.getPath(file_inner_path)

                # Copy the file normally to keep the -default postfixed dir and file to allow cloning later
                file_path_dest = new_site.storage.getPath(file_inner_path)
                self.log.debug("[COPY] %s to %s..." % (file_inner_path, file_path_dest))
                dest_dir = os.path.dirname(file_path_dest)
                if not os.path.isdir(dest_dir):
                    os.makedirs(dest_dir)
                shutil.copy(file_path, file_path_dest)

                # If -default in path, create a -default less copy of the file
                if "-default" in file_inner_path:
                    file_path_dest = new_site.storage.getPath(file_inner_path.replace("-default", ""))
                    if new_site.storage.isFile(file_path_dest) and not overwrite:  # Don't overwrite site files with default ones
                        self.log.debug("[SKIP] Default file: %s (already exist)" % file_inner_path)
                        continue
                    self.log.debug("[COPY] Default file: %s to %s..." % (file_inner_path, file_path_dest))
                    dest_dir = os.path.dirname(file_path_dest)
                    if not os.path.isdir(dest_dir):
                        os.makedirs(dest_dir)
                    shutil.copy(file_path, file_path_dest)
                    # Sign if content json
                    if file_path_dest.endswith("/content.json"):
                        new_site.storage.onUpdated(file_inner_path.replace("-default", ""))
                        new_site.content_manager.loadContent(
                            file_inner_path.replace("-default", ""), add_bad_files=False,
                            delete_removed_files=False, load_includes=False
                        )
                        if privatekey:
                            new_site.content_manager.sign(file_inner_path.replace("-default", ""), privatekey)
                            new_site.content_manager.loadContent(
                                file_inner_path, add_bad_files=False, delete_removed_files=False, load_includes=False
                            )

        if privatekey:
            new_site.content_manager.sign("content.json", privatekey)
            new_site.content_manager.loadContent(
                "content.json", add_bad_files=False, delete_removed_files=False, load_includes=False
            )

        # Rebuild DB
        if new_site.storage.isFile("dbschema.json"):
            new_site.storage.closeDb()
            new_site.storage.rebuildDb()

        return new_site
Example #54
0
    def verifyFiles(self,
                    quick_check=False,
                    add_optional=False,
                    add_changed=True):
        bad_files = []
        i = 0

        if not self.site.content_manager.contents.get(
                "content.json"):  # No content.json, download it first
            self.site.needFile("content.json",
                               update=True)  # Force update to fix corrupt file
            self.site.content_manager.loadContent()  # Reload content.json
        for content_inner_path, content in self.site.content_manager.contents.items():
            i += 1
            if i % 50 == 0:
                time.sleep(0.0001)  # Context switch to avoid gevent hangs
            if not os.path.isfile(self.getPath(
                    content_inner_path)):  # Missing content.json file
                self.log.debug("[MISSING] %s" % content_inner_path)
                bad_files.append(content_inner_path)

            for file_relative_path in content.get("files", {}).keys():
                file_inner_path = helper.getDirname(
                    content_inner_path
                ) + file_relative_path  # Relative to site dir
                file_inner_path = file_inner_path.strip("/")  # Strip leading /
                file_path = self.getPath(file_inner_path)
                if not os.path.isfile(file_path):
                    self.log.debug("[MISSING] %s" % file_inner_path)
                    bad_files.append(file_inner_path)
                    continue

                if quick_check:
                    ok = os.path.getsize(file_path) == content["files"][
                        file_relative_path]["size"]
                else:
                    ok = self.site.content_manager.verifyFile(
                        file_inner_path, open(file_path, "rb"))

                if not ok:
                    self.log.debug("[CHANGED] %s" % file_inner_path)
                    if add_changed or content.get("cert_sign"):  # If updating own site only add changed user files
                        bad_files.append(file_inner_path)

            # Optional files
            optional_added = 0
            optional_removed = 0
            for file_relative_path in content.get("files_optional", {}).keys():
                file_inner_path = helper.getDirname(
                    content_inner_path
                ) + file_relative_path  # Relative to site dir
                file_inner_path = file_inner_path.strip("/")  # Strip leading /
                file_path = self.getPath(file_inner_path)
                if not os.path.isfile(file_path):
                    self.site.content_manager.hashfield.removeHash(
                        content["files_optional"][file_relative_path]
                        ["sha512"])
                    if add_optional:
                        bad_files.append(file_inner_path)
                    continue

                if quick_check:
                    ok = os.path.getsize(file_path) == content[
                        "files_optional"][file_relative_path]["size"]
                else:
                    ok = self.site.content_manager.verifyFile(
                        file_inner_path, open(file_path, "rb"))

                if ok:
                    self.site.content_manager.hashfield.appendHash(
                        content["files_optional"][file_relative_path]
                        ["sha512"])
                    optional_added += 1
                else:
                    self.site.content_manager.hashfield.removeHash(
                        content["files_optional"][file_relative_path]
                        ["sha512"])
                    optional_removed += 1
                    if add_optional:
                        bad_files.append(file_inner_path)
                    self.log.debug("[OPTIONAL CHANGED] %s" % file_inner_path)

            if config.verbose:
                self.log.debug(
                    "%s verified: %s, quick: %s, bad: %s, optionals: +%s -%s" %
                    (content_inner_path, len(content["files"]), quick_check,
                     bad_files, optional_added, optional_removed))

        time.sleep(0.0001)  # Context switch to avoid gevent hangs
        return bad_files
Example #55
0
        cur = self.db.getCursor()
        cur.execute("BEGIN")
        cur.logging = False
        found = 0
        s = time.time()
        for content_inner_path, content in self.site.content_manager.contents.items():
            content_path = self.getPath(content_inner_path)
            if os.path.isfile(content_path):  # content.json exists on disk
                if self.db.loadJson(content_path, cur=cur):
                    found += 1
            else:
                self.log.error("[MISSING] %s" % content_inner_path)
            for file_relative_path in content["files"].keys():
                if not file_relative_path.endswith(".json"):
                    continue  # We are only interested in json files
                content_inner_path_dir = helper.getDirname(content_inner_path)  # Content.json dir relative to site
                file_inner_path = content_inner_path_dir + file_relative_path  # File Relative to site dir
                file_inner_path = file_inner_path.strip("/")  # Strip leading /
                file_path = self.getPath(file_inner_path)
                if os.path.isfile(file_path):
                    if self.db.loadJson(file_path, cur=cur):
                        found += 1
                else:
                    self.log.error("[MISSING] %s" % file_inner_path)
        cur.execute("END")
        self.log.info("Imported %s data file in %ss" % (found, time.time() - s))
        self.event_db_busy.set(True)  # Event done, notify waiters
        self.event_db_busy = None  # Clear event

    # Execute sql query or rebuild on dberror
    def query(self, query, params=None):
Example #56
0
    def sign(self, inner_path="content.json", privatekey=None, filewrite=True, update_changed_files=False, extend=None):
        content = self.contents.get(inner_path)
        if not content:  # Content not exist yet, load default one
            self.log.info("File %s not exist yet, loading default values..." % inner_path)
            content = {"files": {}, "signs": {}}  # Default content.json
            if inner_path == "content.json":  # It's the root content.json, add some more fields
                content["title"] = "%s - ZeroNet_" % self.site.address
                content["description"] = ""
                content["signs_required"] = 1
                content["ignore"] = ""
            if extend:
                content.update(extend)  # Add custom fields

        directory = helper.getDirname(self.site.storage.getPath(inner_path))
        inner_directory = helper.getDirname(inner_path)
        self.log.info("Opening site data directory: %s..." % directory)

        changed_files = [inner_path]
        files_node, files_optional_node = self.hashFiles(
            helper.getDirname(inner_path), content.get("ignore"), content.get("optional")
        )

        # Find changed files
        files_merged = files_node.copy()
        files_merged.update(files_optional_node)
        for file_relative_path, file_details in files_merged.iteritems():
            old_hash = content["files"].get(file_relative_path, {}).get("sha512")
            new_hash = files_merged[file_relative_path]["sha512"]
            if old_hash != new_hash:
                changed_files.append(inner_directory + file_relative_path)

        self.log.debug("Changed files: %s" % changed_files)
        if update_changed_files:
            for file_path in changed_files:
                self.site.storage.onUpdated(file_path)

        # Generate new content.json
        self.log.info("Adding timestamp and sha512sums to new content.json...")

        new_content = content.copy()  # Create a copy of current content.json
        new_content["files"] = files_node  # Add files sha512 hash
        if files_optional_node:
            new_content["files_optional"] = files_optional_node
        elif "files_optional" in new_content:
            del new_content["files_optional"]

        new_content["modified"] = time.time()  # Add timestamp
        if inner_path == "content.json":
            new_content["address"] = self.site.address
            new_content["zeronet_version"] = config.version
            new_content["signs_required"] = content.get("signs_required", 1)

        # Verify private key
        from Crypt import CryptBitcoin
        self.log.info("Verifying private key...")
        privatekey_address = CryptBitcoin.privatekeyToAddress(privatekey)
        valid_signers = self.getValidSigners(inner_path, new_content)
        if privatekey_address not in valid_signers:
            return self.log.error(
                "Private key invalid! Valid signers: %s, Private key address: %s" %
                (valid_signers, privatekey_address)
            )
        self.log.info("Correct %s in valid signers: %s" % (privatekey_address, valid_signers))

        if inner_path == "content.json" and privatekey_address == self.site.address:
            # If signing using the root key, then sign the valid signers
            new_content["signers_sign"] = CryptBitcoin.sign(
                "%s:%s" % (new_content["signs_required"], ",".join(valid_signers)), privatekey
            )
            if not new_content["signers_sign"]:
                self.log.info("Old style address, signers_sign is none")

        self.log.info("Signing %s..." % inner_path)

        if "signs" in new_content:
            del(new_content["signs"])  # Delete old signs
        if "sign" in new_content:
            del(new_content["sign"])  # Delete old sign (backward compatibility)

        sign_content = json.dumps(new_content, sort_keys=True)
        sign = CryptBitcoin.sign(sign_content, privatekey)
        # new_content["signs"] = content.get("signs", {}) # TODO: Multisig
        if sign:  # If signing is successful (not an old address)
            new_content["signs"] = {}
            new_content["signs"][privatekey_address] = sign

        if inner_path == "content.json":  # To root content.json add old format sign for backward compatibility
            oldsign_content = json.dumps(new_content, sort_keys=True)
            new_content["sign"] = CryptBitcoin.signOld(oldsign_content, privatekey)

        if not self.verifyContent(inner_path, new_content):
            self.log.error("Sign failed: Invalid content")
            return False

        if filewrite:
            self.log.info("Saving to %s..." % inner_path)
            self.site.storage.writeJson(inner_path, new_content)

        self.log.info("File %s signed!" % inner_path)

        if filewrite:  # Written to file
            return True
        else:  # Return the new content
            return new_content
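sign() above serializes the manifest with json.dumps(new_content, sort_keys=True) before handing it to CryptBitcoin.sign, so the signed byte string is deterministic regardless of dict insertion order. A small standard-library-only check of that property:

    import json

    a = json.dumps({"files": {}, "modified": 1500000000.0, "address": "1Example"}, sort_keys=True)
    b = json.dumps({"address": "1Example", "modified": 1500000000.0, "files": {}}, sort_keys=True)
    assert a == b  # identical serialization -> identical input to the signer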
Example #57
0
            self.closeDb()
            try:
                schema = self.loadJson("dbschema.json")
                db_path = self.getPath(schema["db_file"])
                if os.path.isfile(db_path):
                    os.unlink(db_path)
            except Exception, err:
                self.log.error("Db file delete error: %s" % err)

        self.log.debug("Deleting files from content.json...")
        files = []  # Get filenames
        for content_inner_path, content in self.site.content_manager.contents.items():
            files.append(content_inner_path)
            # Add normal files
            for file_relative_path in content.get("files", {}).keys():
                file_inner_path = helper.getDirname(content_inner_path) + file_relative_path  # Relative to site dir
                files.append(file_inner_path)
            # Add optional files
            for file_relative_path in content.get("files_optional", {}).keys():
                file_inner_path = helper.getDirname(content_inner_path) + file_relative_path  # Relative to site dir
                files.append(file_inner_path)

        for inner_path in files:
            path = self.getPath(inner_path)
            if os.path.isfile(path):
                os.unlink(path)

        self.log.debug("Deleting empty dirs...")
        for root, dirs, files in os.walk(self.directory, topdown=False):
            for dir in dirs:
                path = os.path.join(root, dir)
Example #58
0
    def actionBigfileUpload(self):
        nonce = self.get.get("upload_nonce")
        if nonce not in upload_nonces:
            return self.error403("Upload nonce error.")

        upload_info = upload_nonces[nonce]
        del upload_nonces[nonce]

        self.sendHeader(200, "text/html", noscript=True, extra_headers=[
            ("Access-Control-Allow-Origin", "null"),
            ("Access-Control-Allow-Credentials", "true")
        ])

        self.readMultipartHeaders(self.env['wsgi.input'])  # Skip http headers

        site = upload_info["site"]
        inner_path = upload_info["inner_path"]

        with site.storage.open(inner_path, "wb", create_dirs=True) as out_file:
            merkle_root, piece_size, piecemap_info = site.content_manager.hashBigfile(
                self.env['wsgi.input'], upload_info["size"], upload_info["piece_size"], out_file
            )

        if len(piecemap_info["sha512_pieces"]) == 1:  # Small file, don't split
            hash = piecemap_info["sha512_pieces"][0].encode("hex")
            site.content_manager.optionalDownloaded(inner_path, hash, upload_info["size"], own=True)

        else:  # Big file
            file_name = helper.getFilename(inner_path)
            msgpack.pack({file_name: piecemap_info}, site.storage.open(upload_info["piecemap"], "wb"))

            # Find piecemap and file relative path to content.json
            file_info = site.content_manager.getFileInfo(inner_path, new_file=True)
            content_inner_path_dir = helper.getDirname(file_info["content_inner_path"])
            piecemap_relative_path = upload_info["piecemap"][len(content_inner_path_dir):]
            file_relative_path = inner_path[len(content_inner_path_dir):]

            # Add file to content.json
            if site.storage.isFile(file_info["content_inner_path"]):
                content = site.storage.loadJson(file_info["content_inner_path"])
            else:
                content = {}
            if "files_optional" not in content:
                content["files_optional"] = {}

            content["files_optional"][file_relative_path] = {
                "sha512": merkle_root,
                "size": upload_info["size"],
                "piecemap": piecemap_relative_path,
                "piece_size": piece_size
            }

            site.content_manager.optionalDownloaded(inner_path, merkle_root, upload_info["size"], own=True)
            site.storage.writeJson(file_info["content_inner_path"], content)

            site.content_manager.contents.loadItem(file_info["content_inner_path"])  # reload cache

        return {
            "merkle_root": merkle_root,
            "piece_num": len(piecemap_info["sha512_pieces"]),
            "piece_size": piece_size,
            "inner_path": inner_path
        }
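actionBigfileUpload converts paths that are relative to the site root into paths relative to the owning content.json by slicing off the content directory prefix returned by helper.getDirname. With hypothetical values:

    content_inner_path_dir = "data/users/1ExampleAuthAddressxxxxxxxxx/"  # hypothetical content.json directory
    inner_path = content_inner_path_dir + "video.mp4"
    piecemap_inner_path = content_inner_path_dir + "video.mp4.piecemap.msgpack"

    file_relative_path = inner_path[len(content_inner_path_dir):]               # "video.mp4"
    piecemap_relative_path = piecemap_inner_path[len(content_inner_path_dir):]  # "video.mp4.piecemap.msgpack"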
Example #59
0
    def verifyFiles(self, quick_check=False, add_optional=False, add_changed=True):
        bad_files = []
        i = 0

        if not self.site.content_manager.contents.get("content.json"):  # No content.json, download it first
            self.log.debug("VerifyFile content.json not exists")
            self.site.needFile("content.json", update=True)  # Force update to fix corrupt file
            self.site.content_manager.loadContent()  # Reload content.json
        for content_inner_path, content in self.site.content_manager.contents.items():
            i += 1
            if i % 50 == 0:
                time.sleep(0.0001)  # Context switch to avoid gevent hangs
            if not os.path.isfile(self.getPath(content_inner_path)):  # Missing content.json file
                self.log.debug("[MISSING] %s" % content_inner_path)
                bad_files.append(content_inner_path)

            for file_relative_path in content.get("files", {}).keys():
                file_inner_path = helper.getDirname(content_inner_path) + file_relative_path  # Relative to site dir
                file_inner_path = file_inner_path.strip("/")  # Strip leading /
                file_path = self.getPath(file_inner_path)
                if not os.path.isfile(file_path):
                    self.log.debug("[MISSING] %s" % file_inner_path)
                    bad_files.append(file_inner_path)
                    continue

                if quick_check:
                    ok = os.path.getsize(file_path) == content["files"][file_relative_path]["size"]
                    if not ok:
                        err = "Invalid size"
                else:
                    try:
                        ok = self.site.content_manager.verifyFile(file_inner_path, open(file_path, "rb"))
                    except Exception, err:
                        ok = False

                if not ok:
                    self.log.debug("[INVALID] %s: %s" % (file_inner_path, err))
                    if add_changed or content.get("cert_user_id"):  # If updating own site only add changed user files
                        bad_files.append(file_inner_path)

            # Optional files
            optional_added = 0
            optional_removed = 0
            for file_relative_path in content.get("files_optional", {}).keys():
                file_node = content["files_optional"][file_relative_path]
                file_inner_path = helper.getDirname(content_inner_path) + file_relative_path  # Relative to site dir
                file_inner_path = file_inner_path.strip("/")  # Strip leading /
                file_path = self.getPath(file_inner_path)
                if not os.path.isfile(file_path):
                    if self.site.content_manager.hashfield.hasHash(file_node["sha512"]):
                        self.site.content_manager.optionalRemove(file_inner_path, file_node["sha512"], file_node["size"])
                    if add_optional:
                        bad_files.append(file_inner_path)
                    continue

                if quick_check:
                    ok = os.path.getsize(file_path) == content["files_optional"][file_relative_path]["size"]
                else:
                    try:
                        ok = self.site.content_manager.verifyFile(file_inner_path, open(file_path, "rb"))
                    except Exception, err:
                        ok = False

                if ok:
                    if not self.site.content_manager.hashfield.hasHash(file_node["sha512"]):
                        self.site.content_manager.optionalDownloaded(file_inner_path, file_node["sha512"], file_node["size"])
                        optional_added += 1
                else:
                    if self.site.content_manager.hashfield.hasHash(file_node["sha512"]):
                        self.site.content_manager.optionalRemove(file_inner_path, file_node["sha512"], file_node["size"])
                        optional_removed += 1
                    bad_files.append(file_inner_path)
                    self.log.debug("[OPTIONAL CHANGED] %s" % file_inner_path)