Code example #1
File: Site.py Project: Emeraude/ZeroNet
    def __init__(self, address, allow_create=True, settings=None):
        self.address = re.sub("[^A-Za-z0-9]", "", address)  # Make sure it's a valid address
        self.address_short = "%s..%s" % (self.address[:6], self.address[-4:])  # Short address for logging
        self.log = logging.getLogger("Site:%s" % self.address_short)
        self.addEventListeners()

        self.content = None  # Load content.json
        self.peers = {}  # Key: ip:port, Value: Peer.Peer
        self.peer_blacklist = SiteManager.peer_blacklist  # Ignore these peers (e.g. myself)
        self.time_announce = 0  # Last announce time to tracker
        self.last_tracker_id = random.randint(0, 10)  # Last announced tracker id
        self.worker_manager = WorkerManager(self)  # Handle site download from other peers
        self.bad_files = {}  # SHA check failed files, need to redownload {"inner.content": 1} (key: file, value: failed accept)
        self.content_updated = None  # content.json update time
        self.notifications = []  # Pending notifications displayed once on page load [error|ok|info, message, timeout]
        self.page_requested = False  # Page viewed in browser
        self.websockets = []  # Active site websocket connections

        self.connection_server = None
        self.storage = SiteStorage(self, allow_create=allow_create)  # Save and load site files
        self.loadSettings(settings)  # Load settings from sites.json
        self.content_manager = ContentManager(self)
        self.content_manager.loadContents()  # Load content.json files
        if "main" in sys.modules and "file_server" in dir(sys.modules["main"]):  # Use global file server by default if possible
            self.connection_server = sys.modules["main"].file_server
        else:
            self.connection_server = None
        if not self.settings.get("auth_key"):  # To auth user in site (Obsolete, will be removed)
            self.settings["auth_key"] = CryptHash.random()
            self.log.debug("New auth key: %s" % self.settings["auth_key"])

        if not self.settings.get("wrapper_key"):  # To auth websocket permissions
            self.settings["wrapper_key"] = CryptHash.random()
            self.log.debug("New wrapper key: %s" % self.settings["wrapper_key"])
Code example #2
File: BigfilePlugin.py Project: 6donote4/ZeroNet
    def hashBigfile(self,
                    read_func,
                    size,
                    piece_size=1024 * 1024,
                    file_out=None):
        self.site.settings["has_bigfile"] = True

        recv = 0
        try:
            piece_hash = CryptHash.sha512t()
            piece_hashes = []
            piece_recv = 0

            mt = merkletools.MerkleTools()
            mt.hash_function = CryptHash.sha512t

            part = ""
            for part in self.readFile(read_func, size):
                if file_out:
                    file_out.write(part)

                recv += len(part)
                piece_recv += len(part)
                piece_hash.update(part)
                if piece_recv >= piece_size:
                    piece_digest = piece_hash.digest()
                    piece_hashes.append(piece_digest)
                    mt.leaves.append(piece_digest)
                    piece_hash = CryptHash.sha512t()
                    piece_recv = 0

                    if len(piece_hashes) % 100 == 0 or recv == size:
                        self.log.info(
                            "- [HASHING:%.0f%%] Pieces: %s, %.1fMB/%.1fMB" %
                            (float(recv) / size * 100, len(piece_hashes),
                             recv / 1024 / 1024, size / 1024 / 1024))
                        part = ""
            if len(part) > 0:
                piece_digest = piece_hash.digest()
                piece_hashes.append(piece_digest)
                mt.leaves.append(piece_digest)
        except Exception as err:
            raise err
        finally:
            if file_out:
                file_out.close()

        mt.make_tree()
        merkle_root = mt.get_merkle_root()
        if type(merkle_root) is bytes:  # Python <3.5
            merkle_root = merkle_root.decode()
        return merkle_root, piece_size, {"sha512_pieces": piece_hashes}
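CryptHash.sha512t above serves both as a hash constructor and as merkletools' hash_function. Assuming the trailing "t" means truncated (SHA-512 cut to 256 bits, matching the 64-char hex digests seen elsewhere in these examples), a compatible stand-in could be:

    import hashlib

    class Sha512t:
        # Sketch: SHA-512 with output truncated to 256 bits, which is what
        # CryptHash.sha512t is assumed to provide.
        def __init__(self, data=b""):
            self.sha512 = hashlib.sha512(data)

        def update(self, data):
            self.sha512.update(data)

        def digest(self):
            return self.sha512.digest()[:32]

        def hexdigest(self):
            return self.sha512.hexdigest()[:64]

    def sha512t(data=b""):
        return Sha512t(data)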
Code example #3
    def verifyPiece(self, inner_path, pos, piece):
        piecemap = self.getPiecemap(inner_path)
        piece_i = int(pos / piecemap["piece_size"])  # Piece index (floor division)
        if CryptHash.sha512sum(piece, format="digest") != piecemap["sha512_pieces"][piece_i]:
            raise VerifyError("Invalid hash")
        return True
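A hypothetical caller of verifyPiece(), checking one downloaded 1MB chunk against the piecemap (the file path and the content_manager handle are illustrative):

    pos = 5 * 1024 * 1024  # byte offset of the sixth 1MB piece
    with open("data/video.mp4", "rb") as f:  # illustrative local copy
        f.seek(pos)
        piece = f.read(1024 * 1024)
    content_manager.verifyPiece("data/video.mp4", pos, piece)  # raises VerifyError on mismatch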
Code example #4
File: TestPeer.py Project: zzp0/ZeroNet
    def testHashfield(self, site):
        sample_hash = site.content_manager.contents["content.json"][
            "files_optional"].values()[0]["sha512"]

        site.storage.verifyFiles(
            quick_check=True)  # Find what optional files we have

        # Check if hashfield has any files
        assert site.content_manager.hashfield
        assert len(site.content_manager.hashfield) > 0

        # Check existing hash
        assert site.content_manager.hashfield.getHashId(
            sample_hash) in site.content_manager.hashfield

        # Add new hash
        new_hash = CryptHash.sha512sum(StringIO("hello"))
        assert site.content_manager.hashfield.getHashId(
            new_hash) not in site.content_manager.hashfield
        assert site.content_manager.hashfield.appendHash(new_hash)
        assert not site.content_manager.hashfield.appendHash(
            new_hash)  # Don't add second time
        assert site.content_manager.hashfield.getHashId(
            new_hash) in site.content_manager.hashfield

        # Remove new hash
        assert site.content_manager.hashfield.removeHash(new_hash)
        assert site.content_manager.hashfield.getHashId(
            new_hash) not in site.content_manager.hashfield
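The hashfield exercised by this test is not shown here; from the assertions it behaves like a small set of short ids derived from each sha512 hex digest. A rough sketch consistent with the test (the 2-byte id derivation is an assumption):

    class Hashfield(set):
        # Sketch: store compact ids instead of full sha512 hashes.
        def getHashId(self, hash):
            return int(hash[0:4], 16)  # assumed: first 4 hex chars as an int

        def appendHash(self, hash):
            hash_id = self.getHashId(hash)
            if hash_id in self:
                return False  # don't add a second time
            self.add(hash_id)
            return True

        def removeHash(self, hash):
            hash_id = self.getHashId(hash)
            if hash_id not in self:
                return False
            self.remove(hash_id)
            return True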
Code example #5
File: ContentManager.py Project: xeddmc/ZeroNet
    def hashFiles(self, dir_inner_path, ignore_pattern=None, optional_pattern=None):
        files_node = {}
        files_optional_node = {}

        for file_relative_path in self.site.storage.list(dir_inner_path):
            file_name = helper.getFilename(file_relative_path)

            ignored = optional = False
            if file_name == "content.json":
                ignored = True
            elif ignore_pattern and re.match(ignore_pattern, file_relative_path):
                ignored = True
            elif file_name.startswith("."):
                ignored = True
            elif optional_pattern and re.match(optional_pattern, file_relative_path):
                optional = True

            if ignored:  # Ignore content.json, defined regexp and files starting with .
                self.log.info("- [SKIPPED] %s" % file_relative_path)
            else:
                file_path = self.site.storage.getPath(dir_inner_path + "/" + file_relative_path)
                sha512sum = CryptHash.sha512sum(file_path)  # Calculate sha512 sum of file
                if optional:
                    self.log.info("- [OPTIONAL] %s (SHA512: %s)" % (file_relative_path, sha512sum))
                    files_optional_node[file_relative_path] = {"sha512": sha512sum, "size": os.path.getsize(file_path)}
                else:
                    self.log.info("- %s (SHA512: %s)" % (file_relative_path, sha512sum))
                    files_node[file_relative_path] = {"sha512": sha512sum, "size": os.path.getsize(file_path)}
        return files_node, files_optional_node
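CryptHash.sha512sum() is called here with a file path, and elsewhere in these examples with an open file object or with format="digest". A stand-in covering those uses, assuming output truncated to 256 bits like the sha512t sketch earlier:

    import hashlib

    def sha512sum(file, blocksize=65536, format="hexdigest"):
        # Sketch: stream-hash a path or an already opened binary file.
        if hasattr(file, "endswith"):  # a str path was passed
            file = open(file, "rb")
        hash = hashlib.sha512()
        for block in iter(lambda: file.read(blocksize), b""):
            hash.update(block)
        if format == "digest":
            return hash.digest()[:32]  # raw bytes, as verifyPiece compares
        return hash.hexdigest()[:64]  # hex, truncated to 256 bits (assumption)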
Code example #6
    def testPackArchive(self, num_run=1, archive_type="gz"):
        """
        Test creating tar archive files
        """
        yield "x 100 x 5KB "
        from Crypt import CryptHash

        hash_valid_db = {
            "gz": "92caec5121a31709cbbc8c11b0939758e670b055bbbe84f9beb3e781dfde710f",
            "bz2": "b613f41e6ee947c8b9b589d3e8fa66f3e28f63be23f4faf015e2f01b5c0b032d",
            "xz": "ae43892581d770959c8d993daffab25fd74490b7cf9fafc7aaee746f69895bcb",
        }
        archive_path = '%s/test.tar.%s' % (config.data_dir, archive_type)
        for i in range(num_run):
            self.createArchiveFile(archive_path, archive_type=archive_type)
            yield "."

        archive_size = os.path.getsize(archive_path) / 1024
        yield "(Generated file size: %.2fkB)" % archive_size

        hash = CryptHash.sha512sum(
            open("%s/test.tar.%s" % (config.data_dir, archive_type), "rb"))
        valid = hash_valid_db[archive_type]
        assert hash == valid, "Invalid hash: %s != %s<br>" % (hash, valid)

        if os.path.isfile(archive_path):
            os.unlink(archive_path)
Code example #7
File: BigfilePlugin.py Project: zhilinwww/ZeroNet
    def actionBigfileUploadInit(self, to, inner_path, size):
        valid_signers = self.site.content_manager.getValidSigners(inner_path)
        auth_address = self.user.getAuthAddress(self.site.address)
        if not self.site.settings["own"] and auth_address not in valid_signers:
            self.log.error("FileWrite forbidden %s not in valid_signers %s" % (auth_address, valid_signers))
            return self.response(to, {"error": "Forbidden, you can only modify your own files"})

        nonce = CryptHash.random()
        piece_size = 1024 * 1024
        inner_path = self.site.content_manager.sanitizePath(inner_path)
        file_info = self.site.content_manager.getFileInfo(inner_path, new_file=True)

        content_inner_path_dir = helper.getDirname(file_info["content_inner_path"])
        file_relative_path = inner_path[len(content_inner_path_dir):]

        upload_nonces[nonce] = {
            "added": time.time(),
            "site": self.site,
            "inner_path": inner_path,
            "websocket_client": self,
            "size": size,
            "piece_size": piece_size,
            "piecemap": inner_path + ".piecemap.msgpack"
        }
        return {
            "url": "/ZeroNet-Internal/BigfileUpload?upload_nonce=" + nonce,
            "piece_size": piece_size,
            "inner_path": inner_path,
            "file_relative_path": file_relative_path
        }
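For context, a hypothetical client-side counterpart: ask the site websocket for an upload slot, then POST the bytes to the returned one-time URL. ws_request, the paths, and the file size are illustrative; 43110 is ZeroNet's default UI port:

    import requests  # illustrative client only

    init = ws_request("bigfileUploadInit", inner_path="data/users/1ExampleAddr/video.mp4", size=1234567)
    with open("video.mp4", "rb") as f:
        requests.post("http://127.0.0.1:43110" + init["url"], data=f)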
Code example #8
    def actionBigfileUploadInit(self, to, inner_path, size):
        valid_signers = self.site.content_manager.getValidSigners(inner_path)
        auth_address = self.user.getAuthAddress(self.site.address)
        if not self.site.settings["own"] and auth_address not in valid_signers:
            self.log.error("FileWrite forbidden %s not in valid_signers %s" %
                           (auth_address, valid_signers))
            return self.response(
                to, {"error": "Forbidden, you can only modify your own files"})

        nonce = CryptHash.random()
        piece_size = 1024 * 1024
        inner_path = self.site.content_manager.sanitizePath(inner_path)
        file_info = self.site.content_manager.getFileInfo(inner_path,
                                                          new_file=True)

        content_inner_path_dir = helper.getDirname(
            file_info["content_inner_path"])
        file_relative_path = inner_path[len(content_inner_path_dir):]

        upload_nonces[nonce] = {
            "added": time.time(),
            "site": self.site,
            "inner_path": inner_path,
            "websocket_client": self,
            "size": size,
            "piece_size": piece_size,
            "piecemap": inner_path + ".piecemap.msgpack"
        }
        return {
            "url": "/ZeroNet-Internal/BigfileUpload?upload_nonce=" + nonce,
            "piece_size": piece_size,
            "inner_path": inner_path,
            "file_relative_path": file_relative_path
        }
Code example #9
	def __init__(self, address, allow_create=True, settings=None, public_key=None):
		if public_key:
			self.address = CryptArk.getAddress(public_key)
			self.publicKey = public_key
		else:
			self.address = address
			self.publicKey = SiteManager.getPublicKey(address)
		self.address_hash = hashlib.sha256(self.address).digest()
		self.address_short = "%s..%s" % (self.address[:6], self.address[-4:])  # Short address for logging
		self.log = logging.getLogger("Site:%s" % self.address_short)
		self.addEventListeners()

		self.content = None  # Load content.json
		self.peers = {}  # Key: ip:port, Value: Peer.Peer
		self.peers_recent = collections.deque(maxlen=100)
		self.peer_blacklist = SiteManager.peer_blacklist  # Ignore these peers (e.g. myself)
		self.worker_manager = WorkerManager(self)  # Handle site download from other peers
		self.bad_files = {}  # SHA check failed files, need to redownload {"inner.content": 1} (key: file, value: failed accept)
		self.content_updated = None  # content.json update time
		self.notifications = []  # Pending notifications displayed once on page load [error|ok|info, message, timeout]
		self.page_requested = False  # Page viewed in browser
		self.websockets = []  # Active site websocket connections

		self.connection_server = None
		self.loadSettings(settings)  # Load settings from sites.json
		self.storage = SiteStorage(self, allow_create=allow_create)  # Save and load site files
		self.content_manager = ContentManager(self)
		self.content_manager.loadContents()  # Load content.json files
		if "main" in sys.modules and "file_server" in dir(sys.modules["main"]):  # Use global file server by default if possible
			self.connection_server = sys.modules["main"].file_server
		else:
			self.log.debug("Creating connection server")   # remove
			self.connection_server = FileServer()

		self.announcer = SiteAnnouncer(self)  # Announce and get peer list from other nodes

		if not self.settings.get("auth_key"):  # To auth user in site (Obsolete, will be removed)
			self.settings["auth_key"] = CryptHash.random()
			self.log.debug("New auth key: %s" % self.settings["auth_key"])

		if not self.settings.get("wrapper_key"):  # To auth websocket permissions
			self.settings["wrapper_key"] = CryptHash.random()
			self.log.debug("New wrapper key: %s" % self.settings["wrapper_key"])

		if not self.settings.get("ajax_key"):  # To auth websocket permissions
			self.settings["ajax_key"] = CryptHash.random()
			self.log.debug("New ajax key: %s" % self.settings["ajax_key"])
Code example #10
File: BigfilePlugin.py Project: zhilinwww/ZeroNet
    def hashBigfile(self, file_in, size, piece_size=1024 * 1024, file_out=None):
        self.site.settings["has_bigfile"] = True

        recv = 0
        try:
            piece_hash = CryptHash.sha512t()
            piece_hashes = []
            piece_recv = 0

            mt = merkletools.MerkleTools()
            mt.hash_function = CryptHash.sha512t

            part = ""
            for part in self.readFile(file_in, size):
                if file_out:
                    file_out.write(part)

                recv += len(part)
                piece_recv += len(part)
                piece_hash.update(part)
                if piece_recv >= piece_size:
                    piece_digest = piece_hash.digest()
                    piece_hashes.append(piece_digest)
                    mt.leaves.append(piece_digest)
                    piece_hash = CryptHash.sha512t()
                    piece_recv = 0

                    if len(piece_hashes) % 100 == 0 or recv == size:
                        self.log.info("- [HASHING:%.0f%%] Pieces: %s, %.1fMB/%.1fMB" % (
                            float(recv) / size * 100, len(piece_hashes), recv / 1024 / 1024, size / 1024 / 1024
                        ))
                        part = ""
            if len(part) > 0:
                piece_digest = piece_hash.digest()
                piece_hashes.append(piece_digest)
                mt.leaves.append(piece_digest)
        except Exception as err:
            raise err
        finally:
            if file_out:
                file_out.close()

        mt.make_tree()
        return mt.get_merkle_root(), piece_size, {
            "sha512_pieces": piece_hashes
        }
Code example #11
File: BigfilePlugin.py Project: 6donote4/ZeroNet
    def actionBigfileUploadInit(self, to, inner_path, size, protocol="xhr"):
        valid_signers = self.site.content_manager.getValidSigners(inner_path)
        auth_address = self.user.getAuthAddress(self.site.address)
        if not self.site.settings["own"] and auth_address not in valid_signers:
            self.log.error("FileWrite forbidden %s not in valid_signers %s" %
                           (auth_address, valid_signers))
            return self.response(
                to, {"error": "Forbidden, you can only modify your own files"})

        nonce = CryptHash.random()
        piece_size = 1024 * 1024
        inner_path = self.site.content_manager.sanitizePath(inner_path)
        file_info = self.site.content_manager.getFileInfo(inner_path,
                                                          new_file=True)

        content_inner_path_dir = helper.getDirname(
            file_info["content_inner_path"])
        file_relative_path = inner_path[len(content_inner_path_dir):]

        upload_nonces[nonce] = {
            "added": time.time(),
            "site": self.site,
            "inner_path": inner_path,
            "websocket_client": self,
            "size": size,
            "piece_size": piece_size,
            "piecemap": inner_path + ".piecemap.msgpack"
        }

        if protocol == "xhr":
            return {
                "url": "/ZeroNet-Internal/BigfileUpload?upload_nonce=" + nonce,
                "piece_size": piece_size,
                "inner_path": inner_path,
                "file_relative_path": file_relative_path
            }
        elif protocol == "websocket":
            server_url = self.request.getWsServerUrl()
            if server_url:
                proto, host = server_url.split("://")
                origin = proto.replace("http", "ws") + "://" + host
            else:
                origin = "{origin}"
            return {
                "url": origin + "/ZeroNet-Internal/BigfileUploadWebsocket?upload_nonce=" + nonce,
                "piece_size": piece_size,
                "inner_path": inner_path,
                "file_relative_path": file_relative_path
            }
        else:
            return {"error": "Unknown protocol"}
Code example #12
    def hashFiles(self,
                  dir_inner_path,
                  ignore_pattern=None,
                  optional_pattern=None):
        files_node = {}
        files_optional_node = {}
        if not re.match("^[a-zA-Z0-9_@=\.\+-/]*$", dir_inner_path):
            ignored = True
            self.log.error(
                "- [ERROR] Only ascii encoded directories allowed: %s" %
                dir_inner_path)

        for file_relative_path in self.site.storage.list(dir_inner_path):
            file_name = helper.getFilename(file_relative_path)

            ignored = optional = False
            if file_name == "content.json":
                ignored = True
            elif ignore_pattern and re.match(ignore_pattern,
                                             file_relative_path):
                ignored = True
            elif file_name.startswith(".") or file_name.endswith(
                    "-old") or file_name.endswith("-new"):
                ignored = True
            elif not re.match("^[a-zA-Z0-9_@=\.\+\-/]+$", file_relative_path):
                ignored = True
                self.log.error(
                    "- [ERROR] Only ascii encoded filenames allowed: %s" %
                    file_relative_path)
            elif optional_pattern and re.match(optional_pattern,
                                               file_relative_path):
                optional = True

            if ignored:  # Ignore content.json, defined regexp and files starting with .
                self.log.info("- [SKIPPED] %s" % file_relative_path)
            else:
                file_path = self.site.storage.getPath(dir_inner_path + "/" +
                                                      file_relative_path)
                sha512sum = CryptHash.sha512sum(
                    file_path)  # Calculate sha512 sum of file
                if optional:
                    self.log.info("- [OPTIONAL] %s (SHA512: %s)" %
                                  (file_relative_path, sha512sum))
                    files_optional_node[file_relative_path] = {
                        "sha512": sha512sum,
                        "size": os.path.getsize(file_path)
                    }
                    self.hashfield.appendHash(sha512sum)
                else:
                    self.log.info("- %s (SHA512: %s)" %
                                  (file_relative_path, sha512sum))
                    files_node[file_relative_path] = {
                        "sha512": sha512sum,
                        "size": os.path.getsize(file_path)
                    }
        return files_node, files_optional_node
Code example #13
    def __init__(self, ip=None, port=None, request_handler=None):
        if not ip:
            if config.fileserver_ip_type == "ipv6":
                ip = "::1"
            else:
                ip = "127.0.0.1"
            port = 15441
        self.ip = ip
        self.port = port
        self.last_connection_id = 1  # Connection id incrementer
        self.log = logging.getLogger("ConnServer")
        self.port_opened = {}
        self.peer_blacklist = SiteManager.peer_blacklist

        self.tor_manager = TorManager(self.ip, self.port)
        self.connections = []  # Connections
        self.whitelist = config.ip_local  # No flood protection on these IPs
        self.ip_incoming = {}  # Incoming connections from ip in the last minute to avoid connection flood
        self.broken_ssl_ips = {}  # IPs of broken ssl connections
        self.ips = {}  # Connection by ip
        self.has_internet = True  # Internet outage detection

        self.stream_server = None
        self.stream_server_proxy = None
        self.running = False
        self.stopping = False
        self.thread_checker = None

        self.stat_recv = defaultdict(lambda: defaultdict(int))
        self.stat_sent = defaultdict(lambda: defaultdict(int))
        self.bytes_recv = 0
        self.bytes_sent = 0
        self.num_recv = 0
        self.num_sent = 0

        self.num_incoming = 0
        self.num_outgoing = 0
        self.had_external_incoming = False

        self.timecorrection = 0.0
        self.pool = Pool(500)  # do not accept more than 500 connections

        # Bittorrent style peerid
        self.peer_id = "-UT3530-%s" % CryptHash.random(12, "base64")

        # Check msgpack version
        if msgpack.version[0] == 0 and msgpack.version[1] < 4:
            self.log.error(
                "Error: Unsupported msgpack version: %s (<0.4.0), please run `sudo apt-get install python-pip; sudo pip install msgpack --upgrade`"
                % str(msgpack.version))
            sys.exit(0)

        if request_handler:
            self.handleRequest = request_handler
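The peer id above follows the Azureus-style BitTorrent convention: a dash-delimited client tag plus random characters. With the random() sketch from earlier:

    # "-UT3530-" mimics the uTorrent 3.5.3 client tag; 12 random base64
    # characters follow, e.g. "-UT3530-aB3xZk91QwEr".
    peer_id = "-UT3530-%s" % random(12, "base64")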
Code example #14
File: BigfilePlugin.py Project: filips123/ZeroNet
    def verifyPiece(self, inner_path, pos, piece):
        try:
            piecemap = self.getPiecemap(inner_path)
        except OSError as err:
            raise VerifyError("Unable to download piecemap: %s" % err)

        piece_i = int(pos / piecemap["piece_size"])
        if CryptHash.sha512sum(
                piece, format="digest") != piecemap["sha512_pieces"][piece_i]:
            raise VerifyError("Invalid hash")
        return True
Code example #15
File: ContentManager.py Project: binerf/zeronet_tor
    def hashFile(self, dir_inner_path, file_relative_path, optional=False):
        back = {}
        file_inner_path = dir_inner_path + "/" + file_relative_path

        file_path = self.site.storage.getPath(file_inner_path)
        file_size = os.path.getsize(file_path)
        sha512sum = CryptHash.sha512sum(file_path)  # Calculate sha512 sum of file
        if optional and not self.hashfield.hasHash(sha512sum):
            self.optionalDownloaded(file_inner_path, self.hashfield.getHashId(sha512sum), file_size, own=True)

        back[file_relative_path] = {"sha512": sha512sum, "size": os.path.getsize(file_path)}
        return back
Code example #16
    def __init__(self, ip=None, port=None, request_handler=None):
        self.ip = ip
        self.port = port
        self.last_connection_id = 1  # Connection id incrementer
        self.log = logging.getLogger("ConnServer")
        self.port_opened = None
        self.peer_blacklist = SiteManager.peer_blacklist

        if config.tor != "disabled":
            self.tor_manager = TorManager(self.ip, self.port)
        else:
            self.tor_manager = None

        self.connections = []  # Connections
        self.whitelist = config.ip_local  # No flood protection on these IPs
        self.ip_incoming = {}  # Incoming connections from ip in the last minute to avoid connection flood
        self.broken_ssl_ips = {}  # Peerids of broken ssl connections
        self.ips = {}  # Connection by ip
        self.has_internet = True  # Internet outage detection

        self.stream_server = None
        self.running = True
        self.thread_checker = gevent.spawn(self.checkConnections)

        self.stat_recv = defaultdict(lambda: defaultdict(int))
        self.stat_sent = defaultdict(lambda: defaultdict(int))
        self.bytes_recv = 0
        self.bytes_sent = 0
        self.num_recv = 0
        self.num_sent = 0

        # Bittorrent style peerid
        self.peer_id = "-ZN0%s-%s" % (config.version.replace(
            ".", ""), CryptHash.random(12, "base64"))

        # Check msgpack version
        if msgpack.version[0] == 0 and msgpack.version[1] < 4:
            self.log.error(
                "Error: Unsupported msgpack version: %s (<0.4.0), please run `sudo apt-get install python-pip; sudo pip install msgpack --upgrade`"
                % str(msgpack.version))
            sys.exit(0)

        if port:  # Listen server on a port
            self.pool = Pool(500)  # do not accept more than 500 connections
            self.stream_server = StreamServer(
                (ip.replace("*", "0.0.0.0"), port),
                self.handleIncomingConnection,
                spawn=self.pool,
                backlog=100)
            if request_handler:
                self.handleRequest = request_handler
Code example #17
File: BenchmarkPlugin.py Project: filips123/ZeroNet
    def actionBenchmark(self):
        global benchmark_key
        script_nonce = self.getScriptNonce()
        if not benchmark_key:
            benchmark_key = CryptHash.random(encoding="base64")
        self.sendHeader(script_nonce=script_nonce)

        if "Multiuser" in PluginManager.plugin_manager.plugin_names and not config.multiuser_local:
            yield "This function is disabled on this proxy"
            return

        data = self.render(plugin_dir + "/media/benchmark.html",
                           script_nonce=script_nonce,
                           benchmark_key=benchmark_key,
                           filter=re.sub("[^A-Za-z0-9]", "",
                                         self.get.get("filter", "")))
        yield data
Code example #18
File: ConnectionServer.py Project: zhilinwww/ZeroNet
    def __init__(self, ip=None, port=None, request_handler=None):
        self.ip = ip
        self.port = port
        self.last_connection_id = 1  # Connection id incrementer
        self.log = logging.getLogger("ConnServer")
        self.port_opened = None

        if config.tor != "disabled":
            self.tor_manager = TorManager(self.ip, self.port)
        else:
            self.tor_manager = None

        self.connections = []  # Connections
        self.whitelist = config.ip_local  # No flood protection on these IPs
        self.ip_incoming = {}  # Incoming connections from ip in the last minute to avoid connection flood
        self.broken_ssl_peer_ids = {}  # Peerids of broken ssl connections
        self.ips = {}  # Connection by ip
        self.has_internet = True  # Internet outage detection

        self.stream_server = None
        self.running = True
        self.thread_checker = gevent.spawn(self.checkConnections)

        self.stat_recv = defaultdict(lambda: defaultdict(int))
        self.stat_sent = defaultdict(lambda: defaultdict(int))
        self.bytes_recv = 0
        self.bytes_sent = 0

        # Bittorrent style peerid
        self.peer_id = "-ZN0%s-%s" % (config.version.replace(".", ""), CryptHash.random(12, "base64"))

        # Check msgpack version
        if msgpack.version[0] == 0 and msgpack.version[1] < 4:
            self.log.error(
                "Error: Unsupported msgpack version: %s (<0.4.0), please run `sudo apt-get install python-pip; sudo pip install msgpack-python --upgrade`" %
                str(msgpack.version)
            )
            sys.exit(0)

        if port:  # Listen server on a port
            self.pool = Pool(500)  # do not accept more than 500 connections
            self.stream_server = StreamServer(
                (ip.replace("*", "0.0.0.0"), port), self.handleIncomingConnection, spawn=self.pool, backlog=100
            )
            if request_handler:
                self.handleRequest = request_handler
Code example #19
    def testPackZip(self, num_run=1):
        """
        Test zip file creating
        """
        yield "x 100 x 5KB "
        from Crypt import CryptHash
        zip_path = '%s/test.zip' % config.data_dir
        for i in range(num_run):
            self.createZipFile(zip_path)
            yield "."

        archive_size = os.path.getsize(zip_path) / 1024
        yield "(Generated file size: %.2fkB)" % archive_size

        hash = CryptHash.sha512sum(open(zip_path, "rb"))
        valid = "cb32fb43783a1c06a2170a6bc5bb228a032b67ff7a1fd7a5efb9b467b400f553"
        assert hash == valid, "Invalid hash: %s != %s<br>" % (hash, valid)
        os.unlink(zip_path)
Code example #20
    def hashFiles(self,
                  dir_inner_path,
                  ignore_pattern=None,
                  optional_pattern=None):
        files_node = {}
        files_optional_node = {}

        for file_relative_path in self.site.storage.list(dir_inner_path):
            file_name = helper.getFilename(file_relative_path)

            ignored = optional = False
            if file_name == "content.json":
                ignored = True
            elif ignore_pattern and re.match(ignore_pattern,
                                             file_relative_path):
                ignored = True
            elif file_name.startswith("."):
                ignored = True
            elif optional_pattern and re.match(optional_pattern,
                                               file_relative_path):
                optional = True

            if ignored:  # Ignore content.json, defined regexp and files starting with .
                self.log.info("- [SKIPPED] %s" % file_relative_path)
            else:
                file_path = self.site.storage.getPath(dir_inner_path + "/" +
                                                      file_relative_path)
                sha512sum = CryptHash.sha512sum(
                    file_path)  # Calculate sha512 sum of file
                if optional:
                    self.log.info("- [OPTIONAL] %s (SHA512: %s)" %
                                  (file_relative_path, sha512sum))
                    files_optional_node[file_relative_path] = {
                        "sha512": sha512sum,
                        "size": os.path.getsize(file_path)
                    }
                else:
                    self.log.info("- %s (SHA512: %s)" %
                                  (file_relative_path, sha512sum))
                    files_node[file_relative_path] = {
                        "sha512": sha512sum,
                        "size": os.path.getsize(file_path)
                    }
        return files_node, files_optional_node
Code example #21
File: TestPeer.py Project: xeddmc/ZeroNet
    def testHashfield(self, site):
        sample_hash = site.content_manager.contents["content.json"]["files_optional"].values()[0]["sha512"]
        site.storage.verifyFiles(quick_check=True)  # Find what optional files we have

        # Check if hashfield has any files
        assert len(site.content_manager.hashfield) > 0

        # Check existing hash
        assert site.content_manager.hashfield.getHashId(sample_hash) in site.content_manager.hashfield

        # Add new hash
        new_hash = CryptHash.sha512sum(StringIO("hello"))
        assert site.content_manager.hashfield.getHashId(new_hash) not in site.content_manager.hashfield
        assert site.content_manager.hashfield.appendHash(new_hash)
        assert not site.content_manager.hashfield.appendHash(new_hash)  # Don't add second time
        assert site.content_manager.hashfield.getHashId(new_hash) in site.content_manager.hashfield

        # Remove new hash
        assert site.content_manager.hashfield.removeHash(new_hash)
        assert site.content_manager.hashfield.getHashId(new_hash) not in site.content_manager.hashfield
Code example #22
File: ContentManager.py Project: kustomzone/ZeroNet
    def hashFiles(self, dir_inner_path, ignore_pattern=None, optional_pattern=None):
        files_node = {}
        files_optional_node = {}
        if not re.match("^[a-zA-Z0-9_@=\.\+-/]*$", dir_inner_path):
            ignored = True
            self.log.error("- [ERROR] Only ascii encoded directories allowed: %s" % dir_inner_path)

        for file_relative_path in self.site.storage.list(dir_inner_path):
            file_name = helper.getFilename(file_relative_path)

            ignored = optional = False
            if file_name == "content.json":
                ignored = True
            elif ignore_pattern and re.match(ignore_pattern, file_relative_path):
                ignored = True
            elif file_name.startswith(".") or file_name.endswith("-old") or file_name.endswith("-new"):
                ignored = True
            elif not re.match("^[a-zA-Z0-9_@=\.\+\-/]+$", file_relative_path):
                ignored = True
                self.log.error("- [ERROR] Only ascii encoded filenames allowed: %s" % file_relative_path)
            elif optional_pattern and re.match(optional_pattern, file_relative_path):
                optional = True

            if ignored:  # Ignore content.json, defined regexp and files starting with .
                self.log.info("- [SKIPPED] %s" % file_relative_path)
            else:
                file_inner_path = dir_inner_path + "/" + file_relative_path
                file_path = self.site.storage.getPath(file_inner_path)
                sha512sum = CryptHash.sha512sum(file_path)  # Calculate sha512 sum of file
                if optional:
                    self.log.info("- [OPTIONAL] %s (SHA512: %s)" % (file_relative_path, sha512sum))
                    file_size = os.path.getsize(file_path)
                    files_optional_node[file_relative_path] = {"sha512": sha512sum, "size": file_size}
                    if not self.hashfield.hasHash(sha512sum):
                        self.optionalDownloaded(file_inner_path, sha512sum, file_size, own=True)
                else:
                    self.log.info("- %s (SHA512: %s)" % (file_relative_path, sha512sum))
                    files_node[file_relative_path] = {"sha512": sha512sum, "size": os.path.getsize(file_path)}
        return files_node, files_optional_node
Code example #23
    def __init__(self, ip=None, port=None, request_handler=None):
        self.ip = ip
        self.port = port
        self.last_connection_id = 1  # Connection id incrementer
        self.log = logging.getLogger("ConnServer")
        self.port_opened = None

        self.connections = []  # Connections
        self.ip_incoming = {}  # Incoming connections from ip in the last minute to avoid connection flood
        self.broken_ssl_peer_ids = {}  # Peerids of broken ssl connections
        self.ips = {}  # Connection by ip
        self.peer_ids = {}  # Connections by peer_ids

        self.running = True
        self.thread_checker = gevent.spawn(self.checkConnections)

        self.bytes_recv = 0
        self.bytes_sent = 0

        # Bittorrent style peerid
        self.peer_id = "-ZN0%s-%s" % (config.version.replace(".", ""), CryptHash.random(12, "base64"))

        # Check msgpack version
        if msgpack.version[0] == 0 and msgpack.version[1] < 4:
            self.log.error(
                "Error: Unsupported msgpack version: %s (<0.4.0), please run `sudo pip install msgpack-python --upgrade`"
                % str(msgpack.version)
            )
            sys.exit(0)

        if port:  # Listen server on a port
            self.pool = Pool(1000)  # do not accept more than 1000 connections
            self.stream_server = StreamServer(
                (ip.replace("*", ""), port), self.handleIncomingConnection, spawn=self.pool, backlog=100
            )
            if request_handler:
                self.handleRequest = request_handler

        CryptConnection.manager.loadCerts()
Code example #24
File: ConnectionServer.py Project: 0-vortex/ZeroNet
    def __init__(self, ip=None, port=None, request_handler=None):
        self.ip = ip
        self.port = port
        self.last_connection_id = 1  # Connection id incrementer
        self.log = logging.getLogger("ConnServer")
        self.port_opened = None
        self.peer_blacklist = SiteManager.peer_blacklist

        self.tor_manager = TorManager(self.ip, self.port)
        self.connections = []  # Connections
        self.whitelist = config.ip_local  # No flood protection on these IPs
        self.ip_incoming = {}  # Incoming connections from ip in the last minute to avoid connection flood
        self.broken_ssl_ips = {}  # IPs of broken ssl connections
        self.ips = {}  # Connection by ip
        self.has_internet = True  # Internet outage detection

        self.stream_server = None
        self.running = False

        self.stat_recv = defaultdict(lambda: defaultdict(int))
        self.stat_sent = defaultdict(lambda: defaultdict(int))
        self.bytes_recv = 0
        self.bytes_sent = 0
        self.num_recv = 0
        self.num_sent = 0

        # Bittorrent style peerid
        self.peer_id = "-UT3530-%s" % CryptHash.random(12, "base64")

        # Check msgpack version
        if msgpack.version[0] == 0 and msgpack.version[1] < 4:
            self.log.error(
                "Error: Unsupported msgpack version: %s (<0.4.0), please run `sudo apt-get install python-pip; sudo pip install msgpack --upgrade`" %
                str(msgpack.version)
            )
            sys.exit(0)

        if request_handler:
            self.handleRequest = request_handler
Code example #25
    def __init__(self, ip=None, port=None, request_handler=None):
        self.ip = ip
        self.port = port
        self.last_connection_id = 1  # Connection id incrementer
        self.log = logging.getLogger("ConnServer")
        self.port_opened = None

        self.connections = []  # Connections
        self.ip_incoming = {}  # Incoming connections from ip in the last minute to avoid connection flood
        self.broken_ssl_peer_ids = {}  # Peerids of broken ssl connections
        self.ips = {}  # Connection by ip

        self.running = True
        self.thread_checker = gevent.spawn(self.checkConnections)

        self.bytes_recv = 0
        self.bytes_sent = 0

        # Bittorrent style peerid
        self.peer_id = "-ZN0%s-%s" % (config.version.replace(
            ".", ""), CryptHash.random(12, "base64"))

        # Check msgpack version
        if msgpack.version[0] == 0 and msgpack.version[1] < 4:
            self.log.error(
                "Error: Unsupported msgpack version: %s (<0.4.0), please run `sudo pip install msgpack-python --upgrade`"
                % str(msgpack.version))
            sys.exit(0)

        if port:  # Listen server on a port
            self.pool = Pool(1000)  # do not accept more than 1000 connections
            self.stream_server = StreamServer((ip.replace("*", ""), port),
                                              self.handleIncomingConnection,
                                              spawn=self.pool,
                                              backlog=100)
            if request_handler:
                self.handleRequest = request_handler
Code example #26
File: UiRequest.py Project: videofindersTV/ZeroNet
    def getWrapperNonce(self):
        wrapper_nonce = CryptHash.random()
        self.server.wrapper_nonces.append(wrapper_nonce)
        return wrapper_nonce
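A hypothetical consuming side for these one-time nonces: each nonce authorizes exactly one request, then is discarded (the method name is an assumption):

    def checkWrapperNonce(self, wrapper_nonce):
        # Sketch: valid once, then removed from the server-side list.
        if wrapper_nonce in self.server.wrapper_nonces:
            self.server.wrapper_nonces.remove(wrapper_nonce)
            return True
        return False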
Code example #27
File: ContentManager.py Project: Mi-616/ZeroNet
                    else:
                        return True
                else:  # Old style signing
                    if CryptBitcoin.verify(sign_content, self.site.address, sign):
                        return True
                    else:
                        raise VerifyError("Invalid old-style sign")

            except Exception as err:
                self.log.warning("Verify sign error: %s" % Debug.formatException(err))
                raise err

        else:  # Check using sha512 hash
            file_info = self.getFileInfo(inner_path)
            if file_info:
                if CryptHash.sha512sum(file) != file_info["sha512"]:
                    raise VerifyError("Invalid hash")

                if file_info.get("size", 0) != file.tell():
                    raise VerifyError(
                        "File size does not match for %s: %s <> %s" %
                        (inner_path, file.tell(), file_info.get("size", 0))
                    )

                return True

            else:  # File not in content.json
                raise VerifyError("File not in content.json")

    def optionalDownloaded(self, inner_path, hash, size=None, own=False):
        if size is None:
Code example #28
File: StatsPlugin.py Project: TamtamHero/ZeroNet
			for i in range(10):
				yield "."
				ok = CryptBitcoin.verify(data, address, sign)
			assert ok, "does not verify from %s" % address
		CryptBitcoin.opensslVerify = opensslVerify_bk


		yield "<br>CryptHash:<br>"
		from Crypt import CryptHash
		from cStringIO import StringIO

		data = StringIO("Hello" * 1024 * 1024)  # 5MB
		with benchmark("sha512 x 10 000", 1):
			for i in range(10):
				for y in range(10000):
					hash = CryptHash.sha512sum(data)
				yield "."
			valid = "cf83e1357eefb8bdf1542850d66d8007d620e4050b5715dc83f4a921d36ce9ce"
			assert hash == valid, "%s != %s" % (hash, valid)


		yield "<br>Db:<br>"
		from Db import Db

		schema = {
			"db_name": "TestDb",
			"db_file": "%s/benchmark.db" % config.data_dir,
			"maps": {
				".*": {
					"to_table": {
						"test": "test"
Code example #29
File: ContentManager.py Project: uchiyou/ZeroNet
                else:  # Old style signing
                    if CryptBitcoin.verify(sign_content, self.site.address,
                                           sign):
                        return True
                    else:
                        raise VerifyError("Invalid old-style sign")

            except Exception as err:
                self.log.warning("Verify sign error: %s" %
                                 Debug.formatException(err))
                raise err

        else:  # Check using sha512 hash
            file_info = self.getFileInfo(inner_path)
            if file_info:
                if CryptHash.sha512sum(file) != file_info["sha512"]:
                    raise VerifyError("Invalid hash")

                if file_info.get("size", 0) != file.tell():
                    raise VerifyError(
                        "File size does not match for %s: %s <> %s" %
                        (inner_path, file.tell(), file_info.get("size", 0)))

                return True

            else:  # File not in content.json
                raise VerifyError("File not in content.json")

    def optionalDownloaded(self, inner_path, hash, size=None, own=False):
        if size is None:
            size = self.site.storage.getSize(inner_path)
Code example #30
                    else:
                        return self.verifyContent(inner_path, new_content)
                else:  # Old style signing
                    if CryptBitcoin.verify(sign_content, self.site.address, sign):
                        return self.verifyContent(inner_path, new_content)
                    else:
                        raise VerifyError("Invalid old-style sign")

            except Exception as err:
                self.log.warning("%s: verify sign error: %s" % (inner_path, Debug.formatException(err)))
                raise err

        else:  # Check using sha512 hash
            file_info = self.getFileInfo(inner_path)
            if file_info:
                if CryptHash.sha512sum(file) != file_info.get("sha512", ""):
                    raise VerifyError("Invalid hash")

                if file_info.get("size", 0) != file.tell():
                    raise VerifyError(
                        "File size does not match for %s: %s <> %s" %
                        (inner_path, file.tell(), file_info.get("size", 0))
                    )

                return True

            else:  # File not in content.json
                raise VerifyError("File not in content.json")

    def optionalDelete(self, inner_path):
        self.site.storage.delete(inner_path)
Code example #31
    def sign(self,
             inner_path="content.json",
             privatekey=None,
             filewrite=True,
             update_changed_files=False):
        content = self.contents.get(inner_path)
        if not content:  # Content does not exist yet, load default one
            self.log.info("File %s does not exist yet, loading default values..." %
                          inner_path)
            content = {"files": {}, "signs": {}}  # Default content.json
            if inner_path == "content.json":  # Its the root content.json, add some more fields
                content["title"] = "%s - ZeroNet_" % self.site.address
                content["description"] = ""
                content["signs_required"] = 1
                content["ignore"] = ""

        directory = self.toDir(self.site.storage.getPath(inner_path))
        self.log.info("Opening site data directory: %s..." % directory)

        hashed_files = {}
        changed_files = [inner_path]
        for root, dirs, files in os.walk(directory):
            for file_name in files:
                file_path = self.site.storage.getPath(
                    "%s/%s" % (root.strip("/"), file_name))
                file_inner_path = re.sub(re.escape(directory), "", file_path)

                if file_name == "content.json" or (
                        content.get("ignore")
                        and re.match(content["ignore"], file_inner_path)
                ) or file_name.startswith(
                        "."
                ):  # Ignore content.json, definied regexp and files starting with .
                    self.log.info("- [SKIPPED] %s" % file_inner_path)
                else:
                    sha512sum = CryptHash.sha512sum(
                        file_path)  # Calculate sha512 sum of file
                    self.log.info("- %s (SHA512: %s)" %
                                  (file_inner_path, sha512sum))
                    hashed_files[file_inner_path] = {
                        "sha512": sha512sum,
                        "size": os.path.getsize(file_path)
                    }
                    if file_inner_path in content["files"].keys(
                    ) and hashed_files[file_inner_path]["sha512"] != content[
                            "files"][file_inner_path].get("sha512"):
                        changed_files.append(file_path)

        self.log.debug("Changed files: %s" % changed_files)
        if update_changed_files:
            for file_path in changed_files:
                self.site.storage.onUpdated(file_path)

        # Generate new content.json
        self.log.info("Adding timestamp and sha512sums to new content.json...")

        new_content = content.copy()  # Create a copy of current content.json
        new_content["files"] = hashed_files  # Add files sha512 hash
        new_content["modified"] = time.time()  # Add timestamp
        if inner_path == "content.json":
            new_content["address"] = self.site.address
            new_content["zeronet_version"] = config.version
            new_content["signs_required"] = content.get("signs_required", 1)

        from Crypt import CryptBitcoin
        self.log.info("Verifying private key...")
        privatekey_address = CryptBitcoin.privatekeyToAddress(privatekey)
        valid_signers = self.getValidSigners(inner_path)
        if privatekey_address not in valid_signers:
            return self.log.error(
                "Private key invalid! Valid signers: %s, Private key address: %s"
                % (valid_signers, privatekey_address))
        self.log.info("Correct %s in valid signers: %s" %
                      (privatekey_address, valid_signers))

        if inner_path == "content.json" and privatekey_address == self.site.address:  # If signing using the root key sign the valid signers
            new_content["signers_sign"] = CryptBitcoin.sign(
                "%s:%s" %
                (new_content["signs_required"], ",".join(valid_signers)),
                privatekey)
            if not new_content["signers_sign"]:
                self.log.info("Old style address, signers_sign is none")

        self.log.info("Signing %s..." % inner_path)

        if "signs" in new_content:
            del (new_content["signs"])  # Delete old signs
        if "sign" in new_content:
            del (new_content["sign"]
                 )  # Delete old sign (backward compatibility)

        sign_content = json.dumps(new_content, sort_keys=True)
        sign = CryptBitcoin.sign(sign_content, privatekey)
        #new_content["signs"] = content.get("signs", {}) # TODO: Multisig
        if sign:  # If signing is successful (not an old address)
            new_content["signs"] = {}
            new_content["signs"][privatekey_address] = sign

        if inner_path == "content.json":  # To root content.json add old format sign for backward compatibility
            oldsign_content = json.dumps(new_content, sort_keys=True)
            new_content["sign"] = CryptBitcoin.signOld(oldsign_content,
                                                       privatekey)

        if not self.validContent(inner_path, new_content):
            self.log.error("Sign failed: Invalid content")
            return False

        if filewrite:
            self.log.info("Saving to %s..." % inner_path)
            json.dump(new_content,
                      open(self.site.storage.getPath(inner_path), "w"),
                      indent=2,
                      sort_keys=True)

        self.log.info("File %s signed!" % inner_path)

        if filewrite:  # Written to file
            return True
        else:  # Return the new content
            return new_content
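A sketch of the verification mirror of sign(): the signature is checked over the same canonical JSON with the signature fields stripped, a simplification of what the verifyFile excerpts elsewhere in these examples do:

    import json

    def verifySigns(content, address):
        # Sketch: strip signature fields, re-serialize deterministically,
        # then check the recorded sign for `address`.
        content = dict(content)
        signs = content.pop("signs", {})
        content.pop("sign", None)  # old-format signature
        sign_content = json.dumps(content, sort_keys=True)
        return CryptBitcoin.verify(sign_content, address, signs.get(address))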
Code example #32
File: UiRequest.py Project: 52M/ZeroNet
    def getAddNonce(self):
        add_nonce = CryptHash.random()
        self.server.add_nonces.append(add_nonce)
        return add_nonce
Code example #33
File: StatsPlugin.py Project: Shankar7/Zero-Net
class UiRequestPlugin(object):
    def formatTableRow(self, row, class_name=""):
        back = []
        for format, val in row:
            if val is None:
                formatted = "n/a"
            elif format == "since":
                if val:
                    formatted = "%.0f" % (time.time() - val)
                else:
                    formatted = "n/a"
            else:
                formatted = format % val
            back.append("<td>%s</td>" % formatted)
        return "<tr class='%s'>%s</tr>" % (class_name, "".join(back))

    def getObjSize(self, obj, hpy=None):
        if hpy:
            return float(hpy.iso(obj).domisize) / 1024
        else:
            return 0

    # /Stats entry point
    def actionStats(self):
        import gc
        import sys
        from Ui import UiRequest
        from Db import Db
        from Crypt import CryptConnection

        hpy = None
        if self.get.get("size") == "1":  # Calc obj size
            try:
                import guppy
                hpy = guppy.hpy()
            except:
                pass
        self.sendHeader()

        if "Multiuser" in PluginManager.plugin_manager.plugin_names and not config.multiuser_local:
            yield "This function is disabled on this proxy"
            raise StopIteration

        s = time.time()
        main = sys.modules["main"]

        # Style
        yield """
        <style>
         * { font-family: monospace }
         table td, table th { text-align: right; padding: 0px 10px }
         .connections td { white-space: nowrap }
         .serving-False { opacity: 0.3 }
        </style>
        """

        # Memory
        try:
            yield "rev%s | " % config.rev
            yield "%s | " % config.ip_external
            yield "Opened: %s | " % main.file_server.port_opened
            yield "Crypt: %s | " % CryptConnection.manager.crypt_supported
            yield "In: %.2fMB, Out: %.2fMB  | " % (
                float(main.file_server.bytes_recv) / 1024 / 1024,
                float(main.file_server.bytes_sent) / 1024 / 1024)
            yield "Peerid: %s  | " % main.file_server.peer_id
            import psutil
            process = psutil.Process(os.getpid())
            mem = process.get_memory_info()[0] / float(2**20)
            yield "Mem: %.2fMB | " % mem
            yield "Threads: %s | " % len(process.threads())
            yield "CPU: usr %.2fs sys %.2fs | " % process.cpu_times()
            yield "Files: %s | " % len(process.open_files())
            yield "Sockets: %s | " % len(process.connections())
            yield "Calc size <a href='?size=1'>on</a> <a href='?size=0'>off</a>"
        except Exception:
            pass
        yield "<br>"

        # Connections
        yield "<b>Connections</b> (%s, total made: %s):<br>" % (len(
            main.file_server.connections), main.file_server.last_connection_id)
        yield "<table class='connections'><tr> <th>id</th> <th>type</th> <th>ip</th> <th>open</th> <th>crypt</th> <th>ping</th>"
        yield "<th>buff</th> <th>bad</th> <th>idle</th> <th>open</th> <th>delay</th> <th>cpu</th> <th>out</th> <th>in</th> <th>last sent</th>"
        yield "<th>wait</th> <th>version</th> <th>sites</th> </tr>"
        for connection in main.file_server.connections:
            if "cipher" in dir(connection.sock):
                cipher = connection.sock.cipher()[0]
            else:
                cipher = connection.crypt
            yield self.formatTableRow([
                ("%3d", connection.id), ("%s", connection.type),
                ("%s:%s", (connection.ip, connection.port)),
                ("%s", connection.handshake.get("port_opened")),
                ("<span title='%s'>%s</span>", (connection.crypt, cipher)),
                ("%6.3f", connection.last_ping_delay),
                ("%s", connection.incomplete_buff_recv),
                ("%s", connection.bad_actions),
                ("since",
                 max(connection.last_send_time, connection.last_recv_time)),
                ("since", connection.start_time),
                ("%.3f",
                 connection.last_sent_time - connection.last_send_time),
                ("%.3f", connection.cpu_time),
                ("%.0fkB", connection.bytes_sent / 1024),
                ("%.0fkB", connection.bytes_recv / 1024),
                ("%s", connection.last_cmd),
                ("%s", connection.waiting_requests.keys()),
                ("%s r%s", (connection.handshake.get("version"),
                            connection.handshake.get("rev", "?"))),
                ("%s", connection.sites)
            ])
        yield "</table>"

        # Tor hidden services
        yield "<br><br><b>Tor hidden services (status: %s):</b><br>" % main.file_server.tor_manager.status
        for site_address, onion in main.file_server.tor_manager.site_onions.items():
            yield "- %-34s: %s<br>" % (site_address, onion)

        # Db
        yield "<br><br><b>Db</b>:<br>"
        for db in sys.modules["Db.Db"].opened_dbs:
            yield "- %.3fs: %s<br>" % (time.time() - db.last_query_time,
                                       db.db_path.encode("utf8"))

        # Sites
        yield "<br><br><b>Sites</b>:"
        yield "<table>"
        yield "<tr><th>address</th> <th>connected</th> <th title='connected/good/total'>peers</th> <th>content.json</th> <th>out</th> <th>in</th>  </tr>"
        for site in sorted(self.server.sites.values(),
                           lambda a, b: cmp(a.address, b.address)):
            yield self.formatTableRow([
                ("""<a href='#' onclick='document.getElementById("peers_%s").style.display="initial"; return false'>%s</a>""",
                 (site.address, site.address)),
                ("%s", [
                    peer.connection.id for peer in site.peers.values()
                    if peer.connection and peer.connection.connected
                ]),
                ("%s/%s/%s", (len([
                    peer for peer in site.peers.values()
                    if peer.connection and peer.connection.connected
                ]), len(site.getConnectablePeers(100)), len(site.peers))),
                ("%s (loaded: %s)",
                 (len(site.content_manager.contents),
                  len([
                      key for key, val in dict(
                          site.content_manager.contents).iteritems() if val
                  ]))),
                ("%.0fkB", site.settings.get("bytes_sent", 0) / 1024),
                ("%.0fkB", site.settings.get("bytes_recv", 0) / 1024),
            ], "serving-%s" % site.settings["serving"])
            yield "<tr><td id='peers_%s' style='display: none; white-space: pre' colspan=6>" % site.address
            for key, peer in site.peers.items():
                if peer.time_found:
                    time_found = int(time.time() - peer.time_found) / 60
                else:
                    time_found = "--"
                if peer.connection:
                    connection_id = peer.connection.id
                else:
                    connection_id = None
                if site.content_manager.hashfield:
                    yield "Optional files: %4s " % len(peer.hashfield)
                time_added = (time.time() - peer.time_added) / (60 * 60 * 24)
                yield "(#%4s, err: %s, found: %3s min, add: %.1f day) %30s -<br>" % (
                    connection_id, peer.connection_error, time_found,
                    time_added, key)
            yield "<br></td></tr>"
        yield "</table>"

        # No more if not in debug mode
        if not config.debug:
            raise StopIteration

        # Object types

        obj_count = {}
        for obj in gc.get_objects():
            obj_type = str(type(obj))
            if obj_type not in obj_count:
                obj_count[obj_type] = [0, 0]
            obj_count[obj_type][0] += 1  # Count
            obj_count[obj_type][1] += float(sys.getsizeof(obj)) / 1024  # Size

        yield "<br><br><b>Objects in memory (types: %s, total: %s, %.2fkb):</b><br>" % (
            len(obj_count), sum([stat[0] for stat in obj_count.values()]),
            sum([stat[1] for stat in obj_count.values()]))

        for obj, stat in sorted(obj_count.items(),
                                key=lambda x: x[1][0],
                                reverse=True):  # Sorted by count
            yield " - %.1fkb = %s x <a href=\"/Listobj?type=%s\">%s</a><br>" % (
                stat[1], stat[0], obj, cgi.escape(obj))

        # Classes

        class_count = {}
        for obj in gc.get_objects():
            obj_type = str(type(obj))
            if obj_type != "<type 'instance'>":
                continue
            class_name = obj.__class__.__name__
            if class_name not in class_count:
                class_count[class_name] = [0, 0]
            class_count[class_name][0] += 1  # Count
            class_count[class_name][1] += float(
                sys.getsizeof(obj)) / 1024  # Size

        yield "<br><br><b>Classes in memory (types: %s, total: %s, %.2fkb):</b><br>" % (
            len(class_count), sum([stat[0] for stat in class_count.values()]),
            sum([stat[1] for stat in class_count.values()]))

        for obj, stat in sorted(class_count.items(),
                                key=lambda x: x[1][0],
                                reverse=True):  # Sorted by count
            yield " - %.1fkb = %s x <a href=\"/Dumpobj?class=%s\">%s</a><br>" % (
                stat[1], stat[0], obj, cgi.escape(obj))

        from greenlet import greenlet
        objs = [obj for obj in gc.get_objects() if isinstance(obj, greenlet)]
        yield "<br>Greenlets (%s):<br>" % len(objs)
        for obj in objs:
            yield " - %.1fkb: %s<br>" % (self.getObjSize(
                obj, hpy), cgi.escape(repr(obj).encode("utf8")))

        from Worker import Worker
        objs = [obj for obj in gc.get_objects() if isinstance(obj, Worker)]
        yield "<br>Workers (%s):<br>" % len(objs)
        for obj in objs:
            yield " - %.1fkb: %s<br>" % (self.getObjSize(
                obj, hpy), cgi.escape(repr(obj)))

        from Connection import Connection
        objs = [obj for obj in gc.get_objects() if isinstance(obj, Connection)]
        yield "<br>Connections (%s):<br>" % len(objs)
        for obj in objs:
            yield " - %.1fkb: %s<br>" % (self.getObjSize(
                obj, hpy), cgi.escape(repr(obj)))

        from socket import socket
        objs = [obj for obj in gc.get_objects() if isinstance(obj, socket)]
        yield "<br>Sockets (%s):<br>" % len(objs)
        for obj in objs:
            yield " - %.1fkb: %s<br>" % (self.getObjSize(
                obj, hpy), cgi.escape(repr(obj)))

        from msgpack import Unpacker
        objs = [obj for obj in gc.get_objects() if isinstance(obj, Unpacker)]
        yield "<br>Msgpack unpacker (%s):<br>" % len(objs)
        for obj in objs:
            yield " - %.1fkb: %s<br>" % (self.getObjSize(
                obj, hpy), cgi.escape(repr(obj)))

        from Site import Site
        objs = [obj for obj in gc.get_objects() if isinstance(obj, Site)]
        yield "<br>Sites (%s):<br>" % len(objs)
        for obj in objs:
            yield " - %.1fkb: %s<br>" % (self.getObjSize(
                obj, hpy), cgi.escape(repr(obj)))

        objs = [
            obj for obj in gc.get_objects()
            if isinstance(obj, self.server.log.__class__)
        ]
        yield "<br>Loggers (%s):<br>" % len(objs)
        for obj in objs:
            yield " - %.1fkb: %s<br>" % (self.getObjSize(
                obj, hpy), cgi.escape(repr(obj.name)))

        objs = [obj for obj in gc.get_objects() if isinstance(obj, UiRequest)]
        yield "<br>UiRequests (%s):<br>" % len(objs)
        for obj in objs:
            yield " - %.1fkb: %s<br>" % (self.getObjSize(
                obj, hpy), cgi.escape(repr(obj)))

        from Peer import Peer
        objs = [obj for obj in gc.get_objects() if isinstance(obj, Peer)]
        yield "<br>Peers (%s):<br>" % len(objs)
        for obj in objs:
            yield " - %.1fkb: %s<br>" % (self.getObjSize(
                obj, hpy), cgi.escape(repr(obj)))

        objs = [(key, val) for key, val in sys.modules.iteritems()
                if val is not None]
        objs.sort()
        yield "<br>Modules (%s):<br>" % len(objs)
        for module_name, module in objs:
            yield " - %.3fkb: %s %s<br>" % (self.getObjSize(
                module, hpy), module_name, cgi.escape(repr(module)))

        gc.collect()  # Explicit garbage collection
        yield "Done in %.1f" % (time.time() - s)

    def actionDumpobj(self):

        import gc
        import sys

        self.sendHeader()

        if "Multiuser" in PluginManager.plugin_manager.plugin_names and not config.multiuser_local:
            yield "This function is disabled on this proxy"
            raise StopIteration

        # No more if not in debug mode
        if not config.debug:
            yield "Not in debug mode"
            raise StopIteration

        class_filter = self.get.get("class")

        yield """
        <style>
         * { font-family: monospace; white-space: pre }
         table * { text-align: right; padding: 0px 10px }
        </style>
        """

        objs = gc.get_objects()
        for obj in objs:
            obj_type = str(type(obj))
            if obj_type != "<type 'instance'>" or obj.__class__.__name__ != class_filter:
                continue
            yield "%.1fkb %s... " % (float(sys.getsizeof(obj)) / 1024,
                                     cgi.escape(str(obj)))
            for attr in dir(obj):
                yield "- %s: %s<br>" % (attr,
                                        cgi.escape(str(getattr(obj, attr))))
            yield "<br>"

        gc.collect()  # Explicit garbage collection

    def actionListobj(self):

        import gc
        import sys

        self.sendHeader()

        if "Multiuser" in PluginManager.plugin_manager.plugin_names and not config.multiuser_local:
            yield "This function is disabled on this proxy"
            raise StopIteration

        # No more if not in debug mode
        if not config.debug:
            yield "Not in debug mode"
            raise StopIteration

        type_filter = self.get.get("type")

        yield """
        <style>
         * { font-family: monospace; white-space: pre }
         table * { text-align: right; padding: 0px 10px }
        </style>
        """

        yield "Listing all %s objects in memory...<br>" % cgi.escape(
            type_filter)

        ref_count = {}
        objs = gc.get_objects()
        for obj in objs:
            obj_type = str(type(obj))
            if obj_type != type_filter:
                continue
            refs = [
                ref for ref in gc.get_referrers(obj) if
                hasattr(ref, "__class__") and ref.__class__.__name__ not in [
                    "list", "dict", "function", "type", "frame", "WeakSet",
                    "tuple"
                ]
            ]
            if not refs:
                continue
            try:
                yield "%.1fkb <span title=\"%s\">%s</span>... " % (
                    float(sys.getsizeof(obj)) / 1024, cgi.escape(
                        str(obj)), cgi.escape(str(obj)[0:100].ljust(100)))
            except:
                continue
            for ref in refs:
                yield " ["
                if "object at" in str(ref) or len(str(ref)) > 100:
                    yield str(ref.__class__.__name__)
                else:
                    yield str(ref.__class__.__name__) + ":" + cgi.escape(
                        str(ref))
                yield "] "
                ref_type = ref.__class__.__name__
                if ref_type not in ref_count:
                    ref_count[ref_type] = [0, 0]
                ref_count[ref_type][0] += 1  # Count
                ref_count[ref_type][1] += float(
                    sys.getsizeof(obj)) / 1024  # Size
            yield "<br>"

        yield "<br>Object referrer (total: %s, %.2fkb):<br>" % (
            len(ref_count), sum([stat[1] for stat in ref_count.values()]))

        for obj, stat in sorted(ref_count.items(),
                                key=lambda x: x[1][0],
                                reverse=True)[0:30]:  # Sorted by count
            yield " - %.1fkb = %s x %s<br>" % (stat[1], stat[0],
                                               cgi.escape(str(obj)))

        gc.collect()  # Explicit garbage collection

    def actionBenchmark(self):
        import sys
        import gc
        from contextlib import contextmanager

        output = self.sendHeader()

        if "Multiuser" in PluginManager.plugin_manager.plugin_names and not config.multiuser_local:
            yield "This function is disabled on this proxy"
            raise StopIteration

        @contextmanager
        def benchmark(name, standard):
            s = time.time()
            output("- %s" % name)
            try:
                yield 1
            except Exception, err:
                output("<br><b>! Error: %s</b><br>" % err)
            taken = time.time() - s
            if taken > 0:
                multiplier = standard / taken
            else:
                multiplier = 99
            if multiplier < 0.3:
                speed = "Sloooow"
            elif multiplier < 0.5:
                speed = "Ehh"
            elif multiplier < 0.8:
                speed = "Goodish"
            elif multiplier < 1.2:
                speed = "OK"
            elif multiplier < 1.7:
                speed = "Fine"
            elif multiplier < 2.5:
                speed = "Fast"
            elif multiplier < 3.5:
                speed = "WOW"
            else:
                speed = "Insane!!"
            output("%.3fs [x%.2f: %s]<br>" % (taken, multiplier, speed))
            time.sleep(0.01)

        yield """
        <style>
         * { font-family: monospace }
         table * { text-align: right; padding: 0px 10px }
        </style>
        """

        yield "Benchmarking ZeroNet %s (rev%s) Python %s on: %s...<br>" % (
            config.version, config.rev, sys.version, sys.platform)

        t = time.time()

        # CryptBitcoin
        yield "<br>CryptBitcoin:<br>"
        from Crypt import CryptBitcoin

        # seed = CryptBitcoin.newSeed()
        # yield "- Seed: %s<br>" % seed
        seed = "e180efa477c63b0f2757eac7b1cce781877177fe0966be62754ffd4c8592ce38"

        with benchmark("hdPrivatekey x 10", 0.7):
            for i in range(10):
                privatekey = CryptBitcoin.hdPrivatekey(seed, i * 10)
                yield "."
            valid = "5JsunC55XGVqFQj5kPGK4MWgTL26jKbnPhjnmchSNPo75XXCwtk"
            assert privatekey == valid, "%s != %s" % (privatekey, valid)

        data = "Hello" * 1024  # 5k
        with benchmark("sign x 10", 0.35):
            for i in range(10):
                yield "."
                sign = CryptBitcoin.sign(data, privatekey)
            valid = "G1GXaDauZ8vX/N9Jn+MRiGm9h+I94zUhDnNYFaqMGuOiBHB+kp4cRPZOL7l1yqK5BHa6J+W97bMjvTXtxzljp6w="
            assert sign == valid, "%s != %s" % (sign, valid)

        address = CryptBitcoin.privatekeyToAddress(privatekey)
        if CryptBitcoin.opensslVerify:  # OpenSSL available
            with benchmark("openssl verify x 100", 0.37):
                for i in range(100):
                    if i % 10 == 0:
                        yield "."
                    ok = CryptBitcoin.verify(data, address, sign)
                assert ok, "does not verify from %s" % address
        else:
            yield " - openssl verify x 100...not avalible :(<br>"

        openssl_verify_bk = CryptBitcoin.opensslVerify  # Temporarily emulate OpenSSL being unavailable
        CryptBitcoin.opensslVerify = None
        with benchmark("pure-python verify x 10", 1.6):
            for i in range(10):
                yield "."
                ok = CryptBitcoin.verify(data, address, sign)
            assert ok, "does not verify from %s" % address
        CryptBitcoin.opensslVerify = openssl_verify_bk

        # CryptHash
        yield "<br>CryptHash:<br>"
        from Crypt import CryptHash
        from cStringIO import StringIO

        data = StringIO("Hello" * 1024 * 1024)  # 5m
        with benchmark("sha256 5M x 10", 0.6):
            for i in range(10):
                data.seek(0)
                hash = CryptHash.sha256sum(data)
                yield "."
            valid = "8cd629d9d6aff6590da8b80782a5046d2673d5917b99d5603c3dcb4005c45ffa"
            assert hash == valid, "%s != %s" % (hash, valid)

        data = StringIO("Hello" * 1024 * 1024)  # 5m
        with benchmark("sha512 5M x 10", 0.6):
            for i in range(10):
                data.seek(0)
                hash = CryptHash.sha512sum(data)
                yield "."
            valid = "9ca7e855d430964d5b55b114e95c6bbb114a6d478f6485df93044d87b108904d"
            assert hash == valid, "%s != %s" % (hash, valid)

        with benchmark("os.urandom(256) x 1000", 0.0065):
            for i in range(10):
                for y in range(100):
                    data = os.urandom(256)
                yield "."

        # Msgpack
        import msgpack
        yield "<br>Msgpack: (version: %s)<br>" % ".".join(
            map(str, msgpack.version))
        binary = 'fqv\xf0\x1a"e\x10,\xbe\x9cT\x9e(\xa5]u\x072C\x8c\x15\xa2\xa8\x93Sw)\x19\x02\xdd\t\xfb\xf67\x88\xd9\xee\x86\xa1\xe4\xb6,\xc6\x14\xbb\xd7$z\x1d\xb2\xda\x85\xf5\xa0\x97^\x01*\xaf\xd3\xb0!\xb7\x9d\xea\x89\xbbh8\xa1"\xa7]e(@\xa2\xa5g\xb7[\xae\x8eE\xc2\x9fL\xb6s\x19\x19\r\xc8\x04S\xd0N\xe4]?/\x01\xea\xf6\xec\xd1\xb3\xc2\x91\x86\xd7\xf4K\xdf\xc2lV\xf4\xe8\x80\xfc\x8ep\xbb\x82\xb3\x86\x98F\x1c\xecS\xc8\x15\xcf\xdc\xf1\xed\xfc\xd8\x18r\xf9\x80\x0f\xfa\x8cO\x97(\x0b]\xf1\xdd\r\xe7\xbf\xed\x06\xbd\x1b?\xc5\xa0\xd7a\x82\xf3\xa8\xe6@\xf3\ri\xa1\xb10\xf6\xd4W\xbc\x86\x1a\xbb\xfd\x94!bS\xdb\xaeM\x92\x00#\x0b\xf7\xad\xe9\xc2\x8e\x86\xbfi![%\xd31]\xc6\xfc2\xc9\xda\xc6v\x82P\xcc\xa9\xea\xb9\xff\xf6\xc8\x17iD\xcf\xf3\xeeI\x04\xe9\xa1\x19\xbb\x01\x92\xf5nn4K\xf8\xbb\xc6\x17e>\xa7 \xbbv'
        data = {
            "int": 1024 * 1024 * 1024,
            "float": 12345.67890,
            "text": "hello" * 1024,
            "binary": binary
        }
        with benchmark("pack 5K x 10 000", 0.78):
            for i in range(10):
                for y in range(1000):
                    data_packed = msgpack.packb(data)
                yield "."
            valid = """\x84\xa3int\xce@\x00\x00\x00\xa4text\xda\x14\x00hellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohell
ohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohello\xa5float\xcb@\xc8\x1c\xd6\xe61\xf8\xa1\xa6binary\xda\x01\x00fqv\xf0\x1a"e\x10,\xbe\x9cT\x9e(\xa5]u\x072C\x8c\x15\xa2\xa8\x93Sw)\x19\x02\xdd\t\xfb\xf67\x88\xd9\xee\x86\xa1\xe4\xb6,\xc6\x14\xbb\xd7$z\x1d\xb2\xda\x85\xf5\xa0\x97^\x01*\xaf\xd3\xb0!\xb7\x9d\xea\x89\xbbh8\xa1"\xa7]e(@\xa2\xa5g\xb7[\xae\x8eE\xc2\x9fL\xb6s\x19\x19\r\xc8\x04S\xd0N\xe4]?/\x01\xea\xf6\xec\xd1\xb3\xc2\x91\x86\xd7\xf4K\xdf\xc2lV\xf4\xe8\x80\xfc\x8ep\xbb\x82\xb3\x86\x98F\x1c\xecS\xc8\x15\xcf\xdc\xf1\xed\xfc\xd8\x18r\xf9\x80\x0f\xfa\x8cO\x97(\x0b]\xf1\xdd\r\xe7\xbf\xed\x06\xbd\x1b?\xc5\xa0\xd7a\x82\xf3\xa8\xe6@\xf3\ri\xa1\xb10\xf6\xd4W\xbc\x86\x1a\xbb\xfd\x94!bS\xdb\xaeM\x92\x00#\x0b\xf7\xad\xe9\xc2\x8e\x86\xbfi![%\xd31]\xc6\xfc2\xc9\xda\xc6v\x82P\xcc\xa9\xea\xb9\xff\xf6\xc8\x17iD\xcf\xf3\xeeI\x04\xe9\xa1\x19\xbb\x01\x92\xf5nn4K\xf8\xbb\xc6\x17e>\xa7 \xbbv"""
            assert data_packed == valid, "%s<br>!=<br>%s" % (repr(data_packed),
                                                             repr(valid))

        with benchmark("unpack 5K x 10 000", 1.2):
            for i in range(10):
                for y in range(1000):
                    data_unpacked = msgpack.unpackb(data_packed)
                yield "."
            assert data == data_unpacked, "%s != %s" % (data_unpacked, data)

        with benchmark("streaming unpack 5K x 10 000", 1.4):
            for i in range(10):
                unpacker = msgpack.Unpacker()
                for y in range(1000):
                    unpacker.feed(data_packed)
                    for data_unpacked in unpacker:
                        pass
                yield "."
            assert data == data_unpacked, "%s != %s" % (data_unpacked, data)

        # Db
        from Db import Db
        import sqlite3
        yield "<br>Db: (version: %s, API: %s)<br>" % (sqlite3.sqlite_version,
                                                      sqlite3.version)

        schema = {
            "db_name": "TestDb",
            "db_file": "%s/benchmark.db" % config.data_dir,
            "maps": {
                ".*": {
                    "to_table": {
                        "test": "test"
                    }
                }
            },
            "tables": {
                "test": {
                    "cols": [["test_id", "INTEGER"], ["title", "TEXT"],
                             ["json_id", "INTEGER REFERENCES json (json_id)"]],
                    "indexes":
                    ["CREATE UNIQUE INDEX test_key ON test(test_id, json_id)"],
                    "schema_changed":
                    1426195822
                }
            }
        }
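
        # For reference, a schema like the above is expected to translate to
        # DDL along these lines (a sketch, not captured Db output):
        #   CREATE TABLE test (
        #       test_id INTEGER, title TEXT,
        #       json_id INTEGER REFERENCES json (json_id)
        #   );
        #   CREATE UNIQUE INDEX test_key ON test(test_id, json_id);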

        if os.path.isfile("%s/benchmark.db" % config.data_dir):
            os.unlink("%s/benchmark.db" % config.data_dir)

        with benchmark("Open x 10", 0.13):
            for i in range(10):
                db = Db(schema, "%s/benchmark.db" % config.data_dir)
                db.checkTables()
                db.close()
                yield "."

        db = Db(schema, "%s/benchmark.db" % config.data_dir)
        db.checkTables()
        import json

        with benchmark("Insert x 10 x 1000", 1.0):
            for u in range(10):  # 10 users
                data = {"test": []}
                for i in range(1000):  # 1000 lines of data
                    data["test"].append({
                        "test_id":
                        i,
                        "title":
                        "Testdata for %s message %s" % (u, i)
                    })
                json.dump(data,
                          open("%s/test_%s.json" % (config.data_dir, u), "w"))
                db.updateJson("%s/test_%s.json" % (config.data_dir, u))
                os.unlink("%s/test_%s.json" % (config.data_dir, u))
                yield "."

        with benchmark("Buffered insert x 100 x 100", 1.3):
            cur = db.getCursor()
            cur.execute("BEGIN")
            cur.logging = False
            for u in range(100, 200):  # 100 users
                data = {"test": []}
                for i in range(100):  # 100 lines of data
                    data["test"].append({
                        "test_id":
                        i,
                        "title":
                        "Testdata for %s message %s" % (u, i)
                    })
                json.dump(data,
                          open("%s/test_%s.json" % (config.data_dir, u), "w"))
                db.updateJson("%s/test_%s.json" % (config.data_dir, u),
                              cur=cur)
                os.unlink("%s/test_%s.json" % (config.data_dir, u))
                if u % 10 == 0:
                    yield "."
            cur.execute("COMMIT")

        yield " - Total rows in db: %s<br>" % db.execute(
            "SELECT COUNT(*) AS num FROM test").fetchone()[0]

        with benchmark("Indexed query x 1000", 0.25):
            found = 0
            cur = db.getCursor()
            cur.logging = False
            for i in range(1000):  # 1000x by test_id
                res = cur.execute("SELECT * FROM test WHERE test_id = %s" % i)
                for row in res:
                    found += 1
                if i % 100 == 0:
                    yield "."

            assert found == 20000, "Found: %s != 20000" % found

        with benchmark("Not indexed query x 100", 0.6):
            found = 0
            cur = db.getCursor()
            cur.logging = False
            for i in range(100):  # 100x by json_id
                res = cur.execute("SELECT * FROM test WHERE json_id = %s" % i)
                for row in res:
                    found += 1
                if i % 10 == 0:
                    yield "."

            assert found == 18900, "Found: %s != 18900" % found

        with benchmark("Like query x 100", 1.8):
            found = 0
            cur = db.getCursor()
            cur.logging = False
            for i in range(100):  # 100x by title LIKE
                res = cur.execute(
                    "SELECT * FROM test WHERE title LIKE '%%message %s%%'" % i)
                for row in res:
                    found += 1
                if i % 10 == 0:
                    yield "."

            assert found == 38900, "Found: %s != 38900" % found

        db.close()
        if os.path.isfile("%s/benchmark.db" % config.data_dir):
            os.unlink("%s/benchmark.db" % config.data_dir)

        gc.collect()  # Explicit garbage collection

        yield "<br>Done. Total: %.2fs" % (time.time() - t)
Code example #37
File: ContentManager.py Project: simbam1/ZeroNet
	def sign(self, inner_path = "content.json", privatekey=None, filewrite=True, update_changed_files=False):
		content = self.contents.get(inner_path)
		if not content: # Content does not exist yet, load default one
			self.log.info("File %s does not exist yet, loading default values..." % inner_path)
			content = {"files": {}, "signs": {}} # Default content.json
			if inner_path == "content.json": # Its the root content.json, add some more fields
				content["title"] = "%s - ZeroNet_" % self.site.address
				content["description"] = ""
				content["signs_required"] = 1
				content["ignore"] = ""

		directory = self.toDir(self.site.storage.getPath(inner_path))
		self.log.info("Opening site data directory: %s..." % directory)

		hashed_files = {}
		changed_files = [inner_path]
		for root, dirs, files in os.walk(directory):
			for file_name in files:
				file_path = self.site.storage.getPath("%s/%s" % (root.strip("/"), file_name))
				file_inner_path = re.sub(re.escape(directory), "", file_path)
				
				if file_name == "content.json" or (content.get("ignore") and re.match(content["ignore"], file_inner_path)) or file_name.startswith("."): # Ignore content.json, definied regexp and files starting with .
					self.log.info("- [SKIPPED] %s" % file_inner_path)
				else:
					sha512sum = CryptHash.sha512sum(file_path) # Calculate sha512 sum of file
					self.log.info("- %s (SHA512: %s)" % (file_inner_path, sha512sum))
					hashed_files[file_inner_path] = {"sha512": sha512sum, "size": os.path.getsize(file_path)}
					if file_inner_path in content["files"].keys() and hashed_files[file_inner_path]["sha512"] != content["files"][file_inner_path].get("sha512"):
						changed_files.append(file_path)

		
		self.log.debug("Changed files: %s" % changed_files)
		if update_changed_files:
			for file_path in changed_files:
				self.site.storage.onUpdated(file_path)

		# Generate new content.json
		self.log.info("Adding timestamp and sha512sums to new content.json...")

		new_content = content.copy() # Create a copy of current content.json
		new_content["files"] = hashed_files # Add files sha512 hash
		new_content["modified"] = time.time() # Add timestamp
		if inner_path == "content.json": 
			new_content["address"] = self.site.address
			new_content["zeronet_version"] = config.version
			new_content["signs_required"] = content.get("signs_required", 1)

		from Crypt import CryptBitcoin
		self.log.info("Verifying private key...")
		privatekey_address = CryptBitcoin.privatekeyToAddress(privatekey)
		valid_signers = self.getValidSigners(inner_path)
		if privatekey_address not in valid_signers:
			return self.log.error("Private key invalid! Valid signers: %s, Private key address: %s" % (valid_signers, privatekey_address))
		self.log.info("Correct %s in valid signers: %s" % (privatekey_address, valid_signers))

		if inner_path == "content.json" and privatekey_address == self.site.address: # If signing using the root key sign the valid signers
			new_content["signers_sign"] = CryptBitcoin.sign("%s:%s" % (new_content["signs_required"], ",".join(valid_signers)), privatekey)
			if not new_content["signers_sign"]: self.log.info("Old style address, signers_sign is none")

		self.log.info("Signing %s..." % inner_path)

		if "signs" in new_content: del(new_content["signs"]) # Delete old signs
		if "sign" in new_content: del(new_content["sign"]) # Delete old sign (backward compatibility)

		sign_content = json.dumps(new_content, sort_keys=True)
		sign = CryptBitcoin.sign(sign_content, privatekey)
		#new_content["signs"] = content.get("signs", {}) # TODO: Multisig
		if sign: # If signing is successful (not an old address)
			new_content["signs"] = {}
			new_content["signs"][privatekey_address] = sign
			
		if inner_path == "content.json":  # To root content.json add old format sign for backward compatibility
			oldsign_content = json.dumps(new_content, sort_keys=True)
			new_content["sign"] = CryptBitcoin.signOld(oldsign_content, privatekey)

		if not self.validContent(inner_path, new_content):
			self.log.error("Sign failed: Invalid content")
			return False

		if filewrite:
			self.log.info("Saving to %s..." % inner_path)
			json.dump(new_content, open(self.site.storage.getPath(inner_path), "w"), indent=2, sort_keys=True)

		self.log.info("File %s signed!" % inner_path)

		if filewrite: # Written to file
			return True
		else: # Return the new content
			return new_content
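
Note the order of operations in sign(): the signature covers the content serialized with json.dumps(..., sort_keys=True) after the old signs/sign fields are removed, so a verifier must rebuild exactly that byte string. A minimal stdlib sketch of the serialization step (signed_payload and sign_data are illustrative stand-ins, not CryptBitcoin):

import json

def signed_payload(content):
    # Drop signature fields, then serialize deterministically, as sign() does
    unsigned = {key: val for key, val in content.items() if key not in ("signs", "sign")}
    return json.dumps(unsigned, sort_keys=True)

def sign_data(payload, privatekey):
    # Stand-in for CryptBitcoin.sign(); returns a recognizable dummy tag
    return "sig(%s...)" % payload[:16]

content = {"files": {}, "modified": 1426195822, "signs": {}}
payload = signed_payload(content)
content["signs"] = {"<signer address>": sign_data(payload, "<privatekey>")}
print(payload)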
Code example #38
File: UiRequest.py Project: yangguangyu/ZeroNet
    def getScriptNonce(self):
        if not self.script_nonce:
            self.script_nonce = CryptHash.random(encoding="base64")

        return self.script_nonce
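
A script nonce like this is normally sent twice: once in the Content-Security-Policy header and once on the script tag, and the browser only runs scripts whose nonce matches. A generic sketch of that pairing (not UiRequest's actual header-building code):

import base64
import os

# One nonce per response, like CryptHash.random(encoding="base64") above
script_nonce = base64.b64encode(os.urandom(32)).decode()

csp_header = "Content-Security-Policy: script-src 'nonce-%s'" % script_nonce
script_tag = '<script nonce="%s">init()</script>' % script_nonce
print(csp_header)
print(script_tag)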
Code example #39
        address = CryptBitcoin.privatekeyToAddress(privatekey)
        with benchmark("verify x 10", 1.6):
            for i in range(10):
                yield "."
                ok = CryptBitcoin.verify(data, address, sign)
            assert ok, "does not verify from %s" % address

        yield "<br>CryptHash:<br>"
        from Crypt import CryptHash
        from cStringIO import StringIO

        data = StringIO("Hello" * 1024 * 1024)  #5m
        with benchmark("sha512 x 10 000", 1):
            for i in range(10):
                for y in range(10000):
                    hash = CryptHash.sha512sum(data)
                yield "."
            valid = "cf83e1357eefb8bdf1542850d66d8007d620e4050b5715dc83f4a921d36ce9ce"
            assert hash == valid, "%s != %s" % (hash, valid)

        yield "<br>Db:<br>"
        from Db import Db

        schema = {
            "db_name": "TestDb",
            "db_file": "data/benchmark.db",
            "maps": {
                ".*": {
                    "to_table": {
                        "test": "test"
                    }
Code example #40
File: ContentManager.py Project: xeddmc/ZeroNet
                        if valid_signs >= signs_required:
                            break  # Break if we have enough signs
                    self.log.debug("%s: Valid signs: %s/%s" % (inner_path, valid_signs, signs_required))
                    return valid_signs >= signs_required
                else:  # Old style signing
                    return CryptBitcoin.verify(sign_content, self.site.address, sign)

            except Exception, err:
                self.log.error("Verify sign error: %s" % Debug.formatException(err))
                return False

        else:  # Check using sha512 hash
            file_info = self.getFileInfo(inner_path)
            if file_info:
                if "sha512" in file_info:
                    hash_valid = CryptHash.sha512sum(file) == file_info["sha512"]
                elif "sha1" in file_info:  # Backward compatibility
                    hash_valid = CryptHash.sha1sum(file) == file_info["sha1"]
                else:
                    hash_valid = False
                if file_info.get("size", 0) != file.tell():
                    self.log.error(
                        "%s file size does not match %s <> %s, Hash: %s" %
                        (inner_path, file.tell(), file_info.get("size", 0), hash_valid)
                    )
                    return False
                return hash_valid

            else:  # File not in content.json
                self.log.error("File not in content.json: %s" % inner_path)
                return False
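
The sha512 branch above reduces to: hash the stream, compare against content.json's recorded digest, and cross-check the size. A standalone equivalent using hashlib (the file_info dict shape follows the snippet; check_file is an assumed helper name):

import hashlib

def check_file(path, file_info):
    # file_info is the content.json entry, e.g. {"sha512": "...", "size": 1234}
    sha512 = hashlib.sha512()
    size = 0
    with open(path, "rb") as f:
        for block in iter(lambda: f.read(64 * 1024), b""):
            sha512.update(block)
            size += len(block)
    if size != file_info.get("size", 0):
        return False  # size mismatch, the snippet's error path
    return sha512.hexdigest() == file_info.get("sha512")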
Code example #41
File: ContentManager.py Project: binerf/zeronet_tor
                    else:
                        return self.verifyContent(inner_path, new_content)
                else:  # Old style signing
                    if CryptBitcoin.verify(sign_content, self.site.address, sign):
                        return self.verifyContent(inner_path, new_content)
                    else:
                        raise VerifyError("Invalid old-style sign")

            except Exception, err:
                self.log.warning("%s: verify sign error: %s" % (inner_path, Debug.formatException(err)))
                raise err

        else:  # Check using sha512 hash
            file_info = self.getFileInfo(inner_path)
            if file_info:
                if CryptHash.sha512sum(file) != file_info.get("sha512", ""):
                    raise VerifyError("Invalid hash")

                if file_info.get("size", 0) != file.tell():
                    raise VerifyError(
                        "File size does not match %s <> %s" %
                        (inner_path, file.tell(), file_info.get("size", 0))
                    )

                return True

            else:  # File not in content.json
                raise VerifyError("File not in content.json")

    def optionalDelete(self, inner_path):
        self.site.storage.delete(inner_path)
Code example #42
File: BigfilePlugin.py Project: zhilinwww/ZeroNet
    def verifyPiece(self, inner_path, pos, piece):
        piecemap = self.getPiecemap(inner_path)
        piece_i = pos / piecemap["piece_size"]
        if CryptHash.sha512sum(piece, format="digest") != piecemap["sha512_pieces"][piece_i]:
            raise VerifyError("Invalid hash")
        return True
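
verifyPiece() only recomputes one piece's digest and compares it against a precomputed piecemap. A sketch of building and checking such a map with hashlib (the helper names and sample data are assumptions; the dict layout mirrors the snippet, with // for explicit integer division):

import hashlib

def build_piecemap(data, piece_size):
    # One sha512 digest per fixed-size piece, like the snippet's piecemap
    pieces = [data[i:i + piece_size] for i in range(0, len(data), piece_size)]
    return {
        "piece_size": piece_size,
        "sha512_pieces": [hashlib.sha512(piece).digest() for piece in pieces],
    }

def verify_piece(piecemap, pos, piece):
    piece_i = pos // piecemap["piece_size"]
    return hashlib.sha512(piece).digest() == piecemap["sha512_pieces"][piece_i]

piecemap = build_piecemap(b"x" * 3000, piece_size=1024)
assert verify_piece(piecemap, 2048, b"x" * 952)  # last, short piece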
Code example #43
File: ContentManager.py Project: unique1o1/ZeroNet
                            break  # Break if we have enough signs
                    if config.verbose:
                        self.log.debug("%s: Valid signs: %s/%s" % (inner_path, valid_signs, signs_required))
                    return valid_signs >= signs_required
                else:  # Old style signing
                    return CryptBitcoin.verify(sign_content, self.site.address, sign)

            except Exception, err:
                self.log.error("Verify sign error: %s" % Debug.formatException(err))
                return False

        else:  # Check using sha512 hash
            file_info = self.getFileInfo(inner_path)
            if file_info:
                if "sha512" in file_info:
                    hash_valid = CryptHash.sha512sum(file) == file_info["sha512"]
                elif "sha1" in file_info:  # Backward compatibility
                    hash_valid = CryptHash.sha1sum(file) == file_info["sha1"]
                else:
                    hash_valid = False
                if file_info.get("size", 0) != file.tell():
                    self.log.error(
                        "%s file size does not match %s <> %s, Hash: %s" %
                        (inner_path, file.tell(), file_info.get("size", 0), hash_valid)
                    )
                    return False
                return hash_valid

            else:  # File not in content.json
                self.log.error("File not in content.json: %s" % inner_path)
                return False
Code example #44
    def signContent(self, privatekey=None):
        if not self.content:  # New site
            self.log.info(
                "Site not exits yet, loading default content.json values...")
            self.content = {
                "files": {},
                "title": "%s - ZeroNet_" % self.address,
                "sign": "",
                "modified": 0.0,
                "description": "",
                "address": self.address,
                "ignore": "",
                "zeronet_version": config.version
            }  # Default content.json

        self.log.info("Opening site data directory: %s..." % self.directory)

        hashed_files = {}

        for root, dirs, files in os.walk(self.directory):
            for file_name in files:
                file_path = self.getPath("%s/%s" % (root, file_name))

                if file_name == "content.json" or (
                        self.content["ignore"] and re.match(
                            self.content["ignore"],
                            file_path.replace(self.directory + "/", ""))
                ):  # Don't add content.json or files matching the ignore regexp defined in content.json
                    self.log.info("- [SKIPPED] %s" % file_path)
                else:
                    sha1sum = CryptHash.sha1sum(
                        file_path)  # Calculate sha1 sum of file
                    sha512sum = CryptHash.sha512sum(
                        file_path)  # Calculate sha512 sum of file
                    inner_path = re.sub("^%s/" % re.escape(self.directory), "",
                                        file_path)
                    self.log.info("- %s (SHA512: %s)" % (file_path, sha512sum))
                    hashed_files[inner_path] = {
                        "sha1": sha1sum,
                        "sha512": sha512sum,
                        "size": os.path.getsize(file_path)
                    }

        # Generate new content.json
        self.log.info("Adding timestamp and sha512sums to new content.json...")

        content = self.content.copy()  # Create a copy of current content.json
        content["address"] = self.address
        content["files"] = hashed_files  # Add files sha512 hash
        content["modified"] = time.time()  # Add timestamp
        content["zeronet_version"] = config.version  # Signer's zeronet version
        del (content["sign"])  # Delete old sign

        # Signing content
        from Crypt import CryptBitcoin

        self.log.info("Verifying private key...")
        privatekey_address = CryptBitcoin.privatekeyToAddress(privatekey)
        if self.address != privatekey_address:
            return self.log.error(
                "Private key invalid! Site address: %s, Private key address: %s"
                % (self.address, privatekey_address))

        self.log.info("Signing modified content.json...")
        sign_content = json.dumps(content, sort_keys=True)
        sign = CryptBitcoin.sign(sign_content, privatekey)
        content["sign"] = sign

        # Saving modified content.json
        self.log.info("Saving to %s/content.json..." % self.directory)
        open("%s/content.json" % self.directory,
             "w").write(json.dumps(content, indent=4, sort_keys=True))

        self.log.info("Site signed!")
        return True
Code example #45
    def actionBenchmark(self):
        import sys
        import gc
        from contextlib import contextmanager

        output = self.sendHeader()

        if "Multiuser" in PluginManager.plugin_manager.plugin_names and not config.multiuser_local:
            yield "This function is disabled on this proxy"
            return

        @contextmanager
        def benchmark(name, standard):
            self.log.debug("Benchmark: %s" % name)
            s = time.time()
            output(b"- %s" % name.encode())
            try:
                yield 1
            except Exception as err:
                self.log.exception(err)
                output(b"<br><b>! Error: %s</b><br>" % Debug.formatException(err).encode())
            taken = time.time() - s
            if taken > 0:
                multiplier = standard / taken
            else:
                multiplier = 99
            if multiplier < 0.3:
                speed = "Sloooow"
            elif multiplier < 0.5:
                speed = "Ehh"
            elif multiplier < 0.8:
                speed = "Goodish"
            elif multiplier < 1.2:
                speed = "OK"
            elif multiplier < 1.7:
                speed = "Fine"
            elif multiplier < 2.5:
                speed = "Fast"
            elif multiplier < 3.5:
                speed = "WOW"
            else:
                speed = "Insane!!"
            output(b"%.3fs [x%.2f: %s]<br>" % (taken, multiplier, speed.encode()))
            time.sleep(0.01)

        yield """
        <style>
         * { font-family: monospace }
         table * { text-align: right; padding: 0px 10px }
        </style>
        """

        yield "Benchmarking ZeroNet %s (rev%s) Python %s on: %s...<br>" % (config.version, config.rev, sys.version, sys.platform)

        t = time.time()

        # CryptBitcoin
        yield "<br>CryptBitcoin:<br>"
        from Crypt import CryptBitcoin

        # seed = CryptBitcoin.newSeed()
        # yield "- Seed: %s<br>" % seed
        seed = "e180efa477c63b0f2757eac7b1cce781877177fe0966be62754ffd4c8592ce38"

        with benchmark("hdPrivatekey x 10", 0.7):
            for i in range(10):
                privatekey = CryptBitcoin.hdPrivatekey(seed, i * 10)
                yield "."
            valid = "5JsunC55XGVqFQj5kPGK4MWgTL26jKbnPhjnmchSNPo75XXCwtk"
            assert privatekey == valid, "%s != %s" % (privatekey, valid)

        data = "Hello" * 1024  # 5k
        with benchmark("sign x 10", 0.35):
            for i in range(10):
                yield "."
                sign = CryptBitcoin.sign(data, privatekey)
            valid = "G1GXaDauZ8vX/N9Jn+MRiGm9h+I94zUhDnNYFaqMGuOiBHB+kp4cRPZOL7l1yqK5BHa6J+W97bMjvTXtxzljp6w="
            assert sign == valid, "%s != %s" % (sign, valid)

        address = CryptBitcoin.privatekeyToAddress(privatekey)
        for lib_verify in ["btctools", "openssl", "libsecp256k1"]:
            try:
                CryptBitcoin.loadLib(lib_verify)
                loaded = True
                if lib_verify == "openssl":
                    yield "+ Loaded lib: %s<br>" % html.escape(str(CryptBitcoin.bitcoin.core.key._ssl))
                elif lib_verify == "libsecp256k1":
                    import coincurve
                    yield "+ Loaded lib: %s<br>" % type(coincurve._libsecp256k1.lib).__name__
            except Exception as err:
                yield "- Error loading %s: %s<br>" % (lib_verify, err)
                loaded = False
            if not loaded:
                continue
            with benchmark("%s verify x 100" % lib_verify, 0.37):
                for i in range(100):
                    if i % 10 == 0:
                        yield "."
                    ok = CryptBitcoin.verify(data, address, sign, lib_verify=lib_verify)
                assert ok, "does not verify from %s" % address

        # CryptHash
        yield "<br>CryptHash:<br>"
        from Crypt import CryptHash
        import io

        data = io.BytesIO(b"Hello" * 1024 * 1024)  # 5m
        with benchmark("sha256 5M x 10", 0.6):
            for i in range(10):
                data.seek(0)
                hash = CryptHash.sha256sum(data)
                yield "."
            valid = "8cd629d9d6aff6590da8b80782a5046d2673d5917b99d5603c3dcb4005c45ffa"
            assert hash == valid, "%s != %s" % (hash, valid)

        data = io.BytesIO(b"Hello" * 1024 * 1024)  # 5m
        with benchmark("sha512 5M x 10", 0.6):
            for i in range(10):
                data.seek(0)
                hash = CryptHash.sha512sum(data)
                yield "."
            valid = "9ca7e855d430964d5b55b114e95c6bbb114a6d478f6485df93044d87b108904d"
            assert hash == valid, "%s != %s" % (hash, valid)

        with benchmark("os.urandom(256) x 1000", 0.0065):
            for i in range(10):
                for y in range(100):
                    data = os.urandom(256)
                yield "."

        # Msgpack
        from util import Msgpack
        yield "<br>Msgpack: (version: %s)<br>" % ".".join(map(str, Msgpack.msgpack.version))
        binary = b'fqv\xf0\x1a"e\x10,\xbe\x9cT\x9e(\xa5]u\x072C\x8c\x15\xa2\xa8\x93Sw)\x19\x02\xdd\t\xfb\xf67\x88\xd9\xee\x86\xa1\xe4\xb6,\xc6\x14\xbb\xd7$z\x1d\xb2\xda\x85\xf5\xa0\x97^\x01*\xaf\xd3\xb0!\xb7\x9d\xea\x89\xbbh8\xa1"\xa7]e(@\xa2\xa5g\xb7[\xae\x8eE\xc2\x9fL\xb6s\x19\x19\r\xc8\x04S\xd0N\xe4]?/\x01\xea\xf6\xec\xd1\xb3\xc2\x91\x86\xd7\xf4K\xdf\xc2lV\xf4\xe8\x80\xfc\x8ep\xbb\x82\xb3\x86\x98F\x1c\xecS\xc8\x15\xcf\xdc\xf1\xed\xfc\xd8\x18r\xf9\x80\x0f\xfa\x8cO\x97(\x0b]\xf1\xdd\r\xe7\xbf\xed\x06\xbd\x1b?\xc5\xa0\xd7a\x82\xf3\xa8\xe6@\xf3\ri\xa1\xb10\xf6\xd4W\xbc\x86\x1a\xbb\xfd\x94!bS\xdb\xaeM\x92\x00#\x0b\xf7\xad\xe9\xc2\x8e\x86\xbfi![%\xd31]\xc6\xfc2\xc9\xda\xc6v\x82P\xcc\xa9\xea\xb9\xff\xf6\xc8\x17iD\xcf\xf3\xeeI\x04\xe9\xa1\x19\xbb\x01\x92\xf5nn4K\xf8\xbb\xc6\x17e>\xa7 \xbbv'
        data = OrderedDict(
            sorted({"int": 1024 * 1024 * 1024, "float": 12345.67890, "text": "hello" * 1024, "binary": binary}.items())
        )
        data_packed_valid = b'\x84\xa6binary\xc5\x01\x00fqv\xf0\x1a"e\x10,\xbe\x9cT\x9e(\xa5]u\x072C\x8c\x15\xa2\xa8\x93Sw)\x19\x02\xdd\t\xfb\xf67\x88\xd9\xee\x86\xa1\xe4\xb6,\xc6\x14\xbb\xd7$z\x1d\xb2\xda\x85\xf5\xa0\x97^\x01*\xaf\xd3\xb0!\xb7\x9d\xea\x89\xbbh8\xa1"\xa7]e(@\xa2\xa5g\xb7[\xae\x8eE\xc2\x9fL\xb6s\x19\x19\r\xc8\x04S\xd0N\xe4]?/\x01\xea\xf6\xec\xd1\xb3\xc2\x91\x86\xd7\xf4K\xdf\xc2lV\xf4\xe8\x80\xfc\x8ep\xbb\x82\xb3\x86\x98F\x1c\xecS\xc8\x15\xcf\xdc\xf1\xed\xfc\xd8\x18r\xf9\x80\x0f\xfa\x8cO\x97(\x0b]\xf1\xdd\r\xe7\xbf\xed\x06\xbd\x1b?\xc5\xa0\xd7a\x82\xf3\xa8\xe6@\xf3\ri\xa1\xb10\xf6\xd4W\xbc\x86\x1a\xbb\xfd\x94!bS\xdb\xaeM\x92\x00#\x0b\xf7\xad\xe9\xc2\x8e\x86\xbfi![%\xd31]\xc6\xfc2\xc9\xda\xc6v\x82P\xcc\xa9\xea\xb9\xff\xf6\xc8\x17iD\xcf\xf3\xeeI\x04\xe9\xa1\x19\xbb\x01\x92\xf5nn4K\xf8\xbb\xc6\x17e>\xa7 \xbbv\xa5float\xcb@\xc8\x1c\xd6\xe61\xf8\xa1\xa3int\xce@\x00\x00\x00\xa4text\xda\x14\x00hellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohello\
hellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohello'
        with benchmark("pack 5K x 10 000", 0.78):
            for i in range(10):
                for y in range(1000):
                    data_packed = Msgpack.pack(data)
                yield "."
            assert data_packed == data_packed_valid, "%s<br>!=<br>%s" % (repr(data_packed), repr(data_packed_valid))

        with benchmark("unpack 5K x 10 000", 1.2):
            for i in range(10):
                for y in range(1000):
                    data_unpacked = Msgpack.unpack(data_packed, decode=False)
                yield "."
            assert data == data_unpacked, "%s != %s" % (data_unpacked, data)

        for fallback in [True, False]:
            with benchmark("streaming unpack 5K x 10 000 (fallback: %s)" % fallback, 1.4):
                for i in range(10):
                    unpacker = Msgpack.getUnpacker(decode=False, fallback=fallback)
                    for y in range(1000):
                        unpacker.feed(data_packed)
                        for data_unpacked in unpacker:
                            pass
                    yield "."
                assert data == data_unpacked, "%s != %s" % (data_unpacked, data)
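
        # Note: the benchmark() helper used throughout is not shown in this snippet.
        # A minimal sketch of what it presumably does -- time the enclosed block and
        # report the result against the expected duration passed as the second
        # argument (the exact report format here is an assumption):
        #
        #     @contextlib.contextmanager
        #     def benchmark(name, standard):
        #         start = time.time()
        #         yield
        #         taken = time.time() - start
        #         multiplier = standard / taken if taken else float("inf")
        #         print(" > %s: %.3fs (expected %.2fs, %.2fx)" % (name, taken, standard, multiplier))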

        # Db
        import sqlite3
        yield "<br>Db: (version: %s, API: %s)<br>" % (sqlite3.sqlite_version, sqlite3.version)

        schema = {
            "db_name": "TestDb",
            "db_file": "%s/benchmark.db" % config.data_dir,
            "maps": {
                ".*": {
                    "to_table": {
                        "test": "test"
                    }
                }
            },
            "tables": {
                "test": {
                    "cols": [
                        ["test_id", "INTEGER"],
                        ["title", "TEXT"],
                        ["json_id", "INTEGER REFERENCES json (json_id)"]
                    ],
                    "indexes": ["CREATE UNIQUE INDEX test_key ON test(test_id, json_id)"],
                    "schema_changed": 1426195822
                }
            }
        }

        if os.path.isfile("%s/benchmark.db" % config.data_dir):
            os.unlink("%s/benchmark.db" % config.data_dir)

        with benchmark("Open x 10", 0.13):
            for i in range(10):
                db = Db.Db(schema, "%s/benchmark.db" % config.data_dir)
                db.checkTables()
                db.close()
                yield "."

        db = Db.Db(schema, "%s/benchmark.db" % config.data_dir)
        db.checkTables()
        import json

        with benchmark("Insert x 10 x 1000", 1.0):
            for u in range(10):  # 10 users
                data = {"test": []}
                for i in range(1000):  # 1000 lines of data
                    data["test"].append({"test_id": i, "title": "Testdata for %s message %s" % (u, i)})
                json.dump(data, open("%s/test_%s.json" % (config.data_dir, u), "w"))
                db.updateJson("%s/test_%s.json" % (config.data_dir, u))
                os.unlink("%s/test_%s.json" % (config.data_dir, u))
                yield "."

        with benchmark("Buffered insert x 100 x 100", 1.3):
            cur = db.getCursor()
            cur.logging = False
            for u in range(100, 200):  # 100 users
                data = {"test": []}
                for i in range(100):  # 100 lines of data
                    data["test"].append({"test_id": i, "title": "Testdata for %s message %s" % (u, i)})
                json.dump(data, open("%s/test_%s.json" % (config.data_dir, u), "w"))
                db.updateJson("%s/test_%s.json" % (config.data_dir, u), cur=cur)
                os.unlink("%s/test_%s.json" % (config.data_dir, u))
                if u % 10 == 0:
                    yield "."

        yield " + Total rows in db: %s<br>" % db.execute("SELECT COUNT(*) AS num FROM test").fetchone()[0]

        with benchmark("Indexed query x 1000", 0.25):
            found = 0
            cur = db.getCursor()
            cur.logging = False
            for i in range(1000):  # 1000x by test_id
                res = cur.execute("SELECT * FROM test WHERE test_id = %s" % i)
                for row in res:
                    found += 1
                if i % 100 == 0:
                    yield "."

            assert found == 20000, "Found: %s != 20000" % found

        with benchmark("Not indexed query x 100", 0.6):
            found = 0
            cur = db.getCursor()
            cur.logging = False
            for i in range(100):  # 100x by json_id
                res = cur.execute("SELECT * FROM test WHERE json_id = %s" % i)
                for row in res:
                    found += 1
                if i % 10 == 0:
                    yield "."

            assert found == 18900, "Found: %s != 18900" % found

        with benchmark("Like query x 100", 1.8):
            found = 0
            cur = db.getCursor()
            cur.logging = False
            for i in range(100):  # 100x LIKE query by title
                res = cur.execute("SELECT * FROM test WHERE title LIKE '%%message %s%%'" % i)
                for row in res:
                    found += 1
                if i % 10 == 0:
                    yield "."

            assert found == 38900, "Found: %s != 38900" % found
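
        # Why the three query benchmarks differ: test_key is a UNIQUE INDEX on
        # (test_id, json_id), so WHERE test_id = ? can use the index; json_id alone
        # is not a prefix of that index, forcing a full table scan; and a LIKE
        # pattern with a leading wildcard can never use a btree index. This can be
        # confirmed with SQLite's query planner, e.g.:
        #
        #     cur.execute("EXPLAIN QUERY PLAN SELECT * FROM test WHERE test_id = 1").fetchall()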

        db.close()
        if os.path.isfile("%s/benchmark.db" % config.data_dir):
            os.unlink("%s/benchmark.db" % config.data_dir)

        gc.collect()  # Explicit garbage collection

        # Zip
        yield "<br>Compression:<br>"
        import zipfile
        test_data = b"Test" * 1024
        file_name = b"\xc3\x81rv\xc3\xadzt\xc5\xb0r\xc5\x91t\xc3\xbck\xc3\xb6r\xc3\xb3g\xc3\xa9p\xe4\xb8\xad\xe5\x8d\x8e%s.txt".decode("utf8")

        with benchmark("Zip pack x 10", 0.12):
            for i in range(10):
                with zipfile.ZipFile('%s/test.zip' % config.data_dir, 'w') as archive:
                    for y in range(100):
                        zip_info = zipfile.ZipInfo(file_name % y, (1980, 1, 1, 0, 0, 0))
                        zip_info.compress_type = zipfile.ZIP_DEFLATED
                        zip_info.create_system = 3
                        zip_info.flag_bits = 0
                        zip_info.external_attr = 25165824
                        archive.writestr(zip_info, test_data)
                yield "."

            hash = CryptHash.sha512sum(open("%s/test.zip" % config.data_dir, "rb"))
            valid = "f630fece29fff1cc8dbf454e47a87fea2746a4dbbd2ceec098afebab45301562"
            assert hash == valid, "Invalid hash: %s != %s<br>" % (hash, valid)

        with benchmark("Zip unpack x 10", 0.2):
            for i in range(10):
                with zipfile.ZipFile('%s/test.zip' % config.data_dir) as archive:
                    for y in range(100):
                        data = archive.open(file_name % y).read()
                        assert data == test_data, "Invalid data: %s..." % data[0:30]
                yield "."

        if os.path.isfile("%s/test.zip" % config.data_dir):
            os.unlink("%s/test.zip" % config.data_dir)

        # gz, bz2, xz
        import tarfile
        import gzip

        # Monkey patch GzipFile._write_gzip_header to use a fixed mtime, keeping the archive hash independent of the current datetime
        def nodate_write_gzip_header(self):
            self._write_mtime = 0
            original_write_gzip_header(self)

        original_write_gzip_header = gzip.GzipFile._write_gzip_header
        gzip.GzipFile._write_gzip_header = nodate_write_gzip_header

        test_data_io = io.BytesIO(b"Test" * 1024)
        archive_formats = {
            "gz": {"hash": "4704ebd8c987ed6f833059f1de9c475d443b0539b8d4c4cb8b49b26f7bbf2d19", "time_pack": 0.3, "time_unpack": 0.2},
            "bz2": {"hash": "90cba0b4d9abaa37b830bf37e4adba93bfd183e095b489ebee62aaa94339f3b5", "time_pack": 2.0, "time_unpack": 0.5},
            "xz": {"hash": "37abc16d552cfd4a495cb2acbf8b1d5877631d084f6571f4d6544bc548c69bae", "time_pack": 1.4, "time_unpack": 0.2}
        }
        for ext, format_data in archive_formats.items():
            archive_path = '%s/test.tar.%s' % (config.data_dir, ext)
            with benchmark("Tar.%s pack x 10" % ext, format_data["time_pack"]):
                for i in range(10):
                    with tarfile.open(archive_path, 'w:%s' % ext) as archive:
                        for y in range(100):
                            test_data_io.seek(0)
                            tar_info = tarfile.TarInfo(file_name % y)
                            tar_info.size = 4 * 1024
                            archive.addfile(tar_info, test_data_io)
                    yield "."

                hash = CryptHash.sha512sum(open("%s/test.tar.%s" % (config.data_dir, ext), "rb"))
                valid = format_data["hash"]
                assert hash == valid, "Invalid hash: %s != %s<br>" % (hash, valid)

            archive_size = os.path.getsize(archive_path) / 1024
            with benchmark("Tar.%s unpack (%.2fkB) x 10" % (ext, archive_size), format_data["time_unpack"]):
                for i in range(10):
                    with tarfile.open(archive_path, 'r:%s' % ext) as archive:
                        for y in range(100):
                            assert archive.extractfile(file_name % y).read() == test_data
                    yield "."

            if os.path.isfile(archive_path):
                os.unlink(archive_path)
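
        # The gzip header monkey patch above is process-global and is never undone
        # in this snippet; restoring the original method after the archive
        # benchmarks keeps other gzip users unaffected (assumed cleanup, not part
        # of the original code):
        gzip.GzipFile._write_gzip_header = original_write_gzip_header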

        yield "<br>Done. Total: %.2fs" % (time.time() - t)
コード例 #46
0
    def getAddNonce(self):
        add_nonce = CryptHash.random()
        self.server.add_nonces.append(add_nonce)
        return add_nonce
コード例 #47
0
                del (content["sign"])  # The file signed without the sign
                sign_content = json.dumps(
                    content, sort_keys=True
                )  # Dump the json to string to remove whitepsace

                return CryptBitcoin.verify(sign_content, self.address, sign)
            except Exception, err:
                self.log.error("Verify sign error: %s" %
                               Debug.formatException(err))
                return False

        else:  # Check using sha1 hash
            if self.content and inner_path in self.content["files"]:
                if "sha512" in self.content["files"][inner_path]:  # Use sha512 to verify if possible
                    return CryptHash.sha512sum(file) == self.content["files"][inner_path]["sha512"]
                else:  # Backward compatibility
                    return CryptHash.sha1sum(file) == self.content["files"][inner_path]["sha1"]

            else:  # File not in content.json
                self.log.error("File not in content.json: %s" % inner_path)
                return False
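
    # The signing side implied by the verification above, as a sketch (assuming a
    # CryptBitcoin.sign counterpart to CryptBitcoin.verify; not shown in this snippet):
    #
    #     sign_content = json.dumps(content, sort_keys=True)
    #     content["sign"] = CryptBitcoin.sign(sign_content, privatekey)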

    # Verify the sha512sum of all files against content.json
    def verifyFiles(self, quick_check=False):  # quick_check: compare file sizes only instead of hashing
        bad_files = []
        if not self.content:  # No content.json, download it first
            self.needFile("content.json", update=True)  # Force update to fix corrupt file
            self.loadContent()  # Reload content.json