Example #1
0
    def testFindHash(self, tor_manager, file_server, site, site_temp):
        """findHashIds should report onion peers, IP peers and — once the
        server itself holds a hash — the server's own address."""
        file_server.ip_incoming = {}  # Clear flood-protection state
        file_server.sites[site.address] = site
        file_server.tor_manager = tor_manager

        client = FileServer(file_server.ip, 1545)
        client.sites = {site_temp.address: site_temp}
        site_temp.connection_server = client

        # Register file_server as a peer of the client site
        peer_file_server = site_temp.addPeer(file_server.ip, 1544)

        # Nobody advertises hash 1234 yet
        assert peer_file_server.findHashIds([1234]) == {}

        # Register fake peers advertising the required hashes
        onion_peer = site.addPeer("bka4ht2bzxchy44r.onion", 1544)
        onion_peer.hashfield.append(1234)
        ip_peer_a = site.addPeer("1.2.3.5", 1545)
        ip_peer_a.hashfield.append(1234)
        ip_peer_a.hashfield.append(1235)
        ip_peer_b = site.addPeer("1.2.3.6", 1546)
        ip_peer_b.hashfield.append(1235)
        ip_peer_b.hashfield.append(1236)

        res = peer_file_server.findHashIds([1234, 1235])

        assert sorted(res[1234]) == [('1.2.3.5', 1545), ("bka4ht2bzxchy44r.onion", 1544)]
        assert sorted(res[1235]) == [('1.2.3.5', 1545), ('1.2.3.6', 1546)]

        # Once the server itself stores the hash its own address is included
        site.content_manager.hashfield.append(1234)

        res = peer_file_server.findHashIds([1234, 1235])
        assert sorted(res[1234]) == [('1.2.3.5', 1545), (file_server.ip, 1544), ("bka4ht2bzxchy44r.onion", 1544)]
        assert sorted(res[1235]) == [('1.2.3.5', 1545), ('1.2.3.6', 1546)]
Example #2
0
    def testPex(self, file_server, site, site_temp):
        """Peer exchange should hand out public peers but never private (LAN) ones."""
        file_server.sites[site.address] = site
        client = FileServer(file_server.ip, 1545)
        client.sites = {site_temp.address: site_temp}
        site_temp.connection_server = client
        connection = client.getConnection(file_server.ip, 1544)

        # Fake public peer on the source site, with a live-looking connection
        fake_peer = site.addPeer(file_server.ip_external, 11337, return_peer=True)
        fake_peer.connection = Connection(file_server, file_server.ip_external, 11337)
        fake_peer.connection.last_recv_time = time.time()
        assert fake_peer in site.getConnectablePeers()

        # Register file_server as a peer of the client site
        peer_file_server = site_temp.addPeer(file_server.ip, 1544)

        # The public peer should arrive at the client via pex
        assert "%s:11337" % file_server.ip_external not in site_temp.peers
        assert peer_file_server.pex()
        assert "%s:11337" % file_server.ip_external in site_temp.peers

        # Private-network peers must not be exchanged
        fake_peer_private = site.addPeer("192.168.0.1", 11337, return_peer=True)
        assert fake_peer_private not in site.getConnectablePeers(allow_private=False)
        fake_peer_private.connection = Connection(file_server, "192.168.0.1", 11337)
        fake_peer_private.connection.last_recv_time = time.time()

        assert "192.168.0.1:11337" not in site_temp.peers
        assert not peer_file_server.pex()
        assert "192.168.0.1:11337" not in site_temp.peers

        connection.close()
        client.stop()
Example #3
0
    def testRenameOptional(self, file_server, site, site_temp):
        """Renaming an optional file should propagate via metadata only.

        The client already holds the file body, so after the rename is
        published no streamFile transfer should happen, and the size /
        hashfield bookkeeping must stay identical.
        """
        assert site.storage.directory == config.data_dir + "/" + site.address
        assert site_temp.storage.directory == config.data_dir + "-temp/" + site.address

        # Init source server
        site.connection_server = file_server
        file_server.sites[site.address] = site

        # Init client server
        client = FileServer(file_server.ip, 1545)
        client.sites = {site_temp.address: site_temp}
        site_temp.connection_server = client
        site_temp.announce = mock.MagicMock(return_value=True)  # Don't try to find peers from the net

        site_temp.addPeer(file_server.ip, 1544)

        assert site_temp.download(blind_includes=True, retry_bad_files=False).get(timeout=10)

        assert site_temp.settings["optional_downloaded"] == 0

        site_temp.needFile("data/optional.txt")

        assert site_temp.settings["optional_downloaded"] > 0
        # Snapshot a *copy*: assigning site_temp.settings directly would alias
        # the live dict and make the comparisons below always pass.
        settings_before = dict(site_temp.settings)
        hashfield_before = site_temp.content_manager.hashfield.tobytes()

        # Rename the optional file on the source side
        os.rename(site.storage.getPath("data/optional.txt"), site.storage.getPath("data/optional-new.txt"))

        # remove_missing_optional drops the old name from content.json
        site.content_manager.sign("content.json", privatekey="5KUh3PvNm5HUWoCfSUfcYvfQ2g3PrRNJWr6Q9eqdBGu23mtMntv", remove_missing_optional=True)

        content = site.storage.loadJson("content.json")
        assert "data/optional-new.txt" in content["files_optional"]
        assert "data/optional.txt" not in content["files_optional"]
        assert not site_temp.storage.isFile("data/optional-new.txt")
        assert site_temp.storage.isFile("data/optional.txt")

        with Spy.Spy(FileRequest, "route") as requests:
            site.publish()
            time.sleep(0.1)
            assert site_temp.download(blind_includes=True, retry_bad_files=False).get(timeout=10)  # Wait for download
            # The body must not be re-transferred — only metadata changed
            assert "streamFile" not in [req[1] for req in requests]

        content = site_temp.storage.loadJson("content.json")
        assert "data/optional-new.txt" in content["files_optional"]
        assert "data/optional.txt" not in content["files_optional"]
        assert site_temp.storage.isFile("data/optional-new.txt")
        assert not site_temp.storage.isFile("data/optional.txt")

        # Bookkeeping is unchanged by a pure rename
        assert site_temp.settings["size"] == settings_before["size"]
        assert site_temp.settings["size_optional"] == settings_before["size_optional"]
        assert site_temp.settings["optional_downloaded"] == settings_before["optional_downloaded"]
        assert site_temp.content_manager.hashfield.tobytes() == hashfield_before

        assert site_temp.storage.deleteFiles()
        [connection.close() for connection in file_server.connections]
Example #4
0
    def testHugeContentSiteUpdate(self, file_server, site, site_temp):
        """A content.json grown past 10MB should still sync via diff publish
        and respect the client's raised size limit."""
        # Init source server
        site.connection_server = file_server
        file_server.sites[site.address] = site

        # Init client server
        client = FileServer(file_server.ip, 1545)
        client.sites = {site_temp.address: site_temp}
        site_temp.connection_server = client

        # Connect peers
        site_temp.addPeer(file_server.ip, 1544)

        # Download site from site to site_temp
        assert site_temp.download(blind_includes=True,
                                  retry_bad_files=False).get(timeout=10)
        site_temp.settings["size_limit"] = int(20 * 1024 * 1024)
        site_temp.saveSettings()

        # Raise limit size to 20MB on site so it can be signed
        site.settings["size_limit"] = int(20 * 1024 * 1024)
        site.saveSettings()

        # Inflate content.json well past 10MB
        content_json = site.storage.loadJson("content.json")
        content_json["description"] = "PartirUnJour" * 1024 * 1024
        site.storage.writeJson("content.json", content_json)
        # Reload for the side effect only; the changed/deleted lists are unused
        site.content_manager.loadContent("content.json", force=True)

        # Make sure we have 2 different content.json files
        assert site_temp.storage.open(
            "content.json").read() != site.storage.open("content.json").read()

        # Generate diff
        diffs = site.content_manager.getDiffs("content.json")

        # Publish with patch
        site.log.info("Publish new content.json bigger than 10MB")
        with Spy.Spy(FileRequest, "route") as requests:
            site.content_manager.sign(
                "content.json",
                privatekey="5KUh3PvNm5HUWoCfSUfcYvfQ2g3PrRNJWr6Q9eqdBGu23mtMntv"
            )
            assert site.storage.getSize(
                "content.json") > 10 * 1024 * 1024  # verify it over 10MB
            time.sleep(0.1)
            site.publish(diffs=diffs)
            assert site_temp.download(blind_includes=True,
                                      retry_bad_files=False).get(timeout=10)

        # The synced copy stays under the client's size limit and matches byte-for-byte
        assert site_temp.storage.getSize(
            "content.json") < site_temp.getSizeLimit() * 1024 * 1024
        assert site_temp.storage.open(
            "content.json").read() == site.storage.open("content.json").read()
Example #5
0
    def testBigUpdate(self, file_server, site, site_temp):
        """Publishing a big content.json with diffs should need exactly one file transfer."""
        # Source side
        site.connection_server = file_server
        file_server.sites[site.address] = site

        # Client side
        client = FileServer(file_server.ip, 1545)
        client.sites = {site_temp.address: site_temp}
        site_temp.connection_server = client

        # Peer the two servers together
        site_temp.addPeer(file_server.ip, 1544)

        # Initial full download to site_temp
        assert site_temp.download(blind_includes=True, retry_bad_files=False).get(timeout=10)
        assert list(site_temp.bad_files.keys()) == ["data/users/1J6UrZMkarjVg5ax9W4qThir3BFUikbW6C/content.json"]

        # Prepare a patched data file under the -new postfix
        before = site.storage.open("data/data.json").read()
        after = before.replace(b'"ZeroBlog"', b'"PatchedZeroBlog"')
        assert before != after

        site.storage.open("data/data.json-new", "wb").write(after)

        assert site.storage.open("data/data.json-new").read() == after
        assert site_temp.storage.open("data/data.json").read() != after

        # Diff generation consumes the -new file and swaps it into place
        diffs = site.content_manager.getDiffs("content.json")
        assert not site.storage.isFile("data/data.json-new")  # New data file removed
        assert site.storage.open("data/data.json").read() == after  # -new postfix removed
        assert "data/data.json" in diffs

        # Inflate content.json so it qualifies as big
        content_json = site.storage.loadJson("content.json")
        content_json["description"] = "BigZeroBlog" * 1024 * 10
        site.storage.writeJson("content.json", content_json)
        site.content_manager.loadContent("content.json", force=True)

        # Publish with patch; only content.json itself should travel
        site.log.info("Publish new data.json with patch")
        with Spy.Spy(FileRequest, "route") as requests:
            site.content_manager.sign("content.json", privatekey="5KUh3PvNm5HUWoCfSUfcYvfQ2g3PrRNJWr6Q9eqdBGu23mtMntv")
            assert site.storage.getSize("content.json") > 10 * 1024  # Make it a big content.json
            site.publish(diffs=diffs)
            time.sleep(0.1)
            assert site_temp.download(blind_includes=True, retry_bad_files=False).get(timeout=10)
            transfers = [req for req in requests if req[1] in ("getFile", "streamFile")]
            assert len(transfers) == 1

        assert site_temp.storage.open("data/data.json").read() == after
        assert site_temp.storage.open("content.json").read() == site.storage.open("content.json").read()
Example #6
0
    def testUnicodeFilename(self, file_server, site, site_temp):
        """A file with a non-ASCII name should sign, publish and download correctly."""
        assert site.storage.directory == config.data_dir + "/" + site.address
        assert site_temp.storage.directory == config.data_dir + "-temp/" + site.address

        # Init source server
        site.connection_server = file_server
        file_server.sites[site.address] = site

        # Init client server
        client = FileServer(file_server.ip, 1545)
        client.sites = {site_temp.address: site_temp}
        site_temp.connection_server = client
        site_temp.announce = mock.MagicMock(
            return_value=True)  # Don't try to find peers from the net

        site_temp.addPeer(file_server.ip, 1544)

        assert site_temp.download(blind_includes=True,
                                  retry_bad_files=False).get(timeout=10)

        # Add a file with a unicode (accented) filename
        site.storage.write("data/img/árvíztűrő.png", b"test")

        site.content_manager.sign(
            "content.json",
            privatekey="5KUh3PvNm5HUWoCfSUfcYvfQ2g3PrRNJWr6Q9eqdBGu23mtMntv")

        content = site.storage.loadJson("content.json")
        assert "data/img/árvíztűrő.png" in content["files"]
        assert not site_temp.storage.isFile("data/img/árvíztűrő.png")
        # Snapshot a *copy*: assigning site_temp.settings directly would alias
        # the live dict and make the size comparisons below always pass.
        settings_before = dict(site_temp.settings)

        with Spy.Spy(FileRequest, "route") as requests:
            site.publish()
            time.sleep(0.1)
            assert site_temp.download(blind_includes=True,
                                      retry_bad_files=False).get(
                                          timeout=10)  # Wait for download
            # Exactly one streamed transfer for the new file
            assert len([req[1] for req in requests
                        if req[1] == "streamFile"]) == 1

        content = site_temp.storage.loadJson("content.json")
        assert "data/img/árvíztűrő.png" in content["files"]
        assert site_temp.storage.isFile("data/img/árvíztűrő.png")

        assert site_temp.settings["size"] == settings_before["size"]
        assert site_temp.settings["size_optional"] == settings_before[
            "size_optional"]

        assert site_temp.storage.deleteFiles()
        [connection.close() for connection in file_server.connections]
Example #7
0
    def testArchivedBeforeDownload(self, file_server, site, site_temp):
        """Setting archived_before should delete already-downloaded user content
        from the remote client on the next update."""
        # Init source server
        site.connection_server = file_server
        file_server.sites[site.address] = site

        # Init client server
        client = FileServer(file_server.ip, 1545)
        client.sites = {site_temp.address: site_temp}
        site_temp.connection_server = client

        # Download normally
        site_temp.addPeer(file_server.ip, 1544)
        assert site_temp.download(blind_includes=True, retry_bad_files=False).get(timeout=10)
        bad_files = site_temp.storage.verifyFiles(quick_check=True)["bad_files"]

        assert not bad_files
        assert "data/users/1C5sgvWaSgfaTpV5kjBCnCiKtENNMYo69q/content.json" in site_temp.content_manager.contents
        assert site_temp.storage.isFile("data/users/1C5sgvWaSgfaTpV5kjBCnCiKtENNMYo69q/content.json")
        assert len(list(site_temp.storage.query("SELECT * FROM comment"))) == 2

        # Add archived data
        assert "archived_before" not in site.content_manager.contents["data/users/content.json"]["user_contents"]
        assert not site.content_manager.isArchived("data/users/1C5sgvWaSgfaTpV5kjBCnCiKtENNMYo69q/content.json", time.time()-1)

        # Archive everything modified up to (and including) the user content's mtime
        content_modification_time = site.content_manager.contents["data/users/1C5sgvWaSgfaTpV5kjBCnCiKtENNMYo69q/content.json"]["modified"]
        site.content_manager.contents["data/users/content.json"]["user_contents"]["archived_before"] = content_modification_time
        site.content_manager.sign("data/users/content.json", privatekey="5KUh3PvNm5HUWoCfSUfcYvfQ2g3PrRNJWr6Q9eqdBGu23mtMntv")

        date_archived = site.content_manager.contents["data/users/content.json"]["user_contents"]["archived_before"]
        assert site.content_manager.isArchived("data/users/1C5sgvWaSgfaTpV5kjBCnCiKtENNMYo69q/content.json", date_archived-1)
        assert site.content_manager.isArchived("data/users/1C5sgvWaSgfaTpV5kjBCnCiKtENNMYo69q/content.json", date_archived)
        assert not site.content_manager.isArchived("data/users/1C5sgvWaSgfaTpV5kjBCnCiKtENNMYo69q/content.json", date_archived+1)  # Allow user to update archived data later

        # Push archived update
        assert "archived_before" not in site_temp.content_manager.contents["data/users/content.json"]["user_contents"]
        site.publish()
        time.sleep(0.1)
        assert site_temp.download(blind_includes=True, retry_bad_files=False).get(timeout=10)  # Wait for download

        # The archived content should disappear from remote client
        assert "archived_before" in site_temp.content_manager.contents["data/users/content.json"]["user_contents"]
        assert "data/users/1C5sgvWaSgfaTpV5kjBCnCiKtENNMYo69q/content.json" not in site_temp.content_manager.contents
        assert not site_temp.storage.isDir("data/users/1C5sgvWaSgfaTpV5kjBCnCiKtENNMYo69q")
        assert len(list(site_temp.storage.query("SELECT * FROM comment"))) == 1
        assert len(list(site_temp.storage.query("SELECT * FROM json WHERE directory LIKE '%1C5sgvWaSgfaTpV5kjBCnCiKtENNMYo69q%'"))) == 0

        assert site_temp.storage.deleteFiles()
        [connection.close() for connection in file_server.connections]
Example #8
0
    def testPing(self, file_server, site, site_temp):
        """A live peer answers ping; removing it drops it from the peer list."""
        file_server.sites[site.address] = site
        client = FileServer(file_server.ip, 1545)
        client.sites = {site_temp.address: site_temp}
        site_temp.connection_server = client
        conn = client.getConnection(file_server.ip, 1544)

        # Register file_server as a peer of the client site
        remote_peer = site_temp.addPeer(file_server.ip, 1544)

        # Ping returns a latency value (not None) for a reachable peer
        assert remote_peer.ping() is not None

        # Removing the peer takes it out of the site's peer registry
        assert remote_peer in site_temp.peers.values()
        remote_peer.remove()
        assert remote_peer not in site_temp.peers.values()

        conn.close()
        client.stop()
Example #9
0
    def testDownloadFile(self, file_server, site, site_temp):
        """Both the streaming and the plain getFile paths should return content.json."""
        file_server.sites[site.address] = site
        client = FileServer(file_server.ip, 1545)
        client.sites = {site_temp.address: site_temp}
        site_temp.connection_server = client
        conn = client.getConnection(file_server.ip, 1544)

        # Register file_server as a peer of the client site
        remote_peer = site_temp.addPeer(file_server.ip, 1544)

        # streamFile path
        streamed = remote_peer.getFile(site_temp.address, "content.json", streaming=True)
        assert b"sign" in streamed.getvalue()

        # plain getFile path
        fetched = remote_peer.getFile(site_temp.address, "content.json")
        assert b"sign" in fetched.getvalue()

        conn.close()
        client.stop()
Example #10
0
    def testFindHash(self, file_server, site, site_temp):
        """findHashIds should list external-IP peers and, once the server itself
        holds a hash, the server's own address too."""
        file_server.sites[site.address] = site
        client = FileServer(file_server.ip, 1545)
        client.sites = {site_temp.address: site_temp}
        site_temp.connection_server = client

        # Register file_server as a peer of the client site
        remote_peer = site_temp.addPeer(file_server.ip, 1544)

        # Nobody advertises hash 1234 yet
        assert remote_peer.findHashIds([1234]) == {}

        # Register fake peers advertising the required hashes
        ext_peer = site.addPeer(file_server.ip_external, 1544)
        ext_peer.hashfield.append(1234)
        peer_a = site.addPeer("1.2.3.5", 1545)
        peer_a.hashfield.append(1234)
        peer_a.hashfield.append(1235)
        peer_b = site.addPeer("1.2.3.6", 1546)
        peer_b.hashfield.append(1235)
        peer_b.hashfield.append(1236)

        res = remote_peer.findHashIds([1234, 1235])
        assert sorted(res[1234]) == sorted([(file_server.ip_external, 1544),
                                            ("1.2.3.5", 1545)])
        assert sorted(res[1235]) == sorted([("1.2.3.5", 1545),
                                            ("1.2.3.6", 1546)])

        # Once the server itself stores the hash its own address is included
        site.content_manager.hashfield.append(1234)

        res = remote_peer.findHashIds([1234, 1235])
        assert sorted(res[1234]) == sorted([(file_server.ip_external, 1544),
                                            ("1.2.3.5", 1545),
                                            (file_server.ip, 1544)])
        assert sorted(res[1235]) == sorted([("1.2.3.5", 1545),
                                            ("1.2.3.6", 1546)])
Example #11
0
    def testUpdate(self, file_server, site, site_temp):
        """A full publish should transfer the changed file once; a diff publish
        should transfer no file body at all."""
        assert site.storage.directory == config.data_dir + "/" + site.address
        assert site_temp.storage.directory == config.data_dir + "-temp/" + site.address

        # Init source server
        site.connection_server = file_server
        file_server.sites[site.address] = site

        # Init client server
        client = FileServer(file_server.ip, 1545)
        client.sites = {site_temp.address: site_temp}
        site_temp.connection_server = client

        # Don't try to find peers from the net
        site.announce = mock.MagicMock(return_value=True)
        site_temp.announce = mock.MagicMock(return_value=True)

        # Connect peers
        site_temp.addPeer(file_server.ip, 1544)

        # Download site from site to site_temp
        assert site_temp.download(blind_includes=True, retry_bad_files=False).get(timeout=10)
        assert len(site_temp.bad_files) == 1

        # Update file
        data_original = site.storage.open("data/data.json").read()
        data_new = data_original.replace(b'"ZeroBlog"', b'"UpdatedZeroBlog"')
        assert data_original != data_new

        site.storage.open("data/data.json", "wb").write(data_new)

        assert site.storage.open("data/data.json").read() == data_new
        assert site_temp.storage.open("data/data.json").read() == data_original

        site.log.info("Publish new data.json without patch")
        # Publish without patch: exactly one file transfer expected
        with Spy.Spy(FileRequest, "route") as requests:
            site.content_manager.sign("content.json", privatekey="5KUh3PvNm5HUWoCfSUfcYvfQ2g3PrRNJWr6Q9eqdBGu23mtMntv")
            site.publish()
            time.sleep(0.1)
            site.log.info("Downloading site")
            assert site_temp.download(blind_includes=True, retry_bad_files=False).get(timeout=10)
            assert len([request for request in requests if request[1] in ("getFile", "streamFile")]) == 1

        assert site_temp.storage.open("data/data.json").read() == data_new

        # Close connection to avoid update spam limit
        list(site.peers.values())[0].remove()
        site.addPeer(file_server.ip, 1545)
        list(site_temp.peers.values())[0].ping()  # Connect back
        time.sleep(0.1)

        # Update with patch
        data_new = data_original.replace(b'"ZeroBlog"', b'"PatchedZeroBlog"')
        assert data_original != data_new

        site.storage.open("data/data.json-new", "wb").write(data_new)

        assert site.storage.open("data/data.json-new").read() == data_new
        assert site_temp.storage.open("data/data.json").read() != data_new

        # Generate diff (consumes the -new file and swaps it into place)
        diffs = site.content_manager.getDiffs("content.json")
        assert not site.storage.isFile("data/data.json-new")  # New data file removed
        assert site.storage.open("data/data.json").read() == data_new  # -new postfix removed
        assert "data/data.json" in diffs
        assert diffs["data/data.json"] == [('=', 2), ('-', 29), ('+', [b'\t"title": "PatchedZeroBlog",\n']), ('=', 31102)]

        # Publish with patch: no file body should be transferred
        site.log.info("Publish new data.json with patch")
        with Spy.Spy(FileRequest, "route") as requests:
            site.content_manager.sign("content.json", privatekey="5KUh3PvNm5HUWoCfSUfcYvfQ2g3PrRNJWr6Q9eqdBGu23mtMntv")

            site.publish(diffs=diffs)
            time.sleep(0.1)
            assert site_temp.download(blind_includes=True, retry_bad_files=False).get(timeout=10)
            assert [request for request in requests if request[1] in ("getFile", "streamFile")] == []

        assert site_temp.storage.open("data/data.json").read() == data_new

        assert site_temp.storage.deleteFiles()
        [connection.close() for connection in file_server.connections]