def testFindOptional(self, file_server, site, site_temp):
        file_server.ip_incoming = {}  # Reset flood protection

        # Init source server
        site.connection_server = file_server
        file_server.sites[site.address] = site

        # Init full source server (has optional files)
        site_full = Site("1TeSTvb4w2PWE81S2rEELgmX2GCCExQGT")
        file_server_full = FileServer("127.0.0.1", 1546)
        site_full.connection_server = file_server_full
        gevent.spawn(lambda: ConnectionServer.start(file_server_full))
        time.sleep(0)  # Port opening
        file_server_full.sites[site_full.address] = site_full  # Add site
        site_full.storage.verifyFiles(quick_check=True)  # Check optional files
        site_full_peer = site.addPeer("127.0.0.1", 1546)  # Add it to source server
        assert site_full_peer.updateHashfield()  # Update hashfield

        # Init client server
        site_temp.connection_server = ConnectionServer("127.0.0.1", 1545)
        site_temp.announce = mock.MagicMock(return_value=True)  # Don't try to find peers from the net
        site_temp.addPeer("127.0.0.1", 1544)  # Add source server

        # Download normal files
        site_temp.download(blind_includes=True).join(timeout=5)

        # Download optional data/optional.txt
        optional_file_info = site_temp.content_manager.getFileInfo("data/optional.txt")
        assert not site_temp.storage.isFile("data/optional.txt")
        assert not site.content_manager.hashfield.hasHash(
            optional_file_info["sha512"]
        )  # Source server doesn't know it has the file
        assert site_full_peer.hashfield.hasHash(
            optional_file_info["sha512"]
        )  # Source full peer on the source server has the file
        assert site_full.content_manager.hashfield.hasHash(
            optional_file_info["sha512"]
        )  # Source full server knows it has the file

        with Spy.Spy(FileRequest, "route") as requests:
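            # "requests" collects the arguments of each FileRequest.route call, so message counts can be asserted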
            # Request 2 files at the same time
            threads = []
            threads.append(site_temp.needFile("data/optional.txt", blocking=False))
            threads.append(
                site_temp.needFile(
                    "data/users/1CjfbrbwtP8Y2QjPy12vpTATkUT7oSiPQ9/peanut-butter-jelly-time.gif", blocking=False
                )
            )
            gevent.joinall(threads)

            assert (
                len([request for request in requests if request[0] == "findHashIds"]) == 1
            )  # findHashIds should be called only once

        assert site_temp.storage.isFile("data/optional.txt")
        assert site_temp.storage.isFile("data/users/1CjfbrbwtP8Y2QjPy12vpTATkUT7oSiPQ9/peanut-butter-jelly-time.gif")

        assert site_temp.storage.deleteFiles()
        file_server_full.stop()
        [connection.close() for connection in file_server.connections]
Example #2
    def sitePublish(self, address, peer_ip=None, peer_port=15441, inner_path="content.json"):
        global file_server
        from Site import SiteManager
        from File import FileServer  # We need fileserver to handle incoming file requests
        from Peer import Peer

        logging.info("Creating FileServer....")
        file_server = FileServer()
        file_server_thread = gevent.spawn(file_server.start, check_sites=False)  # Don't check every site's integrity
        file_server.openport()

        site = SiteManager.site_manager.list()[address]
        site.settings["serving"] = True  # Serving the site even if its disabled

        # Notify local client on new content
        if config.ip_external:
            logging.info("Sending siteReload")
            my_peer = Peer(config.ip_external, config.fileserver_port)
            logging.info(my_peer.request("siteReload", {"site": site.address, "inner_path": inner_path}))

        if peer_ip:  # Announce the specified ip
            site.addPeer(peer_ip, peer_port)
        else:  # Just ask the tracker
            logging.info("Gathering peers from tracker")
            site.announce()  # Gather peers

        published = site.publish(20, inner_path)  # Push to 20 peers
        if published > 0:
            time.sleep(3)
            logging.info("Serving files (max 60s)...")
            gevent.joinall([file_server_thread], timeout=60)
            logging.info("Done.")
        else:
            logging.info("No peers found, sitePublish command only works if you already have visitors serving your site")
Example #3
    def testPex(self, file_server, site, site_temp):
        file_server.sites[site.address] = site
        client = FileServer("127.0.0.1", 1545)
        client.sites[site_temp.address] = site_temp
        site_temp.connection_server = client
        connection = client.getConnection("127.0.0.1", 1544)

        # Add new fake peer to site
        fake_peer = site.addPeer("1.2.3.4", 11337, return_peer=True)
        # Add fake connection to it
        fake_peer.connection = Connection(file_server, "1.2.3.4", 11337)
        fake_peer.connection.last_recv_time = time.time()
        assert fake_peer in site.getConnectablePeers()

        # Add file_server as peer to client
        peer_file_server = site_temp.addPeer("127.0.0.1", 1544)

        assert "1.2.3.4:11337" not in site_temp.peers
        assert peer_file_server.pex()
        assert "1.2.3.4:11337" in site_temp.peers

        # Should not exchange private peers from local network
        fake_peer_private = site.addPeer("192.168.0.1", 11337, return_peer=True)
        assert fake_peer_private not in site.getConnectablePeers(allow_private=False)
        fake_peer_private.connection = Connection(file_server, "192.168.0.1", 11337)
        fake_peer_private.connection.last_recv_time = time.time()

        assert "192.168.0.1:11337" not in site_temp.peers
        assert not peer_file_server.pex()
        assert "192.168.0.1:11337" not in site_temp.peers


        connection.close()
        client.stop()
Example #4
def file_server(request):
    request.addfinalizer(CryptConnection.manager.removeCerts)  # Remove cert files after end
    file_server = FileServer("127.0.0.1", 1544)
    gevent.spawn(lambda: ConnectionServer.start(file_server))
    # Wait for port opening
    for retry in range(10):
        time.sleep(0.1)  # Port opening
        try:
            conn = file_server.getConnection("127.0.0.1", 1544)
            conn.close()
            break
        except Exception as err:
            print(err)
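
A minimal sketch of how a fixture like this typically finishes, assuming the standard pytest yield-fixture pattern (these lines are not part of the original snippet):

    yield file_server  # Hand the started server to the test (assumed completion)
    file_server.stop()  # Tear the server down after the test (assumed completion)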
Example #5
    def testHashfieldExchange(self, file_server, site, site_temp):
        server1 = file_server
        server1.ip_incoming = {}  # Reset flood protection
        server1.sites[site.address] = site
        server2 = FileServer("127.0.0.1", 1545)
        server2.sites[site_temp.address] = site_temp
        site_temp.connection_server = server2
        site.storage.verifyFiles(quick_check=True)  # Find what optional files we have

        # Add file_server as peer to client
        server2_peer1 = site_temp.addPeer("127.0.0.1", 1544)

        # Check if hashfield has any files
        assert len(site.content_manager.hashfield) > 0

        # Testing hashfield sync
        assert len(server2_peer1.hashfield) == 0
        assert server2_peer1.updateHashfield()  # Query hashfield from peer
        assert len(server2_peer1.hashfield) > 0

        # Test force push new hashfield
        site_temp.content_manager.hashfield.appendHash("AABB")
        server1_peer2 = site.addPeer("127.0.0.1", 1545, return_peer=True)
        with Spy.Spy(FileRequest, "route") as requests:
            assert len(server1_peer2.hashfield) == 0
            server2_peer1.sendMyHashfield()
            assert len(server1_peer2.hashfield) == 1
            server2_peer1.sendMyHashfield()  # Hashfield not changed, should be ignored

            assert len(requests) == 1

            time.sleep(0.01)  # To make hashfield change date different

            site_temp.content_manager.hashfield.appendHash("AACC")
            server2_peer1.sendMyHashfield()  # Push hashfield

            assert len(server1_peer2.hashfield) == 2
            assert len(requests) == 2

            site_temp.content_manager.hashfield.appendHash("AADD")

            assert server1_peer2.updateHashfield(force=True)  # Request hashfield
            assert len(server1_peer2.hashfield) == 3
            assert len(requests) == 3

            assert not server2_peer1.sendMyHashfield()  # Not changed, should be ignored
            assert len(requests) == 3

        server2.stop()
Example #6
    def main(self):
        global ui_server, file_server
        from File import FileServer
        from Ui import UiServer
        logging.info("Creating FileServer....")
        file_server = FileServer()
        logging.info("Creating UiServer....")
        ui_server = UiServer()
        file_server.ui_server = ui_server

        logging.info("Removing old SSL certs...")
        from Crypt import CryptConnection
        CryptConnection.manager.removeCerts()

        logging.info("Starting servers....")
        gevent.joinall([gevent.spawn(ui_server.start), gevent.spawn(file_server.start)])
Example #7
    def sitePublish(self, address, peer_ip=None, peer_port=15441, inner_path="content.json", diffs={}):
        global file_server
        from Site import Site
        from Site import SiteManager
        from File import FileServer  # We need fileserver to handle incoming file requests
        from Peer import Peer
        SiteManager.site_manager.load()

        logging.info("Loading site...")
        site = Site(address, allow_create=False)
        site.settings["serving"] = True  # Serving the site even if its disabled

        logging.info("Creating FileServer....")
        file_server = FileServer()
        site.connection_server = file_server
        file_server_thread = gevent.spawn(file_server.start, check_sites=False)  # Don't check every site's integrity
        time.sleep(0.001)

        if not file_server_thread.ready():
            # Started fileserver
            file_server.openport()
            if peer_ip:  # Announce the specified ip
                site.addPeer(peer_ip, peer_port)
            else:  # Just ask the tracker
                logging.info("Gathering peers from tracker")
                site.announce()  # Gather peers
            published = site.publish(5, inner_path, diffs=diffs)  # Push to peers
            if published > 0:
                time.sleep(3)
                logging.info("Serving files (max 60s)...")
                gevent.joinall([file_server_thread], timeout=60)
                logging.info("Done.")
            else:
                logging.info("No peers found, sitePublish command only works if you already have visitors serving your site")
        else:
            # Already running, notify local client on new content
            logging.info("Sending siteReload")
            if config.fileserver_ip == "*":
                my_peer = Peer("127.0.0.1", config.fileserver_port)
            else:
                my_peer = Peer(config.fileserver_ip, config.fileserver_port)

            logging.info(my_peer.request("siteReload", {"site": site.address, "inner_path": inner_path}))
            logging.info("Sending sitePublish")
            logging.info(my_peer.request("sitePublish", {"site": site.address, "inner_path": inner_path, "diffs": diffs}))
            logging.info("Done.")
Example #8
    def testPing(self, file_server, site, site_temp):
        file_server.ip_incoming = {}  # Reset flood protection
        file_server.sites[site.address] = site
        client = FileServer("127.0.0.1", 1545)
        client.sites[site_temp.address] = site_temp
        site_temp.connection_server = client
        connection = client.getConnection("127.0.0.1", 1544)

        # Add file_server as peer to client
        peer_file_server = site_temp.addPeer("127.0.0.1", 1544)

        assert peer_file_server.ping()

        assert peer_file_server in site_temp.peers.values()
        peer_file_server.remove()
        assert peer_file_server not in site_temp.peers.values()

        connection.close()
        client.stop()
Example #9
    def testRangedFileRequest(self, file_server, site, site_temp):
        inner_path = self.createBigfile(site)

        file_server.sites[site.address] = site
        client = FileServer("127.0.0.1", 1545)
        client.sites[site_temp.address] = site_temp
        site_temp.connection_server = client
        connection = client.getConnection("127.0.0.1", 1544)

        # Add file_server as peer to client
        peer_file_server = site_temp.addPeer("127.0.0.1", 1544)

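        # Ranged request: "<inner_path>|<start>-<end>" asks for one specific byte range of the file (here 5 MB to 6 MB)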
        buff = peer_file_server.getFile(site_temp.address, "%s|%s-%s" % (inner_path, 5 * 1024 * 1024, 6 * 1024 * 1024))

        assert len(buff.getvalue()) == 1 * 1024 * 1024  # Correct block size
        assert buff.getvalue().startswith("Test524")  # Correct data
        buff.seek(0)
        assert site.content_manager.verifyPiece(inner_path, 5 * 1024 * 1024, buff)  # Correct hash

        connection.close()
        client.stop()
Example #10
	def sitePublish(self, address, peer_ip=None, peer_port=15441, inner_path="content.json"):
		global file_server
		from Site import Site
		from File import FileServer # We need fileserver to handle incoming file requests

		logging.info("Creating FileServer....")
		file_server = FileServer()
		file_server_thread = gevent.spawn(file_server.start, check_sites=False) # Don't check every site's integrity
		file_server.openport()
		site = file_server.sites[address]
		site.settings["serving"] = True # Serving the site even if its disabled
		if peer_ip: # Announce the specified ip
			site.addPeer(peer_ip, peer_port)
		else: # Just ask the tracker
			logging.info("Gathering peers from tracker")
			site.announce() # Gather peers
		site.publish(20, inner_path) # Push to 20 peers
		time.sleep(3)
		logging.info("Serving files...")
		gevent.joinall([file_server_thread])
		logging.info("Done.")
Example #11
    def testDownloadFile(self, file_server, site, site_temp):
        file_server.ip_incoming = {}  # Reset flood protection
        file_server.sites[site.address] = site
        client = FileServer("127.0.0.1", 1545)
        client.sites[site_temp.address] = site_temp
        site_temp.connection_server = client
        connection = client.getConnection("127.0.0.1", 1544)

        # Add file_server as peer to client
        peer_file_server = site_temp.addPeer("127.0.0.1", 1544)

        # Testing streamFile
        buff = peer_file_server.streamFile(site_temp.address, "content.json")
        assert "sign" in buff.getvalue()

        # Testing getFile
        buff = peer_file_server.getFile(site_temp.address, "content.json")
        assert "sign" in buff.getvalue()

        connection.close()
        client.stop()
Example #12
    def testFindOptional(self, file_server, site, site_temp):
        file_server.ip_incoming = {}  # Reset flood protection

        # Init source server
        site.connection_server = file_server
        file_server.sites[site.address] = site

        # Init full source server (has optional files)
        site_full = Site("1TeSTvb4w2PWE81S2rEELgmX2GCCExQGT")
        file_server_full = FileServer("127.0.0.1", 1546)
        site_full.connection_server = file_server_full
        gevent.spawn(lambda: ConnectionServer.start(file_server_full))
        time.sleep(0)  # Port opening
        file_server_full.sites[site_full.address] = site_full  # Add site
        site_full.storage.verifyFiles(quick_check=True)  # Check optional files
        site_full_peer = site.addPeer("127.0.0.1", 1546)  # Add it to source server
        assert site_full_peer.updateHashfield()  # Update hashfield

        # Init client server
        site_temp.connection_server = ConnectionServer("127.0.0.1", 1545)
        site_temp.announce = mock.MagicMock(return_value=True)  # Don't try to find peers from the net
        site_temp.addPeer("127.0.0.1", 1544)  # Add source server

        # Download normal files
        site_temp.download(blind_includes=True).join(timeout=5)

        # Download optional data/optional.txt
        optional_file_info = site_temp.content_manager.getFileInfo("data/optional.txt")
        assert not site_temp.storage.isFile("data/optional.txt")
        assert not site.content_manager.hashfield.hasHash(optional_file_info["sha512"])  # Source server doesn't know it has the file
        assert site_full_peer.hashfield.hasHash(optional_file_info["sha512"])  # Source full peer on the source server has the file
        assert site_full.content_manager.hashfield.hasHash(optional_file_info["sha512"])  # Source full server knows it has the file

        with Spy.Spy(FileRequest, "route") as requests:
            site_temp.needFile("data/optional.txt")
            print(requests)

        assert site_temp.storage.deleteFiles()
        file_server_full.stop()
        [connection.close() for connection in file_server.connections]
Example #13
    def testHashfieldExchange(self, file_server, site, site_temp):
        file_server.ip_incoming = {}  # Reset flood protection
        file_server.sites[site.address] = site
        client = FileServer("127.0.0.1", 1545)
        client.sites[site_temp.address] = site_temp
        site_temp.connection_server = client
        connection = client.getConnection("127.0.0.1", 1544)
        site.storage.verifyFiles(quick_check=True)  # Find what optional files we have

        # Add file_server as peer to client
        peer_file_server = site_temp.addPeer("127.0.0.1", 1544)

        # Check if hashfield has any files
        assert len(site.content_manager.hashfield) > 0

        # Testing hashfield sync
        assert len(peer_file_server.hashfield) == 0
        assert peer_file_server.getHashfield()
        assert len(peer_file_server.hashfield) > 0

        connection.close()
        client.stop()
Example #14
    def testPex(self, file_server, site, site_temp):
        file_server.sites[site.address] = site
        client = FileServer("127.0.0.1", 1545)
        client.sites[site_temp.address] = site_temp
        site_temp.connection_server = client
        connection = client.getConnection("127.0.0.1", 1544)

        # Add new fake peer to site
        fake_peer = site.addPeer("1.2.3.4", 11337, return_peer=True)
        # Add fake connection to it
        fake_peer.connection = Connection(file_server, "1.2.3.4", 11337)
        fake_peer.connection.last_recv_time = time.time()
        assert fake_peer in site.getConnectablePeers()

        # Add file_server as peer to client
        peer_file_server = site_temp.addPeer("127.0.0.1", 1544)

        assert "1.2.3.4:11337" not in site_temp.peers
        assert peer_file_server.pex()
        assert "1.2.3.4:11337" in site_temp.peers

        connection.close()
        client.stop()
Example #15
    def siteNeedFile(self, address, inner_path):
        from Site import Site
        from Site import SiteManager
        SiteManager.site_manager.load()

        address = getPublicKey(address)

        def checker():
            while 1:
                s = time.time()
                time.sleep(1)
                print "Switch time:", time.time() - s

        gevent.spawn(checker)

        logging.info("Opening a simple connection server")
        global file_server
        from File import FileServer
        file_server = FileServer("127.0.0.1", 1234)
        file_server_thread = gevent.spawn(file_server.start, check_sites=False)

        site = Site(address)
        site.announce()
        print(site.needFile(inner_path, update=True))
Example #16
    def testFindHash(self, file_server, site, site_temp):
        file_server.sites[site.address] = site
        client = FileServer(file_server.ip, 1545)
        client.sites[site_temp.address] = site_temp
        site_temp.connection_server = client

        # Add file_server as peer to client
        peer_file_server = site_temp.addPeer(file_server.ip, 1544)

        assert peer_file_server.findHashIds([1234]) == {}

        # Add fake peers with the required hashes
        fake_peer_1 = site.addPeer(file_server.ip_external, 1544)
        fake_peer_1.hashfield.append(1234)
        fake_peer_2 = site.addPeer("1.2.3.5", 1545)
        fake_peer_2.hashfield.append(1234)
        fake_peer_2.hashfield.append(1235)
        fake_peer_3 = site.addPeer("1.2.3.6", 1546)
        fake_peer_3.hashfield.append(1235)
        fake_peer_3.hashfield.append(1236)

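        # findHashIds maps each requested hash id to (ip, port) pairs of peers known to have it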
        res = peer_file_server.findHashIds([1234, 1235])
        assert sorted(res[1234]) == sorted([(file_server.ip_external, 1544),
                                            ("1.2.3.5", 1545)])
        assert sorted(res[1235]) == sorted([("1.2.3.5", 1545),
                                            ("1.2.3.6", 1546)])

        # Test that my own address gets added to the results
        site.content_manager.hashfield.append(1234)

        res = peer_file_server.findHashIds([1234, 1235])
        assert sorted(res[1234]) == sorted([(file_server.ip_external, 1544),
                                            ("1.2.3.5", 1545),
                                            (file_server.ip, 1544)])
        assert sorted(res[1235]) == sorted([("1.2.3.5", 1545),
                                            ("1.2.3.6", 1546)])
Example #17
    def testWorkerManagerPiecefieldDownload(self, file_server, site,
                                            site_temp):
        inner_path = self.createBigfile(site)

        server1 = file_server
        server1.sites[site.address] = site
        server2 = FileServer("127.0.0.1", 1545)
        server2.sites[site_temp.address] = site_temp
        site_temp.connection_server = server2
        sha512 = site.content_manager.getFileInfo(inner_path)["sha512"]

        # Create 10 fake peers, one for each piece
        for i in range(10):
            peer = Peer("127.0.0.1", 1544, site_temp, server2)
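            # Mark piece i as available in this peer's piecefield for this file's sha512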
            peer.piecefields[sha512][i] = "1"
            peer.updateHashfield = mock.MagicMock(return_value=False)
            peer.updatePiecefields = mock.MagicMock(return_value=False)
            peer.findHashIds = mock.MagicMock(return_value={"nope": []})
            peer.hashfield = site.content_manager.hashfield
            peer.has_hashfield = True
            peer.key = "Peer:%s" % i
            site_temp.peers["Peer:%s" % i] = peer

        site_temp.downloadContent("content.json", download_files=False)
        site_temp.needFile("data/optional.any.iso.piecemap.msgpack")

        with Spy.Spy(Peer, "getFile") as requests:
            for i in range(10):
                site_temp.needFile("%s|%s-%s" % (inner_path, i * 1024 * 1024,
                                                 (i + 1) * 1024 * 1024))

        assert len(requests) == 10
        for i in range(10):
            # Every piece should be requested from the peer that owns it
            assert requests[i][0] == site_temp.peers["Peer:%s" % i]
Example #18
    def testPex(self, file_server, site, site_temp):
        file_server.sites[site.address] = site
        client = FileServer(file_server.ip, 1545)
        client.sites = {site_temp.address: site_temp}
        site_temp.connection_server = client
        connection = client.getConnection(file_server.ip, 1544)

        # Add new fake peer to site
        fake_peer = site.addPeer(file_server.ip_external,
                                 11337,
                                 return_peer=True)
        # Add fake connection to it
        fake_peer.connection = Connection(file_server, file_server.ip_external,
                                          11337)
        fake_peer.connection.last_recv_time = time.time()
        assert fake_peer in site.getConnectablePeers()

        # Add file_server as peer to client
        peer_file_server = site_temp.addPeer(file_server.ip, 1544)

        assert "%s:11337" % file_server.ip_external not in site_temp.peers
        assert peer_file_server.pex()
        assert "%s:11337" % file_server.ip_external in site_temp.peers

        # Should not exchange private peers from local network
        fake_peer_private = site.addPeer("192.168.0.1",
                                         11337,
                                         return_peer=True)
        assert fake_peer_private not in site.getConnectablePeers(
            allow_private=False)
        fake_peer_private.connection = Connection(file_server, "192.168.0.1",
                                                  11337)
        fake_peer_private.connection.last_recv_time = time.time()

        assert "192.168.0.1:11337" not in site_temp.peers
        assert not peer_file_server.pex()
        assert "192.168.0.1:11337" not in site_temp.peers

        connection.close()
        client.stop()
Example #19
    def testDownloadFile(self, file_server, site, site_temp):
        file_server.sites[site.address] = site
        client = FileServer(file_server.ip, 1545)
        client.sites = {site_temp.address: site_temp}
        site_temp.connection_server = client
        connection = client.getConnection(file_server.ip, 1544)

        # Add file_server as peer to client
        peer_file_server = site_temp.addPeer(file_server.ip, 1544)

        # Testing streamFile
        buff = peer_file_server.getFile(site_temp.address,
                                        "content.json",
                                        streaming=True)
        assert b"sign" in buff.getvalue()

        # Testing getFile
        buff = peer_file_server.getFile(site_temp.address, "content.json")
        assert b"sign" in buff.getvalue()

        connection.close()
        client.stop()
Example #20
    def testArchivedBeforeDownload(self, file_server, site, site_temp):
        # Init source server
        site.connection_server = file_server
        file_server.sites[site.address] = site

        # Init client server
        client = FileServer(file_server.ip, 1545)
        client.sites[site_temp.address] = site_temp
        site_temp.connection_server = client

        # Download normally
        site_temp.addPeer(file_server.ip, 1544)
        site_temp.download(blind_includes=True).join(timeout=5)
        bad_files = site_temp.storage.verifyFiles(
            quick_check=True)["bad_files"]

        assert not bad_files
        assert "data/users/1C5sgvWaSgfaTpV5kjBCnCiKtENNMYo69q/content.json" in site_temp.content_manager.contents
        assert site_temp.storage.isFile(
            "data/users/1C5sgvWaSgfaTpV5kjBCnCiKtENNMYo69q/content.json")
        assert len(list(site_temp.storage.query("SELECT * FROM comment"))) == 2

        # Add archived data
        assert not "archived_before" in site.content_manager.contents[
            "data/users/content.json"]["user_contents"]
        assert not site.content_manager.isArchived(
            "data/users/1C5sgvWaSgfaTpV5kjBCnCiKtENNMYo69q/content.json",
            time.time() - 1)

        content_modification_time = site.content_manager.contents[
            "data/users/1C5sgvWaSgfaTpV5kjBCnCiKtENNMYo69q/content.json"][
                "modified"]
        site.content_manager.contents["data/users/content.json"][
            "user_contents"]["archived_before"] = content_modification_time
        site.content_manager.sign(
            "data/users/content.json",
            privatekey="5KUh3PvNm5HUWoCfSUfcYvfQ2g3PrRNJWr6Q9eqdBGu23mtMntv")

        date_archived = site.content_manager.contents[
            "data/users/content.json"]["user_contents"]["archived_before"]
        assert site.content_manager.isArchived(
            "data/users/1C5sgvWaSgfaTpV5kjBCnCiKtENNMYo69q/content.json",
            date_archived - 1)
        assert site.content_manager.isArchived(
            "data/users/1C5sgvWaSgfaTpV5kjBCnCiKtENNMYo69q/content.json",
            date_archived)
        assert not site.content_manager.isArchived(
            "data/users/1C5sgvWaSgfaTpV5kjBCnCiKtENNMYo69q/content.json",
            date_archived + 1)  # Allow user to update archived data later

        # Push archived update
        assert not "archived_before" in site_temp.content_manager.contents[
            "data/users/content.json"]["user_contents"]
        site.publish()
        time.sleep(0.1)
        site_temp.download(blind_includes=True).join(
            timeout=5)  # Wait for download

        # The archived content should disappear from remote client
        assert "archived_before" in site_temp.content_manager.contents[
            "data/users/content.json"]["user_contents"]
        assert "data/users/1C5sgvWaSgfaTpV5kjBCnCiKtENNMYo69q/content.json" not in site_temp.content_manager.contents
        assert not site_temp.storage.isDir(
            "data/users/1C5sgvWaSgfaTpV5kjBCnCiKtENNMYo69q")
        assert len(list(site_temp.storage.query("SELECT * FROM comment"))) == 1
        assert len(
            list(
                site_temp.storage.query(
                    "SELECT * FROM json WHERE directory LIKE '%1C5sgvWaSgfaTpV5kjBCnCiKtENNMYo69q%'"
                ))) == 0

        assert site_temp.storage.deleteFiles()
        [connection.close() for connection in file_server.connections]
Example #21
    def sitePublish(self,
                    address,
                    peer_ip=None,
                    peer_port=15441,
                    inner_path="content.json",
                    diffs={}):
        global file_server
        from Site import Site
        from Site import SiteManager
        from File import FileServer  # We need fileserver to handle incoming file requests
        from Peer import Peer
        SiteManager.site_manager.load()

        logging.info("Loading site...")
        site = Site(address, allow_create=False)
        site.settings[
            "serving"] = True  # Serve the site even if it's disabled

        logging.info("Creating FileServer....")
        file_server = FileServer()
        site.connection_server = file_server
        file_server_thread = gevent.spawn(
            file_server.start,
            check_sites=False)  # Don't check every site's integrity
        time.sleep(0.001)

        if not file_server_thread.ready():
            # Started fileserver
            file_server.openport()
            if peer_ip:  # Announce the specified ip
                site.addPeer(peer_ip, peer_port)
            else:  # Just ask the tracker
                logging.info("Gathering peers from tracker")
                site.announce()  # Gather peers
            published = site.publish(5, inner_path,
                                     diffs=diffs)  # Push to peers
            if published > 0:
                time.sleep(3)
                logging.info("Serving files (max 60s)...")
                gevent.joinall([file_server_thread], timeout=60)
                logging.info("Done.")
            else:
                logging.info(
                    "No peers found, sitePublish command only works if you already have visitors serving your site"
                )
        else:
            # Already running, notify local client on new content
            logging.info("Sending siteReload")
            my_peer = Peer("127.0.0.1", config.fileserver_port)
            logging.info(
                my_peer.request("siteReload", {
                    "site": site.address,
                    "inner_path": inner_path
                }))
            logging.info("Sending sitePublish")
            logging.info(
                my_peer.request("sitePublish", {
                    "site": site.address,
                    "inner_path": inner_path,
                    "diffs": diffs
                }))
            logging.info("Done.")
Example #22
    def testFileRename(self, file_server, site, site_temp):
        inner_path = self.createBigfile(site)

        # Init source server
        site.connection_server = file_server
        file_server.sites[site.address] = site

        # Init client server
        site_temp.connection_server = FileServer(file_server.ip, 1545)
        site_temp.connection_server.sites[site_temp.address] = site_temp
        site_temp.addPeer(file_server.ip, 1544)

        # Download site
        site_temp.download(blind_includes=True).join(timeout=5)

        with Spy.Spy(FileRequest, "route") as requests:
            site_temp.needFile("%s|%s-%s" %
                               (inner_path, 0, 1 * self.piece_size))

        assert len([req for req in requests if req[1] == "streamFile"
                    ]) == 2  # 1 piece + piecemap

        # Rename the file
        inner_path_new = inner_path.replace(".iso", "-new.iso")
        site.storage.rename(inner_path, inner_path_new)
        site.storage.delete("data/optional.any.iso.piecemap.msgpack")
        assert site.content_manager.sign("content.json",
                                         self.privatekey,
                                         remove_missing_optional=True)

        files_optional = site.content_manager.contents["content.json"][
            "files_optional"].keys()

        assert "data/optional.any-new.iso.piecemap.msgpack" in files_optional
        assert "data/optional.any.iso.piecemap.msgpack" not in files_optional
        assert "data/optional.any.iso" not in files_optional

        with Spy.Spy(FileRequest, "route") as requests:
            site.publish()
            time.sleep(0.1)
            site_temp.download(blind_includes=True).join(
                timeout=5)  # Wait for download

            assert len([req[1] for req in requests
                        if req[1] == "streamFile"]) == 0

            with site_temp.storage.openBigfile(inner_path_new,
                                               prebuffer=0) as f:
                f.read(1024)

                # First piece already downloaded
                assert [req for req in requests
                        if req[1] == "streamFile"] == []

                # Second piece needs to be downloaded + changed piecemap
                f.seek(self.piece_size)
                f.read(1024)
                assert [
                    req[3]["inner_path"] for req in requests
                    if req[1] == "streamFile"
                ] == [inner_path_new + ".piecemap.msgpack", inner_path_new]
Example #23
    def testArchivedDownload(self, file_server, site, site_temp):
        file_server.ip_incoming = {}  # Reset flood protection

        # Init source server
        site.connection_server = file_server
        file_server.sites[site.address] = site

        # Init client server
        client = FileServer("127.0.0.1", 1545)
        client.sites[site_temp.address] = site_temp
        site_temp.connection_server = client

        # Don't try to find peers from the net
        site.announce = mock.MagicMock(return_value=True)
        site_temp.announce = mock.MagicMock(return_value=True)

        # Download normally
        site_temp.addPeer("127.0.0.1", 1544)
        site_temp.download(blind_includes=True).join(timeout=5)
        bad_files = site_temp.storage.verifyFiles(quick_check=True)
        assert not bad_files
        assert "data/users/1C5sgvWaSgfaTpV5kjBCnCiKtENNMYo69q/content.json" in site_temp.content_manager.contents
        assert site_temp.storage.isFile(
            "data/users/1C5sgvWaSgfaTpV5kjBCnCiKtENNMYo69q/content.json")
        assert len(list(site_temp.storage.query("SELECT * FROM comment"))) == 2

        # Add archived data
        assert not "archived" in site.content_manager.contents[
            "data/users/content.json"]["user_contents"]
        assert not site.content_manager.isArchived(
            "data/users/1C5sgvWaSgfaTpV5kjBCnCiKtENNMYo69q/content.json",
            time.time() - 1)

        site.content_manager.contents["data/users/content.json"][
            "user_contents"]["archived"] = {
                "1C5sgvWaSgfaTpV5kjBCnCiKtENNMYo69q": time.time()
            }
        site.content_manager.sign(
            "data/users/content.json",
            privatekey="5KUh3PvNm5HUWoCfSUfcYvfQ2g3PrRNJWr6Q9eqdBGu23mtMntv")

        date_archived = site.content_manager.contents[
            "data/users/content.json"]["user_contents"]["archived"][
                "1C5sgvWaSgfaTpV5kjBCnCiKtENNMYo69q"]
        assert site.content_manager.isArchived(
            "data/users/1C5sgvWaSgfaTpV5kjBCnCiKtENNMYo69q/content.json",
            date_archived - 1)
        assert site.content_manager.isArchived(
            "data/users/1C5sgvWaSgfaTpV5kjBCnCiKtENNMYo69q/content.json",
            date_archived)
        assert not site.content_manager.isArchived(
            "data/users/1C5sgvWaSgfaTpV5kjBCnCiKtENNMYo69q/content.json",
            date_archived + 1)  # Allow user to update archived data later

        # Push archived update
        assert not "archived" in site_temp.content_manager.contents[
            "data/users/content.json"]["user_contents"]
        site.publish()
        site_temp.download(blind_includes=True).join(
            timeout=5)  # Wait for download

        # The archived content should disappear from remote client
        assert "archived" in site_temp.content_manager.contents[
            "data/users/content.json"]["user_contents"]
        assert "data/users/1C5sgvWaSgfaTpV5kjBCnCiKtENNMYo69q/content.json" not in site_temp.content_manager.contents
        assert not site_temp.storage.isDir(
            "data/users/1C5sgvWaSgfaTpV5kjBCnCiKtENNMYo69q")
        assert len(list(site_temp.storage.query("SELECT * FROM comment"))) == 1
        assert len(
            list(
                site_temp.storage.query(
                    "SELECT * FROM json WHERE directory LIKE '%1C5sgvWaSgfaTpV5kjBCnCiKtENNMYo69q%'"
                ))) == 0

        assert site_temp.storage.deleteFiles()
        [connection.close() for connection in file_server.connections]
Example #24
    def testUpdate(self, file_server, site, site_temp):
        file_server.ip_incoming = {}  # Reset flood protection

        assert site.storage.directory == config.data_dir + "/" + site.address
        assert site_temp.storage.directory == config.data_dir + "-temp/" + site.address

        # Init source server
        site.connection_server = file_server
        file_server.sites[site.address] = site

        # Init client server
        client = FileServer("127.0.0.1", 1545)
        client.sites[site_temp.address] = site_temp
        site_temp.connection_server = client

        # Don't try to find peers from the net
        site.announce = mock.MagicMock(return_value=True)
        site_temp.announce = mock.MagicMock(return_value=True)

        # Connect peers
        site_temp.addPeer("127.0.0.1", 1544)

        # Download site from site to site_temp
        site_temp.download(blind_includes=True).join(timeout=5)

        # Update file
        data_original = site.storage.open("data/data.json").read()
        data_new = data_original.replace('"ZeroBlog"', '"UpdatedZeroBlog"')
        assert data_original != data_new

        site.storage.open("data/data.json", "wb").write(data_new)

        assert site.storage.open("data/data.json").read() == data_new
        assert site_temp.storage.open("data/data.json").read() == data_original

        # Publish without patch
        with Spy.Spy(FileRequest, "route") as requests:
            site.content_manager.sign(
                "content.json",
                privatekey="5KUh3PvNm5HUWoCfSUfcYvfQ2g3PrRNJWr6Q9eqdBGu23mtMntv"
            )
            site.publish()
            site_temp.download(blind_includes=True).join(timeout=5)
            assert len([
                request for request in requests
                if request[0] in ("getFile", "streamFile")
            ]) == 1

        assert site_temp.storage.open("data/data.json").read() == data_new

        # Close connection to avoid update spam limit
        list(site.peers.values())[0].remove()
        site.addPeer("127.0.0.1", 1545)
        list(site_temp.peers.values())[0].ping()  # Connect back
        time.sleep(0.1)

        # Update with patch
        data_new = data_original.replace('"ZeroBlog"', '"PatchedZeroBlog"')
        assert data_original != data_new

        site.storage.open("data/data.json-new", "wb").write(data_new)

        assert site.storage.open("data/data.json-new").read() == data_new
        assert site_temp.storage.open("data/data.json").read() != data_new

        # Generate diff
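        # getDiffs returns per-file patch operations of the form ('=', n), ('-', n) and ('+', [new lines]), as asserted below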
        diffs = site.content_manager.getDiffs("content.json")
        assert not site.storage.isFile(
            "data/data.json-new")  # New data file removed
        assert site.storage.open(
            "data/data.json").read() == data_new  # -new postfix removed
        assert "data/data.json" in diffs
        assert diffs["data/data.json"] == [
            ('=', 2), ('-', 29), ('+', ['\t"title": "PatchedZeroBlog",\n']),
            ('=', 31102)
        ]

        # Publish with patch
        with Spy.Spy(FileRequest, "route") as requests:
            site.content_manager.sign(
                "content.json",
                privatekey="5KUh3PvNm5HUWoCfSUfcYvfQ2g3PrRNJWr6Q9eqdBGu23mtMntv"
            )
            site.publish(diffs=diffs)
            site_temp.download(blind_includes=True).join(timeout=5)
            assert len([
                request for request in requests
                if request[0] in ("getFile", "streamFile")
            ]) == 0

        assert site_temp.storage.open("data/data.json").read() == data_new

        assert site_temp.storage.deleteFiles()
        [connection.close() for connection in file_server.connections]
Example #26
    def testBigUpdate(self, file_server, site, site_temp):
        # Init source server
        site.connection_server = file_server
        file_server.sites[site.address] = site

        # Init client server
        client = FileServer(file_server.ip, 1545)
        client.sites = {site_temp.address: site_temp}
        site_temp.connection_server = client

        # Connect peers
        site_temp.addPeer(file_server.ip, 1544)

        # Download site from site to site_temp
        assert site_temp.download(blind_includes=True,
                                  retry_bad_files=False).get(timeout=10)
        assert list(site_temp.bad_files.keys()) == [
            "data/users/1J6UrZMkarjVg5ax9W4qThir3BFUikbW6C/content.json"
        ]

        # Update file
        data_original = site.storage.open("data/data.json").read()
        data_new = data_original.replace(b'"ZeroBlog"', b'"PatchedZeroBlog"')
        assert data_original != data_new

        site.storage.open("data/data.json-new", "wb").write(data_new)

        assert site.storage.open("data/data.json-new").read() == data_new
        assert site_temp.storage.open("data/data.json").read() != data_new

        # Generate diff
        diffs = site.content_manager.getDiffs("content.json")
        assert not site.storage.isFile(
            "data/data.json-new")  # New data file removed
        assert site.storage.open(
            "data/data.json").read() == data_new  # -new postfix removed
        assert "data/data.json" in diffs

        content_json = site.storage.loadJson("content.json")
        content_json["description"] = "BigZeroBlog" * 1024 * 10
        site.storage.writeJson("content.json", content_json)
        site.content_manager.loadContent("content.json", force=True)

        # Publish with patch
        site.log.info("Publish new data.json with patch")
        with Spy.Spy(FileRequest, "route") as requests:
            site.content_manager.sign(
                "content.json",
                privatekey="5KUh3PvNm5HUWoCfSUfcYvfQ2g3PrRNJWr6Q9eqdBGu23mtMntv"
            )
            assert site.storage.getSize(
                "content.json") > 10 * 1024  # Make it a big content.json
            site.publish(diffs=diffs)
            time.sleep(0.1)
            assert site_temp.download(blind_includes=True,
                                      retry_bad_files=False).get(timeout=10)
            file_requests = [
                request for request in requests
                if request[1] in ("getFile", "streamFile")
            ]
            assert len(file_requests) == 1

        assert site_temp.storage.open("data/data.json").read() == data_new
        assert site_temp.storage.open(
            "content.json").read() == site.storage.open("content.json").read()
Example #27
    def testFindOptional(self, file_server, site, site_temp):
        # Init source server
        site.connection_server = file_server
        file_server.sites[site.address] = site

        # Init full source server (has optional files)
        site_full = Site("1TeSTvb4w2PWE81S2rEELgmX2GCCExQGT")
        file_server_full = FileServer(file_server.ip, 1546)
        site_full.connection_server = file_server_full

        def listen():
            ConnectionServer.start(file_server_full)
            ConnectionServer.listen(file_server_full)

        gevent.spawn(listen)
        time.sleep(0.001)  # Port opening
        file_server_full.sites[site_full.address] = site_full  # Add site
        site_full.storage.verifyFiles(quick_check=True)  # Check optional files
        site_full_peer = site.addPeer(file_server.ip,
                                      1546)  # Add it to source server
        hashfield = site_full_peer.updateHashfield()  # Update hashfield
        assert len(site_full.content_manager.hashfield) == 8
        assert hashfield
        assert site_full.storage.isFile("data/optional.txt")
        assert site_full.storage.isFile(
            "data/users/1CjfbrbwtP8Y2QjPy12vpTATkUT7oSiPQ9/peanut-butter-jelly-time.gif"
        )
        assert len(site_full_peer.hashfield) == 8

        # Remove hashes from source server
        for hash in list(site.content_manager.hashfield):
            site.content_manager.hashfield.remove(hash)

        # Init client server
        site_temp.connection_server = ConnectionServer(file_server.ip, 1545)
        site_temp.addPeer(file_server.ip, 1544)  # Add source server

        # Download normal files
        site_temp.log.info("Start Downloading site")
        site_temp.download(blind_includes=True).join(timeout=5)

        # Download optional data/optional.txt
        optional_file_info = site_temp.content_manager.getFileInfo(
            "data/optional.txt")
        optional_file_info2 = site_temp.content_manager.getFileInfo(
            "data/users/1CjfbrbwtP8Y2QjPy12vpTATkUT7oSiPQ9/peanut-butter-jelly-time.gif"
        )
        assert not site_temp.storage.isFile("data/optional.txt")
        assert not site_temp.storage.isFile(
            "data/users/1CjfbrbwtP8Y2QjPy12vpTATkUT7oSiPQ9/peanut-butter-jelly-time.gif"
        )
        assert not site.content_manager.hashfield.hasHash(
            optional_file_info["sha512"]
        )  # Source server doesn't know it has the file
        assert not site.content_manager.hashfield.hasHash(
            optional_file_info2["sha512"]
        )  # Source server doesn't know it has the file
        assert site_full_peer.hashfield.hasHash(
            optional_file_info["sha512"]
        )  # Source full peer on the source server has the file
        assert site_full_peer.hashfield.hasHash(
            optional_file_info2["sha512"]
        )  # Source full peer on the source server has the file
        assert site_full.content_manager.hashfield.hasHash(
            optional_file_info["sha512"])  # Source full server knows it has the file
        assert site_full.content_manager.hashfield.hasHash(
            optional_file_info2["sha512"]
        )  # Source full server knows it has the file

        site_temp.log.info("Request optional files")
        with Spy.Spy(FileRequest, "route") as requests:
            # Request 2 files at the same time
            threads = []
            threads.append(
                site_temp.needFile("data/optional.txt", blocking=False))
            threads.append(
                site_temp.needFile(
                    "data/users/1CjfbrbwtP8Y2QjPy12vpTATkUT7oSiPQ9/peanut-butter-jelly-time.gif",
                    blocking=False))
            gevent.joinall(threads)

            assert len([
                request for request in requests if request[1] == "findHashIds"
            ]) == 1  # findHashIds should be called only once

        assert site_temp.storage.isFile("data/optional.txt")
        assert site_temp.storage.isFile(
            "data/users/1CjfbrbwtP8Y2QjPy12vpTATkUT7oSiPQ9/peanut-butter-jelly-time.gif"
        )

        assert site_temp.storage.deleteFiles()
        file_server_full.stop()
        [connection.close() for connection in file_server.connections]
        site_full.content_manager.contents.db.close()
Example #28
    def testRenameOptional(self, file_server, site, site_temp):
        assert site.storage.directory == config.data_dir + "/" + site.address
        assert site_temp.storage.directory == config.data_dir + "-temp/" + site.address

        # Init source server
        site.connection_server = file_server
        file_server.sites[site.address] = site

        # Init client server
        client = FileServer(file_server.ip, 1545)
        client.sites[site_temp.address] = site_temp
        site_temp.connection_server = client
        site_temp.announce = mock.MagicMock(
            return_value=True)  # Don't try to find peers from the net

        site_temp.addPeer(file_server.ip, 1544)

        site_temp.download(blind_includes=True).join(timeout=5)

        assert site_temp.settings["optional_downloaded"] == 0

        site_temp.needFile("data/optional.txt")

        assert site_temp.settings["optional_downloaded"] > 0
        settings_before = site_temp.settings
        hashfield_before = site_temp.content_manager.hashfield.tobytes()

        # Rename optional file
        os.rename(site.storage.getPath("data/optional.txt"),
                  site.storage.getPath("data/optional-new.txt"))

        site.content_manager.sign(
            "content.json",
            privatekey="5KUh3PvNm5HUWoCfSUfcYvfQ2g3PrRNJWr6Q9eqdBGu23mtMntv",
            remove_missing_optional=True)

        content = site.storage.loadJson("content.json")
        assert "data/optional-new.txt" in content["files_optional"]
        assert "data/optional.txt" not in content["files_optional"]
        assert not site_temp.storage.isFile("data/optional-new.txt")
        assert site_temp.storage.isFile("data/optional.txt")

        with Spy.Spy(FileRequest, "route") as requests:
            site.publish()
            time.sleep(0.1)
            site_temp.download(blind_includes=True).join(
                timeout=5)  # Wait for download
            assert "streamFile" not in [req[1] for req in requests]

        content = site_temp.storage.loadJson("content.json")
        assert "data/optional-new.txt" in content["files_optional"]
        assert "data/optional.txt" not in content["files_optional"]
        assert site_temp.storage.isFile("data/optional-new.txt")
        assert not site_temp.storage.isFile("data/optional.txt")

        assert site_temp.settings["size"] == settings_before["size"]
        assert site_temp.settings["size_optional"] == settings_before[
            "size_optional"]
        assert site_temp.settings["optional_downloaded"] == settings_before[
            "optional_downloaded"]
        assert site_temp.content_manager.hashfield.tobytes(
        ) == hashfield_before

        assert site_temp.storage.deleteFiles()
        [connection.close() for connection in file_server.connections]