def testDiscoverRequest(self, announcer, announcer_remote):
    assert len(announcer_remote.known_peers) == 0
    with Spy.Spy(announcer_remote, "handleMessage") as responses:
        announcer_remote.broadcast({"cmd": "discoverRequest", "params": {}}, port=announcer.listen_port)
        time.sleep(0.1)

    response_cmds = [response[1]["cmd"] for response in responses]
    assert response_cmds == ["discoverResponse", "siteListResponse"]
    assert len(responses[-1][1]["params"]["sites"]) == 1

    # It should only request siteList if sites_changed value is different from last response
    with Spy.Spy(announcer_remote, "handleMessage") as responses:
        announcer_remote.broadcast({"cmd": "discoverRequest", "params": {}}, port=announcer.listen_port)
        time.sleep(0.1)

    response_cmds = [response[1]["cmd"] for response in responses]
    assert response_cmds == ["discoverResponse"]
def testDownloadAllPieces(self, file_server, site, site_temp):
    inner_path = self.createBigfile(site)

    # Init source server
    site.connection_server = file_server
    file_server.sites[site.address] = site

    # Init client server
    client = ConnectionServer(file_server.ip, 1545)
    site_temp.connection_server = client
    site_temp.addPeer(file_server.ip, 1544)

    # Download site
    site_temp.download(blind_includes=True, retry_bad_files=False).join(timeout=10)

    # Open virtual file
    assert not site_temp.storage.isFile(inner_path)

    with Spy.Spy(FileRequest, "route") as requests:
        site_temp.needFile("%s|all" % inner_path)

    assert len(requests) == 12  # piecemap.msgpack, getPiecefields, 10 x piece

    # Don't re-download pieces we already have
    with Spy.Spy(FileRequest, "route") as requests:
        site_temp.needFile("%s|all" % inner_path)

    assert len(requests) == 0
def testRecentPeerList(self, announcer, announcer_remote, site):
    assert len(site.peers_recent) == 0
    assert len(site.peers) == 0

    with Spy.Spy(announcer, "handleMessage") as responses:
        announcer.broadcast({"cmd": "discoverRequest", "params": {}}, port=announcer_remote.listen_port)
        time.sleep(0.1)

    assert [response[1]["cmd"] for response in responses] == ["discoverResponse", "siteListResponse"]
    assert len(site.peers_recent) == 1
    assert len(site.peers) == 1

    # It should update peer without siteListResponse
    last_time_found = list(site.peers.values())[0].time_found
    site.peers_recent.clear()
    with Spy.Spy(announcer, "handleMessage") as responses:
        announcer.broadcast({"cmd": "discoverRequest", "params": {}}, port=announcer_remote.listen_port)
        time.sleep(0.1)

    assert [response[1]["cmd"] for response in responses] == ["discoverResponse"]
    assert len(site.peers_recent) == 1
    assert list(site.peers.values())[0].time_found > last_time_found
def testOpenBigfile(self, file_server, site, site_temp):
    inner_path = self.createBigfile(site)

    # Init source server
    site.connection_server = file_server
    file_server.sites[site.address] = site

    # Init client server
    client = ConnectionServer(file_server.ip, 1545)
    site_temp.connection_server = client
    site_temp.addPeer(file_server.ip, 1544)

    # Download site
    site_temp.download(blind_includes=True).join(timeout=5)

    # Open virtual file
    assert not site_temp.storage.isFile(inner_path)

    with site_temp.storage.openBigfile(inner_path) as f:
        with Spy.Spy(FileRequest, "route") as requests:
            f.seek(5 * 1024 * 1024)
            assert f.read(7) == "Test524"

            f.seek(9 * 1024 * 1024)
            assert f.read(7) == "943---T"

        assert len(requests) == 4  # 1x piecemap + 1x getPiecefields + 2x for pieces
        assert set(site_temp.content_manager.hashfield) == set([18343, 30970])

        assert site_temp.storage.piecefields[f.sha512].tostring() == "0000010001"
        assert f.sha512 in site_temp.getSettingsCache()["piecefields"]

        # Test requesting already downloaded pieces
        with Spy.Spy(FileRequest, "route") as requests:
            f.seek(5 * 1024 * 1024)
            assert f.read(7) == "Test524"

        assert len(requests) == 0

        # Test reads that overflow into multiple blocks
        with Spy.Spy(FileRequest, "route") as requests:
            f.seek(5 * 1024 * 1024)  # We already have this block
            data = f.read(1024 * 1024 * 3)  # The read overflows into the 6th and 7th blocks
            assert data.startswith("Test524")
            assert data.endswith("Test838-")
            assert "\0" not in data  # No null bytes allowed

        assert len(requests) == 2  # Two block downloads

        # Test out-of-range requests
        f.seek(5 * 1024 * 1024)
        data = f.read(1024 * 1024 * 30)
        assert len(data) == 10 * 1000 * 1000 - (5 * 1024 * 1024)

        f.seek(30 * 1024 * 1024)
        data = f.read(1024 * 1024 * 30)
        assert len(data) == 0
def testWorkerManagerPiecefieldDeny(self, file_server, site, site_temp):
    inner_path = self.createBigfile(site)

    server1 = file_server
    server1.sites[site.address] = site
    server2 = FileServer(file_server.ip, 1545)
    server2.sites[site_temp.address] = site_temp
    site_temp.connection_server = server2

    # Add file_server as peer to client
    server2_peer1 = site_temp.addPeer(file_server.ip, 1544)  # Working

    site_temp.downloadContent("content.json", download_files=False)
    site_temp.needFile("data/optional.any.iso.piecemap.msgpack")

    # Add fake peers with optional files downloaded
    for i in range(5):
        fake_peer = site_temp.addPeer("127.0.1.%s" % i, 1544)
        fake_peer.hashfield = site.content_manager.hashfield
        fake_peer.has_hashfield = True

    with Spy.Spy(WorkerManager, "addWorker") as requests:
        site_temp.needFile("%s|%s-%s" % (inner_path, 5 * 1024 * 1024, 6 * 1024 * 1024))
        site_temp.needFile("%s|%s-%s" % (inner_path, 6 * 1024 * 1024, 7 * 1024 * 1024))

    # It should only request parts from peer1, as the other peers don't have the requested parts in their piecefields
    assert len([request[1] for request in requests if request[1] != server2_peer1]) == 0
def testPrebuffer(self, file_server, site, site_temp):
    inner_path = self.createBigfile(site)

    # Init source server
    site.connection_server = file_server
    file_server.sites[site.address] = site

    # Init client server
    client = ConnectionServer(file_server.ip, 1545)
    site_temp.connection_server = client
    site_temp.addPeer(file_server.ip, 1544)

    # Download site
    site_temp.download(blind_includes=True).join(timeout=5)

    # Open virtual file
    assert not site_temp.storage.isFile(inner_path)

    with site_temp.storage.openBigfile(inner_path, prebuffer=1024 * 1024 * 2) as f:
        with Spy.Spy(FileRequest, "route") as requests:
            f.seek(5 * 1024 * 1024)
            assert f.read(7) == "Test524"
        # assert len(requests) == 3  # 1x piecemap + 1x getPiecefields + 1x for pieces
        assert len([task for task in site_temp.worker_manager.tasks if task["inner_path"].startswith(inner_path)]) == 2

        time.sleep(0.5)  # Wait for the prebuffer download

        sha512 = site.content_manager.getFileInfo(inner_path)["sha512"]
        assert site_temp.storage.piecefields[sha512].tostring() == "0000011100"

        # No prebuffer beyond the end of the file
        f.seek(9 * 1024 * 1024)
        assert "\0" not in f.read(7)

        assert len([task for task in site_temp.worker_manager.tasks if task["inner_path"].startswith(inner_path)]) == 0
def testWorkerManagerPiecefieldDownload(self, file_server, site, site_temp):
    inner_path = self.createBigfile(site)

    server1 = file_server
    server1.sites[site.address] = site
    server2 = FileServer(file_server.ip, 1545)
    server2.sites[site_temp.address] = site_temp
    site_temp.connection_server = server2
    sha512 = site.content_manager.getFileInfo(inner_path)["sha512"]

    # Create 10 fake peers, one for each piece
    for i in range(10):
        peer = Peer(file_server.ip, 1544, site_temp, server2)
        peer.piecefields[sha512][i] = "1"
        peer.updateHashfield = mock.MagicMock(return_value=False)
        peer.updatePiecefields = mock.MagicMock(return_value=False)
        peer.findHashIds = mock.MagicMock(return_value={"nope": []})
        peer.hashfield = site.content_manager.hashfield
        peer.has_hashfield = True
        peer.key = "Peer:%s" % i
        site_temp.peers["Peer:%s" % i] = peer

    site_temp.downloadContent("content.json", download_files=False)
    site_temp.needFile("data/optional.any.iso.piecemap.msgpack")

    with Spy.Spy(Peer, "getFile") as requests:
        for i in range(10):
            site_temp.needFile("%s|%s-%s" % (inner_path, i * 1024 * 1024, (i + 1) * 1024 * 1024))

    assert len(requests) == 10
    for i in range(10):
        assert requests[i][0] == site_temp.peers["Peer:%s" % i]  # Every part should be requested from the peer that owns that piece
def testFileRename(self, file_server, site, site_temp):
    inner_path = self.createBigfile(site)

    # Init source server
    site.connection_server = file_server
    file_server.sites[site.address] = site

    # Init client server
    site_temp.connection_server = FileServer(file_server.ip, 1545)
    site_temp.connection_server.sites[site_temp.address] = site_temp
    site_temp.addPeer(file_server.ip, 1544)

    # Download site
    site_temp.download(blind_includes=True).join(timeout=5)

    with Spy.Spy(FileRequest, "route") as requests:
        site_temp.needFile("%s|%s-%s" % (inner_path, 0, 1 * self.piece_size))

    assert len([req for req in requests if req[1] == "streamFile"]) == 2  # 1 piece + piecemap

    # Rename the file
    inner_path_new = inner_path.replace(".iso", "-new.iso")
    site.storage.rename(inner_path, inner_path_new)
    site.storage.delete("data/optional.any.iso.piecemap.msgpack")
    assert site.content_manager.sign("content.json", self.privatekey, remove_missing_optional=True)

    files_optional = site.content_manager.contents["content.json"]["files_optional"].keys()

    assert "data/optional.any-new.iso.piecemap.msgpack" in files_optional
    assert "data/optional.any.iso.piecemap.msgpack" not in files_optional
    assert "data/optional.any.iso" not in files_optional

    with Spy.Spy(FileRequest, "route") as requests:
        site.publish()
        time.sleep(0.1)
        site_temp.download(blind_includes=True).join(timeout=5)  # Wait for download
        assert len([req[1] for req in requests if req[1] == "streamFile"]) == 0

        with site_temp.storage.openBigfile(inner_path_new, prebuffer=0) as f:
            f.read(1024)  # First piece already downloaded
            assert [req for req in requests if req[1] == "streamFile"] == []

            # Second piece needs to be downloaded + changed piecemap
            f.seek(self.piece_size)
            f.read(1024)
            assert [req[3]["inner_path"] for req in requests if req[1] == "streamFile"] == [inner_path_new + ".piecemap.msgpack", inner_path_new]