def testWorkerManagerPiecefieldDeny(self, file_server, site, site_temp):
    inner_path = self.createBigfile(site)

    server1 = file_server
    server1.sites[site.address] = site
    server2 = FileServer(file_server.ip, 1545)
    server2.sites[site_temp.address] = site_temp
    site_temp.connection_server = server2

    # Add file_server as peer to client
    server2_peer1 = site_temp.addPeer(file_server.ip, 1544)  # Working

    site_temp.downloadContent("content.json", download_files=False)
    site_temp.needFile("data/optional.any.iso.piecemap.msgpack")

    # Add fake peers with optional files downloaded
    for i in range(5):
        fake_peer = site_temp.addPeer("127.0.1.%s" % i, 1544)
        fake_peer.hashfield = site.content_manager.hashfield
        fake_peer.has_hashfield = True

    with Spy.Spy(WorkerManager, "addWorker") as requests:
        site_temp.needFile("%s|%s-%s" % (inner_path, 5 * 1024 * 1024, 6 * 1024 * 1024))
        site_temp.needFile("%s|%s-%s" % (inner_path, 6 * 1024 * 1024, 7 * 1024 * 1024))

    # It should only request parts from peer1, as the other peers don't have the requested parts in their piecefields
    assert len([request[1] for request in requests if request[1] != server2_peer1]) == 0
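# The tests above and below record method calls with Spy.Spy(...). A minimal
# sketch of such a call recorder, assuming Spy.Spy simply wraps the named
# class method and collects each call's positional arguments (illustrative
# only; ZeroNet's real Spy helper may differ):
class SpySketch:
    def __init__(self, cls, func_name):
        self.cls = cls
        self.func_name = func_name
        self.calls = []  # Each entry: the positional args of one call

    def __enter__(self):
        self.func_original = getattr(self.cls, self.func_name)

        def wrapper(*args, **kwargs):
            self.calls.append(args)  # Record the call before delegating
            return self.func_original(*args, **kwargs)

        setattr(self.cls, self.func_name, wrapper)
        return self.calls  # The list stays usable after the with-block exits

    def __exit__(self, *exc_info):
        setattr(self.cls, self.func_name, self.func_original)  # Restore original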
def testWorkerManagerPiecefieldDownload(self, file_server, site, site_temp):
    inner_path = self.createBigfile(site)

    server1 = file_server
    server1.sites[site.address] = site
    server2 = FileServer(file_server.ip, 1545)
    server2.sites[site_temp.address] = site_temp
    site_temp.connection_server = server2
    sha512 = site.content_manager.getFileInfo(inner_path)["sha512"]

    # Create 10 fake peers, one for each piece
    for i in range(10):
        peer = Peer(file_server.ip, 1544, site_temp, server2)
        peer.piecefields[sha512][i] = "1"
        peer.updateHashfield = mock.MagicMock(return_value=False)
        peer.updatePiecefields = mock.MagicMock(return_value=False)
        peer.findHashIds = mock.MagicMock(return_value={"nope": []})
        peer.hashfield = site.content_manager.hashfield
        peer.has_hashfield = True
        peer.key = "Peer:%s" % i
        site_temp.peers["Peer:%s" % i] = peer

    site_temp.downloadContent("content.json", download_files=False)
    site_temp.needFile("data/optional.any.iso.piecemap.msgpack")

    with Spy.Spy(Peer, "getFile") as requests:
        for i in range(10):
            site_temp.needFile("%s|%s-%s" % (inner_path, i * 1024 * 1024, (i + 1) * 1024 * 1024))

    assert len(requests) == 10
    for i in range(10):
        assert requests[i][0] == site_temp.peers["Peer:%s" % i]  # Every part should be requested from its piece owner peer
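# peer.piecefields[sha512][i] = "1" above marks which 1MB pieces of the big
# file each peer claims to have. A hypothetical, simplified piecefield under
# the assumption that it is just a per-file list of "0"/"1" flags indexed by
# piece number (ZeroNet's real piecefield is a more compact structure):
from collections import defaultdict

class PiecefieldSketch:
    def __init__(self):
        self.flags = []

    def __setitem__(self, piece_index, value):
        while len(self.flags) <= piece_index:  # Grow to the needed length
            self.flags.append("0")
        self.flags[piece_index] = value

    def __getitem__(self, piece_index):
        return self.flags[piece_index] if piece_index < len(self.flags) else "0"

# Usage mirroring the test above:
piecefields = defaultdict(PiecefieldSketch)
piecefields["somesha512"][3] = "1"
assert piecefields["somesha512"][3] == "1"
assert piecefields["somesha512"][0] == "0"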
def testFileGet(self, file_server, site, site_temp):
    inner_path = self.createBigfile(site)

    # Init source server
    site.connection_server = file_server
    file_server.sites[site.address] = site

    # Init client server
    site_temp.connection_server = FileServer(file_server.ip, 1545)
    site_temp.connection_server.sites[site_temp.address] = site_temp
    site_temp.addPeer(file_server.ip, 1544)

    # Download site
    site_temp.download(blind_includes=True, retry_bad_files=False).join(timeout=10)

    # Download second block
    with site_temp.storage.openBigfile(inner_path) as f:
        f.seek(1024 * 1024)
        assert f.read(1024)[0:1] != b"\0"

    # Make sure the first block was not downloaded
    with site_temp.storage.open(inner_path) as f:
        assert f.read(1024)[0:1] == b"\0"

    peer2 = site.addPeer(file_server.ip, 1545, return_peer=True)

    # Should fail on the first block request, as the client does not have it
    assert not peer2.getFile(site.address, "%s|0-%s" % (inner_path, 1024 * 1024 * 1))

    # Should not fail on the second block request
    assert peer2.getFile(site.address, "%s|%s-%s" % (inner_path, 1024 * 1024 * 1, 1024 * 1024 * 2))
def testPex(self, file_server, site, site_temp):
    # Register site to currently running fileserver
    site.connection_server = file_server
    file_server.sites[site.address] = site
    # Create a new file server to emulate a new peer connecting to our peer
    file_server_temp = FileServer("127.0.0.1", 1545)
    site_temp.connection_server = file_server_temp
    file_server_temp.sites[site_temp.address] = site_temp
    # We will request peers from this
    peer_source = site_temp.addPeer("127.0.0.1", 1544)

    # Get ip4 peers from source site
    assert peer_source.pex(need_num=10) == 1  # Need >5 to also return non-connected peers
    assert len(site_temp.peers) == 2  # Me and the other peer
    site.addPeer("1.2.3.4", 1555)  # Add peer to source site
    assert peer_source.pex(need_num=10) == 1
    assert len(site_temp.peers) == 3
    assert "1.2.3.4:1555" in site_temp.peers

    # Get onion peers from source site
    site.addPeer("bka4ht2bzxchy44r.onion", 1555)
    assert "bka4ht2bzxchy44r.onion:1555" not in site_temp.peers
    assert peer_source.pex(need_num=10) == 1  # Need >5 to also return non-connected peers
    assert "bka4ht2bzxchy44r.onion:1555" in site_temp.peers
def file_server6(request):
    time.sleep(0.1)
    file_server6 = FileServer("::1", 1544)
    file_server6.ip_external = 'fca5:95d6:bfde:d902:8951:276e:1111:a22c'  # Fake external ip

    def listen():
        ConnectionServer.start(file_server6)
        ConnectionServer.listen(file_server6)

    gevent.spawn(listen)

    # Wait for port opening
    for retry in range(10):
        time.sleep(0.1)  # Port opening
        try:
            conn = file_server6.getConnection("::1", 1544)
            conn.close()
            break
        except Exception as err:
            print("FileServer6 startup error", Debug.formatException(err))
    assert file_server6.running
    file_server6.ip_incoming = {}  # Reset flood protection

    def stop():
        file_server6.stop()
    request.addfinalizer(stop)
    return file_server6
def testFindHash(self, tor_manager, file_server, site, site_temp):
    file_server.ip_incoming = {}  # Reset flood protection
    file_server.sites[site.address] = site
    file_server.tor_manager = tor_manager

    client = FileServer(file_server.ip, 1545)
    client.sites = {site_temp.address: site_temp}
    site_temp.connection_server = client

    # Add file_server as peer to client
    peer_file_server = site_temp.addPeer(file_server.ip, 1544)

    assert peer_file_server.findHashIds([1234]) == {}

    # Add fake peers with the required hash
    fake_peer_1 = site.addPeer("bka4ht2bzxchy44r.onion", 1544)
    fake_peer_1.hashfield.append(1234)
    fake_peer_2 = site.addPeer("1.2.3.5", 1545)
    fake_peer_2.hashfield.append(1234)
    fake_peer_2.hashfield.append(1235)
    fake_peer_3 = site.addPeer("1.2.3.6", 1546)
    fake_peer_3.hashfield.append(1235)
    fake_peer_3.hashfield.append(1236)

    res = peer_file_server.findHashIds([1234, 1235])
    assert sorted(res[1234]) == [('1.2.3.5', 1545), ("bka4ht2bzxchy44r.onion", 1544)]
    assert sorted(res[1235]) == [('1.2.3.5', 1545), ('1.2.3.6', 1546)]

    # Test adding my own address
    site.content_manager.hashfield.append(1234)

    res = peer_file_server.findHashIds([1234, 1235])
    assert sorted(res[1234]) == [('1.2.3.5', 1545), (file_server.ip, 1544), ("bka4ht2bzxchy44r.onion", 1544)]
    assert sorted(res[1235]) == [('1.2.3.5', 1545), ('1.2.3.6', 1546)]
def testFindHash(self, file_server, site, site_temp):
    file_server.ip_incoming = {}  # Reset flood protection
    file_server.sites[site.address] = site

    client = FileServer("127.0.0.1", 1545)
    client.sites[site_temp.address] = site_temp
    site_temp.connection_server = client

    # Add file_server as peer to client
    peer_file_server = site_temp.addPeer("127.0.0.1", 1544)

    assert peer_file_server.findHashIds([1234]) == {}

    # Add fake peers with the required hash
    fake_peer_1 = site.addPeer("1.2.3.4", 1544)
    fake_peer_1.hashfield.append(1234)
    fake_peer_2 = site.addPeer("1.2.3.5", 1545)
    fake_peer_2.hashfield.append(1234)
    fake_peer_2.hashfield.append(1235)
    fake_peer_3 = site.addPeer("1.2.3.6", 1546)
    fake_peer_3.hashfield.append(1235)
    fake_peer_3.hashfield.append(1236)

    assert peer_file_server.findHashIds([1234, 1235]) == {
        1234: [('1.2.3.4', 1544), ('1.2.3.5', 1545)],
        1235: [('1.2.3.5', 1545), ('1.2.3.6', 1546)]
    }

    # Test adding my own address
    site.content_manager.hashfield.append(1234)

    res = peer_file_server.findHashIds([1234, 1235])
    assert res[1234] == [('1.2.3.4', 1544), ('1.2.3.5', 1545), ("127.0.0.1", 1544)]
    assert res[1235] == [('1.2.3.5', 1545), ('1.2.3.6', 1546)]
def siteDownload(self, address):
    from Site import Site
    from Site import SiteManager
    SiteManager.site_manager.load()

    logging.info("Opening a simple connection server")
    global file_server
    from File import FileServer
    file_server = FileServer("127.0.0.1", 1234)

    site = Site(address)
    on_completed = gevent.event.AsyncResult()

    def onComplete(evt):
        evt.set(True)

    site.onComplete.once(lambda: onComplete(on_completed))
    print("Announcing...")
    site.announce()

    s = time.time()
    print("Downloading...")
    site.downloadContent("content.json", check_modifications=True)

    print(on_completed.get())
    print("Downloaded in %.3fs" % (time.time() - s))
def file_server4(request):
    time.sleep(0.1)
    file_server = FileServer("127.0.0.1", 1544)
    file_server.ip_external = "1.2.3.4"  # Fake external ip

    def listen():
        ConnectionServer.start(file_server)
        ConnectionServer.listen(file_server)

    gevent.spawn(listen)

    # Wait for port opening
    for retry in range(10):
        time.sleep(0.1)  # Port opening
        try:
            conn = file_server.getConnection("127.0.0.1", 1544)
            conn.close()
            break
        except Exception as err:
            print("FileServer4 startup error", Debug.formatException(err))
    assert file_server.running
    file_server.ip_incoming = {}  # Reset flood protection

    def stop():
        file_server.stop()
    request.addfinalizer(stop)
    return file_server
def testNullFileRead(self, file_server, site, site_temp, size):
    inner_path = "data/optional.iso"

    f = site.storage.open(inner_path, "w")
    f.write("\0" * size)
    f.close()
    assert site.content_manager.sign("content.json", self.privatekey)

    # Init source server
    site.connection_server = file_server
    file_server.sites[site.address] = site

    # Init client server
    site_temp.connection_server = FileServer(file_server.ip, 1545)
    site_temp.connection_server.sites[site_temp.address] = site_temp
    site_temp.addPeer(file_server.ip, 1544)

    # Download site
    site_temp.download(blind_includes=True).join(timeout=5)

    if "piecemap" in site.content_manager.getFileInfo(inner_path):  # Bigfile
        site_temp.needFile(inner_path + "|all")
    else:
        site_temp.needFile(inner_path)

    assert site_temp.storage.getSize(inner_path) == size
def sitePublish(self, address, peer_ip=None, peer_port=15441, inner_path="content.json"):
    global file_server
    from Site import Site
    from File import FileServer  # We need fileserver to handle incoming file requests

    logging.info("Creating FileServer....")
    file_server = FileServer()
    file_server_thread = gevent.spawn(file_server.start, check_sites=False)  # Don't check every site's integrity
    file_server.openport()
    site = file_server.sites[address]
    site.settings["serving"] = True  # Serve the site even if it's disabled
    if peer_ip:  # Announce to the specified ip
        site.addPeer(peer_ip, peer_port)
    else:  # Just ask the tracker
        logging.info("Gathering peers from tracker")
        site.announce()  # Gather peers
    published = site.publish(20, inner_path)  # Push to 20 peers
    if published > 0:
        time.sleep(3)
        logging.info("Serving files (max 60s)...")
        gevent.joinall([file_server_thread], timeout=60)
        logging.info("Done.")
    else:
        logging.info("No peers found, the sitePublish command only works if you already have visitors serving your site")
def testPex(self, file_server, site, site_temp):
    # Register site to currently running fileserver
    site.connection_server = file_server
    file_server.sites[site.address] = site
    # Create a new file server to emulate a new peer connecting to our peer
    file_server_temp = FileServer(file_server.ip, 1545)
    site_temp.connection_server = file_server_temp
    file_server_temp.sites[site_temp.address] = site_temp
    # We will request peers from this
    peer_source = site_temp.addPeer(file_server.ip, 1544)

    # Get ip4 peers from source site
    site.addPeer("1.2.3.4", 1555)  # Add peer to source site
    assert peer_source.pex(need_num=10) == 1
    assert len(site_temp.peers) == 2
    assert "1.2.3.4:1555" in site_temp.peers

    # Get onion peers from source site
    site.addPeer("bka4ht2bzxchy44r.onion", 1555)
    assert "bka4ht2bzxchy44r.onion:1555" not in site_temp.peers

    # Don't add onion peers if not supported
    assert "onion" not in file_server_temp.supported_ip_types
    assert peer_source.pex(need_num=10) == 0

    file_server_temp.supported_ip_types.append("onion")
    assert peer_source.pex(need_num=10) == 1

    assert "bka4ht2bzxchy44r.onion:1555" in site_temp.peers
def testPex(self, file_server, site, site_temp):
    file_server.sites[site.address] = site
    client = FileServer(file_server.ip, 1545)
    client.sites = {site_temp.address: site_temp}
    site_temp.connection_server = client

    connection = client.getConnection(file_server.ip, 1544)

    # Add new fake peer to site
    fake_peer = site.addPeer(file_server.ip_external, 11337, return_peer=True)
    # Add fake connection to it
    fake_peer.connection = Connection(file_server, file_server.ip_external, 11337)
    fake_peer.connection.last_recv_time = time.time()
    assert fake_peer in site.getConnectablePeers()

    # Add file_server as peer to client
    peer_file_server = site_temp.addPeer(file_server.ip, 1544)

    assert "%s:11337" % file_server.ip_external not in site_temp.peers
    assert peer_file_server.pex()
    assert "%s:11337" % file_server.ip_external in site_temp.peers

    # Should not exchange private peers from the local network
    fake_peer_private = site.addPeer("192.168.0.1", 11337, return_peer=True)
    assert fake_peer_private not in site.getConnectablePeers(allow_private=False)
    fake_peer_private.connection = Connection(file_server, "192.168.0.1", 11337)
    fake_peer_private.connection.last_recv_time = time.time()

    assert "192.168.0.1:11337" not in site_temp.peers
    assert not peer_file_server.pex()
    assert "192.168.0.1:11337" not in site_temp.peers

    connection.close()
    client.stop()
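# testPex above expects getConnectablePeers(allow_private=False) to withhold
# local-network peers such as 192.168.0.1 from pex. A possible check, sketched
# with the standard ipaddress module (the helper name is hypothetical, not
# ZeroNet's API):
import ipaddress

def is_exchangeable(ip, allow_private=False):
    if ip.endswith(".onion"):
        return True  # Onion addresses carry no private-range information
    try:
        parsed = ipaddress.ip_address(ip)
    except ValueError:
        return False  # Not a valid IP literal
    return allow_private or not parsed.is_private

assert is_exchangeable("1.2.3.4")
assert not is_exchangeable("192.168.0.1")
assert is_exchangeable("192.168.0.1", allow_private=True)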
def sitePublish(self, address, peer_ip=None, peer_port=15441, inner_path="content.json", diffs={}):
    global file_server
    from Site import SiteManager
    from File import FileServer  # We need fileserver to handle incoming file requests
    from Peer import Peer

    logging.info("Loading site...")
    site = SiteManager.site_manager.list()[address]
    site.settings["serving"] = True  # Serve the site even if it's disabled

    logging.info("Creating FileServer....")
    file_server = FileServer()
    site.connection_server = file_server
    file_server_thread = gevent.spawn(file_server.start, check_sites=False)  # Don't check every site's integrity
    time.sleep(0.001)

    if not file_server_thread.ready():  # Fileserver started successfully
        file_server.openport()
        if peer_ip:  # Announce to the specified ip
            site.addPeer(peer_ip, peer_port)
        else:  # Just ask the tracker
            logging.info("Gathering peers from tracker")
            site.announce()  # Gather peers
        published = site.publish(5, inner_path, diffs=diffs)  # Push to peers
        if published > 0:
            time.sleep(3)
            logging.info("Serving files (max 60s)...")
            gevent.joinall([file_server_thread], timeout=60)
            logging.info("Done.")
        else:
            logging.info("No peers found, the sitePublish command only works if you already have visitors serving your site")
    else:  # Already running, notify local client about the new content
        logging.info("Sending siteReload")
        my_peer = Peer("127.0.0.1", config.fileserver_port)
        logging.info(my_peer.request("siteReload", {"site": site.address, "inner_path": inner_path}))
        logging.info("Sending sitePublish")
        logging.info(my_peer.request("sitePublish", {"site": site.address, "inner_path": inner_path, "diffs": diffs}))
        logging.info("Done.")
def sitePublish(address, peer_ip=None, peer_port=15441, inner_path="content.json"):
    global file_server
    from Site import Site
    from File import FileServer  # We need fileserver to handle incoming file requests

    logging.info("Creating FileServer....")
    file_server = FileServer()
    file_server_thread = gevent.spawn(file_server.start, check_sites=False)  # Don't check every site's integrity
    file_server.openport()
    if file_server.port_opened == False:
        logging.info("Port not opened, passive publishing not supported yet :(")
        return

    site = file_server.sites[address]
    site.settings["serving"] = True  # Serve the site even if it's disabled
    if peer_ip:  # Announce to the specified ip
        site.addPeer(peer_ip, peer_port)
    else:  # Just ask the tracker
        logging.info("Gathering peers from tracker")
        site.announce()  # Gather peers
    site.publish(20, inner_path)  # Push to 20 peers
    time.sleep(1)
    logging.info("Serving files...")
    gevent.joinall([file_server_thread])
    logging.info("Done.")
def testBigUpdate(self, file_server, site, site_temp):
    # Init source server
    site.connection_server = file_server
    file_server.sites[site.address] = site

    # Init client server
    client = FileServer(file_server.ip, 1545)
    client.sites[site_temp.address] = site_temp
    site_temp.connection_server = client

    # Connect peers
    site_temp.addPeer(file_server.ip, 1544)

    # Download site from site to site_temp
    site_temp.download(blind_includes=True).join(timeout=5)

    # Update file
    data_original = site.storage.open("data/data.json").read()
    data_new = data_original.replace(b'"ZeroBlog"', b'"PatchedZeroBlog"')
    assert data_original != data_new

    site.storage.open("data/data.json-new", "wb").write(data_new)

    assert site.storage.open("data/data.json-new").read() == data_new
    assert site_temp.storage.open("data/data.json").read() != data_new

    # Generate diff
    diffs = site.content_manager.getDiffs("content.json")
    assert not site.storage.isFile("data/data.json-new")  # New data file removed
    assert site.storage.open("data/data.json").read() == data_new  # -new postfix removed
    assert "data/data.json" in diffs

    content_json = site.storage.loadJson("content.json")
    content_json["title"] = "BigZeroBlog" * 1024 * 10
    site.storage.writeJson("content.json", content_json)
    site.content_manager.loadContent("content.json", force=True)

    # Publish with patch
    site.log.info("Publish new data.json with patch")
    with Spy.Spy(FileRequest, "route") as requests:
        site.content_manager.sign("content.json", privatekey="5KUh3PvNm5HUWoCfSUfcYvfQ2g3PrRNJWr6Q9eqdBGu23mtMntv")
        assert site.storage.getSize("content.json") > 10 * 1024  # Make it a big content.json
        site.publish(diffs=diffs)
        site_temp.download(blind_includes=True).join(timeout=5)
        file_requests = [request for request in requests if request[1] in ("getFile", "streamFile")]
        assert len(file_requests) == 1

    assert site_temp.storage.open("data/data.json").read() == data_new
    assert site_temp.storage.open("content.json").read() == site.storage.open("content.json").read()
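# testBigUpdate above verifies that only one file request is needed because
# the data.json change travels as a diff alongside the update. Illustrative
# only: a line-based diff of the old vs. new data.json built with difflib;
# ZeroNet's actual getDiffs() patch format is different.
import difflib

def make_diff(old_bytes, new_bytes):
    old_lines = old_bytes.decode("utf8").splitlines(keepends=True)
    new_lines = new_bytes.decode("utf8").splitlines(keepends=True)
    return list(difflib.unified_diff(old_lines, new_lines, lineterm=""))

diff = make_diff(b'{"title": "ZeroBlog"}', b'{"title": "PatchedZeroBlog"}')
assert any("PatchedZeroBlog" in line for line in diff)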
def testRenameOptional(self, file_server, site, site_temp):
    assert site.storage.directory == config.data_dir + "/" + site.address
    assert site_temp.storage.directory == config.data_dir + "-temp/" + site.address

    # Init source server
    site.connection_server = file_server
    file_server.sites[site.address] = site

    # Init client server
    client = FileServer(file_server.ip, 1545)
    client.sites = {site_temp.address: site_temp}
    site_temp.connection_server = client
    site_temp.announce = mock.MagicMock(return_value=True)  # Don't try to find peers from the net

    site_temp.addPeer(file_server.ip, 1544)
    assert site_temp.download(blind_includes=True, retry_bad_files=False).get(timeout=10)

    assert site_temp.settings["optional_downloaded"] == 0

    site_temp.needFile("data/optional.txt")

    assert site_temp.settings["optional_downloaded"] > 0
    settings_before = site_temp.settings
    hashfield_before = site_temp.content_manager.hashfield.tobytes()

    # Rename optional file
    os.rename(site.storage.getPath("data/optional.txt"), site.storage.getPath("data/optional-new.txt"))

    site.content_manager.sign("content.json", privatekey="5KUh3PvNm5HUWoCfSUfcYvfQ2g3PrRNJWr6Q9eqdBGu23mtMntv", remove_missing_optional=True)

    content = site.storage.loadJson("content.json")
    assert "data/optional-new.txt" in content["files_optional"]
    assert "data/optional.txt" not in content["files_optional"]
    assert not site_temp.storage.isFile("data/optional-new.txt")
    assert site_temp.storage.isFile("data/optional.txt")

    with Spy.Spy(FileRequest, "route") as requests:
        site.publish()
        time.sleep(0.1)
        assert site_temp.download(blind_includes=True, retry_bad_files=False).get(timeout=10)  # Wait for download
        assert "streamFile" not in [req[1] for req in requests]

    content = site_temp.storage.loadJson("content.json")
    assert "data/optional-new.txt" in content["files_optional"]
    assert "data/optional.txt" not in content["files_optional"]
    assert site_temp.storage.isFile("data/optional-new.txt")
    assert not site_temp.storage.isFile("data/optional.txt")

    assert site_temp.settings["size"] == settings_before["size"]
    assert site_temp.settings["size_optional"] == settings_before["size_optional"]
    assert site_temp.settings["optional_downloaded"] == settings_before["optional_downloaded"]
    assert site_temp.content_manager.hashfield.tobytes() == hashfield_before

    assert site_temp.storage.deleteFiles()
    [connection.close() for connection in file_server.connections]
def sitePublish(self, address, peer_ip=None, peer_port=15441, inner_path="content.json"):
    global file_server
    from Site import Site
    from Site import SiteManager
    from File import FileServer  # We need fileserver to handle incoming file requests
    from Peer import Peer
    site = SiteManager.site_manager.get(address)
    logging.info("Loading site...")
    site.settings["serving"] = True  # Serve the site even if it's disabled

    try:
        ws = self.getWebsocket(site)
        logging.info("Sending siteReload")
        self.siteCmd(address, "siteReload", inner_path)

        logging.info("Sending sitePublish")
        self.siteCmd(address, "sitePublish", {"inner_path": inner_path, "sign": False})
        logging.info("Done.")

    except Exception as err:
        logging.info("Can't connect to local websocket client: %s" % err)
        logging.info("Creating FileServer....")
        file_server = FileServer()
        site.connection_server = file_server
        file_server_thread = gevent.spawn(file_server.start, check_sites=False)  # Don't check every site's integrity
        time.sleep(0.001)  # Let the fileserver start
        file_server.openport()

        if peer_ip:  # Announce to the specified ip
            site.addPeer(peer_ip, peer_port)
        else:  # Just ask the tracker
            logging.info("Gathering peers from tracker")
            site.announce()  # Gather peers

        published = site.publish(5, inner_path)  # Push to peers
        if published > 0:
            time.sleep(3)
            logging.info("Serving files (max 60s)...")
            gevent.joinall([file_server_thread], timeout=60)
            logging.info("Done.")
        else:
            logging.info("No peers found, the sitePublish command only works if you already have visitors serving your site")
def testHashfieldExchange(self, file_server, site, site_temp):
    server1 = file_server
    server1.sites[site.address] = site
    site.connection_server = server1

    server2 = FileServer(file_server.ip, 1545)
    server2.sites[site_temp.address] = site_temp
    site_temp.connection_server = server2
    site.storage.verifyFiles(quick_check=True)  # Find what optional files we have

    # Add file_server as peer to client
    server2_peer1 = site_temp.addPeer(file_server.ip, 1544)

    # Check if hashfield has any files
    assert len(site.content_manager.hashfield) > 0

    # Testing hashfield sync
    assert len(server2_peer1.hashfield) == 0
    assert server2_peer1.updateHashfield()  # Query hashfield from peer
    assert len(server2_peer1.hashfield) > 0

    # Test force push new hashfield
    site_temp.content_manager.hashfield.appendHash("AABB")
    server1_peer2 = site.addPeer(file_server.ip, 1545, return_peer=True)
    with Spy.Spy(FileRequest, "route") as requests:
        assert len(server1_peer2.hashfield) == 0
        server2_peer1.sendMyHashfield()
        assert len(server1_peer2.hashfield) == 1
        server2_peer1.sendMyHashfield()  # Hashfield not changed, should be ignored
        assert len(requests) == 1

        time.sleep(0.01)  # To make the hashfield change date different
        site_temp.content_manager.hashfield.appendHash("AACC")
        server2_peer1.sendMyHashfield()  # Push hashfield
        assert len(server1_peer2.hashfield) == 2
        assert len(requests) == 2

        site_temp.content_manager.hashfield.appendHash("AADD")
        assert server1_peer2.updateHashfield(force=True)  # Request hashfield
        assert len(server1_peer2.hashfield) == 3
        assert len(requests) == 3

        assert not server2_peer1.sendMyHashfield()  # Not changed, should be ignored
        assert len(requests) == 3

    server2.stop()
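# The hashfield exchanged above is a compact list of short ids, one per
# downloaded optional file. A hypothetical sketch, assuming each id is the
# first two bytes of the file's sha512 hash parsed as an integer (ZeroNet's
# real hashfield class may differ in detail):
import hashlib

class HashfieldSketch(set):
    def appendHash(self, hash_hex):
        self.add(int(hash_hex[0:4], 16))  # Truncate the hex digest to a small id

field = HashfieldSketch()
field.appendHash(hashlib.sha512(b"optional file body").hexdigest())
assert len(field) == 1
field.appendHash("AABB")  # The tests above append raw hex ids the same way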
def testHugeContentSiteUpdate(self, file_server, site, site_temp):
    # Init source server
    site.connection_server = file_server
    file_server.sites[site.address] = site

    # Init client server
    client = FileServer(file_server.ip, 1545)
    client.sites = {site_temp.address: site_temp}
    site_temp.connection_server = client

    # Connect peers
    site_temp.addPeer(file_server.ip, 1544)

    # Download site from site to site_temp
    assert site_temp.download(blind_includes=True, retry_bad_files=False).get(timeout=10)
    site_temp.settings["size_limit"] = int(20 * 1024 * 1024)
    site_temp.saveSettings()

    # Raise the size limit to 20MB on the site so it can be signed
    site.settings["size_limit"] = int(20 * 1024 * 1024)
    site.saveSettings()

    content_json = site.storage.loadJson("content.json")
    content_json["description"] = "PartirUnJour" * 1024 * 1024
    site.storage.writeJson("content.json", content_json)
    changed, deleted = site.content_manager.loadContent("content.json", force=True)

    # Make sure we have two different content.json files
    assert site_temp.storage.open("content.json").read() != site.storage.open("content.json").read()

    # Generate diff
    diffs = site.content_manager.getDiffs("content.json")

    # Publish with patch
    site.log.info("Publish new content.json bigger than 10MB")
    with Spy.Spy(FileRequest, "route") as requests:
        site.content_manager.sign("content.json", privatekey="5KUh3PvNm5HUWoCfSUfcYvfQ2g3PrRNJWr6Q9eqdBGu23mtMntv")
        assert site.storage.getSize("content.json") > 10 * 1024 * 1024  # Verify it's over 10MB
        time.sleep(0.1)
        site.publish(diffs=diffs)
        assert site_temp.download(blind_includes=True, retry_bad_files=False).get(timeout=10)

    assert site_temp.storage.getSize("content.json") < site_temp.getSizeLimit() * 1024 * 1024
    assert site_temp.storage.open("content.json").read() == site.storage.open("content.json").read()
def main(self):
    global ui_server, file_server
    from File import FileServer
    from Ui import UiServer
    file_server = FileServer()
    ui_server = UiServer()

    from Crypt import CryptConnection
    CryptConnection.manager.removeCerts()

    gevent.joinall([gevent.spawn(ui_server.start), gevent.spawn(file_server.start)])
def testRename(self, file_server, site, site_temp):
    assert site.storage.directory == config.data_dir + "/" + site.address
    assert site_temp.storage.directory == config.data_dir + "-temp/" + site.address

    # Init source server
    site.connection_server = file_server
    file_server.sites[site.address] = site

    # Init client server
    client = FileServer(file_server.ip, 1545)
    client.sites[site_temp.address] = site_temp
    site_temp.connection_server = client
    site_temp.announce = mock.MagicMock(return_value=True)  # Don't try to find peers from the net

    site_temp.addPeer(file_server.ip, 1544)
    site_temp.download(blind_includes=True).join(timeout=5)

    # Rename non-optional file
    os.rename(site.storage.getPath("data/img/domain.png"), site.storage.getPath("data/img/domain-new.png"))

    site.content_manager.sign("content.json", privatekey="5KUh3PvNm5HUWoCfSUfcYvfQ2g3PrRNJWr6Q9eqdBGu23mtMntv")

    content = site.storage.loadJson("content.json")
    assert "data/img/domain-new.png" in content["files"]
    assert "data/img/domain.png" not in content["files"]
    assert not site_temp.storage.isFile("data/img/domain-new.png")
    assert site_temp.storage.isFile("data/img/domain.png")
    settings_before = site_temp.settings

    with Spy.Spy(FileRequest, "route") as requests:
        site.publish()
        time.sleep(0.1)
        site_temp.download(blind_includes=True).join(timeout=5)  # Wait for download
        assert "streamFile" not in [req[1] for req in requests]

    content = site_temp.storage.loadJson("content.json")
    assert "data/img/domain-new.png" in content["files"]
    assert "data/img/domain.png" not in content["files"]
    assert site_temp.storage.isFile("data/img/domain-new.png")
    assert not site_temp.storage.isFile("data/img/domain.png")

    assert site_temp.settings["size"] == settings_before["size"]
    assert site_temp.settings["size_optional"] == settings_before["size_optional"]

    assert site_temp.storage.deleteFiles()
    [connection.close() for connection in file_server.connections]
def file_server(request):
    request.addfinalizer(CryptConnection.manager.removeCerts)  # Remove cert files after end
    file_server = FileServer("127.0.0.1", 1544)
    gevent.spawn(lambda: ConnectionServer.start(file_server))
    time.sleep(0)  # Port opening
    assert file_server.running

    def stop():
        file_server.stop()
    request.addfinalizer(stop)
    return file_server
def file_server(request):
    request.addfinalizer(CryptConnection.manager.removeCerts)  # Remove cert files after end
    file_server = FileServer("127.0.0.1", 1544)
    gevent.spawn(lambda: ConnectionServer.start(file_server))
    # Wait for port opening
    for retry in range(10):
        time.sleep(0.1)  # Port opening
        try:
            conn = file_server.getConnection("127.0.0.1", 1544)
            conn.close()
            break
        except Exception as err:
            print(err)
    assert file_server.running

    def stop():
        file_server.stop()
    request.addfinalizer(stop)
    return file_server
def main():
    from File import FileServer
    from Ui import UiServer
    logging.info("Creating UiServer....")
    ui_server = UiServer()

    logging.info("Creating FileServer....")
    file_server = FileServer()

    logging.info("Starting servers....")
    gevent.joinall([gevent.spawn(ui_server.start), gevent.spawn(file_server.start)])
def testUnicodeFilename(self, file_server, site, site_temp):
    assert site.storage.directory == config.data_dir + "/" + site.address
    assert site_temp.storage.directory == config.data_dir + "-temp/" + site.address

    # Init source server
    site.connection_server = file_server
    file_server.sites[site.address] = site

    # Init client server
    client = FileServer(file_server.ip, 1545)
    client.sites = {site_temp.address: site_temp}
    site_temp.connection_server = client
    site_temp.announce = mock.MagicMock(return_value=True)  # Don't try to find peers from the net

    site_temp.addPeer(file_server.ip, 1544)
    assert site_temp.download(blind_includes=True, retry_bad_files=False).get(timeout=10)

    site.storage.write("data/img/árvíztűrő.png", b"test")

    site.content_manager.sign("content.json", privatekey="5KUh3PvNm5HUWoCfSUfcYvfQ2g3PrRNJWr6Q9eqdBGu23mtMntv")

    content = site.storage.loadJson("content.json")
    assert "data/img/árvíztűrő.png" in content["files"]
    assert not site_temp.storage.isFile("data/img/árvíztűrő.png")
    settings_before = site_temp.settings

    with Spy.Spy(FileRequest, "route") as requests:
        site.publish()
        time.sleep(0.1)
        assert site_temp.download(blind_includes=True, retry_bad_files=False).get(timeout=10)  # Wait for download
        assert len([req[1] for req in requests if req[1] == "streamFile"]) == 1

    content = site_temp.storage.loadJson("content.json")
    assert "data/img/árvíztűrő.png" in content["files"]
    assert site_temp.storage.isFile("data/img/árvíztűrő.png")

    assert site_temp.settings["size"] == settings_before["size"]
    assert site_temp.settings["size_optional"] == settings_before["size_optional"]

    assert site_temp.storage.deleteFiles()
    [connection.close() for connection in file_server.connections]
def testArchivedBeforeDownload(self, file_server, site, site_temp):
    file_server.ip_incoming = {}  # Reset flood protection

    # Init source server
    site.connection_server = file_server
    file_server.sites[site.address] = site

    # Init client server
    client = FileServer("127.0.0.1", 1545)
    client.sites[site_temp.address] = site_temp
    site_temp.connection_server = client

    # Download normally
    site_temp.addPeer("127.0.0.1", 1544)
    site_temp.download(blind_includes=True).join(timeout=5)
    bad_files = site_temp.storage.verifyFiles(quick_check=True)["bad_files"]

    assert not bad_files
    assert "data/users/1C5sgvWaSgfaTpV5kjBCnCiKtENNMYo69q/content.json" in site_temp.content_manager.contents
    assert site_temp.storage.isFile("data/users/1C5sgvWaSgfaTpV5kjBCnCiKtENNMYo69q/content.json")
    assert len(list(site_temp.storage.query("SELECT * FROM comment"))) == 2

    # Add archived data
    assert "archived_before" not in site.content_manager.contents["data/users/content.json"]["user_contents"]
    assert not site.content_manager.isArchived("data/users/1C5sgvWaSgfaTpV5kjBCnCiKtENNMYo69q/content.json", time.time() - 1)

    content_modification_time = site.content_manager.contents["data/users/1C5sgvWaSgfaTpV5kjBCnCiKtENNMYo69q/content.json"]["modified"]
    site.content_manager.contents["data/users/content.json"]["user_contents"]["archived_before"] = content_modification_time
    site.content_manager.sign("data/users/content.json", privatekey="5KUh3PvNm5HUWoCfSUfcYvfQ2g3PrRNJWr6Q9eqdBGu23mtMntv")

    date_archived = site.content_manager.contents["data/users/content.json"]["user_contents"]["archived_before"]
    assert site.content_manager.isArchived("data/users/1C5sgvWaSgfaTpV5kjBCnCiKtENNMYo69q/content.json", date_archived - 1)
    assert site.content_manager.isArchived("data/users/1C5sgvWaSgfaTpV5kjBCnCiKtENNMYo69q/content.json", date_archived)
    assert not site.content_manager.isArchived("data/users/1C5sgvWaSgfaTpV5kjBCnCiKtENNMYo69q/content.json", date_archived + 1)  # Allow user to update archived data later

    # Push archived update
    assert "archived_before" not in site_temp.content_manager.contents["data/users/content.json"]["user_contents"]
    site.publish()
    time.sleep(0.1)
    site_temp.download(blind_includes=True).join(timeout=5)  # Wait for download

    # The archived user content should disappear from the remote client
    assert "archived_before" in site_temp.content_manager.contents["data/users/content.json"]["user_contents"]
    assert "data/users/1C5sgvWaSgfaTpV5kjBCnCiKtENNMYo69q/content.json" not in site_temp.content_manager.contents
    assert not site_temp.storage.isDir("data/users/1C5sgvWaSgfaTpV5kjBCnCiKtENNMYo69q")
    assert len(list(site_temp.storage.query("SELECT * FROM comment"))) == 1
    assert len(list(site_temp.storage.query("SELECT * FROM json WHERE directory LIKE '%1C5sgvWaSgfaTpV5kjBCnCiKtENNMYo69q%'"))) == 0

    assert site_temp.storage.deleteFiles()
    [connection.close() for connection in file_server.connections]
def announcer_remote(site_temp):
    file_server_remote = FileServer("127.0.0.1", 1545)
    file_server_remote.sites[site_temp.address] = site_temp
    announcer = AnnounceLocalPlugin.LocalAnnouncer(file_server_remote, listen_port=1101)
    file_server_remote.local_announcer = announcer
    announcer.listen_port = 1101
    announcer.sender_info["broadcast_port"] = 1101
    announcer.getMyIps = mock.MagicMock(return_value=["127.0.0.1"])
    announcer.discover = mock.MagicMock(return_value=False)  # Don't send discover requests automatically
    gevent.spawn(announcer.start)
    time.sleep(0.5)

    assert file_server_remote.local_announcer.running
    return file_server_remote.local_announcer
def main(self):
    global ui_server, file_server
    from File import FileServer
    from Ui import UiServer
    logging.info("Creating FileServer....")
    file_server = FileServer()
    logging.info("Creating UiServer....")
    ui_server = UiServer()

    logging.info("Removing old SSL certs...")
    from Crypt import CryptConnection
    CryptConnection.manager.removeCerts()

    logging.info("Starting servers....")
    gevent.joinall([gevent.spawn(ui_server.start), gevent.spawn(file_server.start)])
def testFindOptional(self, file_server, site, site_temp):
    file_server.ip_incoming = {}  # Reset flood protection

    # Init source server
    site.connection_server = file_server
    file_server.sites[site.address] = site

    # Init full source server (has optional files)
    site_full = Site("1TeSTvb4w2PWE81S2rEELgmX2GCCExQGT")
    file_server_full = FileServer("127.0.0.1", 1546)
    site_full.connection_server = file_server_full
    gevent.spawn(lambda: ConnectionServer.start(file_server_full))
    time.sleep(0)  # Port opening
    file_server_full.sites[site_full.address] = site_full  # Add site
    site_full.storage.verifyFiles(quick_check=True)  # Check optional files
    site_full_peer = site.addPeer("127.0.0.1", 1546)  # Add it to source server
    assert site_full_peer.updateHashfield()  # Update hashfield

    # Init client server
    site_temp.connection_server = ConnectionServer("127.0.0.1", 1545)
    site_temp.announce = mock.MagicMock(return_value=True)  # Don't try to find peers from the net
    site_temp.addPeer("127.0.0.1", 1544)  # Add source server

    # Download normal files
    site_temp.download(blind_includes=True).join(timeout=5)

    # Download optional data/optional.txt
    optional_file_info = site_temp.content_manager.getFileInfo("data/optional.txt")
    assert not site_temp.storage.isFile("data/optional.txt")
    assert not site.content_manager.hashfield.hasHash(optional_file_info["sha512"])  # The source server doesn't know it has the file
    assert site_full_peer.hashfield.hasHash(optional_file_info["sha512"])  # The full peer on the source server has the file
    assert site_full.content_manager.hashfield.hasHash(optional_file_info["sha512"])  # The full source server knows it has the file

    with Spy.Spy(FileRequest, "route") as requests:
        site_temp.needFile("data/optional.txt")
        print(requests)

    assert site_temp.storage.deleteFiles()
    file_server_full.stop()
    [connection.close() for connection in file_server.connections]