def testStreamFile(self, file_server, site):
    # streamFile is like getFile, but the body arrives in a caller-supplied buffer
    file_server.ip_incoming = {}  # Reset flood protection
    client = ConnectionServer("127.0.0.1", 1545)
    connection = client.getConnection("127.0.0.1", 1544)
    file_server.sites[site.address] = site

    # Valid file: content streamed into buff, metadata in the response
    buff = StringIO.StringIO()
    response = connection.request("streamFile", {"site": site.address, "inner_path": "content.json", "location": 0}, buff)
    assert "stream_bytes" in response
    assert "sign" in buff.getvalue()

    # Invalid file
    buff = StringIO.StringIO()
    response = connection.request("streamFile", {"site": site.address, "inner_path": "invalid.file", "location": 0}, buff)
    assert "File read error" in response["error"]

    # Location over size
    buff = StringIO.StringIO()
    response = connection.request(
        "streamFile", {"site": site.address, "inner_path": "content.json", "location": 1024 * 1024}, buff
    )
    assert "File read error" in response["error"]

    # Stream from parent dir must be rejected
    buff = StringIO.StringIO()
    response = connection.request("streamFile", {"site": site.address, "inner_path": "../users.json", "location": 0}, buff)
    assert "File read error" in response["error"]

    connection.close()
    client.stop()
def __init__(self, ip=config.fileserver_ip, port=config.fileserver_port):
    # File server: serves site content over the ZeroNet wire protocol.
    self.site_manager = SiteManager.site_manager
    self.log = logging.getLogger("FileServer")
    ip = ip.replace("*", "0.0.0.0")  # Wildcard means bind on every interface

    if config.tor == "always":
        # All traffic goes through the Tor hidden service port
        port = config.tor_hs_port
        config.fileserver_port = port
    elif port == 0:  # Use random port
        range_from, range_to = map(int, config.fileserver_port_range.split("-"))
        port = self.getRandomPort(ip, range_from, range_to)
        config.fileserver_port = port
        if not port:
            raise Exception("Can't find bindable port")
        if not config.tor == "always":
            config.saveValue("fileserver_port", port)  # Save random port value for next restart

    ConnectionServer.__init__(self, ip, port, self.handleRequest)

    if config.ip_external:  # External ip defined in arguments
        self.port_opened = True
        SiteManager.peer_blacklist.append((config.ip_external, self.port))  # Add myself to peer blacklist
    else:
        self.port_opened = None  # Is file server opened on router (unknown yet)
    self.upnp_port_opened = False
    self.sites = {}
    self.last_request = time.time()
    self.files_parsing = {}
    self.ui_server = None
def __init__(self):
    # Minimal file server constructor: bind on the configured ip/port.
    ConnectionServer.__init__(self, config.fileserver_ip, config.fileserver_port, self.handleRequest)
    if config.ip_external:  # External ip defined in arguments
        self.port_opened = True
        SiteManager.peer_blacklist.append((config.ip_external, self.port))  # Add myself to peer blacklist
    else:
        self.port_opened = None  # Is file server opened on router (unknown yet)
    self.sites = SiteManager.site_manager.list()
def testPing(self, file_server, site):
    # A freshly-opened connection must answer a ping
    file_server.ip_incoming = {}  # Reset flood protection
    client = ConnectionServer("127.0.0.1", 1545)
    connection = client.getConnection("127.0.0.1", 1544)
    assert connection.ping()
    connection.close()
    client.stop()
def __init__(self, ip=config.fileserver_ip, port=config.fileserver_port):
    # File server constructor: bind and register our external ip (if known).
    ConnectionServer.__init__(self, ip, port, self.handleRequest)
    if config.ip_external:  # External ip defined in arguments
        self.port_opened = True
        SiteManager.peer_blacklist.append((config.ip_external, self.port))  # Add myself to peer blacklist
    else:
        self.port_opened = None  # Is file server opened on router (unknown yet)
    self.sites = {}
    self.last_request = time.time()
    self.files_parsing = {}
def testFindOptional(self, file_server, site, site_temp):
    # Two optional files requested at once should trigger only one findHashIds round
    file_server.ip_incoming = {}  # Reset flood protection

    # Init source server
    site.connection_server = file_server
    file_server.sites[site.address] = site

    # Init full source server (has optional files)
    site_full = Site("1TeSTvb4w2PWE81S2rEELgmX2GCCExQGT")
    file_server_full = FileServer("127.0.0.1", 1546)
    site_full.connection_server = file_server_full
    gevent.spawn(lambda: ConnectionServer.start(file_server_full))
    time.sleep(0)  # Port opening
    file_server_full.sites[site_full.address] = site_full  # Add site
    site_full.storage.verifyFiles(quick_check=True)  # Check optional files
    site_full_peer = site.addPeer("127.0.0.1", 1546)  # Add it to source server
    assert site_full_peer.updateHashfield()  # Update hashfield

    # Init client server
    site_temp.connection_server = ConnectionServer("127.0.0.1", 1545)
    site_temp.announce = mock.MagicMock(return_value=True)  # Don't try to find peers from the net
    site_temp.addPeer("127.0.0.1", 1544)  # Add source server

    # Download normal files
    site_temp.download(blind_includes=True).join(timeout=5)

    # Download optional data/optional.txt
    optional_file_info = site_temp.content_manager.getFileInfo("data/optional.txt")
    assert not site_temp.storage.isFile("data/optional.txt")
    assert not site.content_manager.hashfield.hasHash(optional_file_info["sha512"])  # Source server don't know he has the file
    assert site_full_peer.hashfield.hasHash(optional_file_info["sha512"])  # Source full peer on source server has the file
    assert site_full.content_manager.hashfield.hasHash(optional_file_info["sha512"])  # Source full server he has the file

    with Spy.Spy(FileRequest, "route") as requests:
        # Request 2 files at the same time
        threads = []
        threads.append(site_temp.needFile("data/optional.txt", blocking=False))
        threads.append(site_temp.needFile(
            "data/users/1CjfbrbwtP8Y2QjPy12vpTATkUT7oSiPQ9/peanut-butter-jelly-time.gif", blocking=False
        ))
        gevent.joinall(threads)
        assert len([request for request in requests if request[0] == "findHashIds"]) == 1  # findHashIds should be called only once

    assert site_temp.storage.isFile("data/optional.txt")
    assert site_temp.storage.isFile("data/users/1CjfbrbwtP8Y2QjPy12vpTATkUT7oSiPQ9/peanut-butter-jelly-time.gif")

    assert site_temp.storage.deleteFiles()
    file_server_full.stop()
    [connection.close() for connection in file_server.connections]
def start(self, check_sites=True):
    # Load known sites, spawn the maintenance greenlets, then serve until stopped.
    self.sites = self.site_manager.list()
    if config.debug:
        # Auto reload FileRequest on change
        from Debug import DebugReloader
        DebugReloader(self.reload)

    if check_sites:  # Open port, Update sites, Check files integrity
        gevent.spawn(self.checkSites)

    thread_announce_sites = gevent.spawn(self.announceSites)
    thread_cleanup_sites = gevent.spawn(self.cleanupSites)
    thread_wakeup_watcher = gevent.spawn(self.wakeupWatcher)

    ConnectionServer.start(self)  # Blocks until the server is stopped
    self.log.debug("Stopped.")
def testSslConnection(self, file_server):
    # Even for non-local ips (patched) the handshake must negotiate crypto
    file_server.ip_incoming = {}  # Reset flood protection
    client = ConnectionServer("127.0.0.1", 1545)
    assert file_server != client  # Connect to myself

    with mock.patch('Config.config.ip_local', return_value=[]):  # SSL not used for local ips
        connection = client.getConnection("127.0.0.1", 1544)
        assert len(file_server.connections) == 1
        assert connection.handshake
        assert connection.crypt

    # Close connection
    connection.close()
    client.stop()
    time.sleep(0.01)  # Let the server notice the close
    assert len(file_server.connections) == 0
def testSslConnection(self, file_server):
    # Handshake must negotiate crypto and register the connection/ip on the server
    file_server.ip_incoming = {}  # Reset flood protection
    client = ConnectionServer("127.0.0.1", 1545)
    assert file_server != client  # Connect to myself

    connection = client.getConnection("127.0.0.1", 1544)
    assert len(file_server.connections) == 1
    assert len(file_server.ips) == 1
    assert connection.handshake
    assert connection.crypt

    # Close connection: both bookkeeping dicts must be emptied
    connection.close()
    client.stop()
    time.sleep(0.01)  # Let the server notice the close
    assert len(file_server.connections) == 0
    assert len(file_server.ips) == 0
def testFloodProtection(self, file_server):
    # With the whitelist disabled, the 7th reconnect within a minute must hang
    file_server.ip_incoming = {}  # Reset flood protection
    whitelist = file_server.whitelist  # Save for reset
    file_server.whitelist = []  # Disable 127.0.0.1 whitelist
    client = ConnectionServer("127.0.0.1", 1545)

    # Only allow 6 connection in 1 minute
    for reconnect in range(6):
        connection = client.getConnection("127.0.0.1", 1544)
        assert connection.handshake
        connection.close()

    # The 7. one will timeout
    with pytest.raises(gevent.Timeout):
        with gevent.Timeout(0.1):
            connection = client.getConnection("127.0.0.1", 1544)

    # Reset whitelist
    file_server.whitelist = whitelist
def start(self, check_sites=True):
    # Spawn maintenance greenlets, then serve connections until stopped.
    self.log = logging.getLogger("FileServer")
    if config.debug:
        # Auto reload FileRequest on change
        from Debug import DebugReloader
        DebugReloader(self.reload)

    if check_sites:  # Open port, Update sites, Check files integrity
        gevent.spawn(self.checkSites)

    thread_announce_sites = gevent.spawn(self.announceSites)
    thread_wakeup_watcher = gevent.spawn(self.wakeupWatcher)

    ConnectionServer.start(self)  # Blocks until the server is stopped
    # thread_wakeup_watcher.kill(exception=Debug.Notify("Stopping FileServer"))
    # thread_announce_sites.kill(exception=Debug.Notify("Stopping FileServer"))
    self.log.debug("Stopped.")
def file_server(request):
    # Pytest fixture: a FileServer listening on 127.0.0.1:1544 for the test session
    CryptConnection.manager.loadCerts()  # Load and create certs
    request.addfinalizer(CryptConnection.manager.removeCerts)  # Remove cert files after end
    file_server = FileServer("127.0.0.1", 1544)
    gevent.spawn(lambda: ConnectionServer.start(file_server))
    time.sleep(0)  # Yield so the listener can open its port
    assert file_server.running

    def stop():
        file_server.stop()
    request.addfinalizer(stop)
    return file_server
def testRawConnection(self, file_server):
    # With every crypto disabled the connection still works, just unencrypted
    file_server.ip_incoming = {}  # Reset flood protection
    client = ConnectionServer("127.0.0.1", 1545)
    assert file_server != client  # Connect to myself

    # Remove all supported crypto
    crypt_supported_bk = CryptConnection.manager.crypt_supported
    CryptConnection.manager.crypt_supported = []

    connection = client.getConnection("127.0.0.1", 1544)
    assert len(file_server.connections) == 1
    assert not connection.crypt

    # Close connection
    connection.close()
    client.stop()
    time.sleep(0.01)  # Let the server notice the close
    assert len(file_server.connections) == 0

    # Reset supported crypts
    CryptConnection.manager.crypt_supported = crypt_supported_bk
def testGetFile(self, file_server, site):
    # Exercise getFile success paths and every rejection branch
    file_server.ip_incoming = {}  # Reset flood protection
    client = ConnectionServer("127.0.0.1", 1545)
    connection = client.getConnection("127.0.0.1", 1544)
    file_server.sites[site.address] = site

    # Normal request
    response = connection.request("getFile", {"site": site.address, "inner_path": "content.json", "location": 0})
    assert "sign" in response["body"]

    # Normal request with size check
    response = connection.request("getFile", {
        "site": site.address, "inner_path": "content.json", "location": 0,
        "file_size": site.storage.getSize("content.json")
    })
    assert "sign" in response["body"]

    # Invalid file
    response = connection.request("getFile", {"site": site.address, "inner_path": "invalid.file", "location": 0})
    assert "File read error" in response["error"]

    # Location over size
    response = connection.request("getFile", {"site": site.address, "inner_path": "content.json", "location": 1024 * 1024})
    assert "File read error" in response["error"]

    # Stream from parent dir
    response = connection.request("getFile", {"site": site.address, "inner_path": "../users.json", "location": 0})
    assert "File read error" in response["error"]

    # Invalid site
    response = connection.request("getFile", {"site": "", "inner_path": "users.json", "location": 0})
    assert "Unknown site" in response["error"]

    response = connection.request("getFile", {"site": ".", "inner_path": "users.json", "location": 0})
    assert "Unknown site" in response["error"]

    # Invalid size
    response = connection.request("getFile", {
        "site": site.address, "inner_path": "content.json", "location": 0, "file_size": 1234
    })
    assert "File size does not match" in response["error"]

    connection.close()
    client.stop()
def file_server(request): request.addfinalizer(CryptConnection.manager.removeCerts) # Remove cert files after end file_server = FileServer("127.0.0.1", 1544) gevent.spawn(lambda: ConnectionServer.start(file_server)) # Wait for port opening for retry in range(10): time.sleep(0.1) # Port opening try: conn = file_server.getConnection("127.0.0.1", 1544) conn.close() break except Exception, err: print err
def testRawConnection(self, file_server): file_server.ip_incoming = {} # Reset flood protection client = ConnectionServer("127.0.0.1", 1545) assert file_server != client # Remove all supported crypto crypt_supported_bk = CryptConnection.manager.crypt_supported CryptConnection.manager.crypt_supported = [] print "---" with mock.patch('Config.config.ip_local', return_value=[]): # SSL not used for local ips connection = client.getConnection("127.0.0.1", 1544) assert len(file_server.connections) == 1 assert not connection.crypt # Close connection connection.close() client.stop() time.sleep(0.01) assert len(file_server.connections) == 0 # Reset supported crypts CryptConnection.manager.crypt_supported = crypt_supported_bk
def testGetFile(self, file_server, site):
    # getFile happy path plus the three rejection branches (old error strings)
    file_server.ip_incoming = {}  # Reset flood protection
    client = ConnectionServer("127.0.0.1", 1545)
    connection = client.getConnection("127.0.0.1", 1544)
    file_server.sites[site.address] = site

    response = connection.request("getFile", {"site": site.address, "inner_path": "content.json", "location": 0})
    assert "sign" in response["body"]

    # Invalid file
    response = connection.request("getFile", {"site": site.address, "inner_path": "invalid.file", "location": 0})
    assert "No such file or directory" in response["error"]

    # Location over size
    response = connection.request("getFile", {"site": site.address, "inner_path": "content.json", "location": 1024 * 1024})
    assert "File read error" in response["error"]

    # Stream from parent dir
    response = connection.request("getFile", {"site": site.address, "inner_path": "../users.json", "location": 0})
    assert "File not allowed" in response["error"]

    connection.close()
    client.stop()
def testGetConnection(self, file_server):
    # getConnection must deduplicate by ip/port and resolve by peer_id
    file_server.ip_incoming = {}  # Reset flood protection
    client = ConnectionServer("127.0.0.1", 1545)
    connection = client.getConnection("127.0.0.1", 1544)

    # Get connection by ip/port: same object back
    connection2 = client.getConnection("127.0.0.1", 1544)
    assert connection == connection2

    # Get connection by peerid
    assert not client.getConnection("127.0.0.1", 1544, peer_id="notexists", create=False)
    connection2 = client.getConnection("127.0.0.1", 1544, peer_id=connection.handshake["peer_id"], create=False)
    assert connection2 == connection

    connection.close()
    client.stop()
def peerGetFile(self, peer_ip, peer_port, site, filename, benchmark=False): logging.info("Opening a simple connection server") global file_server from Connection import ConnectionServer file_server = ConnectionServer("127.0.0.1", 1234) from Crypt import CryptConnection CryptConnection.manager.loadCerts() from Peer import Peer logging.info("Getting %s/%s from peer: %s:%s..." % (site, filename, peer_ip, peer_port)) peer = Peer(peer_ip, peer_port) s = time.time() if benchmark: for i in range(10): peer.getFile(site, filename), print "Response time: %.3fs" % (time.time() - s) raw_input("Check memory") else: print peer.getFile(site, filename).read()
def siteNeedFile(self, address, inner_path): from Site import Site from Site import SiteManager SiteManager.site_manager.load() def checker(): while 1: s = time.time() time.sleep(1) print "Switch time:", time.time() - s gevent.spawn(checker) logging.info("Opening a simple connection server") global file_server from Connection import ConnectionServer file_server = ConnectionServer("127.0.0.1", 1234) site = Site(address) site.announce() print site.needFile(inner_path, update=True)
def testDownload(self, file_server, site, site_temp):
    # Full site download: verify request ordering (priorities) and file integrity
    file_server.ip_incoming = {}  # Reset flood protection
    assert site.storage.directory == config.data_dir + "/" + site.address
    assert site_temp.storage.directory == config.data_dir + "-temp/" + site.address

    # Init source server
    site.connection_server = file_server
    file_server.sites[site.address] = site

    # Init client server
    client = ConnectionServer("127.0.0.1", 1545)
    site_temp.connection_server = client
    site_temp.announce = mock.MagicMock(return_value=True)  # Don't try to find peers from the net
    site_temp.addPeer("127.0.0.1", 1544)

    with Spy.Spy(FileRequest, "route") as requests:
        def boostRequest(inner_path):
            # I really want these files
            if inner_path == "index.html":
                site_temp.needFile("data/img/multiuser.png", priority=5, blocking=False)
                site_temp.needFile("data/img/direct_domains.png", priority=5, blocking=False)
        site_temp.onFileDone.append(boostRequest)
        site_temp.download(blind_includes=True).join(timeout=5)
        file_requests = [request[2]["inner_path"] for request in requests if request[0] in ("getFile", "streamFile")]

    # Test priority
    assert file_requests[0:2] == ["content.json", "index.html"]  # Must-have files
    assert file_requests[2:4] == ["css/all.css", "js/all.js"]  # Important assets
    assert file_requests[4] == "dbschema.json"  # Database map
    assert file_requests[5:7] == ["data/img/multiuser.png", "data/img/direct_domains.png"]  # Directly requested files
    assert "-default" in file_requests[-1]  # Put default files for cloning to the end

    # Check files
    bad_files = site_temp.storage.verifyFiles(quick_check=True)

    # -1 because data/users/1J6... user has invalid cert
    assert len(site_temp.content_manager.contents) == len(site.content_manager.contents) - 1
    assert not bad_files

    assert site_temp.storage.deleteFiles()
    [connection.close() for connection in file_server.connections]
def testDownloadStats(self, file_server, site, site_temp):
    # optional_downloaded must grow once per downloaded piece, never twice
    inner_path = self.createBigfile(site)

    # Init source server
    site.connection_server = file_server
    file_server.sites[site.address] = site

    # Init client server
    client = ConnectionServer("127.0.0.1", 1545)
    site_temp.connection_server = client
    site_temp.addPeer("127.0.0.1", 1544)

    # Download site
    site_temp.download(blind_includes=True).join(timeout=5)

    # Open virtual file
    assert not site_temp.storage.isFile(inner_path)

    # Check size before downloads
    assert site_temp.settings["size"] < 10 * 1024 * 1024
    assert site_temp.settings["optional_downloaded"] == 0

    size_piecemap = site_temp.content_manager.getFileInfo(inner_path + ".piecemap.msgpack")["size"]
    size_bigfile = site_temp.content_manager.getFileInfo(inner_path)["size"]

    with site_temp.storage.openBigfile(inner_path) as f:
        assert not "\0" in f.read(1024)
        assert site_temp.settings["optional_downloaded"] == size_piecemap + size_bigfile

    with site_temp.storage.openBigfile(inner_path) as f:
        # Don't count twice
        assert not "\0" in f.read(1024)
        assert site_temp.settings["optional_downloaded"] == size_piecemap + size_bigfile

        # Add second block
        assert not "\0" in f.read(1024 * 1024)
        assert site_temp.settings["optional_downloaded"] == size_piecemap + size_bigfile
def peerCmd(self, peer_ip, peer_port, cmd, parameters): logging.info("Opening a simple connection server") global file_server from Connection import ConnectionServer file_server = ConnectionServer() from Crypt import CryptConnection CryptConnection.manager.loadCerts() from Peer import Peer peer = Peer(peer_ip, peer_port) import json if parameters: parameters = json.loads(parameters.replace("'", '"')) else: parameters = {} try: res = peer.request(cmd, parameters) print json.dumps(res, indent=2, ensure_ascii=False) except Exception, err: print "Unknown response (%s): %s" % (err, res)
def testOptionalDownload(self, file_server, site, site_temp):
    # Optional files are skipped by the site download and fetched only on demand
    # Init source server
    site.connection_server = file_server
    file_server.sites[site.address] = site

    # Init client server
    client = ConnectionServer(file_server.ip, 1545)
    site_temp.connection_server = client
    site_temp.announce = mock.MagicMock(return_value=True)  # Don't try to find peers from the net
    site_temp.addPeer(file_server.ip, 1544)

    # Download site
    assert site_temp.download(blind_includes=True, retry_bad_files=False).get(timeout=10)

    # Download optional data/optional.txt
    site.storage.verifyFiles(quick_check=True)  # Find what optional files we have
    optional_file_info = site_temp.content_manager.getFileInfo("data/optional.txt")
    assert site.content_manager.hashfield.hasHash(optional_file_info["sha512"])
    assert not site_temp.content_manager.hashfield.hasHash(optional_file_info["sha512"])

    assert not site_temp.storage.isFile("data/optional.txt")
    assert site.storage.isFile("data/optional.txt")
    site_temp.needFile("data/optional.txt")
    assert site_temp.storage.isFile("data/optional.txt")

    # Optional user file
    assert not site_temp.storage.isFile("data/users/1CjfbrbwtP8Y2QjPy12vpTATkUT7oSiPQ9/peanut-butter-jelly-time.gif")
    optional_file_info = site_temp.content_manager.getFileInfo(
        "data/users/1CjfbrbwtP8Y2QjPy12vpTATkUT7oSiPQ9/peanut-butter-jelly-time.gif"
    )
    assert site.content_manager.hashfield.hasHash(optional_file_info["sha512"])
    assert not site_temp.content_manager.hashfield.hasHash(optional_file_info["sha512"])

    site_temp.needFile("data/users/1CjfbrbwtP8Y2QjPy12vpTATkUT7oSiPQ9/peanut-butter-jelly-time.gif")
    assert site_temp.storage.isFile("data/users/1CjfbrbwtP8Y2QjPy12vpTATkUT7oSiPQ9/peanut-butter-jelly-time.gif")
    assert site_temp.content_manager.hashfield.hasHash(optional_file_info["sha512"])

    assert site_temp.storage.deleteFiles()
    [connection.close() for connection in file_server.connections]
def testFindOptional(self, file_server, site, site_temp): file_server.ip_incoming = {} # Reset flood protection # Init source server site.connection_server = file_server file_server.sites[site.address] = site # Init full source server (has optional files) site_full = Site("1TeSTvb4w2PWE81S2rEELgmX2GCCExQGT") file_server_full = FileServer("127.0.0.1", 1546) site_full.connection_server = file_server_full gevent.spawn(lambda: ConnectionServer.start(file_server_full)) time.sleep(0) # Port opening file_server_full.sites[site_full.address] = site_full # Add site site_full.storage.verifyFiles(quick_check=True) # Check optional files site_full_peer = site.addPeer("127.0.0.1", 1546) # Add it to source server assert site_full_peer.updateHashfield() # Update hashfield # Init client server site_temp.connection_server = ConnectionServer("127.0.0.1", 1545) site_temp.announce = mock.MagicMock(return_value=True) # Don't try to find peers from the net site_temp.addPeer("127.0.0.1", 1544) # Add source server # Download normal files site_temp.download(blind_includes=True).join(timeout=5) # Download optional data/optional.txt optional_file_info = site_temp.content_manager.getFileInfo("data/optional.txt") assert not site_temp.storage.isFile("data/optional.txt") assert not site.content_manager.hashfield.hasHash(optional_file_info["sha512"]) # Source server don't know he has the file assert site_full_peer.hashfield.hasHash(optional_file_info["sha512"]) # Source full peer on source server has the file assert site_full.content_manager.hashfield.hasHash(optional_file_info["sha512"]) # Source full server he has the file with Spy.Spy(FileRequest, "route") as requests: site_temp.needFile("data/optional.txt") print requests assert site_temp.storage.deleteFiles() file_server_full.stop() [connection.close() for connection in file_server.connections]
def peerPing(self, peer_ip, peer_port=None): if not peer_port: peer_port = config.fileserver_port logging.info("Opening a simple connection server") global file_server from Connection import ConnectionServer file_server = ConnectionServer("127.0.0.1", 1234) from Crypt import CryptConnection CryptConnection.manager.loadCerts() from Peer import Peer logging.info("Pinging 5 times peer: %s:%s..." % (peer_ip, int(peer_port))) peer = Peer(peer_ip, peer_port) for i in range(5): print "Response time: %.3fs (crypt: %s)" % (peer.ping(), peer.connection.crypt) time.sleep(1) peer.remove() print "Reconnect test..." peer = Peer(peer_ip, peer_port) for i in range(5): print "Response time: %.3fs (crypt: %s)" % (peer.ping(), peer.connection.crypt) time.sleep(1)
def testFileSize(self, file_server, site, site_temp):
    # Sparse bigfile: disk usage grows per downloaded block, full after last block
    inner_path = self.createBigfile(site)

    # Init source server
    site.connection_server = file_server
    file_server.sites[site.address] = site

    # Init client server
    client = ConnectionServer(file_server.ip, 1545)
    site_temp.connection_server = client
    site_temp.addPeer(file_server.ip, 1544)

    # Download site
    site_temp.download(blind_includes=True).join(timeout=5)

    # Open virtual file
    assert not site_temp.storage.isFile(inner_path)

    # Download first block
    site_temp.needFile("%s|%s-%s" % (inner_path, 0 * 1024 * 1024, 1 * 1024 * 1024))
    assert site_temp.storage.getSize(inner_path) < 1000 * 1000 * 10  # Size on the disk should be smaller than the real size

    site_temp.needFile("%s|%s-%s" % (inner_path, 9 * 1024 * 1024, 10 * 1024 * 1024))
    assert site_temp.storage.getSize(inner_path) == site.storage.getSize(inner_path)
def testFloodProtection(self, file_server):
    # Three reconnects succeed within a minute; the fourth must hang (flood limit)
    file_server.ip_incoming = {}  # Reset flood protection
    client = ConnectionServer("127.0.0.1", 1545)

    # Only allow 3 connection in 1 minute
    for reconnect in range(3):
        connection = client.getConnection("127.0.0.1", 1544)
        assert connection.handshake
        connection.close()

    # The 4. one will timeout
    with pytest.raises(gevent.Timeout):
        with gevent.Timeout(0.1):
            connection = client.getConnection("127.0.0.1", 1544)
def testGetFile(self, file_server, site):
    # Exercise getFile success paths and every rejection branch
    file_server.ip_incoming = {}  # Reset flood protection
    client = ConnectionServer("127.0.0.1", 1545)
    connection = client.getConnection("127.0.0.1", 1544)
    file_server.sites[site.address] = site

    # Normal request
    response = connection.request("getFile", {"site": site.address, "inner_path": "content.json", "location": 0})
    assert "sign" in response["body"]

    # Normal request with size check
    response = connection.request("getFile", {
        "site": site.address, "inner_path": "content.json", "location": 0,
        "file_size": site.storage.getSize("content.json")
    })
    assert "sign" in response["body"]

    # Invalid file
    response = connection.request("getFile", {"site": site.address, "inner_path": "invalid.file", "location": 0})
    assert "File read error" in response["error"]

    # Location over size
    response = connection.request("getFile", {"site": site.address, "inner_path": "content.json", "location": 1024 * 1024})
    assert "File read error" in response["error"]

    # Stream from parent dir
    response = connection.request("getFile", {"site": site.address, "inner_path": "../users.json", "location": 0})
    assert "File read error" in response["error"]

    # Invalid site
    response = connection.request("getFile", {"site": "", "inner_path": "users.json", "location": 0})
    assert "Unknown site" in response["error"]

    response = connection.request("getFile", {"site": ".", "inner_path": "users.json", "location": 0})
    assert "Unknown site" in response["error"]

    # Invalid size
    response = connection.request("getFile", {
        "site": site.address, "inner_path": "content.json", "location": 0, "file_size": 1234
    })
    assert "File size does not match" in response["error"]

    connection.close()
    client.stop()
def start(self, check_sites=True): self.sites = SiteManager.site_manager.list() self.log = logging.getLogger("FileServer") if config.debug: # Auto reload FileRequest on change from Debug import DebugReloader DebugReloader(self.reload) if check_sites: # Open port, Update sites, Check files integrity gevent.spawn(self.checkSites) thread_announce_sites = gevent.spawn(self.announceSites) thread_cleanup_sites = gevent.spawn(self.cleanupSites) thread_wakeup_watcher = gevent.spawn(self.wakeupWatcher) ConnectionServer.start(self) self.log.debug("Stopped.") def stop(self): if self.running and self.upnp_port_opened: self.log.debug('Closing port %d' % self.port) try: UpnpPunch.ask_to_close_port(self.port, protos=["TCP"]) self.log.info('Closed port via upnp.') except (UpnpPunch.UpnpError, UpnpPunch.IGDError), err: self.log.info( "Failed at attempt to use upnp to close port: %s" % err) ConnectionServer.stop(self)
def start(self, check_sites=True): ConnectionServer.start(self) self.sites = self.site_manager.list() if config.debug: # Auto reload FileRequest on change from Debug import DebugReloader DebugReloader(self.reload) if check_sites: # Open port, Update sites, Check files integrity gevent.spawn(self.checkSites) thread_announce_sites = gevent.spawn(self.announceSites) thread_cleanup_sites = gevent.spawn(self.cleanupSites) thread_wakeup_watcher = gevent.spawn(self.wakeupWatcher) ConnectionServer.listen(self) self.log.debug("Stopped.") def stop(self): if self.running and self.upnp_port_opened: self.log.debug('Closing port %d' % self.port) try: UpnpPunch.ask_to_close_port(self.port, protos=["TCP"]) self.log.info('Closed port via upnp.') except (UpnpPunch.UpnpError, UpnpPunch.IGDError), err: self.log.info( "Failed at attempt to use upnp to close port: %s" % err) return ConnectionServer.stop(self)
# Bind and start serving sites def start(self, check_sites=True): self.sites = self.site_manager.list() if config.debug: # Auto reload FileRequest on change from Debug import DebugReloader DebugReloader(self.reload) if check_sites: # Open port, Update sites, Check files integrity gevent.spawn(self.checkSites) thread_announce_sites = gevent.spawn(self.announceSites) thread_cleanup_sites = gevent.spawn(self.cleanupSites) thread_wakeup_watcher = gevent.spawn(self.wakeupWatcher) ConnectionServer.start(self) self.log.debug("Stopped.") def stop(self): if self.running and self.upnp_port_opened: self.log.debug('Closing port %d' % self.port) try: UpnpPunch.ask_to_close_port(self.port, protos=["TCP"]) self.log.info('Closed port via upnp.') except (UpnpPunch.UpnpError, UpnpPunch.IGDError), err: self.log.info("Failed at attempt to use upnp to close port: %s" % err) return ConnectionServer.stop(self)
# Bind and start serving sites def start(self, check_sites=True): self.sites = SiteManager.site_manager.list() self.log = logging.getLogger("FileServer") if config.debug: # Auto reload FileRequest on change from Debug import DebugReloader DebugReloader(self.reload) if check_sites: # Open port, Update sites, Check files integrity gevent.spawn(self.checkSites) thread_announce_sites = gevent.spawn(self.announceSites) thread_wakeup_watcher = gevent.spawn(self.wakeupWatcher) ConnectionServer.start(self) self.log.debug("Stopped.") def stop(self): if self.running and self.upnp_port_opened: self.log.debug('Closing port %d' % self.port) try: UpnpPunch.ask_to_close_port(self.port, protos=["TCP"]) self.log.info('Closed port via upnp.') except (UpnpPunch.UpnpError, UpnpPunch.IGDError), err: self.log.info("Failed at attempt to use upnp to close port: %s" % err) ConnectionServer.stop(self)
def testOpenBigfile(self, file_server, site, site_temp):
    # Reading a virtual bigfile downloads only the touched pieces, exactly once
    inner_path = self.createBigfile(site)

    # Init source server
    site.connection_server = file_server
    file_server.sites[site.address] = site

    # Init client server
    client = ConnectionServer(file_server.ip, 1545)
    site_temp.connection_server = client
    site_temp.addPeer(file_server.ip, 1544)

    # Download site
    site_temp.download(blind_includes=True, retry_bad_files=False).join(timeout=10)

    # Open virtual file
    assert not site_temp.storage.isFile(inner_path)

    with site_temp.storage.openBigfile(inner_path) as f:
        with Spy.Spy(FileRequest, "route") as requests:
            f.seek(5 * 1024 * 1024)
            assert f.read(7) == b"Test524"
            f.seek(9 * 1024 * 1024)
            assert f.read(7) == b"943---T"

        assert len(requests) == 4  # 1x piecemap + 1x getpiecefield + 2x for pieces

        assert set(site_temp.content_manager.hashfield) == set([18343, 43727])

        assert site_temp.storage.piecefields[f.sha512].tostring() == "0000010001"
        assert f.sha512 in site_temp.getSettingsCache()["piecefields"]

        # Test requesting already downloaded
        with Spy.Spy(FileRequest, "route") as requests:
            f.seek(5 * 1024 * 1024)
            assert f.read(7) == b"Test524"

        assert len(requests) == 0

        # Test requesting multi-block overflow reads
        with Spy.Spy(FileRequest, "route") as requests:
            f.seek(5 * 1024 * 1024)  # We already have this block
            data = f.read(1024 * 1024 * 3)  # Our read overflows to the 6. and 7. block
            assert data.startswith(b"Test524")
            assert data.endswith(b"Test838-")
            assert b"\0" not in data  # No null bytes allowed

        assert len(requests) == 2  # Two block download

        # Test out of range request
        f.seek(5 * 1024 * 1024)
        data = f.read(1024 * 1024 * 30)
        assert len(data) == 10 * 1000 * 1000 - (5 * 1024 * 1024)

        f.seek(30 * 1024 * 1024)
        data = f.read(1024 * 1024 * 30)
        assert len(data) == 0
def testGetFile(self, file_server, site):
    # getFile success paths, rejection branches and path-traversal variations
    file_server.ip_incoming = {}  # Reset flood protection
    client = ConnectionServer(file_server.ip, 1545)
    connection = client.getConnection(file_server.ip, 1544)
    file_server.sites[site.address] = site

    # Normal request
    response = connection.request("getFile", {"site": site.address, "inner_path": "content.json", "location": 0})
    assert b"sign" in response["body"]

    # Normal request with size check
    response = connection.request("getFile", {
        "site": site.address, "inner_path": "content.json", "location": 0,
        "file_size": site.storage.getSize("content.json")
    })
    assert b"sign" in response["body"]

    # Invalid file
    response = connection.request("getFile", {"site": site.address, "inner_path": "invalid.file", "location": 0})
    assert "File read error" in response["error"]

    # Location over size
    response = connection.request("getFile", {"site": site.address, "inner_path": "content.json", "location": 1024 * 1024})
    assert "File read error" in response["error"]

    # Stream from parent dir
    response = connection.request("getFile", {"site": site.address, "inner_path": "../users.json", "location": 0})
    assert "File read exception" in response["error"]

    # Invalid site
    response = connection.request("getFile", {"site": "", "inner_path": "users.json", "location": 0})
    assert "Unknown site" in response["error"]

    response = connection.request("getFile", {"site": ".", "inner_path": "users.json", "location": 0})
    assert "Unknown site" in response["error"]

    # Invalid size
    response = connection.request("getFile", {
        "site": site.address, "inner_path": "content.json", "location": 0, "file_size": 1234
    })
    assert "File size does not match" in response["error"]

    # Invalid path: every traversal spelling with both separators must be rejected
    for path in ["../users.json", "./../users.json", "data/../content.json", ".../users.json"]:
        for sep in ["/", "\\"]:
            response = connection.request("getFile", {
                "site": site.address, "inner_path": path.replace("/", sep), "location": 0
            })
            assert response["error"] == 'File read exception'

    connection.close()
    client.stop()
def listen():
    """Start the server, then enter its accept loop.

    Calls the ConnectionServer implementations directly on ``file_server6``
    (unbound, with the server passed explicitly as the instance).
    """
    ConnectionServer.start(file_server6)
    ConnectionServer.listen(file_server6)
def testFindOptional(self, file_server, site, site_temp):
    """Optional-file discovery: findHashIds is issued exactly once while two
    optional files are requested concurrently, and both files arrive."""
    file_server.ip_incoming = {}  # Clear flood-protection counters
    # Source server serving the site
    site.connection_server = file_server
    file_server.sites[site.address] = site

    # A second, "full" source server that actually owns the optional files
    site_full = Site("1TeSTvb4w2PWE81S2rEELgmX2GCCExQGT")
    file_server_full = FileServer("127.0.0.1", 1546)
    site_full.connection_server = file_server_full
    gevent.spawn(lambda: ConnectionServer.start(file_server_full))
    time.sleep(0.001)  # Port opening
    file_server_full.sites[site_full.address] = site_full  # Add site
    site_full.storage.verifyFiles(quick_check=True)  # Check optional files
    site_full_peer = site.addPeer("127.0.0.1", 1546)  # Register it as a peer of the source server
    assert site_full_peer.updateHashfield()  # Update hashfield

    # Client-side connection server
    site_temp.connection_server = ConnectionServer("127.0.0.1", 1545)
    site_temp.announce = mock.MagicMock(return_value=True)  # Don't try to find peers from the net
    site_temp.addPeer("127.0.0.1", 1544)  # Add source server

    # Pull the normal (non-optional) files first
    site_temp.download(blind_includes=True).join(timeout=5)

    # data/optional.txt is known from content.json but not downloaded yet
    optional_file_info = site_temp.content_manager.getFileInfo("data/optional.txt")
    assert not site_temp.storage.isFile("data/optional.txt")
    # The plain source server does not advertise the optional file itself...
    assert not site.content_manager.hashfield.hasHash(optional_file_info["sha512"])
    # ...but its registered full peer does, and so does the full server
    assert site_full_peer.hashfield.hasHash(optional_file_info["sha512"])
    assert site_full.content_manager.hashfield.hasHash(optional_file_info["sha512"])

    with Spy.Spy(FileRequest, "route") as routed:
        # Request both optional files at the same time
        tasks = [
            site_temp.needFile("data/optional.txt", blocking=False),
            site_temp.needFile("data/users/1CjfbrbwtP8Y2QjPy12vpTATkUT7oSiPQ9/peanut-butter-jelly-time.gif", blocking=False),
        ]
        gevent.joinall(tasks)

    # findHashIds should have been issued only once for both files
    assert len([call for call in routed if call[0] == "findHashIds"]) == 1

    assert site_temp.storage.isFile("data/optional.txt")
    assert site_temp.storage.isFile("data/users/1CjfbrbwtP8Y2QjPy12vpTATkUT7oSiPQ9/peanut-butter-jelly-time.gif")

    assert site_temp.storage.deleteFiles()
    file_server_full.stop()
    for connection in file_server.connections:
        connection.close()
def listen():
    """Start the server, then enter its accept loop.

    Calls the ConnectionServer implementations directly on
    ``file_server_full`` (unbound, with the server passed explicitly as
    the instance).
    """
    ConnectionServer.start(file_server_full)
    ConnectionServer.listen(file_server_full)
def testFindOptional(self, file_server, site, site_temp):
    """Optional-file discovery via a full peer: the plain source server has its
    hashfield emptied, so both optional files must be located through
    findHashIds (issued exactly once) and fetched from the full server."""
    # Source server serving the site
    site.connection_server = file_server
    file_server.sites[site.address] = site

    # A second, "full" source server that actually owns the optional files
    site_full = Site("1TeSTvb4w2PWE81S2rEELgmX2GCCExQGT")
    file_server_full = FileServer(file_server.ip, 1546)
    site_full.connection_server = file_server_full

    def listen():
        # Run the base-class implementations on the full server
        ConnectionServer.start(file_server_full)
        ConnectionServer.listen(file_server_full)

    gevent.spawn(listen)
    time.sleep(0.001)  # Port opening
    file_server_full.sites[site_full.address] = site_full  # Add site
    site_full.storage.verifyFiles(quick_check=True)  # Check optional files
    site_full_peer = site.addPeer(file_server.ip, 1546)  # Register it as a peer of the source server
    hashfield = site_full_peer.updateHashfield()  # Update hashfield

    # The full server knows and stores all 8 optional hashes
    assert len(site_full.content_manager.hashfield) == 8
    assert hashfield
    assert site_full.storage.isFile("data/optional.txt")
    assert site_full.storage.isFile("data/users/1CjfbrbwtP8Y2QjPy12vpTATkUT7oSiPQ9/peanut-butter-jelly-time.gif")
    assert len(site_full_peer.hashfield) == 8

    # Strip every optional hash from the plain source server so discovery
    # has to go through the full peer
    for optional_hash in list(site.content_manager.hashfield):
        site.content_manager.hashfield.remove(optional_hash)

    # Client-side connection server
    site_temp.connection_server = ConnectionServer(file_server.ip, 1545)
    site_temp.addPeer(file_server.ip, 1544)  # Add source server

    # Pull the normal (non-optional) files first
    site_temp.log.info("Start Downloading site")
    site_temp.download(blind_includes=True).join(timeout=5)

    # Both optional files are known from content.json but not downloaded yet
    optional_file_info = site_temp.content_manager.getFileInfo("data/optional.txt")
    optional_file_info2 = site_temp.content_manager.getFileInfo("data/users/1CjfbrbwtP8Y2QjPy12vpTATkUT7oSiPQ9/peanut-butter-jelly-time.gif")
    assert not site_temp.storage.isFile("data/optional.txt")
    assert not site_temp.storage.isFile("data/users/1CjfbrbwtP8Y2QjPy12vpTATkUT7oSiPQ9/peanut-butter-jelly-time.gif")
    # The plain source server no longer advertises either optional file...
    assert not site.content_manager.hashfield.hasHash(optional_file_info["sha512"])
    assert not site.content_manager.hashfield.hasHash(optional_file_info2["sha512"])
    # ...but its registered full peer does, and so does the full server
    assert site_full_peer.hashfield.hasHash(optional_file_info["sha512"])
    assert site_full_peer.hashfield.hasHash(optional_file_info2["sha512"])
    assert site_full.content_manager.hashfield.hasHash(optional_file_info["sha512"])
    assert site_full.content_manager.hashfield.hasHash(optional_file_info2["sha512"])

    site_temp.log.info("Request optional files")
    with Spy.Spy(FileRequest, "route") as routed:
        # Request both optional files at the same time
        tasks = [
            site_temp.needFile("data/optional.txt", blocking=False),
            site_temp.needFile("data/users/1CjfbrbwtP8Y2QjPy12vpTATkUT7oSiPQ9/peanut-butter-jelly-time.gif", blocking=False),
        ]
        gevent.joinall(tasks)

        # findHashIds should have been issued only once for both files
        assert len([call for call in routed if call[1] == "findHashIds"]) == 1

    assert site_temp.storage.isFile("data/optional.txt")
    assert site_temp.storage.isFile("data/users/1CjfbrbwtP8Y2QjPy12vpTATkUT7oSiPQ9/peanut-butter-jelly-time.gif")

    assert site_temp.storage.deleteFiles()
    file_server_full.stop()
    for connection in file_server.connections:
        connection.close()
    site_full.content_manager.contents.db.close()