def __init__(self, max_peers: int, name: str, bcast_ip: str = "255.255.255.255"):
    """Set up the node: start listener/broadcast threads, register message
    handlers on the Peer, and create the watch/download directories.

    :param max_peers: maximum number of peers to track (coerced to int)
    :param name: this node's name, passed to the underlying Peer
    :param bcast_ip: broadcast address used for peer discovery
    """
    self.peer = Peer(name, bcast_ip)
    self.max_peers = int(max_peers)
    self.threads = []
    self.threads += [Thread(target=self.peer.peer_listen)]
    self.threads += [Thread(target=self.peer.receive_bcast)]
    # Daemon threads so they don't block interpreter shutdown.
    for thread in self.threads:
        thread.daemon = True
        thread.start()
    # Map of protocol message type -> bound handler method.
    self.handlers = {
        PONG: self.handle_insert_peer,
        QUERYFILELIST: self.handle_query_file_list,
        REPLYFILELIST: self.handle_reply_file_list,
        GETFILE: self.handle_get_file,
        PING: self.handle_ping,
        ERROR: self.handle_error,
        REPLY: self.handle_reply
    }
    for message_type in self.handlers:
        self.peer.add_handlers(message_type, self.handlers[message_type])
    self.files_available = {}   # files announced by remote peers
    self.local_files = []       # files we serve from the watch directory
    self.no_of_peers = 0
    self.home_path = Path.home()
    # if Path.is_dir(self.home_path / 'BanyanWatchDirectory'):
    self.watch_directory = self.home_path / 'BanyanWatchDirectory'
    self.download_directory = self.home_path / 'BanyanDownloads'
    # exist_ok: directories may survive from a previous run.
    Path.mkdir(self.watch_directory, exist_ok=True)
    Path.mkdir(self.download_directory, exist_ok=True)
def peerPing(self, peer_ip, peer_port=None):
    """Ping a peer 5 times, then reconnect and ping 5 more times (Python 2 CLI action).

    Prints each ping result with round-trip time and the connection's crypt mode.
    NOTE(review): the ConnectionServer is created but never started here —
    compare with the variant that calls file_server.start(); confirm intended.
    """
    if not peer_port:
        peer_port = config.fileserver_port
    logging.info("Opening a simple connection server")
    global file_server
    from Connection import ConnectionServer
    file_server = ConnectionServer("127.0.0.1", 1234)
    from Crypt import CryptConnection
    CryptConnection.manager.loadCerts()
    from Peer import Peer
    logging.info("Pinging 5 times peer: %s:%s..." % (peer_ip, int(peer_port)))
    peer = Peer(peer_ip, peer_port)
    for i in range(5):
        s = time.time()
        print peer.ping(),
        print "Response time: %.3fs (crypt: %s)" % (time.time() - s, peer.connection.crypt)
        time.sleep(1)
    # Drop the connection and repeat to exercise the reconnect path.
    peer.remove()
    print "Reconnect test..."
    peer = Peer(peer_ip, peer_port)
    for i in range(5):
        s = time.time()
        print peer.ping(),
        print "Response time: %.3fs (crypt: %s)" % (time.time() - s, peer.connection.crypt)
        time.sleep(1)
def peerPing(self, peer_ip, peer_port=None):
    """Connect to a peer, report connection time, ping 5 times, then reconnect
    and ping 5 more times (Python 2 CLI action).

    Returns False on connection failure, otherwise None.
    """
    if not peer_port:
        peer_port = 15441  # default ZeroNet fileserver port
    logging.info("Opening a simple connection server")
    global file_server
    from Connection import ConnectionServer
    file_server = ConnectionServer("127.0.0.1", 1234)
    file_server.start(check_connections=False)
    from Crypt import CryptConnection
    CryptConnection.manager.loadCerts()
    from Peer import Peer
    logging.info("Pinging 5 times peer: %s:%s..." % (peer_ip, int(peer_port)))
    s = time.time()
    peer = Peer(peer_ip, peer_port)
    peer.connect()
    if not peer.connection:
        print "Error: Can't connect to peer (connection error: %s)" % peer.connection_error
        return False
    print "Connection time: %.3fs (connection error: %s)" % (
        time.time() - s,
        peer.connection_error)
    # peer.ping() returns the round-trip delay directly here.
    for i in range(5):
        print "Response time: %.3fs (crypt: %s)" % (peer.ping(), peer.connection.crypt)
        time.sleep(1)
    peer.remove()
    print "Reconnect test..."
    peer = Peer(peer_ip, peer_port)
    for i in range(5):
        print "Response time: %.3fs (crypt: %s)" % (peer.ping(), peer.connection.crypt)
        time.sleep(1)
def sitePublish(self, address, peer_ip=None, peer_port=15441, inner_path="content.json", diffs=None):
    """Publish a site's new content to its peers (CLI action).

    Starts a temporary FileServer if one is not already running; otherwise
    notifies the running local client via siteReload/sitePublish requests.

    :param address: site address to publish
    :param peer_ip: announce to this specific peer instead of the tracker
    :param peer_port: port for peer_ip
    :param inner_path: content file to publish
    :param diffs: optional dict of content diffs forwarded to publish
    """
    # Fix: original used a mutable default argument (diffs={}), which is
    # shared across calls; use a None sentinel instead.
    if diffs is None:
        diffs = {}
    global file_server
    from Site import SiteManager
    from File import FileServer  # We need fileserver to handle incoming file requests
    from Peer import Peer
    logging.info("Loading site...")
    site = SiteManager.site_manager.list()[address]
    site.settings["serving"] = True  # Serving the site even if its disabled
    logging.info("Creating FileServer....")
    file_server = FileServer()
    site.connection_server = file_server
    file_server_thread = gevent.spawn(file_server.start, check_sites=False)  # Dont check every site integrity
    time.sleep(0.001)
    if not file_server_thread.ready():  # Started fileserver
        file_server.openport()
        if peer_ip:  # Announce ip specificed
            site.addPeer(peer_ip, peer_port)
        else:  # Just ask the tracker
            logging.info("Gathering peers from tracker")
            site.announce()  # Gather peers
        published = site.publish(5, inner_path, diffs=diffs)  # Push to peers
        if published > 0:
            time.sleep(3)
            logging.info("Serving files (max 60s)...")
            gevent.joinall([file_server_thread], timeout=60)
            logging.info("Done.")
        else:
            logging.info(
                "No peers found, sitePublish command only works if you already have visitors serving your site"
            )
    else:  # Already running, notify local client on new content
        logging.info("Sending siteReload")
        my_peer = Peer("127.0.0.1", config.fileserver_port)
        logging.info(
            my_peer.request("siteReload", {
                "site": site.address,
                "inner_path": inner_path
            }))
        logging.info("Sending sitePublish")
        logging.info(
            my_peer.request("sitePublish", {
                "site": site.address,
                "inner_path": inner_path,
                "diffs": diffs
            }))
        logging.info("Done.")
def testBackwardCompatibility(self, file_server, bootstrapper_db):
    """The tracker must answer both the new "ipv4" and the legacy "ip4" need_types."""
    peer = Peer(file_server.ip, 1544, connection_server=file_server)
    hash1 = hashlib.sha256(b"site1").digest()

    bootstrapper_db.peerAnnounce("ipv4", file_server.ip_external, port=15441, hashes=[hash1], delete_missing_hashes=True)

    # Test with ipv4 need type
    res = peer.request(
        "announce", {
            "hashes": [hash1],
            "port": 15441,
            "need_types": ["ipv4"],
            "need_num": 10,
            "add": []
        })
    assert len(res["peers"][0]["ipv4"]) == 1

    # Test with legacy ip4 need type: same peer must be returned under the old key
    res = peer.request(
        "announce", {
            "hashes": [hash1],
            "port": 15441,
            "need_types": ["ip4"],
            "need_num": 10,
            "add": []
        })
    assert len(res["peers"][0]["ip4"]) == 1
def sitePublish(self, address, peer_ip=None, peer_port=15441, inner_path="content.json"):
    """Publish a site's new content to up to 20 peers (CLI action, older variant).

    Always spawns a FileServer; also notifies the local client via siteReload
    when an external IP is configured.
    """
    global file_server
    from Site import SiteManager
    from File import FileServer  # We need fileserver to handle incoming file requests
    from Peer import Peer
    logging.info("Creating FileServer....")
    file_server = FileServer()
    file_server_thread = gevent.spawn(file_server.start, check_sites=False)  # Dont check every site integrity
    file_server.openport()
    site = SiteManager.site_manager.list()[address]
    site.settings["serving"] = True  # Serving the site even if its disabled

    # Notify local client on new content
    if config.ip_external:
        logging.info("Sending siteReload")
        my_peer = Peer(config.ip_external, config.fileserver_port)
        logging.info(my_peer.request("siteReload", {"site": site.address, "inner_path": inner_path}))

    if peer_ip:  # Announce ip specificed
        site.addPeer(peer_ip, peer_port)
    else:  # Just ask the tracker
        logging.info("Gathering peers from tracker")
        site.announce()  # Gather peers
    published = site.publish(20, inner_path)  # Push to 20 peers
    if published > 0:
        time.sleep(3)
        logging.info("Serving files (max 60s)...")
        gevent.joinall([file_server_thread], timeout=60)
        logging.info("Done.")
    else:
        logging.info("No peers found, sitePublish command only works if you already have visitors serving your site")
def peerPing(ip, port):
    """Ping a peer 5 times and print each round-trip time (Python 2 CLI action)."""
    from Peer import Peer
    logging.info("Pinging 5 times peer: %s:%s..." % (ip, port))
    peer = Peer(ip, port)
    for i in range(5):
        s = time.time()
        print peer.ping(),
        print "Response time: %.3fs" % (time.time() - s)
        time.sleep(1)
def testPassive(self, file_server, bootstrapper_db):
    """A passive announce (no ip) must be stored but not returned to other peers."""
    peer = Peer("127.0.0.1", 1544, connection_server=file_server)
    hash1 = hashlib.sha256("hash1").digest()
    # ip4=None marks the announcing peer as passive (not directly connectable).
    bootstrapper_db.peerAnnounce(ip4=None, port=15441, hashes=[hash1])
    res = peer.request("announce", {
        "hashes": [hash1], "port": 15441, "need_types": ["ip4"], "need_num": 10, "add": []
    })

    assert len(res["peers"][0]["ip4"]) == 0  # Empty result
def peerGetFile(ip, port, site, filename=None):
    """Download one file from a peer and print its contents (Python 2 CLI action).

    Defaults to the configured homepage site and content.json when not given.
    """
    from Peer import Peer
    if not site:
        site = config.homepage
    if not filename:
        filename = "content.json"
    logging.info("Getting %s/%s from peer: %s:%s..." % (site, filename, ip, port))
    peer = Peer(ip, port)
    s = time.time()
    print peer.getFile(site, filename).read()
    print "Response time: %.3fs" % (time.time() - s)
def __receive_login(peer: Peer, command: bytes, message: bytes):
    """Handle a LOGIN message: authenticate an existing user or auto-create a new one.

    Message body is "<username><SEP><password>". On success sets peer.name and
    peer.logged_in and ACKs; on wrong password NAKs and clears logged_in.

    NOTE(review): bcrypt is called with a fixed, hard-coded salt, so equal
    passwords hash identically across users — confirm this is intentional.
    NOTE(review): splitting on SEPARATOR twice means a password containing the
    separator byte is truncated — verify against the client protocol.
    """
    if command == Protocol.Flags.LOGIN:
        username = message.split(bytes([Protocol.Flags.SEPARATOR]))[0].decode()
        passwd = message.split(bytes([Protocol.Flags.SEPARATOR]))[1].decode()
        # Extract just the hash part after the fixed salt ("..": salt suffix marker).
        hashed = str(
            hashpw(passwd.encode("utf-8"),
                   b"$2a$12$" + b"SZ4R4Z3G3SZ4DJ4LS0RT..")).split("..")[1][:-1]
        logging.info("LOGIN from \"" + username + "\"")
        peer_id = SQLModule.PeersSQLModule.get_id(username)
        # peer_id == -1 means unknown user -> create the account, then log in.
        if peer_id == -1 or hashed == SQLModule.PeersSQLModule.get_hashed_pwd(
                username):
            if peer_id == -1:
                SQLModule.PeersSQLModule.add_peer(username, hashed)
                logging.info("Account created for \"" + username + "\"")
                peer.send(
                    Protocol.server_message(
                        Protocol.ServerFlags.ACK,
                        "Account created for \"" + username + "\""))
            peer.name = username
            peer.logged_in = True
            logging.info("\"" + username + "\" has logged in succesfully")
            peer.send(
                Protocol.server_message(Protocol.ServerFlags.ACK,
                                        "Successful login"))
        else:
            logging.warning("\"" + username + "\" failed to log in")
            peer.send(
                Protocol.server_message(Protocol.ServerFlags.NAK,
                                        "Wrong password for this user"))
            peer.logged_in = False
def testWorkerManagerPiecefieldDownload(self, file_server, site, site_temp):
    """Each 1MB piece of a big file must be requested from the peer that owns that piece."""
    inner_path = self.createBigfile(site)

    server1 = file_server
    server1.sites[site.address] = site
    server2 = FileServer(file_server.ip, 1545)
    server2.sites[site_temp.address] = site_temp
    site_temp.connection_server = server2
    sha512 = site.content_manager.getFileInfo(inner_path)["sha512"]

    # Create 10 fake peers, peer i owning only piece i.
    for i in range(10):
        peer = Peer(file_server.ip, 1544, site_temp, server2)
        peer.piecefields[sha512][i] = "1"
        # Mock out side channels so only getFile traffic is observed.
        peer.updateHashfield = mock.MagicMock(return_value=False)
        peer.updatePiecefields = mock.MagicMock(return_value=False)
        peer.findHashIds = mock.MagicMock(return_value={"nope": []})
        peer.hashfield = site.content_manager.hashfield
        peer.has_hashfield = True
        peer.key = "Peer:%s" % i
        site_temp.peers["Peer:%s" % i] = peer

    site_temp.downloadContent("content.json", download_files=False)
    site_temp.needFile("data/optional.any.iso.piecemap.msgpack")

    with Spy.Spy(Peer, "getFile") as requests:
        for i in range(10):
            site_temp.needFile("%s|%s-%s" % (inner_path, i * 1024 * 1024, (i + 1) * 1024 * 1024))

    assert len(requests) == 10
    for i in range(10):
        assert requests[i][0] == site_temp.peers["Peer:%s" % i]  # Every part should be requested from piece owner peer
def testAnnounceList(self, file_server):
    """getTrackers must only share trackers that had at least one successful announce."""
    peer = Peer("127.0.0.1", 1544, connection_server=file_server)
    assert peer.request("getTrackers")["trackers"] == []

    tracker_storage = AnnounceSharePlugin.tracker_storage
    tracker_storage.onTrackerFound("zero://127.0.0.1:15441")
    # Merely discovered trackers are not shared yet.
    assert peer.request("getTrackers")["trackers"] == []

    # It needs to have at least one successfull announce to be shared to other peers
    tracker_storage.onTrackerSuccess("zero://127.0.0.1:15441", 1.0)
    assert peer.request("getTrackers")["trackers"] == ["zero://127.0.0.1:15441"]
def peerGetFile(self, peer_ip, peer_port, site, filename):
    """Download one file from a peer and print it with the response time (Python 2 CLI action)."""
    logging.info("Opening a simple connection server")
    global file_server
    from Connection import ConnectionServer
    file_server = ConnectionServer()
    from Peer import Peer
    logging.info("Getting %s/%s from peer: %s:%s..." % (site, filename, peer_ip, peer_port))
    peer = Peer(peer_ip, peer_port)
    s = time.time()
    print peer.getFile(site, filename).read()
    print "Response time: %.3fs" % (time.time()-s)
def peerGetFile(peer_ip, peer_port, site, filename):
    """Download one file from a peer and print it with the response time (Python 2 CLI action)."""
    logging.info("Opening a simple connection server")
    global file_server
    from Connection import ConnectionServer
    file_server = ConnectionServer()
    from Peer import Peer
    logging.info("Getting %s/%s from peer: %s:%s..." % (site, filename, peer_ip, peer_port))
    peer = Peer(peer_ip, peer_port)
    s = time.time()
    print peer.getFile(site, filename).read()
    print "Response time: %.3fs" % (time.time() - s)
def testAnnounceList(self, file_server):
    """getTrackers must only share trackers that had at least one successful announce."""
    peer = Peer("127.0.0.1", 1544, connection_server=file_server)
    assert peer.request("getTrackers")["trackers"] == []

    tracker_storage = AnnounceSharePlugin.tracker_storage
    tracker_storage.onTrackerFound("zero://127.0.0.1:15441")
    # Merely discovered trackers are not shared yet.
    assert peer.request("getTrackers")["trackers"] == []

    # It needs to have at least one successfull announce to be shared to other peers
    tracker_storage.onTrackerSuccess("zero://127.0.0.1:15441", 1.0)
    assert peer.request("getTrackers")["trackers"] == [
        "zero://127.0.0.1:15441"
    ]
def __init__(self, nnombre, npuerto, nlinks):
    """Initialize the node's identity, message queues, and one fixed Peer per link port."""
    self.nombre = nnombre
    self.puerto = npuerto
    self.server = None
    self._msg = queue.Queue()
    self._old = []
    # Pre-register each configured link as a fixed (non-discovered) peer.
    self.peers = []
    for link_port in nlinks:
        fixed_peer = Peer()
        fixed_peer.puerto = link_port
        fixed_peer.es_fijo = True
        self.peers.append(fixed_peer)
def testWorkerManagerPiecefieldDownload(self, file_server, site, site_temp):
    """Each 1MB piece of a big file must be requested from the peer that owns that piece."""
    inner_path = self.createBigfile(site)

    server1 = file_server
    server1.sites[site.address] = site
    server2 = FileServer("127.0.0.1", 1545)
    server2.sites[site_temp.address] = site_temp
    site_temp.connection_server = server2
    sha512 = site.content_manager.getFileInfo(inner_path)["sha512"]

    # Create 10 fake peers, peer i owning only piece i.
    for i in range(10):
        peer = Peer("127.0.0.1", 1544, site_temp, server2)
        peer.piecefields[sha512][i] = "1"
        # Mock out side channels so only getFile traffic is observed.
        peer.updateHashfield = mock.MagicMock(return_value=False)
        peer.updatePiecefields = mock.MagicMock(return_value=False)
        peer.findHashIds = mock.MagicMock(return_value={"nope": []})
        peer.hashfield = site.content_manager.hashfield
        peer.has_hashfield = True
        peer.key = "Peer:%s" % i
        site_temp.peers["Peer:%s" % i] = peer

    site_temp.downloadContent("content.json", download_files=False)
    site_temp.needFile("data/optional.any.iso.piecemap.msgpack")

    with Spy.Spy(Peer, "getFile") as requests:
        for i in range(10):
            site_temp.needFile("%s|%s-%s" % (inner_path, i * 1024 * 1024, (i + 1) * 1024 * 1024))

    assert len(requests) == 10
    for i in range(10):
        assert requests[i][0] == site_temp.peers["Peer:%s" % i]  # Every part should be requested from piece owner peer
def peerCmd(self, peer_ip, peer_port, cmd, parameters):
    """Send an arbitrary request to a peer and log its response (CLI action).

    :param parameters: JSON string of request arguments; single quotes are
        tolerated by rewriting them to double quotes before parsing.
    """
    logging.info("Opening a simple connection server")
    global file_server
    from Connection import ConnectionServer
    file_server = ConnectionServer()
    from Peer import Peer
    peer = Peer(peer_ip, peer_port)

    import json
    if parameters:
        parameters = json.loads(parameters.replace("'", '"'))
    else:
        parameters = {}
    logging.info("Response: %s" % peer.request(cmd, parameters))
def peerPing(peer_ip, peer_port):
    """Ping a peer 5 times and print each round-trip time (Python 2 CLI action)."""
    logging.info("Opening a simple connection server")
    global file_server
    from Connection import ConnectionServer
    file_server = ConnectionServer("127.0.0.1", 1234)
    from Peer import Peer
    logging.info("Pinging 5 times peer: %s:%s..." % (peer_ip, peer_port))
    peer = Peer(peer_ip, peer_port)
    for i in range(5):
        s = time.time()
        print peer.ping(),
        print "Response time: %.3fs" % (time.time() - s)
        time.sleep(1)
def peerPing(self, peer_ip, peer_port):
    """Ping a peer 5 times and print each round-trip time (Python 2 CLI action)."""
    logging.info("Opening a simple connection server")
    global file_server
    from Connection import ConnectionServer
    file_server = ConnectionServer("127.0.0.1", 1234)
    from Peer import Peer
    logging.info("Pinging 5 times peer: %s:%s..." % (peer_ip, int(peer_port)))
    peer = Peer(peer_ip, peer_port)
    for i in range(5):
        s = time.time()
        print peer.ping(),
        print "Response time: %.3fs" % (time.time()-s)
        time.sleep(1)
def testPassive(self, file_server, bootstrapper_db):
    """A passive announce (no ip) must be stored but not returned to other peers."""
    peer = Peer("127.0.0.1", 1544, connection_server=file_server)
    hash1 = hashlib.sha256("hash1").digest()
    # ip4=None marks the announcing peer as passive (not directly connectable).
    bootstrapper_db.peerAnnounce(ip4=None, port=15441, hashes=[hash1])
    res = peer.request(
        "announce", {
            "hashes": [hash1],
            "port": 15441,
            "need_types": ["ip4"],
            "need_num": 10,
            "add": []
        })

    assert len(res["peers"][0]["ip4"]) == 0  # Empty result
def peerGetFile(self, peer_ip, peer_port, site, filename, benchmark=False):
    """Download one file from a peer; with benchmark=True repeat 10x and wait
    for manual memory inspection (Python 2 CLI action)."""
    logging.info("Opening a simple connection server")
    global file_server
    from Connection import ConnectionServer
    file_server = ConnectionServer()
    from Peer import Peer
    logging.info("Getting %s/%s from peer: %s:%s..." % (site, filename, peer_ip, peer_port))
    peer = Peer(peer_ip, peer_port)
    s = time.time()
    peer.getFile(site, filename)
    if benchmark:
        for i in range(10):
            print peer.getFile(site, filename),
        print "Response time: %.3fs" % (time.time() - s)
        raw_input("Check memory")
def testAnnounceList(self, file_server):
    """getTrackers must only share trackers that had at least one successful announce."""
    # Start from an empty trackers file so earlier tests can't leak state in.
    open("%s/trackers.json" % config.data_dir, "w").write("{}")
    tracker_storage = AnnounceSharePlugin.tracker_storage
    tracker_storage.load()

    peer = Peer(file_server.ip, 1544, connection_server=file_server)
    assert peer.request("getTrackers")["trackers"] == []

    tracker_storage.onTrackerFound("zero://%s:15441" % file_server.ip)
    # Merely discovered trackers are not shared yet.
    assert peer.request("getTrackers")["trackers"] == []

    # It needs to have at least one successfull announce to be shared to other peers
    tracker_storage.onTrackerSuccess("zero://%s:15441" % file_server.ip, 1.0)
    assert peer.request("getTrackers")["trackers"] == [
        "zero://%s:15441" % file_server.ip
    ]
def sitePublish(self, address, peer_ip=None, peer_port=15441, inner_path="content.json", diffs=None):
    """Publish a site's new content to its peers (CLI action).

    Starts a temporary FileServer if one is not already running; otherwise
    notifies the running local client via siteReload/sitePublish requests.

    :param address: site address to publish
    :param peer_ip: announce to this specific peer instead of the tracker
    :param peer_port: port for peer_ip
    :param inner_path: content file to publish
    :param diffs: optional dict of content diffs forwarded to publish
    """
    # Fix: original used a mutable default argument (diffs={}), which is
    # shared across calls; use a None sentinel instead.
    if diffs is None:
        diffs = {}
    global file_server
    from Site import Site
    from Site import SiteManager
    from File import FileServer  # We need fileserver to handle incoming file requests
    from Peer import Peer
    SiteManager.site_manager.load()

    logging.info("Loading site...")
    site = Site(address, allow_create=False)
    site.settings["serving"] = True  # Serving the site even if its disabled

    logging.info("Creating FileServer....")
    file_server = FileServer()
    site.connection_server = file_server
    file_server_thread = gevent.spawn(file_server.start, check_sites=False)  # Dont check every site integrity
    time.sleep(0.001)

    if not file_server_thread.ready():  # Started fileserver
        file_server.openport()
        if peer_ip:  # Announce ip specificed
            site.addPeer(peer_ip, peer_port)
        else:  # Just ask the tracker
            logging.info("Gathering peers from tracker")
            site.announce()  # Gather peers
        published = site.publish(5, inner_path, diffs=diffs)  # Push to peers
        if published > 0:
            time.sleep(3)
            logging.info("Serving files (max 60s)...")
            gevent.joinall([file_server_thread], timeout=60)
            logging.info("Done.")
        else:
            logging.info("No peers found, sitePublish command only works if you already have visitors serving your site")
    else:  # Already running, notify local client on new content
        logging.info("Sending siteReload")
        # "*" means listening on every interface; reach it via loopback.
        if config.fileserver_ip == "*":
            my_peer = Peer("127.0.0.1", config.fileserver_port)
        else:
            my_peer = Peer(config.fileserver_ip, config.fileserver_port)

        logging.info(my_peer.request("siteReload", {"site": site.address, "inner_path": inner_path}))
        logging.info("Sending sitePublish")
        logging.info(my_peer.request("sitePublish", {"site": site.address, "inner_path": inner_path, "diffs": diffs}))
        logging.info("Done.")
def connect_server(self,ip,port,node=False):
    """Open a connection to a server peer, perform the length-prefixed handshake,
    and exchange the initial peer-list request.

    Returns the connected Peer on success, None on any failure (logged).
    """
    # Handshake payload: magic + content id + our peer id + our port, then
    # prefixed with its own 4-byte big-endian length.
    handshake = 'Salamatsyzby' + self.content_id + self.peer_id + struct.pack('!H',self.port)
    handshake = pack('!I',len(handshake)) + handshake
    stream_data = {'content_id':self.content_id,'chunk_length':0,'piece_length':0, 'handshake':handshake}
    p = Peer(stream_data,self,self.Buffer,ip=ip,port=port,server=True,node=node)
    if p.socket is not None:
        # Wait until the socket is writable before sending anything.
        r,w,e = select.select([p],[p],[p],SOCKET_TIMEOUT)
        if not w:
            self.logger.error('Server (%s:%s) is not available' % (ip,port))
            return
        p.request_peers()
        p.handle_write()
        # Now wait for the server's handshake response.
        r,w,e = select.select([p],[p],[p],SOCKET_TIMEOUT)
        if not r:
            self.logger.error('Server (%s:%s) does not respond' % (ip,port))
            return
        p.handle_read()
        if not p.handshaked:
            self.logger.error('Could not connect to Server (%s:%s)' % (ip,port))
            return
        self.logger.info('Connection with Server (%s:%s) established' % (ip,port))
        return p
    else:
        self.logger.error('Server (%s:%s) is not available' % (ip,port))
        return
def peerJoin(self, good=True):
    """Add a new peer to the network via a randomly chosen trusted node.

    :param good: whether the new peer is honest; honest peers are also
        tracked in self.cPeers
    :return: the new peer's id
    """
    # TODO: Don't assume all are trusted
    trusted = None
    if len(self.peers) > 0:
        # NOTE(review): guards on self.peers but samples from self.cPeers —
        # raises if all existing peers are bad (cPeers empty); confirm intended.
        trusted = random.sample(self.cPeers, 1)
        trusted = trusted[0]  # No need for the array

    # Add this peer to our network through the trusted node
    peer = Peer(trusted, self.peers, self.msgs, self.dkg, good)
    self.peers.append(peer)
    if good == True:
        self.cPeers.append(peer)

    # Recalculate n and ln(n) so our swarms are correct
    self.n = len(self.peers)
    self.lnn = math.log(self.n)

    return peer.getId()
def addPeer(self, ip, port, return_peer=False, connection=None, source="other"):
    """Register a peer for this site, or refresh it if already known.

    Returns the Peer for new peers; for known peers returns the Peer only
    when return_peer is True, else False. Invalid or blacklisted addresses
    return False.
    """
    # Reject unusable addresses up front.
    if not ip or ip == "0.0.0.0":
        return False

    key = "%s:%s" % (ip, port)
    known = self.peers.get(key)
    if known:
        # Refresh the discovery source on the existing entry.
        known.found(source)
        return known if return_peer else False

    # Never add blacklisted addresses (e.g. ourselves).
    if (ip, port) in self.peer_blacklist:
        return False
    fresh = Peer(ip, port, self)
    self.peers[key] = fresh
    fresh.found(source)
    return fresh
def testPassive(self, file_server, bootstrapper_db):
    """A passive announce (no address) must be stored but not returned to other peers."""
    peer = Peer(file_server.ip, 1544, connection_server=file_server)
    ip_type = helper.getIpType(file_server.ip)
    hash1 = hashlib.sha256(b"hash1").digest()
    # address=None marks the announcing peer as passive (not directly connectable).
    bootstrapper_db.peerAnnounce(ip_type, address=None, port=15441, hashes=[hash1])
    res = peer.request(
        "announce", {
            "hashes": [hash1],
            "port": 15441,
            "need_types": [ip_type],
            "need_num": 10,
            "add": []
        })

    assert len(res["peers"][0]["ipv4"]) == 0  # Empty result
def peerPing(self, peer_ip, peer_port=None):
    """Connect to a peer, print TLS details and connection time, ping 5 times,
    then reconnect and ping 5 more times (CLI action).

    Returns False on connection failure, otherwise None.
    """
    if not peer_port:
        peer_port = 15441  # default ZeroNet fileserver port
    logging.info("Opening a simple connection server")
    global file_server
    from Connection import ConnectionServer
    file_server = ConnectionServer("127.0.0.1", 1234)
    file_server.start(check_connections=False)
    from Crypt import CryptConnection
    CryptConnection.manager.loadCerts()

    from Peer import Peer
    logging.info("Pinging 5 times peer: %s:%s..." % (peer_ip, int(peer_port)))
    s = time.time()
    peer = Peer(peer_ip, peer_port)
    peer.connect()

    if not peer.connection:
        print("Error: Can't connect to peer (connection error: %s)" % peer.connection_error)
        return False
    # The socket object only exposes these methods on TLS connections,
    # hence the dir() feature checks.
    if "shared_ciphers" in dir(peer.connection.sock):
        print("Shared ciphers:", peer.connection.sock.shared_ciphers())
    if "cipher" in dir(peer.connection.sock):
        print("Cipher:", peer.connection.sock.cipher()[0])
    if "version" in dir(peer.connection.sock):
        print("TLS version:", peer.connection.sock.version())
    print("Connection time: %.3fs  (connection error: %s)" % (time.time() - s, peer.connection_error))

    for i in range(5):
        ping_delay = peer.ping()
        print("Response time: %.3fs" % ping_delay)
        time.sleep(1)
    # Drop the connection and repeat to exercise the reconnect path.
    peer.remove()
    print("Reconnect test...")
    peer = Peer(peer_ip, peer_port)
    for i in range(5):
        ping_delay = peer.ping()
        print("Response time: %.3fs" % ping_delay)
        time.sleep(1)
def __processHeartBeats(self, heartbeats):
    """Process all new peer discoveries.

    :param heartbeats: iterable of (ipAddr, macAddr, randomBits) tuples.
        Known MACs get an IP consistency check; unknown MACs get a new Peer
        created and registered (createPeer may return a falsy value, in
        which case the peer is not registered).
    """
    for ipAddr, macAddr, randomBits in heartbeats:
        # Idiom fix: membership test directly on the dict instead of .keys().
        if macAddr in self.peers:
            self.peers[macAddr].checkIP(ipAddr, randomBits)
        else:
            peer = Peer.createPeer(
                ipAddr,
                self.httpPort,
                macAddr,
                randomBits,
                self.testDistributor,
                self.resultWorker
            )
            if peer:
                self.peers[macAddr] = peer
def setup(self, messages): ''' setup elevator to be ready ''' # networking.do_stuff() # spawn networking thread # this thread sets up all the necessary connections # and listens for incoming messages for a set time period last_alive = time() for message in messages: self.peerList[message.id] = Peer(message.id, message.status, message.dispatcher, last_alive)
def peerGetFile(self, peer_ip, peer_port, site, filename, benchmark=False):
    """Download one file from a peer and print it; with benchmark=True repeat
    10x and wait for manual memory inspection (CLI action)."""
    logging.info("Opening a simple connection server")
    global file_server
    from Connection import ConnectionServer
    file_server = ConnectionServer("127.0.0.1", 1234)
    file_server.start(check_connections=False)
    from Crypt import CryptConnection
    CryptConnection.manager.loadCerts()

    from Peer import Peer
    logging.info("Getting %s/%s from peer: %s:%s..." % (site, filename, peer_ip, peer_port))
    peer = Peer(peer_ip, peer_port)
    s = time.time()
    if benchmark:
        for i in range(10):
            # NOTE(review): trailing comma (leftover from a py2 print) makes
            # this a discarded 1-tuple; harmless but confirm not a typo.
            peer.getFile(site, filename),
        print("Response time: %.3fs" % (time.time() - s))
        input("Check memory")
    else:
        print(peer.getFile(site, filename).read())
def peerCmd(self, peer_ip, peer_port, cmd, parameters): logging.info("Opening a simple connection server") global file_server from Connection import ConnectionServer file_server = ConnectionServer() from Crypt import CryptConnection CryptConnection.manager.loadCerts() from Peer import Peer peer = Peer(peer_ip, peer_port) import json if parameters: parameters = json.loads(parameters.replace("'", '"')) else: parameters = {} try: res = peer.request(cmd, parameters) print json.dumps(res, indent=2, ensure_ascii=False) except Exception, err: print "Unknown response (%s): %s" % (err, res)
def addPeer(self, ip, port, return_peer=False):
    """Register a peer for this site, or refresh it if already known.

    Returns the Peer for new peers; for known peers returns the Peer only
    when return_peer is True, else False.
    """
    key = "%s:%s" % (ip, port)
    if key not in self.peers:
        # Unknown address: create, register and return the new peer.
        fresh = Peer(ip, port, self)
        self.peers[key] = fresh
        return fresh
    # Known address: mark it as seen again.
    known = self.peers[key]
    known.found()
    return known if return_peer else False
def __receive(peer: Peer, received: bytes) -> (bytes, bytes, bytes, bool):
    """Read from the peer until a TERMINATOR-delimited message is complete.

    :param peer: connection to read from
    :param received: bytes already buffered from previous reads
    :return: (command, body, leftover_bytes, closed) — on a clean read the
        first byte is the command, the rest up to the terminator is the body,
        and leftover bytes after the terminator are returned for the next
        call; (None, None, None, True) if the connection closed mid-message.
    """
    terminator_index = received.find(bytes([Protocol.Flags.TERMINATOR]))
    while terminator_index == -1:
        part: bytes = peer.receive()
        received += part
        terminator_index = received.find(bytes([Protocol.Flags.TERMINATOR]))
        # An empty read means the remote side closed the connection.
        if len(part) == 0:
            logging.warning("Connection unexpectedly closed")
            peer.terminate()
            return None, None, None, True
    logging.debug("Message received: " + str(received))
    command = received[0]
    body = received[1:terminator_index]
    received = received[terminator_index + 1:]
    return command, body, received, False
def testIp4(self, file_server, bootstrapper_db):
    """Full announce lifecycle for ip4 peers: empty result, add, partial delete,
    re-add, single-hash query, and DB cleanup when a peer is removed."""
    peer = Peer("127.0.0.1", 1544, connection_server=file_server)

    hash1 = hashlib.sha256("site1").digest()
    hash2 = hashlib.sha256("site2").digest()
    hash3 = hashlib.sha256("site3").digest()

    # Verify empty result
    res = peer.request("announce", {
        "hashes": [hash1, hash2], "port": 15441, "need_types": ["ip4"], "need_num": 10, "add": ["ip4"]
    })

    assert len(res["peers"][0]["ip4"]) == 0  # Empty result

    # Verify added peer on previous request
    bootstrapper_db.peerAnnounce(ip4="1.2.3.4", port=15441, hashes=[hash1, hash2], delete_missing_hashes=True)

    res = peer.request("announce", {
        "hashes": [hash1, hash2], "port": 15441, "need_types": ["ip4"], "need_num": 10, "add": ["ip4"]
    })
    assert len(res["peers"][0]["ip4"]) == 1
    assert len(res["peers"][1]["ip4"]) == 1

    # hash2 deleted from 1.2.3.4 (delete_missing_hashes drops hashes not re-announced)
    bootstrapper_db.peerAnnounce(ip4="1.2.3.4", port=15441, hashes=[hash1], delete_missing_hashes=True)
    res = peer.request("announce", {
        "hashes": [hash1, hash2], "port": 15441, "need_types": ["ip4"], "need_num": 10, "add": ["ip4"]
    })
    assert len(res["peers"][0]["ip4"]) == 1
    assert len(res["peers"][1]["ip4"]) == 0

    # Announce 3 hash again
    bootstrapper_db.peerAnnounce(ip4="1.2.3.4", port=15441, hashes=[hash1, hash2, hash3], delete_missing_hashes=True)
    res = peer.request("announce", {
        "hashes": [hash1, hash2, hash3], "port": 15441, "need_types": ["ip4"], "need_num": 10, "add": ["ip4"]
    })
    assert len(res["peers"][0]["ip4"]) == 1
    assert len(res["peers"][1]["ip4"]) == 1
    assert len(res["peers"][2]["ip4"]) == 1

    # Single hash announce
    res = peer.request("announce", {
        "hashes": [hash1], "port": 15441, "need_types": ["ip4"], "need_num": 10, "add": ["ip4"]
    })
    assert len(res["peers"][0]["ip4"]) == 1

    # Test DB cleanup
    assert bootstrapper_db.execute("SELECT COUNT(*) AS num FROM peer").fetchone()["num"] == 1  # 127.0.0.1 never get added to db

    # Delete peers: removing the peer row must cascade to its hash links.
    bootstrapper_db.execute("DELETE FROM peer WHERE ip4 = '1.2.3.4'")
    assert bootstrapper_db.execute("SELECT COUNT(*) AS num FROM peer_to_hash").fetchone()["num"] == 0
    assert bootstrapper_db.execute("SELECT COUNT(*) AS num FROM hash").fetchone()["num"] == 3  # 3 sites
    assert bootstrapper_db.execute("SELECT COUNT(*) AS num FROM peer").fetchone()["num"] == 0  # 0 peer
def addPeer(self, ip, port, return_peer = False):
    """Register a peer for this site, or report it as already known.

    Returns the Peer for new peers; for known peers returns the Peer only
    when return_peer is True, else False. Missing or blacklisted addresses
    return False.
    """
    if not ip:
        return False
    if (ip, port) in self.peer_blacklist:
        return False  # Ignore blacklist (eg. myself)

    key = "%s:%s" % (ip, port)
    if key not in self.peers:
        # Unknown address: create, register and return the new peer.
        fresh = Peer(ip, port, self)
        self.peers[key] = fresh
        return fresh
    # Known address. (Original left a disabled .found() call here.)
    known = self.peers[key]
    return known if return_peer else False
def testHashCache(self, file_server, bootstrapper_db):
    """updateHashCache must rebuild the same hash_ids mapping as the incremental path."""
    ip_type = helper.getIpType(file_server.ip)
    peer = Peer(file_server.ip, 1544, connection_server=file_server)
    hash1 = hashlib.sha256(b"site1").digest()
    hash2 = hashlib.sha256(b"site2").digest()
    hash3 = hashlib.sha256(b"site3").digest()  # NOTE(review): unused below — confirm intended

    # Verify empty result
    res = peer.request(
        "announce", {
            "hashes": [hash1, hash2],
            "port": 15441,
            "need_types": [ip_type],
            "need_num": 10,
            "add": [ip_type]
        })

    assert len(res["peers"][0][ip_type]) == 0  # Empty result

    # Reloading the cache from the DB must reproduce the in-memory mapping.
    hash_ids_before = bootstrapper_db.hash_ids.copy()

    bootstrapper_db.updateHashCache()

    assert hash_ids_before == bootstrapper_db.hash_ids
def __receive_hello(peer: Peer, command: bytes):
    """
    Tries to receives HELLO, if the message is not HELLO, terminates the peer
    :param peer:
    :param command:
    :return:
    """
    # Guard clause: anything other than HELLO ends the session immediately.
    if command != Protocol.Flags.HELLO:
        logging.warning("No HELLO received, closing connection")
        peer.terminate()
        peer.hello_done = False
        return

    logging.info("HELLO message received")
    peer.send(Protocol.hello_message())
    peer.send(
        Protocol.server_message(Protocol.ServerFlags.ACK,
                                "Welcome to the server"))
    peer.hello_done = True
def testAnnounce(self, file_server, tor_manager):
    """Announce two site hashes to a live onion tracker over Tor (integration test)."""
    file_server.tor_manager = tor_manager
    hash1 = hashlib.sha256(b"1Nekos4fiBqfcazyG1bAxdBT5oBvA76Z").digest()
    hash2 = hashlib.sha256(b"1EU1tbG9oC1A8jz2ouVwGZyQ5asrNsE4Vr").digest()
    # Sanity-check connectivity to a clearnet peer first...
    peer = Peer("zero.booth.moe", 443, connection_server=file_server)
    assert peer.request("ping")
    # ...then to the onion tracker itself.
    peer = Peer("boot3rdez4rzn36x.onion", 15441, connection_server=file_server)
    assert peer.request("ping")
    res = peer.request(
        "announce", {
            "hashes": [hash1, hash2],
            "port": 15441,
            "need_types": ["ip4", "onion"],
            "need_num": 100,
            "add": [""]
        })

    assert res
def __init__(self):
    """Wire up handler, communicator and controller, then run a broadcast smoke test (Python 2)."""
    self.messageHandler = MessageHandler()
    self.communicator = Communicator(self.messageHandler)
    self.controller = Controller(self.communicator)
    # Close the circular dependency: handler needs the controller it feeds.
    self.messageHandler.setController(self.controller)
    print "MAIN initialize"

    ## test code: broadcast a stillAlive message to verify the wiring.
    print "--------------"
    sleep(2)
    print "testing start"
    peer = Peer("78.91.5.10")
    message = Message("stillAlive", peer.IP, "", "", "", "")
    self.communicator.broadcast(message)
    #self.communicator.sendToOne(peer, message)
    print "testing complete"
    print "--------------"
def announceTracker(self, tracker_protocol, tracker_address, fileserver_port=0, add_types=[], my_peer_id="", mode="start"):
    """Announce one or all serving sites to a zero:// tracker.

    Non-"zero" protocols are delegated to the parent class. In "start"/"more"
    mode only this site is announced; otherwise all serving sites are
    announced at most once per 5 minutes per tracker (full announce).

    Returns elapsed seconds on success, False on failure, True when a full
    announce is skipped because one happened recently.
    NOTE(review): add_types uses a mutable default argument — it is only
    read here, but confirm no caller relies on a shared default.
    """
    if tracker_protocol != "zero":
        return super(SitePlugin, self).announceTracker(
            tracker_protocol, tracker_address, fileserver_port, add_types, my_peer_id, mode
        )

    s = time.time()
    need_types = ["ip4"]
    if self.connection_server and self.connection_server.tor_manager and self.connection_server.tor_manager.enabled:
        need_types.append("onion")

    if mode == "start" or mode == "more":  # Single: Announce only this site
        sites = [self]
        full_announce = False
    else:  # Multi: Announce all currently serving site
        full_announce = True
        if time.time() - time_full_announced.get(tracker_address, 0) < 60 * 5:  # No reannounce all sites within 5 minute
            return True
        time_full_announced[tracker_address] = time.time()
        from Site import SiteManager
        sites = [site for site in SiteManager.site_manager.sites.values() if site.settings["serving"]]

    # Create request
    request = {
        "hashes": [], "onions": [], "port": fileserver_port,
        "need_types": need_types, "need_num": 20, "add": add_types
    }
    for site in sites:
        if "onion" in add_types:
            onion = self.connection_server.tor_manager.getOnion(site.address)
            request["onions"].append(onion)
        request["hashes"].append(hashlib.sha256(site.address).digest())

    # Tracker can remove sites that we don't announce
    if full_announce:
        request["delete"] = True

    # Sent request to tracker
    tracker = connection_pool.get(tracker_address)  # Re-use tracker connection if possible
    if not tracker:
        tracker_ip, tracker_port = tracker_address.split(":")
        tracker = Peer(tracker_ip, tracker_port, connection_server=self.connection_server)
        connection_pool[tracker_address] = tracker
    res = tracker.request("announce", request)

    if not res or "peers" not in res:
        self.log.debug("Announce to %s failed: %s" % (tracker_address, res))
        if full_announce:
            # Allow an immediate retry after a failed full announce.
            time_full_announced[tracker_address] = 0
        return False

    # Add peers from response to site: response entries are positional,
    # matching the order the sites were put into the request.
    site_index = 0
    for site_res in res["peers"]:
        site = sites[site_index]
        processPeerRes(site, site_res)
        site_index += 1

    # Check if we need to sign prove the onion addresses
    if "onion_sign_this" in res:
        self.log.debug("Signing %s for %s to add %s onions" % (res["onion_sign_this"], tracker_address, len(sites)))
        request["onion_signs"] = {}
        request["onion_sign_this"] = res["onion_sign_this"]
        request["need_num"] = 0  # Second round is only for proving ownership
        for site in sites:
            onion = self.connection_server.tor_manager.getOnion(site.address)
            sign = CryptRsa.sign(res["onion_sign_this"], self.connection_server.tor_manager.getPrivatekey(onion))
            request["onion_signs"][self.connection_server.tor_manager.getPublickey(onion)] = sign
        res = tracker.request("announce", request)
        if not res or "onion_sign_this" in res:
            self.log.debug("Announce onion address to %s failed: %s" % (tracker_address, res))
            if full_announce:
                time_full_announced[tracker_address] = 0
            return False

    if full_announce:
        tracker.remove()  # Close connection, we don't need it in next 5 minute

    return time.time() - s
def __init__(self, username): Peer.__init__(self, username) self.state = StateCodes.offline self.data = UserData.UserData(username)
def message(self, content): print 'message added to friend ' + str(self.username) + ': ' + str(content) msg = Message.Message(Shared.my_data.username, content) Peer.message(self, msg) self.data.append_message_to_chat(msg) Shared.main_window.calls.put((Shared.main_window.append_chat_message, self.username, msg))
def close(self): Peer.close(self) self.data.close_chat_data_file()
from Peer import Peer from time import strftime, sleep from threading import Thread if __name__ == "__main__": #print "test2" p = 5090 nodeA = Peer(p, "1,A,1", p) #id, name, connectTo nodeB = Peer(p+1, "2,B,1", p) nodeC = Peer(p+2, "3,C,2", p+1) tA = Thread( target = nodeA.mainLoop ) tA.start() tB = Thread( target = nodeB.mainLoop ) tB.start() tC = Thread( target = nodeC.mainLoop ) tC.start() try: # A joins nodeA.join(0, 0) sleep(1) #nodeA.list() #sleep(1) # B joins nodeB.join(1, p) sleep(1) #nodeA.list()
def announceTrackerZero(self, tracker_address, mode="start", num_want=10):
    """
    Announce to a zero:// tracker at "ip:port" and merge returned peers.

    mode "start"/"more" announces only self.site; any other mode does a
    full announce of every serving site (rate-limited to one per tracker
    per 5 minutes). Raises AnnounceError on a bad response; returns []
    when a full announce is skipped, None otherwise.

    NOTE(review): num_want is currently unused -- the request always asks
    for 20 peers ("need_num": 20). Confirm before relying on it.
    """
    global time_full_announced
    s = time.time()
    # Peer address families we want back from the tracker.
    need_types = ["ip4"]
    if self.site.connection_server.tor_manager.enabled:
        need_types.append("onion")
    if mode == "start" or mode == "more":  # Single: announce only this site
        sites = [self.site]
        full_announce = False
    else:  # Multi: announce all currently serving sites
        full_announce = True
        if time.time() - time_full_announced.get(tracker_address, 0) < 60 * 5:  # No reannounce of all sites within 5 minutes
            return []
        time_full_announced[tracker_address] = time.time()
        from Site import SiteManager
        sites = [site for site in SiteManager.site_manager.sites.values() if site.settings["serving"]]

    # Create request
    add_types = self.getOpenedServiceTypes()
    request = {
        "hashes": [], "onions": [], "port": self.fileserver_port,
        "need_types": need_types, "need_num": 20, "add": add_types
    }
    for site in sites:
        if "onion" in add_types:
            onion = self.site.connection_server.tor_manager.getOnion(site.address)
            request["onions"].append(onion)
        request["hashes"].append(site.address_hash)

    # Tracker can remove sites that we don't announce
    if full_announce:
        request["delete"] = True

    # Send request to tracker
    tracker = connection_pool.get(tracker_address)  # Re-use tracker connection if possible
    if not tracker:
        tracker_ip, tracker_port = tracker_address.split(":")
        tracker = Peer(tracker_ip, tracker_port, connection_server=self.site.connection_server)
        connection_pool[tracker_address] = tracker
    res = tracker.request("announce", request)

    if not res or "peers" not in res:
        if full_announce:
            time_full_announced[tracker_address] = 0  # Allow an immediate retry after failure
        raise AnnounceError("Invalid response: %s" % res)

    # Add peers from response to each site (responses follow request["hashes"] order)
    site_index = 0
    peers_added = 0
    for site_res in res["peers"]:
        site = sites[site_index]
        peers_added += processPeerRes(tracker_address, site, site_res)
        site_index += 1

    # Check if we need to sign to prove ownership of the onion addresses
    if "onion_sign_this" in res:
        self.site.log.debug("Signing %s for %s to add %s onions" % (res["onion_sign_this"], tracker_address, len(sites)))
        request["onion_signs"] = {}
        request["onion_sign_this"] = res["onion_sign_this"]
        request["need_num"] = 0
        for site in sites:
            onion = self.site.connection_server.tor_manager.getOnion(site.address)
            publickey = self.site.connection_server.tor_manager.getPublickey(onion)
            if publickey not in request["onion_signs"]:  # Sites may share an onion; sign each key once
                sign = CryptRsa.sign(res["onion_sign_this"], self.site.connection_server.tor_manager.getPrivatekey(onion))
                request["onion_signs"][publickey] = sign
        res = tracker.request("announce", request)
        if not res or "onion_sign_this" in res:
            if full_announce:
                time_full_announced[tracker_address] = 0
            raise AnnounceError("Announce onion address to failed: %s" % res)

    if full_announce:
        tracker.remove()  # Close connection, we don't need it in the next 5 minutes

    self.site.log.debug(
        "Tracker announce result: zero://%s (sites: %s, new peers: %s) in %.3fs" %
        (tracker_address, site_index, peers_added, time.time() - s)
    )
    return None
ipServeur = input("Saisir ip du serveur de hash et d'accueil #!> ") sock = ss.socket() sock.connect( (ipServeur, 8001) ) sock.send( str.encode(ipClient + "\n") ) hashClient = sock.recv( 1024 ).decode() hashClient = hashClient[:-1] sock.close() print("\u001B[31m" + "> Votre identifiant : " + hashClient + "\u001B[0m") # Création pair peer1 = Peer( ipClient, hashClient) sock = ss.socket() sock.connect( (ipServeur, 8000) ) # print("yo:" + hashClient + ":" + ipClient + "\n") sock.send( str.encode("yo:" + hashClient + ":" + ipClient + "\n") ) welcomeAnswer = sock.recv(1024).decode() sock.close() # print(welcomeAnswer) if welcomeAnswer == "yaf\n" : peer1.enterNetwork() else : peer1.enterNetwork(welcomeAnswer)
def testAddOnion(self, file_server, site, bootstrapper_db, tor_manager):
    """Bootstrapper announce flow for onion addresses: the tracker must reply
    with an "onion_sign_this" challenge, reject announces whose nonce
    signatures are wrong or incomplete, and store the onions only once every
    announced address is correctly signed."""
    onion1 = tor_manager.addOnion()
    onion2 = tor_manager.addOnion()
    peer = Peer("127.0.0.1", 1544, connection_server=file_server)
    hash1 = hashlib.sha256("site1").digest()
    hash2 = hashlib.sha256("site2").digest()

    # Seed the db with a known ip4 peer for both site hashes.
    bootstrapper_db.peerAnnounce(ip4="1.2.3.4", port=1234, hashes=[hash1, hash2])
    res = peer.request("announce", {
        "onions": [onion1, onion2], "hashes": [hash1, hash2], "port": 15441,
        "need_types": ["ip4", "onion"], "need_num": 10, "add": ["onion"]
    })
    assert len(res["peers"][0]["ip4"]) == 1
    assert "onion_sign_this" in res  # Server issues the signing challenge

    # Onion address not added yet
    site_peers = bootstrapper_db.peerList(ip4="1.2.3.4", port=1234, hash=hash1)
    assert len(site_peers["onion"]) == 0
    assert "onion_sign_this" in res

    # Sign the nonces
    sign1 = CryptRsa.sign(res["onion_sign_this"], tor_manager.getPrivatekey(onion1))
    sign2 = CryptRsa.sign(res["onion_sign_this"], tor_manager.getPrivatekey(onion2))

    # Bad sign (different address): signature belongs to onion2, announce is for onion1
    res = peer.request("announce", {
        "onions": [onion1],
        "onion_sign_this": res["onion_sign_this"],
        "onion_signs": {tor_manager.getPublickey(onion2): sign2},
        "hashes": [hash1], "port": 15441,
        "need_types": ["ip4", "onion"], "need_num": 10, "add": ["onion"]
    })
    assert "onion_sign_this" in res  # Rejected: challenge re-issued
    site_peers1 = bootstrapper_db.peerList(ip4="1.2.3.4", port=1234, hash=hash1)
    assert len(site_peers1["onion"]) == 0  # Not added

    # Bad sign (missing one): only onion1 signed, onion2 announced too
    res = peer.request("announce", {
        "onions": [onion1, onion2],
        "onion_sign_this": res["onion_sign_this"],
        "onion_signs": {tor_manager.getPublickey(onion1): sign1},
        "hashes": [hash1, hash2], "port": 15441,
        "need_types": ["ip4", "onion"], "need_num": 10, "add": ["onion"]
    })
    assert "onion_sign_this" in res
    site_peers1 = bootstrapper_db.peerList(ip4="1.2.3.4", port=1234, hash=hash1)
    assert len(site_peers1["onion"]) == 0  # Not added

    # Good sign: both onions signed with their own keys
    res = peer.request("announce", {
        "onions": [onion1, onion2],
        "onion_sign_this": res["onion_sign_this"],
        "onion_signs": {tor_manager.getPublickey(onion1): sign1, tor_manager.getPublickey(onion2): sign2},
        "hashes": [hash1, hash2], "port": 15441,
        "need_types": ["ip4", "onion"], "need_num": 10, "add": ["onion"]
    })
    assert "onion_sign_this" not in res  # Accepted: no further challenge

    # Onion addresses added: one distinct packed onion per site hash
    site_peers1 = bootstrapper_db.peerList(ip4="1.2.3.4", port=1234, hash=hash1)
    assert len(site_peers1["onion"]) == 1
    site_peers2 = bootstrapper_db.peerList(ip4="1.2.3.4", port=1234, hash=hash2)
    assert len(site_peers2["onion"]) == 1
    assert site_peers1["onion"][0] != site_peers2["onion"][0]
    assert helper.unpackOnionAddress(site_peers1["onion"][0])[0] == onion1+".onion"
    assert helper.unpackOnionAddress(site_peers2["onion"][0])[0] == onion2+".onion"

    # Cleanup the test onions
    tor_manager.delOnion(onion1)
    tor_manager.delOnion(onion2)