def subtest_invalid_permid(self):
    """
    Send crawler messages from a non-crawler peer
    """
    print >> sys.stderr, "-" * 80, "\ntest: invalid_permid"

    # make sure that the OLConnection is NOT in the crawler_db
    crawler_db = CrawlerDBHandler.getInstance()
    assert not self.my_permid in crawler_db.getCrawlers()

    # We are not a registered crawler, any request from us should
    # be denied
    messages = [CRAWLER_REQUEST,
                CRAWLER_REQUEST + CRAWLER_DATABASE_QUERY,
                CRAWLER_REQUEST + CRAWLER_DATABASE_QUERY,
                CRAWLER_REQUEST + chr(0)]
    for msg in messages:
        s = OLConnection(self.my_keypair, "localhost", self.hisport)
        s.send(msg)
        response = s.recv()
        assert response == "", "response type is %s" % getMessageName(response[0])
        time.sleep(1)
        s.close()
def subtest_valid_messageid(self):
    """
    Send a valid message-id from a registered crawler peer
    """
    print >> sys.stderr, "-" * 80, "\ntest: valid_messageid"

    # make sure that the OLConnection IS in the crawler_db
    crawler_db = CrawlerDBHandler.getInstance()
    crawler_db.temporarilyAddCrawler(self.my_permid)

    s = OLConnection(self.my_keypair, "localhost", self.hisport)

    queries = ["SELECT name FROM category",
               "SELECT * FROM peer",
               "SELECT * FROM torrent"]
    for query in queries:
        self.send_crawler_request(s, CRAWLER_DATABASE_QUERY, 0, 0, query)

        error, payload = self.receive_crawler_reply(s, CRAWLER_DATABASE_QUERY, 0)
        assert error == 0
        if DEBUG:
            print >> sys.stderr, cPickle.loads(payload)

    time.sleep(1)
    s.close()
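# The tests above and below rely on send_crawler_request/receive_crawler_reply
# helpers on the test class. The sketch below is an illustration only: the
# assumed wire framing (message-id, a one-byte channel id and a two-byte
# big-endian frequency after the CRAWLER_REQUEST/CRAWLER_REPLY header) is an
# assumption about the crawler protocol, not necessarily the exact layout
# Tribler uses, and multipart reply reassembly is omitted.
from struct import pack  # assumed module-level import for the sketch below

def send_crawler_request(self, sock, message_id, channel_id, frequency, payload):
    # assumed framing: CRAWLER_REQUEST header, message-id, channel id,
    # requested frequency, then the raw payload
    sock.send("".join((CRAWLER_REQUEST,
                       message_id,
                       chr(channel_id & 0xFF),
                       pack("!H", frequency & 0xFFFF),
                       str(payload))))

def receive_crawler_reply(self, sock, message_id, channel_id):
    # assumed framing of the reply: CRAWLER_REPLY header, message-id,
    # channel id, parts-left counter, error code, then the payload.
    # Returns an (error, payload) tuple as the tests expect.
    response = sock.recv()
    assert response[0] == CRAWLER_REPLY
    assert response[1] == message_id
    assert ord(response[2]) == channel_id
    parts_left, error = ord(response[3]), ord(response[4])
    return error, response[5:]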
def subtest_invalid_sql_query(self):
    """
    Send an invalid sql query from a registered crawler peer
    """
    print >> sys.stderr, "-" * 80, "\ntest: invalid_sql_query"

    # make sure that the OLConnection IS in the crawler_db
    crawler_db = CrawlerDBHandler.getInstance()
    crawler_db.temporarilyAddCrawler(self.my_permid)

    s = OLConnection(self.my_keypair, "localhost", self.hisport)

    queries = ["FOO BAR"]
    for query in queries:
        self.send_crawler_request(s, CRAWLER_DATABASE_QUERY, 0, 0, query)

        error, payload = self.receive_crawler_reply(s, CRAWLER_DATABASE_QUERY, 0)
        assert error == 1
        if DEBUG:
            print >> sys.stderr, payload

    time.sleep(1)
    s.close()
def subtest_invalid_frequency(self):
    """
    Send two valid requests shortly after each other. However,
    indicate that the frequency should be large. This should
    result in a frequency error
    """
    print >> sys.stderr, "-" * 80, "\ntest: invalid_frequency"

    # make sure that the OLConnection IS in the crawler_db
    crawler_db = CrawlerDBHandler.getInstance()
    crawler_db.temporarilyAddCrawler(self.my_permid)

    s = OLConnection(self.my_keypair, "localhost", self.hisport)
    self.send_crawler_request(s, CRAWLER_DATABASE_QUERY, 42, 0, "SELECT * FROM peer")
    error, payload = self.receive_crawler_reply(s, CRAWLER_DATABASE_QUERY, 42)
    assert error == 0

    # try on the same connection
    self.send_crawler_request(s, CRAWLER_DATABASE_QUERY, 42, 1000, "SELECT * FROM peer")
    error, payload = self.receive_crawler_reply(s, CRAWLER_DATABASE_QUERY, 42)
    assert error == 254  # should give a frequency error
    s.close()

    # try on a new connection
    s = OLConnection(self.my_keypair, "localhost", self.hisport)
    self.send_crawler_request(s, CRAWLER_DATABASE_QUERY, 42, 1000, "SELECT * FROM peer")
    error, payload = self.receive_crawler_reply(s, CRAWLER_DATABASE_QUERY, 42)
    assert error == 254  # should give a frequency error

    time.sleep(1)
    s.close()
def subtest_dialback(self):
    """
    Send a valid request, disconnect, and wait for an incoming
    connection with the reply
    """
    print >> sys.stderr, "-" * 80, "\ntest: dialback"

    # make sure that the OLConnection IS in the crawler_db
    crawler_db = CrawlerDBHandler.getInstance()
    crawler_db.temporarilyAddCrawler(self.my_permid)

    s = OLConnection(self.my_keypair, "localhost", self.hisport, mylistenport=self.listen_port)
    self.send_crawler_request(s, CRAWLER_DATABASE_QUERY, 42, 0, "SELECT * FROM peer")
    s.close()

    # wait for reply
    try:
        conn, addr = self.listen_socket.accept()
    except socket.timeout:
        if DEBUG:
            print >> sys.stderr, "test_crawler: timeout, bad, peer didn't connect to send the crawler reply"
        assert False, "test_crawler: timeout, bad, peer didn't connect to send the crawler reply"
    s = OLConnection(self.my_keypair, "", 0, conn, mylistenport=self.listen_port)

    # read reply
    error, payload = self.receive_crawler_reply(s, CRAWLER_DATABASE_QUERY, 42)
    assert error == 0
    if DEBUG:
        print >> sys.stderr, cPickle.loads(payload)

    time.sleep(1)
def subtest_valid_nat_check(self):
    """
    Send a CRAWLER_NATCHECK message to the Tribler instance. A
    reply containing a nat type should be returned.
    """
    print >> sys.stderr, "-" * 80, "\ntest: subtest_valid_nat_check"

    # make sure that the OLConnection IS in the crawler_db
    crawler_db = CrawlerDBHandler.getInstance()
    crawler_db.temporarilyAddCrawler(self.my_permid)

    s = OLConnection(self.my_keypair, "localhost", self.hisport, mylistenport=self.listen_port)
    self.send_crawler_request(s, CRAWLER_NATCHECK, 42, 0, "")
    s.close()

    if DEBUG:
        print >> sys.stderr, "test_natcheck: the nat-check code allows for a 10 minute delay in reporting the nat stats"
    self.listen_socket.settimeout(11 * 60)

    # wait for reply
    try:
        conn, addr = self.listen_socket.accept()
    except socket.timeout:
        if DEBUG:
            print >> sys.stderr, "test_natcheck: timeout, bad, peer didn't connect to send the crawler reply"
        assert False, "test_natcheck: timeout, bad, peer didn't connect to send the crawler reply"
    s = OLConnection(self.my_keypair, "", 0, conn, mylistenport=self.listen_port)

    # read reply
    error, payload = self.receive_crawler_reply(s, CRAWLER_NATCHECK, 42)
    assert error == 0
    if DEBUG:
        print >> sys.stderr, "test_natcheck:", bdecode(payload)

    time.sleep(1)
def subtest_valid_query(self):
    """
    Send a CRAWLER_SEEDINGSTATS_QUERY message to the Tribler
    instance. Execute a valid SQL query.
    """
    print >> sys.stderr, "-" * 80, "\ntest: subtest_valid_query"

    # make sure that the OLConnection IS in the crawler_db
    crawler_db = CrawlerDBHandler.getInstance()
    crawler_db.temporarilyAddCrawler(self.my_permid)

    # test with valid data
    seedingstats_db = SQLiteSeedingStatsCacheDB.getInstance()
    seedingstats_db.insertMany("SeedingStats", [(50000, 'foobar', 'dummy_seed', 500, 0, 0),
                                                (80000, 'bar', 'dummy_seed', 800, 1, 0)])

    s = OLConnection(self.my_keypair, "localhost", self.hisport, mylistenport=self.listen_port)

    queries = [cPickle.dumps([("read", "SELECT * FROM SeedingStats"),
                              ("read", "SELECT * FROM SeedingStats WHERE crawled = 0")])]
    for query in queries:
        self.send_crawler_request(s, CRAWLER_SEEDINGSTATS_QUERY, 0, 0, query)

        error, payload = self.receive_crawler_reply(s, CRAWLER_SEEDINGSTATS_QUERY, 0)
        assert error == 0, (error, payload)
        if DEBUG:
            print >> sys.stderr, "test_seeding_stats:", cPickle.loads(payload)
def setUpPostSession(self):
    """ override TestAsServer """
    TestCrawler.setUpPostSession(self)

    self.some_keypair = EC.gen_params(EC.NID_sect233k1)
    self.some_keypair.gen_key()
    self.some_permid = str(self.some_keypair.pub().get_der())

    self.friendshipStatistics_db = FriendshipStatisticsDBHandler.getInstance()
    self.friendshipStatistics_db.insertFriendshipStatistics(
        bin2str(self.his_permid), bin2str(self.some_permid), int(time.time()), 0, commit=True)
    self.friendshipStatistics_db.insertFriendshipStatistics(
        bin2str(self.my_permid), bin2str(self.some_permid), int(time.time()), 0, commit=True)

    # make sure that the OLConnection IS in the crawler_db
    crawler_db = CrawlerDBHandler.getInstance()
    crawler_db.temporarilyAddCrawler(self.my_permid)
def subtest_invalid_tablename(self):
    """
    Send an invalid query and check that we get the actual sql
    exception back
    """
    print >> sys.stderr, "-" * 80, "\ntest: invalid_tablename"

    # make sure that the OLConnection IS in the crawler_db
    crawler_db = CrawlerDBHandler.getInstance()
    crawler_db.temporarilyAddCrawler(self.my_permid)

    s = OLConnection(self.my_keypair, "localhost", self.hisport)

    self.send_crawler_request(s, CRAWLER_DATABASE_QUERY, 42, 0, "SELECT * FROM nofoobar")
    error, payload = self.receive_crawler_reply(s, CRAWLER_DATABASE_QUERY, 42)
    assert error != 0
    assert payload == "SQLError: no such table: nofoobar", payload
def subtest_invalid_query(self):
    """
    Send a CRAWLER_SEEDINGSTATS_QUERY message to the Tribler
    instance. Execute an invalid SQL query.
    """
    print >> sys.stderr, "-" * 80, "\ntest: subtest_invalid_query"

    # make sure that the OLConnection IS in the crawler_db
    crawler_db = CrawlerDBHandler.getInstance()
    crawler_db.temporarilyAddCrawler(self.my_permid)

    s = OLConnection(self.my_keypair, "localhost", self.hisport)

    queries = ["FOO BAR", cPickle.dumps(["select * from category", ""])]
    for query in queries:
        self.send_crawler_request(s, CRAWLER_SEEDINGSTATS_QUERY, 0, 0, query)

        error, payload = self.receive_crawler_reply(s, CRAWLER_SEEDINGSTATS_QUERY, 0)
        assert error != 0, error
        if DEBUG:
            print >> sys.stderr, "test_seeding_stats:", payload
def __init__(self, session):
    if self.__singleton:
        raise RuntimeError, "Crawler is Singleton"
    self._overlay_bridge = OverlayThreadingBridge.getInstance()
    self._session = session
    self._crawler_db = CrawlerDBHandler.getInstance()

    # _message_handlers contains message-id:(request-callback,
    # reply-callback, last-request-timestamp) pairs. The handlers are
    # called when either a CRAWLER_REQUEST or CRAWLER_REPLY message is
    # received.
    self._message_handlers = {}

    # _crawl_initiators is a list with (initiator-callback, frequency,
    # accept_frequency) tuples. The initiators are called when a new
    # connection is received.
    self._crawl_initiators = []

    # _initiator_deadlines contains [deadline, frequency,
    # accept_frequency, initiator-callback, permid, selversion,
    # failure-counter] lists. The deadlines register when to call the
    # crawl initiators again for a specific permid.
    self._initiator_deadlines = []

    # _dialback_deadlines contains message_id:(deadline, permid) pairs.
    # Client peers should connect back to -a- crawler indicated by
    # permid after the deadline expires.
    self._dialback_deadlines = {}

    # _channels contains permid:buffer-dict pairs, where buffer-dict
    # contains channel-id:(timestamp, buffer, channel_data) pairs and
    # buffer holds the payload of the multipart messages received so
    # far. Channels are used to match outstanding replies to given
    # requests.
    self._channels = {}

    # start checking for expired deadlines
    self._check_deadlines(True)

    # start checking for ancient channels
    self._check_channels()
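# A hedged sketch of how the data structures above might be populated.
# register_message_handler and register_crawl_initiator are illustrative
# method sketches that are merely consistent with the comments in __init__;
# their names, signatures, and defaults are assumptions, not necessarily the
# actual Tribler implementation.
def register_message_handler(self, message_id, request_callback, reply_callback):
    # store the callbacks together with a zero last-request-timestamp,
    # matching the _message_handlers layout described above
    self._message_handlers[message_id] = (request_callback, reply_callback, 0)

def register_crawl_initiator(self, initiator_callback, frequency=60 * 60, accept_frequency=None):
    # store an (initiator-callback, frequency, accept_frequency) tuple so
    # that newly received overlay connections can trigger the initiator
    if accept_frequency is None:
        accept_frequency = frequency
    self._crawl_initiators.append((initiator_callback, frequency, accept_frequency))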
def subtest_invalid_messageid(self):
    """
    Send an invalid message-id from a registered crawler peer
    """
    print >> sys.stderr, "-" * 80, "\ntest: invalid_messageid"

    # make sure that the OLConnection IS in the crawler_db
    crawler_db = CrawlerDBHandler.getInstance()
    crawler_db.temporarilyAddCrawler(self.my_permid)

    # We are a registered crawler, start sending invalid messages
    messages = [CRAWLER_REQUEST,
                CRAWLER_REQUEST + chr(0),
                CRAWLER_REPLY,
                CRAWLER_REPLY + chr(0)]
    for msg in messages:
        s = OLConnection(self.my_keypair, "localhost", self.hisport)
        s.send(msg)
        response = s.recv()
        assert response == "", "response type is %s" % getMessageName(response[0])
        time.sleep(1)
        s.close()