def test_add_normal_urls(self, mc):
    '''Testing insertion of new links.'''
    mc.return_value = mongomock.MongoClient()
    qm = QueueManager("queues-names", START_DELAY, CONFIGURATION)
    dm = DocumentMetadata("http://random-url.com")
    dm.depth = 1
    dm.links = [
        "http://www.randomurl1.it",
        "http://www.randomurl2.it",
        "http://www.randomurl3.it",
        "http://www.randomurl4.it",
        "http://www.randomurl5.it",
        "http://www.randomurl6.it",
        "http://www.randomurl7.it",
    ]
    all_url_length = len(dm.links)
    # adding all the links found in the document to the normal list
    qm.add_normal_urls(dm)
    # checking that the urls are there:
    stored = qm.normal_store.getall()
    self.assertEqual(len(stored), all_url_length)
    links_set = set(dm.links)
    for s in stored:
        self.assertTrue(s[1] in links_set)
        # depth should be increased by 1
        self.assertEqual(s[0], dm.depth + 1)
    # checking that seen is still empty
    for u in dm.links:
        self.assertTrue(qm.seen.is_new(u))
    # adding a duplicate with the same depth:
    # the entry should be replaced.
    dm.links = ["http://www.randomurl1.it"]
    qm.add_normal_urls(dm)
    stored = qm.normal_store.getall()
    self.assertEqual(len(stored), all_url_length)
    # adding a duplicate with a different depth:
    # we should get duplicate entries.
    dm.depth = 3
    dm.links = ["http://www.randomurl1.it"]
    qm.add_normal_urls(dm)
    stored = qm.normal_store.getall()
    self.assertEqual(len(stored), all_url_length + 1)
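# The assertions above pin down the add_normal_urls contract: links are
# stored at dm.depth + 1 and deduplicated on (depth, url). A minimal sketch
# of that behaviour (an assumption reconstructed from this test, not the
# real implementation) would be:
#
#     def add_normal_urls(self, dm):
#         for url in dm.links:
#             if self.seen.is_new(url):
#                 # the same (depth, url) pair replaces the old entry;
#                 # a different depth adds a second entry for the same url
#                 self.normal_store.put(dm.depth + 1, url)
#             else:
#                 # hypothetical helper: bump the counters of the url
#                 # and of all its known alternatives
#                 self.seen.update_counters(url)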
def pop(self):
    """Return the next document to fetch."""
    document_metadata = DocumentMetadata()
    item = self.priority_store.pop(int(time.time()))
    if item:
        logging.debug("Get priority:" + str(item[1]))
        document_metadata.url = item[1]
        document_metadata.depth = item[3]
        document_metadata.delay = item[2]
        document_metadata.source = Source.priority
    else:
        while not item:
            item = self.normal_store.pop()
            if not item:
                break
            # The following check is needed because urls are stored in seen
            # only after they have been fetched, so the normal list can
            # contain the same url multiple times, and we do not want
            # duplicates in the refetching list.
            if not self.seen.is_new(item[1]):
                item = None
        if item:
            # In case of a network error, urls are re-pushed onto the
            # normal queue so they are not lost. Therefore something
            # already seen can show up here; refetching those cases is
            # not a problem.
            logging.debug("Get normal:" + str(item[1]))
            document_metadata.url = item[1]
            document_metadata.depth = item[0]
            document_metadata.delay = 0
            document_metadata.source = Source.normal
        else:
            item = self.refetch_store.pop(int(time.time()))
            if item:
                logging.debug("Get Refetch:" + str(item[1]))
                document_metadata.url = item[1]
                document_metadata.depth = item[3]
                document_metadata.delay = item[2]
                document_metadata.source = Source.refetch
    return document_metadata
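# The tests below patch time.time with a mock_time helper defined elsewhere
# in this module. A minimal sketch, assuming all it needs to do is return a
# timestamp far enough in the future that every scheduled item is already
# due (the actual helper may differ):
#
#     def mock_time():
#         return 2 ** 32  # far-future epoch seconds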
def test_pop_ordering(self, mc):
    '''
    Test adding urls to the priority, normal and refetch queues and
    check that they are popped in the correct order.
    '''
    mc.return_value = mongomock.MongoClient()
    qm = QueueManager("queues-names", START_DELAY, CONFIGURATION)
    # inserting a priority url.
    urls = [
        "www.daniele.com",
    ]
    qm.init_priority_list(urls)
    # inserting a normal url.
    burls = [
        {
            "url": "www.daniele1.com",
            "depth": "2"
        },
    ]
    qm.add_bootstrap_urls(burls)
    # inserting a refetch url.
    dm = DocumentMetadata("http://www.randomurl8.it")
    dm.depth = 1
    dm.dhash = 121212
    dm.source = Source.normal
    dm.delay = 500
    dm.alternatives = ["http://www.randomurl8.it"]
    qm.add_seen_and_reschedule(dm)
    # make sure all the inserted urls are ready to be popped
    with mock.patch("time.time", mock_time):
        # first one from priority
        doc = qm.pop()
        self.assertEqual(doc.depth, 0)
        self.assertEqual(doc.source, Source.priority)
        # second one from normal
        doc = qm.pop()
        self.assertEqual(doc.source, Source.normal)
        # third one from refetching
        doc = qm.pop()
        self.assertEqual(doc.source, Source.refetch)
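# Note that burls carries depth as the string "2": bootstrap urls apparently
# come from configuration, so add_bootstrap_urls presumably coerces the
# value before storing it (an inference from this test, not confirmed),
# along the lines of:
#
#     for entry in burls:
#         self.normal_store.put(int(entry["depth"]), entry["url"])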
def test_reschedule_newurl(self):
    ############################################
    # inserting a new url
    dm = DocumentMetadata("http://www.randomurl8.it")
    dm.depth = 1
    dm.dhash = 121212
    dm.source = Source.normal
    dm.alternatives = ["http://www.randomurl8.it"]
    self.qm.add_seen_and_reschedule(dm)
    # checking all the parameters
    counter = self.qm.seen.get(dm.url).get("count")
    dhash = self.qm.seen.get(dm.url).get("page_hash")
    self.assertEqual(counter, 1)
    self.assertEqual(dhash, dm.dhash)
    with mock.patch("time.time", mock_time):
        refetching_data = self.qm.pop()
        self.assertEqual(refetching_data.delay, TWO_HOURS)
        self.assertEqual(refetching_data.source, Source.refetch)
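# The first reschedule of a brand-new url is expected to come back with
# delay TWO_HOURS, which suggests that the START_DELAY the QueueManager is
# built with equals that two-hour value (an inference from this test, not
# verified against the constants), i.e. something like:
#
#     TWO_HOURS = 2 * 60 * 60  # 7200 seconds
#     START_DELAY = TWO_HOURS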
def test_reschedule_different_content(self):
    ############################################
    # this url is in seen with a different hash and
    # was not taken from the priority queue.
    # I expect: the delay halved and the seen counter set to 1.
    dm = DocumentMetadata("http://www.randomurl1.it")
    dm.depth = 1
    dm.dhash = 1936
    dm.source = Source.normal
    dm.delay = 20
    dm.alternatives = ["http://www.randomurl1.it"]
    self.qm.add_seen_and_reschedule(dm)
    # checking all the parameters
    counter = self.qm.seen.get(dm.url).get("count")
    dhash = self.qm.seen.get(dm.url).get("page_hash")
    self.assertEqual(counter, 1)
    self.assertEqual(dhash, dm.dhash)
    with mock.patch("time.time", mock_time):
        refetching_data = self.qm.pop()
        self.assertEqual(refetching_data.delay, dm.delay / 2)
        self.assertEqual(refetching_data.source, Source.refetch)
def test_reschedule_small_delay(self):
    ############################################
    # as before, but with a small delay:
    # checking that the delay does not change.
    dm = DocumentMetadata("http://www.randomurl1.it")
    dm.depth = 1
    dm.dhash = 121212
    dm.source = Source.normal
    dm.delay = 500
    dm.alternatives = ["http://www.randomurl1.it"]
    self.qm.add_seen_and_reschedule(dm)
    # checking all the parameters
    counter = self.qm.seen.get(dm.url).get("count")
    dhash = self.qm.seen.get(dm.url).get("page_hash")
    self.assertEqual(counter, 1)
    self.assertEqual(dhash, dm.dhash)
    with mock.patch("time.time", mock_time):
        refetching_data = self.qm.pop()
        self.assertEqual(refetching_data.delay, dm.delay)
        self.assertEqual(refetching_data.source, Source.refetch)
def test_reschedule_samecontent_lastdelay(self):
    ############################################
    # testing rescheduling of some urls:
    # this url is in seen with the same hash and
    # was not taken from the priority queue.
    # I expect: the delay doubled and the seen counter set to 1.
    dm = DocumentMetadata("http://www.randomurl1.it")
    dm.depth = 1
    dm.dhash = 12345
    dm.source = Source.normal
    dm.delay = 40
    # alternatives always contains at least one url.
    dm.alternatives = ["http://www.randomurl1.it"]
    # we want to check that the former alternatives are also correctly
    # updated even if the new alternatives field is different.
    alternatives = self.qm.seen.get(dm.url).get("alternatives")
    self.assertNotEqual(len(dm.alternatives), len(alternatives))
    self.qm.add_seen_and_reschedule(dm)
    # checking all the parameters
    counter = self.qm.seen.get(dm.url).get("count")
    dhash = self.qm.seen.get(dm.url).get("page_hash")
    self.assertEqual(counter, 1)
    # checking that all the alternatives were updated
    for url in alternatives:
        counter = self.qm.seen.get(url).get("count")
        dhash = self.qm.seen.get(url).get("page_hash")
        self.assertEqual(counter, 1)
        self.assertEqual(dhash, dm.dhash)
    with mock.patch("time.time", mock_time):
        refetching_data = self.qm.pop()
        self.assertEqual(refetching_data.url, "")
        self.assertEqual(refetching_data.source, Source.unknown)
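# Taken together, the reschedule tests sketch the apparent adaptive-refetch
# policy of add_seen_and_reschedule (a reading of the tests above, not of
# the implementation itself):
#
#     new url           -> schedule with the start delay (TWO_HOURS)
#     content changed   -> halve the delay (refetch sooner), apparently
#                          subject to a lower bound, since a 500-second
#                          delay is left unchanged
#     content unchanged -> double the delay (refetch later); past the last
#                          allowed delay the url is dropped from refetching,
#                          which is why the final pop() here returns an
#                          empty document with Source.unknown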
def test_add_normal_urls_some_seen(self, mc):
    '''
    Testing insertion of a mix of new and already-seen links.
    '''
    mc.return_value = mongomock.MongoClient()
    qm = QueueManager("queues-names", START_DELAY, CONFIGURATION)
    ############################
    # insert some urls into seen
    dm_seen_1 = DocumentMetadata("http://www.randomurl1.it")
    # these two are considered the same page with different urls
    dm_seen_1.alternatives = [
        "http://www.randomurl1.it",
        "http://www.randomurl3.it",
    ]
    dm_seen_1.dhash = 12345
    dm_seen_2 = DocumentMetadata("http://www.randomurl4.it")
    dm_seen_2.alternatives = [
        "http://www.randomurl4.it",
    ]
    dm_seen_2.dhash = 98765
    already_seen_urls = set(dm_seen_1.alternatives).union(
        dm_seen_2.alternatives)
    qm.seen.add(dm_seen_1)
    qm.seen.add(dm_seen_2)
    ############################################
    # testing add_normal_urls with some seen urls
    dm = DocumentMetadata("http://random-url.com")
    dm.depth = 1
    dm.links = [
        "http://www.randomurl1.it",
        "http://www.randomurl2.it",
        "http://www.randomurl3.it",
        "http://www.randomurl4.it",
        "http://www.randomurl5.it",
        "http://www.randomurl6.it",
        "http://www.randomurl7.it",
    ]
    # adding all the links found in the document to the normal list
    qm.add_normal_urls(dm)
    # checking that the urls are there (all except the 3 already seen):
    links_set = set(dm.links).difference(already_seen_urls)
    stored = qm.normal_store.getall()
    self.assertEqual(len(stored), len(links_set))
    # the count for these urls should be 3
    for i in dm_seen_1.alternatives:
        data = qm.seen.get(i)
        # this is 3 because we inserted one in seen +
        # we tried to insert http://www.randomurl1.it in normal +
        # we tried to insert http://www.randomurl3.it in normal
        self.assertEqual(data["count"], 3)
    # adding a duplicate that is already in seen:
    # it should not be added to the normal list, but the counters of
    # all its alternatives should be updated (+1)
    dm.links = [
        dm.links[0],
    ]
    qm.add_normal_urls(dm)
    stored = qm.normal_store.getall()
    for i in dm_seen_1.alternatives:
        data = qm.seen.get(i)
        self.assertEqual(data["count"], 4)
    # adding again with a different depth should not
    # change the behaviour
    dm.depth = 3
    dm.links = [
        dm.links[0],
    ]
    qm.add_normal_urls(dm)
    for i in dm_seen_1.alternatives:
        # the counters should be incremented once more
        data = qm.seen.get(i)
        self.assertEqual(data["count"], 5)
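# From the accesses in these tests, the seen store appears to keep one
# document per url, roughly shaped like the sketch below (field names are
# taken from the tests; the exact layout is an assumption):
#
#     {
#         "page_hash": 12345,     # dhash of the fetched content
#         "count": 3,             # times the url or an alternative was met
#         "alternatives": [       # urls resolving to the same page
#             "http://www.randomurl1.it",
#             "http://www.randomurl3.it",
#         ],
#     }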