def test_search_quick(self): """Perform search and check found caches""" # at time of writing, there were exactly 16 caches in this area + one PM only expected_cache_num = 16 tolerance = 7 rect = Rectangle(Point(49.73, 13.38), Point(49.74, 13.40)) with self.subTest("normal"): res = [c.wp for c in self.g.search_quick(rect)] for wp in ["GC41FJC", "GC17E8Y", "GC5ND9F"]: self.assertIn(wp, res) # but 108 caches larger tile self.assertLess(len(res), 130) self.assertGreater(len(res), 90) with self.subTest("strict handling of cache coordinates"): res = list(self.g.search_quick(rect, strict=True)) self.assertLess(len(res), expected_cache_num + tolerance) self.assertGreater(len(res), expected_cache_num - tolerance) with self.subTest("larger zoom - more precise"): res1 = list(self.g.search_quick(rect, strict=True, zoom=15)) res2 = list(self.g.search_quick(rect, strict=True, zoom=14)) for res in res1, res2: self.assertLess(len(res), expected_cache_num + tolerance) self.assertGreater(len(res), expected_cache_num - tolerance) for c1, c2 in itertools.product(res1, res2): self.assertLess(c1.location.precision, c2.location.precision)
def test_search_quick(self): """Perform quick search and check found caches""" # at time of writing, there were exactly 16 caches in this area + one PM only expected_cache_num = 16 tolerance = 7 rect = Rectangle(Point(49.73, 13.38), Point(49.74, 13.40)) with self.subTest("normal"): with self.recorder.use_cassette('geocaching_quick_normal'): # Once this feature is fixed, the corresponding cassette will have to be deleted # and re-recorded. res = [c.wp for c in self.gc.search_quick(rect)] for wp in ["GC41FJC", "GC17E8Y", "GC383XN"]: self.assertIn(wp, res) # but 108 caches larger tile self.assertLess(len(res), 130) self.assertGreater(len(res), 90) with self.subTest("strict handling of cache coordinates"): with self.recorder.use_cassette('geocaching_quick_strictness'): res = list(self.gc.search_quick(rect, strict=True)) self.assertLess(len(res), expected_cache_num + tolerance) self.assertGreater(len(res), expected_cache_num - tolerance) with self.subTest("larger zoom - more precise"): with self.recorder.use_cassette('geocaching_quick_zoom'): res1 = list(self.gc.search_quick(rect, strict=True, zoom=15)) res2 = list(self.gc.search_quick(rect, strict=True, zoom=14)) for res in res1, res2: self.assertLess(len(res), expected_cache_num + tolerance) self.assertGreater(len(res), expected_cache_num - tolerance) for c1, c2 in itertools.product(res1, res2): self.assertLess(c1.location.precision, c2.location.precision)
def test_search_quick(self): """Perform search and check found caches""" rect = Rectangle(Point(49.73, 13.38), Point(49.74, 13.40)) caches = list(self.g.search_quick(rect)) strict_caches = list(self.g.search_quick(rect, strict=True)) precise_caches = list(self.g.search_quick(rect, precision=45.)) # Check for known geocaches expected = ["GC41FJC", "GC17E8Y", "GC5ND9F"] for i in expected: found = False for c in caches: if c.wp == i: found = True break with self.subTest("Check if {} is in results".format(c.wp)): self.assertTrue(found) with self.subTest("Precision is in assumed range"): self.assertLess(caches[0].location.precision, 49.5) self.assertGreater(caches[0].location.precision, 49.3) with self.subTest("Found roughly correct amount of caches"): # At time of writing, there were 108 caches inside inspected tile self.assertLess(len(caches), 130) self.assertGreater(len(caches), 90) with self.subTest("Strict handling of cache coordinates"): # ...but only 12 inside this stricter area self.assertLess(len(strict_caches), 16) self.assertGreater(len(strict_caches), 7) with self.subTest("Precision grows when asking for it"): self.assertLess(precise_caches[0].location.precision, 45.)
def test_search_quick_match_load(self): """Test if search results matches exact cache locations.""" rect = Rectangle(Point(49.73, 13.38), Point(49.74, 13.39)) caches = list(self.g.search_quick(rect, strict=True, zoom=15)) for cache in caches: try: cache.load() self.assertIn(cache.location, rect) except PMOnlyException: pass
def test_calculate_initial_tiles(self):
    """Test initial tile calculation for a given rectangle."""
    expect_tiles = [(2331, 1185, 12), (2331, 1186, 12), (2332, 1185, 12), (2332, 1186, 12)]
    expect_precision = 76.06702024121832
    r = Rectangle(Point(60.15, 24.95), Point(60.17, 25.00))
    tiles, starting_precision = self.g._calculate_initial_tiles(r)

    for t in tiles:
        with self.subTest("Tile {} expected as initial tile".format(t)):
            self.assertIn(t, expect_tiles)

    with self.subTest("Expected precision"):
        self.assertAlmostEqual(starting_precision, expect_precision)

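# Hedged sketch of the tile arithmetic implied by the expected values above.
# `_calculate_initial_tiles` is private and its implementation is not shown
# here; the helper below merely applies the standard Web Mercator
# ("slippy map") tile formula, which reproduces the x/y indices listed in
# `expect_tiles` for the rectangle's corners at zoom 12. The exact source of
# `expect_precision` is not derived here.
def _example_tile_for(lat, lon, zoom):  # pragma: no cover - illustration only
    import math

    n = 2 ** zoom
    x = int((lon + 180.0) / 360.0 * n)
    lat_rad = math.radians(lat)
    y = int((1.0 - math.log(math.tan(lat_rad) + 1.0 / math.cos(lat_rad)) / math.pi) / 2.0 * n)
    return x, y, zoom

# _example_tile_for(60.15, 24.95, 12) == (2331, 1186, 12)
# _example_tile_for(60.17, 25.00, 12) == (2332, 1185, 12)
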
def test_search_quick_match_load(self): """Test if quick search results matches exact cache locations.""" rect = Rectangle(Point(49.73, 13.38), Point(49.74, 13.39)) with self.recorder.use_cassette('geocaching_matchload'): # at commit time, this test is an allowed failure. Once this feature is fixed, the # corresponding cassette will have to be deleted and re-recorded. caches = list(self.gc.search_quick(rect, strict=True, zoom=15)) for cache in caches: try: cache.load() self.assertIn(cache.location, rect) except PMOnlyException: pass
def test_recover_from_rate_limit(self):
    """Test recovering from API rate limit exception."""
    rect = Rectangle(Point(50.74, 13.38), Point(49.73, 14.40))  # large rectangle

    with self.recorder.use_cassette("geocaching_api_rate_limit") as vcr:
        orig_wait_for = TooManyRequestsError.wait_for
        with patch.object(TooManyRequestsError, "wait_for", autospec=True) as wait_for:
            # If we are recording, we must perform a real wait, otherwise we skip waiting
            wait_for.side_effect = orig_wait_for if vcr.current_cassette.is_recording() else None

            for i, _cache in enumerate(self.gc.search_rect(rect, per_query=1)):
                if wait_for.called:
                    self.assertEqual(wait_for.call_count, 1)
                    break

                if i > 20:  # rate limit should be released after ~10 requests
                    self.fail("API Rate Limit not released")

def test_search_rect(self):
    """Perform search by rect and check found caches."""
    rect = Rectangle(Point(49.73, 13.38), Point(49.74, 13.39))
    expected = {"GC1TYYG", "GC11PRW", "GC7JRR5", "GC161KR", "GC1GW54", "GC7KDWE", "GC93HA6", "GCZC5D"}

    orig_wait_for = TooManyRequestsError.wait_for
    with self.recorder.use_cassette("geocaching_search_rect") as vcr:
        with patch.object(TooManyRequestsError, "wait_for", autospec=True) as wait_for:
            wait_for.side_effect = orig_wait_for if vcr.current_cassette.is_recording() else None

            with self.subTest("default use"):
                caches = self.gc.search_rect(rect)
                waypoints = {cache.wp for cache in caches}
                self.assertSetEqual(waypoints, expected)

            with self.subTest("sort by distance"):
                with self.assertRaises(AssertionError):
                    caches = list(self.gc.search_rect(rect, sort_by="distance"))

                origin = Point.from_string("N 49° 44.230 E 013° 22.858")
                caches = list(self.gc.search_rect(rect, sort_by=SortOrder.distance, origin=origin))
                waypoints = {cache.wp for cache in caches}
                self.assertSetEqual(waypoints, expected)

                # Check if caches are sorted by distance to origin
                distances = []
                for cache in caches:
                    try:
                        distances.append(great_circle(cache.location, origin).meters)
                    except PMOnlyException:
                        # can happen when getting accurate location
                        continue
                self.assertEqual(distances, sorted(distances))

            with self.subTest("sort by different criteria"):
                for sort_by in SortOrder:
                    if sort_by is SortOrder.distance:
                        continue
                    caches = self.gc.search_rect(rect, sort_by=sort_by)
                    waypoints = {cache.wp for cache in caches}
                    self.assertSetEqual(waypoints, expected)

def test_recover_from_rate_limit_without_sleep(self):
    """Test recovering from API rate limit exception without sleep."""
    rect = Rectangle(Point(50.74, 13.38), Point(49.73, 14.40))

    with self.recorder.use_cassette('geocaching_api_rate_limit_with_none') as vcr:
        with patch.object(TooManyRequestsError, 'wait_for', autospec=True) as wait_for:
            caches = self.gc.search_rect(rect, per_query=1, wait_sleep=False)
            for i, cache in enumerate(caches):
                if cache is None:
                    import time
                    while cache is None:
                        if vcr.current_cassette.is_recording():
                            time.sleep(10)
                        cache = next(caches)
                    self.assertIsInstance(cache, Cache)
                    break

                if i > 20:
                    self.fail("API Rate Limit not released")

            self.assertEqual(wait_for.call_count, 0)

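# Hedged usage sketch of the wait_sleep=False contract exercised above:
# search_rect is assumed to yield None while the API rate limit is active,
# so callers can implement their own backoff. The generator name and the
# delay value below are illustrative only.
def _example_search_with_manual_backoff(geocaching, rect, delay=10):  # pragma: no cover
    import time

    for cache in geocaching.search_rect(rect, per_query=1, wait_sleep=False):
        if cache is None:  # rate limited: back off before pulling more results
            time.sleep(delay)
            continue
        yield cache
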
def test_search_rect(self):
    """Perform search by rect and check found caches."""
    rect = Rectangle(Point(49.73, 13.38), Point(49.74, 13.39))
    expected = {'GC1TYYG', 'GC11PRW', 'GC7JRR5', 'GC161KR', 'GC1GW54', 'GC7KDWE', 'GC8D303'}

    orig_wait_for = TooManyRequestsError.wait_for
    with self.recorder.use_cassette('geocaching_search_rect') as vcr:
        with patch.object(TooManyRequestsError, 'wait_for', autospec=True) as wait_for:
            wait_for.side_effect = orig_wait_for if vcr.current_cassette.is_recording() else None

            with self.subTest("default use"):
                caches = self.gc.search_rect(rect)
                waypoints = {cache.wp for cache in caches}
                self.assertSetEqual(waypoints, expected)

            with self.subTest("sort by distance"):
                with self.assertRaises(AssertionError):
                    caches = list(self.gc.search_rect(rect, sort_by='distance'))

                origin = Point.from_string('N 49° 44.230 E 013° 22.858')
                caches = list(self.gc.search_rect(rect, sort_by=SortOrder.distance, origin=origin))
                waypoints = [cache.wp for cache in caches]
                self.assertEqual(waypoints, [
                    'GC11PRW', 'GC1TYYG', 'GC7JRR5', 'GC1GW54', 'GC161KR', 'GC7KDWE', 'GC8D303'
                ])

                # Check if caches are sorted by distance to origin
                distances = [great_circle(cache.location, origin).meters for cache in caches]
                self.assertEqual(distances, sorted(distances))

            with self.subTest("sort by different criteria"):
                for sort_by in SortOrder:
                    if sort_by is SortOrder.distance:
                        continue
                    caches = self.gc.search_rect(rect, sort_by=sort_by)
                    waypoints = {cache.wp for cache in caches}
                    self.assertSetEqual(waypoints, expected)

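# Hedged helper sketch: the "sorted by distance" assertions in both
# test_search_rect variants above reduce to this check. great_circle is
# assumed to come from geopy.distance, matching its use in the tests.
def _example_is_sorted_by_distance(caches, origin):  # pragma: no cover - illustration only
    from geopy.distance import great_circle

    distances = [great_circle(cache.location, origin).meters for cache in caches]
    return distances == sorted(distances)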