def search(self, point, limit=float("inf")):
    """Return a generator of caches around some point.

    Search for caches around some point by loading search pages and parsing the data from
    these pages. Yield :class:`.Cache` objects filled with data from the search page. You can
    provide limit as a convenient way to stop the generator after a certain number of caches.

    :param .geo.Point point: Search center point.
    :param int limit: Maximum number of caches to generate.
    """
    logging.info("Searching at {}".format(point))

    start_index = 0
    while True:
        # get one page
        page = self._search_get_page(point, start_index)

        if not page:
            # result is empty - no more caches
            return

        # parse caches in result
        for start_index, row in enumerate(page.find_all("tr"), start_index):

            limit -= 1  # handle limit
            if limit < 0:
                return

            # parse raw data
            cache_details = row.find("span", "cache-details").text.split("|")
            wp = cache_details[1].strip()

            # create and fill cache object
            c = Cache(self, wp)
            c.type = Type.from_string(cache_details[0].strip())
            c.name = row.find("span", "cache-name").text
            c.found = row.find("img", title="Found It!") is not None
            c.favorites = int(row.find(attrs={"data-column": "FavoritePoint"}).text)
            c.state = not (row.get("class") and "disabled" in row.get("class"))
            c.pm_only = row.find("td", "pm-upsell") is not None

            if c.pm_only:
                # PM-only caches don't have other attributes filled in
                yield c
                continue

            c.size = Size.from_string(row.find(attrs={"data-column": "ContainerSize"}).text)
            c.difficulty = float(row.find(attrs={"data-column": "Difficulty"}).text)
            c.terrain = float(row.find(attrs={"data-column": "Terrain"}).text)
            c.hidden = parse_date(row.find(attrs={"data-column": "PlaceDate"}).text)
            c.author = row.find("span", "owner").text[3:]  # delete "by "

            logging.debug("Cache parsed: {}".format(c))
            yield c

        start_index += 1
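
# Why the generator above ends iteration with a plain "return" instead of
# raising StopIteration: under PEP 479 (the default behavior since Python 3.7),
# a StopIteration raised inside a generator body is converted into RuntimeError,
# crashing the consumer instead of ending iteration. A minimal sketch:

def _broken():
    yield 1
    raise StopIteration()  # consumer gets RuntimeError, not a clean stop

def _fixed():
    yield 1
    return  # ends the generator cleanly

# list(_broken())  ->  RuntimeError: generator raised StopIteration
# list(_fixed())   ->  [1]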
def search(self, point, limit=float("inf")):
    """Return a generator of caches around some point.

    Search for caches around some point by loading search pages and parsing the data from
    these pages. Yield :class:`.Cache` objects filled with data from the search page. You can
    provide limit as a convenient way to stop the generator after a certain number of caches.

    :param .geo.Point point: Search center point.
    :param int limit: Maximum number of caches to generate.
    """
    logging.info("Searching at {}".format(point))

    start_index = 0
    while True:
        # get one page
        page = self._search_get_page(point, start_index)

        if not page:
            # result is empty - no more caches
            return

        # parse caches in result
        for start_index, row in enumerate(page.find_all("tr"), start_index):

            limit -= 1  # handle limit
            if limit < 0:
                return

            # parse raw data
            cache_details = row.find("span", "cache-details").text.split("|")
            wp = cache_details[1].strip()

            # create and fill cache object
            c = Cache(self, wp)
            c.type = Type.from_string(cache_details[0].strip())
            c.name = row.find("span", "cache-name").text
            badge = row.find("svg", class_="badge")
            c.found = "found" in str(badge) if badge is not None else False
            c.favorites = int(row.find(attrs={"data-column": "FavoritePoint"}).text)
            c.state = not (row.get("class") and "disabled" in row.get("class"))
            c.pm_only = row.find("td", "pm-upsell") is not None

            if c.pm_only:
                # PM-only caches don't have other attributes filled in
                yield c
                continue

            c.size = Size.from_string(row.find(attrs={"data-column": "ContainerSize"}).text)
            c.difficulty = float(row.find(attrs={"data-column": "Difficulty"}).text)
            c.terrain = float(row.find(attrs={"data-column": "Terrain"}).text)
            c.hidden = parse_date(row.find(attrs={"data-column": "PlaceDate"}).text)
            c.author = row.find("span", "owner").text[3:]  # delete "by "

            logging.debug("Cache parsed: {}".format(c))
            yield c

        start_index += 1
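
# A minimal sketch of the badge-based "found" check used above, run against an
# assumed piece of row markup (the exact SVG the search page serves may differ);
# only BeautifulSoup is required.

from bs4 import BeautifulSoup

_row = BeautifulSoup(
    '<tr><td><svg class="badge"><use xlink:href="#found"/></svg></td></tr>',
    "html.parser",
)
_badge = _row.find("svg", class_="badge")
_found = "found" in str(_badge) if _badge is not None else False
print(_found)  # True: the serialized badge markup contains "found"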
def search(self, point, limit=float("inf")):
    """Return a generator of caches around some point.

    Search for caches around some point by loading search pages and parsing the data from
    these pages. Yield :class:`.Cache` objects filled with data from the search page. You can
    provide limit as a convenient way to stop the generator after a certain number of caches.

    :param .geo.Point point: Search center point.
    :param int limit: Maximum number of caches to generate.
    """
    logging.info("Searching at {}".format(point))

    start_index = 0
    while True:
        # get one page
        geocaches_table, whole_page = self._search_get_page(point, start_index)
        rows = geocaches_table.find_all("tr")

        # leave loop if there are no (more) results
        if not rows:
            return

        # prepare language-dependent mappings
        if start_index == 0:
            cache_sizes_filter_wrapper = whole_page.find("div", class_="cache-sizes-wrapper")
            localized_size_mapping = {
                # key = "Small" (localized), value = Size.small
                label.find("span").text.strip(): Size.from_number(label.find("input").get("value"))
                for label in cache_sizes_filter_wrapper.find_all("label")
            }

        # parse caches in result
        for start_index, row in enumerate(rows, start_index):

            limit -= 1  # handle limit
            if limit < 0:
                return

            # parse raw data
            cache_details = row.find("span", "cache-details").text.split("|")
            wp = cache_details[1].strip()

            # create and fill cache object
            # values are sanitized and converted in Cache setters
            c = Cache(self, wp)
            c.type = cache_details[0]
            c.name = row.find("span", "cache-name").text
            badge = row.find("svg", class_="badge")
            c.found = "found" in str(badge) if badge is not None else False
            c.favorites = row.find(attrs={"data-column": "FavoritePoint"}).text
            c.state = not (row.get("class") and "disabled" in row.get("class"))
            c.pm_only = row.find("td", "pm-upsell") is not None

            if c.pm_only:
                # PM-only caches don't have other attributes filled in
                yield c
                continue

            c.size = localized_size_mapping[row.find(attrs={"data-column": "ContainerSize"}).text.strip()]
            c.difficulty = row.find(attrs={"data-column": "Difficulty"}).text
            c.terrain = row.find(attrs={"data-column": "Terrain"}).text
            c.hidden = row.find(attrs={"data-column": "PlaceDate"}).text
            c.author = row.find("span", "owner").text[3:]  # delete "by "

            logging.debug("Cache parsed: {}".format(c))
            yield c

        start_index += 1
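
# A minimal sketch of the localized size mapping built above, using an assumed
# snippet of the size-filter markup and a stand-in for pycaching's Size enum
# (the value-to-size numbering below is an assumption, not the site's real one).

import enum

from bs4 import BeautifulSoup

class _Size(enum.Enum):
    micro = "micro"
    small = "small"

    @classmethod
    def from_number(cls, number):
        return {2: cls.micro, 8: cls.small}[int(number)]  # assumed numbering

_html = """
<div class="cache-sizes-wrapper">
  <label><input type="checkbox" value="2"><span>Micro</span></label>
  <label><input type="checkbox" value="8"><span>Small</span></label>
</div>
"""
_wrapper = BeautifulSoup(_html, "html.parser").find("div", class_="cache-sizes-wrapper")
_mapping = {
    # key = localized size label, value = size enum member
    label.find("span").text.strip(): _Size.from_number(label.find("input").get("value"))
    for label in _wrapper.find_all("label")
}
print(_mapping)  # {'Micro': <_Size.micro: 'micro'>, 'Small': <_Size.small: 'small'>}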
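
# Usage sketch, assuming the method above lives on pycaching's Geocaching class
# (with a valid login) and that Point comes from pycaching.geo; the names mirror
# pycaching's public API, but treat the exact calls as illustrative:
#
#     import pycaching
#     from pycaching.geo import Point
#
#     geocaching = pycaching.login("user", "password")
#     for cache in geocaching.search(Point(49.2, 16.6), limit=10):
#         print(cache.wp, cache.name)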