def bounding_box(self):
    """Return the smallest Rectangle that contains all of ``self.points``.

    The rectangle is built from the extreme latitude and longitude values.
    """
    lats = [p.latitude for p in self.points]
    lons = [p.longitude for p in self.points]
    # bugfix/perf: the original sorted both lists and then still called
    # min()/max(); sorting is redundant O(n log n) work — min/max scan in O(n)
    return Rectangle(Point(min(lats), min(lons)),
                     Point(max(lats), max(lons)))
def __init__(self, point_a, point_b):
    """Create a rectangle defined by two opposite corners.

    :param point_a: one corner (Point instance).
    :param point_b: the opposite corner (Point instance).
    """
    assert point_a != point_b, "Corner points cannot be the same"
    self.corners = [point_a, point_b]
    # derive the remaining two corners by mixing the coordinates
    mixed_first = Point(point_a.latitude, point_b.longitude)
    mixed_second = Point(point_b.latitude, point_a.longitude)
    self.points = [point_a, mixed_first, point_b, mixed_second]
def location(self, location):
    """Set the location; accepts a Point instance or a coordinate string.

    :param location: Point instance, or string parseable by Point.from_string.
    :raises ValueError: when the value is neither a Point nor a string.
    """
    # bugfix: use isinstance instead of exact `type(...) is` checks so that
    # subclasses of str / Point are accepted as well (backward compatible:
    # everything that previously worked still works)
    if isinstance(location, str):
        location = Point.from_string(location)
    elif not isinstance(location, Point):
        raise ValueError(
            "Passed object is not Point instance nor string containing coordinates."
        )
    self._location = location
def test_inside_area(self):
    """Check inside_area both ways (area->point and point->area) for
    points inside and outside the fixture rectangle."""
    inside_coords = [(10., 20.), (30., -5.), (18., 15.), (29., -1), (10., -3)]
    outside_coords = [(-10., -170.), (-70., 0.), (0., 40), (20., -10.), (50., 0.)]
    cases = [
        ([Point(*c) for c in inside_coords], self.assertTrue),
        ([Point(*c) for c in outside_coords], self.assertFalse),
    ]
    for points, check in cases:
        for p in points:
            with self.subTest("Area -> point: {}".format(p)):
                check(self.rect.inside_area(p))
            with self.subTest("Point -> area: {}".format(p)):
                check(p.inside_area(self.rect))
def geocode(self, query):
    """Try to fetch coordinates for a free-text query.

    :param query: textual location query.
    :return: Point with the geocoded latitude/longitude.
    :raises Error: when the geocode page cannot be loaded.
    :raises GeocodeError: when the service reports a failure status.
    """
    assert type(query) is str
    # bugfix: URL-encode the query — raw concatenation broke requests
    # containing spaces or special characters
    url = self._urls["geocode"] + "?" + urlencode({"q": query})
    try:
        res = self._browser.get(url).json()
    except requests.exceptions.ConnectionError as e:
        raise Error("Cannot load geocode page.") from e
    if res["status"] != "success":
        raise GeocodeError(res["msg"])
    return Point(float(res["data"]["lat"]), float(res["data"]["lng"]))
def load_cache(self, wp, destination=None):
    """Load details from the cache page and return a fully populated Cache.

    :param wp: cache waypoint code (must be a str starting with "GC").
    :param destination: optional existing Cache instance to populate in place.
    :return: populated Cache object.
    :raises PMOnlyException: when the cache is Premium Members only.
    :raises Error: when the details page cannot be loaded or parsed.
    """
    assert type(wp) is str and wp.startswith("GC")
    logging.info("Loading details about %s...", wp)

    # assemble request
    params = urlencode({"wp": wp})
    url = self._urls["cache_details"] + "?" + params

    try:
        root = self._browser.get(url).soup
    except requests.exceptions.ConnectionError as e:
        raise Error("Cannot load cache details page.") from e

    cache_details = root.find(id="cacheDetails")

    # check for PM only caches if using free account
    if cache_details is None:
        # bugfix: select() returns a list (possibly empty), never None, so the
        # original "is not None" test was always true; check truthiness instead
        if root.select(".PMOWarning"):
            raise PMOnlyException("Premium Members only.")
        # no details and no PMO warning -> the page could not be parsed;
        # raise explicitly instead of crashing below with AttributeError
        raise Error("Cannot find cache details.")

    # parse raw data
    name = cache_details.find("h2")
    cache_type = cache_details.find("img").get("alt")
    author = cache_details("a")[1]
    hidden = cache_details.find("div", "minorCacheDetails").find_all("div")[1]
    location = root.find(id="uxLatLon")
    state = root.find("ul", "OldWarning")
    found = root.find("div", "FoundStatus")
    D_T = root.find("div", "CacheStarLabels").find_all("img")
    size = root.find("div", "CacheSize").find("img")
    attributes_raw = root.find_all("div", "CacheDetailNavigationWidget")[0].find_all("img")
    user_content = root.find_all("div", "UserSuppliedContent")
    hint = root.find(id="div_hint")
    favorites = root.find("span", "favorite-value")

    # create cache object
    c = destination or Cache(wp, self)
    assert isinstance(c, Cache)

    # prettify data
    c.name = name.text
    c.cache_type = cache_type
    c.author = author.text
    c.hidden = Util.parse_date(hidden.text.split()[2])
    c.location = Point.from_string(location.text)
    c.state = state is None
    c.found = found and "Found It!" in found.text or False
    c.difficulty, c.terrain = [float(_.get("alt").split()[0]) for _ in D_T]
    c.size = " ".join(size.get("alt").split()[1:])
    attributes_raw = [_.get("src").split('/')[-1].rsplit("-", 1) for _ in attributes_raw]
    c.attributes = {attribute_name: appendix.startswith("yes")
                    for attribute_name, appendix in attributes_raw
                    if not appendix.startswith("blank")}
    c.summary = user_content[0].text
    c.description = str(user_content[1])
    c.hint = Util.rot13(hint.text.strip())
    # bugfix: favorites element is absent on caches with zero favorites
    # (the load_cache_by_url sibling already guards against this)
    c.favorites = 0 if favorites is None else int(favorites.text)

    logging.debug("Cache loaded: %r", c)
    return c
def mean_point(self):
    """Return a Point at the arithmetic mean of all point coordinates."""
    count = len(self.points)
    lat_total = sum(p.latitude for p in self.points)
    lon_total = sum(p.longitude for p in self.points)
    return Point(lat_total / count, lon_total / count)
def location(self, location):
    """Set the location; accepts a Point instance or a coordinate string.

    :param location: Point instance, or string parseable by Point.from_string.
    :raises ValueError: when the value is neither a Point nor a string.
    """
    # bugfix: use isinstance instead of exact `type(...) is` checks so that
    # subclasses of str / Point are accepted as well (backward compatible)
    if isinstance(location, str):
        location = Point.from_string(location)
    elif not isinstance(location, Point):
        raise ValueError("Passed object is not Point instance nor string containing coordinates.")
    self._location = location
def setUp(self):
    """Build the Polygon fixture used by the tests."""
    vertex_coords = [(10., 20.), (30., -5.), (-10., -170.), (-70., 0.), (0., 40)]
    vertices = [Point(lat, lon) for lat, lon in vertex_coords]
    self.p = Polygon(*vertices)
def setUp(self):
    """Build the Rectangle fixture used by the tests."""
    corner_one = Point(10., 20.)
    corner_two = Point(30., -5.)
    self.rect = Rectangle(corner_one, corner_two)
def get_location(self):
    """Calculate the actual coordinates of this grid block."""
    middle_x, middle_y = self._get_middle_point()
    grid = self.utf_grid
    return Point.from_tile(grid.x, grid.y, grid.z,
                           middle_x, middle_y, grid.size)
def get_location(self):
    """Calculate the actual coordinates of this grid block."""
    center_x, center_y = self._get_middle_point()
    tile = self.utf_grid
    return Point.from_tile(tile.x, tile.y, tile.z, center_x, center_y, tile.size)
def load_cache(self, wp, destination=None):
    """Load details from the cache page and return a fully populated Cache.

    :param wp: cache waypoint code (must be a str starting with "GC").
    :param destination: optional existing Cache instance to populate in place.
    :return: populated Cache object.
    :raises PMOnlyException: when the cache is Premium Members only.
    :raises Error: when the details page cannot be loaded or parsed.
    """
    assert type(wp) is str and wp.startswith("GC")
    logging.info("Loading details about %s...", wp)

    # assemble request
    params = urlencode({"wp": wp})
    url = self._urls["cache_details"] + "?" + params

    try:
        root = self._browser.get(url).soup
    except requests.exceptions.ConnectionError as e:
        raise Error("Cannot load cache details page.") from e

    cache_details = root.find(id="cacheDetails")

    # check for PM only caches if using free account
    if cache_details is None:
        # bugfix: select() returns a list (possibly empty), never None, so the
        # original "is not None" test was always true; check truthiness instead
        if root.select(".PMOWarning"):
            raise PMOnlyException("Premium Members only.")
        # no details and no PMO warning -> raise explicitly instead of
        # crashing below with AttributeError
        raise Error("Cannot find cache details.")

    # parse raw data
    name = cache_details.find("h2")
    cache_type = cache_details.find("img").get("alt")
    author = cache_details("a")[1]
    hidden = cache_details.find("div", "minorCacheDetails").find_all("div")[1]
    location = root.find(id="uxLatLon")
    state = root.find("ul", "OldWarning")
    found = root.find("div", "FoundStatus")
    D_T = root.find("div", "CacheStarLabels").find_all("img")
    size = root.find("div", "CacheSize").find("img")
    attributes_raw = root.find_all("div", "CacheDetailNavigationWidget")[0].find_all("img")
    user_content = root.find_all("div", "UserSuppliedContent")
    hint = root.find(id="div_hint")
    favorites = root.find("span", "favorite-value")

    # create cache object
    c = destination or Cache(wp, self)
    assert isinstance(c, Cache)

    # prettify data
    c.name = name.text
    c.cache_type = cache_type
    c.author = author.text
    c.hidden = Util.parse_date(hidden.text.split()[2])
    c.location = Point.from_string(location.text)
    c.state = state is None
    c.found = found and "Found It!" in found.text or False
    c.difficulty, c.terrain = [float(_.get("alt").split()[0]) for _ in D_T]
    c.size = " ".join(size.get("alt").split()[1:])
    attributes_raw = [_.get("src").split('/')[-1].rsplit("-", 1) for _ in attributes_raw]
    c.attributes = {attribute_name: appendix.startswith("yes")
                    for attribute_name, appendix in attributes_raw
                    if not appendix.startswith("blank")}
    c.summary = user_content[0].text
    c.description = str(user_content[1])
    c.hint = Util.rot13(hint.text.strip())
    # bugfix: favorites element is absent on caches with zero favorites
    c.favorites = 0 if favorites is None else int(favorites.text)

    logging.debug("Cache loaded: %r", c)
    return c
def load_cache_by_url(self, url, destination=None):
    """Load all cache details from its listing page URL.

    :param url: full URL of the cache details page.
    :param destination: optional existing Cache instance to populate in place.
    :return: populated Cache object (including its trackables list).
    :raises PMOnlyException: when the cache is Premium Members only.
    :raises Error: when the page cannot be loaded or parsed.
    """
    try:
        root = self._browser.get(url).soup
    except requests.exceptions.ConnectionError as e:
        raise Error("Cannot load cache details page.") from e

    cache_details = root.find(id="cacheDetails")

    # check for PM only caches if using free account
    if cache_details is None:
        # bugfix: select() returns a list (possibly empty), never None, so the
        # original "is not None" test was always true; check truthiness instead
        if root.select(".PMOWarning"):
            raise PMOnlyException("Premium Members only.")
        # no details and no PMO warning -> raise explicitly instead of
        # crashing below with AttributeError
        raise Error("Cannot find cache details.")

    # parse raw data
    wp = root.title.string.split(' ')[0]
    name = cache_details.find("h2")
    cache_type = cache_details.find("img").get("src")
    author = cache_details("a")[1]
    hidden = cache_details.find("div", "minorCacheDetails").find_all("div")[1]
    location = root.find(id="uxLatLon")
    state = root.find("ul", "OldWarning")
    found = root.find("div", "FoundStatus")
    D_T = root.find("div", "CacheStarLabels").find_all("img")
    size = root.find("div", "CacheSize").find("img")
    attributes_raw = root.find_all("div", "CacheDetailNavigationWidget")[0].find_all("img")
    user_content = root.find_all("div", "UserSuppliedContent")
    hint = root.find(id="div_hint")
    favorites = root.find("span", "favorite-value")

    # check for trackables
    inventory_raw = root.find_all("div", "CacheDetailNavigationWidget")
    inventory_links = inventory_raw[1].find_all("a")
    if len(inventory_links) >= 3:
        trackable_page = self._urls['trackable_base'] + inventory_links[-3].get("href")
    else:
        trackable_page = None

    # create cache object
    c = destination or Cache(wp, self)
    assert isinstance(c, Cache)

    # prettify data
    c.name = name.text
    c.cache_type = Cache.get_cache_type_by_img(cache_type)
    c.author = author.text
    c.hidden = Util.parse_date(hidden.text.split(":")[-1])
    c.location = Point.from_string(location.text)
    c.state = state is None
    c.found = found and "Found It!" in found.text or False
    c.difficulty, c.terrain = [float(_.get("alt").split()[0]) for _ in D_T]
    c.size = size.get("src").split("/")[-1].rsplit(".", 1)[0]  # filename of img[src]
    attributes_raw = [_.get("src").split('/')[-1].rsplit("-", 1) for _ in attributes_raw]
    c.attributes = {attribute_name: appendix.startswith("yes")
                    for attribute_name, appendix in attributes_raw
                    if not appendix.startswith("blank")}
    c.summary = user_content[0].text
    c.description = str(user_content[1])
    c.hint = Util.rot13(hint.text.strip())
    if favorites is None:
        c.favorites = 0
    else:
        c.favorites = int(favorites.text)
    if trackable_page is not None:
        c.trackables = self.load_trackable_list(trackable_page)
    else:
        c.trackables = []

    logging.debug("Cache loaded: %r", c)
    return c
def load_cache_by_url(self, url, destination=None):
    """Load all cache details from its listing page URL.

    :param url: full URL of the cache details page.
    :param destination: optional existing Cache instance to populate in place.
    :return: populated Cache object (including its trackables list).
    :raises PMOnlyException: when the cache is Premium Members only.
    :raises Error: when the page cannot be loaded or parsed.
    """
    try:
        root = self._browser.get(url).soup
    except requests.exceptions.ConnectionError as e:
        raise Error("Cannot load cache details page.") from e

    cache_details = root.find(id="cacheDetails")

    # check for PM only caches if using free account
    if cache_details is None:
        # bugfix: select() returns a list (possibly empty), never None, so the
        # original "is not None" test was always true; check truthiness instead
        if root.select(".PMOWarning"):
            raise PMOnlyException("Premium Members only.")
        # no details and no PMO warning -> raise explicitly instead of
        # crashing below with AttributeError
        raise Error("Cannot find cache details.")

    # parse raw data
    wp = root.title.string.split(' ')[0]
    name = cache_details.find("h2")
    cache_type = cache_details.find("img").get("src")
    author = cache_details("a")[1]
    hidden = cache_details.find("div", "minorCacheDetails").find_all("div")[1]
    location = root.find(id="uxLatLon")
    state = root.find("ul", "OldWarning")
    found = root.find("div", "FoundStatus")
    D_T = root.find("div", "CacheStarLabels").find_all("img")
    size = root.find("div", "CacheSize").find("img")
    attributes_raw = root.find_all("div", "CacheDetailNavigationWidget")[0].find_all("img")
    user_content = root.find_all("div", "UserSuppliedContent")
    hint = root.find(id="div_hint")
    favorites = root.find("span", "favorite-value")

    # check for trackables
    inventory_raw = root.find_all("div", "CacheDetailNavigationWidget")
    inventory_links = inventory_raw[1].find_all("a")
    if len(inventory_links) >= 3:
        trackable_page = self._urls['trackable_base'] + inventory_links[-3].get("href")
    else:
        trackable_page = None

    # create cache object
    c = destination or Cache(wp, self)
    assert isinstance(c, Cache)

    # prettify data
    c.name = name.text
    c.cache_type = Cache.get_cache_type_by_img(cache_type)
    c.author = author.text
    c.hidden = Util.parse_date(hidden.text.split(":")[-1])
    c.location = Point.from_string(location.text)
    c.state = state is None
    c.found = found and "Found It!" in found.text or False
    c.difficulty, c.terrain = [float(_.get("alt").split()[0]) for _ in D_T]
    c.size = size.get("src").split("/")[-1].rsplit(".", 1)[0]  # filename of img[src]
    attributes_raw = [_.get("src").split('/')[-1].rsplit("-", 1) for _ in attributes_raw]
    c.attributes = {attribute_name: appendix.startswith("yes")
                    for attribute_name, appendix in attributes_raw
                    if not appendix.startswith("blank")}
    c.summary = user_content[0].text
    c.description = str(user_content[1])
    c.hint = Util.rot13(hint.text.strip())
    if favorites is None:
        c.favorites = 0
    else:
        c.favorites = int(favorites.text)
    if trackable_page is not None:
        c.trackables = self.load_trackable_list(trackable_page)
    else:
        c.trackables = []

    logging.debug("Cache loaded: %r", c)
    return c