Example #1
    def test_load_trackables(self):
        cache = Cache(
            self.gc,
            "GC26737")  # TB graveyard - will surely have some trackables
        with recorder.use_cassette('cache_trackables'):
            trackable_list = list(cache.load_trackables(limit=10))
        self.assertTrue(isinstance(trackable_list, list))
Example #2
    def _parse_utfgrid(self, json_grid):
        """Parse geocache coordinates from UTFGrid

        Consume json-decoded UTFGrid data from MechanicalSoup browser.
        Calculate waypoint coordinates and return generator object of
        Cache instances.

        Geocaching.com UTFGrids do not follow the UTFGrid specification [2]
        in grid contents and key values.  The list "grid" contains valid
        code pixels that are individual, but the list "keys" contains
        coordinates as "(x, y)" for points where there are geocaches on
        the grid.  Code pixels can, however, be decoded to produce the
        index of a coordinate point in the list "keys".  The grid
        resolution is 64x64 and coordinates run from the northwest corner.
        The dictionary "data" has key-value pairs, where keys are the same
        coordinates as described above and values are lists of
        dictionaries, each containing a geocache waypoint code and name in
        the form {"n": name, "i": waypoint}.  Each waypoint seems to
        appear nine times, unless the cache is cut off at the grid edges.

        [2] https://github.com/mapbox/utfgrid-spec"""

        logging.debug("Parsing UTFGrid")
        caches = {}   # {waypoint: [<Cache>, <GridCoordinateBlock>]}
        size = len(json_grid["grid"])
        assert len(json_grid["grid"][1]) == size   # square grid
        if size != self.size:
            logging.warning("GC.com UTFGrid specs seem to have changed: "
                            "grid resolution is not 64!")
            self.size = size
        for coordinate_key in json_grid["data"]:
            cache_list = json_grid["data"][coordinate_key]
            x_i, y_i = (int(i) for i in coordinate_key.strip(" ()").split(","))
            # Store all caches to dictionary
            for cache_dic in cache_list:
                waypoint = cache_dic["i"]
                # Store all found coordinate points
                if waypoint not in caches:
                    c = Cache(waypoint, self._gc, name=cache_dic["n"])
                    caches[waypoint] = [c, GridCoordinateBlock(self, (x_i, y_i))]
                else:
                    caches[waypoint][1].add((x_i, y_i))
        # Try to determine grid coordinate block size
        GridCoordinateBlock.determine_block_size(
            *[len(caches[wp][1].points) for wp in caches])
        # Calculate geocache coordinates and yield objects
        for waypoint in caches:
            c, coord_block = caches[waypoint]
            c.location = coord_block.get_location()
            yield c
        logging.info("Found {} caches".format(len(caches)))
Example #3
    def _parse_utfgrid(self, json_grid):
        """Parse geocache coordinates from UTFGrid

        Consume json-decoded UTFGrid data from MechanicalSoup browser.
        Calculate waypoint coordinates and return generator object of
        Cache instances.

        Geocaching.com UTFGrids do not follow the UTFGrid specification [2]
        in grid contents and key values.  The list "grid" contains valid
        code pixels that are individual, but the list "keys" contains
        coordinates as "(x, y)" for points where there are geocaches on
        the grid.  Code pixels can, however, be decoded to produce the
        index of a coordinate point in the list "keys".  The grid
        resolution is 64x64 and coordinates run from the northwest corner.
        The dictionary "data" has key-value pairs, where keys are the same
        coordinates as described above and values are lists of
        dictionaries, each containing a geocache waypoint code and name in
        the form {"n": name, "i": waypoint}.  Each waypoint seems to
        appear nine times, unless the cache is cut off at the grid edges.

        [2] https://github.com/mapbox/utfgrid-spec"""

        logging.debug("Parsing UTFGrid")
        caches = {}  # {waypoint: [<Cache>, <GridCoordinateBlock>]}
        size = len(json_grid["grid"])
        assert len(json_grid["grid"][1]) == size  # square grid
        if size != self.size:
            logging.warning("GC.com UTFGrid specs seem to have changed: " "grid resolution is not 64!")
            self.size = size
        for coordinate_key in json_grid["data"]:
            cache_list = json_grid["data"][coordinate_key]
            x_i, y_i = (int(i) for i in coordinate_key.strip(" ()").split(","))
            # Store all caches to dictionary
            for cache_dic in cache_list:
                waypoint = cache_dic["i"]
                # Store all found coordinate points
                if waypoint not in caches:
                    c = Cache(waypoint, self._gc, name=cache_dic["n"])
                    caches[waypoint] = [c, GridCoordinateBlock(self, (x_i, y_i))]
                else:
                    caches[waypoint][1].add((x_i, y_i))
        # Try to determine grid coordinate block size
        GridCoordinateBlock.determine_block_size(*[len(caches[wp][1].points) for wp in caches])
        # Calculate geocache coordinates and yield objects
        for waypoint in caches:
            c, coord_block = caches[waypoint]
            c.location = coord_block.get_location()
            yield c
        logging.info("Found {} caches".format(len(caches)))
Example #4
    def setUp(self):
        self.gc = Geocaching()
        self.c = Cache(self.gc,
                       "GC12345",
                       name="Testing",
                       type=Type.traditional,
                       location=Point(),
                       state=True,
                       found=False,
                       size=Size.micro,
                       difficulty=1.5,
                       terrain=5,
                       author="human",
                       hidden=date(2000, 1, 1),
                       attributes={
                           "onehour": True,
                           "kids": False,
                           "available": True
                       },
                       summary="text",
                       description="long text",
                       hint="rot13",
                       favorites=0,
                       pm_only=False,
                       original_location=Point(),
                       waypoints={},
                       guid="53d34c4d-12b5-4771-86d3-89318f71efb1")
        self.c._log_page_url = "/seek/log.aspx?ID=1234567&lcn=1"
Example #5
    def test_load_quick(self):
        with self.subTest("normal"):
            cache = Cache(self.gc, "GC4808G")
            cache.load_quick()
            self.assertEqual(4, cache.terrain)
            self.assertEqual(Size.regular, cache.size)

        with self.subTest("fail"):
            with self.assertRaises(LoadError):
                cache = Cache(self.gc, "GC123456")
                cache.load_quick()
Example #6
    def test_load_quick(self):
        with self.subTest("normal"):
            cache = Cache(self.gc, "GC4808G")
            cache.load_quick()
            self.assertEqual(4, cache.terrain)
            self.assertEqual(Size.regular, cache.size)
            self.assertEqual(cache.guid,
                             "15ad3a3d-92c1-4f7c-b273-60937bcc2072")

        with self.subTest("fail"):
            with self.assertRaises(LoadError):
                cache = Cache(self.gc, "GC123456")
                cache.load_quick()
Example #7
    def get_cache(self, wp=None, guid=None):
        """Return a :class:`.Cache` object by its waypoint or GUID.

        :param str wp: Cache waypoint.
        :param str guid: Cache GUID.

        .. note ::
           Provide only the GUID or the waypoint, not both.
        """
        if (wp is None) == (guid is None):
            raise TypeError('Please provide exactly one of `wp` or `guid`.')
        if wp is not None:
            return Cache(self, wp)
        return self._cache_from_guid(guid)
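
The (wp is None) == (guid is None) comparison is a compact exclusive-or check: it is true exactly when both or neither argument is given. A standalone sketch of the behaviour, with the function body reduced to the check itself:

def get_cache(wp=None, guid=None):
    # True when both are None or both are set - exactly the invalid cases
    if (wp is None) == (guid is None):
        raise TypeError('Please provide exactly one of `wp` or `guid`.')
    return wp or guid

print(get_cache(wp="GC12345"))  # OK -> 'GC12345'
try:
    get_cache()  # neither given -> TypeError
except TypeError as e:
    print(e)
try:
    get_cache(wp="GC12345", guid="53d34c4d-12b5-4771-86d3-89318f71efb1")  # both -> TypeError
except TypeError as e:
    print(e)
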
Example #8
    def test_load_by_guid(self, mock_load_quick, mock_load):
        with self.subTest("normal"):
            cache = Cache(self.gc, "GC2WXPN", guid="5f45114d-1d79-4fdb-93ae-8f49f1d27188")
            with self.recorder.use_cassette('cache_guidload_normal'):
                cache.load_by_guid()
            self.assertEqual(cache.name, "Der Schatz vom Luftschloss")
            self.assertEqual(cache.location, Point("N 49° 57.895' E 008° 12.988'"))
            self.assertEqual(cache.type, Type.mystery)
            self.assertEqual(cache.size, Size.large)
            self.assertEqual(cache.difficulty, 2.5)
            self.assertEqual(cache.terrain, 1.5)
            self.assertEqual(cache.author, "engelmz & Punxsutawney Phil")
            self.assertEqual(cache.hidden, parse_date("23/06/2011"))
            self.assertDictEqual(cache.attributes, {
                "bicycles": True,
                "available": True,
                "parking": True,
                "onehour": True,
                "kids": True,
                "s-tool": True,
            })
            self.assertEqual(cache.summary, "Gibt es das Luftschloss wirklich?")
            self.assertIn("Seit dem 16.", cache.description)
            self.assertEqual(cache.hint, "Das ist nicht nötig")
            self.assertGreater(cache.favorites, 350)
            self.assertEqual(len(cache.waypoints), 2)

        with self.subTest("PM-only"):
            cache = Cache(self.gc, "GC6MKEF", guid="53d34c4d-12b5-4771-86d3-89318f71efb1")
            with self.recorder.use_cassette('cache_guidload_PMO'):
                with self.assertRaises(PMOnlyException):
                    cache.load_by_guid()

        with self.subTest("calls load_quick if no guid"):
            cache = Cache(self.gc, "GC2WXPN")
            with self.recorder.use_cassette('cache_guidload_fallback'):
                with self.assertRaises(Exception):
                    cache.load_by_guid()  # Raises error since we mocked load_quick()
            self.assertTrue(mock_load_quick.called)
Example #9
    def test_load_quick(self):
        with self.subTest("normal"):
            with self.recorder.use_cassette('cache_quick_normal'):
                cache = Cache(self.gc, "GC4808G")
                cache.load_quick()
            self.assertEqual(4, cache.terrain)
            self.assertEqual(Size.regular, cache.size)
            self.assertEqual(cache.guid, "15ad3a3d-92c1-4f7c-b273-60937bcc2072")

        with self.subTest("fail"):
            with self.recorder.use_cassette('cache_quickload_fail'):
                with self.assertRaises(LoadError):
                    cache = Cache(self.gc, "GC123456")
                    cache.load_quick()
Example #10
    def load_cache_quick(self, wp, destination=None):
        """Loads details from map server.

        Loads just basic cache details, but very quickly."""

        assert type(wp) is str and wp.startswith("GC")
        logging.info("Loading quick details about %s...", wp)

        # assemble request
        params = urlencode({"i": wp})
        url = self._urls["map"] + "?" + params

        try:
            res = self._browser.get(url).json()
        except requests.exceptions.ConnectionError as e:
            raise Error("Cannot load quick cache details page.") from e

        if res["status"] == "failed" or len(res["data"]) != 1:
            raise LoadError("Waypoint '{}' cannot be loaded: {}".format(
                wp, res["msg"]))

        data = res["data"][0]

        # create cache object
        c = destination or Cache(wp, self)
        assert isinstance(c, Cache)

        # prettify data
        c.name = data["name"]
        c.cache_type = data["type"]["text"]
        c.state = data["available"]
        c.size = data["container"]["text"]
        c.difficulty = data["difficulty"]["text"]
        c.terrain = data["terrain"]["text"]
        c.hidden = Util.parse_date(data["hidden"])
        c.author = data["owner"]["text"]
        c.favorites = int(data["fp"])
        c.pm_only = data["subrOnly"]

        logging.debug("Cache loaded: %r", c)
        return c
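
For reference, the map-server JSON consumed above has roughly the following shape, inferred from the field accesses in the code; all values are invented:

# Hypothetical response shape inferred from the accesses in
# load_cache_quick(); field names come from the code, values are invented.
res = {
    "status": "success",
    "msg": "",
    "data": [
        {
            "name": "Sample Cache",
            "type": {"text": "Traditional Cache"},
            "available": True,
            "container": {"text": "Micro"},
            "difficulty": {"text": "1.5"},
            "terrain": {"text": "2"},
            "hidden": "1/1/2000",
            "owner": {"text": "human"},
            "fp": "42",
            "subrOnly": False,
        }
    ],
}
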
Example #11
    def search_quick(self, area, *, strict=False, zoom=None):
        """Return a generator of caches in some area.

        The area is converted to map tiles; each tile is loaded and :class:`.Cache` objects are
        created from its blocks.

        :param bool strict: Whether to return only caches strictly inside `area` and discard others.
        :param int zoom: Zoom level of tiles. If not given, it is determined automatically so that
            the whole :class:`.Area` fits into one :class:`.Tile`. A higher zoom level is more
            precise, but requires more tiles to be loaded.
        """
        logging.info("Searching quick in {}".format(area))

        tiles = area.to_tiles(self, zoom)
        # TODO process tiles by multiple workers
        for tile in tiles:
            for block in tile.blocks:
                cache = Cache.from_block(block)
                if strict and cache.location not in area:
                    # strict mode is on and the cache lies outside the area
                    continue
                # otherwise yield the cache (it may lie slightly outside the desired area)
                yield cache
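
A standalone sketch of the strict-mode filter above, with the "cache.location not in area" test emulated by a simple bounding-box check; all names and coordinates are illustrative:

# Emulates the strict filtering above: `area` membership is stood in for
# by a bounding-box test; caches outside the box are skipped.
caches = {"GC1": (50.05, 14.42), "GC2": (51.00, 15.00)}
area = ((49.9, 14.3), (50.1, 14.5))  # (south-west, north-east) corners

def contains(area, location):
    (lat_min, lon_min), (lat_max, lon_max) = area
    lat, lon = location
    return lat_min <= lat <= lat_max and lon_min <= lon <= lon_max

strict = True
for wp, location in caches.items():
    if strict and not contains(area, location):
        continue
    print(wp)  # only GC1 falls inside the box
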
Example #12
    def _cache_from_guid(self, guid):
        logging.info('Loading cache with GUID {!r}'.format(guid))
        print_page = self._request(Cache._urls["print_page"], params={"guid": guid})
        return Cache._from_print_page(self, guid, print_page)
Example #13
    def load_cache(self, wp, destination=None):
        """Loads details from cache page.

        Loads all cache details and return fully populated cache object."""

        assert type(wp) is str and wp.startswith("GC")
        logging.info("Loading details about %s...", wp)

        # assemble request
        params = urlencode({"wp": wp})
        url = self._urls["cache_details"] + "?" + params

        try:
            root = self._browser.get(url).soup
        except requests.exceptions.ConnectionError as e:
            raise Error("Cannot load cache details page.") from e

        cache_details = root.find(id="cacheDetails")

        # check for PM only caches if using free account
        if cache_details is None:
            if root.select(".PMOWarning") is not None:
                raise PMOnlyException("Premium Members only.")

        # parse raw data
        name = cache_details.find("h2")
        cache_type = cache_details.find("img").get("alt")
        author = cache_details("a")[1]
        hidden = cache_details.find("div",
                                    "minorCacheDetails").find_all("div")[1]
        location = root.find(id="uxLatLon")
        state = root.find("ul", "OldWarning")
        found = root.find("div", "FoundStatus")
        D_T = root.find("div", "CacheStarLabels").find_all("img")
        size = root.find("div", "CacheSize").find("img")
        attributes_raw = root.find_all(
            "div", "CacheDetailNavigationWidget")[0].find_all("img")
        user_content = root.find_all("div", "UserSuppliedContent")
        hint = root.find(id="div_hint")
        favorites = root.find("span", "favorite-value")

        # create cache object
        c = destination or Cache(wp, self)
        assert isinstance(c, Cache)

        # prettify data
        c.name = name.text
        c.cache_type = cache_type
        c.author = author.text
        c.hidden = Util.parse_date(hidden.text.split()[2])
        c.location = Point.from_string(location.text)
        c.state = state is None
        c.found = bool(found and "Found It!" in found.text)
        c.difficulty, c.terrain = [float(_.get("alt").split()[0]) for _ in D_T]
        c.size = " ".join(size.get("alt").split()[1:])
        attributes_raw = [
            _.get("src").split('/')[-1].rsplit("-", 1) for _ in attributes_raw
        ]
        c.attributes = {
            attribute_name: appendix.startswith("yes")
            for attribute_name, appendix in attributes_raw
            if not appendix.startswith("blank")
        }
        c.summary = user_content[0].text
        c.description = str(user_content[1])
        c.hint = Util.rot13(hint.text.strip())
        c.favorites = int(favorites.text)

        logging.debug("Cache loaded: %r", c)
        return c
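
The attribute parsing above decodes icon filenames of the form "<name>-<yes|no|blank>.<ext>". A self-contained sketch with hypothetical filenames:

# Sketch of the attribute-icon decoding above; the srcs are hypothetical
# filenames in the "<name>-<yes|no|blank>.gif" pattern the code assumes.
srcs = [
    "/images/attributes/bicycles-yes.gif",
    "/images/attributes/dogs-no.gif",
    "/images/attributes/attribute-blank.gif",
]
attributes_raw = [src.split("/")[-1].rsplit("-", 1) for src in srcs]
attributes = {
    attribute_name: appendix.startswith("yes")
    for attribute_name, appendix in attributes_raw
    if not appendix.startswith("blank")
}
print(attributes)  # {'bicycles': True, 'dogs': False}
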
Example #14
    def test_load_trackables(self):
        cache = Cache(self.gc, "GC26737")  # TB graveyard - will surely have some trackables
        trackable_list = list(cache.load_trackables(limit=10))
        self.assertTrue(isinstance(trackable_list, list))
Example #15
    def _search_parse_cache(self, root):
        """Returns a Cache object parsed from BeautifulSoup Tag."""

        assert isinstance(root, bs4.Tag)

        # parse raw data
        favorites = root.find("span", "favorite-rank")
        typeLink, nameLink = root.find_all("a", "lnk")
        pm_only = root.find("img",
                            title="Premium Member Only Cache") is not None
        direction, info, D_T, placed, last_found = root.find_all(
            "span", "small")
        found = root.find("img", title="Found It!") is not None
        size = root.find("td", "AlignCenter").find("img")
        author, wp, area = [t.strip() for t in info.text.split("|")]

        # create cache object
        c = Cache(wp, self)

        # prettify data
        c.cache_type = typeLink.find("img").get("alt")
        c.name = nameLink.span.text.strip()
        c.found = found
        c.state = "Strike" not in nameLink.get("class")
        c.size = " ".join(size.get("alt").split()[1:])
        c.difficulty, c.terrain = list(map(float, D_T.text.split("/")))
        c.hidden = Util.parse_date(placed.text)
        c.author = author[3:]  # delete "by "
        c.favorites = int(favorites.text)
        c.pm_only = pm_only

        logging.debug("Cache parsed: %s", c)
        return c
Example #16
    def search(self, point, limit=float("inf")):
        """Return a generator of caches around some point.

        Search for caches around some point by loading search pages and parsing the data from these
        pages. Yield :class:`.Cache` objects filled with data from the search page. You can provide
        a limit as a convenient way to stop the generator after a certain number of caches.

        :param .geo.Point point: Search center point.
        :param int limit: Maximum number of caches to generate.
        """
        logging.info("Searching at {}".format(point))

        start_index = 0
        while True:
            # get one page
            geocaches_table, whole_page = self._search_get_page(point, start_index)

            if not geocaches_table:
                # result is empty - no more caches
                return

            # prepare language-dependant mappings
            if start_index == 0:
                cache_sizes_filter_wrapper = whole_page.find("div", class_="cache-sizes-wrapper")
                localized_size_mapping = {
                    # key = "Small" (localized), value = Size.small
                    label.find("span").text.strip(): Size.from_number(label.find("input").get("value"))
                    for label in cache_sizes_filter_wrapper.find_all("label")
                }

            # parse caches in result
            for start_index, row in enumerate(geocaches_table.find_all("tr"), start_index):

                limit -= 1  # handle limit
                if limit < 0:
                    return

                # parse raw data
                cache_details = row.find("span", "cache-details").text.split("|")
                wp = cache_details[1].strip()

                # create and fill cache object
                # values are sanitized and converted in Cache setters
                c = Cache(self, wp)
                c.type = cache_details[0]
                c.name = row.find("span", "cache-name").text
                badge = row.find("svg", class_="badge")
                c.found = "found" in str(badge) if badge is not None else False
                c.favorites = row.find(attrs={"data-column": "FavoritePoint"}).text
                c.state = not (row.get("class") and "disabled" in row.get("class"))
                c.pm_only = row.find("td", "pm-upsell") is not None

                if c.pm_only:
                    # PM only caches don't have other attributes filled in
                    yield c
                    continue

                c.size = localized_size_mapping[row.find(attrs={"data-column": "ContainerSize"}).text.strip()]
                c.difficulty = row.find(attrs={"data-column": "Difficulty"}).text
                c.terrain = row.find(attrs={"data-column": "Terrain"}).text
                c.hidden = row.find(attrs={"data-column": "PlaceDate"}).text
                c.author = row.find("span", "owner").text[3:]  # delete "by "

                logging.debug("Cache parsed: {}".format(c))
                yield c

            start_index += 1
Example #17
    def test_load(self):
        with self.subTest("normal (with explicit call of load())"):
            with self.recorder.use_cassette('cache_explicit_load'):
                cache = Cache(self.gc, "GC4808G")
                cache.load()
            self.assertEqual("Nekonecne ticho", cache.name)

        with self.subTest("normal"):
            with self.recorder.use_cassette('cache_normal_normal'):
                cache = Cache(self.gc, "GC4808G")
                self.assertEqual("Nekonecne ticho", cache.name)

        with self.subTest("non-ascii chars"):
            with self.recorder.use_cassette('cache_non-ascii'):
                cache = Cache(self.gc, "GC5VJ0P")
                self.assertEqual("u parezové chaloupky", cache.hint)

        with self.subTest("PM only"):
            with self.recorder.use_cassette('cache_PMO'):
                with self.assertRaises(PMOnlyException):
                    cache = Cache(self.gc, "GC3AHDM")
                    cache.load()

        with self.subTest("fail"):
            with self.recorder.use_cassette('cache_normal_fail'):
                with self.assertRaises(LoadError):
                    cache = Cache(self.gc, "GC123456")
                    cache.load()
Example #18
    def search(self, point, limit=0):
        """Returns a generator object of caches around some point."""

        assert isinstance(point, Point)
        assert type(limit) is int

        logging.info("Searching at %s...", point)

        start_index = 0
        while True:
            # get one page
            page = self._search_get_page(point, start_index)

            if not page:
                # result is empty - no more caches
                return

            # parse caches in result
            for start_index, row in enumerate(
                    BeautifulSoup(page).find_all("tr"), start_index):

                if limit > 0 and start_index == limit:
                    return

                # parse raw data
                cache_details = row.find("span",
                                         "cache-details").text.split("|")
                wp = cache_details[1].strip()

                # create and fill cache object
                c = Cache(wp, self)
                c.cache_type = cache_details[0].strip()
                c.name = row.find("span", "cache-name").text
                c.found = row.find("img", title="Found It!") is not None
                c.favorites = int(
                    row.find(attrs={
                        "data-column": "FavoritePoint"
                    }).text)
                c.state = not (row.get("class")
                               and "disabled" in row.get("class"))
                c.pm_only = row.find("td", "pm-upsell") is not None

                if c.pm_only:
                    # PM only caches don't have other attributes filled in
                    yield c
                    continue

                c.size = row.find(attrs={"data-column": "ContainerSize"}).text
                c.difficulty = float(
                    row.find(attrs={
                        "data-column": "Difficulty"
                    }).text)
                c.terrain = float(
                    row.find(attrs={
                        "data-column": "Terrain"
                    }).text)
                c.hidden = Util.parse_date(
                    row.find(attrs={
                        "data-column": "PlaceDate"
                    }).text)
                c.author = row.find("span", "owner").text[3:]  # delete "by "

                logging.debug("Cache parsed: %s", c)
                yield c

            start_index += 1
Example #19
    def get_cache(self, wp):
        """Return a :class:`.Cache` object by its waypoint.

        :param str wp: Cache waypoint.
        """
        return Cache(self, wp)
Example #20
    def search(self, point, limit=float("inf")):
        """Return a generator of caches around some point.

        Search for caches around some point by loading search pages and parsing the data from these
        pages. Yield :class:`.Cache` objects filled with data from the search page. You can provide
        a limit as a convenient way to stop the generator after a certain number of caches.

        :param .geo.Point point: Search center point.
        :param int limit: Maximum number of caches to generate.
        """
        logging.info("Searching at {}".format(point))

        start_index = 0
        while True:
            # get one page
            page = self._search_get_page(point, start_index)

            if not page:
                # result is empty - no more caches
                return

            # parse caches in result
            for start_index, row in enumerate(page.find_all("tr"),
                                              start_index):

                limit -= 1  # handle limit
                if limit < 0:
                    return

                # parse raw data
                cache_details = row.find("span",
                                         "cache-details").text.split("|")
                wp = cache_details[1].strip()

                # create and fill cache object
                c = Cache(self, wp)
                c.type = Type.from_string(cache_details[0].strip())
                c.name = row.find("span", "cache-name").text
                badge = row.find("svg", class_="badge")
                c.found = "found" in str(badge) if badge is not None else False
                c.favorites = int(
                    row.find(attrs={
                        "data-column": "FavoritePoint"
                    }).text)
                c.state = not (row.get("class")
                               and "disabled" in row.get("class"))
                c.pm_only = row.find("td", "pm-upsell") is not None

                if c.pm_only:
                    # PM only caches don't have other attributes filled in
                    yield c
                    continue

                c.size = Size.from_string(
                    row.find(attrs={
                        "data-column": "ContainerSize"
                    }).text)
                c.difficulty = float(
                    row.find(attrs={
                        "data-column": "Difficulty"
                    }).text)
                c.terrain = float(
                    row.find(attrs={
                        "data-column": "Terrain"
                    }).text)
                c.hidden = parse_date(
                    row.find(attrs={
                        "data-column": "PlaceDate"
                    }).text)
                c.author = row.find("span", "owner").text[3:]  # delete "by "

                logging.debug("Cache parsed: {}".format(c))
                yield c

            start_index += 1
Example #21
    def search(self, point, limit=float("inf")):
        """Return a generator of caches around some point.

        Search for caches around some point by loading search pages and parsing the data from these
        pages. Yield :class:`.Cache` objects filled with data from the search page. You can provide
        a limit as a convenient way to stop the generator after a certain number of caches.

        :param .geo.Point point: Search center point.
        :param int limit: Maximum number of caches to generate.
        """
        logging.info("Searching at {}".format(point))

        start_index = 0
        while True:
            # get one page
            page = self._search_get_page(point, start_index)

            if not page:
                # result is empty - no more caches
                return

            # parse caches in result
            for start_index, row in enumerate(page.find_all("tr"), start_index):

                limit -= 1  # handle limit
                if limit < 0:
                    return

                # parse raw data
                cache_details = row.find("span", "cache-details").text.split("|")
                wp = cache_details[1].strip()

                # create and fill cache object
                c = Cache(self, wp)
                c.type = Type.from_string(cache_details[0].strip())
                c.name = row.find("span", "cache-name").text
                c.found = row.find("img", title="Found It!") is not None
                c.favorites = int(row.find(attrs={"data-column": "FavoritePoint"}).text)
                c.state = not (row.get("class") and "disabled" in row.get("class"))
                c.pm_only = row.find("td", "pm-upsell") is not None

                if c.pm_only:
                    # PM only caches don't have other attributes filled in
                    yield c
                    continue

                c.size = Size.from_string(row.find(attrs={"data-column": "ContainerSize"}).text)
                c.difficulty = float(row.find(attrs={"data-column": "Difficulty"}).text)
                c.terrain = float(row.find(attrs={"data-column": "Terrain"}).text)
                c.hidden = parse_date(row.find(attrs={"data-column": "PlaceDate"}).text)
                c.author = row.find("span", "owner").text[3:]  # delete "by "

                logging.debug("Cache parsed: {}".format(c))
                yield c

            start_index += 1
Example #22
    def search_rect(self,
                    rect: Rectangle,
                    *,
                    per_query: int = 200,
                    sort_by: Union[str,
                                   SortOrder] = SortOrder.date_last_visited,
                    origin: Optional[Point] = None,
                    wait_sleep: bool = True):
        """
        Return a generator of caches in the given Rectangle area.

        :param rect: Search area.
        :param int per_query: Number of caches requested in a single query.
        :param sort_by: Order caches by the given criterion.
        :param origin: Origin point for search by distance.
        :param wait_sleep: If rate limits are exceeded, wait an appropriate time if True,
            otherwise just yield None.
        """
        if not isinstance(sort_by, SortOrder):
            sort_by = SortOrder(sort_by)

        params = {
            "box": "{},{},{},{}".format(
                rect.corners[0].latitude,
                rect.corners[0].longitude,
                rect.corners[1].latitude,
                rect.corners[1].longitude,
            ),
            "take": per_query,
            "asc": "true",
            "skip": 0,
            "sort": sort_by.value,
        }

        if sort_by is SortOrder.distance:
            assert isinstance(origin, Point)
            params["origin"] = "{},{}".format(origin.latitude,
                                              origin.longitude)

        total, offset = None, 0
        while (total is None) or (offset < total):
            params["skip"] = offset

            try:
                resp = self._request(self._urls["api_search"],
                                     params=params,
                                     expect="json")
            except TooManyRequestsError as e:
                if wait_sleep:
                    e.wait_for()
                else:
                    yield None
                continue

            for record in resp["results"]:
                yield Cache._from_api_record(self, record)

            total = resp["total"]
            offset += per_query
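
The skip/take pagination is the core mechanic of search_rect. A self-contained sketch of the same loop, with the API request replaced by a stub:

# Standalone sketch of the skip/take pagination loop above; fetch_page
# is a stand-in for the real API request.
def fetch_page(skip, take):
    records = ["GC{}".format(i) for i in range(11)]  # pretend 11 results exist
    return {"total": len(records), "results": records[skip:skip + take]}

per_query = 4
total, offset = None, 0
while (total is None) or (offset < total):
    resp = fetch_page(offset, per_query)
    for record in resp["results"]:
        print(record)
    total = resp["total"]
    offset += per_query
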
Example #23
    def test_load_trackables(self):
        cache = Cache(self.gc, "GC26737")  # TB graveyard - will surely have some trackables
        with self.recorder.use_cassette('cache_trackables'):
            trackable_list = list(cache.load_trackables(limit=10))
        self.assertTrue(isinstance(trackable_list, list))
Example #24
    def setUpClass(cls):
        cls.gc = Geocaching()
        cls.gc.login(_username, _password)
        cls.c = Cache(cls.gc, "GC1PAR2")
        cls.c.load()
Example #25
    def _search_parse_cache(self, root):
        """Returns a Cache object parsed from BeautifulSoup Tag."""

        assert isinstance(root, bs4.Tag)

        # parse raw data
        favorites = root.find("span", "favorite-rank")
        typeLink, nameLink = root.find_all("a", "lnk")
        pm_only = root.find("img", title="Premium Member Only Cache") is not None
        direction, info, D_T, placed, last_found = root.find_all("span", "small")
        found = root.find("img", title="Found It!") is not None
        size = root.find("td", "AlignCenter").find("img")
        author, wp, area = [t.strip() for t in info.text.split("|")]

        # create cache object
        c = Cache(wp, self)

        # prettify data
        c.cache_type = typeLink.find("img").get("alt")
        c.name = nameLink.span.text.strip()
        c.found = found
        c.state = "Strike" not in nameLink.get("class")
        c.size = " ".join(size.get("alt").split()[1:])
        c.difficulty, c.terrain = list(map(float, D_T.text.split("/")))
        c.hidden = Util.parse_date(placed.text)
        c.author = author[3:]  # delete "by "
        c.favorites = int(favorites.text)
        c.pm_only = pm_only

        logging.debug("Cache parsed: %s", c)
        return c
Example #26
    def load_cache_by_url(self, url, destination=None):
        try:
            root = self._browser.get(url).soup
        except requests.exceptions.ConnectionError as e:
            raise Error("Cannot load cache details page.") from e

        cache_details = root.find(id="cacheDetails")

        # check for PM only caches if using free account
        if cache_details is None:
            if root.select(".PMOWarning"):
                raise PMOnlyException("Premium Members only.")

        # parse raw data
        wp = root.title.string.split(' ')[0]

        name = cache_details.find("h2")
        cache_type = cache_details.find("img").get("src")
        author = cache_details("a")[1]
        hidden = cache_details.find("div",
                                    "minorCacheDetails").find_all("div")[1]
        location = root.find(id="uxLatLon")
        state = root.find("ul", "OldWarning")
        found = root.find("div", "FoundStatus")
        D_T = root.find("div", "CacheStarLabels").find_all("img")
        size = root.find("div", "CacheSize").find("img")
        attributes_raw = root.find_all(
            "div", "CacheDetailNavigationWidget")[0].find_all("img")
        user_content = root.find_all("div", "UserSuppliedContent")
        hint = root.find(id="div_hint")
        favorites = root.find("span", "favorite-value")

        # check for trackables
        inventory_raw = root.find_all("div", "CacheDetailNavigationWidget")
        inventory_links = inventory_raw[1].find_all("a")
        if len(inventory_links) >= 3:
            trackable_page = self._urls['trackable_base'] + inventory_links[-3].get("href")
        else:
            trackable_page = None

        # create cache object
        c = destination or Cache(wp, self)
        assert isinstance(c, Cache)

        # prettify data
        c.name = name.text
        c.cache_type = Cache.get_cache_type_by_img(cache_type)
        c.author = author.text
        c.hidden = Util.parse_date(hidden.text.split(":")[-1])
        c.location = Point.from_string(location.text)
        c.state = state is None
        c.found = bool(found and "Found It!" in found.text)
        c.difficulty, c.terrain = [float(_.get("alt").split()[0]) for _ in D_T]
        c.size = size.get("src").split("/")[-1].rsplit(
            ".", 1)[0]  # filename of img[src]
        attributes_raw = [
            _.get("src").split('/')[-1].rsplit("-", 1) for _ in attributes_raw
        ]
        c.attributes = {
            attribute_name: appendix.startswith("yes")
            for attribute_name, appendix in attributes_raw
            if not appendix.startswith("blank")
        }
        c.summary = user_content[0].text
        c.description = str(user_content[1])
        c.hint = Util.rot13(hint.text.strip())
        if favorites is None:
            c.favorites = 0
        else:
            c.favorites = int(favorites.text)
        if trackable_page is not None:
            c.trackables = self.load_trackable_list(trackable_page)
        else:
            c.trackables = []
        logging.debug("Cache loaded: %r", c)
        return c
Example #27
    def test___eq__(self):
        self.assertEqual(self.c, Cache(self.gc, "GC12345"))
Example #28
    def setUpClass(cls):
        super().setUpClass()
        cls.c = Cache(cls.gc, "GC1PAR2")
        with cls.recorder.use_cassette('cache_setup'):
            cls.c.load()
Example #29
    def load_cache_by_url(self, url, destination=None):
        try:
            root = self._browser.get(url).soup
        except requests.exceptions.ConnectionError as e:
            raise Error("Cannot load cache details page.") from e

        cache_details = root.find(id="cacheDetails")

        # check for PM only caches if using free account
        if cache_details is None:
            if root.select(".PMOWarning"):
                raise PMOnlyException("Premium Members only.")

        # parse raw data
        wp = root.title.string.split(' ')[0]

        name = cache_details.find("h2")
        cache_type = cache_details.find("img").get("src")
        author = cache_details("a")[1]
        hidden = cache_details.find("div", "minorCacheDetails").find_all("div")[1]
        location = root.find(id="uxLatLon")
        state = root.find("ul", "OldWarning")
        found = root.find("div", "FoundStatus")
        D_T = root.find("div", "CacheStarLabels").find_all("img")
        size = root.find("div", "CacheSize").find("img")
        attributes_raw = root.find_all("div", "CacheDetailNavigationWidget")[0].find_all("img")
        user_content = root.find_all("div", "UserSuppliedContent")
        hint = root.find(id="div_hint")
        favorites = root.find("span", "favorite-value")

        # check for trackables
        inventory_raw = root.find_all("div", "CacheDetailNavigationWidget")
        inventory_links = inventory_raw[1].find_all("a")
        if len(inventory_links) >= 3:
            trackable_page = self._urls['trackable_base'] + inventory_links[-3].get("href")
        else:
            trackable_page = None

        # create cache object
        c = destination or Cache(wp, self)
        assert isinstance(c, Cache)

        # prettify data
        c.name = name.text
        c.cache_type = Cache.get_cache_type_by_img(cache_type)
        c.author = author.text
        c.hidden = Util.parse_date(hidden.text.split(":")[-1])
        c.location = Point.from_string(location.text)
        c.state = state is None
        c.found = bool(found and "Found It!" in found.text)
        c.difficulty, c.terrain = [float(_.get("alt").split()[0]) for _ in D_T]
        c.size = size.get("src").split("/")[-1].rsplit(".", 1)[0]  # filename of img[src]
        attributes_raw = [_.get("src").split('/')[-1].rsplit("-", 1) for _ in attributes_raw]
        c.attributes = {attribute_name: appendix.startswith("yes")
                        for attribute_name, appendix in attributes_raw if not appendix.startswith("blank")}
        c.summary = user_content[0].text
        c.description = str(user_content[1])
        c.hint = Util.rot13(hint.text.strip())
        if favorites is None:
            c.favorites = 0
        else:
            c.favorites = int(favorites.text)
        if trackable_page is not None:
            c.trackables = self.load_trackable_list(trackable_page)
        else:
            c.trackables = []
        logging.debug("Cache loaded: %r", c)
        return c
Example #30
    def search(self, point, limit=float("inf")):
        """Return a generator of caches around some point.

        Search for caches around some point by loading search pages and parsing the data from these
        pages. Yield :class:`.Cache` objects filled with data from the search page. You can provide
        a limit as a convenient way to stop the generator after a certain number of caches.

        :param .geo.Point point: Search center point.
        :param int limit: Maximum number of caches to generate.
        """
        logging.info("Searching at {}".format(point))

        start_index = 0
        while True:
            # get one page
            geocaches_table, whole_page = self._search_get_page(point, start_index)
            rows = geocaches_table.find_all("tr")

            # leave loop if there are no (more) results
            if not rows:
                return

            # prepare language-dependent mappings
            if start_index == 0:
                cache_sizes_filter_wrapper = whole_page.find("div", class_="cache-sizes-wrapper")
                localized_size_mapping = {
                    # key = "Small" (localized), value = Size.small
                    label.find("span").text.strip(): Size.from_number(label.find("input").get("value"))
                    for label in cache_sizes_filter_wrapper.find_all("label")
                }

            # parse caches in result
            for start_index, row in enumerate(rows, start_index):

                limit -= 1  # handle limit
                if limit < 0:
                    return

                # parse raw data
                cache_details = row.find("span", "cache-details").text.split("|")
                wp = cache_details[1].strip()

                # create and fill cache object
                # values are sanitized and converted in Cache setters
                c = Cache(self, wp)
                c.type = cache_details[0]
                c.name = row.find("span", "cache-name").text
                badge = row.find("svg", class_="badge")
                c.found = "found" in str(badge) if badge is not None else False
                c.favorites = row.find(attrs={"data-column": "FavoritePoint"}).text
                c.state = not (row.get("class") and "disabled" in row.get("class"))
                c.pm_only = row.find("td", "pm-upsell") is not None

                if c.pm_only:
                    # PM only caches don't have other attributes filled in
                    yield c
                    continue

                c.size = localized_size_mapping[row.find(attrs={"data-column": "ContainerSize"}).text.strip()]
                c.difficulty = row.find(attrs={"data-column": "Difficulty"}).text
                c.terrain = row.find(attrs={"data-column": "Terrain"}).text
                c.hidden = row.find(attrs={"data-column": "PlaceDate"}).text
                c.author = row.find("span", "owner").text[3:]  # delete "by "

                logging.debug("Cache parsed: {}".format(c))
                yield c

            start_index += 1
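
The localized size mapping above is built from the cache-sizes filter widget on the search page. A standalone sketch with a mimicked HTML fragment; the raw input value stands in for Size.from_number():

# Standalone sketch of the localized size mapping built above; the HTML
# fragment mimics the cache-sizes filter widget, and the raw input value
# stands in for Size.from_number(). Values here are illustrative.
import bs4

html = """
<div class="cache-sizes-wrapper">
  <label><input value="2"><span>Micro</span></label>
  <label><input value="8"><span>Small</span></label>
  <label><input value="64"><span>Large</span></label>
</div>
"""
wrapper = bs4.BeautifulSoup(html, "html.parser").find("div", class_="cache-sizes-wrapper")
localized_size_mapping = {
    label.find("span").text.strip(): label.find("input").get("value")
    for label in wrapper.find_all("label")
}
print(localized_size_mapping)  # {'Micro': '2', 'Small': '8', 'Large': '64'}
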
Example #31
    def test_load_by_guid(self, mock_load_quick, mock_load):
        with self.subTest("normal"):
            cache = Cache(self.gc,
                          "GC2WXPN",
                          guid="5f45114d-1d79-4fdb-93ae-8f49f1d27188")
            with self.recorder.use_cassette('cache_guidload_normal'):
                cache.load_by_guid()
            self.assertEqual(cache.name, "Der Schatz vom Luftschloss")
            self.assertEqual(cache.location,
                             Point("N 49° 57.895' E 008° 12.988'"))
            self.assertEqual(cache.type, Type.mystery)
            self.assertEqual(cache.size, Size.large)
            self.assertEqual(cache.difficulty, 2.5)
            self.assertEqual(cache.terrain, 1.5)
            self.assertEqual(cache.author, "engelmz & Punxsutawney Phil")
            self.assertEqual(cache.hidden, parse_date("23/06/2011"))
            self.assertDictEqual(
                cache.attributes, {
                    "bicycles": True,
                    "available": True,
                    "parking": True,
                    "onehour": True,
                    "kids": True,
                    "s-tool": True,
                })
            self.assertEqual(cache.summary,
                             "Gibt es das Luftschloss wirklich?")
            self.assertIn("Seit dem 16.", cache.description)
            self.assertEqual(cache.hint, "Das ist nicht nötig")
            self.assertGreater(cache.favorites, 350)
            self.assertEqual(len(cache.waypoints), 2)
            self.assertDictEqual(
                cache.log_counts, {
                    LogType.found_it: 800,
                    LogType.note: 35,
                    LogType.archive: 1,
                    LogType.needs_archive: 1,
                    LogType.temp_disable_listing: 5,
                    LogType.enable_listing: 4,
                    LogType.publish_listing: 1,
                    LogType.needs_maintenance: 5,
                    LogType.owner_maintenance: 3,
                    LogType.post_reviewer_note: 2,
                })

        with self.subTest("PM-only"):
            cache = Cache(self.gc,
                          "GC6MKEF",
                          guid="53d34c4d-12b5-4771-86d3-89318f71efb1")
            with self.recorder.use_cassette('cache_guidload_PMO'):
                with self.assertRaises(PMOnlyException):
                    cache.load_by_guid()

        with self.subTest("calls load_quick if no guid"):
            cache = Cache(self.gc, "GC2WXPN")
            with self.recorder.use_cassette('cache_guidload_fallback'):
                with self.assertRaises(Exception):
                    cache.load_by_guid()  # Raises error since we mocked load_quick()
            self.assertTrue(mock_load_quick.called)
Example #32
    def test_load(self):
        with self.subTest("normal (with explicit call of load())"):
            cache = Cache(self.gc, "GC4808G")
            cache.load()
            self.assertEqual("Nekonecne ticho", cache.name)

        with self.subTest("normal"):
            cache = Cache(self.gc, "GC4808G")
            self.assertEqual("Nekonecne ticho", cache.name)

        with self.subTest("non-ascii chars"):
            cache = Cache(self.gc, "GC4FRG5")
            self.assertEqual("Entre l'arbre et la grille.", cache.hint)

        with self.subTest("PM only"):
            with self.assertRaises(PMOnlyException):
                cache = Cache(self.gc, "GC3AHDM")
                cache.load()

        with self.subTest("fail"):
            with self.assertRaises(LoadError):
                cache = Cache(self.gc, "GC123456")
                cache.load()
Example #33
    def test_load_by_guid(self, mock_load_quick, mock_load):
        with self.subTest("normal"):
            cache = Cache(self.gc,
                          "GC2WXPN",
                          guid="5f45114d-1d79-4fdb-93ae-8f49f1d27188")
            cache.load_by_guid()
            self.assertEqual(cache.name, "Der Schatz vom Luftschloss")
            self.assertEqual(cache.location,
                             Point("N 49° 57.895' E 008° 12.988'"))
            self.assertEqual(cache.type, Type.mystery)
            self.assertEqual(cache.size, Size.large)
            self.assertEqual(cache.difficulty, 2.5)
            self.assertEqual(cache.terrain, 1.5)
            self.assertEqual(cache.author, "engelmz & Punxsutawney Phil")
            self.assertEqual(cache.hidden, parse_date("23/06/2011"))
            self.assertDictEqual(
                cache.attributes, {
                    "bicycles": True,
                    "available": True,
                    "firstaid": True,
                    "parking": True,
                    "onehour": True,
                    "kids": True,
                    "s-tool": True,
                })
            self.assertEqual(cache.summary,
                             "Gibt es das Luftschloss wirklich?")
            self.assertIn("Seit dem 16.", cache.description)
            self.assertEqual(cache.hint, "Das ist nicht nötig")
            self.assertGreater(cache.favorites, 380)
            self.assertEqual(len(cache.waypoints), 2)

        with self.subTest("PM-only"):
            cache = Cache(self.gc,
                          "GC6MKEF",
                          guid="53d34c4d-12b5-4771-86d3-89318f71efb1")
            with self.assertRaises(PMOnlyException):
                cache.load_by_guid()

        with self.subTest("calls load_quick if no guid"):
            cache = Cache(self.gc, "GC2WXPN")
            with self.assertRaises(Exception):
                cache.load_by_guid()  # Raises error since we mocked load_quick()
            self.assertTrue(mock_load_quick.called)
Example #34
    def test_geocaching(self):
        with self.assertRaises(PycachingValueError):
            Cache(None, "GC12345")
Example #35
    def search(self, point, limit=0):
        """Returns a generator object of caches around some point."""

        assert isinstance(point, Point)
        assert type(limit) is int

        logging.info("Searching at %s...", point)

        start_index = 0
        while True:
            # get one page
            page = self._search_get_page(point, start_index)

            if not page:
                # result is empty - no more caches
                return

            # parse caches in result
            for start_index, row in enumerate(BeautifulSoup(page).find_all("tr"), start_index):

                if limit > 0 and start_index == limit:
                    return

                # parse raw data
                cache_details = row.find("span", "cache-details").text.split("|")
                wp = cache_details[1].strip()

                # create and fill cache object
                c = Cache(wp, self)
                c.cache_type = cache_details[0].strip()
                c.name = row.find("span", "cache-name").text
                c.found = row.find("img", title="Found It!") is not None
                c.favorites = int(row.find(attrs={"data-column": "FavoritePoint"}).text)
                c.state = not (row.get("class") and "disabled" in row.get("class"))
                c.pm_only = row.find("td", "pm-upsell") is not None

                if c.pm_only:
                    # PM only caches don't have other attributes filled in
                    yield c
                    continue

                c.size = row.find(attrs={"data-column": "ContainerSize"}).text
                c.difficulty = float(row.find(attrs={"data-column": "Difficulty"}).text)
                c.terrain = float(row.find(attrs={"data-column": "Terrain"}).text)
                c.hidden = Util.parse_date(row.find(attrs={"data-column": "PlaceDate"}).text)
                c.author = row.find("span", "owner").text[3:]  # delete "by "

                logging.debug("Cache parsed: %s", c)
                yield c

            start_index += 1