    def _parse_overview(self, content, location, rec_depth=0):
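        """Parse the JSON overview of a map region delimited by *location*.

        Returns a list of GeocacheCoordinate objects built from the entries
        under a['cs']['cc']. If the response carries only a nonzero 'count'
        instead of a cache list, the area is split in half and both halves
        are fetched again via get_geocaches().
        """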
        c1, c2 = location
        text = content.replace("\\'", "'")
        a = json.loads(text.replace('\t', ' '))
        points = []
        if 'cc' not in a['cs']:
            if 'count' in a['cs'] and a['cs']['count'] != 0:
                # Too many caches for a single response: split the area at the
                # middle latitude and fetch both halves recursively.
                mlat = (c1.lat + c2.lat)/2
                nc1 = geo.Coordinate(min(c1.lat, c2.lat), min(c1.lon, c2.lon))
                mc1 = geo.Coordinate(mlat, max(c1.lon, c2.lon))
                mc2 = geo.Coordinate(mlat, min(c1.lon, c2.lon))
                nc2 = geo.Coordinate(max(c1.lat, c2.lat), max(c1.lon, c2.lon))

                CacheDownloader.lock.release()
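                # Recurse on both halves of the area. The class-level lock was
                # released above, presumably so the nested get_geocaches()
                # calls can acquire it themselves.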
                points += self.get_geocaches((nc1, mc1), rec_depth + 1)
                points += self.get_geocaches((mc2, nc2), rec_depth + 1)
                CacheDownloader.lock.acquire(False)
            return points
        for b in a['cs']['cc']:
            c = GeocacheCoordinate(b['lat'], b['lon'], b['gc'])
            c.title = b['nn']
            c.type = self.CTIDS.get(b['ctid'], GeocacheCoordinate.TYPE_UNKNOWN)

            c.found = b['f']
            if not b['ia']:  # 'ia' presumably flags "is available"
                c.status = GeocacheCoordinate.STATUS_DISABLED
            points.append(c)
        return points
    def _get_overview(self, location, rec_depth=0):
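        """Fetch all geocaches inside the rectangle given by *location*.

        Walks the paginated search results on geocaching.com, collecting
        (guid, found, gcid) tuples for every result row, then downloads each
        cache through its print-preview page and returns the parsed
        GeocacheCoordinate objects.
        """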
        c1, c2 = location
        center = geo.Coordinate((c1.lat + c2.lat) / 2, (c1.lon + c2.lon) / 2)
        dist = (center.distance_to(c1) / 1000) / 2
        logger.debug("Distance is %f km" % dist)
        if dist > 100:
            raise Exception("Please select a smaller part of the map!")
        url = self.OVERVIEW_URL % (center.lat, center.lon, dist)

        self.emit("progress", "Fetching list", 0, 1)
        response = self.downloader.get_reader(url, login_callback=self.login_callback, check_login_callback=self.check_login_callback)
     
        cont = True
        wpts = []
        page_last = 0  # Stores the "old" value of the page counter; if it doesn't increment, abort!
        while cont:
            # Count the number of results and pages
            text = response.read()
            response.close()
            doc = self._parse(text)
            bs = doc.cssselect('#ctl00_ContentBody_ResultsPanel .PageBuilderWidget b')
            if len(bs) == 0:
                raise Exception("There are no geocaches in this area.")
            count = int(bs[0].text_content())
            page_current = int(bs[1].text_content())
            if page_current == page_last:
                raise Exception("Current page has the same number as the last page; aborting!")
            page_last = page_current
            page_max = int(bs[2].text_content())
            logger.info("We are at page %d of %d, total %d geocaches" % (page_current, page_max, count))
            if count > self.MAX_DOWNLOAD_NUM:
                raise Exception("%d geocaches found, please select a smaller part of the map!" % count)

            # Extract waypoint information from the page
            w = [(
                # Get the GUID from the link
                x.getparent().getchildren()[0].get('href').split('guid=')[1],
                # See whether this cache was found or not
                'TertiaryRow' in x.getparent().getparent().get('class'),
                # Get the GCID from the text
                x.text_content().split('|')[1].strip()
                ) for x in doc.cssselect(".SearchResultsTable .Merge .small")]
            wpts += w

            cont = False
            # There are more pages: trigger the ASP.NET pager postback by
            # setting __EVENTTARGET and re-submitting the search form.
            if page_current < page_max:
                from urllib import urlencode
                doc.forms[0].fields['__EVENTTARGET'] = 'ctl00$ContentBody$pgrTop$ctl08'
                # Quick hack. A nicer solution would be to remove the element.
                v = [x for x in doc.forms[0].form_values() if x[0] != 'ctl00$ContentBody$chkAll']
                values = urlencode(v)
                action = self.SEEK_URL % doc.forms[0].action
                logger.info("Retrieving next page!")
                self.emit("progress", "Fetching list (%d of %d)" % (page_current + 1, page_max), page_current, page_max)
                response = self.downloader.get_reader(action, data=('application/x-www-form-urlencoded', values), login_callback=self.login_callback, check_login_callback=self.check_login_callback)
                cont = True

        # Download the geocaches using the print preview
        points = []
        for i, (guid, found, gcid) in enumerate(wpts, 1):
            coordinate = GeocacheCoordinate(-1, -1, gcid)
            coordinate.found = found
            self.emit("progress", "Geocache %d of %d" % (i, len(wpts)), i, len(wpts))
            logger.info("Downloading %s..." % gcid)
            url = self.PRINT_PREVIEW_URL % guid
            response = self.downloader.get_reader(url, login_callback=self.login_callback, check_login_callback=self.check_login_callback)
            result = self._parse_cache_page_print(response, coordinate, num_logs=20)
            if result is not None and result.lat != -1:
                points.append(result)

        return points