Example #1
def async_get(self):
    # Locate the cache directory derived from the requested URL.
    shapefile_dir = os.path.join(self.precache.directory, safe64.dir(self.get_url))
    # Pick the first shapefile extracted into that directory.
    shapefile_path = [f for f in os.listdir(shapefile_dir) if f.endswith(".shp")][0]
    # Respond with the source URL and the shapefile's spatial reference
    # system, wrapped in a JSONP callback when one was requested.
    self.jsonp(
        {"data_url": self.get_url, "srs": self.shapefile_projection(os.path.join(shapefile_dir, shapefile_path))},
        self.get_argument("jsoncallback", None),
    )
    self.finish()
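
The srs value suggests that shapefile_projection returns the spatial reference of the cached shapefile. Below is a minimal sketch of such a helper, assuming the projection is read from the .prj file that conventionally sits next to the .shp file (the function name comes from the call above; the .prj lookup is an assumption):

import os

def shapefile_projection(shapefile_path):
    # Hypothetical helper: read the WKT projection definition from the .prj
    # file that conventionally accompanies the .shp file.
    prj_path = os.path.splitext(shapefile_path)[0] + ".prj"
    if not os.path.exists(prj_path):
        return None
    with open(prj_path) as prj_file:
        return prj_file.read().strip()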
Example #2
def cache(self, response):
    """Async HTTP request callback. Caches the downloaded zipfile."""
    try:
        # Check that the directory does not exist yet, as there *can* be
        # concurrent jobs at this point. Do not actually create the
        # directory until we are sure that a shapefile can be successfully
        # extracted from the zip.
        base_dir = os.path.join(self.directory, safe64.dir(response.request.url))
        if not os.path.isdir(base_dir):
            self.unzip_shapefile(response.body, base_dir, response.request)
    except Exception as e:
        # On failure, release the lock and requeue the URL so it can be retried.
        logging.info('Failed: %s', response.request.url)
        logging.info('Exception: %s', e)
        if response.request.url in self.locks: self.locks.remove(response.request.url)
        if response.request.url not in self.queue: self.queue.append(response.request.url)
        self.request_handler.finish()
        return
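
unzip_shapefile itself is not shown in this example. Below is a minimal sketch of what such a helper could look like, assuming it extracts the downloaded archive into base_dir only when the archive actually contains a shapefile (the name and arguments come from the call above; everything else is an assumption):

import io
import os
import zipfile

def unzip_shapefile(zip_bytes, base_dir, request=None):
    # Hypothetical helper: extract a downloaded zip archive into base_dir,
    # but only if it contains at least one .shp member.
    archive = zipfile.ZipFile(io.BytesIO(zip_bytes))
    if not any(name.endswith(".shp") for name in archive.namelist()):
        raise ValueError("archive contains no .shp file")
    os.makedirs(base_dir)
    archive.extractall(base_dir)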
Example #3
def process_request(self, request_url):
    # Directory exists, request has already been successfully processed.
    base_dir = os.path.join(self.directory, safe64.dir(request_url))
    if os.path.isdir(base_dir):
        if request_url in self.queue: self.queue.remove(request_url)
        if request_url in self.locks: self.locks.remove(request_url)
    # Request is in queue and not locked. Fire asynchronous HTTP request.
    elif request_url in self.queue and request_url not in self.locks:
        self.queue.remove(request_url)
        self.locks.append(request_url)
        logging.info("Locked: %s", request_url)
        http = tornado.httpclient.AsyncHTTPClient()
        http.fetch(request_url, request_timeout=60, callback=self.cache)
    # Request is locked. Hold off and check again in five seconds.
    elif request_url in self.locks:
        tornado.ioloop.IOLoop.instance().add_timeout(
            time.time() + 5, lambda: self.process_request(request_url))
    # All queued requests have been processed. Continue to callback.
    if len(self.queue) == 0 and len(self.locks) == 0:
        self.callback(**self.kwargs)
    return
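
Taken together, queue holds URLs that still need to be fetched and locks holds URLs whose download is in flight; process_request re-schedules itself every five seconds while a URL is locked and fires self.callback once both lists are empty. Below is a minimal sketch of how such a precache object might be kicked off, assuming an entry point that accepts the URLs, a completion callback, and its keyword arguments (only the attribute names come from the examples; the method itself is an assumption):

def precache_request(self, urls, callback, **kwargs):
    # Hypothetical entry point: queue every URL, then process each one.
    # process_request() re-schedules itself for locked URLs and invokes
    # the callback once both queue and locks are empty.
    self.callback = callback
    self.kwargs = kwargs
    for url in urls:
        if url not in self.queue:
            self.queue.append(url)
    for url in list(self.queue):
        self.process_request(url)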
Example #4
def url2fs(self, url):
    """Encode a URL to be safe as a filename."""
    uri, extension = os.path.splitext(url)
    return safe64.dir(uri) + extension
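
safe64.dir is used throughout these examples to turn a URL into a filesystem-safe path; its implementation is not shown. Below is a minimal sketch of a comparable helper, assuming a URL-safe base64 encoding split into fixed-length path segments (the module and function names match the examples; the encoding details are an assumption):

import base64
import os

# Contents of a hypothetical safe64 module.
def dir(url, segment=128):
    # Encode the URL with URL-safe base64 so it contains no path separators,
    # then split the result into fixed-length segments joined as nested
    # directory names, keeping each name short enough for most filesystems.
    encoded = base64.urlsafe_b64encode(url.encode("utf-8")).decode("ascii")
    segments = [encoded[i:i + segment] for i in range(0, len(encoded), segment)]
    return os.path.join(*segments)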