def as_io(doc):
    if PY2:
        return BytesIO(doc)
    if isinstance(doc, str):
        return StringIO(doc)
    return BytesIO(doc)

def _init_index(self):
    self._initialized = True
    if os.path.exists(self.filename):
        return
    ensure_directory(self.filename)
    buf = BytesIO()
    buf.write(BUNDLEX_HEADER)
    for i in range(BUNDLEX_GRID_WIDTH * BUNDLEX_GRID_HEIGHT):
        # 5-byte little-endian tile offsets
        buf.write(struct.pack('<Q', (i * 4) + BUNDLE_HEADER_SIZE)[:5])
    buf.write(BUNDLEX_FOOTER)
    write_atomic(self.filename, buf.getvalue())

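
# For reference, a standalone sketch (not part of the module above) of the
# 5-byte little-endian offset encoding produced by struct.pack('<Q', ...)[:5];
# the helper names are illustrative only.

import struct

def pack_offset_5(offset):
    # keep the low 5 bytes of the 8-byte little-endian value
    return struct.pack('<Q', offset)[:5]

def unpack_offset_5(raw):
    # pad back to 8 bytes before unpacking
    return struct.unpack('<Q', raw + b'\x00\x00\x00')[0]

assert unpack_offset_5(pack_offset_5(123456)) == 123456
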
def _render_mapfile(self, mapfile, query):
    start_time = time.time()

    m = self.map_obj(mapfile)
    m.resize(query.size[0], query.size[1])
    m.srs = '+init=%s' % str(query.srs.srs_code.lower())
    envelope = mapnik.Box2d(*query.bbox)
    m.zoom_to_box(envelope)
    data = None

    try:
        if self.layers:
            i = 0
            for layer in m.layers[:]:
                if layer.name != 'Unkown' and layer.name not in self.layers:
                    del m.layers[i]
                else:
                    i += 1

        img = mapnik.Image(query.size[0], query.size[1])
        mapnik.render(m, img)
        data = img.tostring(str(query.format))
    finally:
        size = None
        if data:
            size = len(data)
        log_request('%s:%s:%s:%s' % (mapfile, query.bbox, query.srs.srs_code, query.size),
            status='200' if data else '500', size=size,
            method='API', duration=time.time() - start_time)

    return ImageSource(BytesIO(data), size=query.size,
        image_opts=ImageOptions(transparent=self.transparent, format=query.format))

def load_tile(self, tile, with_metadata=False):
    # bulk loading with load_tiles is not implemented, because
    # CouchDB's /all_docs? does not include attachments
    if tile.source or tile.coord is None:
        return True

    url = self.document_url(tile.coord) + '?attachments=true'
    self.init_db()
    resp = self.req_session.get(url, headers={'Accept': 'application/json'})
    if resp.status_code == 200:
        doc = json.loads(codecs.decode(resp.content, 'utf-8'))

        tile_obj = None
        # tile in attribute
        if self.tile_attribute:
            tile_obj = doc.get(self.tile_attribute)
        # fallback to _attachments
        if not tile_obj:
            tile_obj = doc.get('_attachments', {}).get('tile', None)

        if tile_obj:
            tile_data = BytesIO(base64.b64decode(tile_obj['data']))
            tile.source = ImageSource(tile_data)
        else:
            raise ValueError('CouchDB: missing tile data')

        tile.timestamp = doc.get(self.md_template.timestamp_key)
        return True
    return False

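
# A self-contained sketch of decoding an inline CouchDB attachment of the kind
# handled above; the document shape and values are illustrative only.

import base64
import json
from io import BytesIO

example_doc = json.loads(
    '{"_attachments": {"tile": {"content_type": "image/png", "data": "aGVsbG8="}}}')
tile_obj = example_doc.get('_attachments', {}).get('tile')
tile_bytes = BytesIO(base64.b64decode(tile_obj['data'])) if tile_obj else None
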
def get_tiles(self, tiles):
    stmt = "SELECT x, y, data, date_added, unique_tile FROM %s WHERE " % (
        self.table_name)
    stmt += ' OR '.join(['(x = ? AND y = ?)'] * len(tiles))

    coords = []
    for tile in tiles:
        x, y, level = tile.coord
        coords.append(x)
        coords.append(y)

    cursor = self.db.cursor()
    try:
        cursor.execute(stmt, coords)
    except sqlite3.OperationalError as e:
        print(e)

    # associate the right tiles with the cursor
    tile_dict = {}
    for tile in tiles:
        x, y, level = tile.coord
        tile_dict[(x, y)] = tile

    for row in cursor:
        tile = tile_dict[(row['x'], row['y'])]
        # TODO: get unique tiles if row['data'] is None
        data = row['data'] if row['data'] is not None else self.unique_tiles.get_data(
            row['unique_tile'])
        tile.timestamp = row['date_added']
        tile.size = len(data)
        tile.source = ImageSource(BytesIO(data), size=tile.size)
    cursor.close()
    return tiles

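
# Standalone illustration (assumed table and names, in-memory database) of the
# placeholder-pair pattern used by get_tiles above:

import sqlite3

conn = sqlite3.connect(':memory:')
conn.execute('CREATE TABLE tiles (x INTEGER, y INTEGER, data BLOB)')
conn.executemany('INSERT INTO tiles VALUES (?, ?, ?)',
                 [(0, 0, b'a'), (1, 0, b'b'), (1, 1, b'c')])

wanted = [(0, 0), (1, 1)]
stmt = 'SELECT x, y, data FROM tiles WHERE '
stmt += ' OR '.join(['(x = ? AND y = ?)'] * len(wanted))
params = [v for xy in wanted for v in xy]
rows = conn.execute(stmt, params).fetchall()  # rows for (0, 0) and (1, 1)
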
def load_tile(self, tile, with_metadata=False):
    if tile.source or tile.coord is None:
        return True

    cur = self.db.cursor()
    if self.supports_timestamp:
        cur.execute('''SELECT tile_data, last_modified FROM tiles
            WHERE tile_column = ? AND tile_row = ? AND zoom_level = ?''', tile.coord)
    else:
        cur.execute('''SELECT tile_data FROM tiles
            WHERE tile_column = ? AND tile_row = ? AND zoom_level = ?''', tile.coord)

    content = cur.fetchone()
    if content:
        tile.source = ImageSource(BytesIO(content[0]))
        if self.supports_timestamp:
            tile.timestamp = sqlite_datetime_to_timestamp(content[1])
        return True
    else:
        return False

def open(self, url, data=None):
    assert data is None, 'POST requests not supported by CGIClient'

    parsed_url = urlparse.urlparse(url)
    environ = os.environ.copy()
    environ.update({
        'QUERY_STRING': parsed_url.query,
        'REQUEST_METHOD': 'GET',
        'GATEWAY_INTERFACE': 'CGI/1.1',
        'SERVER_ADDR': '127.0.0.1',
        'SERVER_NAME': 'localhost',
        'SERVER_PROTOCOL': 'HTTP/1.0',
        'SERVER_SOFTWARE': 'MapProxy',
    })

    start_time = time.time()
    try:
        p = subprocess.Popen([self.script], env=environ,
            stdout=subprocess.PIPE,
            cwd=self.working_directory or os.path.dirname(self.script))
    except OSError as ex:
        if ex.errno == errno.ENOENT:
            raise SourceError('CGI script not found (%s)' % (self.script,))
        elif ex.errno == errno.EACCES:
            raise SourceError('No permission for CGI script (%s)' % (self.script,))
        else:
            raise

    stdout = p.communicate()[0]
    ret = p.wait()
    if ret != 0:
        raise HTTPClientError('Error during CGI call (exit code: %d)' % (ret,))

    if self.no_headers:
        content = stdout
        headers = dict()
    else:
        headers, content = split_cgi_response(stdout)

    status_match = re.match(r'(\d\d\d) ', headers.get('Status', ''))
    if status_match:
        status_code = status_match.group(1)
    else:
        status_code = '-'
    size = len(content)
    content = IOwithHeaders(BytesIO(content), headers)

    log_request('%s:%s' % (self.script, parsed_url.query),
        status_code, size=size, method='CGI', duration=time.time() - start_time)
    return content

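
# split_cgi_response is defined elsewhere in MapProxy; a simplified stand-in
# that only does the basic header/body split (names are illustrative) could
# look like this:

def split_headers_body(cgi_output):
    head, sep, body = cgi_output.partition(b'\r\n\r\n')
    if not sep:
        head, sep, body = cgi_output.partition(b'\n\n')
    headers = {}
    for line in head.splitlines():
        if b':' in line:
            name, _, value = line.partition(b':')
            headers[name.decode('latin-1').strip()] = value.decode('latin-1').strip()
    return headers, body
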
def load_tile(self, tile, with_metadata=False):
    if tile.source or tile.coord is None:
        return True

    key = self._key(tile)
    tile_data = self.r.get(key)
    if tile_data:
        tile.source = ImageSource(BytesIO(tile_data))
        return True
    return False

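
# Assuming self.r is a redis-py client, the underlying round-trip is a plain
# GET/SET of raw bytes. Minimal standalone sketch (requires a reachable Redis
# server; key name, host, and port are illustrative):

import redis
from io import BytesIO

r = redis.StrictRedis(host='localhost', port=6379, db=0)
r.set('tile:3:4:5', b'\x89PNG...')
data = r.get('tile:3:4:5')  # bytes, or None if the key is missing
source = BytesIO(data) if data else None
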
def parse_capabilities_url(url, version='1.1.1'):
    try:
        capabilities_url = wms_capapilities_url(url, version)
        capabilities_response = open_url(capabilities_url)
    except HTTPClientError as ex:
        log_error('ERROR: %s', ex.args[0])
        sys.exit(1)

    # the response stream can only be read once, so buffer it before parsing
    capabilities = BytesIO(capabilities_response.read())
    return parse_capabilities(capabilities, version=version)

def _load_tile(self, fh, tile):
    if tile.source or tile.coord is None:
        return True

    x, y = self._rel_tile_coord(tile.coord)
    offset, size = self._tile_offset_size(fh, x, y)
    if not size:
        return False

    fh.seek(offset)
    data = fh.read(size)

    tile.source = ImageSource(BytesIO(data))
    return True

def _init_index(self):
    self._initialized = True
    if os.path.exists(self.filename):
        return
    ensure_directory(self.filename)
    buf = BytesIO()
    buf.write(struct.pack(BUNDLE_V2_HEADER_STRUCT_FORMAT, *BUNDLE_V2_HEADER))
    # Empty index (ArcGIS stores an offset of 4 and size of 0 for missing tiles)
    buf.write(struct.pack('<%dQ' % BUNDLE_V2_TILES, *(4, ) * BUNDLE_V2_TILES))
    write_atomic(self.filename, buf.getvalue())

def load_tiles(self, tiles, with_metadata=False, dimensions=None):
    # associate the right tiles with the cursor
    tile_dict = {}
    coords = []
    for tile in tiles:
        if tile.source or tile.coord is None:
            continue
        x, y, level = tile.coord
        coords.append(x)
        coords.append(y)
        coords.append(level)
        tile_dict[(x, y)] = tile

    if not tile_dict:
        # all tiles loaded or coords are None
        return True

    if self.supports_timestamp:
        stmt_base = "SELECT tile_column, tile_row, tile_data, last_modified FROM tiles WHERE "
    else:
        stmt_base = "SELECT tile_column, tile_row, tile_data FROM tiles WHERE "

    loaded_tiles = 0

    # SQLite is limited to 1000 args -> split into multiple requests if more arguments are needed
    while coords:
        cur_coords = coords[:999]

        stmt = stmt_base + ' OR '.join(
            ['(tile_column = ? AND tile_row = ? AND zoom_level = ?)'] * (len(cur_coords) // 3))

        cursor = self.db.cursor()
        cursor.execute(stmt, cur_coords)

        for row in cursor:
            loaded_tiles += 1
            tile = tile_dict[(row[0], row[1])]
            data = row[2]
            tile.size = len(data)
            tile.source = ImageSource(BytesIO(data))
            if self.supports_timestamp:
                tile.timestamp = sqlite_datetime_to_timestamp(row[3])
        cursor.close()

        coords = coords[999:]

    return loaded_tiles == len(tile_dict)

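
# Standalone sketch of the 999-parameter batching used above; 999 keeps each
# batch within SQLite's historical 999/1000 bound-parameter limit and is
# divisible by 3, so x/y/level triples are never split across batches.
# The helper name is illustrative.

def chunked_params(params, size=999):
    for i in range(0, len(params), size):
        yield params[i:i + size]

coords = [0, 0, 1, 1, 0, 1, 2, 2, 1]  # flattened (x, y, level) triples
for batch in chunked_params(coords):
    placeholders = ' OR '.join(
        ['(tile_column = ? AND tile_row = ? AND zoom_level = ?)'] * (len(batch) // 3))
    # cursor.execute(stmt_base + placeholders, batch)
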
def load_tile(self, tile, with_metadata=False):
    if tile.source or tile.coord is None:
        return True

    cur = self.db.cursor()
    cur.execute("""SELECT tile_data FROM [{0}]
        WHERE tile_column = ? AND tile_row = ? AND zoom_level = ?""".format(self.table_name),
        tile.coord)

    content = cur.fetchone()
    if content:
        tile.source = ImageSource(BytesIO(content[0]))
        return True
    else:
        return False

def load_tile(self, tile, with_metadata=False):
    # bulk loading with load_tiles is not implemented, because
    # CouchDB's /all_docs? does not include attachments
    if tile.source or tile.coord is None:
        return True

    url = self.document_url(tile.coord) + '?attachments=true'
    self.init_db()
    resp = self.req_session.get(url, headers={'Accept': 'application/json'})
    if resp.status_code == 200:
        doc = json.loads(codecs.decode(resp.content, 'utf-8'))
        tile_data = BytesIO(base64.b64decode(doc['_attachments']['tile']['data']))
        tile.source = ImageSource(tile_data)
        tile.timestamp = doc.get(self.md_template.timestamp_key)
        return True
    return False

def load_tile(self, tile, with_metadata=False):
    if tile.source or tile.coord is None:
        return True

    idx = BundleIndex(self.base_filename + BUNDLEX_EXT)
    x, y = self._rel_tile_coord(tile.coord)
    offset = idx.tile_offset(x, y)
    if offset == 0:
        return False

    bundle = BundleData(self.base_filename + BUNDLE_EXT, self.offset)
    data = bundle.read_tile(offset)
    if not data:
        return False
    tile.source = ImageSource(BytesIO(data))

    return True

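
# BundleData.read_tile is defined elsewhere; a hedged sketch of reading one
# record, assuming the V1 bundle layout where each tile is stored as a 4-byte
# little-endian length followed by the tile bytes (function name is illustrative):

import struct

def read_tile_record(fh, offset):
    fh.seek(offset)
    size = struct.unpack('<L', fh.read(4))[0]
    if size == 0:
        return None
    return fh.read(size)
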
def load_tiles(self, tiles, with_metadata=False):
    # associate the right tiles with the cursor
    tile_dict = {}
    coords = []
    for tile in tiles:
        if tile.source or tile.coord is None:
            continue
        x, y, level = tile.coord
        coords.append(x)
        coords.append(y)
        coords.append(level)
        tile_dict[(x, y)] = tile

    if not tile_dict:
        # all tiles loaded or coords are None
        return True

    if len(coords) > 1000:
        # SQLite is limited to 1000 args
        raise CacheBackendError('cannot query SQLite for more than 333 tiles')

    if self.supports_timestamp:
        stmt = "SELECT tile_column, tile_row, tile_data, last_modified FROM tiles WHERE "
    else:
        stmt = "SELECT tile_column, tile_row, tile_data FROM tiles WHERE "
    stmt += ' OR '.join(
        ['(tile_column = ? AND tile_row = ? AND zoom_level = ?)'] * (len(coords) // 3))

    cursor = self.db.cursor()
    cursor.execute(stmt, coords)

    loaded_tiles = 0
    for row in cursor:
        loaded_tiles += 1
        tile = tile_dict[(row[0], row[1])]
        data = row[2]
        tile.size = len(data)
        tile.source = ImageSource(BytesIO(data))
        if self.supports_timestamp:
            tile.timestamp = sqlite_datetime_to_timestamp(row[3])
    cursor.close()
    return loaded_tiles == len(tile_dict)

def load_tiles(self, tiles, with_metadata=False):
    missing = False

    with self.index().readonly() as idx:
        if not idx:
            return False
        with self.data().readonly() as bundle:
            for t in tiles:
                if t.source or t.coord is None:
                    continue
                x, y = self._rel_tile_coord(t.coord)
                offset = idx.tile_offset(x, y)
                if offset == 0:
                    missing = True
                    continue

                data = bundle.read_tile(offset)
                if not data:
                    missing = True
                    continue

                t.source = ImageSource(BytesIO(data))

    return not missing