def _store_bulk(self, tiles):
    """Encode and insert many tiles in a single sqlite transaction.

    Returns True on success, False when the INSERT fails (e.g. the
    database file is locked by another process).
    """
    records = []
    # tile_buffer (as_buffer) will encode the tile to the target format
    # we collect all tiles before, to avoid having the db transaction
    # open during this slow encoding
    for tile in tiles:
        with tile_buffer(tile) as buf:
            if PY2:
                content = buffer(buf.read())
            else:
                content = buf.read()
        x, y, level = tile.coord
        if self.supports_timestamp:
            records.append((level, x, y, content, time.time()))
        else:
            records.append((level, x, y, content))

    cursor = self.db.cursor()
    try:
        if self.supports_timestamp:
            stmt = "INSERT OR REPLACE INTO tiles (zoom_level, tile_column, tile_row, tile_data, last_modified) VALUES (?,?,?,?, datetime(?, 'unixepoch', 'localtime'))"
            cursor.executemany(stmt, records)
        else:
            stmt = "INSERT OR REPLACE INTO tiles (zoom_level, tile_column, tile_row, tile_data) VALUES (?,?,?,?)"
            cursor.executemany(stmt, records)
        self.db.commit()
    except sqlite3.OperationalError as ex:
        # Logger.warn is a deprecated alias for warning()
        log.warning('unable to store tile: %s', ex)
        return False
    return True
def _store_bulk(self, tiles):
    """Store each tile as a riak object with write-quorum w=dw=pw=1.

    Returns False on the first RiakError, True when all tiles stored.
    """
    for tile in tiles:
        res = self._get_object(tile.coord)
        with tile_buffer(tile) as buf:
            res.encoded_data = buf.read()
        res.usermeta = {
            'timestamp': str(tile.timestamp),
            'size': str(tile.size),
        }
        if self.use_secondary_index:
            x, y, z = tile.coord
            # fixed-width key keeps the secondary index lexicographically sortable
            res.add_index('tile_coord_bin', '%02d-%07d-%07d' % (z, x, y))
        try:
            res.store(w=1, dw=1, pw=1, return_body=False,
                      timeout=self.request_timeout)
        except riak.RiakError as ex:
            # Logger.warn is a deprecated alias for warning()
            log.warning('unable to store tile: %s', ex)
            return False
    return True
def _store(self, tile, location):
    """Write the encoded tile atomically to `location`."""
    # a stale symlink would redirect the write — remove it first
    if os.path.islink(location):
        os.unlink(location)
    with tile_buffer(tile) as buf:
        log.debug("writing %r to %s", tile.coord, location)
        write_atomic(location, buf.read())
def write_tiles(self, tiles):
    """Write tiles into a metatile file: header, offset table, tile data.

    Layout: 4-byte b'META' magic, tile count, x/y/z of the first tile,
    then count (offset, size) pairs, followed by the raw tile data.
    """
    tile_positions = []
    count = len(tiles)  # self.meta_tile.grid_size[0]
    header_size = (
        4            # META
        + 4          # metasize**2
        + 3 * 4      # x, y, z
        + count * 8  # offset/size * tiles
    )
    # NOTE(review): output path is hard-coded — presumably prototype code
    with open('/tmp/foo.metatile', 'wb') as f:
        # file is opened binary: the magic must be bytes — writing the
        # str "META" raises TypeError on Python 3
        f.write(b"META")
        f.write(struct.pack('i', count))
        f.write(struct.pack('iii', *tiles[0].coord))
        offsets_header_pos = f.tell()
        # reserve the header area; tile data starts right after it
        f.seek(header_size, 0)
        for tile in tiles:
            offset = f.tell()
            with tile_buffer(tile) as buf:
                tile_data = buf.read()
                f.write(tile_data)
            tile_positions.append((offset, len(tile_data)))
        # go back and fill in the offset/size table
        f.seek(offsets_header_pos, 0)
        for offset, size in tile_positions:
            f.write(struct.pack('ii', offset, size))
def _tile_doc(self, tile):
    """Build the CouchDB document (id, body) for a single tile."""
    tile_id = self.document_url(tile.coord, relative=True)
    tile_doc = self.md_template.doc(tile, self.tile_grid) if self.md_template else {}
    tile_doc['_id'] = tile_id

    with tile_buffer(tile) as buf:
        raw = buf.read()

    # base64 without line breaks, as an ASCII str
    encoded = codecs.decode(base64.b64encode(raw).replace(b'\n', b''), 'ascii')
    attachment = {
        'content_type': 'image/' + self.file_ext,
        'data': encoded,
    }
    if self.tile_attribute:
        tile_doc[self.tile_attribute] = attachment
    else:
        tile_doc['_attachments'] = {'tile': attachment}
    return tile_id, tile_doc
def _store(self, tile, location):
    """Encode `tile` and write it atomically to `location`."""
    # replace a possible symlink with a regular file
    if os.path.islink(location):
        os.unlink(location)
    with tile_buffer(tile) as buf:
        log.debug('writing %r to %s', tile.coord, location)
        write_atomic(location, buf.read())
def write_tiles(self, tiles):
    """Write tiles into a metatile file: header, offset table, tile data.

    Layout: 4-byte b'META' magic, tile count, x/y/z of the first tile,
    then count (offset, size) pairs, followed by the raw tile data.
    """
    tile_positions = []
    count = len(tiles)  # self.meta_tile.grid_size[0]
    header_size = (
        4            # META
        + 4          # metasize**2
        + 3 * 4      # x, y, z
        + count * 8  # offset/size * tiles
    )
    # NOTE(review): output path is hard-coded — presumably prototype code
    with open('/tmp/foo.metatile', 'wb') as f:
        # binary mode: the magic must be bytes — the str "META" would
        # raise TypeError on Python 3
        f.write(b"META")
        f.write(struct.pack('i', count))
        f.write(struct.pack('iii', *tiles[0].coord))
        offsets_header_pos = f.tell()
        # skip over the header; tile data begins after it
        f.seek(header_size, 0)
        for tile in tiles:
            offset = f.tell()
            with tile_buffer(tile) as buf:
                tile_data = buf.read()
                f.write(tile_data)
            tile_positions.append((offset, len(tile_data)))
        # rewind and fill in the offset/size table
        f.seek(offsets_header_pos, 0)
        for offset, size in tile_positions:
            f.write(struct.pack('ii', offset, size))
def set_tile(self, tile):
    # Store a tile row in self.table_name, deduplicating single-color
    # tiles through self.unique_tiles. Returns False only when the
    # database stays locked during the UPDATE fallback; True otherwise.
    x, y, z = tile.coord
    # the tile must fall inside the configured grid extent
    assert self.grid[0] <= x < self.grid[2]
    assert self.grid[1] <= y < self.grid[3]
    color = is_single_color_image(tile.source.as_image())
    with tile_buffer(tile) as buf:
        # Python 2 buffer() wraps the encoded image for the BLOB column
        _data = buffer(buf.read())
    if color:
        # single-color tile: data goes to the shared unique-tiles store,
        # this row only references it by its hex color string
        data = None
        _color = ''.join('%02x' % v for v in color)
        self.unique_tiles.set_data(_data, _color)
    else:
        #get value of cStringIO-Object and store it to a buffer
        data = _data
        _color = None
    timestamp = int(time.time())
    cursor = self.db.cursor()
    stmt = "INSERT INTO %s (x, y, data, date_added, unique_tile) VALUES (?,?,?,?,?)" % (self.table_name)
    try:
        cursor.execute(stmt, (x, y, data, timestamp, _color))
    except (sqlite3.IntegrityError, sqlite3.OperationalError) as e:
        #tile is already present, updating data
        stmt = "UPDATE %s SET data = ?, date_added = ?, unique_tile = ? WHERE x = ? AND y = ?" % (self.table_name)
        try:
            cursor.execute(stmt, (data, timestamp, _color, x, y))
        except sqlite3.OperationalError as e:
            #database is locked
            print(e)
            return False
    return True
def _store(self, tile, location):
    """Encode `tile` and write it to `location` (non-atomic variant)."""
    # never write through a symlink — drop it and create a plain file
    if os.path.islink(location):
        os.unlink(location)
    with tile_buffer(tile) as buf, open(location, 'wb') as out:
        log.debug('writing %r to %s', tile.coord, location)
        out.write(buf.read())
def store_tile(self, tile):
    """Upload a single tile to Azure Blob storage; no-op if already stored."""
    if tile.stored:
        return
    key = self.tile_key(tile)
    log.debug('AzureBlob: store_tile, key: %s' % key)
    settings = ContentSettings(content_type='image/' + self.file_ext)
    with tile_buffer(tile) as buf:
        self.conn.upload_blob(
            name=key,
            data=buf,
            overwrite=True,
            content_settings=settings,
        )
def store_tile(self, tile):
    """Insert a single tile into the mbtiles `tiles` table.

    Always returns True; sqlite errors are not caught here.
    """
    if tile.stored:
        return True
    with tile_buffer(tile) as buf:
        # Python 2 buffer() wraps the encoded image for the BLOB column
        content = buffer(buf.read())
    x, y, level = tile.coord
    cursor = self.db.cursor()
    stmt = "INSERT OR REPLACE INTO tiles (zoom_level, tile_column, tile_row, tile_data) VALUES (?,?,?,?)"
    # content is already a buffer; the second buffer() wrap was redundant
    cursor.execute(stmt, (level, x, y, content))
    self.db.commit()
    return True
def store_tiles(self, tiles):
    """Write all not-yet-stored tiles into their compact-cache bundles.

    Returns True only when every pending tile was stored.
    """
    pending = [t for t in tiles if not t.stored]
    stored_count = 0
    for bundle_file, bundle_tiles in iteritems(self._get_bundle_tiles(pending)):
        # encode everything up front so the bundle file stays open briefly
        records = []
        for tile in bundle_tiles:
            with tile_buffer(tile) as buf:
                records.append((buf.read(), tile))
        with BundleDataV2(bundle_file, mode="write") as bundledata:
            for record in records:
                if bundledata.store_tile(*record):
                    stored_count += 1
    return stored_count == len(pending)
def store_tile(self, tile):
    """Store a tile in Redis, applying the optional TTL; returns SET's result."""
    if tile.stored:
        return True
    cache_key = self._key(tile)
    with tile_buffer(tile) as buf:
        payload = buf.read()
    result = self.r.set(cache_key, payload)
    if self.ttl:
        # use ms expire times for unit-tests
        self.r.pexpire(cache_key, int(self.ttl * 1000))
    return result
def store_tile(self, tile):
    """INSERT OR REPLACE a single tile, with last_modified when supported."""
    if tile.stored:
        return True
    with tile_buffer(tile) as buf:
        content = buffer(buf.read())
    x, y, level = tile.coord
    cursor = self.db.cursor()
    if self.supports_timestamp:
        stmt = "INSERT OR REPLACE INTO tiles (zoom_level, tile_column, tile_row, tile_data, last_modified) VALUES (?,?,?,?, datetime(?, 'unixepoch', 'localtime'))"
        params = (level, x, y, content, time.time())
    else:
        stmt = "INSERT OR REPLACE INTO tiles (zoom_level, tile_column, tile_row, tile_data) VALUES (?,?,?,?)"
        params = (level, x, y, content)
    cursor.execute(stmt, params)
    self.db.commit()
    return True
def store_tile(self, tile):
    """Append one tile to the bundle file and record it in the index."""
    if tile.stored:
        return True
    with tile_buffer(tile) as buf:
        payload = buf.read()
    # bundle and index are only touched while holding the file lock
    with FileLock(self.lock_filename):
        bundle = BundleData(self.base_filename + BUNDLE_EXT, self.offset)
        idx = BundleIndex(self.base_filename + BUNDLEX_EXT)
        rel_x, rel_y = self._rel_tile_coord(tile.coord)
        prev_offset = idx.tile_offset(rel_x, rel_y)
        offset, size = bundle.append_tile(payload, prev_offset=prev_offset)
        idx.update_tile_offset(rel_x, rel_y, offset=offset, size=size)
    return True
def store_tile(self, tile):
    """Upload one tile to S3, setting the image content type when known."""
    if tile.stored:
        return
    key = self.tile_key(tile)
    log.debug('S3: store_tile, key: %s' % key)
    if self.file_ext in ('jpeg', 'png'):
        extra_args = {'ContentType': 'image/' + self.file_ext}
    else:
        extra_args = {}
    with tile_buffer(tile) as buf:
        # upload_fileobj closes buf, wrap in NopCloser
        self.conn().upload_fileobj(
            NopCloser(buf),
            self.bucket_name,
            key,
            ExtraArgs=extra_args,
        )
def store_tile(self, tile):
    """
    Upload the given `tile` to the S3 bucket.

    Stores the `Tile.source` under the `tile_location` key.
    """
    if tile.stored:
        return
    location = self.tile_location(tile)
    log.debug('store_tile, location: %s' % location)
    s3_key = boto.s3.key.Key(self.bucket)
    s3_key.key = location
    with tile_buffer(tile) as buf:
        s3_key.set_contents_from_file(buf)
        s3_key.close()
def _store_bulk(self, tiles):
    """Store tiles through the old riak client API (set_data/set_usermeta)."""
    for tile in tiles:
        obj = self._get_object(tile.coord)
        with tile_buffer(tile) as buf:
            obj.set_data(buf.read())
        obj.set_usermeta({
            'timestamp': str(tile.timestamp),
            'size': str(tile.size),
        })
        if self.use_secondary_index:
            x, y, z = tile.coord
            # zero-padded key keeps the index lexicographically ordered
            obj.add_index('tile_coord_bin', '%02d-%07d-%07d' % (z, x, y))
        obj.store()
    return True
def _store_bulk(self, tiles):
    """Store each tile as a riak object; always returns True."""
    for tile in tiles:
        obj = self._get_object(tile.coord)
        with tile_buffer(tile) as buf:
            obj.encoded_data = buf.read()
        obj.usermeta = {
            'timestamp': str(tile.timestamp),
            'size': str(tile.size),
        }
        if self.use_secondary_index:
            x, y, z = tile.coord
            # zero-padded key keeps the index lexicographically ordered
            obj.add_index('tile_coord_bin', '%02d-%07d-%07d' % (z, x, y))
        obj.store(return_body=False)
    return True
def _tile_doc(self, tile):
    """Build the CouchDB document (id, body) for a single tile."""
    import base64  # local import: keeps the fix self-contained

    tile_id = self.document_url(tile.coord, relative=True)
    if self.md_template:
        tile_doc = self.md_template.doc(tile, self.tile_grid)
    else:
        tile_doc = {}
    tile_doc['_id'] = tile_id
    with tile_buffer(tile) as buf:
        data = buf.read()
    # str.encode('base64') only exists on Python 2; base64.b64encode
    # produces the same ASCII output (without line breaks) on 2 and 3
    encoded = base64.b64encode(data).replace(b'\n', b'').decode('ascii')
    tile_doc['_attachments'] = {
        'tile': {
            'content_type': 'image/' + self.file_ext,
            'data': encoded,
        }
    }
    return tile_id, tile_doc
def store_tiles(self, tiles):
    """Encode all unstored tiles, then write them while holding the lock."""
    self._init_index()
    # encode outside the lock — tile_buffer is the slow part
    encoded = []
    for tile in tiles:
        if tile.stored:
            continue
        with tile_buffer(tile) as buf:
            encoded.append((tile.coord, buf.read()))
    with FileLock(self.lock_filename):
        with self._readwrite() as fh:
            for coord, payload in encoded:
                self._store_tile(fh, coord, payload)
    return True
def store_tile(self, tile):
    """INSERT OR REPLACE one tile; returns False when sqlite errors out."""
    if tile.stored:
        return True
    with tile_buffer(tile) as buf:
        content = buffer(buf.read())
    x, y, level = tile.coord
    cursor = self.db.cursor()
    try:
        if self.supports_timestamp:
            stmt = "INSERT OR REPLACE INTO tiles (zoom_level, tile_column, tile_row, tile_data, last_modified) VALUES (?,?,?,?, datetime(?, 'unixepoch', 'localtime'))"
            cursor.execute(stmt, (level, x, y, content, time.time()))
        else:
            stmt = "INSERT OR REPLACE INTO tiles (zoom_level, tile_column, tile_row, tile_data) VALUES (?,?,?,?)"
            cursor.execute(stmt, (level, x, y, content))
        self.db.commit()
    # `except E, ex` is Python-2-only syntax (SyntaxError on Python 3);
    # use the `as` form, and warning() over the deprecated warn() alias
    except sqlite3.OperationalError as ex:
        log.warning('unable to store tile: %s', ex)
        return False
    return True
def _store_bulk(self, tiles):
    """Store tiles in riak; returns False on the first failed store."""
    for tile in tiles:
        res = self._get_object(tile.coord)
        with tile_buffer(tile) as buf:
            res.encoded_data = buf.read()
        res.usermeta = {
            'timestamp': str(tile.timestamp),
            'size': str(tile.size),
        }
        if self.use_secondary_index:
            x, y, z = tile.coord
            res.add_index('tile_coord_bin', '%02d-%07d-%07d' % (z, x, y))
        try:
            res.store(return_body=False, timeout=self.request_timeout)
        # `except E, ex` is Python-2-only syntax; use the `as` form
        except riak.RiakError as ex:
            log.warning('unable to store tile: %s', ex)
            return False
    # previously fell off the end (returning None); report success
    # explicitly, like the other _store_bulk implementations
    return True
def store_tiles(self, tiles):
    """Encode unstored tiles, then append them to bundle and index under the lock."""
    # encode before locking: tile_buffer is the expensive step
    encoded = []
    for tile in tiles:
        if tile.stored:
            continue
        with tile_buffer(tile) as buf:
            encoded.append((tile.coord, buf.read()))
    with FileLock(self.lock_filename):
        with self.data().readwrite() as bundle:
            with self.index().readwrite() as idx:
                for coord, payload in encoded:
                    rel_x, rel_y = self._rel_tile_coord(coord)
                    prev_offset = idx.tile_offset(rel_x, rel_y)
                    offset, size = bundle.append_tile(payload, prev_offset=prev_offset)
                    idx.update_tile_offset(rel_x, rel_y, offset=offset, size=size)
    return True
def store_tile(self, tile):
    """INSERT OR REPLACE one tile; returns False when sqlite errors out."""
    if tile.stored:
        return True
    with tile_buffer(tile) as buf:
        if PY2:
            content = buffer(buf.read())
        else:
            content = buf.read()
    x, y, level = tile.coord
    cursor = self.db.cursor()
    try:
        if self.supports_timestamp:
            stmt = "INSERT OR REPLACE INTO tiles (zoom_level, tile_column, tile_row, tile_data, last_modified) VALUES (?,?,?,?, datetime(?, 'unixepoch', 'localtime'))"
            cursor.execute(stmt, (level, x, y, content, time.time()))
        else:
            stmt = "INSERT OR REPLACE INTO tiles (zoom_level, tile_column, tile_row, tile_data) VALUES (?,?,?,?)"
            cursor.execute(stmt, (level, x, y, content))
        self.db.commit()
    except sqlite3.OperationalError as ex:
        # Logger.warn is a deprecated alias for warning()
        log.warning('unable to store tile: %s', ex)
        return False
    return True
def set_tile(self, tile):
    # Store a tile row in self.table_name, deduplicating single-color
    # tiles through self.unique_tiles. Returns False only when the
    # database stays locked during the UPDATE fallback; True otherwise.
    x, y, z = tile.coord
    # the tile must fall inside the configured grid extent
    assert self.grid[0] <= x < self.grid[2]
    assert self.grid[1] <= y < self.grid[3]
    color = is_single_color_image(tile.source.as_image())
    with tile_buffer(tile) as buf:
        # Python 2 buffer() wraps the encoded image for the BLOB column
        _data = buffer(buf.read())
    if color:
        # single-color tile: data goes to the shared unique-tiles store,
        # this row only references it by its hex color string
        data = None
        _color = ''.join('%02x' % v for v in color)
        self.unique_tiles.set_data(_data, _color)
    else:
        #get value of cStringIO-Object and store it to a buffer
        data = _data
        _color = None
    timestamp = int(time.time())
    cursor = self.db.cursor()
    stmt = "INSERT INTO %s (x, y, data, date_added, unique_tile) VALUES (?,?,?,?,?)" % (
        self.table_name)
    try:
        cursor.execute(stmt, (x, y, data, timestamp, _color))
    except (sqlite3.IntegrityError, sqlite3.OperationalError) as e:
        #tile is already present, updating data
        stmt = "UPDATE %s SET data = ?, date_added = ?, unique_tile = ? WHERE x = ? AND y = ?" % (
            self.table_name)
        try:
            cursor.execute(stmt, (data, timestamp, _color, x, y))
        except sqlite3.OperationalError as e:
            #database is locked
            print(e)
            return False
    return True
def _store_bulk(self, tiles):
    """Encode all tiles, then insert them in one transaction.

    Returns False when the INSERT fails (e.g. database locked).
    """
    records = []
    # tile_buffer (as_buffer) will encode the tile to the target format
    # we collect all tiles before, to avoid having the db transaction
    # open during this slow encoding
    for tile in tiles:
        with tile_buffer(tile) as buf:
            if PY2:
                content = buffer(buf.read())
            else:
                content = buf.read()
        x, y, level = tile.coord
        records.append((level, x, y, content))

    cursor = self.db.cursor()
    try:
        stmt = "INSERT OR REPLACE INTO [{0}] (zoom_level, tile_column, tile_row, tile_data) VALUES (?,?,?,?)".format(
            self.table_name)
        cursor.executemany(stmt, records)
        self.db.commit()
    except sqlite3.OperationalError as ex:
        # warning() instead of the deprecated warn() alias, matching the
        # identical sibling implementation elsewhere in the project
        log.warning('unable to store tile: %s', ex)
        return False
    return True
def _store_bulk(self, tiles):
    """Encode all tiles up front, then write them in one transaction.

    Encoding happens before the cursor is opened so the transaction
    stays short. Returns False on a sqlite OperationalError.
    """
    # tile_buffer (as_buffer) will encode the tile to the target format
    records = []
    for tile in tiles:
        with tile_buffer(tile) as buf:
            raw = buffer(buf.read()) if PY2 else buf.read()
        x, y, level = tile.coord
        records.append((level, x, y, raw))

    stmt = (
        "INSERT OR REPLACE INTO [{0}] "
        "(zoom_level, tile_column, tile_row, tile_data) "
        "VALUES (?,?,?,?)".format(self.table_name)
    )
    cursor = self.db.cursor()
    try:
        cursor.executemany(stmt, records)
        self.db.commit()
    except sqlite3.OperationalError as ex:
        log.warning('unable to store tile: %s', ex)
        return False
    return True
def async_store(self, key, tile):
    """Upload a tile to the given S3 key (intended for a worker thread)."""
    # str.format uses {} placeholders; the old '%s, %s' template was
    # printed literally, never interpolating key and tile
    print('Storing {0}, {1}'.format(key, tile))
    with tile_buffer(tile) as buf:
        key.set_contents_from_file(buf)