def run(self):
    """Find tile_ids referenced more than ``min_dup_count`` times in the
    ``map`` table (optionally restricted to one zoom level) and report them.

    Depending on configuration the duplicate ids are printed to stdout,
    appended to ``self.outfile``, and/or described verbosely via print_err.
    Returns the list of duplicate tile_ids, most-referenced first.
    """
    if self.outfile and not self.use_stdout:
        # Create or truncate the output file up front, but don't write
        # anything to it yet.
        with self.outfile.open("w"):
            pass
    with sqlite3.connect(self.mbtiles) as conn:
        results = []
        sql_opts = []
        if self.show_size:
            # Join against images to report each duplicate's payload size.
            # BUGFIX: the zoom filter must live inside the subquery, where
            # map.zoom_level is in scope -- previously it was appended after
            # the JOIN, referencing a column that does not exist there.
            sql = "SELECT cnt, dups.tile_id, LENGTH(tile_data) FROM (" \
                  " SELECT tile_id, COUNT(*) as cnt FROM map"
            if self.zoom:
                sql += " WHERE zoom_level=?"
                sql_opts.append(self.zoom)
            sql += " GROUP BY tile_id HAVING cnt > ?" \
                   ") dups JOIN images ON images.tile_id = dups.tile_id"
            sql_opts.append(self.min_dup_count)
        else:
            sql = "SELECT COUNT(*) cnt, tile_id FROM map"
            if self.zoom:
                sql += " WHERE zoom_level=?"
                sql_opts.append(self.zoom)
            sql += " GROUP BY tile_id HAVING cnt > ?"
            sql_opts.append(self.min_dup_count)
        for vals in query(conn, sql, sql_opts):
            results.append(vals)
        # Sort by count descending (tuples compare element-wise).
        results.sort(reverse=True)
        size = None
        examples = None
        for vals in results:
            if len(vals) == 3:
                count, tile_id, size = vals
            else:
                count, tile_id = vals
            if self.show_examples:
                example_sql = "select zoom_level, tile_column, tile_row from map " \
                              "where tile_id = ? limit 5"
                examples = [f'{z}/{x}/{y}'
                            for z, x, y in query(conn, example_sql, [tile_id])]
            if self.verbose:
                res = f"{tile_id} x {count:,}"
                if self.show_size:
                    res += f', {size:,} bytes'
                if self.show_examples:
                    res += ', examples: ' + ', '.join(examples)
                print_err(res)
        results = [v[1] for v in results]
        if self.use_stdout:
            for v in results:
                print(v)
        elif self.outfile:
            with self.outfile.open("a") as f:
                f.writelines([str(v) + '\n' for v in results])
        return results
def copy(self, target_mbtiles, reset, auto_minmax):
    """Refresh the metadata table of *target_mbtiles*.

    Metadata is seeded from the source mbtiles, then selectively overridden
    from environment variables, augmented with file size / bounds / center,
    optionally auto-computed min/max zoom, and finally written out.
    """
    with sqlite3.connect(self.mbtiles) as conn:
        metadata = dict(query(conn, "SELECT name, value FROM metadata", []))
    # Environment variables, when set, win over the source metadata values.
    env_overrides = (
        ('name', 'METADATA_NAME'),
        ('minzoom', 'MIN_ZOOM'),
        ('maxzoom', 'MAX_ZOOM'),
        ('attribution', 'METADATA_ATTRIBUTION'),
        ('description', 'METADATA_DESCRIPTION'),
        ('version', 'METADATA_VERSION'),
    )
    for param, env_var in env_overrides:
        val = os.environ.get(env_var)
        if val is not None:
            metadata[param] = val
    metadata['filesize'] = os.path.getsize(target_mbtiles)
    bbox_str = os.environ.get('BBOX')
    if bbox_str:
        bbox = Bbox(bbox=bbox_str, center_zoom=os.environ.get('CENTER_ZOOM'))
        metadata["bounds"] = bbox.bounds_str()
        metadata["center"] = bbox.center_str()
    with sqlite3.connect(target_mbtiles) as conn:
        cursor = conn.cursor()
        if auto_minmax:
            metadata["minzoom"], metadata["maxzoom"] = get_minmax(cursor)
        update_metadata(cursor, metadata, reset)
    print("The metadata now contains these values:")
    self.print_all()
def print_all(self, file: str = None):
    """Print every metadata entry of *file* (defaults to self.mbtiles),
    name-aligned with 'json' forced last; optionally follow with a table
    of per-zoom tile counts and column/row ranges when self.show_ranges.
    """
    file = file or self.mbtiles
    data = self._get_metadata(file)
    if not data:
        print(f'There are no values present in {file} metadata table')
    else:
        width = max(len(key) for key in data)

        def sort_key(item):
            # Alphabetical, except the bulky 'json' entry sorts last.
            return 'zz' if item[0] == 'json' else item[0]

        for name, value in sorted(data.items(), key=sort_key):
            print(f'{name:{width}} {self.validate(name, value)[0]}')
    if self.show_ranges:
        with sqlite3.connect(file) as conn:
            sql = """\
                SELECT zoom_level,
                       COUNT(*) AS count,
                       MIN(tile_column) AS min_column,
                       MAX(tile_column) AS max_column,
                       MIN(tile_row) AS min_row,
                       MAX(tile_row) AS max_row
                FROM map
                GROUP BY zoom_level
            """
            res = [
                {
                    'Zoom': z,
                    'Tile count': f'{cnt:,}',
                    'Found tile ranges': f'{min_x},{min_y} x {max_x},{max_y}',
                }
                for z, cnt, min_x, max_x, min_y, max_y
                in sorted(query(conn, sql, []))
            ]
            print('\n' + tabulate(res, headers='keys'))
def print_all(self, show_json: bool = False, show_ranges: bool = False,
              file: str = None):
    """Print every metadata entry of *file* (defaults to self.mbtiles),
    name-aligned with 'json' forced last; when *show_ranges* is set, also
    print a table of per-zoom tile counts and column/row ranges.
    """
    file = file or self.mbtiles
    data = self._get_metadata(file)
    if not data:
        print(f"There are no values present in {file} metadata table")
    else:
        width = max(len(key) for key in data)

        def sort_key(item):
            # Alphabetical, except the bulky 'json' entry sorts last.
            return 'zz' if item[0] == 'json' else item[0]

        for name, value in sorted(data.items(), key=sort_key):
            print(f"{name:{width}} {validate(name, value, show_json)[0]}")
    if show_ranges:
        with sqlite3.connect(file) as conn:
            sql = """\
                SELECT zoom_level,
                       COUNT(*) as count,
                       MIN(tile_column) AS min_column,
                       MAX(tile_column) AS max_column,
                       MIN(tile_row) AS min_row,
                       MAX(tile_row) AS max_row
                FROM map
                GROUP BY zoom_level
            """
            res = [
                {
                    "Zoom": z,
                    "Tile count": f"{cnt:,}",
                    "Found tile ranges": f"{min_x},{min_y} x {max_x},{max_y}",
                }
                for z, cnt, min_x, max_x, min_y, max_y
                in sorted(query(conn, sql, []))
            ]
            print("\n" + tabulate(res, headers="keys"))
def print_all(self):
    """Print every metadata name/value pair, name-aligned, 'json' last."""
    with sqlite3.connect(self.mbtiles) as conn:
        data = list(query(conn, "SELECT name, value FROM metadata", []))
        # NOTE: max() with no default keeps the original behavior of
        # raising ValueError when the metadata table is empty.
        width = max(len(row[0]) for row in data)
        ordered = sorted(
            data, key=lambda row: 'zz' if row[0] == 'json' else row[0])
        for name, value in ordered:
            print(f"{name:{width}} {self.validate(name, value)[0]}")
def show_tile(self, zoom, x, y, show_names):
    """Print the tile at zoom/x/y, or report that it is missing."""
    with sqlite3.connect(self.mbtiles) as conn:
        sql = ("SELECT tile_data FROM tiles "
               "WHERE zoom_level=? AND tile_column=? AND tile_row=?")
        row = next(iter(query(conn, sql, [zoom, x, y])), None)
        if row is None:
            print(f"Tile {zoom}/{x}/{y} not found")
        else:
            print_tile(row[0], zoom, x, y, show_names)
def show_tile(self, zoom, x, y, show_names, summary):
    """Print the tile at zoom/x/y (names/summary per flags), or report
    that it is missing."""
    with sqlite3.connect(self.mbtiles) as conn:
        sql = ('SELECT tile_data FROM tiles '
               'WHERE zoom_level=? AND tile_column=? AND tile_row=?')
        row = next(iter(query(conn, sql, [zoom, x, y])), None)
        if row is None:
            print(f'Tile {zoom}/{x}/{y} not found')
        else:
            print_tile(row[0], show_names, summary, f'{zoom}/{x}/{y}')
def tile_batches(self, conn: sqlite3.Connection, limit_to_keys=False):
    """Generate batches of tiles to be processed for the new zoom, based on
    the previous zoom level. Each yield contains two batches: one with
    "empty" tiles (those that match known keys), and another with non-empty
    tiles (only if limit_to_keys is False). The first batch can be inserted
    into mbtiles db as is. The second batch will be used as a list of tiles
    to be generated.
    """
    batch_size = 1000000
    zoom = self.zoom
    search_zoom = zoom - 1
    sql = "select tile_column, tile_row, tile_id from map where zoom_level=?"
    sql_args = [search_zoom]
    if limit_to_keys:
        sql += f" and tile_id IN ({','.join('?' * len(self.keys))})"
        sql_args += self.keys
    # PERF: build the membership set once -- `key in self.keys` against a
    # list would be O(len(keys)) per source tile, which dominates for
    # large key counts.
    known_keys = set(self.keys)
    with_key = []
    without_key = []
    max_y = 2 ** search_zoom - 1
    for x, y, key in query(conn, sql, sql_args):
        if limit_to_keys or key in known_keys:
            # Each parent expands into its four children at `zoom`
            # (row-major: dy outer, dx inner, preserving original order).
            for dy in (0, 1):
                for dx in (0, 1):
                    with_key.append((zoom, x * 2 + dx, y * 2 + dy, key))
        else:
            # mbtiles uses inverted Y (starts at the bottom)
            ry = max_y - y
            for dy in (0, 1):
                for dx in (0, 1):
                    without_key.append(f"{zoom}/{x * 2 + dx}/{ry * 2 + dy}\n")
        if len(with_key) > batch_size or len(without_key) > batch_size:
            yield with_key, without_key
            with_key = []
            without_key = []
    if with_key or without_key:
        yield with_key, without_key
def _get_metadata(file) -> Dict[str, str]:
    """Read the metadata table of the given mbtiles file as a name->value dict."""
    with sqlite3.connect(file) as conn:
        rows = query(conn, 'SELECT name, value FROM metadata', [])
        return dict(rows)