def _make_metadata(name):
    """Build processing metadata for one of the canonical data sources.

    ``name`` must be one of 'osm', 'ne', 'wof' or 'shp'; a KeyError is
    raised for anything else.
    """
    from tilequeue.process import Source
    from tilequeue.process import make_metadata

    # (short name, value) pairs for the well-known upstream datasets.
    known_sources = (
        ('osm', 'openstreetmap.org'),
        ('ne', 'naturalearthdata.com'),
        ('wof', 'whosonfirst.org'),
        ('shp', 'openstreetmapdata.com'),
    )
    by_name = {short: Source(short, value) for short, value in known_sources}
    return make_metadata(by_name[name])
def __init__(self, tables, source='test'):
    """Store the table lookup and resolve ``source`` to a Source object.

    If ``source`` names a real, registered source, that one is used (so
    tests can exercise genuine sources); otherwise a stand-in Source is
    fabricated with the given name as both name and value.
    """
    from tilequeue.process import lookup_source, Source

    self.tables = tables
    resolved = lookup_source(source)
    if resolved is None:
        # not a known source - fall back to a synthetic test value.
        resolved = Source(source, source)
    assert isinstance(resolved, Source)
    self.source = resolved
def make_test_metadata():
    """Return a Metadata wrapping a dummy 'test' source with no ways or
    relations attached."""
    from tilequeue.process import Source
    from tilequeue.query.fixture import Metadata

    test_source = Source('test', 'test')
    no_ways = []
    no_rels = []
    return Metadata(test_source, no_ways, no_rels)
def __call__(self, zoom, unpadded_bounds):
    """Emulate a database read over the fixture rows for one tile.

    For each fixture feature intersecting ``unpadded_bounds``, evaluate
    every configured layer's min-zoom function and assemble a "read row"
    shaped like a postgres result: per-layer property dicts under
    ``__<layer>_properties__`` keys, plus ``__id__``, ``__geometry__``
    (WKB) and optionally ``__label__`` (WKB label placement point).
    Returns the list of assembled rows.
    """
    read_rows = []
    bbox = box(*unpadded_bounds)

    for (fid, shape, props) in self.rows:
        # reject any feature which doesn't intersect the given bounds
        if bbox.disjoint(shape):
            continue

        # copy props so that any updates to it don't affect the original
        # data.
        props = props.copy()

        # TODO: there must be some better way of doing this?
        rels = props.pop('__relations__', [])
        ways = props.pop('__ways__', [])

        # place for assembing the read row as if from postgres
        read_row = {}

        # whether to generate a label placement centroid
        generate_label_placement = False

        # whether to clip to a padded box
        has_water_layer = False

        # tracking which layers claim this feature, as this is important to
        # figure out which layer will be assigned the name.
        claims_feature_at_some_zoom = set()

        for layer_name, info in self.layers.items():
            # skip layers whose geometry-type filter rejects this shape
            if not info.allows_shape_type(shape):
                continue

            source_value = props.get('source')
            source = lookup_source(source_value)

            # this is a bit of a hack to ensure that custom source values,
            # as used in the tests, get passed though the fixture data
            # fetcher intact.
            if source is None and source_value is not None:
                source = Source(source_value, source_value)

            meta = Metadata(source, ways, rels)
            min_zoom = info.min_zoom_fn(shape, props, fid, meta)

            # reject features which don't match in this layer
            if min_zoom is None:
                continue

            # make a note that this feature is claimed at some zoom by this
            # layer, which is important for name processing.
            claims_feature_at_some_zoom.add(layer_name)

            # reject anything which isn't in the current zoom range
            # note that this is (zoom+1) because things with a min_zoom of
            # (e.g) 14.999 should still be in the zoom 14 tile.
            #
            # also, if zoom >= 16, we should include all features, even
            # those with min_zoom > zoom.
            if zoom < 16 and (zoom + 1) <= min_zoom:
                continue

            # UGLY HACK: match the query for "max zoom" for NE places.
            # this removes larger cities at low zooms, and smaller cities
            # as the zoom increases and as the OSM cities start to "fade
            # in".
            if source and source.name == 'ne':
                # NOTE(review): pop_max appears to be stored as a string
                # in the fixture props - confirm; int() handles both.
                pop_max = int(props.get('pop_max', '0'))
                remove = ((zoom >= 8 and zoom < 10 and pop_max > 50000) or
                          (zoom >= 10 and zoom < 11 and pop_max > 20000) or
                          (zoom >= 11 and pop_max > 5000))
                if remove:
                    continue

            # if the feature exists in any label placement layer, then we
            # should consider generating a centroid
            label_layers = self.label_placement_layers.get(
                shape_type_lookup(shape), {})
            if layer_name in label_layers:
                generate_label_placement = True

            layer_props = layer_properties(
                fid, shape, props, layer_name, zoom, self.osm)

            if source:
                layer_props['source'] = source.value

            layer_props['min_zoom'] = min_zoom
            props_name = '__%s_properties__' % layer_name
            read_row[props_name] = layer_props
            if layer_name == 'water':
                has_water_layer = True

        # if at least one min_zoom / properties match
        if read_row:
            clip_box = bbox
            if has_water_layer:
                # water is clipped to a slightly padded box so that tile
                # edges don't show seams.
                pad_factor = 1.1
                clip_box = calculate_padded_bounds(
                    pad_factor, unpadded_bounds)
            clip_shape = clip_box.intersection(shape)

            # add back name into whichever of the pois, landuse or
            # buildings layers has claimed this feature.
            names = {}
            for k in name_keys(props):
                names[k] = props[k]
            if names:
                for layer_name in ('pois', 'landuse', 'buildings'):
                    if layer_name in claims_feature_at_some_zoom:
                        props_name = '__%s_properties__' % layer_name
                        if props_name in read_row:
                            read_row[props_name].update(names)

                        # break regardless of whether or not we managed to
                        # update the row - sometimes a feature is claimed
                        # in one layer at a min_zoom higher than another
                        # layer's min_zoom. so the feature is visible
                        # before it gets labelled.
                        break

            read_row['__id__'] = fid
            read_row['__geometry__'] = bytes(clip_shape.wkb)
            if generate_label_placement:
                read_row['__label__'] = bytes(
                    shape.representative_point().wkb)
            read_rows.append(read_row)

    return read_rows
def _make_rawr_fetcher(cfg, layer_data, query_cfg, io_pool):
    """Construct a RAWR-tile data fetcher from the 'rawr' YAML config.

    Validates the required config keys, builds the per-table Source
    mapping and label-placement layer sets, selects a storage backend
    ('s3', 'generate' or 'store') and returns the result of
    make_rawr_data_fetcher wired up with those pieces.

    NOTE(review): ``query_cfg`` and ``io_pool`` are unused in this
    visible code - possibly kept for signature parity with sibling
    fetcher factories; confirm before removing.
    """
    rawr_yaml = cfg.yml.get('rawr')
    assert rawr_yaml is not None, 'Missing rawr configuration in yaml'

    group_by_zoom = rawr_yaml.get('group-zoom')
    assert group_by_zoom is not None, 'Missing group-zoom rawr config'

    rawr_source_yaml = rawr_yaml.get('source')
    assert rawr_source_yaml, 'Missing rawr source config'

    table_sources = rawr_source_yaml.get('table-sources')
    assert table_sources, 'Missing definitions of source per table'

    # map text for table source onto Source objects
    # (reassigning values while iterating is safe here - no keys are
    # added or removed)
    for tbl, data in table_sources.items():
        source_name = data['name']
        source_value = data['value']
        table_sources[tbl] = Source(source_name, source_value)

    label_placement_layers = rawr_yaml.get('label-placement-layers', {})
    for geom_type, layers in label_placement_layers.items():
        assert geom_type in ('point', 'polygon', 'linestring'), \
            'Geom type %r not understood, expecting point, polygon or ' \
            'linestring.' % (geom_type,)
        # convert to a set for fast membership tests later
        label_placement_layers[geom_type] = set(layers)

    indexes_cfg = rawr_yaml.get('indexes')
    assert indexes_cfg, 'Missing definitions of table indexes.'

    # source types are:
    #   s3       - to fetch RAWR tiles from S3
    #   store    - to fetch RAWR tiles from any tilequeue tile source
    #   generate - to generate RAWR tiles directly, rather than trying to
    #              load them from S3. this can be useful for standalone use
    #              and testing. provide a postgresql subkey for database
    #              connection settings.
    source_type = rawr_source_yaml.get('type')
    if source_type == 's3':
        rawr_source_s3_yaml = rawr_source_yaml.get('s3')
        bucket = rawr_source_s3_yaml.get('bucket')
        assert bucket, 'Missing rawr source s3 bucket'
        region = rawr_source_s3_yaml.get('region')
        assert region, 'Missing rawr source s3 region'
        prefix = rawr_source_s3_yaml.get('prefix')
        assert prefix, 'Missing rawr source s3 prefix'
        suffix = rawr_source_s3_yaml.get('suffix')
        assert suffix, 'Missing rawr source s3 suffix'
        allow_missing_tiles = rawr_source_s3_yaml.get(
            'allow-missing-tiles', False)

        import boto3
        from tilequeue.rawr import RawrS3Source
        s3_client = boto3.client('s3', region_name=region)
        storage = RawrS3Source(
            s3_client, bucket, prefix, suffix, table_sources,
            allow_missing_tiles)

    elif source_type == 'generate':
        from raw_tiles.source.conn import ConnectionContextManager
        from raw_tiles.source.osm import OsmSource

        postgresql_cfg = rawr_source_yaml.get('postgresql')
        assert postgresql_cfg, 'Missing rawr postgresql config'

        conn_ctx = ConnectionContextManager(postgresql_cfg)
        rawr_osm_source = OsmSource(conn_ctx)
        storage = _NullRawrStorage(rawr_osm_source, table_sources)

    elif source_type == 'store':
        from tilequeue.store import make_store
        from tilequeue.rawr import RawrStoreSource

        store_cfg = rawr_source_yaml.get('store')
        store = make_store(
            store_cfg, credentials=cfg.subtree('aws credentials'))
        storage = RawrStoreSource(store, table_sources)

    else:
        assert False, 'Source type %r not understood. ' \
            'Options are s3, generate and store.' % (source_type,)

    # TODO: this needs to be configurable, everywhere! this is a long term
    # refactor - it's hard-coded in a bunch of places :-(
    max_z = 16

    layers = _make_layer_info(layer_data, cfg.process_yaml_cfg)

    return make_rawr_data_fetcher(
        group_by_zoom, max_z, storage, layers, indexes_cfg,
        label_placement_layers)