def _make_json_tiles(
        self, coord, post_process_data={}, db_features=[], cut_coords=[],
        buffer_cfg={}):
    from tilequeue.process import process_coord
    from tilequeue.tile import coord_to_mercator_bounds
    from tilequeue.format import json_format

    unpadded_bounds = coord_to_mercator_bounds(coord)
    feature_layers = [dict(
        layer_datum=dict(
            name='fake_layer',
            geometry_types=['Point'],
            transform_fn_names=[],
            sort_fn_name=None,
            is_clipped=False,
        ),
        padded_bounds=dict(point=unpadded_bounds),
        features=db_features,
    )]
    formats = [json_format]

    def _test_output_fn(*args):
        return dict(foo='bar', min_zoom=0)

    output_calc_mapping = dict(fake_layer=_test_output_fn)
    all_coords = [coord] + cut_coords
    tiles, extra = process_coord(
        coord, coord.zoom, feature_layers, post_process_data, formats,
        unpadded_bounds, all_coords, buffer_cfg, output_calc_mapping)

    return tiles
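# A minimal usage sketch (an assumption, not from the source): calling the
# helper above from a test with a single point feature. The test method name
# is hypothetical; the feature keys (__id__, __geometry__, __properties__)
# follow the db_features shape used by _make_tiles further down.
def test_make_json_tiles_sketch(self):
    from shapely.geometry import Point

    coord = Coordinate(zoom=1, column=0, row=0)
    feature = dict(
        __id__=1,
        __geometry__=Point(0, 0).wkb,
        __properties__={},
    )
    tiles = self._make_json_tiles(coord, db_features=[feature])
    # with no cut coords, all_coords is just [coord], so one JSON tile
    self.assertEqual(1, len(tiles))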
def _make_json_tiles(
        self, coord, post_process_data={}, db_features=[], cut_coords=[],
        buffer_cfg={}):
    from tilequeue.process import process_coord
    from tilequeue.tile import coord_to_mercator_bounds
    from tilequeue.format import json_format

    unpadded_bounds = coord_to_mercator_bounds(coord)
    feature_layers = [dict(
        layer_datum=dict(
            name='fake_layer',
            geometry_types=['Point'],
            transform_fn_names=[],
            sort_fn_name=None,
            is_clipped=False,
        ),
        padded_bounds=dict(point=unpadded_bounds),
        features=db_features,
    )]
    formats = [json_format]
    tiles, extra = process_coord(
        coord, coord.zoom, feature_layers, post_process_data, formats,
        unpadded_bounds, cut_coords, buffer_cfg)

    return tiles
def __call__(self, stop):
    # ignore ctrl-c interrupts when run from terminal
    signal.signal(signal.SIGINT, signal.SIG_IGN)

    output = OutputQueue(self.output_queue, self.tile_proc_logger, stop)

    saw_sentinel = False
    while not stop.is_set():
        try:
            data = self.input_queue.get(timeout=timeout_seconds)
        except Queue.Empty:
            continue
        if data is None:
            saw_sentinel = True
            break

        coord = data['coord']
        unpadded_bounds = data['unpadded_bounds']
        cut_coords = data['cut_coords']
        nominal_zoom = data['nominal_zoom']
        source_rows = data['source_rows']

        start = time.time()

        try:
            feature_layers = convert_source_data_to_feature_layers(
                source_rows, self.layer_data, unpadded_bounds, nominal_zoom)
            formatted_tiles, extra_data = process_coord(
                coord, nominal_zoom, feature_layers, self.post_process_data,
                self.formats, unpadded_bounds, cut_coords, self.buffer_cfg,
                self.output_calc_mapping)
        except Exception as e:
            stacktrace = format_stacktrace_one_line()
            self.tile_proc_logger.error(
                'Processing error', e, stacktrace, coord)
            self.stats_handler.proc_error()
            continue

        metadata = data['metadata']
        metadata['timing']['process'] = convert_seconds_to_millis(
            time.time() - start)
        metadata['layers'] = extra_data

        data = dict(
            metadata=metadata,
            coord=coord,
            formatted_tiles=formatted_tiles,
        )

        if output(coord, data):
            break

    if not saw_sentinel:
        _force_empty_queue(self.input_queue)
    self.tile_proc_logger.lifecycle('processor stopped')
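# Illustrative sketch (an assumption, not from the source) of the message
# shape the processor above pops off its input queue. Every key read in
# __call__ must be present, and metadata must carry a 'timing' dict so the
# 'process' entry can be recorded. The helper name is hypothetical.
def _example_processor_message(coord, source_rows):
    from tilequeue.tile import coord_to_mercator_bounds
    return dict(
        coord=coord,                        # the tile being rendered
        unpadded_bounds=coord_to_mercator_bounds(coord),
        cut_coords=[coord],                 # coords to cut from the render
        nominal_zoom=coord.zoom,            # zoom used for feature selection
        source_rows=source_rows,            # raw rows from the data fetcher
        metadata=dict(timing={}),           # 'layers' is filled in later
    )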
def __call__(self, stop):
    # ignore ctrl-c interrupts when run from terminal
    signal.signal(signal.SIGINT, signal.SIG_IGN)

    saw_sentinel = False
    while not stop.is_set():
        try:
            data = self.input_queue.get(timeout=timeout_seconds)
        except Queue.Empty:
            continue
        if data is None:
            saw_sentinel = True
            break

        coord = data['coord']
        feature_layers = data['feature_layers']
        unpadded_bounds = data['unpadded_bounds']
        cut_coords = data['cut_coords']
        nominal_zoom = data['nominal_zoom']

        start = time.time()

        try:
            formatted_tiles, extra_data = process_coord(
                coord, nominal_zoom, feature_layers, self.post_process_data,
                self.formats, unpadded_bounds, cut_coords, self.buffer_cfg)
        except Exception:
            stacktrace = format_stacktrace_one_line()
            self.logger.error('Error processing: %s - %s' % (
                serialize_coord(coord), stacktrace))
            continue

        metadata = data['metadata']
        metadata['timing']['process_seconds'] = time.time() - start
        metadata['layers'] = extra_data

        data = dict(
            metadata=metadata,
            coord=coord,
            formatted_tiles=formatted_tiles,
        )

        while not _non_blocking_put(self.output_queue, data):
            if stop.is_set():
                break

    if not saw_sentinel:
        _force_empty_queue(self.input_queue)
    self.logger.debug('processor stopped')
def __call__(self, stop):
    # ignore ctrl-c interrupts when run from terminal
    signal.signal(signal.SIGINT, signal.SIG_IGN)

    saw_sentinel = False
    while not stop.is_set():
        try:
            data = self.input_queue.get(timeout=timeout_seconds)
        except Queue.Empty:
            continue
        if data is None:
            saw_sentinel = True
            break

        coord = data['coord']
        feature_layers = data['feature_layers']
        unpadded_bounds = data['unpadded_bounds']
        cut_coords = data['cut_coords']

        start = time.time()

        try:
            formatted_tiles, extra_data = process_coord(
                coord, feature_layers, self.post_process_data, self.formats,
                unpadded_bounds, cut_coords, self.layers_to_format,
                self.buffer_cfg)
        except Exception:
            stacktrace = format_stacktrace_one_line()
            self.logger.error('Error processing: %s - %s' % (
                serialize_coord(coord), stacktrace))
            continue

        metadata = data['metadata']
        metadata['timing']['process_seconds'] = time.time() - start
        metadata['layers'] = extra_data

        data = dict(
            metadata=metadata,
            coord=coord,
            formatted_tiles=formatted_tiles,
        )

        while not _non_blocking_put(self.output_queue, data):
            if stop.is_set():
                break

    if not saw_sentinel:
        _force_empty_queue(self.input_queue)
    self.logger.debug('processor stopped')
def test_process_coord_empty(self):
    from tilequeue.process import process_coord
    from tilequeue.tile import coord_to_mercator_bounds

    coord = Coordinate(0, 0, 0)
    feature_layers = []
    post_process_data = {}
    formats = []
    unpadded_bounds = coord_to_mercator_bounds(coord)
    cut_coords = []
    buffer_cfg = {}

    tiles, extra = process_coord(
        coord, coord.zoom, feature_layers, post_process_data, formats,
        unpadded_bounds, cut_coords, buffer_cfg)

    self.assertEqual([], tiles)
    self.assertEqual({'size': {}}, extra)
def _make_tiles(self, shape, coord, metatile_zoom):
    from tilequeue.format import mvt_format
    from tilequeue.process import process_coord
    from tilequeue.tile import coord_children_range
    from tilequeue.tile import coord_to_mercator_bounds

    db_features = [dict(
        __id__=1,
        __geometry__=shape.wkb,
        __properties__={},
    )]

    nominal_zoom = coord.zoom + metatile_zoom
    unpadded_bounds = coord_to_mercator_bounds(coord)
    feature_layers = [dict(
        layer_datum=dict(
            name='fake_layer',
            geometry_types=[shape.geom_type],
            transform_fn_names=[],
            sort_fn_name=None,
            is_clipped=False,
        ),
        padded_bounds={shape.geom_type.lower(): unpadded_bounds},
        features=db_features,
    )]
    formats = [mvt_format]

    post_process_data = {}
    buffer_cfg = {}
    cut_coords = [coord]
    if nominal_zoom > coord.zoom:
        cut_coords.extend(coord_children_range(coord, nominal_zoom))

    def _output_fn(shape, props, fid, meta):
        return dict(fake='data', min_zoom=0)

    output_calc_mapping = dict(fake_layer=_output_fn)
    tiles, extra = process_coord(
        coord, nominal_zoom, feature_layers, post_process_data, formats,
        unpadded_bounds, cut_coords, buffer_cfg, output_calc_mapping)

    self.assertEqual(len(cut_coords), len(tiles))
    return tiles, cut_coords
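# A hedged usage sketch (an assumption, not from the source): driving the
# helper above with a one-level metatile, where the nominal zoom sits one
# level below the tile coordinate, so the result covers the coord itself plus
# its four children. The test method name is hypothetical.
def test_metatile_children_sketch(self):
    from shapely.geometry import Point

    shape = Point(0, 0)
    coord = Coordinate(zoom=0, column=0, row=0)
    tiles, cut_coords = self._make_tiles(shape, coord, metatile_zoom=1)
    # the z0 tile itself plus the 2x2 block of z1 children
    self.assertEqual(5, len(cut_coords))
    self.assertEqual(5, len(tiles))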
def test_cut_coord_exclusive(self):
    # test that cut coords are the only ones in the response, and that
    # the coordinate itself can be omitted.
    from tilequeue.process import process_coord
    from tilequeue.tile import coord_to_mercator_bounds
    from tilequeue.format import json_format

    coord = Coordinate(0, 0, 0)
    db_features = []
    cut_coords = [
        Coordinate(zoom=1, column=0, row=0),
        Coordinate(zoom=1, column=1, row=0),
        Coordinate(zoom=1, column=0, row=1),
    ]
    buffer_cfg = {}
    post_process_data = {}

    unpadded_bounds = coord_to_mercator_bounds(coord)
    feature_layers = [dict(
        layer_datum=dict(
            name='fake_layer',
            geometry_types=['Point'],
            transform_fn_names=[],
            sort_fn_name=None,
            is_clipped=False,
        ),
        padded_bounds=dict(point=unpadded_bounds),
        features=db_features,
    )]
    formats = [json_format]

    def _test_output_fn(*args):
        return dict(foo='bar', min_zoom=0)

    output_calc_mapping = dict(fake_layer=_test_output_fn)
    tiles, extra = process_coord(
        coord, coord.zoom, feature_layers, post_process_data, formats,
        unpadded_bounds, cut_coords, buffer_cfg, output_calc_mapping)

    self.assertEqual(len(cut_coords), len(tiles))
    self.assertNotIn(coord, [t['coord'] for t in tiles])
def test_process_coord_empty(self):
    from tilequeue.process import process_coord
    from tilequeue.tile import coord_to_mercator_bounds

    coord = Coordinate(0, 0, 0)
    feature_layers = []
    post_process_data = {}
    formats = []
    unpadded_bounds = coord_to_mercator_bounds(coord)
    cut_coords = [coord]
    buffer_cfg = {}

    def _test_output_fn(*args):
        return dict(foo='bar')

    output_calc_mapping = dict(fake_layer=_test_output_fn)
    tiles, extra = process_coord(
        coord, coord.zoom, feature_layers, post_process_data, formats,
        unpadded_bounds, cut_coords, buffer_cfg, output_calc_mapping)

    self.assertEqual([], tiles)
    self.assertEqual({'size': {}}, extra)
def handle_request(self, request):
    if (self.health_checker and
            self.health_checker.is_health_check(request)):
        return self.health_checker(request)
    request_data = parse_request_path(request.path, self.extensions)
    if request_data is None:
        return self.generate_404()
    layer_spec = request_data.layer_spec
    layer_data = parse_layer_spec(request_data.layer_spec,
                                  self.layer_config)
    if layer_data is None:
        return self.generate_404()
    coord = request_data.coord
    format = request_data.format

    if self.store and coord.zoom <= 20:
        # we either have a dynamic layer request, or it's a
        # request for a new tile that is not currently in the
        # tiles of interest, or it's for a request that's in the
        # tiles of interest that hasn't been generated, possibly
        # because a new prefix is used and all tiles haven't been
        # generated yet before making the switch
        # in any case, it makes sense to try and fetch the json
        # format from the store first
        tile_data = self.store.read_tile(coord, json_format, 'all')
        if tile_data is not None:
            # the json format exists in the store
            # we'll use it to generate the response
            tile_data = reformat_selected_layers(tile_data, layer_data,
                                                 coord, format)
            if layer_spec == 'all':
                # for the all layer, since the json format
                # existed, we should also save the requested
                # format too to allow the caches to serve it
                # directly in subsequent requests
                # we'll guard against re-saving json onto itself
                # though, which may be possible through a race
                # condition
                if format != json_format:
                    self.io_pool.apply_async(
                        async_store,
                        (self.store, tile_data, coord, format, 'all'))
                # additionally, we'll want to enqueue the tile
                # onto sqs to ensure that the other formats get
                # processed too.
                if self.sqs_queue:
                    self.io_pool.apply_async(
                        async_enqueue, (self.sqs_queue, coord,))
            return self.create_response(request, tile_data, format)

    # update the tiles of interest set with the coordinate
    if self.redis_cache_index:
        self.io_pool.apply_async(async_update_tiles_of_interest,
                                 (self.redis_cache_index, coord))

    # fetch data for all layers, even if the request was for a partial
    # set. this ensures that we can always store the result, allowing
    # for reuse, but also that any post-processing functions which
    # might have dependencies on multiple layers will still work
    # properly (e.g: buildings or roads layer being cut against
    # landuse).
    feature_data_all = self.data_fetcher(coord,
                                         self.layer_config.all_layers)
    formatted_tiles_all = process_coord(
        coord,
        feature_data_all['feature_layers'],
        self.post_process_data,
        [json_format],
        feature_data_all['unpadded_bounds'],
        feature_data_all['padded_bounds'],
        [], [])
    assert len(formatted_tiles_all) == 1, \
        'unexpected number of tiles: %d' % len(formatted_tiles_all)
    formatted_tile_all = formatted_tiles_all[0]
    tile_data_all = formatted_tile_all['tile']

    # store tile with data for all layers to the cache, so that we can
    # read it all back for the dynamic layer request above.
    if self.store and coord.zoom <= 20:
        self.io_pool.apply_async(
            async_store,
            (self.store, tile_data_all, coord, json_format, 'all'))

    # enqueue the coordinate to ensure other formats get processed
    if self.sqs_queue and coord.zoom <= 20:
        self.io_pool.apply_async(
            async_enqueue, (self.sqs_queue, coord,))

    if layer_spec == 'all':
        if format == json_format:
            # already done all the work, just need to return the tile
            # to the client.
            tile_data = tile_data_all
        else:
            # just need to format the data differently
            tile_data = reformat_selected_layers(
                tile_data_all, self.layer_config.all_layers, coord,
                format)
            # note that we want to store the formatted data too,
            # as this means that future requests can be serviced
            # directly from the store.
            if self.store and coord.zoom <= 20:
                self.io_pool.apply_async(
                    async_store, (
                        self.store, tile_data, coord, format, 'all'))
    else:
        # select the data that the user actually asked for from the
        # JSON/all tile that we just created.
        tile_data = reformat_selected_layers(
            tile_data_all, layer_data, coord, format)

    response = self.create_response(request, tile_data, format)
    return response
def handle_request(self, request):
    if (self.health_checker and
            self.health_checker.is_health_check(request)):
        return self.health_checker(request)
    request_data = parse_request_path(request.path, self.extensions)
    if request_data is None:
        return self.generate_404(request)
    layer_spec = request_data.layer_spec
    layer_data = parse_layer_spec(request_data.layer_spec,
                                  self.layer_config)
    if layer_data is None:
        return self.generate_404(request)
    coord = request_data.coord
    format = request_data.format

    if self.store and coord.zoom <= 20:
        # we either have a dynamic layer request, or it's a
        # request for a new tile that is not currently in the
        # tiles of interest, or it's for a request that's in the
        # tiles of interest that hasn't been generated, possibly
        # because a new prefix is used and all tiles haven't been
        # generated yet before making the switch
        # in any case, it makes sense to try and fetch the json
        # format from the store first
        tile_data = self.store.read_tile(coord, json_format, 'all')
        if tile_data is not None:
            # the json format exists in the store
            # we'll use it to generate the response
            tile_data = reformat_selected_layers(
                tile_data, layer_data, coord, format, self.buffer_cfg)
            if layer_spec == 'all':
                # for the all layer, since the json format
                # existed, we should also save the requested
                # format too to allow the caches to serve it
                # directly in subsequent requests
                # we'll guard against re-saving json onto itself
                # though, which may be possible through a race
                # condition
                if format != json_format:
                    self.io_pool.apply_async(
                        async_store,
                        (self.store, tile_data, coord, format, 'all'))
                # additionally, we'll want to enqueue the tile
                # onto sqs to ensure that the other formats get
                # processed too.
                if self.sqs_queue:
                    self.io_pool.apply_async(
                        async_enqueue, (self.sqs_queue, coord,))
            return self.create_response(
                request, 200, tile_data, format.mimetype)

    # update the tiles of interest set with the coordinate
    if self.redis_cache_index:
        self.io_pool.apply_async(async_update_tiles_of_interest,
                                 (self.redis_cache_index, coord))

    # fetch data for all layers, even if the request was for a partial
    # set. this ensures that we can always store the result, allowing
    # for reuse, but also that any post-processing functions which
    # might have dependencies on multiple layers will still work
    # properly (e.g: buildings or roads layer being cut against
    # landuse).
    feature_data_all = self.data_fetcher(
        coord, self.layer_config.all_layers)
    formatted_tiles_all, extra_data = process_coord(
        coord,
        feature_data_all['feature_layers'],
        self.post_process_data,
        [json_format],
        feature_data_all['unpadded_bounds'],
        [], [],
        self.buffer_cfg)
    assert len(formatted_tiles_all) == 1, \
        'unexpected number of tiles: %d' % len(formatted_tiles_all)
    formatted_tile_all = formatted_tiles_all[0]
    tile_data_all = formatted_tile_all['tile']

    # store tile with data for all layers to the cache, so that we can
    # read it all back for the dynamic layer request above.
    if self.store and coord.zoom <= 20:
        self.io_pool.apply_async(
            async_store,
            (self.store, tile_data_all, coord, json_format, 'all'))

    # enqueue the coordinate to ensure other formats get processed
    if self.sqs_queue and coord.zoom <= 20:
        self.io_pool.apply_async(
            async_enqueue, (self.sqs_queue, coord,))

    if layer_spec == 'all':
        if format == json_format:
            # already done all the work, just need to return the tile
            # to the client.
            tile_data = tile_data_all
        else:
            # just need to format the data differently
            tile_data = reformat_selected_layers(
                tile_data_all, self.layer_config.all_layers, coord,
                format, self.buffer_cfg)
            # note that we want to store the formatted data too,
            # as this means that future requests can be serviced
            # directly from the store.
            if self.store and coord.zoom <= 20:
                self.io_pool.apply_async(
                    async_store, (
                        self.store, tile_data, coord, format, 'all'))
    else:
        # select the data that the user actually asked for from the
        # JSON/all tile that we just created.
        tile_data = reformat_selected_layers(
            tile_data_all, layer_data, coord, format, self.buffer_cfg)

    response = self.create_response(
        request, 200, tile_data, format.mimetype)
    return response
def handle_request(self, request):
    if (self.health_checker and
            self.health_checker.is_health_check(request)):
        return self.health_checker(request)
    request_data = parse_request_path(request.path, self.extensions)
    if request_data is None:
        return self.generate_404(request)
    layer_spec = request_data.layer_spec
    layer_data = parse_layer_spec(request_data.layer_spec,
                                  self.layer_config)
    if layer_data is None:
        return self.generate_404(request)
    coord = request_data.coord
    format = request_data.format

    tile_data = self.reformat_from_stored_json(request_data, layer_data)
    if tile_data is not None:
        return self.create_response(
            request, 200, tile_data, format.mimetype)

    # update the tiles of interest set with the coordinate
    if self.redis_cache_index:
        self.io_pool.apply_async(async_update_tiles_of_interest,
                                 (self.redis_cache_index, coord))

    if self.using_metatiles():
        # make all formats when making metatiles
        wanted_formats = self.formats
    else:
        wanted_formats = [json_format]
        # add the request format, so that it gets created by the tile
        # render process and will be saved along with the JSON format.
        if format != json_format:
            wanted_formats.append(format)

    # fetch data for all layers, even if the request was for a partial
    # set. this ensures that we can always store the result, allowing
    # for reuse, but also that any post-processing functions which
    # might have dependencies on multiple layers will still work
    # properly (e.g: buildings or roads layer being cut against
    # landuse).
    feature_data_all = self.data_fetcher(
        coord, self.layer_config.all_layers)
    formatted_tiles_all, extra_data = process_coord(
        coord,
        feature_data_all['feature_layers'],
        self.post_process_data,
        wanted_formats,
        feature_data_all['unpadded_bounds'],
        [], [],
        self.buffer_cfg)
    assert len(formatted_tiles_all) == len(wanted_formats), \
        'unexpected number of tiles: %d, wanted %d' \
        % (len(formatted_tiles_all), len(wanted_formats))

    # store tile with data for all layers to the cache, so that we can
    # read it all back for the dynamic layer request above.
    self.store_tile(coord, wanted_formats, formatted_tiles_all)

    # enqueue the coordinate to ensure other formats get processed
    if self.sqs_queue and coord.zoom <= 20:
        self.io_pool.apply_async(
            async_enqueue, (self.sqs_queue, coord,))

    if layer_spec == 'all':
        tile_data = self.extract_tile_data(format, formatted_tiles_all)
    else:
        # select the data that the user actually asked for from the
        # JSON/all tile that we just created.
        json_data_all = self.extract_tile_data(
            json_format, formatted_tiles_all)
        tile_data = reformat_selected_layers(
            json_data_all, layer_data, coord, format, self.buffer_cfg)

    response = self.create_response(
        request, 200, tile_data, format.mimetype)
    return response
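# Hedged sketch (an assumption, not from the source) of the extract_tile_data
# helper the handler above relies on: pick the rendered bytes for one format
# out of the list returned by process_coord. The 'tile' key is taken from the
# call sites above; the 'format' key is an assumed part of each entry.
def extract_tile_data(self, format, formatted_tiles):
    for formatted_tile in formatted_tiles:
        if formatted_tile['format'] == format:
            return formatted_tile['tile']
    return None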
def handle_request(self, request):
    if (self.health_checker and
            self.health_checker.is_health_check(request)):
        return self.health_checker(request)
    request_data = parse_request_path(request.path)
    if request_data is None:
        return self.generate_404()
    layer_spec = request_data.layer_spec
    layer_data = parse_layer_spec(request_data.layer_spec,
                                  self.layer_config)
    if layer_data is None:
        return self.generate_404()
    coord = request_data.coord
    format = request_data.format

    if self.store and layer_spec != 'all' and coord.zoom <= 20:
        # we have a dynamic layer request
        # in this case, we should try to fetch the data from the
        # cache, and if present, prune the layers that aren't
        # necessary from there.
        tile_data = self.store.read_tile(coord, json_format)
        if tile_data is not None:
            # we were able to fetch the cached data
            # we'll need to decode it into the expected
            # feature_layers shape, prune the layers that aren't
            # needed, and then format the data
            feature_layers = decode_json_tile_for_layers(
                tile_data, layer_data)
            bounds_merc = coord_to_mercator_bounds(coord)
            bounds_wgs84 = (
                mercator_point_to_wgs84(bounds_merc[:2]) +
                mercator_point_to_wgs84(bounds_merc[2:4]))
            tile_data_file = StringIO()
            format.format_tile(tile_data_file, feature_layers, coord,
                               bounds_merc, bounds_wgs84)
            tile_data = tile_data_file.getvalue()
            response = self.create_response(request, tile_data, format)
            return response

    feature_data = self.data_fetcher(coord, layer_data)
    formatted_tiles = process_coord(
        coord,
        feature_data['feature_layers'],
        self.post_process_data,
        [format],
        feature_data['unpadded_bounds'],
        feature_data['padded_bounds'],
        [])
    assert len(formatted_tiles) == 1, \
        'unexpected number of tiles: %d' % len(formatted_tiles)
    formatted_tile = formatted_tiles[0]
    tile_data = formatted_tile['tile']

    # we only want to store requests for the all layer
    if self.store and layer_spec == 'all' and coord.zoom <= 20:
        self.io_pool.apply_async(
            async_store, (self.store, tile_data, coord, format))

    # update the tiles of interest set with the new coordinate
    if self.redis_cache_index:
        self.io_pool.apply_async(async_update_tiles_of_interest,
                                 (self.redis_cache_index, coord))

    response = self.create_response(request, tile_data, format)
    return response
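# Hedged sketch (an assumption, not from the source) of the module-level
# helpers the handlers above dispatch onto the io_pool. Only the call sites
# fix these signatures; the store/index method names invoked in the bodies
# are assumed, and the layer default reconciles the 4- and 5-argument calls.
def async_store(store, tile_data, coord, format, layer='all'):
    # runs on the io_pool: persist one formatted tile to the store
    # (write_tile is an assumed store method name)
    store.write_tile(tile_data, coord, format, layer)


def async_update_tiles_of_interest(redis_cache_index, coord):
    # runs on the io_pool: mark the coord as a tile of interest
    # (index_coord is an assumed cache index method name)
    redis_cache_index.index_coord(coord)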