def _request_with_retries(request_func, progress_check):
    """
    Call *request_func* (a zero-argument callable that issues an HTTP request),
    retrying on ConnectionError.

    Up to three attempts are made; each failure is printed and logged, and the
    script sleeps ``progress_check * 2`` seconds before retrying.  After the
    third consecutive failure a SuccessiveConnectionError is raised.

    :param request_func: zero-argument callable performing the request
    :param float progress_check: base interval in seconds; retries wait twice this
    :return: whatever request_func returns on success
    :raises SuccessiveConnectionError: after 3 consecutive connection errors
    """
    attempts = 0
    while attempts < 4:
        try:
            return request_func()
        except ConnectionError:
            attempts += 1
            conn_err_message = 'Encountered a connection error ({0}).'.format(attempts)
            print(conn_err_message)
            logging.info(conn_err_message)
            if attempts == 3:
                # give up after three consecutive failures
                abort_message = 'Encountered a connection error {0} times. Aborting script.'.format(attempts)
                print(abort_message)
                logging.info(abort_message)
                raise SuccessiveConnectionError
            # give the server some breathing room before the next attempt
            time.sleep(progress_check * 2)


def execute_seed_request(gwc_url, gs_user, gs_pwd, cache_data, grid='EPSG:4326',
                         tile_format='image/png8', zoom_start=0, zoom_stop=3,
                         threads=1, progress_check=5, exclude_layers=(),
                         latest_is_failure=False, seed_type='seed'):
    """
    Generate seeding xml and post it to Geoserver for each layer in a workspace.

    The starting zoom level defaults to 0. The progress of the tile caching
    will be printed to the console. In addition, a rough record of the layers
    cached is logged to seeding.log.

    :param str gwc_url: geoserver geowebcache rest url
    :param str gs_user: geoserver username
    :param str gs_pwd: geoserver password
    :param list cache_data: list of tuples containing the workspace name and a
        dictionary; the dictionary contains keys: layer_name, style_name

        .. seealso :: :function:'get_layer_styles'

    :param str grid: full name of the gridset to be used
    :param str tile_format: format that the tile images should be generated in
    :param int zoom_start: the minimum zoom level that should be cached
    :param int zoom_stop: the maximum zoom level that should be cached
    :param int threads: number of threads to be used when generating a tile cache
    :param float progress_check: interval in seconds between checks to GeoServer
        for progress on a caching job
    :param exclude_layers: iterable containing the names of layers from the
        workspace(s) that should not be cached; defaults to an empty tuple
    :type exclude_layers: list or tuple
    :param bool latest_is_failure: when True, keep the existing progress
        database (append to it) instead of destroying and recreating it
    :param str seed_type: seeding mode passed through to GeoWebCache
        (e.g. 'seed', 'reseed', or 'truncate')
    :return: requests objects from posting the seed requests
    :rtype: list
    """
    # sparrow-flowline-reusable:51N2043963353 - 6.67 GB...
    # need more disk quota with zoom_stop=10

    # setup some basic logging and the progress-tracking database
    db = SqliteDB()
    if not latest_is_failure:
        # create a new database if the last script run worked;
        # otherwise append to the existing database
        try:
            db.destroy_db()
        except Exception:
            # best-effort: the database may simply not exist yet
            pass
    db.create_db()
    logging.basicConfig(filename='seed_log.log',
                        filemode='w',
                        level=logging.INFO,
                        format='%(asctime)s %(message)s'
                        )
    request_resps = []
    job_ids_with_tiles = []
    for cache_datum in cache_data:
        ws_name, layer_params = cache_datum
        layer_count = 'Total layers for {workspace_name}: {layer_count}'.format(workspace_name=ws_name,
                                                                                layer_count=len(layer_params)
                                                                                )
        logging.info(layer_count)
        for layer_param in layer_params:
            layer_name = layer_param['layer_name']
            started = 'Started - {workspace}:{layer}'.format(workspace=ws_name, layer=layer_name)
            logging.info(started)
            if layer_name in exclude_layers:
                finished = 'Did not cache - {workspace}:{layer}'.format(workspace=ws_name, layer=layer_name)
                # NOTE(review): for excluded layers the "response" recorded is
                # this status string rather than a requests object
                seed_request = finished
                print(datetime.datetime.now())
                print(finished)
            else:
                style_name = layer_param['style_name']
                sp_gwc = GeoWebCacheSetUp(gwc_url, gs_user, gs_pwd, ws_name, layer_name,
                                          cert_verify=False
                                          )
                # deal with overlay layers that do not have image/png8 as a caching option
                if ws_name in OVERLAY_WORKSPACES:
                    tiling_config = sp_gwc.get_tile_cache_config()
                    config_content = tiling_config[1]
                    mime_formats = config_content['GeoServerLayer'].get('mimeFormats', [])
                    # check if the requested tile format is one of the
                    # mimeFormats; if it is not, add it
                    if tile_format not in mime_formats:
                        mime_formats.append(tile_format)
                    updated_cache_config_xml = sp_gwc.create_disable_enable_cache_xml(format_list=tuple(mime_formats),
                                                                                      style=style_name,
                                                                                      gridset_name=grid
                                                                                      )
                    update_cache_config = sp_gwc.disable_or_enable_cache(payload=updated_cache_config_xml)
                    update_config_message = 'Updated layer parameters for {workspace}:{layer} - {status_code}'.format(workspace=ws_name,
                                                                                                                     layer=layer_name,
                                                                                                                     status_code=update_cache_config.status_code
                                                                                                                     )
                    print(update_config_message)
                    logging.info(update_config_message)
                seed_xml = sp_gwc.create_seed_xml(style=None,
                                                  tile_format=tile_format,
                                                  gridset_id=grid,
                                                  zoom_start=zoom_start,
                                                  zoom_stop=zoom_stop,
                                                  threads=threads,
                                                  seed_type=seed_type
                                                  )
                # post the seed request, retrying on transient connection errors
                seed_request = _request_with_retries(lambda: sp_gwc.seed_request(seed_xml),
                                                     progress_check)
                url_message = 'Request URL: {0}'.format(seed_request.url)
                status_code_message = 'Status: {0}'.format(seed_request.status_code)
                print(url_message)
                print(status_code_message)
                # poll GeoServer until the task list for this layer is empty
                array_length = 1
                while array_length > 0:
                    status = _request_with_retries(sp_gwc.query_task_status,
                                                   progress_check)
                    print(datetime.datetime.now())
                    status_message = '{workspace}:{layer} - {progress}'.format(workspace=ws_name,
                                                                              layer=layer_name,
                                                                              progress=status[1]
                                                                              )
                    print(status_message)
                    long_array = status[1]['long-array-array']
                    try:
                        # first thread's entry carries the tile count and job id
                        thread0 = long_array[0]
                        tile_count = thread0[1]
                        job_id = thread0[3]
                        job_tile_count = (job_id, tile_count)
                        if job_tile_count not in job_ids_with_tiles:
                            job_ids_with_tiles.append(job_tile_count)
                    except IndexError:
                        # no running tasks reported; the loop will end below
                        pass
                    array_length = len(long_array)
                    time.sleep(progress_check)
            finished = 'Finished - {workspace}:{layer}'.format(workspace=ws_name, layer=layer_name)
            complete_dt = str(datetime.datetime.now())
            # keep track of the layers that have already been cached;
            # save time by checking this database when re-running after a failure
            db.insert_data(workspace=ws_name, layer=layer_name, complete_datetime=complete_dt)
            logging.info(finished)
            request_resps.append(seed_request)
    # summarize the tile counts observed across all seeding jobs
    tile_counts = []
    for job_tile_tuple in job_ids_with_tiles:
        tile_count = float(job_tile_tuple[1])
        tile_counts.append(tile_count)
    print(tile_counts)
    tile_sum = sum(tile_counts)
    print('Total tiles: {0}'.format(tile_sum))
    return request_resps
def tile_cache(self, zoom_start=0, zoom_end=12, threads=2, check_interval=10,
               layer_data=AFINCH_LAYERS, seed_type='seed',
               gridset_id='EPSG:900913'):
    """
    Execute the tile cache for the specified workspaces.

    :param int zoom_start: starting zoom level
    :param int zoom_end: ending zoom level
    :param int threads: number of threads to be used for tile caching
    :param float check_interval: frequency in seconds to check on tile caching
        progress for a layer
    :param layer_data: GLRI AFINCH layer parameters
    :type layer_data: list of named tuples
    :param str seed_type: specify 'seed', 'reseed', or 'truncate'
    :param str gridset_id: projection that should be cached; this should be
        the same as the map projection used in the app
    :return: urls and status codes of posted seed requests
    :rtype: list
    """
    seed_requests = []
    for layer_datum in layer_data:
        # skip layers that are not flagged for caching
        if not layer_datum.tile_cache:
            continue
        workspace_name = layer_datum.workspace
        layer_name = layer_datum.lyr_name
        available_styles = layer_datum.styles
        configured_style = layer_datum.cache_style
        # choose the style for the seed request: the layer's first style is
        # represented as None (the server default); an explicitly configured
        # alternate style is used as-is; otherwise fall back to the first style
        if configured_style == available_styles[0]:
            seed_style = None
        elif configured_style is not None:
            seed_style = configured_style
        else:
            seed_style = layer_datum.styles[0]
        gwc = GeoWebCacheSetUp(self.gwc_host, self.gs_user, self.gs_pwd,
                               workspace_name, layer_name)
        seed_xml = gwc.create_seed_xml(seed_style,
                                       zoom_start=zoom_start,
                                       zoom_stop=zoom_end,
                                       threads=threads,
                                       gridset_id=gridset_id,
                                       seed_type=seed_type)
        posted = gwc.seed_request(seed_xml)
        posted_url = posted.url
        posted_status = posted.status_code
        print(('Posted seed xml to {0} '
               'with a {1} status code.').format(posted_url, posted_status))
        print('Caching progress on {0}:'.format(layer_name))
        # poll GeoServer until it reports no remaining tasks for this layer
        while True:
            progress = gwc.query_task_status()
            print('{0}: {1} - {2}'.format(datetime.datetime.now(),
                                          layer_name,
                                          progress[1]))
            remaining_tasks = progress[1]['long-array-array']
            # sleep after every poll, including the final (empty) one,
            # matching the cadence of the progress checks
            time.sleep(check_interval)
            if not remaining_tasks:
                break
        seed_requests.append((posted_url, posted_status))
    return seed_requests
# NOTE(review): this chunk used `argparse` without importing it (NameError if
# executed); the import is added below. It also appears duplicated later in
# the file -- TODO confirm which copy is live and delete the other.
import argparse

from py_geoserver_rest_requests import GeoWebCacheSetUp

from params import USER, PWD


if __name__ == '__main__':
    """
    Run from the command line to terminate all seeding tasks.
    """
    parser = argparse.ArgumentParser()
    parser.add_argument('--server_name', type=str)
    args = parser.parse_args()
    # when --server_name is omitted argparse leaves the attribute as None,
    # so .lower() raises AttributeError; treat that as "not supplied"
    try:
        server_name = args.server_name.lower()
    except AttributeError:
        server_name = None
    if not server_name:
        #server_name = 'http://cida-eros-sparrowdev.er.usgs.gov:8081/sparrowgeoserver'
        server_name = 'http://cidasdpdaspwgeo:8080/sparrowgeoserver'
    gwc_url = '{server_name}/gwc/rest'.format(server_name=server_name)
    # workspace and layer are irrelevant for a kill-all request
    sp_gwc = GeoWebCacheSetUp(gwc_url, USER, PWD, None, None)
    # abort all seeding tasks
    abort = sp_gwc.abort_seed_request(kill_all=True)
    print(abort.status_code)
''' import argparse from py_geoserver_rest_requests import GeoWebCacheSetUp from params import USER, PWD if __name__ == '__main__': """ Run from the command line to terminate all seeding tasks. """ parser = argparse.ArgumentParser() parser.add_argument('--server_name', type=str) args = parser.parse_args() try: server_name = args.server_name.lower() except AttributeError: server_name = None if server_name: server_name = server_name else: #server_name = 'http://cida-eros-sparrowdev.er.usgs.gov:8081/sparrowgeoserver' server_name = 'http://cidasdpdaspwgeo:8080/sparrowgeoserver' USER = USER PWD = PWD gwc_url = '{server_name}/gwc/rest'.format(server_name=server_name) sp_gwc = GeoWebCacheSetUp(gwc_url, USER, PWD, None, None) # abort all seeding tasks abort = sp_gwc.abort_seed_request(kill_all=True) print(abort.status_code)