def _handle(self, browse_layer_id, browse_type):
    """Delete a complete browse layer together with all of its browses.

    The layer is identified either by ``browse_layer_id`` or, when that is
    not given, by ``browse_type``.  All browses of the layer are removed
    from the database, their optimized image files are deleted from disk
    and finally the layer itself is purged via ``delete_browse_layer``.

    Raises CommandError if no matching browse layer exists.
    """
    from ngeo_browse_server.control.queries import remove_browse

    # query the browse layer
    if browse_layer_id:
        try:
            browse_layer_model = BrowseLayer.objects.get(id=browse_layer_id)
        except BrowseLayer.DoesNotExist:
            logger.error("Browse layer '%s' does not exist"
                         % browse_layer_id)
            raise CommandError("Browse layer '%s' does not exist"
                               % browse_layer_id)
    else:
        try:
            browse_layer_model = BrowseLayer.objects.get(
                browse_type=browse_type)
        except BrowseLayer.DoesNotExist:
            logger.error("Browse layer with browse type '%s' does "
                         "not exist" % browse_type)
            raise CommandError("Browse layer with browse type '%s' does "
                               "not exist" % browse_type)

    # get all browses of browse layer
    browses_qs = Browse.objects.all().filter(browse_layer=browse_layer_model)

    paths_to_delete = []
    seed_areas = []
    with transaction.commit_on_success():
        with transaction.commit_on_success(using="mapcache"):
            # fix: count() issues a database query per call; evaluate once
            browse_count = browses_qs.count()
            logger.info("Deleting '%d' browse%s from database."
                        % (browse_count, "s" if browse_count > 1 else ""))

            # go through all browses to be deleted
            for browse_model in browses_qs:
                _, filename = remove_browse(
                    browse_model, browse_layer_model,
                    browse_model.coverage_id, seed_areas, unseed=False
                )
                paths_to_delete.append(filename)

            # loop through optimized browse images and delete them
            # This is done at this point to make sure a rollback is possible
            # if there is an error while deleting the browses and coverages
            for file_path in paths_to_delete:
                if exists(file_path):
                    remove(file_path)
                    logger.info("Optimized browse image deleted: %s"
                                % file_path)
                else:
                    logger.warning(
                        "Optimized browse image to be deleted not found "
                        "in path: %s" % file_path)

            delete_browse_layer(browse_layer_model, purge=True)
def _handle(self, browse_layer_id, browse_type):
    """Delete a browse layer and every browse it contains.

    Looks the layer up by ``browse_layer_id`` when given, otherwise by
    ``browse_type``.  Removes all browses from the database, deletes their
    optimized images from disk and purges the layer itself.

    Raises CommandError if the browse layer cannot be found.
    """
    from ngeo_browse_server.control.queries import remove_browse

    # query the browse layer
    if browse_layer_id:
        try:
            browse_layer_model = BrowseLayer.objects.get(id=browse_layer_id)
        except BrowseLayer.DoesNotExist:
            logger.error(
                "Browse layer '%s' does not exist" % browse_layer_id
            )
            raise CommandError(
                "Browse layer '%s' does not exist" % browse_layer_id
            )
    else:
        try:
            browse_layer_model = BrowseLayer.objects.get(
                browse_type=browse_type
            )
        except BrowseLayer.DoesNotExist:
            logger.error("Browse layer with browse type '%s' does "
                         "not exist" % browse_type)
            raise CommandError("Browse layer with browse type '%s' does "
                               "not exist" % browse_type)

    # get all browses of browse layer
    browses_qs = Browse.objects.all().filter(browse_layer=browse_layer_model)

    paths_to_delete = []
    seed_areas = []
    with transaction.commit_on_success():
        with transaction.commit_on_success(using="mapcache"):
            # fix: avoid issuing the COUNT query twice for one log line
            num_browses = browses_qs.count()
            plural = "s" if num_browses > 1 else ""
            logger.info("Deleting '%d' browse%s from database."
                        % (num_browses, plural))

            # go through all browses to be deleted
            for browse_model in browses_qs:
                _, filename = remove_browse(
                    browse_model, browse_layer_model,
                    browse_model.coverage_id, seed_areas, unseed=False
                )
                paths_to_delete.append(filename)

            # loop through optimized browse images and delete them
            # This is done at this point to make sure a rollback is possible
            # if there is an error while deleting the browses and coverages
            for file_path in paths_to_delete:
                if exists(file_path):
                    remove(file_path)
                    logger.info("Optimized browse image deleted: %s"
                                % file_path)
                else:
                    logger.warning("Optimized browse image to be deleted "
                                   "not found in path: %s" % file_path)

            delete_browse_layer(browse_layer_model, purge=True)
def _handle(self, start, end, browse_layer_id, browse_type):
    """Delete browses of a browse layer, optionally restricted in time.

    The layer is identified by ``browse_layer_id`` or, failing that, by
    ``browse_type``.  Browses falling within the optional ``start``/``end``
    interval are removed from the database, their optimized image files
    are deleted from disk and the MapCache areas still containing browses
    are re-seeded.

    Raises CommandError if no matching browse layer exists.
    """
    from ngeo_browse_server.control.queries import remove_browse

    # query the browse layer
    if browse_layer_id:
        try:
            browse_layer_model = BrowseLayer.objects.get(
                id=browse_layer_id
            )
        except BrowseLayer.DoesNotExist:
            logger.error(
                "Browse layer '%s' does not exist" % browse_layer_id
            )
            raise CommandError(
                "Browse layer '%s' does not exist" % browse_layer_id
            )
    else:
        try:
            browse_layer_model = BrowseLayer.objects.get(
                browse_type=browse_type
            )
        except BrowseLayer.DoesNotExist:
            # fix: message previously read "browse type'%s'" (missing
            # space, inconsistent with the by-id message above)
            logger.error("Browse layer with browse type '%s' does "
                         "not exist" % browse_type)
            raise CommandError("Browse layer with browse type '%s' does "
                               "not exist" % browse_type)

    # get all browses of browse layer
    browses_qs = Browse.objects.all().filter(
        browse_layer=browse_layer_model
    )

    # apply start/end filter
    if start and not end:
        browses_qs = browses_qs.filter(start_time__gte=start)
    elif end and not start:
        browses_qs = browses_qs.filter(end_time__lte=end)
    elif start and end:
        browses_qs = browses_qs.filter(
            start_time__gte=start, end_time__lte=end
        )

    paths_to_delete = []
    seed_areas = []
    with transaction.commit_on_success():
        with transaction.commit_on_success(using="mapcache"):
            # fix: count() issues a database query per call; evaluate once
            browse_count = browses_qs.count()
            logger.info("Deleting '%d' browse%s from database."
                        % (browse_count, "s" if browse_count > 1 else ""))

            # go through all browses to be deleted
            for browse_model in browses_qs:
                _, filename = remove_browse(
                    browse_model, browse_layer_model,
                    browse_model.coverage_id, seed_areas
                )
                paths_to_delete.append(filename)

            # loop through optimized browse images and delete them
            # This is done at this point to make sure a rollback is possible
            # if there is an error while deleting the browses and coverages
            for file_path in paths_to_delete:
                if exists(file_path):
                    remove(file_path)
                    logger.info("Optimized browse image deleted: %s"
                                % file_path)
                else:
                    logger.warning("Optimized browse image to be deleted "
                                   "not found in path: %s" % file_path)

    # only if either start or end is present browses are left
    if start or end:
        # drop seed areas that lie completely inside the deleted interval;
        # area layout is (minx, miny, maxx, maxy, start_time, end_time)
        if start:
            if end:
                seed_areas = [
                    area for area in seed_areas
                    if not (area[4] >= start and area[5] <= end)
                ]
            else:
                seed_areas = [
                    area for area in seed_areas if not (area[4] >= start)
                ]
        else:
            seed_areas = [
                area for area in seed_areas if not (area[5] <= end)
            ]

        for minx, miny, maxx, maxy, start_time, end_time in seed_areas:
            try:
                # seed MapCache synchronously
                # TODO: maybe replace this with an async solution
                seed_mapcache(tileset=browse_layer_model.id,
                              grid=browse_layer_model.grid,
                              minx=minx, miny=miny, maxx=maxx, maxy=maxy,
                              minzoom=browse_layer_model.lowest_map_level,
                              maxzoom=browse_layer_model.highest_map_level,
                              start_time=start_time,
                              end_time=end_time,
                              delete=False,
                              **get_mapcache_seed_config())
                logger.info("Successfully finished seeding.")
            except Exception as e:
                # fix: replaced Python-2-only "except Exception, e" syntax
                # and the deprecated logger.warn alias
                logger.warning("Seeding failed: %s" % str(e))
if strategy == "merge" and timedelta < threshold: if previous_time > current_time: # TODO: raise exception? pass rect_ds = System.getRegistry().getFromFactory( "resources.coverages.wrappers.EOCoverageFactory", {"obj_id": existing_browse_model.coverage_id} ) merge_footprint = rect_ds.getFootprint() merge_with = rect_ds.getData().getLocation().getPath() replaced_time_interval = (existing_browse_model.start_time, existing_browse_model.end_time) _, _ = remove_browse(existing_browse_model, browse_layer, coverage_id, seed_areas, config=config) replaced = False logger.debug("Existing browse found, merging it.") else: # perform replacement replaced_time_interval = (existing_browse_model.start_time, existing_browse_model.end_time) replaced_extent, replaced_filename = remove_browse( existing_browse_model, browse_layer, coverage_id, seed_areas, config=config ) replaced = True logger.info("Existing browse found, replacing it.") else: # A browse with that identifier does not exist, so just create a new one
def _handle(self, start, end, browse_layer_id, browse_type):
    """Delete browses of a browse layer within an optional time window.

    Resolves the layer from ``browse_layer_id`` or ``browse_type``,
    deletes the matching browses from the database together with their
    optimized images on disk, and re-seeds the MapCache areas that still
    contain browses afterwards.

    Raises CommandError if no matching browse layer exists.
    """
    from ngeo_browse_server.control.queries import remove_browse

    # query the browse layer
    if browse_layer_id:
        try:
            browse_layer_model = BrowseLayer.objects.get(
                id=browse_layer_id)
        except BrowseLayer.DoesNotExist:
            logger.error("Browse layer '%s' does not exist"
                         % browse_layer_id)
            raise CommandError("Browse layer '%s' does not exist"
                               % browse_layer_id)
    else:
        try:
            browse_layer_model = BrowseLayer.objects.get(
                browse_type=browse_type)
        except BrowseLayer.DoesNotExist:
            # fix: message previously read "browse type'%s'" (missing space)
            logger.error("Browse layer with browse type '%s' does "
                         "not exist" % browse_type)
            raise CommandError("Browse layer with browse type '%s' does "
                               "not exist" % browse_type)

    # get all browses of browse layer
    browses_qs = Browse.objects.all().filter(
        browse_layer=browse_layer_model)

    # apply start/end filter; either bound may be absent
    time_filters = {}
    if start:
        time_filters["start_time__gte"] = start
    if end:
        time_filters["end_time__lte"] = end
    if time_filters:
        browses_qs = browses_qs.filter(**time_filters)

    paths_to_delete = []
    seed_areas = []
    with transaction.commit_on_success():
        with transaction.commit_on_success(using="mapcache"):
            # fix: avoid issuing the COUNT query twice for one log line
            num_browses = browses_qs.count()
            logger.info("Deleting '%d' browse%s from database."
                        % (num_browses, "s" if num_browses > 1 else ""))

            # go through all browses to be deleted
            for browse_model in browses_qs:
                _, filename = remove_browse(
                    browse_model, browse_layer_model,
                    browse_model.coverage_id, seed_areas)
                paths_to_delete.append(filename)

            # loop through optimized browse images and delete them
            # This is done at this point to make sure a rollback is possible
            # if there is an error while deleting the browses and coverages
            for file_path in paths_to_delete:
                if exists(file_path):
                    remove(file_path)
                    logger.info("Optimized browse image deleted: %s"
                                % file_path)
                else:
                    logger.warning("Optimized browse image to be deleted "
                                   "not found in path: %s" % file_path)

    # only if either start or end is present browses are left
    if start or end:
        def _fully_deleted(area):
            # area layout: (minx, miny, maxx, maxy, start_time, end_time)
            if start and end:
                return area[4] >= start and area[5] <= end
            if start:
                return area[4] >= start
            return area[5] <= end

        # keep only areas not entirely covered by the deleted interval
        seed_areas = [area for area in seed_areas
                      if not _fully_deleted(area)]

        for minx, miny, maxx, maxy, start_time, end_time in seed_areas:
            try:
                # seed MapCache synchronously
                # TODO: maybe replace this with an async solution
                seed_mapcache(tileset=browse_layer_model.id,
                              grid=browse_layer_model.grid,
                              minx=minx, miny=miny, maxx=maxx, maxy=maxy,
                              minzoom=browse_layer_model.lowest_map_level,
                              maxzoom=browse_layer_model.highest_map_level,
                              start_time=start_time,
                              end_time=end_time,
                              delete=False,
                              **get_mapcache_seed_config())
                logger.info("Successfully finished seeding.")
            except Exception as e:
                # fix: Python-2-only "except Exception, e" syntax and the
                # deprecated logger.warn alias replaced
                logger.warning("Seeding failed: %s" % str(e))
def import_browse(p, browse, browse_report_model, browse_layer_model, crs,
                  seed_areas, config):
    """Import a single browse from the package ``p`` into a browse layer.

    Extracts the browse image to its optimized location, reads its band
    count (via GDAL) and footprint, and creates the browse — replacing an
    already existing browse with the same identifier if present.

    Returns an IngestBrowseResult, or an IngestBrowseReplaceResult when an
    existing browse was replaced.

    Raises ImportException when the output file already exists but is not
    the file being replaced, or when the extracted file cannot be opened.
    """
    filename = browse.file_name
    coverage_id = splitext(filename)[0]
    footprint_filename = coverage_id + ".wkb"

    logger.info("Importing browse with data file '%s' and metadata file "
                "'%s'." % (filename, footprint_filename))

    replaced = False
    replaced_filename = None

    existing_browse_model = get_existing_browse(browse.browse_identifier,
                                                coverage_id,
                                                browse_layer_model.id)
    if existing_browse_model:
        logger.info("Existing browse found, replacing it.")
        replaced_extent, replaced_filename = remove_browse(
            existing_browse_model, browse_layer_model, coverage_id,
            seed_areas, config=config
        )
        replaced = True
    else:
        # A browse with that identifier does not exist, so just create a new one
        logger.info("Creating new browse.")

    output_filename = get_optimized_path(filename, browse_layer_model.id,
                                         config=config)

    # refuse to overwrite an existing file unless it is exactly the file
    # being replaced (simplified but equivalent to the original condition)
    if exists(output_filename) and (
            not replaced_filename
            or not samefile(output_filename, replaced_filename)):
        raise ImportException("Output file '%s' already exists and is not to "
                              "be replaced." % output_filename)

    with FileTransaction((output_filename, replaced_filename)):
        if not exists(dirname(output_filename)):
            makedirs(dirname(output_filename))

        p.extract_browse_file(filename, output_filename)

        # TODO: find out num bands and footprint
        ds = gdal.Open(output_filename)
        if ds is None:
            # fix: gdal.Open returns None on failure, which previously
            # surfaced as an obscure AttributeError on RasterCount
            raise ImportException("Failed to open extracted browse file "
                                  "'%s'." % output_filename)
        num_bands = ds.RasterCount
        ds = None  # release the GDAL dataset handle

        footprint = p.get_footprint(footprint_filename)

        extent, time_interval = create_browse(
            browse, browse_report_model, browse_layer_model, coverage_id,
            crs, replaced, footprint, num_bands, output_filename,
            seed_areas, config=config
        )

    if not replaced:
        return IngestBrowseResult(browse.browse_identifier, extent,
                                  time_interval)
    else:
        replaced_time_interval = (existing_browse_model.start_time,
                                  existing_browse_model.end_time)
        return IngestBrowseReplaceResult(browse.browse_identifier, extent,
                                         time_interval, replaced_extent,
                                         replaced_time_interval)
if previous_time > current_time: # TODO: raise exception? pass rect_ds = System.getRegistry().getFromFactory( "resources.coverages.wrappers.EOCoverageFactory", {"obj_id": existing_browse_model.coverage_id} ) merge_footprint = rect_ds.getFootprint() merge_with = rect_ds.getData().getLocation().getPath() replaced_time_interval = (existing_browse_model.start_time, existing_browse_model.end_time) _, _ = remove_browse( existing_browse_model, browse_layer, coverage_id, seed_areas, config=config ) replaced = False logger.debug("Existing browse found, merging it.") elif strategy == "skip" and current_time <= previous_time: logger.debug("Existing browse found and not older, skipping.") return IngestBrowseSkipResult(parsed_browse.browse_identifier) else: # perform replacement replaced_time_interval = (existing_browse_model.start_time, existing_browse_model.end_time) replaced_extent, replaced_filename = remove_browse(
def import_browse(p, browse, browse_report_model, browse_layer_model, crs,
                  seed_areas, config):
    """Import one browse from package ``p`` into ``browse_layer_model``.

    The browse image is extracted to its optimized path, its band count
    and footprint are determined, and the browse is created in the
    database.  An existing browse with the same identifier is replaced.

    Returns an IngestBrowseResult (new browse) or an
    IngestBrowseReplaceResult (replacement).

    Raises ImportException when the output file already exists but is not
    the file being replaced, or when the extracted file cannot be opened.
    """
    filename = browse.file_name
    coverage_id = splitext(filename)[0]
    footprint_filename = coverage_id + ".wkb"

    logger.info(
        "Importing browse with data file '%s' and metadata file '%s'."
        % (filename, footprint_filename))

    replaced = False
    replaced_filename = None

    existing_browse_model = get_existing_browse(browse.browse_identifier,
                                                coverage_id,
                                                browse_layer_model.id)
    if existing_browse_model:
        logger.info("Existing browse found, replacing it.")
        replaced_extent, replaced_filename = remove_browse(
            existing_browse_model, browse_layer_model, coverage_id,
            seed_areas, config=config)
        replaced = True
    else:
        # A browse with that identifier does not exist, so just create a new one
        logger.info("Creating new browse.")

    output_filename = get_optimized_path(filename, browse_layer_model.id,
                                         config=config)

    # bail out if the target file exists and is not the one being replaced
    if (exists(output_filename)
            and ((replaced_filename
                  and not samefile(output_filename, replaced_filename))
                 or not replaced_filename)):
        raise ImportException("Output file '%s' already exists and is not to "
                              "be replaced." % output_filename)

    with FileTransaction((output_filename, replaced_filename)):
        if not exists(dirname(output_filename)):
            makedirs(dirname(output_filename))

        p.extract_browse_file(filename, output_filename)

        # TODO: find out num bands and footprint
        ds = gdal.Open(output_filename)
        if ds is None:
            # fix: gdal.Open signals failure by returning None; fail with
            # a clear error instead of an AttributeError on the next line
            raise ImportException("Failed to open extracted browse file "
                                  "'%s'." % output_filename)
        num_bands = ds.RasterCount
        ds = None  # drop the reference to close the GDAL dataset

        footprint = p.get_footprint(footprint_filename)

        extent, time_interval = create_browse(browse, browse_report_model,
                                              browse_layer_model,
                                              coverage_id, crs, replaced,
                                              footprint, num_bands,
                                              output_filename, seed_areas,
                                              config=config)

    if not replaced:
        return IngestBrowseResult(browse.browse_identifier, extent,
                                  time_interval)
    else:
        replaced_time_interval = (existing_browse_model.start_time,
                                  existing_browse_model.end_time)
        return IngestBrowseReplaceResult(browse.browse_identifier, extent,
                                         time_interval, replaced_extent,
                                         replaced_time_interval)