def ingest_browse_report(parsed_browse_report, do_preprocessing=True, config=None):
    """ Ingest a browse report: resolve the browse layer for the report's
    browse type, set up the (optional) preprocessor from layer/config values,
    and ingest every browse of the report in its own transaction, seeding
    MapCache for each successfully ingested browse.

    :param parsed_browse_report: parsed browse report; iterable over its
        parsed browses and carrying ``browse_type``, ``responsible_org_name``
        and ``date_time``
    :param do_preprocessing: when true, an :class:`NGEOPreProcessor` is
        configured; otherwise no preprocessing is performed
    :param config: optional configuration passed through to the config helpers
    :returns: nothing is returned from the visible code path; results are
        accumulated in an :class:`IngestBrowseReportResult`
    :raises IngestionException: if no browse layer exists for the report's
        browse type
    """
    # initialize the EOxServer system/registry/configuration
    System.init()

    try:
        # get the according browse layer
        browse_type = parsed_browse_report.browse_type
        browse_layer = models.BrowseLayer.objects.get(browse_type=browse_type)
    except models.BrowseLayer.DoesNotExist:
        logger.warn("Browse layer with browse type '%s' does not "
                    "exist." % parsed_browse_report.browse_type)
        raise IngestionException("Browse layer with browse type '%s' does not "
                                 "exist." % parsed_browse_report.browse_type)

    # generate a browse report model
    browse_report = create_browse_report(parsed_browse_report, browse_layer)

    # initialize the preprocessor with configuration values
    crs = None
    if browse_layer.grid == "urn:ogc:def:wkss:OGC:1.0:GoogleMapsCompatible":
        crs = "EPSG:3857"
    elif browse_layer.grid == "urn:ogc:def:wkss:OGC:1.0:GoogleCRS84Quad":
        crs = "EPSG:4326"

    logger.debug("Using CRS '%s' ('%s')." % (crs, browse_layer.grid))

    # create the required preprocessor/format selection
    format_selection = get_format_selection("GTiff",
                                            **get_format_config(config))

    if do_preprocessing:
        # add config parameters and custom params
        params = get_optimization_config(config)

        # add radiometric interval; fall back to the "min"/"max" markers when
        # the layer does not define explicit bounds
        rad_min = browse_layer.radiometric_interval_min
        if rad_min is not None:
            params["radiometric_interval_min"] = rad_min
        else:
            rad_min = "min"

        rad_max = browse_layer.radiometric_interval_max
        if rad_max is not None:
            params["radiometric_interval_max"] = rad_max
        else:
            rad_max = "max"

        # add band selection, only when all three bands are configured
        if (browse_layer.r_band is not None and
                browse_layer.g_band is not None and
                browse_layer.b_band is not None):
            bands = [(browse_layer.r_band, rad_min, rad_max),
                     (browse_layer.g_band, rad_min, rad_max),
                     (browse_layer.b_band, rad_min, rad_max)]

            if params["bandmode"] == RGBA:
                # RGBA
                bands.append((0, 0, 0))

            params["bands"] = bands

        preprocessor = NGEOPreProcessor(format_selection, crs=crs, **params)
    else:
        preprocessor = None  # TODO: CopyPreprocessor

    report_result = IngestBrowseReportResult()

    succeded = []
    failed = []

    # build a per-run directory name from browse type, org, report date and a
    # current timestamp so concurrent/repeated ingestions do not collide
    timestamp = datetime.utcnow().strftime("%Y%m%d%H%M%S%f")
    browse_dirname = _valid_path("%s_%s_%s_%s" % (
        browse_type, browse_report.responsible_org_name,
        browse_report.date_time.strftime("%Y%m%d%H%M%S%f"), timestamp
    ))
    success_dir = join(get_success_dir(config), browse_dirname)
    failure_dir = join(get_failure_dir(config), browse_dirname)

    if exists(success_dir):
        # fixed: the directory name was missing from the log message
        logger.warn("Success directory '%s' already exists." % success_dir)
    else:
        makedirs(success_dir)
    if exists(failure_dir):
        # fixed: the directory name was missing from the log message
        logger.warn("Failure directory '%s' already exists." % failure_dir)
    else:
        makedirs(failure_dir)

    # iterate over all browses in the browse report
    for parsed_browse in parsed_browse_report:
        # transaction management per browse
        with transaction.commit_manually():
            with transaction.commit_manually(using="mapcache"):
                try:
                    seed_areas = []
                    # try ingest a single browse and log success
                    result = ingest_browse(parsed_browse, browse_report,
                                           browse_layer, preprocessor, crs,
                                           success_dir, failure_dir,
                                           seed_areas, config=config)
                    report_result.add(result)
                    succeded.append(parsed_browse)

                    # commit here to allow seeding
                    transaction.commit()
                    transaction.commit(using="mapcache")
                    logger.info("Committed changes to database.")

                    for minx, miny, maxx, maxy, start_time, end_time in seed_areas:
                        try:
                            # seed MapCache synchronously
                            # TODO: maybe replace this with an async solution
                            seed_mapcache(tileset=browse_layer.id,
                                          grid=browse_layer.grid,
                                          minx=minx, miny=miny,
                                          maxx=maxx, maxy=maxy,
                                          minzoom=browse_layer.lowest_map_level,
                                          maxzoom=browse_layer.highest_map_level,
                                          start_time=start_time,
                                          end_time=end_time,
                                          delete=False,
                                          **get_mapcache_seed_config(config))
                            logger.info("Successfully finished seeding.")
                        except Exception as e:
                            # seeding failure is non-fatal: the browse itself
                            # was already ingested and committed
                            logger.warn("Seeding failed: %s" % str(e))

                    # log ingestions for report generation
                    # date/browseType/browseLayerId/start/end
                    report_logger.info("/\\/\\".join((
                        datetime.utcnow().isoformat("T") + "Z",
                        parsed_browse_report.browse_type,
                        browse_layer.id,
                        (parsed_browse.start_time.replace(tzinfo=None)
                         - parsed_browse.start_time.utcoffset()
                         ).isoformat("T") + "Z",
                        (parsed_browse.end_time.replace(tzinfo=None)
                         - parsed_browse.end_time.utcoffset()
                         ).isoformat("T") + "Z"
                    )))

                except Exception as e:
                    # report error
                    logger.error("Failure during ingestion of browse '%s'." %
                                 parsed_browse.browse_identifier)
                    logger.error("Exception was '%s': %s" %
                                 (type(e).__name__, str(e)))
                    logger.debug(traceback.format_exc() + "\n")

                    # undo latest changes, append the failure and continue
                    report_result.add(IngestBrowseFailureResult(
                        parsed_browse.browse_identifier,
                        getattr(e, "code", None) or type(e).__name__, str(e))
                    )
                    failed.append(parsed_browse)

                    transaction.rollback()
                    transaction.rollback(using="mapcache")
def ingest_browse_report(parsed_browse_report, do_preprocessing=True, config=None):
    """ Ingest a browse report: resolve the browse layer for the report's
    browse type, set up the (optional) preprocessor from layer/config values,
    and ingest every browse of the report in its own transaction, seeding
    MapCache for each successfully ingested browse.

    :param parsed_browse_report: parsed browse report; iterable over its
        parsed browses and carrying ``browse_type``, ``responsible_org_name``
        and ``date_time``
    :param do_preprocessing: when true, an :class:`NGEOPreProcessor` is
        configured; otherwise no preprocessing is performed
    :param config: optional configuration passed through to the config helpers
    :returns: nothing is returned from the visible code path; results are
        accumulated in an :class:`IngestBrowseReportResult`
    :raises IngestionException: if no browse layer exists for the report's
        browse type
    """
    # initialize the EOxServer system/registry/configuration
    System.init()

    try:
        # get the according browse layer
        browse_type = parsed_browse_report.browse_type
        browse_layer = models.BrowseLayer.objects.get(browse_type=browse_type)
    except models.BrowseLayer.DoesNotExist:
        logger.warn("Browse layer with browse type '%s' does not "
                    "exist." % parsed_browse_report.browse_type)
        raise IngestionException(
            "Browse layer with browse type '%s' does not "
            "exist." % parsed_browse_report.browse_type
        )

    # generate a browse report model
    browse_report = create_browse_report(parsed_browse_report, browse_layer)

    # initialize the preprocessor with configuration values
    crs = None
    if browse_layer.grid == "urn:ogc:def:wkss:OGC:1.0:GoogleMapsCompatible":
        crs = "EPSG:3857"
    elif browse_layer.grid == "urn:ogc:def:wkss:OGC:1.0:GoogleCRS84Quad":
        crs = "EPSG:4326"

    logger.debug("Using CRS '%s' ('%s')." % (crs, browse_layer.grid))

    # create the required preprocessor/format selection
    format_selection = get_format_selection("GTiff", **get_format_config(config))

    if do_preprocessing:
        # add config parameters and custom params
        params = get_optimization_config(config)

        # add radiometric interval; fall back to the "min"/"max" markers when
        # the layer does not define explicit bounds
        rad_min = browse_layer.radiometric_interval_min
        if rad_min is not None:
            params["radiometric_interval_min"] = rad_min
        else:
            rad_min = "min"

        rad_max = browse_layer.radiometric_interval_max
        if rad_max is not None:
            params["radiometric_interval_max"] = rad_max
        else:
            rad_max = "max"

        # add band selection, only when all three bands are configured
        if browse_layer.r_band is not None and browse_layer.g_band is not None and browse_layer.b_band is not None:
            bands = [
                (browse_layer.r_band, rad_min, rad_max),
                (browse_layer.g_band, rad_min, rad_max),
                (browse_layer.b_band, rad_min, rad_max),
            ]

            if params["bandmode"] == RGBA:
                # RGBA
                bands.append((0, 0, 0))

            params["bands"] = bands

        preprocessor = NGEOPreProcessor(format_selection, crs=crs, **params)
    else:
        preprocessor = None  # TODO: CopyPreprocessor

    report_result = IngestBrowseReportResult()

    succeded = []
    failed = []

    # build a per-run directory name from browse type, org, report date and a
    # current timestamp so concurrent/repeated ingestions do not collide
    timestamp = datetime.utcnow().strftime("%Y%m%d%H%M%S%f")
    browse_dirname = _valid_path(
        "%s_%s_%s_%s" % (
            browse_type,
            browse_report.responsible_org_name,
            browse_report.date_time.strftime("%Y%m%d%H%M%S%f"),
            timestamp,
        )
    )
    success_dir = join(get_success_dir(config), browse_dirname)
    failure_dir = join(get_failure_dir(config), browse_dirname)

    if exists(success_dir):
        # fixed: the directory name was missing from the log message
        logger.warn("Success directory '%s' already exists." % success_dir)
    else:
        makedirs(success_dir)
    if exists(failure_dir):
        # fixed: the directory name was missing from the log message
        logger.warn("Failure directory '%s' already exists." % failure_dir)
    else:
        makedirs(failure_dir)

    # iterate over all browses in the browse report
    for parsed_browse in parsed_browse_report:
        # transaction management per browse
        with transaction.commit_manually():
            with transaction.commit_manually(using="mapcache"):
                try:
                    seed_areas = []
                    # try ingest a single browse and log success
                    result = ingest_browse(
                        parsed_browse,
                        browse_report,
                        browse_layer,
                        preprocessor,
                        crs,
                        success_dir,
                        failure_dir,
                        seed_areas,
                        config=config,
                    )
                    report_result.add(result)
                    succeded.append(parsed_browse)

                    # commit here to allow seeding
                    transaction.commit()
                    transaction.commit(using="mapcache")
                    logger.info("Committed changes to database.")

                    for minx, miny, maxx, maxy, start_time, end_time in seed_areas:
                        try:
                            # seed MapCache synchronously
                            # TODO: maybe replace this with an async solution
                            seed_mapcache(
                                tileset=browse_layer.id,
                                grid=browse_layer.grid,
                                minx=minx,
                                miny=miny,
                                maxx=maxx,
                                maxy=maxy,
                                minzoom=browse_layer.lowest_map_level,
                                maxzoom=browse_layer.highest_map_level,
                                start_time=start_time,
                                end_time=end_time,
                                delete=False,
                                **get_mapcache_seed_config(config)
                            )
                            logger.info("Successfully finished seeding.")
                        except Exception as e:
                            # seeding failure is non-fatal: the browse itself
                            # was already ingested and committed
                            logger.warn("Seeding failed: %s" % str(e))

                    # log ingestions for report generation
                    # date/browseType/browseLayerId/start/end
                    report_logger.info(
                        "/\\/\\".join(
                            (
                                datetime.utcnow().isoformat("T") + "Z",
                                parsed_browse_report.browse_type,
                                browse_layer.id,
                                (
                                    parsed_browse.start_time.replace(tzinfo=None)
                                    - parsed_browse.start_time.utcoffset()
                                ).isoformat("T") + "Z",
                                (
                                    parsed_browse.end_time.replace(tzinfo=None)
                                    - parsed_browse.end_time.utcoffset()
                                ).isoformat("T") + "Z",
                            )
                        )
                    )

                except Exception as e:
                    # report error
                    logger.error("Failure during ingestion of browse '%s'." % parsed_browse.browse_identifier)
                    logger.error("Exception was '%s': %s" % (type(e).__name__, str(e)))
                    logger.debug(traceback.format_exc() + "\n")

                    # undo latest changes, append the failure and continue
                    report_result.add(
                        IngestBrowseFailureResult(
                            parsed_browse.browse_identifier,
                            getattr(e, "code", None) or type(e).__name__, str(e)
                        )
                    )
                    failed.append(parsed_browse)

                    transaction.rollback()
                    transaction.rollback(using="mapcache")
def ingest_browse_report(parsed_browse_report, do_preprocessing=True, config=None): """ Ingests a browse report. reraise_exceptions if errors shall be handled externally """ try: # get the according browse layer browse_type = parsed_browse_report.browse_type browse_layer = models.BrowseLayer.objects.get(browse_type=browse_type) except models.BrowseLayer.DoesNotExist: raise IngestionException("Browse layer with browse type '%s' does not " "exist." % parsed_browse_report.browse_type) # generate a browse report model browse_report = create_browse_report(parsed_browse_report, browse_layer) # initialize the preprocessor with configuration values crs = None if browse_layer.grid == "urn:ogc:def:wkss:OGC:1.0:GoogleMapsCompatible": crs = "EPSG:3857" elif browse_layer.grid == "urn:ogc:def:wkss:OGC:1.0:GoogleCRS84Quad": crs = "EPSG:4326" logger.debug("Using CRS '%s' ('%s')." % (crs, browse_layer.grid)) # create the required preprocessor/format selection format_selection = get_format_selection("GTiff", **get_format_config(config)) if do_preprocessing and not browse_layer.contains_vertical_curtains \ and not browse_layer.contains_volumes: # add config parameters and custom params params = get_optimization_config(config) # add radiometric interval rad_min = browse_layer.radiometric_interval_min if rad_min is not None: params["radiometric_interval_min"] = rad_min else: rad_min = "min" rad_max = browse_layer.radiometric_interval_max if rad_max is not None: params["radiometric_interval_max"] = rad_max else: rad_max = "max" # add band selection if (browse_layer.r_band is not None and browse_layer.g_band is not None and browse_layer.b_band is not None): bands = [(browse_layer.r_band, rad_min, rad_max), (browse_layer.g_band, rad_min, rad_max), (browse_layer.b_band, rad_min, rad_max)] if params["bandmode"] == RGBA: # RGBA bands.append((0, 0, 0)) params["bands"] = bands preprocessor = NGEOPreProcessor(format_selection, crs=crs, **params) elif browse_layer.contains_vertical_curtains: 
logger.info("Preparing Vertical Curtain Pre-Processor") params = {} # add radiometric interval rad_min = browse_layer.radiometric_interval_min if rad_min is not None: params["radiometric_interval_min"] = rad_min else: rad_min = "min" rad_max = browse_layer.radiometric_interval_max if rad_max is not None: params["radiometric_interval_max"] = rad_max else: rad_max = "max" preprocessor = VerticalCurtainPreprocessor(**params) elif browse_layer.contains_volumes: preprocessor = VolumePreProcessor() else: preprocessor = None # TODO: CopyPreprocessor report_result = IngestBrowseReportResult() succeded = [] failed = [] timestamp = datetime.utcnow().strftime("%Y%m%d%H%M%S%f") browse_dirname = _valid_path("%s_%s_%s_%s" % ( browse_type, browse_report.responsible_org_name, browse_report.date_time.strftime("%Y%m%d%H%M%S%f"), timestamp )) success_dir = join(get_success_dir(config), browse_dirname) failure_dir = join(get_failure_dir(config), browse_dirname) if exists(success_dir): logger.warn("Success directory '%s' already exists.") else: makedirs(success_dir) if exists(failure_dir): logger.warn("Failure directory '%s' already exists.") else: makedirs(failure_dir) # iterate over all browses in the browse report for parsed_browse in parsed_browse_report: # transaction management per browse with transaction.commit_manually(): with transaction.commit_manually(using="mapcache"): try: seed_areas = [] # try ingest a single browse and log success result = ingest_browse(parsed_browse, browse_report, browse_layer, preprocessor, crs, success_dir, failure_dir, seed_areas, config=config) report_result.add(result) succeded.append(parsed_browse) # commit here to allow seeding transaction.commit() transaction.commit(using="mapcache") logger.info("Commited changes to database.") if not browse_layer.contains_vertical_curtains and not browse_layer.contains_volumes: for minx, miny, maxx, maxy, start_time, end_time in seed_areas: try: # seed MapCache synchronously # TODO: maybe replace this with an 
async solution seed_mapcache(tileset=browse_layer.id, grid=browse_layer.grid, minx=minx, miny=miny, maxx=maxx, maxy=maxy, minzoom=browse_layer.lowest_map_level, maxzoom=browse_layer.highest_map_level, start_time=start_time, end_time=end_time, delete=False, **get_mapcache_seed_config(config)) logger.info("Successfully finished seeding.") except Exception, e: logger.warn("Seeding failed: %s" % str(e)) elif not browse_layer.contains_volumes: host = "http://localhost/browse/ows" level_0_num_tiles_y = 2 # rows level_0_num_tiles_x = 4 # cols seed_level = range(browse_layer.lowest_map_level, browse_layer.highest_map_level) for tileLevel in seed_level: tiles_x = level_0_num_tiles_x * pow(2, tileLevel); tiles_y = level_0_num_tiles_y * pow(2, tileLevel) #find which tiles are crossed by extent tile_width = 360 / (tiles_x) tile_height = 180 / (tiles_y) coverage = eoxs_models.Coverage.objects.get(identifier=result.identifier) #cycle through tiles for col in range(tiles_x): for row in range(tiles_y): west = -180 + (col * tile_width) east = west + tile_width north = 90 - (row * tile_height) south = north - tile_height if (coverage.footprint.intersects(Polygon.from_bbox( (west,south,east,north) ))): try: # NOTE: The MeshFactory ignores time time = (isoformat(result.time_interval[0]) + "/" + isoformat(result.time_interval[1])) baseurl = host + '?service=W3DS&request=GetTile&version=1.0.0&crs=EPSG:4326&layer={0}&style=default&format=model/gltf'.format(browse_layer.id) url = '{0}&tileLevel={1}&tilecol={2}&tilerow={3}&time={4}'.format(baseurl, tileLevel, col, row, time) logger.info('Seeding call to URL: %s' % (url,)) response = urllib2.urlopen(url) response.close() except Exception, e: logger.warn("Seeding failed: %s" % str(e)) transaction.commit() else: pass