def _get_containers(self, params):
    containers = params.get("container_ids", [])
    wrappers = []
    for obj_id in containers:
        wrapper = System.getRegistry().getFromFactory(
            "resources.coverages.wrappers.DatasetSeriesFactory",
            {"obj_id": obj_id}
        )
        if not wrapper:
            wrapper = System.getRegistry().getFromFactory(
                "resources.coverages.wrappers.EOCoverageFactory", {
                    "impl_id": "resources.coverages.wrappers.RectifiedStitchedMosaicWrapper",
                    "obj_id": obj_id
                }
            )
        if not wrapper:
            raise InternalError(
                "Dataset Series or Rectified Stitched Mosaic with ID "
                "%s not found." % obj_id
            )
        wrappers.append(wrapper)
    return wrappers
def __init__(self):
    self.id_factory = self._get_id_factory()
    self.location_factory = System.getRegistry().bind(
        "backends.factories.LocationFactory"
    )
    self.data_package_factory = System.getRegistry().bind(
        "resources.coverages.data.DataPackageFactory"
    )
def __init__(self):
    super(RectifiedStitchedMosaicManager, self).__init__()
    self.data_source_factory = System.getRegistry().bind(
        "resources.coverages.data.DataSourceFactory"
    )
    self.tile_index_factory = System.getRegistry().bind(
        "resources.coverages.data.TileIndexFactory"
    )
def getDatasetById(self, cid):
    """ Convenience method to get a coverage by its ID. """
    return System.getRegistry().getFromFactory(
        "resources.coverages.wrappers.EOCoverageFactory",
        {"obj_id": cid}
    )
def findDatasetsByFilters(self, *filters):
    """ Convenience method to get a list of coverages by given filter
        expressions.
    """
    filter_exprs = [
        System.getRegistry().getFromFactory(
            factory_id="resources.coverages.filters.CoverageExpressionFactory",
            params=filter_expr
        )
        for filter_expr in filters
    ]
    return System.getRegistry().bind(
        "resources.coverages.wrappers.EOCoverageFactory"
    ).find(
        impl_ids=[
            "resources.coverages.wrappers.RectifiedDatasetWrapper",
            "resources.coverages.wrappers.ReferenceableDatasetWrapper"
        ],
        filter_exprs=filter_exprs
    )
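# Minimal usage sketch for findDatasetsByFilters(); hypothetical, not part of
# the original module. The "referenced_by" expression parameters follow the
# _create_contained() snippet further below; "manager" and "location" are
# placeholder names.
datasets = manager.findDatasetsByFilters(
    {"op_name": "referenced_by", "operands": (location,)}
)
for dataset in datasets:
    logger.info("Found dataset: %s" % dataset.getCoverageId())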
def getManager(self, mgrtype=None, intf_id=None):
    if mgrtype is None:
        mgrtype = self.getType()
    if intf_id is None:
        intf_id = self.getInterfaceID()

    return System.getRegistry().findAndBind(
        intf_id=intf_id,
        params={
            "resources.coverages.interfaces.res_type": mgrtype
        }
    )
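# Sketch of the same findAndBind() lookup done directly, using the manager
# interface ID and the "eo.rect_dataset" resource type that appear elsewhere
# in these snippets; the coverage ID is a placeholder.
rect_mgr = System.getRegistry().findAndBind(
    intf_id="resources.coverages.interfaces.Manager",
    params={"resources.coverages.interfaces.res_type": "eo.rect_dataset"}
)
rect_mgr.delete(obj_id="SOME_COVERAGE_ID")  # placeholder ID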
def _get_coverages(self, params):
    coverages = params.get("coverages", [])
    coverage_factory = System.getRegistry().bind(
        "resources.coverages.wrappers.EOCoverageFactory"
    )
    for cid in params.get("coverage_ids", []):
        coverage = coverage_factory.get(obj_id=cid)
        if not coverage:
            raise NoSuchCoverageException(cid)
        coverages.append(coverage)
    return coverages
def _get_containers(self, params):
    containers = params.get("container_ids", [])
    wrappers = []
    for obj_id in containers:
        wrapper = System.getRegistry().getFromFactory(
            "resources.coverages.wrappers.DatasetSeriesFactory",
            {"obj_id": obj_id}
        )
        if not wrapper:
            raise InternalError(
                "Dataset Series with ID %s not found." % obj_id
            )
        wrappers.append(wrapper)
    return wrappers
def add_browse_layer(browse_layer, config=None):
    """ Add a browse layer to the ngEO Browse Server system. This includes
        the database models, cache configuration and filesystem paths.
    """
    config = config or get_ngeo_config()

    try:
        logger.info("Adding new browse layer '%s'." % browse_layer.id)
        # create a new browse layer model
        browse_layer_model = models.BrowseLayer(
            **browse_layer.get_kwargs()
        )
        browse_layer_model.full_clean()
        browse_layer_model.save()

        # relatedDatasets are ignored (see NGEO-1508)
        # for related_dataset_id in browse_layer.related_dataset_ids:
        #     models.RelatedDataset.objects.get_or_create(
        #         dataset_id=related_dataset_id, browse_layer=browse_layer_model
        #     )
    except Exception:
        raise

    # create EOxServer dataset series
    dss_mgr = System.getRegistry().findAndBind(
        intf_id="resources.coverages.interfaces.Manager",
        params={
            "resources.coverages.interfaces.res_type": "eo.dataset_series"
        }
    )
    dss_mgr.create(browse_layer.id,
        eo_metadata=EOMetadata(
            browse_layer.id,
            datetime.now(), datetime.now(),
            MultiPolygon(Polygon.from_bbox((0, 0, 1, 1)))
        )
    )

    # create EOxServer layer metadata
    if browse_layer.title or browse_layer.description:
        dss = System.getRegistry().getFromFactory(
            "resources.coverages.wrappers.DatasetSeriesFactory",
            {"obj_id": browse_layer.id}
        )
        if browse_layer.title:
            md_title = LayerMetadataRecord.objects.get_or_create(
                key="ows_title", value=str(browse_layer.title))[0]
            dss._DatasetSeriesWrapper__model.layer_metadata.add(md_title)
        if browse_layer.description:
            md_abstract = LayerMetadataRecord.objects.get_or_create(
                key="ows_abstract", value=str(browse_layer.description))[0]
            dss._DatasetSeriesWrapper__model.layer_metadata.add(md_abstract)

    # add source to mapcache sqlite
    mapcache_models.Source.objects.create(name=browse_layer.id)

    # add an XML section to the mapcache config xml
    add_mapcache_layer_xml(browse_layer, config)

    # create a base directory for optimized files
    directory = get_project_relative_path(join(
        config.get(INGEST_SECTION, "optimized_files_dir"), browse_layer.id
    ))
    if not os.path.exists(directory):
        os.makedirs(directory)
def remove_browse(browse_model, browse_layer_model, coverage_id,
                  seed_areas, unseed=True, config=None):
    """ Delete all models and caches associated with browse model. Image
        itself is not deleted. Returns the extent and filename of the
        replaced image.
    """

    # get previous extent to "un-seed" MapCache in that area
    rect_ds = System.getRegistry().getFromFactory(
        "resources.coverages.wrappers.EOCoverageFactory",
        {"obj_id": browse_model.coverage_id}
    )
    replaced_extent = rect_ds.getExtent()
    replaced_filename = rect_ds.getData().getLocation().getPath()

    # delete the EOxServer rectified dataset entry
    rect_mgr = System.getRegistry().findAndBind(
        intf_id="resources.coverages.interfaces.Manager",
        params={
            "resources.coverages.interfaces.res_type": "eo.rect_dataset"
        }
    )
    rect_mgr.delete(obj_id=browse_model.coverage_id)
    browse_model.delete()

    # search for time entries with an overlapping time span
    time_model = None
    if browse_model.start_time == browse_model.end_time:
        times_qs = mapcache_models.Time.objects.filter(
            source=browse_layer_model.id,
            start_time__lte=browse_model.end_time,
            end_time__gte=browse_model.start_time
        )
    else:
        times_qs = mapcache_models.Time.objects.filter(
            Q(source=browse_layer_model.id),
            Q(start_time__lt=browse_model.end_time,
              end_time__gt=browse_model.start_time) |
            Q(start_time=F("end_time"),
              start_time__lte=browse_model.end_time,
              end_time__gte=browse_model.start_time)
        )

    if len(times_qs) == 1:
        time_model = times_qs[0]
    elif len(times_qs) == 0:
        # issue a warning if no corresponding Time object exists
        logger.warning("No MapCache Time object found for time: %s, %s" % (
            browse_model.start_time, browse_model.end_time
        ))
    elif len(times_qs) > 1:
        # issue a warning if too many corresponding Time objects exist;
        # try to delete redundant time models
        # note that this situation should never happen but just in case...
        logger.warning("Multiple MapCache Time objects found for time: %s, "
                       "%s. Trying to delete redundant ones." % (
                           browse_model.start_time, browse_model.end_time
                       ))
        first = True
        with transaction.commit_manually(using="mapcache"):
            for time_model_tmp in times_qs:
                if first:
                    first = False
                    time_model = time_model_tmp
                elif (time_model_tmp.start_time <= time_model.start_time and
                      time_model_tmp.end_time >= time_model.end_time):
                    time_model.delete()
                    time_model = time_model_tmp
                else:
                    time_model_tmp.delete()
            transaction.commit(using="mapcache")

    # guard against the no-match case above, where time_model stays unset;
    # previously this raised a NameError that was swallowed by the except
    if unseed and time_model is not None:
        # unseed here
        try:
            seed_mapcache(tileset=browse_layer_model.id,
                          grid=browse_layer_model.grid,
                          minx=time_model.minx, miny=time_model.miny,
                          maxx=time_model.maxx, maxy=time_model.maxy,
                          minzoom=browse_layer_model.lowest_map_level,
                          maxzoom=browse_layer_model.highest_map_level,
                          start_time=time_model.start_time,
                          end_time=time_model.end_time,
                          delete=True,
                          **get_mapcache_seed_config(config))
        except Exception as e:
            logger.warning("Un-seeding failed: %s" % str(e))

    # return value promised by the docstring and used by callers below
    return replaced_extent, replaced_filename
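# Hedged caller sketch for remove_browse(), mirroring the call visible in the
# merge/replace strategy snippets below; all variables are placeholders bound
# during ingestion.
replaced_extent, replaced_filename = remove_browse(
    existing_browse_model, browse_layer, coverage_id, seed_areas,
    config=config
)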
raise NGEOException("Browse Identifier '%s' not valid: '%s'." % (browse.browse_identifier, str(e.messages[0])), "ValidationError") browse_identifier_model = models.BrowseIdentifier( value=browse.browse_identifier, browse=browse_model, browse_layer=browse_layer_model ) browse_identifier_model.full_clean() browse_identifier_model.save() # initialize the Coverage Manager for Rectified Datasets to register the # datasets in the database rect_mgr = System.getRegistry().findAndBind( intf_id="resources.coverages.interfaces.Manager", params={ "resources.coverages.interfaces.res_type": "eo.rect_dataset" } ) # create EO metadata necessary for registration eo_metadata = EOMetadata( coverage_id, browse.start_time, browse.end_time, footprint ) # get dataset series ID from browse layer, if available container_ids = [] if browse_layer_model: container_ids.append(browse_layer_model.id) range_type_name = "RGB" if num_bands == 3 else "RGBA"
def add_browse_layer(browse_layer, config=None):
    """ Add a browse layer to the ngEO Browse Server system. This includes
        the database models, cache configuration and filesystem paths.
    """
    config = config or get_ngeo_config()

    try:
        logger.info("Adding new browse layer '%s'." % browse_layer.id)
        # create a new browse layer model
        browse_layer_model = models.BrowseLayer(**browse_layer.get_kwargs())
        browse_layer_model.full_clean()
        browse_layer_model.save()

        # relatedDatasets are ignored (see NGEO-1508)
        # for related_dataset_id in browse_layer.related_dataset_ids:
        #     models.RelatedDataset.objects.get_or_create(
        #         dataset_id=related_dataset_id, browse_layer=browse_layer_model
        #     )
    except Exception:
        raise

    # create EOxServer dataset series
    dss_mgr = System.getRegistry().findAndBind(
        intf_id="resources.coverages.interfaces.Manager",
        params={
            "resources.coverages.interfaces.res_type": "eo.dataset_series"
        })
    dss_mgr.create(browse_layer.id, eo_metadata=EOMetadata(
        browse_layer.id, datetime.now(), datetime.now(),
        MultiPolygon(Polygon.from_bbox((0, 0, 1, 1)))))

    # create EOxServer layer metadata
    if browse_layer.title or browse_layer.description:
        dss = System.getRegistry().getFromFactory(
            "resources.coverages.wrappers.DatasetSeriesFactory",
            {"obj_id": browse_layer.id})
        if browse_layer.title:
            md_title = LayerMetadataRecord.objects.get_or_create(
                key="ows_title", value=str(browse_layer.title))[0]
            dss._DatasetSeriesWrapper__model.layer_metadata.add(md_title)
        if browse_layer.description:
            md_abstract = LayerMetadataRecord.objects.get_or_create(
                key="ows_abstract", value=str(browse_layer.description))[0]
            dss._DatasetSeriesWrapper__model.layer_metadata.add(md_abstract)

    # add source to mapcache sqlite
    mapcache_models.Source.objects.create(name=browse_layer.id)

    # add an XML section to the mapcache config xml
    add_mapcache_layer_xml(browse_layer, config)

    # create a base directory for optimized files
    directory = get_project_relative_path(
        join(config.get(INGEST_SECTION, "optimized_files_dir"),
             browse_layer.id))
    if not os.path.exists(directory):
        os.makedirs(directory)

    # create SxCat collection if harvesting via SxCat is enabled and source
    # is given
    harvesting_via_sxcat = False
    try:
        harvesting_via_sxcat = config.getboolean("control",
                                                 "harvesting_via_sxcat")
    except:
        pass
    if harvesting_via_sxcat and browse_layer.harvesting_source:
        add_collection(browse_layer)
    # get strategy and merge threshold
    threshold = ingest_config["merge_threshold"]
    if browse_layer.strategy != "inherit":
        strategy = browse_layer.strategy
    else:
        strategy = ingest_config["strategy"]

    if strategy == "merge" and timedelta < threshold:
        if previous_time > current_time:
            # TODO: raise exception?
            pass

        rect_ds = System.getRegistry().getFromFactory(
            "resources.coverages.wrappers.EOCoverageFactory",
            {"obj_id": existing_browse_model.coverage_id}
        )
        merge_footprint = rect_ds.getFootprint()
        merge_with = rect_ds.getData().getLocation().getPath()

        replaced_time_interval = (existing_browse_model.start_time,
                                  existing_browse_model.end_time)

        _, _ = remove_browse(
            existing_browse_model, browse_layer, coverage_id,
            seed_areas, config=config
        )
        replaced = False
        logger.debug("Existing browse found, merging it.")

    else:
        # perform replacement
        replaced_time_interval = (existing_browse_model.start_time,
                                  existing_browse_model.end_time)
        # (arguments completed from the parallel merge branch above)
        replaced_extent, replaced_filename = remove_browse(
            existing_browse_model, browse_layer, coverage_id,
            seed_areas, config=config
        )
def _get_id_factory(self):
    return System.getRegistry().bind(
        "resources.coverages.wrappers.EOCoverageFactory"
    )
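# Sketch of how the factory bound by _get_id_factory() is used elsewhere in
# these snippets: get() for a single lookup, find() for a filtered search.
# The coverage ID is a placeholder.
factory = System.getRegistry().bind(
    "resources.coverages.wrappers.EOCoverageFactory"
)
coverage = factory.get(obj_id="SOME_COVERAGE_ID")
datasets = factory.find(
    impl_ids=["resources.coverages.wrappers.RectifiedDatasetWrapper"],
    filter_exprs=[]
)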
    # get strategy and merge threshold
    threshold = ingest_config["merge_threshold"]
    if browse_layer.strategy != "inherit":
        strategy = browse_layer.strategy
    else:
        strategy = ingest_config["strategy"]

    if strategy == "merge" and timedelta < threshold:
        if previous_time > current_time:
            # TODO: raise exception?
            pass

        rect_ds = System.getRegistry().getFromFactory(
            "resources.coverages.wrappers.EOCoverageFactory",
            {"obj_id": existing_browse_model.coverage_id}
        )
        merge_footprint = rect_ds.getFootprint()
        merge_with = rect_ds.getData().getLocation().getPath()

        replaced_time_interval = (existing_browse_model.start_time,
                                  existing_browse_model.end_time)

        _, _ = remove_browse(
            existing_browse_model, browse_layer, coverage_id,
            seed_areas, config=config
        )
        replaced = False
        logger.debug("Existing browse found, merging it.")

    elif strategy == "skip" and current_time <= previous_time:
        pass  # (excerpt truncated here)
            raise NGEOException(
                "Browse Identifier '%s' not valid: '%s'."
                % (browse.browse_identifier, str(e.messages[0])),
                "ValidationError")

        browse_identifier_model = models.BrowseIdentifier(
            value=browse.browse_identifier, browse=browse_model,
            browse_layer=browse_layer_model)
        browse_identifier_model.full_clean()
        browse_identifier_model.save()

    # initialize the Coverage Manager for Rectified Datasets to register the
    # datasets in the database
    rect_mgr = System.getRegistry().findAndBind(
        intf_id="resources.coverages.interfaces.Manager",
        params={"resources.coverages.interfaces.res_type": "eo.rect_dataset"})

    # create EO metadata necessary for registration
    eo_metadata = EOMetadata(coverage_id, browse.start_time, browse.end_time,
                             footprint)

    # get dataset series ID from browse layer, if available
    container_ids = []
    if browse_layer_model:
        container_ids.append(browse_layer_model.id)

    range_type_name = "RGB" if num_bands == 3 else "RGBA"

    # register the optimized dataset
    logger.info("Creating Rectified Dataset.")
def __get_manager(class_obj):
    return System.getRegistry().findAndBind(
        intf_id="resources.coverages.interfaces.Manager",
        params={
            "resources.coverages.interfaces.res_type": class_obj._type
        }
    )
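# Hypothetical caller for __get_manager(): any class exposing a _type
# attribute that names a registered resource type resolves to the matching
# manager. The class name and attribute value below are illustrative
# assumptions, not part of the original module.
class SomeDatasetRecord(object):
    _type = "eo.rect_dataset"

mgr = __get_manager(SomeDatasetRecord)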
def delete_browse_layer(browse_layer, purge=False, config=None):
    config = config or get_ngeo_config()

    # only remove MapCache configuration in order to allow a roll-back
    # without data loss
    if models.BrowseLayer.objects.filter(id=browse_layer.id).exists():
        logger.info("Starting disabling of browse layer '%s'."
                    % browse_layer.id)
    else:
        raise Exception(
            "Could not disable browse layer '%s' as it does not exist."
            % browse_layer.id)

    # remove browse layer from MapCache XML
    remove_mapcache_layer_xml(browse_layer, config)

    # disable SxCat harvesting for collection
    harvesting_via_sxcat = False
    try:
        harvesting_via_sxcat = config.getboolean("control",
                                                 "harvesting_via_sxcat")
    except:
        pass
    if harvesting_via_sxcat and browse_layer.harvesting_source:
        disable_collection(browse_layer)

    logger.info("Finished disabling of browse layer '%s'." % browse_layer.id)

    if purge:
        logger.info("Starting purging of browse layer '%s'."
                    % browse_layer.id)

        # remove browse layer model. This should also delete all related
        # browses and browse reports
        models.BrowseLayer.objects.get(id=browse_layer.id).delete()

        # delete EOxServer layer metadata
        dss = System.getRegistry().getFromFactory(
            "resources.coverages.wrappers.DatasetSeriesFactory",
            {"obj_id": browse_layer.id})
        dss._DatasetSeriesWrapper__model.layer_metadata.all().delete()

        # delete EOxServer dataset series
        dss_mgr = System.getRegistry().findAndBind(
            intf_id="resources.coverages.interfaces.Manager",
            params={
                "resources.coverages.interfaces.res_type": "eo.dataset_series"
            })
        dss_mgr.delete(browse_layer.id)

        # remove source from mapcache sqlite
        mapcache_models.Source.objects.get(name=browse_layer.id).delete()

        # delete browse layer cache
        try:
            logger.info("Deleting tileset for browse layer '%s'."
                        % browse_layer.id)
            os.remove(get_tileset_path(browse_layer.browse_type))
        except OSError:
            # when no browse was ingested, the sqlite file does not exist, so
            # just issue a warning
            logger.warning("Could not remove tileset '%s'."
                           % get_tileset_path(browse_layer.browse_type))

        # delete all optimized files by deleting the whole directory of the
        # layer
        optimized_dir = get_project_relative_path(
            join(config.get(INGEST_SECTION, "optimized_files_dir"),
                 browse_layer.id))
        try:
            logger.info("Deleting optimized images for browse layer '%s'."
                        % browse_layer.id)
            shutil.rmtree(optimized_dir)
        except OSError:
            logger.error(
                "Could not remove directory for optimized files: '%s'."
                % optimized_dir)

        if harvesting_via_sxcat and browse_layer.harvesting_source:
            remove_collection(browse_layer)

        logger.info("Finished purging of browse layer '%s'."
                    % browse_layer.id)
def update_browse_layer(browse_layer, config=None):
    config = config or get_ngeo_config()

    try:
        logger.info("Fetching browse layer '%s' for update."
                    % browse_layer.id)
        browse_layer_model = models.BrowseLayer.objects.get(
            id=browse_layer.id)
    except models.BrowseLayer.DoesNotExist:
        raise Exception(
            "Could not update browse layer '%s' as it does not exist."
            % browse_layer.id)

    immutable_values = (
        "id", "browse_type", "contains_vertical_curtains", "r_band",
        "g_band", "b_band", "radiometric_interval_min",
        "radiometric_interval_max", "grid", "lowest_map_level",
        "highest_map_level", "harvesting_source"
    )
    for key in immutable_values:
        if getattr(browse_layer_model, key) != getattr(browse_layer, key):
            raise Exception("Cannot change immutable property '%s'." % key)

    mutable_values = [
        "title", "description", "browse_access_policy",
        "timedimension_default", "tile_query_limit", "strategy"
    ]

    refresh_mapcache_xml = False
    refresh_metadata = False
    for key in mutable_values:
        setattr(browse_layer_model, key, getattr(browse_layer, key))
        if key in ("timedimension_default", "tile_query_limit"):
            refresh_mapcache_xml = True
        if key in ("title", "description"):
            refresh_metadata = True

    # relatedDatasets are ignored (see NGEO-1508)
    # for related_dataset_id in browse_layer.related_dataset_ids:
    #     models.RelatedDataset.objects.get_or_create(
    #         dataset_id=related_dataset_id, browse_layer=browse_layer_model
    #     )
    # # remove all related datasets that are not referenced any more
    # models.RelatedDataset.objects.filter(
    #     browse_layer=browse_layer_model
    # ).exclude(
    #     dataset_id__in=browse_layer.related_dataset_ids
    # ).delete()

    browse_layer_model.full_clean()
    browse_layer_model.save()

    # update EOxServer layer metadata
    if refresh_metadata:
        dss = System.getRegistry().getFromFactory(
            "resources.coverages.wrappers.DatasetSeriesFactory",
            {"obj_id": browse_layer.id})
        dss._DatasetSeriesWrapper__model.layer_metadata.all().delete()
        if browse_layer.title:
            md_title = LayerMetadataRecord.objects.get_or_create(
                key="ows_title", value=str(browse_layer.title))[0]
            dss._DatasetSeriesWrapper__model.layer_metadata.add(md_title)
        if browse_layer.description:
            md_abstract = LayerMetadataRecord.objects.get_or_create(
                key="ows_abstract", value=str(browse_layer.description))[0]
            dss._DatasetSeriesWrapper__model.layer_metadata.add(md_abstract)

    if refresh_mapcache_xml:
        try:
            remove_mapcache_layer_xml(browse_layer, config)
        except LayerException:
            logger.info("Nothing to be removed. Layer disabled?")
        add_mapcache_layer_xml(browse_layer, config)

    # re-configure SxCat harvesting for collection
    harvesting_via_sxcat = False
    try:
        harvesting_via_sxcat = config.getboolean("control",
                                                 "harvesting_via_sxcat")
    except:
        pass
    if (harvesting_via_sxcat and browse_layer.harvesting_source and
            browse_layer.harvesting_source ==
            browse_layer_model.harvesting_source):
        add_collection(browse_layer)

    logger.info("Finished updating browse layer '%s'." % browse_layer.id)
def update_browse_layer(browse_layer, config=None):
    config = config or get_ngeo_config()

    try:
        logger.info("Fetching browse layer '%s' for update."
                    % browse_layer.id)
        browse_layer_model = models.BrowseLayer.objects.get(
            id=browse_layer.id)
    except models.BrowseLayer.DoesNotExist:
        raise Exception(
            "Could not update browse layer '%s' as it does not exist."
            % browse_layer.id
        )

    immutable_values = (
        "id", "browse_type", "contains_vertical_curtains", "r_band",
        "g_band", "b_band", "radiometric_interval_min",
        "radiometric_interval_max", "grid", "lowest_map_level",
        "highest_map_level"
    )
    for key in immutable_values:
        if getattr(browse_layer_model, key) != getattr(browse_layer, key):
            raise Exception("Cannot change immutable property '%s'." % key)

    mutable_values = [
        "title", "description", "browse_access_policy",
        "timedimension_default", "tile_query_limit", "strategy"
    ]

    refresh_mapcache_xml = False
    refresh_metadata = False
    for key in mutable_values:
        setattr(browse_layer_model, key, getattr(browse_layer, key))
        if key in ("timedimension_default", "tile_query_limit"):
            refresh_mapcache_xml = True
        if key in ("title", "description"):
            refresh_metadata = True

    # relatedDatasets are ignored (see NGEO-1508)
    # for related_dataset_id in browse_layer.related_dataset_ids:
    #     models.RelatedDataset.objects.get_or_create(
    #         dataset_id=related_dataset_id, browse_layer=browse_layer_model
    #     )
    # # remove all related datasets that are not referenced any more
    # models.RelatedDataset.objects.filter(
    #     browse_layer=browse_layer_model
    # ).exclude(
    #     dataset_id__in=browse_layer.related_dataset_ids
    # ).delete()

    browse_layer_model.full_clean()
    browse_layer_model.save()

    # update EOxServer layer metadata
    if refresh_metadata:
        dss = System.getRegistry().getFromFactory(
            "resources.coverages.wrappers.DatasetSeriesFactory",
            {"obj_id": browse_layer.id}
        )
        dss._DatasetSeriesWrapper__model.layer_metadata.all().delete()
        if browse_layer.title:
            md_title = LayerMetadataRecord.objects.get_or_create(
                key="ows_title", value=str(browse_layer.title))[0]
            dss._DatasetSeriesWrapper__model.layer_metadata.add(md_title)
        if browse_layer.description:
            md_abstract = LayerMetadataRecord.objects.get_or_create(
                key="ows_abstract", value=str(browse_layer.description))[0]
            dss._DatasetSeriesWrapper__model.layer_metadata.add(md_abstract)

    if refresh_mapcache_xml:
        try:
            remove_mapcache_layer_xml(browse_layer, config)
        except LayerException:
            logger.info("Nothing to be removed. Layer disabled?")
        add_mapcache_layer_xml(browse_layer, config)

    logger.info("Finished updating browse layer '%s'." % browse_layer.id)
def _wcst11AlterCapabilities(respSrc, OWS):
    conf = System.getConfig()
    regs = System.getRegistry()

    # get the service URL
    base_url = conf.getConfigValue("services.owscommon", "http_service_url")

    # =========================================================================
    # check the content
    try:
        # check if the WCST11 request handler is registered and enabled
        if not regs.getImplementationStatus(
                "services.ows.wcs11Transaction.WCS11TransactionHandler"):
            raise Exception("Operation handler is not enabled!")

        # check if payload contains XML content
        if respSrc.content_type not in ("text/xml", "application/xml"):
            raise Exception("Not XML!")

        # parse the original payload
        root = etree.fromstring(respSrc.content)

        # check the root element
        if splitQN(root.tag)[1] != "Capabilities":
            raise Exception("Not Capabilities!")

        # check version
        if not root.get('version', '').startswith(OWS.version):
            raise Exception("Not Capabilities version %s!" % OWS.version)

        # look for OperationsMetadata
        eOM = root.find(OWS.E_OperationsMetadata)

        # look for ServiceIdentification
        eSI = root.find(OWS.E_ServiceIdentification)

        if (eOM is None) and (eSI is None):
            raise Exception("No element to be altered has been found!")

    except Exception as e:
        # keep track of the failures
        logger.debug("_wcst11AlterCapabilities(): version %s : Content not "
                     "altered! reason: %s" % (OWS.version, str(e)))
        # return unaffected original response
        return respSrc

    # =========================================================================
    # insert new Profile element to ServiceIdentification
    # NOTE: the guard must be on eSI, since this block operates on the
    # ServiceIdentification element (the original mistakenly checked eOM)
    if eSI is not None:

        # insert sub-element before the selected elements
        def insertBefore(dst, src, before):
            # get the subelements
            elements = filter(lambda e: (e is not None),
                              map(lambda tag: dst.find(tag), before))
            try:
                # locate the first subelement
                dl = list(dst)
                idx = min(map(lambda e: dl.index(e), elements))
                # create element
                e = etree.Element(src)
                # insert element at the desired position
                dst.insert(idx, e)
            except:
                # simply append element at the end
                e = etree.SubElement(dst, src)
            return e

        before = (OWS11.E_Fees, OWS11.E_AccessConstraints)

        # ows:Profile - WCSt >>Multiple Actions<<
        if "True" == conf.getConfigValue("services.ows.wcst11",
                                         "allow_multiple_actions"):
            insertBefore(eSI, OWS.E_Profile, before).text = \
                "urn:ogc:extension:WCS:1.1:TransactionMultipleActions"

        # unpack the allowed actions
        allowedActions = conf.getConfigValue("services.ows.wcst11",
                                             "allowed_actions")
        allowedActions = set(filter(lambda s: s in ACTIONS_UPPER,
                                    map(lambda s: s.strip().upper(),
                                        allowedActions.split(","))))

        # annotate allowed actions
        for action in allowedActions:
            # ows:Profile - WCSt allowed action
            insertBefore(eSI, OWS.E_Profile, before).text = \
                "urn:ogc:extension:WCS:1.1:Transaction%s" % ACTIONS_U2N[action]

    # =========================================================================
    # insert new Operation element to OperationsMetadata
    if eOM is not None:

        # ows:Operation
        eOp = etree.SubElement(eOM, OWS.E_Operation, {A_name: "transaction"})

        # ows:DCP
        tmp = etree.SubElement(eOp, OWS.E_DCP)
        tmp = etree.SubElement(tmp, OWS.E_HTTP)
        tmp = etree.SubElement(tmp, OWS.E_Post,
                               {A_href: base_url, A_type: "simple"})

        # ows:Constraint
        if 1 < int(OWS.version[0]):
            tmp = etree.SubElement(tmp, OWS.E_Constraint,
                                   {A_name: "PostEncoding"})
            tmp = etree.SubElement(tmp, OWS.E_AllowedValues)
            tmp = etree.SubElement(tmp, OWS.E_Value)
            tmp.text = "XML"

        # ows:Parameter
        tmp = etree.SubElement(eOp, OWS.E_Parameter, {A_name: "service"})
        tmp = etree.SubElement(tmp, OWS.E_AllowedValues)
        tmp = etree.SubElement(tmp, OWS.E_Value)
        tmp.text = "WCS"

        # ows:Parameter
        tmp = etree.SubElement(eOp, OWS.E_Parameter, {A_name: "version"})
        tmp = etree.SubElement(tmp, OWS.E_AllowedValues)
        tmp = etree.SubElement(tmp, OWS.E_Value)
        tmp.text = "1.1"

    # =========================================================================
    # return the altered payload
    return Response(content=etree.tostring(root, "UTF-8"),
                    content_type=respSrc.content_type,
                    status=respSrc.status)
def delete_browse_layer(browse_layer, purge=False, config=None):
    config = config or get_ngeo_config()

    # only remove MapCache configuration in order to allow a roll-back
    # without data loss
    if models.BrowseLayer.objects.filter(id=browse_layer.id).exists():
        logger.info("Starting disabling of browse layer '%s'."
                    % browse_layer.id)
    else:
        raise Exception(
            "Could not disable browse layer '%s' as it does not exist."
            % browse_layer.id
        )

    # remove browse layer from MapCache XML
    remove_mapcache_layer_xml(browse_layer, config)

    logger.info("Finished disabling of browse layer '%s'." % browse_layer.id)

    if purge:
        logger.info("Starting purging of browse layer '%s'."
                    % browse_layer.id)

        # remove browse layer model. This should also delete all related
        # browses and browse reports
        models.BrowseLayer.objects.get(id=browse_layer.id).delete()

        # delete EOxServer layer metadata
        dss = System.getRegistry().getFromFactory(
            "resources.coverages.wrappers.DatasetSeriesFactory",
            {"obj_id": browse_layer.id}
        )
        dss._DatasetSeriesWrapper__model.layer_metadata.all().delete()

        # delete EOxServer dataset series
        dss_mgr = System.getRegistry().findAndBind(
            intf_id="resources.coverages.interfaces.Manager",
            params={
                "resources.coverages.interfaces.res_type": "eo.dataset_series"
            }
        )
        dss_mgr.delete(browse_layer.id)

        # remove source from mapcache sqlite
        mapcache_models.Source.objects.get(name=browse_layer.id).delete()

        # delete browse layer cache
        try:
            logger.info(
                "Deleting tileset for browse layer '%s'." % browse_layer.id
            )
            os.remove(get_tileset_path(browse_layer.browse_type))
        except OSError:
            # when no browse was ingested, the sqlite file does not exist, so
            # just issue a warning
            logger.warning(
                "Could not remove tileset '%s'."
                % get_tileset_path(browse_layer.browse_type)
            )

        # delete all optimized files by deleting the whole directory of the
        # layer
        optimized_dir = get_project_relative_path(join(
            config.get(INGEST_SECTION, "optimized_files_dir"), browse_layer.id
        ))
        try:
            logger.info(
                "Deleting optimized images for browse layer '%s'."
                % browse_layer.id
            )
            shutil.rmtree(optimized_dir)
        except OSError:
            logger.error(
                "Could not remove directory for optimized files: '%s'."
                % optimized_dir
            )

        logger.info("Finished purging of browse layer '%s'."
                    % browse_layer.id)
def _get_id_factory(self):
    return System.getRegistry().bind(
        "resources.coverages.wrappers.DatasetSeriesFactory"
    )
def getDatasetSeriesById(self, eoid):
    return System.getRegistry().getFromFactory(
        "resources.coverages.wrappers.DatasetSeriesFactory",
        {"obj_id": eoid}
    )
def __init__(self):
    super(BaseManagerContainerMixIn, self).__init__()
    self.rect_dataset_mgr = System.getRegistry().bind(
        "resources.coverages.managers.RectifiedDatasetManager"
    )
def _create_contained(self, container, data_sources):
    # TODO: make this more efficient by using updateModel()
    new_datasets = []
    for data_source in data_sources:
        locations = data_source.detect()
        logger.info("Detected locations: %s" % [
            location.getPath() for location in locations
        ])
        for location in locations:
            md_location = self._guess_metadata_location(location)
            data_package = self._create_data_package(location, md_location)

            coverage_factory = System.getRegistry().bind(
                "resources.coverages.wrappers.EOCoverageFactory"
            )
            filter_exprs = [System.getRegistry().getFromFactory(
                "resources.coverages.filters.CoverageExpressionFactory", {
                    "op_name": "referenced_by",
                    "operands": (location,)
                }
            )]
            existing_coverages = coverage_factory.find(
                impl_ids=[
                    "resources.coverages.wrappers.RectifiedDatasetWrapper",
                    "resources.coverages.wrappers.ReferenceableDatasetWrapper"
                ],
                filter_exprs=filter_exprs
            )

            if len(existing_coverages) == 1:
                coverage = existing_coverages[0]
                logger.info("Add %s (%s) to %s." % (
                    coverage.getCoverageId(), coverage.getType(),
                    container.getType()
                ))
                container.addCoverage(coverage)
                new_datasets.append(coverage)

            else:
                eo_metadata = data_package.readEOMetadata()
                coverage_id_mgr = CoverageIdManager()
                coverage_id = coverage_id_mgr.reserve(
                    eo_metadata.getEOID()
                )
                try:
                    range_type_name = self._get_contained_range_type_name(
                        container, location
                    )
                    if container.getType() == "eo.rect_stitched_mosaic":
                        default_srid = container.getSRID()
                    else:
                        default_srid = None

                    logger.info("Creating new coverage with ID %s."
                                % coverage_id)
                    # TODO: implement creation of ReferenceableDatasets,
                    # RectifiedStitchedMosaics for DatasetSeriesManager
                    new_dataset = self.rect_dataset_mgr.create(
                        coverage_id,
                        location=location,
                        md_location=md_location,
                        range_type_name=range_type_name,
                        data_source=data_source,
                        container=container,
                        default_srid=default_srid
                    )
                    logger.info("Done creating new coverage with ID %s."
                                % coverage_id)
                    new_datasets.append(new_dataset)
                finally:
                    coverage_id_mgr.release(coverage_id)

    return new_datasets
def getStitchedMosaicById(self, cid):
    return System.getRegistry().getFromFactory(
        "resources.coverages.wrappers.EOCoverageFactory",
        {"obj_id": cid}
    )
def handle(self, *args, **kwargs):
    System.init()

    # parse command arguments
    self.verbosity = int(kwargs.get("verbosity", 1))
    traceback = kwargs.get("traceback", False)
    self.set_up_logging(["ngeo_browse_server"], self.verbosity, traceback)

    logger.info("Starting browse export from command line.")

    browse_layer_id = kwargs.get("browse_layer_id")
    browse_type = kwargs.get("browse_type")
    if not browse_layer_id and not browse_type:
        logger.error("No browse layer or browse type was specified.")
        raise CommandError("No browse layer or browse type was specified.")
    elif browse_layer_id and browse_type:
        logger.error("Both browse layer and browse type were specified.")
        raise CommandError("Both browse layer and browse type were "
                           "specified.")

    start = kwargs.get("start")
    end = kwargs.get("end")
    compression = kwargs.get("compression")
    export_cache = kwargs["export_cache"]
    output_path = kwargs.get("output_path")

    # parse start/end if given
    if start:
        start = getDateTime(start)
    if end:
        end = getDateTime(end)

    if not output_path:
        output_path = package.generate_filename(compression)

    with package.create(output_path, compression) as p:
        # query the browse layer
        if browse_layer_id:
            try:
                browse_layer_model = BrowseLayer.objects.get(
                    id=browse_layer_id
                )
            except BrowseLayer.DoesNotExist:
                logger.error("Browse layer '%s' does not exist"
                             % browse_layer_id)
                raise CommandError("Browse layer '%s' does not exist"
                                   % browse_layer_id)
        else:
            try:
                browse_layer_model = BrowseLayer.objects.get(
                    browse_type=browse_type
                )
            except BrowseLayer.DoesNotExist:
                logger.error("Browse layer with browse type '%s' does "
                             "not exist" % browse_type)
                raise CommandError("Browse layer with browse type '%s' does "
                                   "not exist" % browse_type)

        browse_layer = browselayer_data.BrowseLayer.from_model(
            browse_layer_model
        )
        p.set_browse_layer(
            serialize_browse_layers((browse_layer,), pretty_print=True)
        )

        # query browse reports; optionally filter for start/end time
        browse_reports_qs = BrowseReport.objects.all()

        # apply start/end filter
        if start and not end:
            browse_reports_qs = browse_reports_qs.filter(
                browses__start_time__gte=start
            )
        elif end and not start:
            browse_reports_qs = browse_reports_qs.filter(
                browses__end_time__lte=end
            )
        elif start and end:
            browse_reports_qs = browse_reports_qs.filter(
                browses__start_time__gte=start,
                browses__end_time__lte=end
            )

        # use count annotation to exclude all browse reports with no browses
        browse_reports_qs = browse_reports_qs.annotate(
            browse_count=Count('browses')
        ).filter(browse_layer=browse_layer_model, browse_count__gt=0)

        # iterate over all browse reports
        for browse_report_model in browse_reports_qs:
            browses_qs = Browse.objects.filter(
                browse_report=browse_report_model
            )
            if start:
                browses_qs = browses_qs.filter(start_time__gte=start)
            if end:
                browses_qs = browses_qs.filter(end_time__lte=end)

            browse_report = browsereport_data.BrowseReport.from_model(
                browse_report_model, browses_qs
            )

            # iterate over all browses in the query
            for browse, browse_model in izip(browse_report, browses_qs):
                coverage_wrapper = System.getRegistry().getFromFactory(
                    "resources.coverages.wrappers.EOCoverageFactory",
                    {"obj_id": browse_model.coverage_id}
                )

                # set the filenames of the exported browse artifacts
                base_filename = browse_model.coverage_id
                data_filename = base_filename + ".tif"
                md_filename = base_filename + ".xml"
                footprint_filename = base_filename + ".wkb"

                browse._file_name = data_filename

                # add optimized browse image to package
                data_package = coverage_wrapper.getData()
                data_package.prepareAccess()
                browse_file_path = data_package.getGDALDatasetIdentifier()
                with open(browse_file_path) as f:
                    p.add_browse(f, data_filename)

                wkb = coverage_wrapper.getFootprint().wkb
                p.add_footprint(footprint_filename, wkb)

                if export_cache:
                    time_model = mapcache_models.Time.objects.get(
                        start_time__lte=browse_model.start_time,
                        end_time__gte=browse_model.end_time,
                        source__name=browse_layer_model.id
                    )

                    # get "dim" parameter
                    dim = (isotime(time_model.start_time) + "/" +
                           isotime(time_model.end_time))

                    # exit if a merged browse is found
                    if dim != (isotime(browse_model.start_time) + "/" +
                               isotime(browse_model.end_time)):
                        logger.error("Browse layer '%s' contains merged "
                                     "browses and exporting of cache is "
                                     "requested. Try without exporting "
                                     "the cache." % browse_layer_model.id)
                        raise CommandError("Browse layer '%s' contains "
                                           "merged browses and exporting "
                                           "of cache is requested. Try "
                                           "without exporting the cache."
                                           % browse_layer_model.id)

                    # get path to sqlite tileset and open it
                    ts = tileset.open(
                        get_tileset_path(browse_layer.browse_type)
                    )

                    for tile_desc in ts.get_tiles(
                        browse_layer.id,
                        URN_TO_GRID[browse_layer.grid], dim=dim,
                        minzoom=browse_layer.highest_map_level,
                        maxzoom=browse_layer.lowest_map_level
                    ):
                        p.add_cache_file(*tile_desc)

            # save browse report xml and add it to the package
            p.add_browse_report(
                serialize_browse_report(browse_report, pretty_print=True),
                name="%s_%s_%s_%s.xml" % (
                    browse_report.browse_type,
                    browse_report.responsible_org_name,
                    browse_report.date_time.strftime("%Y%m%d%H%M%S%f"),
                    uuid.uuid4().hex
                )
            )

    logger.info("Successfully finished browse export from command line.")
def __init__(self):
    super(DatasetSeriesManager, self).__init__()
    self.dataset_series_factory = self.id_factory
    self.data_source_factory = System.getRegistry().bind(
        "resources.coverages.data.DataSourceFactory"
    )