def verify_store(self, store, workspace=DEFAULT_WORKSPACE):
    """Ensure *store* exists in GeoServer, creating it when missing.

    The store is looked up in the catalog first; when the lookup raises
    ``FailedRequestError`` a new PostGIS-backed datastore is created from
    the configured datastore database settings and saved.
    """
    catalog = gs_catalog
    try:
        get_store(catalog, store, workspace=workspace)
    except FailedRequestError:
        # Store is missing: create it and wire it to the configured DB.
        new_store = catalog.create_datastore(store, workspace=workspace)
        db_settings = ogc_server_settings.datastore_db
        engine = 'postgis' if 'postgis' in db_settings['ENGINE'] else db_settings['ENGINE']
        new_store.connection_parameters.update(host=db_settings['HOST'],
                                               port=db_settings['PORT'],
                                               database=db_settings['NAME'],
                                               user=db_settings['USER'],
                                               passwd=db_settings['PASSWORD'],
                                               dbtype=engine)
        catalog.save(new_store)
def verify_store(self, store, workspace=DEFAULT_WORKSPACE):
    """Make sure the named *store* is present in the GeoServer catalog.

    On a failed catalog lookup a datastore is created and configured from
    ``ogc_server_settings.datastore_db``, then persisted.
    """
    cat = gs_catalog
    try:
        get_store(cat, store, workspace=workspace)
    except FailedRequestError:
        # No such store yet — build one from the datastore DB settings.
        ds = cat.create_datastore(store, workspace=workspace)
        db = ogc_server_settings.datastore_db
        if 'postgis' in db['ENGINE']:
            db_engine = 'postgis'
        else:
            db_engine = db['ENGINE']
        ds.connection_parameters.update(
            host=db['HOST'], port=db['PORT'], database=db['NAME'],
            user=db['USER'], passwd=db['PASSWORD'], dbtype=db_engine)
        cat.save(ds)
def __init__(self, storename=None, workspace=DEFAULT_WORKSPACE, owner=None):
    """Bind this helper to a GeoServer datastore, creating it when absent.

    :param storename: store name; defaults to the configured datastore DB
        name when None.
    :param workspace: GeoServer workspace to look the store up in.
    :param owner: owning profile; defaults to the first superuser when None.

    Fixes over the previous version:
    - Defaults were evaluated once at class-definition time, which froze
      the superuser lookup and issued a DB query on import. They are now
      resolved lazily per call.
    - ``e.message`` is Python-2-only; ``str(e)`` is portable.
    """
    if storename is None:
        storename = ogc_server_settings.datastore_db['NAME']
    if owner is None:
        owner = Profile.objects.filter(is_superuser=True).first()
    try:
        self.store = get_store(gs_catalog, storename, workspace)
    except FailedRequestError as e:
        logger.warning(str(e))
        # Store lookup failed: create a fresh datastore with this name.
        self.store = create_datastore(store_name=storename)
    self.storename = storename
    self.workspace = workspace
    self.owner = owner
def create_feature_store(cat, workspace):
    """Return the GeoNode feature datastore, creating it when absent.

    Looks up ``ogc_server_settings.DATASTORE`` in *cat*; when missing, a
    new datastore is created. Either way the connection parameters are
    refreshed from the configured datastore database and persisted
    (as a PUT update when the store already existed).
    """
    store_name = ogc_server_settings.DATASTORE
    found = False
    try:
        ds = get_store(cat, store_name, workspace=workspace)
        found = True
    except FailedRequestError:
        # No store yet — create one, to be configured below.
        ds = cat.create_datastore(store_name, workspace=workspace)
    db = ogc_server_settings.datastore_db
    engine = 'postgis' if 'postgis' in db['ENGINE'] else db['ENGINE']
    # PORT may be an int in settings; GeoServer wants a string.
    port = db['PORT'] if isinstance(db['PORT'], basestring) else str(db['PORT']) or '5432'
    ds.connection_parameters.update({
        'validate connections': 'true',
        'max connections': '10',
        'min connections': '1',
        'fetch size': '1000',
        'host': db['HOST'],
        'port': port,
        'database': db['NAME'],
        'user': db['USER'],
        'passwd': db['PASSWORD'],
        'dbtype': engine
    })
    if found:
        # Updating an existing remote object requires a PUT, not a POST.
        ds.save_method = "PUT"
    cat.save(ds)
    return get_store(cat, store_name, workspace=workspace)
def get_store_connection(storename, workspace=None):
    """Build a database connection string for the given GeoServer store.

    Host/port/database come from the store's connection parameters and
    fall back to the configured datastore database when a key is missing;
    credentials always come from the settings.
    """
    from geonode.geoserver.helpers import (get_store, gs_catalog)
    from django.conf import settings
    workspace = workspace or settings.DEFAULT_WORKSPACE
    store = get_store(gs_catalog, storename, workspace)
    db = ogc_server_settings.datastore_db
    params = store.connection_parameters
    try:
        db_name = params.get('database')
        host = params['host']
        port = params['port']
    except KeyError:
        # Store does not carry full connection info: use the settings.
        db_name = db['NAME']
        host = db['HOST']
        port = db['PORT']
    return DataManager.build_connection_string(
        host, db_name, db['USER'], db['PASSWORD'],
        int(port) if port else 5432)
def import_imagemosaic_granules(spatial_files, append_to_mosaic_opts, append_to_mosaic_name,
                                mosaic_time_regex, mosaic_time_value,
                                time_presentation, time_presentation_res,
                                time_presentation_default_value,
                                time_presentation_reference_value):
    """Create (or append granules to) a GeoServer ImageMosaic.

    Renames granule files so the time token extracted via
    *mosaic_time_regex* is embedded in the filename, writes the
    ImageMosaic ``.properties`` configuration files next to the granules,
    zips the first granule plus the configuration, and posts it to
    GeoServer. Returns ``(mosaic_name, files_to_upload)`` for a new
    mosaic, or ``(append_to_mosaic_name, files_to_upload)`` when
    appending.

    Raises ``UploadException`` when no usable time regex is found or the
    configured datastore is not PostGIS-backed.
    """
    # The very first step is to rename the granule by adding the selected regex
    # matching value to the filename.
    f = spatial_files[0].base_file
    dirname = os.path.dirname(f)
    basename = os.path.basename(f)
    head, tail = os.path.splitext(basename)

    if not mosaic_time_regex:
        mosaic_time_regex, mosaic_time_format = _get_time_regex(
            spatial_files, basename)

    # 0. A Time Regex is mandartory to validate the files
    if not mosaic_time_regex:
        raise UploadException(
            _("Could not find any valid Time Regex for the Mosaic files."))

    for spatial_file in spatial_files:
        f = spatial_file.base_file
        basename = os.path.basename(f)
        head, tail = os.path.splitext(basename)
        regexp = re.compile(mosaic_time_regex)
        if regexp.match(head).groups():
            mosaic_time_value = regexp.match(head).groups()[0]
            head = head.replace(
                regexp.match(head).groups()[0], '{mosaic_time_value}')
        if mosaic_time_value:
            # Rename the granule so the time token is part of the filename.
            dst_file = os.path.join(
                dirname,
                head.replace('{mosaic_time_value}', mosaic_time_value) + tail)
            os.rename(f, dst_file)
            spatial_file.base_file = dst_file

    # We use the GeoServer REST APIs in order to create the ImageMosaic
    # and later add the granule through the GeoServer Importer.
    head = head.replace('{mosaic_time_value}', '')
    head = re.sub('^[^a-zA-z]*|[^a-zA-Z]*$', '', head)

    # 1. Create a zip file containing the ImageMosaic .properties files
    # 1a. Let's check and prepare the DB based DataStore
    cat = gs_catalog
    workspace = cat.get_workspace(settings.DEFAULT_WORKSPACE)
    db = ogc_server_settings.datastore_db
    db_engine = 'postgis' if \
        'postgis' in db['ENGINE'] else db['ENGINE']

    if not db_engine == 'postgis':
        raise UploadException(_("Unsupported DataBase for Mosaics!"))

    # dsname = ogc_server_settings.DATASTORE
    dsname = db['NAME']
    ds_exists = False
    try:
        ds = get_store(cat, dsname, workspace=workspace)
        ds_exists = (ds is not None)
    except FailedRequestError:
        # Datastore is missing: create and configure it from settings.
        ds = cat.create_datastore(dsname, workspace=workspace)
        db = ogc_server_settings.datastore_db
        db_engine = 'postgis' if \
            'postgis' in db['ENGINE'] else db['ENGINE']
        ds.connection_parameters.update({
            'validate connections': 'true',
            'max connections': '10',
            'min connections': '1',
            'fetch size': '1000',
            'host': db['HOST'],
            'port': db['PORT'] if isinstance(db['PORT'], basestring) else str(db['PORT']) or '5432',
            'database': db['NAME'],
            'user': db['USER'],
            'passwd': db['PASSWORD'],
            'dbtype': db_engine
        })
        cat.save(ds)
        ds = get_store(cat, dsname, workspace=workspace)
        ds_exists = (ds is not None)

    if not ds_exists:
        raise UploadException(_("Unsupported DataBase for Mosaics!"))

    # Template substitution context for the .properties files below.
    context = {
        "abs_path_flag": "True",
        "time_attr": "time",
        "aux_metadata_flag": "False",
        "mosaic_time_regex": mosaic_time_regex,
        "db_host": db['HOST'],
        "db_port": db['PORT'],
        "db_name": db['NAME'],
        "db_user": db['USER'],
        "db_password": db['PASSWORD'],
        "db_conn_timeout": db['CONN_TOUT'] if 'CONN_TOUT' in db else "10",
        "db_conn_min": db['CONN_MIN'] if 'CONN_MIN' in db else "1",
        "db_conn_max": db['CONN_MAX'] if 'CONN_MAX' in db else "5",
        "db_conn_validate": db['CONN_VALIDATE'] if 'CONN_VALIDATE' in db else "true",
    }

    if mosaic_time_regex:
        indexer_template = """AbsolutePath={abs_path_flag}
TimeAttribute={time_attr}
Schema= the_geom:Polygon,location:String,{time_attr}:java.util.Date
PropertyCollectors=TimestampFileNameExtractorSPI[timeregex]({time_attr})
CheckAuxiliaryMetadata={aux_metadata_flag}
SuggestedSPI=it.geosolutions.imageioimpl.plugins.tiff.TIFFImageReaderSpi"""

        timeregex_template = """regex=(?<=_)({mosaic_time_regex})"""

        if not os.path.exists(dirname + '/timeregex.properties'):
            with open(dirname + '/timeregex.properties', 'w') as timeregex_prop_file:
                timeregex_prop_file.write(timeregex_template.format(**context))
    else:
        indexer_template = """AbsolutePath={abs_path_flag}
Schema= the_geom:Polygon,location:String,{time_attr}
CheckAuxiliaryMetadata={aux_metadata_flag}
SuggestedSPI=it.geosolutions.imageioimpl.plugins.tiff.TIFFImageReaderSpi"""

    datastore_template = r"""SPI=org.geotools.data.postgis.PostgisNGDataStoreFactory
host={db_host}
port={db_port}
database={db_name}
user={db_user}
passwd={db_password}
Loose\ bbox=true
Estimated\ extends=false
validate\ connections={db_conn_validate}
Connection\ timeout={db_conn_timeout}
min\ connections={db_conn_min}
max\ connections={db_conn_max}"""

    if not os.path.exists(dirname + '/indexer.properties'):
        with open(dirname + '/indexer.properties', 'w') as indexer_prop_file:
            indexer_prop_file.write(indexer_template.format(**context))

    if not os.path.exists(dirname + '/datastore.properties'):
        with open(dirname + '/datastore.properties', 'w') as datastore_prop_file:
            datastore_prop_file.write(datastore_template.format(**context))

    files_to_upload = []
    if not append_to_mosaic_opts and spatial_files:
        z = zipfile.ZipFile(dirname + '/' + head + '.zip', "w")
        for spatial_file in spatial_files:
            f = spatial_file.base_file
            dst_basename = os.path.basename(f)
            dst_head, dst_tail = os.path.splitext(dst_basename)
            if not files_to_upload:
                # Let's import only the first granule
                z.write(spatial_file.base_file, arcname=dst_head + dst_tail)
                files_to_upload.append(spatial_file.base_file)
        if os.path.exists(dirname + '/indexer.properties'):
            z.write(dirname + '/indexer.properties', arcname='indexer.properties')
        if os.path.exists(dirname + '/datastore.properties'):
            z.write(dirname + '/datastore.properties', arcname='datastore.properties')
        if mosaic_time_regex:
            z.write(dirname + '/timeregex.properties', arcname='timeregex.properties')
        z.close()

        # 2. Send a "create ImageMosaic" request to GeoServer through gs_config
        cat._cache.clear()
        # - name = name of the ImageMosaic (equal to the base_name)
        # - data = abs path to the zip file
        # - configure = parameter allows for future configuration after harvesting
        name = head
        data = open(dirname + '/' + head + '.zip', 'rb')
        try:
            cat.create_imagemosaic(name, data)
        except ConflictingDataError:
            # Trying to append granules to an existing mosaic
            pass

        # configure time as LIST
        if mosaic_time_regex:
            set_time_dimension(cat, name, workspace,
                               time_presentation, time_presentation_res,
                               time_presentation_default_value,
                               time_presentation_reference_value)

        # - since GeoNode will upload the first granule again through the Importer, we need to /
        #   delete the one created by the gs_config
        # mosaic_delete_first_granule(cat, name)
        if len(spatial_files) > 1:
            spatial_files = spatial_files[0]
        return head, files_to_upload
    else:
        # Appending to an existing mosaic: just reset the catalog caches.
        cat._cache.clear()
        cat.reset()
        # cat.reload()
        return append_to_mosaic_name, files_to_upload
def reload_data(self, geonode_layer):
    """Re-import all features of *geonode_layer* from the Esri service.

    Opens the layer's backing database table through OGR, deletes every
    existing feature, then re-creates features from ``iter(self)`` inside
    a single transaction (rolled back on any failure). Returns True on
    success, False on failure; updates ``self.task`` status around the
    operation when a task is attached.
    """
    if self.task:
        self.task.status = "IN_PROGRESS"
        self.task.save()
    # To get layer name from alternate as it is the same as DB table name and geoserver layer name
    self.config_obj.name = geonode_layer.alternate.split(':')[-1]
    self.config_obj.overwrite = True
    feature_iter = iter(self)
    gtype = self.esri_serializer.get_geometry_type()
    store = get_store(gs_catalog, geonode_layer.store, geonode_layer.workspace)
    # get database name and schema name from layer datastore
    # TODO: get all parameters for the datastore
    # TODO: find a way to pass the database password also , as it is encrypted in the datastore.
    db_connection = get_connection(
        database_name=store.connection_parameters['database'],
        schema=store.connection_parameters.get('schema', 'public'))
    with OSGEOManager.open_source(db_connection, update_enabled=1) as source:
        geoserver_layer = gs_catalog.get_layer(geonode_layer.alternate)
        # pass native_name to GetLayer as it represents the table name
        layer = source.GetLayer(geoserver_layer.resource.native_name)
        try:
            layer.StartTransaction()
            # remove all features
            # Note: remove features one by one allow to rollback if the error raised
            # TODO: check if truncating the table is possible to enhance the performance
            old_feature = layer.GetNextFeature()
            while old_feature:
                layer.DeleteFeature(old_feature.GetFID())
                old_feature = layer.GetNextFeature()
            # TODO: reset FID sequence otherwise new FIDs will be generated
            # build fields is mandatory for domain fields and subtypes
            self.esri_serializer.build_fields()
            # set outSR by destination layer wkid, to retrieve the features with matched projection
            self.set_out_sr(
                int(layer.GetSpatialRef().GetAuthorityCode(None)))
            # importing the features again
            for next_feature in feature_iter:
                self.create_feature(layer, next_feature, gtype)
            layer.CommitTransaction()
            geoserver_pub = GeoserverPublisher()
            # remove layer caching to update rendering.
            # otherwise changes will not be rendered until layer refreshed
            geoserver_pub.remove_cached(geonode_layer.typename)
            if self.task:
                self.task.status = "FINISHED"
                self.task.save()
        # TODO: check the which exceptions should be handled
        except (StopIteration, EsriFeatureLayerException, ConnectionError, BaseException) as e:
            # Any failure undoes the whole delete+reimport atomically.
            layer.RollbackTransaction()
            logger.error(e)
            return False
        else:
            return True
def test_save_and_delete_signals(self):
    """Test that GeoServer Signals methods work as expected.

    Exercises the post-save signal handlers, a round-trip upload through
    ``geoserver_upload``, attribute/style synchronization, thumbnail
    creation, and finally the pre-delete handler plus cleanup.
    """
    layers = Layer.objects.all()[:2].values_list('id', flat=True)
    test_perm_layer = Layer.objects.get(id=layers[0])
    self.client.login(username='******', password='******')
    if check_ogc_backend(geoserver.BACKEND_PACKAGE):
        from geonode.geoserver.signals import (geoserver_pre_delete,
                                               geoserver_post_save,
                                               geoserver_post_save_local)
        # Handle Layer Save and Upload Signals
        geoserver_post_save(test_perm_layer, sender=Layer)
        geoserver_post_save_local(test_perm_layer)
        # Check instance bbox and links
        self.assertIsNotNone(test_perm_layer.bbox)
        self.assertIsNotNone(test_perm_layer.srid)
        self.assertIsNotNone(test_perm_layer.link_set)
        self.assertEquals(len(test_perm_layer.link_set.all()), 7)
        # Layer Manipulation
        from geonode.geoserver.upload import geoserver_upload
        from geonode.geoserver.signals import gs_catalog
        from geonode.geoserver.helpers import (
            check_geoserver_is_up, get_sld_for, fixup_style, set_layer_style,
            get_store, set_attributes_from_geoserver, set_styles,
            create_gs_thumbnail, cleanup)
        check_geoserver_is_up()
        admin_user = get_user_model().objects.get(username="******")
        saved_layer = geoserver_upload(
            test_perm_layer,
            os.path.join(gisdata.VECTOR_DATA, "san_andres_y_providencia_poi.shp"),
            admin_user,
            test_perm_layer.name,
            overwrite=True)
        self.assertIsNotNone(saved_layer)
        _log(saved_layer)
        workspace, name = test_perm_layer.alternate.split(':')
        self.assertIsNotNone(workspace)
        self.assertIsNotNone(name)
        ws = gs_catalog.get_workspace(workspace)
        self.assertIsNotNone(ws)
        store = get_store(gs_catalog, name, workspace=ws)
        _log("1. ------------ %s " % store)
        self.assertIsNotNone(store)
        # Save layer attributes
        set_attributes_from_geoserver(test_perm_layer)
        # Save layer styles
        set_styles(test_perm_layer, gs_catalog)
        # set SLD
        sld = test_perm_layer.default_style.sld_body if test_perm_layer.default_style else None
        if sld:
            _log("2. ------------ %s " % sld)
            set_layer_style(test_perm_layer, test_perm_layer.alternate, sld)
        fixup_style(gs_catalog, test_perm_layer.alternate, None)
        self.assertIsNone(get_sld_for(gs_catalog, test_perm_layer))
        _log("3. ------------ %s " % get_sld_for(gs_catalog, test_perm_layer))
        create_gs_thumbnail(test_perm_layer, overwrite=True)
        self.assertIsNotNone(test_perm_layer.get_thumbnail_url())
        self.assertTrue(test_perm_layer.has_thumbnail())
        # Handle Layer Delete Signals
        geoserver_pre_delete(test_perm_layer, sender=Layer)
        # Check instance has been removed from GeoServer also
        from geonode.geoserver.views import get_layer_capabilities
        self.assertIsNone(get_layer_capabilities(test_perm_layer))
        # Cleaning Up
        test_perm_layer.delete()
        cleanup(test_perm_layer.name, test_perm_layer.uuid)
def import_imagemosaic_granules(
        spatial_files,
        append_to_mosaic_opts,
        append_to_mosaic_name,
        mosaic_time_regex,
        mosaic_time_value,
        time_presentation,
        time_presentation_res,
        time_presentation_default_value,
        time_presentation_reference_value):
    """Create (or append granules to) a GeoServer ImageMosaic.

    Renames the granule files so the time token matched by
    *mosaic_time_regex* is embedded in the filename, writes the
    ImageMosaic ``.properties`` configuration files, zips the first
    granule plus configuration and posts it to GeoServer. Returns
    ``(mosaic_name, files_to_upload)`` for a new mosaic, or
    ``(append_to_mosaic_name, files_to_upload)`` when appending.

    Raises ``UploadException`` when no valid time regex is found or the
    configured datastore is not PostGIS-backed.

    Fix: ``datastore_template`` is now a raw string (matching the sibling
    implementation); the ``\\ `` sequences are not valid escape sequences
    in a normal literal and would become SyntaxWarnings/errors on newer
    Pythons. The produced text is unchanged.
    """
    # The very first step is to rename the granule by adding the selected regex
    # matching value to the filename.
    f = spatial_files[0].base_file
    dirname = os.path.dirname(f)
    basename = os.path.basename(f)
    head, tail = os.path.splitext(basename)

    if not mosaic_time_regex:
        mosaic_time_regex, mosaic_time_format = _get_time_regex(spatial_files, basename)

    # 0. A Time Regex is mandartory to validate the files
    if not mosaic_time_regex:
        raise UploadException(_("Could not find any valid Time Regex for the Mosaic files."))

    for spatial_file in spatial_files:
        f = spatial_file.base_file
        basename = os.path.basename(f)
        head, tail = os.path.splitext(basename)
        regexp = re.compile(mosaic_time_regex)
        if regexp.match(head).groups():
            mosaic_time_value = regexp.match(head).groups()[0]
            head = head.replace(regexp.match(head).groups()[0], '{mosaic_time_value}')
        if mosaic_time_value:
            # Rename the granule so the time token is part of the filename.
            dst_file = os.path.join(
                dirname,
                head.replace('{mosaic_time_value}', mosaic_time_value) + tail)
            os.rename(f, dst_file)
            spatial_file.base_file = dst_file

    # We use the GeoServer REST APIs in order to create the ImageMosaic
    # and later add the granule through the GeoServer Importer.
    head = head.replace('{mosaic_time_value}', '')
    head = re.sub('^[^a-zA-z]*|[^a-zA-Z]*$', '', head)

    # 1. Create a zip file containing the ImageMosaic .properties files
    # 1a. Let's check and prepare the DB based DataStore
    cat = gs_catalog
    workspace = cat.get_workspace(settings.DEFAULT_WORKSPACE)
    db = ogc_server_settings.datastore_db
    db_engine = 'postgis' if \
        'postgis' in db['ENGINE'] else db['ENGINE']

    if not db_engine == 'postgis':
        raise UploadException(_("Unsupported DataBase for Mosaics!"))

    # dsname = ogc_server_settings.DATASTORE
    dsname = db['NAME']
    ds_exists = False
    try:
        ds = get_store(cat, dsname, workspace=workspace)
        ds_exists = (ds is not None)
    except FailedRequestError:
        # Datastore is missing: create and configure it from settings.
        ds = cat.create_datastore(dsname, workspace=workspace)
        db = ogc_server_settings.datastore_db
        db_engine = 'postgis' if \
            'postgis' in db['ENGINE'] else db['ENGINE']
        ds.connection_parameters.update(
            {'validate connections': 'true',
             'max connections': '10',
             'min connections': '1',
             'fetch size': '1000',
             'host': db['HOST'],
             'port': db['PORT'] if isinstance(
                 db['PORT'], basestring) else str(db['PORT']) or '5432',
             'database': db['NAME'],
             'user': db['USER'],
             'passwd': db['PASSWORD'],
             'dbtype': db_engine}
        )
        cat.save(ds)
        ds = get_store(cat, dsname, workspace=workspace)
        ds_exists = (ds is not None)

    if not ds_exists:
        raise UploadException(_("Unsupported DataBase for Mosaics!"))

    # Template substitution context for the .properties files below.
    context = {
        "abs_path_flag": "True",
        "time_attr": "time",
        "aux_metadata_flag": "False",
        "mosaic_time_regex": mosaic_time_regex,
        "db_host": db['HOST'],
        "db_port": db['PORT'],
        "db_name": db['NAME'],
        "db_user": db['USER'],
        "db_password": db['PASSWORD'],
        "db_conn_timeout": db['CONN_TOUT'] if 'CONN_TOUT' in db else "10",
        "db_conn_min": db['CONN_MIN'] if 'CONN_MIN' in db else "1",
        "db_conn_max": db['CONN_MAX'] if 'CONN_MAX' in db else "5",
        "db_conn_validate": db['CONN_VALIDATE'] if 'CONN_VALIDATE' in db else "true",
    }

    if mosaic_time_regex:
        indexer_template = """AbsolutePath={abs_path_flag}
TimeAttribute={time_attr}
Schema= the_geom:Polygon,location:String,{time_attr}:java.util.Date
PropertyCollectors=TimestampFileNameExtractorSPI[timeregex]({time_attr})
CheckAuxiliaryMetadata={aux_metadata_flag}
SuggestedSPI=it.geosolutions.imageioimpl.plugins.tiff.TIFFImageReaderSpi"""

        timeregex_template = """regex=(?<=_)({mosaic_time_regex})"""

        if not os.path.exists(dirname + '/timeregex.properties'):
            with open(dirname + '/timeregex.properties', 'w') as timeregex_prop_file:
                timeregex_prop_file.write(timeregex_template.format(**context))
    else:
        indexer_template = """AbsolutePath={abs_path_flag}
Schema= the_geom:Polygon,location:String,{time_attr}
CheckAuxiliaryMetadata={aux_metadata_flag}
SuggestedSPI=it.geosolutions.imageioimpl.plugins.tiff.TIFFImageReaderSpi"""

    # Raw string: the escaped spaces must reach the .properties file verbatim.
    datastore_template = r"""SPI=org.geotools.data.postgis.PostgisNGDataStoreFactory
host={db_host}
port={db_port}
database={db_name}
user={db_user}
passwd={db_password}
Loose\ bbox=true
Estimated\ extends=false
validate\ connections={db_conn_validate}
Connection\ timeout={db_conn_timeout}
min\ connections={db_conn_min}
max\ connections={db_conn_max}"""

    if not os.path.exists(dirname + '/indexer.properties'):
        with open(dirname + '/indexer.properties', 'w') as indexer_prop_file:
            indexer_prop_file.write(indexer_template.format(**context))

    if not os.path.exists(dirname + '/datastore.properties'):
        with open(dirname + '/datastore.properties', 'w') as datastore_prop_file:
            datastore_prop_file.write(datastore_template.format(**context))

    files_to_upload = []
    if not append_to_mosaic_opts and spatial_files:
        z = zipfile.ZipFile(dirname + '/' + head + '.zip', "w")
        for spatial_file in spatial_files:
            f = spatial_file.base_file
            dst_basename = os.path.basename(f)
            dst_head, dst_tail = os.path.splitext(dst_basename)
            if not files_to_upload:
                # Let's import only the first granule
                z.write(spatial_file.base_file, arcname=dst_head + dst_tail)
                files_to_upload.append(spatial_file.base_file)
        if os.path.exists(dirname + '/indexer.properties'):
            z.write(dirname + '/indexer.properties', arcname='indexer.properties')
        if os.path.exists(dirname + '/datastore.properties'):
            z.write(
                dirname + '/datastore.properties',
                arcname='datastore.properties')
        if mosaic_time_regex:
            z.write(
                dirname + '/timeregex.properties',
                arcname='timeregex.properties')
        z.close()

        # 2. Send a "create ImageMosaic" request to GeoServer through gs_config
        cat._cache.clear()
        # - name = name of the ImageMosaic (equal to the base_name)
        # - data = abs path to the zip file
        # - configure = parameter allows for future configuration after harvesting
        name = head
        data = open(dirname + '/' + head + '.zip', 'rb')
        try:
            cat.create_imagemosaic(name, data)
        except ConflictingDataError:
            # Trying to append granules to an existing mosaic
            pass

        # configure time as LIST
        if mosaic_time_regex:
            set_time_dimension(
                cat,
                name,
                workspace,
                time_presentation,
                time_presentation_res,
                time_presentation_default_value,
                time_presentation_reference_value)

        # - since GeoNode will upload the first granule again through the Importer, we need to /
        #   delete the one created by the gs_config
        # mosaic_delete_first_granule(cat, name)
        if len(spatial_files) > 1:
            spatial_files = spatial_files[0]
        return head, files_to_upload
    else:
        # Appending to an existing mosaic: just reset the catalog caches.
        cat._cache.clear()
        cat.reset()
        # cat.reload()
        return append_to_mosaic_name, files_to_upload
def get_store_schema(storename=None):
    """Return the schema configured on a GeoServer store.

    Falls back to the configured datastore DB name when *storename* is
    empty/None, and to 'public' when the store has no schema parameter.
    """
    target = storename or ogc_server_settings.datastore_db.get('NAME')
    store = get_store(gs_catalog, target, settings.DEFAULT_WORKSPACE)
    return store.connection_parameters.get('schema', 'public')
def get_gs_store(storename=None, workspace=DEFAULT_WORKSPACE):
    """Fetch a store from the GeoServer catalog.

    Defaults to the configured datastore database name when *storename*
    is empty/None.
    """
    name = storename if storename else ogc_server_settings.datastore_db.get('NAME', None)
    return get_store(gs_catalog, name, workspace)
def publish_resource_geonode(store, resource_name, workspace_name='geonode',
                             execute_signals=True, user="******", verbosity=1,
                             permissions=None, ignore_errors=True):
    """Register GeoServer resources of *store* as GeoNode Layer objects.

    For every catalog resource whose name appears in *resource_name*, a
    Layer is created or updated (bbox, srid, permissions, attributes),
    signals are optionally fired, and stale metadata links are repaired.
    Errors are either recorded (``ignore_errors=True``) or re-raised.

    NOTE(review): ``output`` is accumulated but not returned within this
    block — confirm the remainder of the function (not visible here)
    returns it to the caller.
    """
    output = {
        'stats': {
            'failed': 0,
            'updated': 0,
            'created': 0,
            'deleted': 0,
        },
        'layers': [],
        'deleted_layers': []
    }
    cat = gs_catalog
    owner = get_valid_user(user)
    workspace = cat.get_workspace(workspace_name)
    store = get_store(cat, store, workspace=workspace)
    if store is None:
        rsc = []
    else:
        # Only publish the resources explicitly named by the caller.
        resources = cat.get_resources(stores=[store])
        rsc = [k for k in resources if '%s' % (k.name) in resource_name]
    for resource in rsc:
        name = resource.name
        the_store = resource.store
        workspace = the_store.workspace
        try:
            layer, created = Layer.objects.get_or_create(
                name=name,
                workspace=workspace.name,
                defaults={
                    # "workspace": workspace.name,
                    "store": the_store.name,
                    "storeType": the_store.resource_type,
                    "alternate": "%s:%s" % (workspace.name, resource.name),
                    "title": resource.title or 'No title provided',
                    "abstract": resource.abstract or u"{}".format(_('No abstract provided')),
                    "owner": owner,
                    "uuid": str(uuid.uuid4())
                })
            # print("laayer", layer)
            # print("created", created)
            layer.bbox_x0 = Decimal(resource.native_bbox[0])
            layer.bbox_x1 = Decimal(resource.native_bbox[1])
            layer.bbox_y0 = Decimal(resource.native_bbox[2])
            layer.bbox_y1 = Decimal(resource.native_bbox[3])
            layer.srid = resource.projection
            # sync permissions in GeoFence
            perm_spec = json.loads(_perms_info_json(layer))
            layer.set_permissions(perm_spec)
            # recalculate the layer statistics
            set_attributes_from_geoserver(layer, overwrite=True)
            # in some cases we need to explicitily save the resource to execute the signals
            # (for sure when running updatelayers)
            if execute_signals:
                layer.save()
            # Fix metadata links if the ip has changed
            if layer.link_set.metadata().count() > 0:
                if not created and settings.SITEURL not in layer.link_set.metadata()[0].url:
                    layer.link_set.metadata().delete()
                    layer.save()
                    metadata_links = []
                    for link in layer.link_set.metadata():
                        metadata_links.append((link.mime, link.name, link.url))
                    resource.metadata_links = metadata_links
                    cat.save(resource)
        except Exception as e:
            print("ERROR: ", e)
            if ignore_errors:
                status = 'failed'
                exception_type, error, traceback = sys.exc_info()
            else:
                if verbosity > 0:
                    msg = "Stopping process because --ignore-errors was not set and an error was found."
                    print(msg, file=sys.stderr)
                raise_(
                    Exception,
                    Exception("Failed to process {}".format(resource.name), e),
                    sys.exc_info()[2])
        else:
            if created:
                if not permissions:
                    layer.set_default_permissions()
                else:
                    layer.set_permissions(permissions)
                status = 'created'
                output['stats']['created'] += 1
            else:
                status = 'updated'
                output['stats']['updated'] += 1
def create_geoserver_layer(name, user, srid, overwrite=False, title=None,
                           abstract=None, charset='UTF-8'):
    """Publish a vector feature type named *name* on GeoServer.

    Verifies that an existing store/resource with the same name (if any)
    has a compatible resource type, publishes the feature type against
    the GeoNode feature datastore, then creates and assigns an SLD style.
    Returns the published gs_resource.

    NOTE(review): ``overwrite`` is enforced with ``assert`` — stripped
    under ``python -O``; consider raising instead. ``title``, ``abstract``
    and ``charset`` are accepted but not used in this block.
    """
    if "geonode.geoserver" in settings.INSTALLED_APPS:
        _user, _password = ogc_server_settings.credentials
        #
        # Step 2. Check that it is uploading to the same resource type as
        # the existing resource
        logger.info(
            '>>> Step 2. Make sure we are not trying to overwrite a '
            'existing resource named [%s] with the wrong type', name)
        the_layer_type = "vector"
        # Get a short handle to the gsconfig geoserver catalog
        cat = Catalog(ogc_server_settings.internal_rest, _user, _password)
        workspace = cat.get_default_workspace()
        # Check if the store exists in geoserver
        try:
            store = get_store(cat, name, workspace=workspace)
        except FailedRequestError as e:
            # There is no store, ergo the road is clear
            pass
        else:
            # If we get a store, we do the following:
            resources = store.get_resources()
            # If the store is empty, we just delete it.
            if len(resources) == 0:
                cat.delete(store)
            else:
                # If our resource is already configured in the store it needs
                # to have the right resource type
                for resource in resources:
                    if resource.name == name:
                        msg = 'Name already in use and overwrite is False'
                        assert overwrite, msg
                        existing_type = resource.resource_type
                        if existing_type != the_layer_type:
                            msg = ('Type of uploaded file %s (%s) '
                                   'does not match type of existing '
                                   'resource type '
                                   '%s' % (name, the_layer_type, existing_type))
                            logger.info(msg)
                            raise GeoNodeException(msg)
        logger.debug('Creating vector layer: [%s]', name)
        ds = create_feature_store(cat, workspace)
        gs_resource = gs_catalog.publish_featuretype(name, ds, "EPSG:" + str(srid))
        #
        # Step 7. Create the style and assign it to the created resource
        #
        # FIXME: Put this in gsconfig.py
        logger.info('>>> Step 7. Creating style for [%s]' % name)
        publishing = cat.get_layer(name)
        create_style()
        sld = get_sld_for(gs_catalog, publishing)
        style = None
        if sld is not None:
            try:
                cat.create_style(name, sld)
                style = cat.get_style(name)
            except geoserver.catalog.ConflictingDataError as e:
                msg = ('There was already a style named %s in GeoServer, '
                       'try to use: "%s"' % (name + "_layer", str(e)))
                logger.warn(msg)
                e.args = (msg,)
                # Second attempt under an alternate name; fall back to the
                # built-in 'point' style when that also conflicts.
                try:
                    cat.create_style(name + '_layer', sld)
                    style = cat.get_style(name + "_layer")
                except geoserver.catalog.ConflictingDataError as e:
                    style = cat.get_style('point')
                    msg = ('There was already a style named %s in GeoServer, '
                           'cannot overwrite: "%s"' % (name, str(e)))
                    logger.error(msg)
                    e.args = (msg,)
        # FIXME: Should we use the fully qualified typename?
        publishing.default_style = style
        cat.save(publishing)
        return gs_resource
def test_layer_upload_with_time(self):
    """ Try uploading a layer and verify that the user can administrate
    his own layer despite not being a site administrator.

    Also enables the WMS time dimension on the uploaded feature type via
    the GeoServer REST API and checks the advertised time values.
    """
    try:
        # user without change_layer_style cannot edit it
        self.assertTrue(self.client.login(username='******', password='******'))
        # grab bobby
        bobby = get_user_model().objects.get(username="******")
        anonymous_group, created = Group.objects.get_or_create(
            name='anonymous')
        # Upload to GeoServer
        saved_layer = geoserver_upload(
            Layer(),
            os.path.join(
                gisdata.GOOD_DATA,
                'time/'
                "boxes_with_date.shp"),
            bobby,
            'boxes_with_date_by_bobby',
            overwrite=True)
        # Test that layer owner can wipe GWC Cache
        ignore_errors = False
        skip_unadvertised = False
        skip_geonode_registered = False
        remove_deleted = True
        verbosity = 2
        owner = bobby
        workspace = 'geonode'
        filter = None
        store = None
        permissions = {
            'users': {
                "bobby": ['view_resourcebase', 'change_layer_data']
            },
            'groups': {
                anonymous_group: ['view_resourcebase']
            },
        }
        gs_slurp(ignore_errors,
                 verbosity=verbosity,
                 owner=owner,
                 workspace=workspace,
                 store=store,
                 filter=filter,
                 skip_unadvertised=skip_unadvertised,
                 skip_geonode_registered=skip_geonode_registered,
                 remove_deleted=remove_deleted,
                 permissions=permissions,
                 execute_signals=True)
        saved_layer = Layer.objects.get(title='boxes_with_date_by_bobby')
        check_layer(saved_layer)
        from lxml import etree
        from geonode.geoserver.helpers import get_store
        from geonode.geoserver.signals import gs_catalog
        self.assertIsNotNone(saved_layer)
        workspace, name = saved_layer.alternate.split(':')
        self.assertIsNotNone(workspace)
        self.assertIsNotNone(name)
        ws = gs_catalog.get_workspace(workspace)
        self.assertIsNotNone(ws)
        store = get_store(gs_catalog, saved_layer.store, workspace=ws)
        self.assertIsNotNone(store)
        # Talk to the GeoServer REST API directly to toggle dimensions.
        url = settings.OGC_SERVER['default']['LOCATION']
        user = settings.OGC_SERVER['default']['USER']
        passwd = settings.OGC_SERVER['default']['PASSWORD']
        rest_path = 'rest/workspaces/geonode/datastores/{lyr_name}/featuretypes/{lyr_name}.xml'.\
            format(lyr_name=name)
        import requests
        from requests.auth import HTTPBasicAuth
        r = requests.get(url + rest_path, auth=HTTPBasicAuth(user, passwd))
        self.assertEquals(r.status_code, 200)
        _log(r.text)
        featureType = etree.ElementTree(etree.fromstring(r.text))
        metadata = featureType.findall('./[metadata]')
        self.assertEquals(len(metadata), 0)
        payload = """<featureType>
        <metadata>
            <entry key="elevation">
                <dimensionInfo>
                    <enabled>false</enabled>
                </dimensionInfo>
            </entry>
            <entry key="time">
                <dimensionInfo>
                    <enabled>true</enabled>
                    <attribute>date</attribute>
                    <presentation>LIST</presentation>
                    <units>ISO8601</units>
                    <defaultValue/>
                    <nearestMatchEnabled>false</nearestMatchEnabled>
                </dimensionInfo>
            </entry>
        </metadata></featureType>"""
        r = requests.put(url + rest_path,
                         data=payload,
                         headers={'Content-type': 'application/xml'},
                         auth=HTTPBasicAuth(user, passwd))
        self.assertEquals(r.status_code, 200)
        r = requests.get(url + rest_path, auth=HTTPBasicAuth(user, passwd))
        self.assertEquals(r.status_code, 200)
        _log(r.text)
        featureType = etree.ElementTree(etree.fromstring(r.text))
        metadata = featureType.findall('./[metadata]')
        _log(etree.tostring(metadata[0], encoding='utf8', method='xml'))
        self.assertEquals(len(metadata), 1)
        saved_layer.set_default_permissions()
        from geonode.geoserver.views import get_layer_capabilities
        capab = get_layer_capabilities(saved_layer, tolerant=True)
        self.assertIsNotNone(capab)
        wms_capabilities_url = reverse('capabilities_layer', args=[saved_layer.id])
        wms_capabilities_resp = self.client.get(wms_capabilities_url)
        self.assertTrue(wms_capabilities_resp.status_code, 200)
        all_times = None
        if wms_capabilities_resp.status_code >= 200 and wms_capabilities_resp.status_code < 400:
            wms_capabilities = wms_capabilities_resp.getvalue()
            if wms_capabilities:
                namespaces = {
                    'wms': 'http://www.opengis.net/wms',
                    'xlink': 'http://www.w3.org/1999/xlink',
                    'xsi': 'http://www.w3.org/2001/XMLSchema-instance'
                }
                e = etree.fromstring(wms_capabilities)
                for atype in e.findall(
                        "./[wms:Name='%s']/wms:Dimension[@name='time']" % (saved_layer.alternate), namespaces):
                    dim_name = atype.get('name')
                    if dim_name:
                        dim_name = str(dim_name).lower()
                        if dim_name == 'time':
                            dim_values = atype.text
                            if dim_values:
                                all_times = dim_values.split(",")
                                break
        self.assertIsNotNone(all_times)
        self.assertEquals(all_times, [
            '2000-03-01T00:00:00.000Z', '2000-03-02T00:00:00.000Z',
            '2000-03-03T00:00:00.000Z', '2000-03-04T00:00:00.000Z',
            '2000-03-05T00:00:00.000Z', '2000-03-06T00:00:00.000Z',
            '2000-03-07T00:00:00.000Z', '2000-03-08T00:00:00.000Z',
            '2000-03-09T00:00:00.000Z', '2000-03-10T00:00:00.000Z',
            '2000-03-11T00:00:00.000Z', '2000-03-12T00:00:00.000Z',
            '2000-03-13T00:00:00.000Z', '2000-03-14T00:00:00.000Z',
            '2000-03-15T00:00:00.000Z', '2000-03-16T00:00:00.000Z',
            '2000-03-17T00:00:00.000Z', '2000-03-18T00:00:00.000Z',
            '2000-03-19T00:00:00.000Z', '2000-03-20T00:00:00.000Z',
            '2000-03-21T00:00:00.000Z', '2000-03-22T00:00:00.000Z',
            '2000-03-23T00:00:00.000Z', '2000-03-24T00:00:00.000Z',
            '2000-03-25T00:00:00.000Z', '2000-03-26T00:00:00.000Z',
            '2000-03-27T00:00:00.000Z', '2000-03-28T00:00:00.000Z',
            '2000-03-29T00:00:00.000Z', '2000-03-30T00:00:00.000Z',
            '2000-03-31T00:00:00.000Z', '2000-04-01T00:00:00.000Z',
            '2000-04-02T00:00:00.000Z', '2000-04-03T00:00:00.000Z',
            '2000-04-04T00:00:00.000Z', '2000-04-05T00:00:00.000Z',
            '2000-04-06T00:00:00.000Z', '2000-04-07T00:00:00.000Z',
            '2000-04-08T00:00:00.000Z', '2000-04-09T00:00:00.000Z',
            '2000-04-10T00:00:00.000Z', '2000-04-11T00:00:00.000Z',
            '2000-04-12T00:00:00.000Z', '2000-04-13T00:00:00.000Z',
            '2000-04-14T00:00:00.000Z', '2000-04-15T00:00:00.000Z',
            '2000-04-16T00:00:00.000Z', '2000-04-17T00:00:00.000Z',
            '2000-04-18T00:00:00.000Z', '2000-04-19T00:00:00.000Z',
            '2000-04-20T00:00:00.000Z', '2000-04-21T00:00:00.000Z',
            '2000-04-22T00:00:00.000Z', '2000-04-23T00:00:00.000Z',
            '2000-04-24T00:00:00.000Z', '2000-04-25T00:00:00.000Z',
            '2000-04-26T00:00:00.000Z', '2000-04-27T00:00:00.000Z',
            '2000-04-28T00:00:00.000Z', '2000-04-29T00:00:00.000Z',
            '2000-04-30T00:00:00.000Z', '2000-05-01T00:00:00.000Z',
            '2000-05-02T00:00:00.000Z', '2000-05-03T00:00:00.000Z',
            '2000-05-04T00:00:00.000Z', '2000-05-05T00:00:00.000Z',
            '2000-05-06T00:00:00.000Z', '2000-05-07T00:00:00.000Z',
            '2000-05-08T00:00:00.000Z', '2000-05-09T00:00:00.000Z',
            '2000-05-10T00:00:00.000Z', '2000-05-11T00:00:00.000Z',
            '2000-05-12T00:00:00.000Z', '2000-05-13T00:00:00.000Z',
            '2000-05-14T00:00:00.000Z', '2000-05-15T00:00:00.000Z',
            '2000-05-16T00:00:00.000Z', '2000-05-17T00:00:00.000Z',
            '2000-05-18T00:00:00.000Z', '2000-05-19T00:00:00.000Z',
            '2000-05-20T00:00:00.000Z', '2000-05-21T00:00:00.000Z',
            '2000-05-22T00:00:00.000Z', '2000-05-23T00:00:00.000Z',
            '2000-05-24T00:00:00.000Z', '2000-05-25T00:00:00.000Z',
            '2000-05-26T00:00:00.000Z', '2000-05-27T00:00:00.000Z',
            '2000-05-28T00:00:00.000Z', '2000-05-29T00:00:00.000Z',
            '2000-05-30T00:00:00.000Z', '2000-05-31T00:00:00.000Z',
            '2000-06-01T00:00:00.000Z', '2000-06-02T00:00:00.000Z',
            '2000-06-03T00:00:00.000Z', '2000-06-04T00:00:00.000Z',
            '2000-06-05T00:00:00.000Z', '2000-06-06T00:00:00.000Z',
            '2000-06-07T00:00:00.000Z', '2000-06-08T00:00:00.000Z'
        ])
        saved_layer.set_default_permissions()
        url = reverse('layer_metadata', args=[saved_layer.service_typename])
        resp = self.client.get(url)
        self.assertEquals(resp.status_code, 200)
    finally:
        # Clean up and completely delete the layer
        try:
            saved_layer.delete()
            if check_ogc_backend(geoserver.BACKEND_PACKAGE):
                from geonode.geoserver.helpers import cleanup
                cleanup(saved_layer.name, saved_layer.uuid)
        except BaseException:
            pass
def test_layer_upload_with_time(self):
    """Upload a time-enabled layer as a non-admin user and verify he can
    administrate his own layer despite not being a site administrator.

    Exercises the full round trip: shapefile upload to GeoServer, gs_slurp
    registration with per-user/group permissions, enabling the 'time'
    dimension on the feature type via the GeoServer REST API, and checking
    that the WMS capabilities document exposes the expected time values.
    """
    try:
        # user without change_layer_style cannot edit it
        self.assertTrue(self.client.login(username='******', password='******'))

        # grab bobby
        bobby = get_user_model().objects.get(username="******")
        anonymous_group, _ = Group.objects.get_or_create(name='anonymous')

        # Upload to GeoServer
        saved_layer = geoserver_upload(
            Layer(),
            os.path.join(
                gisdata.GOOD_DATA,
                'time/'
                "boxes_with_date.shp"),
            bobby,
            'boxes_with_date_by_bobby',
            overwrite=True
        )

        # Test that layer owner can wipe GWC Cache
        ignore_errors = False
        skip_unadvertised = False
        skip_geonode_registered = False
        remove_deleted = True
        verbosity = 2
        owner = bobby
        workspace = 'geonode'
        # NOTE: avoid shadowing the builtin `filter`; gs_slurp's kwarg keeps its name
        layer_filter = None
        store = None
        permissions = {
            'users': {"bobby": ['view_resourcebase', 'change_layer_data']},
            'groups': {anonymous_group: ['view_resourcebase']},
        }

        # Register the uploaded layer in GeoNode and apply the permissions above
        gs_slurp(
            ignore_errors,
            verbosity=verbosity,
            owner=owner,
            workspace=workspace,
            store=store,
            filter=layer_filter,
            skip_unadvertised=skip_unadvertised,
            skip_geonode_registered=skip_geonode_registered,
            remove_deleted=remove_deleted,
            permissions=permissions,
            execute_signals=True)

        saved_layer = Layer.objects.get(title='boxes_with_date_by_bobby')
        check_layer(saved_layer)

        from lxml import etree
        from geonode.geoserver.helpers import get_store
        from geonode.geoserver.signals import gs_catalog

        self.assertIsNotNone(saved_layer)
        workspace, name = saved_layer.alternate.split(':')
        self.assertIsNotNone(workspace)
        self.assertIsNotNone(name)
        ws = gs_catalog.get_workspace(workspace)
        self.assertIsNotNone(ws)
        store = get_store(gs_catalog, saved_layer.store, workspace=ws)
        self.assertIsNotNone(store)

        # Talk directly to the GeoServer REST API for the feature type config
        url = settings.OGC_SERVER['default']['LOCATION']
        user = settings.OGC_SERVER['default']['USER']
        passwd = settings.OGC_SERVER['default']['PASSWORD']

        rest_path = 'rest/workspaces/geonode/datastores/{lyr_name}/featuretypes/{lyr_name}.xml'.\
            format(lyr_name=name)
        import requests
        from requests.auth import HTTPBasicAuth
        r = requests.get(url + rest_path, auth=HTTPBasicAuth(user, passwd))
        self.assertEquals(r.status_code, 200)
        _log(r.text)

        # A freshly uploaded feature type must have no dimension metadata yet
        featureType = etree.ElementTree(etree.fromstring(r.text))
        metadata = featureType.findall('./[metadata]')
        self.assertEquals(len(metadata), 0)

        # Enable the 'time' dimension on the 'date' attribute (LIST presentation)
        payload = """<featureType>
        <metadata>
            <entry key="elevation">
                <dimensionInfo>
                    <enabled>false</enabled>
                </dimensionInfo>
            </entry>
            <entry key="time">
                <dimensionInfo>
                    <enabled>true</enabled>
                    <attribute>date</attribute>
                    <presentation>LIST</presentation>
                    <units>ISO8601</units>
                    <defaultValue/>
                    <nearestMatchEnabled>false</nearestMatchEnabled>
                </dimensionInfo>
            </entry>
        </metadata></featureType>"""

        r = requests.put(url + rest_path,
                         data=payload,
                         headers={
                             'Content-type': 'application/xml'
                         },
                         auth=HTTPBasicAuth(user, passwd))
        self.assertEquals(r.status_code, 200)

        # Re-fetch: the metadata section must now be present
        r = requests.get(url + rest_path, auth=HTTPBasicAuth(user, passwd))
        self.assertEquals(r.status_code, 200)
        _log(r.text)

        featureType = etree.ElementTree(etree.fromstring(r.text))
        metadata = featureType.findall('./[metadata]')
        _log(etree.tostring(metadata[0], encoding='utf8', method='xml'))
        self.assertEquals(len(metadata), 1)

        saved_layer.set_default_permissions()
        from geonode.geoserver.views import get_layer_capabilities
        capab = get_layer_capabilities(saved_layer, tolerant=True)
        self.assertIsNotNone(capab)
        wms_capabilities_url = reverse('capabilities_layer', args=[saved_layer.id])
        wms_capabilities_resp = self.client.get(wms_capabilities_url)
        # BUGFIX: was assertTrue(status_code, 200) — the 200 was silently
        # treated as the failure *message*, so any truthy status passed.
        self.assertEquals(wms_capabilities_resp.status_code, 200)

        # Extract the time dimension values advertised for this layer
        all_times = None
        if wms_capabilities_resp.status_code >= 200 and wms_capabilities_resp.status_code < 400:
            wms_capabilities = wms_capabilities_resp.getvalue()
            if wms_capabilities:
                namespaces = {'wms': 'http://www.opengis.net/wms',
                              'xlink': 'http://www.w3.org/1999/xlink',
                              'xsi': 'http://www.w3.org/2001/XMLSchema-instance'}
                e = etree.fromstring(wms_capabilities)
                for atype in e.findall(
                        "./[wms:Name='%s']/wms:Dimension[@name='time']" % (saved_layer.alternate), namespaces):
                    dim_name = atype.get('name')
                    if dim_name:
                        dim_name = str(dim_name).lower()
                        if dim_name == 'time':
                            dim_values = atype.text
                            if dim_values:
                                all_times = dim_values.split(",")
                                break

        self.assertIsNotNone(all_times)
        # The fixture covers 100 consecutive days: 2000-03-01 .. 2000-06-08.
        # Generate the expected ISO-8601 values instead of 100 hand-written
        # literals (identical strings, far easier to audit).
        from datetime import datetime, timedelta
        start_day = datetime(2000, 3, 1)
        expected_times = [
            (start_day + timedelta(days=day)).strftime('%Y-%m-%dT00:00:00.000Z')
            for day in range(100)
        ]
        self.assertEquals(all_times, expected_times)

        saved_layer.set_default_permissions()
        url = reverse('layer_metadata', args=[saved_layer.service_typename])
        resp = self.client.get(url)
        self.assertEquals(resp.status_code, 200)
    finally:
        # Clean up and completely delete the layer (best effort: the test
        # must not mask the original failure with a cleanup error)
        try:
            saved_layer.delete()
            if check_ogc_backend(geoserver.BACKEND_PACKAGE):
                from geonode.geoserver.helpers import cleanup
                cleanup(saved_layer.name, saved_layer.uuid)
        except BaseException:
            pass
def test_save_and_delete_signals(self):
    """Test that GeoServer Signals methods work as expected.

    Drives the save/upload signal handlers against a live GeoServer, then
    manipulates the layer (attributes, styles, thumbnail) and finally runs
    the pre-delete handler, verifying the layer disappears from GeoServer.
    """
    layers = Layer.objects.all()[:2].values_list('id', flat=True)
    test_perm_layer = Layer.objects.get(id=layers[0])
    self.client.login(username='******', password='******')
    if check_ogc_backend(geoserver.BACKEND_PACKAGE):
        from geonode.geoserver.signals import (geoserver_pre_delete,
                                               geoserver_post_save,
                                               geoserver_post_save_local)
        # Handle Layer Save and Upload Signals
        geoserver_post_save(test_perm_layer, sender=Layer)
        geoserver_post_save_local(test_perm_layer)
        # Check instance bbox and links
        self.assertIsNotNone(test_perm_layer.bbox)
        self.assertIsNotNone(test_perm_layer.srid)
        self.assertIsNotNone(test_perm_layer.link_set)
        # post-save is expected to create exactly 9 links for this layer
        self.assertEquals(len(test_perm_layer.link_set.all()), 9)
        # Layer Manipulation
        from geonode.geoserver.upload import geoserver_upload
        from geonode.geoserver.signals import gs_catalog
        from geonode.geoserver.helpers import (check_geoserver_is_up,
                                               get_sld_for,
                                               fixup_style,
                                               set_layer_style,
                                               get_store,
                                               set_attributes_from_geoserver,
                                               set_styles,
                                               create_gs_thumbnail,
                                               cleanup)
        # fail fast if the GeoServer backend is not reachable
        check_geoserver_is_up()
        admin_user = get_user_model().objects.get(username="******")
        # re-upload the fixture shapefile over the existing layer
        saved_layer = geoserver_upload(
            test_perm_layer,
            os.path.join(
                gisdata.VECTOR_DATA,
                "san_andres_y_providencia_poi.shp"),
            admin_user,
            test_perm_layer.name,
            overwrite=True
        )
        self.assertIsNotNone(saved_layer)
        _log(saved_layer)
        workspace, name = test_perm_layer.alternate.split(':')
        self.assertIsNotNone(workspace)
        self.assertIsNotNone(name)
        ws = gs_catalog.get_workspace(workspace)
        self.assertIsNotNone(ws)
        store = get_store(gs_catalog, name, workspace=ws)
        _log("1. ------------ %s " % store)
        self.assertIsNotNone(store)
        # Save layer attributes
        set_attributes_from_geoserver(test_perm_layer)
        # Save layer styles
        set_styles(test_perm_layer, gs_catalog)
        # set SLD
        sld = test_perm_layer.default_style.sld_body if test_perm_layer.default_style else None
        if sld:
            _log("2. ------------ %s " % sld)
            set_layer_style(test_perm_layer, test_perm_layer.alternate, sld)
        # after fixup, no SLD should remain associated with the layer
        fixup_style(gs_catalog, test_perm_layer.alternate, None)
        self.assertIsNone(get_sld_for(gs_catalog, test_perm_layer))
        _log("3. ------------ %s " % get_sld_for(gs_catalog, test_perm_layer))
        create_gs_thumbnail(test_perm_layer, overwrite=True)
        self.assertIsNotNone(test_perm_layer.get_thumbnail_url())
        self.assertTrue(test_perm_layer.has_thumbnail())
        # Handle Layer Delete Signals
        geoserver_pre_delete(test_perm_layer, sender=Layer)
        # Check instance has been removed from GeoServer also
        from geonode.geoserver.views import get_layer_capabilities
        self.assertIsNone(get_layer_capabilities(test_perm_layer))
        # Cleaning Up
        test_perm_layer.delete()
        cleanup(test_perm_layer.name, test_perm_layer.uuid)