def setUp(self):
    """Prepare fixtures shared by the tests in this case.

    Resolves the sample shapefile shipped with gisdata, expands it into
    its component files, and creates a single test layer plus handles to
    the GeoServer catalog and resource manager.
    """
    self.files = os.path.join(gisdata.GOOD_DATA, "vector/san_andres_y_providencia_water.shp")
    # get_files returns (mapping of extension -> path, temp directory)
    self.files_as_dict, self.tmpdir = get_files(self.files)
    self.cat = gs_catalog
    self.user = get_user_model().objects.get(username="******")
    # System under test: a freshly-created layer for the sample shapefile.
    self.sut = create_single_layer("san_andres_y_providencia_water.shp")
    self.sut.name = 'san_andres_y_providencia_water'
    self.sut.save()
    self.geoserver_url = settings.GEOSERVER_LOCATION
    self.geoserver_manager = GeoServerResourceManager()
def upload(self, fpath, use_url=False, import_id=None):
    """Run a full import: open a session and upload *fpath*.

    fpath may point at a zip archive, or at the 'main' file of a
    shapefile or a tiff. Returns an uploader.api.Session object.

    use_url - when True, POST a URL to geoserver rather than the file
    itself (for now this only works with actual files, not remote urls).
    import_id - when given, PUT to the endpoint so the session is
    created with this specific id.
    """
    if fpath.lower().endswith(".shp"):
        # A shapefile needs all of its sidecar files; de-duplicate paths.
        upload_files = list(set(get_files(fpath).values()))
    else:
        upload_files = [fpath]
    session = self.start_import(import_id)
    session.upload_task(upload_files, use_url)
    return session
def upload(self, fpath, use_url=False, import_id=None):
    """Perform a complete import of *fpath* through a fresh session.

    fpath can be a path to a zip file or the 'main' file if a shapefile
    or a tiff; returns a uploader.api.Session object.

    use_url - if True, will post a URL to geoserver, not the file itself
    (for now, this only works with actual files, not remote urls).
    import_id - if provided, PUT to the endpoint to create the
    specified id.
    """
    is_shapefile = fpath.lower().endswith(".shp")
    # Shapefiles expand to their unique set of component files.
    files = list(set(get_files(fpath).values())) if is_shapefile else [fpath]
    session = self.start_import(import_id)
    session.upload_task(files, use_url)
    return session
def test_get_files(self):
    """Exercise get_files() component discovery for Shapefiles.

    Covers: complete component sets, missing members, SLD side-cars,
    upper/mixed-case extensions, and the error cases where both cases
    of the same extension coexist (only on case-sensitive OSes).

    Fixes: Python-2-only dict.iteritems() -> items(); deprecated
    assertEquals -> assertEqual; file creation uses context managers.
    """

    def _make_files(d, names):
        # Create empty placeholder files for each name inside d.
        for f in names:
            with open(os.path.join(d, f), 'w'):
                pass

    def _relative(d, gotten):
        # Strip the temp-dir prefix so results compare by basename.
        return dict((k, v[len(d) + 1:]) for k, v in gotten.items())

    # Check that a well-formed Shapefile has its components all picked up
    d = tempfile.mkdtemp()
    try:
        _make_files(d, ("foo.shp", "foo.shx", "foo.prj", "foo.dbf"))
        gotten_files = _relative(d, get_files(os.path.join(d, "foo.shp")))
        self.assertEqual(
            gotten_files,
            dict(base="foo.shp", shp="foo.shp", shx="foo.shx",
                 prj="foo.prj", dbf="foo.dbf"))
    finally:
        shutil.rmtree(d)

    # Check that a Shapefile missing required components raises an exception
    d = tempfile.mkdtemp()
    try:
        _make_files(d, ("foo.shp", "foo.shx", "foo.prj"))
        self.assertRaises(GeoNodeException,
                          lambda: get_files(os.path.join(d, "foo.shp")))
    finally:
        shutil.rmtree(d)

    # Check that including an SLD with a valid shapefile results in the SLD
    # getting picked up
    d = tempfile.mkdtemp()
    try:
        _make_files(d, ("foo.shp", "foo.shx", "foo.prj", "foo.dbf", "foo.sld"))
        gotten_files = _relative(d, get_files(os.path.join(d, "foo.shp")))
        self.assertEqual(
            gotten_files,
            dict(base="foo.shp", shp="foo.shp", shx="foo.shx",
                 prj="foo.prj", dbf="foo.dbf", sld="foo.sld"))
    finally:
        shutil.rmtree(d)

    # Check that capitalized extensions are ok
    d = tempfile.mkdtemp()
    try:
        _make_files(d, ("foo.SHP", "foo.SHX", "foo.PRJ", "foo.DBF"))
        gotten_files = _relative(d, get_files(os.path.join(d, "foo.SHP")))
        self.assertEqual(
            gotten_files,
            dict(base="foo.SHP", shp="foo.SHP", shx="foo.SHX",
                 prj="foo.PRJ", dbf="foo.DBF"))
    finally:
        shutil.rmtree(d)

    # Check that mixed capital and lowercase extensions are ok
    d = tempfile.mkdtemp()
    try:
        _make_files(d, ("foo.SHP", "foo.shx", "foo.pRJ", "foo.DBF"))
        gotten_files = _relative(d, get_files(os.path.join(d, "foo.SHP")))
        self.assertEqual(
            gotten_files,
            dict(base="foo.SHP", shp="foo.SHP", shx="foo.shx",
                 prj="foo.pRJ", dbf="foo.DBF"))
    finally:
        shutil.rmtree(d)

    # Check that including both capital and lowercase extensions raises an
    # exception
    d = tempfile.mkdtemp()
    try:
        files = ("foo.SHP", "foo.SHX", "foo.PRJ", "foo.DBF",
                 "foo.shp", "foo.shx", "foo.prj", "foo.dbf")
        _make_files(d, files)
        # Only run the tests if this is a case sensitive OS
        if len(os.listdir(d)) == len(files):
            self.assertRaises(GeoNodeException,
                              lambda: get_files(os.path.join(d, "foo.SHP")))
            self.assertRaises(GeoNodeException,
                              lambda: get_files(os.path.join(d, "foo.shp")))
    finally:
        shutil.rmtree(d)

    # Check that including both capital and lowercase PRJ (this is
    # special-cased in the implementation)
    d = tempfile.mkdtemp()
    try:
        files = ("foo.SHP", "foo.SHX", "foo.PRJ", "foo.DBF", "foo.prj")
        _make_files(d, files)
        # Only run the tests if this is a case sensitive OS
        if len(os.listdir(d)) == len(files):
            self.assertRaises(GeoNodeException,
                              lambda: get_files(os.path.join(d, "foo.SHP")))
            self.assertRaises(GeoNodeException,
                              lambda: get_files(os.path.join(d, "foo.shp")))
    finally:
        shutil.rmtree(d)

    # Check that including both capital and lowercase SLD (this is
    # special-cased in the implementation)
    d = tempfile.mkdtemp()
    try:
        files = ("foo.SHP", "foo.SHX", "foo.PRJ", "foo.DBF",
                 "foo.SLD", "foo.sld")
        _make_files(d, files)
        # Only run the tests if this is a case sensitive OS
        if len(os.listdir(d)) == len(files):
            self.assertRaises(GeoNodeException,
                              lambda: get_files(os.path.join(d, "foo.SHP")))
            self.assertRaises(GeoNodeException,
                              lambda: get_files(os.path.join(d, "foo.shp")))
    finally:
        shutil.rmtree(d)
def test_get_files(self):
    """Exercise get_files() component discovery per OGC backend.

    Covers complete/missing component sets, backend-specific side-cars
    (SLD for geoserver, QML/JSON for qgis_server), upper/mixed case
    extensions, and mixed-case collision errors on case-sensitive OSes.

    Fixes: Python-2-only dict.iteritems() -> items(); deprecated
    assertEquals -> assertEqual; file creation uses context managers.
    """

    def _make_files(d, names):
        # Create empty placeholder files for each name inside d.
        for f in names:
            with open(os.path.join(d, f), 'w'):
                pass

    def _relative(d, gotten):
        # Strip the temp-dir prefix so results compare by basename.
        return dict((k, v[len(d) + 1:]) for k, v in gotten.items())

    # Check that a well-formed Shapefile has its components all picked up
    d = tempfile.mkdtemp()
    try:
        _make_files(d, ("foo.shp", "foo.shx", "foo.prj", "foo.dbf"))
        gotten_files = _relative(d, get_files(os.path.join(d, "foo.shp")))
        self.assertEqual(gotten_files,
                         dict(shp="foo.shp", shx="foo.shx",
                              prj="foo.prj", dbf="foo.dbf"))
    finally:
        shutil.rmtree(d)

    # Check that a Shapefile missing required components raises an
    # exception
    d = tempfile.mkdtemp()
    try:
        _make_files(d, ("foo.shp", "foo.shx", "foo.prj"))
        self.assertRaises(GeoNodeException,
                          lambda: get_files(os.path.join(d, "foo.shp")))
    finally:
        shutil.rmtree(d)

    # Check that including an SLD with a valid shapefile results in the SLD
    # getting picked up
    if check_ogc_backend(geoserver.BACKEND_PACKAGE):
        d = tempfile.mkdtemp()
        try:
            _make_files(d, ("foo.shp", "foo.shx", "foo.prj",
                            "foo.dbf", "foo.sld"))
            gotten_files = _relative(d, get_files(os.path.join(d, "foo.shp")))
            self.assertEqual(gotten_files,
                             dict(shp="foo.shp", shx="foo.shx", prj="foo.prj",
                                  dbf="foo.dbf", sld="foo.sld"))
        finally:
            shutil.rmtree(d)

    # Check that including a QML with a valid shapefile results in the QML
    # getting picked up
    if check_ogc_backend(qgis_server.BACKEND_PACKAGE):
        d = tempfile.mkdtemp()
        try:
            _make_files(d, ("foo.shp", "foo.shx", "foo.prj", "foo.dbf",
                            "foo.qml", "foo.json"))
            gotten_files = _relative(d, get_files(os.path.join(d, "foo.shp")))
            self.assertEqual(gotten_files,
                             dict(shp="foo.shp", shx="foo.shx", prj="foo.prj",
                                  dbf="foo.dbf", qml="foo.qml",
                                  json="foo.json"))
        finally:
            shutil.rmtree(d)

    # Check that capitalized extensions are ok
    d = tempfile.mkdtemp()
    try:
        _make_files(d, ("foo.SHP", "foo.SHX", "foo.PRJ", "foo.DBF"))
        gotten_files = _relative(d, get_files(os.path.join(d, "foo.SHP")))
        self.assertEqual(gotten_files,
                         dict(shp="foo.SHP", shx="foo.SHX",
                              prj="foo.PRJ", dbf="foo.DBF"))
    finally:
        shutil.rmtree(d)

    # Check that mixed capital and lowercase extensions are ok
    d = tempfile.mkdtemp()
    try:
        _make_files(d, ("foo.SHP", "foo.shx", "foo.pRJ", "foo.DBF"))
        gotten_files = _relative(d, get_files(os.path.join(d, "foo.SHP")))
        self.assertEqual(gotten_files,
                         dict(shp="foo.SHP", shx="foo.shx",
                              prj="foo.pRJ", dbf="foo.DBF"))
    finally:
        shutil.rmtree(d)

    # Check that including both capital and lowercase extensions raises an
    # exception
    d = tempfile.mkdtemp()
    try:
        files = ("foo.SHP", "foo.SHX", "foo.PRJ", "foo.DBF",
                 "foo.shp", "foo.shx", "foo.prj", "foo.dbf")
        _make_files(d, files)
        # Only run the tests if this is a case sensitive OS
        if len(os.listdir(d)) == len(files):
            self.assertRaises(GeoNodeException,
                              lambda: get_files(os.path.join(d, "foo.SHP")))
            self.assertRaises(GeoNodeException,
                              lambda: get_files(os.path.join(d, "foo.shp")))
    finally:
        shutil.rmtree(d)

    # Check that including both capital and lowercase PRJ (this is
    # special-cased in the implementation)
    d = tempfile.mkdtemp()
    try:
        files = ("foo.SHP", "foo.SHX", "foo.PRJ", "foo.DBF", "foo.prj")
        _make_files(d, files)
        # Only run the tests if this is a case sensitive OS
        if len(os.listdir(d)) == len(files):
            self.assertRaises(GeoNodeException,
                              lambda: get_files(os.path.join(d, "foo.SHP")))
            self.assertRaises(GeoNodeException,
                              lambda: get_files(os.path.join(d, "foo.shp")))
    finally:
        shutil.rmtree(d)

    # Check that including both capital and lowercase SLD (this is
    # special-cased in the implementation)
    if check_ogc_backend(geoserver.BACKEND_PACKAGE):
        d = tempfile.mkdtemp()
        try:
            files = ("foo.SHP", "foo.SHX", "foo.PRJ", "foo.DBF",
                     "foo.SLD", "foo.sld")
            _make_files(d, files)
            # Only run the tests if this is a case sensitive OS
            if len(os.listdir(d)) == len(files):
                self.assertRaises(GeoNodeException,
                                  lambda: get_files(os.path.join(d, "foo.SHP")))
                self.assertRaises(GeoNodeException,
                                  lambda: get_files(os.path.join(d, "foo.shp")))
        finally:
            shutil.rmtree(d)
def geoserver_upload(layer, base_file, user, name, overwrite=True, title=None,
                     abstract=None, permissions=None, keywords=(), charset='UTF-8'):
    """Upload *base_file* to GeoServer and build the Django layer defaults.

    Returns (name, workspace_name, defaults, gs_resource) where
    ``defaults`` is a dict of field values for the Django Layer record.
    Raises GeoNodeException on resource-type mismatch, failed store
    creation, or when no matching GeoServer resource can be found.

    NOTE(review): ``permissions`` and ``keywords`` are accepted but not
    used in this body — presumably consumed by a caller; confirm.
    """
    # Step 2. Check that it is uploading to the same resource type as
    # the existing resource
    logger.debug(
        '>>> Step 2. Make sure we are not trying to overwrite a '
        'existing resource named [%s] with the wrong type', name)
    the_layer_type = geoserver_layer_type(base_file)

    # Get a short handle to the gsconfig geoserver catalog
    cat = gs_catalog

    # Ahmed Nour: get workspace by name instead of get default one.
    workspace = cat.get_workspace(settings.DEFAULT_WORKSPACE)
    # Check if the store exists in geoserver
    try:
        store = get_store(cat, name, workspace=workspace)
    except geoserver.catalog.FailedRequestError:
        # There is no store, ergo the road is clear
        pass
    else:
        # If we get a store, we do the following:
        resources = store.get_resources()
        # If the store is empty, we just delete it.
        if len(resources) == 0:
            cat.delete(store)
        else:
            # If our resource is already configured in the store it needs
            # to have the right resource type
            for resource in resources:
                if resource.name == name:
                    msg = 'Name already in use and overwrite is False'
                    assert overwrite, msg
                    existing_type = resource.resource_type
                    if existing_type != the_layer_type:
                        msg = ('Type of uploaded file %s (%s) '
                               'does not match type of existing '
                               'resource type '
                               '%s' % (name, the_layer_type, existing_type))
                        logger.debug(msg)
                        raise GeoNodeException(msg)

    # Step 3. Identify whether it is vector or raster and which extra files
    # are needed.
    logger.debug('>>> Step 3. Identifying if [%s] is vector or raster and '
                 'gathering extra files', name)
    if the_layer_type == FeatureType.resource_type:
        logger.debug('Uploading vector layer: [%s]', base_file)
        if ogc_server_settings.DATASTORE:
            create_store_and_resource = _create_db_featurestore
        else:
            create_store_and_resource = _create_featurestore
    elif the_layer_type == Coverage.resource_type:
        logger.debug("Uploading raster layer: [%s]", base_file)
        create_store_and_resource = _create_coveragestore
    else:
        msg = ('The layer type for name %s is %s. It should be '
               '%s or %s,' % (name, the_layer_type,
                              FeatureType.resource_type,
                              Coverage.resource_type))
        logger.warn(msg)
        raise GeoNodeException(msg)

    # Step 4. Create the store in GeoServer
    logger.debug('>>> Step 4. Starting upload of [%s] to GeoServer...', name)

    # Get the helper files if they exist
    files = get_files(base_file)
    data = files
    # Shapefiles are sent as the dict of all component files; anything
    # else is sent as the single base file.
    if 'shp' not in files:
        data = base_file
    try:
        store, gs_resource = create_store_and_resource(name, data,
                                                       charset=charset,
                                                       overwrite=overwrite,
                                                       workspace=workspace)
    except UploadError as e:
        msg = ('Could not save the layer %s, there was an upload '
               'error: %s' % (name, str(e)))
        logger.warn(msg)
        e.args = (msg, )
        raise
    except ConflictingDataError as e:
        # A datastore of this name already exists
        msg = ('GeoServer reported a conflict creating a store with name %s: '
               '"%s". This should never happen because a brand new name '
               'should have been generated. But since it happened, '
               'try renaming the file or deleting the store in '
               'GeoServer.' % (name, str(e)))
        logger.warn(msg)
        e.args = (msg, )
        raise
    else:
        logger.debug('Finished upload of [%s] to GeoServer without '
                     'errors.', name)

    # Step 5. Create the resource in GeoServer
    logger.debug('>>> Step 5. Generating the metadata for [%s] after '
                 'successful import to GeoSever', name)

    # Verify the resource was created
    if not gs_resource:
        gs_resource = gs_catalog.get_resource(name=name, workspace=workspace)
        if not gs_resource:
            # Fall back to a workspace-less lookup.
            gs_resource = gs_catalog.get_resource(name=name)
    if not gs_resource:
        msg = ('GeoNode encountered problems when creating layer %s.'
               'It cannot find the Layer that matches this Workspace.'
               'try renaming your files.' % name)
        logger.warn(msg)
        raise GeoNodeException(msg)
    assert gs_resource.name == name

    # Step 6. Make sure our data always has a valid projection
    logger.debug('>>> Step 6. Making sure [%s] has a valid projection' % name)
    _native_bbox = None
    try:
        _native_bbox = gs_resource.native_bbox
    except Exception:
        pass

    if _native_bbox and len(
            _native_bbox) >= 5 and _native_bbox[4:5][0] == 'EPSG:4326':
        box = _native_bbox[:4]
        minx, maxx, miny, maxy = [float(a) for a in box]
        # Rounding to 5 decimals avoids rejecting values that only
        # exceed the bounds by floating-point noise.
        if -180 <= round(minx, 5) <= 180 and -180 <= round(maxx, 5) <= 180 and \
                -90 <= round(miny, 5) <= 90 and -90 <= round(maxy, 5) <= 90:
            gs_resource.latlon_bbox = _native_bbox
            gs_resource.projection = "EPSG:4326"
            cat.save(gs_resource)
        else:
            logger.warning(
                'BBOX coordinates outside normal EPSG:4326 values for layer '
                '[%s].', name)
            # Force a whole-world extent so the layer stays usable.
            _native_bbox = [-180, -90, 180, 90, "EPSG:4326"]
            gs_resource.latlon_bbox = _native_bbox
            gs_resource.projection = "EPSG:4326"
            try:
                cat.save(gs_resource)
                logger.debug(
                    'BBOX coordinates forced to [-180, -90, 180, 90] for layer '
                    '[%s].', name)
            except Exception as e:
                logger.exception(
                    'Error occurred while trying to force BBOX on resource', e)

    # Step 7. Create the style and assign it to the created resource
    logger.debug('>>> Step 7. Creating style for [%s]' % name)
    cat.save(gs_resource)
    publishing = cat.get_layer(name) or gs_resource

    # Prefer an uploaded SLD side-car; otherwise derive one.
    if 'sld' in files:
        f = open(files['sld'], 'r')
        sld = f.read()
        f.close()
    else:
        sld = get_sld_for(cat, publishing)

    style = None
    if sld is not None:
        try:
            style = cat.get_style(name, workspace=settings.DEFAULT_WORKSPACE)
        except geoserver.catalog.FailedRequestError:
            style = cat.get_style(name)
        try:
            overwrite = style or False
            cat.create_style(name, sld, overwrite=overwrite, raw=True,
                             workspace=settings.DEFAULT_WORKSPACE)
            cat.reset()
        except geoserver.catalog.ConflictingDataError as e:
            msg = ('There was already a style named %s in GeoServer, '
                   'try to use: "%s"' % (name + "_layer", str(e)))
            logger.warn(msg)
            e.args = (msg, )
        except geoserver.catalog.UploadError as e:
            msg = ('Error while trying to upload style named %s in GeoServer, '
                   'try to use: "%s"' % (name + "_layer", str(e)))
            e.args = (msg, )
            logger.exception(e)

    if style is None:
        try:
            style = cat.get_style(
                name, workspace=settings.DEFAULT_WORKSPACE) or cat.get_style(name)
        except Exception:
            # Retry under the alternate '<name>_layer' style name.
            try:
                style = cat.get_style(name + '_layer', workspace=settings.DEFAULT_WORKSPACE) or \
                    cat.get_style(name + '_layer')
                overwrite = style or False
                cat.create_style(name + '_layer', sld, overwrite=overwrite, raw=True,
                                 workspace=settings.DEFAULT_WORKSPACE)
                cat.reset()
                style = cat.get_style(name + '_layer', workspace=settings.DEFAULT_WORKSPACE) or \
                    cat.get_style(name + '_layer')
            except geoserver.catalog.ConflictingDataError as e:
                msg = ('There was already a style named %s in GeoServer, '
                       'cannot overwrite: "%s"' % (name, str(e)))
                logger.warn(msg)
                e.args = (msg, )
                style = cat.get_style(name + "_layer", workspace=settings.DEFAULT_WORKSPACE) or \
                    cat.get_style(name + "_layer")

    if style is None:
        # Last resort: fall back to the built-in 'point' style.
        style = cat.get_style('point')
        msg = ('Could not find any suitable style in GeoServer '
               'for Layer: "%s"' % (name))
        logger.error(msg)

    if style:
        publishing.default_style = style
        logger.debug('default style set to %s', name)
        try:
            cat.save(publishing)
        except geoserver.catalog.FailedRequestError as e:
            msg = ('Error while trying to save resource named %s in GeoServer, '
                   'try to use: "%s"' % (publishing, str(e)))
            e.args = (msg, )
            logger.exception(e)

    # Step 8. Create the Django record for the layer
    logger.debug('>>> Step 8. Creating Django record for [%s]', name)
    alternate = workspace.name + ':' + gs_resource.name
    layer_uuid = str(uuid.uuid1())

    defaults = dict(store=gs_resource.store.name,
                    storeType=gs_resource.store.resource_type,
                    alternate=alternate,
                    title=title or gs_resource.title,
                    uuid=layer_uuid,
                    abstract=abstract or gs_resource.abstract or '',
                    owner=user)
    return name, workspace.name, defaults, gs_resource
def geoserver_upload(
        layer,
        base_file,
        user,
        name,
        overwrite=True,
        title=None,
        abstract=None,
        permissions=None,
        keywords=(),
        charset='UTF-8'):
    """Upload *base_file* to GeoServer and build the Django layer defaults.

    Returns (name, workspace_name, defaults, gs_resource). Raises
    GeoNodeException on resource-type mismatch, missing resource after
    import, or an unrecognized projection (in which case the layer is
    backed out with cascading_delete).

    NOTE(review): ``permissions`` and ``keywords`` are accepted but not
    used in this body — presumably consumed by a caller; confirm.
    """
    # Step 2. Check that it is uploading to the same resource type as
    # the existing resource
    logger.info('>>> Step 2. Make sure we are not trying to overwrite a '
                'existing resource named [%s] with the wrong type', name)
    the_layer_type = geoserver_layer_type(base_file)

    # Get a short handle to the gsconfig geoserver catalog
    cat = gs_catalog

    # Ahmed Nour: get workspace by name instead of get default one.
    workspace = cat.get_workspace(settings.DEFAULT_WORKSPACE)
    # Check if the store exists in geoserver
    try:
        store = get_store(cat, name, workspace=workspace)
    except geoserver.catalog.FailedRequestError as e:
        # There is no store, ergo the road is clear
        pass
    else:
        # If we get a store, we do the following:
        resources = store.get_resources()
        # If the store is empty, we just delete it.
        if len(resources) == 0:
            cat.delete(store)
        else:
            # If our resource is already configured in the store it needs
            # to have the right resource type
            for resource in resources:
                if resource.name == name:
                    msg = 'Name already in use and overwrite is False'
                    assert overwrite, msg
                    existing_type = resource.resource_type
                    if existing_type != the_layer_type:
                        msg = ('Type of uploaded file %s (%s) '
                               'does not match type of existing '
                               'resource type '
                               '%s' % (name, the_layer_type, existing_type))
                        logger.info(msg)
                        raise GeoNodeException(msg)

    # Step 3. Identify whether it is vector or raster and which extra files
    # are needed.
    logger.info('>>> Step 3. Identifying if [%s] is vector or raster and '
                'gathering extra files', name)
    if the_layer_type == FeatureType.resource_type:
        logger.debug('Uploading vector layer: [%s]', base_file)
        if ogc_server_settings.DATASTORE:
            create_store_and_resource = _create_db_featurestore
        else:
            create_store_and_resource = _create_featurestore
    elif the_layer_type == Coverage.resource_type:
        logger.debug("Uploading raster layer: [%s]", base_file)
        create_store_and_resource = _create_coveragestore
    else:
        msg = ('The layer type for name %s is %s. It should be '
               '%s or %s,' % (name, the_layer_type,
                              FeatureType.resource_type,
                              Coverage.resource_type))
        logger.warn(msg)
        raise GeoNodeException(msg)

    # Step 4. Create the store in GeoServer
    logger.info('>>> Step 4. Starting upload of [%s] to GeoServer...', name)

    # Get the helper files if they exist
    files = get_files(base_file)
    data = files
    # Shapefiles are sent as the dict of all component files; anything
    # else is sent as the single base file.
    if 'shp' not in files:
        data = base_file
    try:
        store, gs_resource = create_store_and_resource(name, data,
                                                       charset=charset,
                                                       overwrite=overwrite,
                                                       workspace=workspace)
    except UploadError as e:
        msg = ('Could not save the layer %s, there was an upload '
               'error: %s' % (name, str(e)))
        logger.warn(msg)
        e.args = (msg,)
        raise
    except ConflictingDataError as e:
        # A datastore of this name already exists
        msg = ('GeoServer reported a conflict creating a store with name %s: '
               '"%s". This should never happen because a brand new name '
               'should have been generated. But since it happened, '
               'try renaming the file or deleting the store in '
               'GeoServer.' % (name, str(e)))
        logger.warn(msg)
        e.args = (msg,)
        raise
    else:
        logger.debug('Finished upload of [%s] to GeoServer without '
                     'errors.', name)

    # Step 5. Create the resource in GeoServer
    logger.info('>>> Step 5. Generating the metadata for [%s] after '
                'successful import to GeoSever', name)

    # Verify the resource was created
    if not gs_resource:
        gs_resource = gs_catalog.get_resource(
            name,
            workspace=workspace)

    if gs_resource is not None:
        assert gs_resource.name == name
    else:
        msg = ('GeoNode encountered problems when creating layer %s.'
               'It cannot find the Layer that matches this Workspace.'
               'try renaming your files.' % name)
        logger.warn(msg)
        raise GeoNodeException(msg)

    # Step 6. Make sure our data always has a valid projection
    # FIXME: Put this in gsconfig.py
    logger.info('>>> Step 6. Making sure [%s] has a valid projection' % name)
    _native_bbox = None
    try:
        _native_bbox = gs_resource.native_bbox
    except BaseException:
        pass

    if _native_bbox and len(_native_bbox) >= 5 and _native_bbox[4:5][0] == 'EPSG:4326':
        box = _native_bbox[:4]
        minx, maxx, miny, maxy = [float(a) for a in box]
        if -180 <= minx <= 180 and -180 <= maxx <= 180 and \
                - 90 <= miny <= 90 and -90 <= maxy <= 90:
            logger.info('GeoServer failed to detect the projection for layer '
                        '[%s]. Guessing EPSG:4326', name)
            # If GeoServer couldn't figure out the projection, we just
            # assume it's lat/lon to avoid a bad GeoServer configuration
            gs_resource.latlon_bbox = _native_bbox
            gs_resource.projection = "EPSG:4326"
            cat.save(gs_resource)
        else:
            msg = ('GeoServer failed to detect the projection for layer '
                   '[%s]. It doesn\'t look like EPSG:4326, so backing out '
                   'the layer.')
            logger.info(msg, name)
            # Remove the half-created layer before failing.
            cascading_delete(cat, name)
            raise GeoNodeException(msg % name)

    # Step 7. Create the style and assign it to the created resource
    # FIXME: Put this in gsconfig.py
    logger.info('>>> Step 7. Creating style for [%s]' % name)
    cat.save(gs_resource)
    cat.reload()
    publishing = cat.get_layer(name) or gs_resource

    # Prefer an uploaded SLD side-car; otherwise derive one.
    if 'sld' in files:
        f = open(files['sld'], 'r')
        sld = f.read()
        f.close()
    else:
        sld = get_sld_for(cat, publishing)

    style = None
    if sld is not None:
        try:
            style = cat.get_style(
                name, workspace=settings.DEFAULT_WORKSPACE) or cat.get_style(name)
            overwrite = style or False
            cat.create_style(name, sld, overwrite=overwrite, raw=True,
                             workspace=settings.DEFAULT_WORKSPACE)
        except geoserver.catalog.ConflictingDataError as e:
            msg = ('There was already a style named %s in GeoServer, '
                   'try to use: "%s"' % (name + "_layer", str(e)))
            logger.warn(msg)
            e.args = (msg,)
        except geoserver.catalog.UploadError as e:
            msg = ('Error while trying to upload style named %s in GeoServer, '
                   'try to use: "%s"' % (name + "_layer", str(e)))
            e.args = (msg,)
            logger.exception(e)

    if style is None:
        try:
            style = cat.get_style(
                name, workspace=settings.DEFAULT_WORKSPACE) or cat.get_style(name)
            overwrite = style or False
            cat.create_style(name, sld, overwrite=overwrite, raw=True,
                             workspace=settings.DEFAULT_WORKSPACE)
        except BaseException:
            # Retry under the alternate '<name>_layer' style name.
            try:
                style = cat.get_style(name + '_layer', workspace=settings.DEFAULT_WORKSPACE) or \
                    cat.get_style(name + '_layer')
                overwrite = style or False
                cat.create_style(name + '_layer', sld, overwrite=overwrite, raw=True,
                                 workspace=settings.DEFAULT_WORKSPACE)
                style = cat.get_style(name + '_layer', workspace=settings.DEFAULT_WORKSPACE) or \
                    cat.get_style(name + '_layer')
            except geoserver.catalog.ConflictingDataError as e:
                msg = ('There was already a style named %s in GeoServer, '
                       'cannot overwrite: "%s"' % (name, str(e)))
                logger.warn(msg)
                e.args = (msg,)
                style = cat.get_style(name + "_layer", workspace=settings.DEFAULT_WORKSPACE) or \
                    cat.get_style(name + "_layer")

    if style is None:
        # Last resort: fall back to the built-in 'point' style.
        style = cat.get_style('point')
        msg = ('Could not find any suitable style in GeoServer '
               'for Layer: "%s"' % (name))
        logger.error(msg)

    if style:
        publishing.default_style = style
        logger.info('default style set to %s', name)
        try:
            cat.save(publishing)
        except geoserver.catalog.FailedRequestError as e:
            msg = ('Error while trying to save resource named %s in GeoServer, '
                   'try to use: "%s"' % (publishing, str(e)))
            e.args = (msg,)
            logger.exception(e)

    # Step 10. Create the Django record for the layer
    logger.info('>>> Step 10. Creating Django record for [%s]', name)
    # FIXME: Do this inside the layer object
    alternate = workspace.name + ':' + gs_resource.name
    layer_uuid = str(uuid.uuid1())

    defaults = dict(store=gs_resource.store.name,
                    storeType=gs_resource.store.resource_type,
                    alternate=alternate,
                    title=title or gs_resource.title,
                    uuid=layer_uuid,
                    abstract=abstract or gs_resource.abstract or '',
                    owner=user)
    return name, workspace.name, defaults, gs_resource
def geoserver_upload(dataset, base_file, user, name, overwrite=True, title=None,
                     abstract=None, permissions=None, keywords=(), charset='UTF-8'):
    """Upload *base_file* to GeoServer and build the Django dataset defaults.

    Returns (name, workspace_name, defaults, gs_resource) where
    ``defaults`` holds field values for the Django Dataset record.
    Raises GeoNodeException on resource-type mismatch, failed store
    creation, or a missing resource after import. The temporary
    directory produced by get_files() is always removed.

    NOTE(review): ``permissions`` and ``keywords`` are accepted but not
    used in this body — presumably consumed by a caller; confirm.
    """
    # Step 2. Check that it is uploading to the same resource type as
    # the existing resource
    logger.debug(
        '>>> Step 2. Make sure we are not trying to overwrite a '
        'existing resource named [%s] with the wrong type', name)
    the_dataset_type = geoserver_dataset_type(base_file)

    # Get a short handle to the gsconfig geoserver catalog
    cat = gs_catalog

    # Ahmed Nour: get workspace by name instead of get default one.
    workspace = cat.get_workspace(settings.DEFAULT_WORKSPACE)
    # Check if the store exists in geoserver
    try:
        store = get_store(cat, name, workspace=workspace)
    except geoserver.catalog.FailedRequestError:
        # There is no store, ergo the road is clear
        pass
    else:
        # If we get a store, we do the following:
        resources = cat.get_resources(names=[name], stores=[store], workspaces=[workspace])
        if len(resources) > 0:
            # If our resource is already configured in the store it needs
            # to have the right resource type
            for resource in resources:
                if resource.name == name:
                    msg = 'Name already in use and overwrite is False'
                    assert overwrite, msg
                    existing_type = resource.resource_type
                    if existing_type != the_dataset_type:
                        msg = (
                            f'Type of uploaded file {name} ({the_dataset_type}) '
                            'does not match type of existing '
                            f'resource type {existing_type}')
                        logger.debug(msg)
                        raise GeoNodeException(msg)

    # Step 3. Identify whether it is vector or raster and which extra files
    # are needed.
    logger.debug('>>> Step 3. Identifying if [%s] is vector or raster and '
                 'gathering extra files', name)
    if the_dataset_type == FeatureType.resource_type:
        logger.debug('Uploading vector layer: [%s]', base_file)
        if ogc_server_settings.DATASTORE:
            create_store_and_resource = _create_db_featurestore
        else:
            create_store_and_resource = _create_featurestore
    elif the_dataset_type == Coverage.resource_type:
        logger.debug("Uploading raster layer: [%s]", base_file)
        create_store_and_resource = _create_coveragestore
    else:
        msg = (
            f'The layer type for name {name} is {the_dataset_type}. It should be '
            f'{FeatureType.resource_type} or {Coverage.resource_type},')
        logger.warn(msg)
        raise GeoNodeException(msg)

    # Step 4. Create the store in GeoServer
    logger.debug('>>> Step 4. Starting upload of [%s] to GeoServer...', name)

    # Get the helper files if they exist
    # get_files returns (mapping of extension -> path, temp directory).
    files, _tmpdir = get_files(base_file)
    data = files
    # Shapefiles are sent as the dict of all component files; anything
    # else is sent as the single base file.
    if 'shp' not in files:
        data = base_file
    try:
        store, gs_resource = create_store_and_resource(name, data,
                                                       charset=charset,
                                                       overwrite=overwrite,
                                                       workspace=workspace)
    except UploadError as e:
        msg = (f'Could not save the layer {name}, there was an upload '
               f'error: {e}')
        logger.warn(msg)
        e.args = (msg, )
        raise
    except ConflictingDataError as e:
        # A datastore of this name already exists
        msg = (
            f'GeoServer reported a conflict creating a store with name {name}: '
            f'"{e}". This should never happen because a brand new name '
            'should have been generated. But since it happened, '
            'try renaming the file or deleting the store in GeoServer.')
        logger.warn(msg)
        e.args = (msg, )
        raise
    finally:
        # Always clean up the temp dir produced by get_files().
        if _tmpdir is not None:
            shutil.rmtree(_tmpdir, ignore_errors=True)
    logger.debug(f'Finished upload of {name} to GeoServer without errors.')

    # Step 5. Create the resource in GeoServer
    logger.debug(
        f'>>> Step 5. Generating the metadata for {name} after successful import to GeoSever')

    # Verify the resource was created
    if not gs_resource:
        gs_resource = gs_catalog.get_resource(name=name, workspace=workspace)
    if not gs_resource:
        msg = f'GeoNode encountered problems when creating layer {name}.It cannot find the Dataset that matches this Workspace.try renaming your files.'
        logger.warn(msg)
        raise GeoNodeException(msg)
    assert gs_resource.name == name

    # Step 6. Make sure our data always has a valid projection
    logger.debug(f'>>> Step 6. Making sure [{name}] has a valid projection')
    _native_bbox = None
    try:
        _native_bbox = gs_resource.native_bbox
    except Exception:
        pass

    if _native_bbox and len(
            _native_bbox) >= 5 and _native_bbox[4:5][0] == 'EPSG:4326':
        box = _native_bbox[:4]
        minx, maxx, miny, maxy = [float(a) for a in box]
        # Rounding to 5 decimals avoids rejecting values that only
        # exceed the bounds by floating-point noise.
        if -180 <= round(minx, 5) <= 180 and -180 <= round(maxx, 5) <= 180 and \
                -90 <= round(miny, 5) <= 90 and -90 <= round(maxy, 5) <= 90:
            gs_resource.latlon_bbox = _native_bbox
            gs_resource.projection = "EPSG:4326"
        else:
            logger.warning(
                'BBOX coordinates outside normal EPSG:4326 values for layer '
                '[%s].', name)
            # Force a whole-world extent so the layer stays usable.
            _native_bbox = [-180, -90, 180, 90, "EPSG:4326"]
            gs_resource.latlon_bbox = _native_bbox
            gs_resource.projection = "EPSG:4326"
            logger.debug(
                'BBOX coordinates forced to [-180, -90, 180, 90] for layer [%s].', name)

    # Step 7. Create the style and assign it to the created resource
    logger.debug(f'>>> Step 7. Creating style for [{name}]')
    cat.save(gs_resource)
    publishing = cat.get_layer(name) or gs_resource
    # Prefer an uploaded SLD side-car; otherwise derive one.
    sld = None
    try:
        if 'sld' in files:
            with open(files['sld'], 'rb') as f:
                sld = f.read()
        else:
            sld = get_sld_for(cat, dataset)
    except Exception as e:
        logger.exception(e)

    style = None
    if sld:
        try:
            style = cat.get_style(name, workspace=workspace)
        except geoserver.catalog.FailedRequestError:
            style = cat.get_style(name)
        try:
            overwrite = style or False
            cat.create_style(name, sld, overwrite=overwrite, raw=True,
                             workspace=workspace)
            cat.reset()
        except geoserver.catalog.ConflictingDataError as e:
            msg = (
                f'There was already a style named {name}_dataset in GeoServer, '
                f'try to use: "{e}"')
            logger.warn(msg)
            e.args = (msg, )
        except geoserver.catalog.UploadError as e:
            msg = (
                f'Error while trying to upload style named {name}_dataset in GeoServer, '
                f'try to use: "{e}"')
            e.args = (msg, )
            logger.exception(e)

    if style is None:
        try:
            style = cat.get_style(
                name, workspace=workspace) or cat.get_style(name)
        except Exception as e:
            # Last resort: fall back to the built-in 'point' style.
            style = cat.get_style('point')
            msg = f'Could not find any suitable style in GeoServer for Dataset: "{name}"'
            e.args = (msg, )
            logger.exception(e)

    if style:
        publishing.default_style = style
        logger.debug('default style set to %s', name)
        try:
            cat.save(publishing)
        except geoserver.catalog.FailedRequestError as e:
            msg = (
                f'Error while trying to save resource named {publishing} in GeoServer, '
                f'try to use: "{e}"')
            e.args = (msg, )
            logger.exception(e)

    # Step 8. Create the Django record for the layer
    logger.debug('>>> Step 8. Creating Django record for [%s]', name)
    alternate = f"{workspace.name}:{gs_resource.name}"
    dataset_uuid = str(uuid.uuid1())

    defaults = dict(store=gs_resource.store.name,
                    subtype=gs_resource.store.resource_type,
                    alternate=alternate,
                    title=title or gs_resource.title,
                    uuid=dataset_uuid,
                    abstract=abstract or gs_resource.abstract or '',
                    owner=user)
    return name, workspace.name, defaults, gs_resource
def dataset_append_replace_view(request, layername, template, action_type):
    """Render (GET) or perform (POST) an append/replace of a dataset's files.

    GET returns the upload form for the resolved dataset; POST validates the
    uploaded files against the existing dataset and, when valid, dispatches to
    ``resource_manager.append`` / ``resource_manager.replace`` via
    ``action_type``, then invalidates the GeoWebCache for the layer.

    :param request: the incoming HttpRequest
    :param layername: name used to resolve the target dataset
    :param template: template rendered on GET
    :param action_type: name of the resource_manager method to invoke
        (e.g. 'append' or 'replace')
    :return: rendered template (GET) or a JSON HttpResponse with
        ``success``/``url`` or ``errors`` (POST); 403 when permission is
        denied.  NOTE(review): any other HTTP method falls through and
        returns ``None`` — preserved from the original behavior.
    :raises Http404: when the dataset cannot be resolved
    """
    try:
        layer = _resolve_dataset(
            request,
            layername,
            'base.change_resourcebase',
            _PERMISSION_MSG_MODIFY)
    except PermissionDenied:
        return HttpResponse("Not allowed", status=403)
    except Exception:
        raise Http404("Not found")
    if not layer:
        raise Http404("Not found")

    if request.method == 'GET':
        ctx = {
            'charsets': CHARSETS,
            'resource': layer,
            'is_featuretype': layer.is_vector(),
            'is_dataset': True,
        }
        return render(request, template, context=ctx)
    elif request.method == 'POST':
        form = LayerUploadForm(request.POST, request.FILES)
        out = {}
        if form.is_valid():
            # Initialize before the try: if write_files()/get_files() raises,
            # the finally block must not hit an UnboundLocalError that would
            # mask the original exception.
            tempdir = None
            _tmpdir = None
            try:
                tempdir, base_file = form.write_files()
                files, _tmpdir = get_files(base_file)
                # validate input source
                resource_is_valid = validate_input_source(
                    layer=layer, filename=base_file, files=files, action_type=action_type)
                out = {}
                if resource_is_valid:
                    getattr(resource_manager, action_type)(layer, vals={
                        'files': list(files.values()),
                        'user': request.user
                    })
                    out['success'] = True
                    out['url'] = layer.get_absolute_url()
                    # invalidating resource cache
                    set_geowebcache_invalidate_cache(layer.typename)
            except Exception as e:
                logger.exception(e)
                out['success'] = False
                out['errors'] = str(e)
            finally:
                # Best-effort cleanup of both temporary directories.
                if tempdir is not None:
                    shutil.rmtree(tempdir, ignore_errors=True)
                if _tmpdir is not None:
                    shutil.rmtree(_tmpdir, ignore_errors=True)
        else:
            errormsgs = []
            for e in form.errors.values():
                errormsgs.append([escape(v) for v in e])
            out['errors'] = form.errors
            out['errormsgs'] = errormsgs
        # Use .get(): neither the invalid-form branch nor a failed
        # validate_input_source() ever sets 'success', and a plain
        # out['success'] would raise KeyError here (HTTP 500 instead of 400).
        if out.get('success'):
            status_code = 200
            register_event(request, 'change', layer)
        else:
            status_code = 400
        return HttpResponse(
            json.dumps(out),
            content_type='application/json',
            status=status_code)
create_store_and_resource = _create_featurestore elif the_layer_type == Coverage.resource_type: logger.debug("Uploading raster layer: [%s]", base_file) create_store_and_resource = _create_coveragestore else: msg = ('The layer type for name %s is %s. It should be ' '%s or %s,' % (name, the_layer_type, FeatureType.resource_type, Coverage.resource_type)) logger.warn(msg) raise GeoNodeException(msg) # Step 4. Create the store in GeoServer logger.info('>>> Step 4. Starting upload of [%s] to GeoServer...', name) # Get the helper files if they exist files = get_files(base_file) data = files #FIXME: DONT DO THIS #------------------- if 'shp' not in files: if files['base'][-4:] == ".zip": _rename_zip(files['base'], name) main_file = files['base'] data = main_file # ------------------ try: store, gs_resource = create_store_and_resource(name, data,
logger.debug("Uploading raster layer: [%s]", base_file) create_store_and_resource = _create_coveragestore else: msg = ('The layer type for name %s is %s. It should be ' '%s or %s,' % (name, the_layer_type, FeatureType.resource_type, Coverage.resource_type)) logger.warn(msg) raise GeoNodeException(msg) # Step 4. Create the store in GeoServer logger.info('>>> Step 4. Starting upload of [%s] to GeoServer...', name) # Get the helper files if they exist files = get_files(base_file) data = files if 'shp' not in files: data = base_file try: store, gs_resource = create_store_and_resource(name, data, charset=charset, overwrite=overwrite) except UploadError, e: msg = ('Could not save the layer %s, there was an upload ' 'error: %s' % (name, str(e))) logger.warn(msg)