def handle(self, layer, layer_config, *args, **kwargs):
    """Update a GeoNode layer's metadata from an uploaded XML document.

    Looks up the layer by ``layer`` (name), reads the XML file named by
    ``layer_config['metadata']`` from this upload's directory, parses it
    with ``set_metadata`` and applies the resulting regions, keywords and
    plain model attributes to the layer.

    Returns the updated ``Layer`` instance.
    """
    geonode_layer = Layer.objects.get(name=layer)
    path = os.path.join(UPLOAD_DIR, str(self.importer.upload_file.upload.id))
    xmlfile = os.path.join(path, layer_config.get('metadata'))
    geonode_layer.metadata_uploaded = True
    # BUGFIX: use a context manager so the XML file handle is always
    # released (the original `open(...).read()` leaked it).
    with open(xmlfile) as f:
        identifier, vals, regions, keywords = set_metadata(f.read())
    regions_resolved, regions_unresolved = resolve_regions(regions)
    # Unresolved region names are kept as plain keywords.
    keywords.extend(regions_unresolved)
    # set regions (de-duplicated); a truthy list always has len > 0, so the
    # original nested `if len(regions) > 0` was redundant.
    regions_resolved = list(set(regions_resolved))
    if regions:
        geonode_layer.regions.add(*regions_resolved)
    # set taggit keywords (de-duplicated)
    keywords = list(set(keywords))
    geonode_layer.keywords.add(*keywords)
    # set model properties
    for key, value in vals.items():
        if key == "spatial_representation_type":
            # value = SpatialRepresentationType.objects.get(identifier=value)
            pass
        else:
            setattr(geonode_layer, key, value)
    geonode_layer.save()
    return geonode_layer
def handle(self, layer, layer_config, *args, **kwargs):
    """Update a GeoNode layer's metadata from an uploaded XML document.

    Looks up the layer by ``layer`` (name), reads the XML file named by
    ``layer_config['metadata']`` from this upload's directory, parses it
    with ``set_metadata`` and applies the resulting regions, keywords and
    plain model attributes to the layer.

    Returns the updated ``Layer`` instance.
    """
    geonode_layer = Layer.objects.get(name=layer)
    path = os.path.join(UPLOAD_DIR, str(self.importer.upload_file.upload.id))
    xmlfile = os.path.join(path, layer_config.get('metadata'))
    geonode_layer.metadata_uploaded = True
    # BUGFIX: use a context manager so the XML file handle is always
    # released (the original `open(...).read()` leaked it).
    with open(xmlfile) as f:
        identifier, vals, regions, keywords = set_metadata(f.read())
    regions_resolved, regions_unresolved = resolve_regions(regions)
    # Unresolved region names are kept as plain keywords.
    keywords.extend(regions_unresolved)
    # set regions (de-duplicated); a truthy list always has len > 0, so the
    # original nested `if len(regions) > 0` was redundant.
    regions_resolved = list(set(regions_resolved))
    if regions:
        geonode_layer.regions.add(*regions_resolved)
    # set taggit keywords (de-duplicated)
    keywords = list(set(keywords))
    geonode_layer.keywords.add(*keywords)
    # set model properties
    for key, value in vals.items():
        if key == "spatial_representation_type":
            # value = SpatialRepresentationType.objects.get(identifier=value)
            pass
        else:
            setattr(geonode_layer, key, value)
    geonode_layer.save()
    return geonode_layer
def _enrich_layer_metadata(self, geonode_layer):
    """Fetch the remote GeoNode's record for ``geonode_layer`` over its
    ``/api/layers/`` endpoint and copy plain fields, thumbnail, keywords,
    regions and topic category onto the local layer.

    When the API response is usable, the layer is saved at the end even if
    individual enrichment steps fail (best-effort).
    """
    url = urlsplit(self.url)
    if url.scheme == 'https':
        conn = HTTPSConnection(url.hostname, url.port)
    else:
        conn = HTTPConnection(url.hostname, url.port)
    # "workspace:name" -> (workspace, name); bare names have no workspace.
    workspace, layername = geonode_layer.name.split(
        ":") if ":" in geonode_layer.name else (None, geonode_layer.name)
    try:
        conn.request('GET', '/api/layers/?name=%s' % layername, '', {})
        response = conn.getresponse()
        content = response.read()
        status = response.status
        content_type = response.getheader("Content-Type", "text/plain")
    finally:
        # BUGFIX: the connection was never closed, leaking the socket.
        conn.close()
    if status == 200 and 'application/json' == content_type:
        try:
            _json_obj = json.loads(content)
            if _json_obj['meta']['total_count'] == 1:
                _layer = _json_obj['objects'][0]
                if _layer:
                    r_fields = {}
                    # Update plain fields
                    for field in GeoNodeServiceHandler.LAYER_FIELDS:
                        if field in _layer and _layer[field]:
                            r_fields[field] = _layer[field]
                    if r_fields:
                        Layer.objects.filter(
                            id=geonode_layer.id).update(**r_fields)
                        geonode_layer.refresh_from_db()
                    # Update Thumbnail
                    if "thumbnail_url" in _layer and _layer["thumbnail_url"]:
                        thumbnail_remote_url = _layer["thumbnail_url"]
                        _url = urlsplit(thumbnail_remote_url)
                        if not _url.scheme:
                            # Relative URL: anchor it on the remote service.
                            thumbnail_remote_url = "{}{}".format(
                                geonode_layer.remote_service.service_url,
                                _url.path)
                        resp, image = http_client.request(
                            thumbnail_remote_url)
                        if 'ServiceException' in image or \
                                resp.status < 200 or resp.status > 299:
                            msg = 'Unable to obtain thumbnail: %s' % image
                            logger.debug(msg)
                            # Replace error message with None.
                            image = None
                        if image is not None:
                            thumbnail_name = 'layer-%s-thumb.png' % geonode_layer.uuid
                            geonode_layer.save_thumbnail(
                                thumbnail_name, image=image)
                        else:
                            self._create_layer_thumbnail(geonode_layer)
                    # Add Keywords
                    if "keywords" in _layer and _layer["keywords"]:
                        keywords = _layer["keywords"]
                        if keywords:
                            geonode_layer.keywords.clear()
                            geonode_layer.keywords.add(*keywords)
                    # Add Regions
                    if "regions" in _layer and _layer["regions"]:
                        (regions_resolved, regions_unresolved) = resolve_regions(
                            _layer["regions"])
                        if regions_resolved:
                            geonode_layer.regions.clear()
                            geonode_layer.regions.add(*regions_resolved)
                    # Add Topic Category
                    if "category__gn_description" in _layer and _layer["category__gn_description"]:
                        try:
                            categories = TopicCategory.objects.filter(
                                Q(gn_description__iexact=_layer["category__gn_description"]))
                            if categories:
                                geonode_layer.category = categories[0]
                        except Exception:
                            # BUGFIX: was `except BaseException`, which also
                            # swallowed SystemExit/KeyboardInterrupt.
                            traceback.print_exc()
        except Exception:
            # BUGFIX: narrowed from BaseException (see above).
            traceback.print_exc()
        finally:
            geonode_layer.save()
def final_step(upload_session, user):
    """Finalize an importer session (simple, non-mosaic variant).

    Creates/assigns the GeoServer style, creates or fetches the Django
    ``Layer`` record, applies XML metadata and permissions, cleans up the
    temporary upload directory, marks the ``Upload`` complete and emits the
    ``upload_complete`` signal.

    Returns the saved ``Layer``.
    """
    from geonode.geoserver.helpers import get_sld_for
    import_session = upload_session.import_session
    _log('Reloading session %s to check validity', import_session.id)
    import_session = import_session.reload()
    upload_session.import_session = import_session

    # the importer chooses an available featuretype name late in the game need
    # to verify the resource.name otherwise things will fail. This happens
    # when the same data is uploaded a second time and the default name is
    # chosen
    cat = gs_catalog
    cat._cache.clear()

    # Create the style and assign it to the created resource
    # FIXME: Put this in gsconfig.py
    task = import_session.tasks[0]
    # @todo see above in save_step, regarding computed unique name
    name = task.layer.name
    _log('Getting from catalog [%s]', name)
    publishing = cat.get_layer(name)
    if not publishing:
        raise LayerNotReady("Expected to find layer named '%s' in geoserver" % name)

    _log('Creating style for [%s]', name)
    # get_files will not find the sld if it doesn't match the base name
    # so we've worked around that in the view - if provided, it will be here
    if upload_session.import_sld_file:
        _log('using provided sld file')
        base_file = upload_session.base_file
        sld_file = base_file[0].sld_files[0]
        # BUGFIX: context manager guarantees the handle is closed even if
        # read() raises (original used open/read/close).
        with open(sld_file, 'r') as f:
            sld = f.read()
    else:
        sld = get_sld_for(publishing)

    if sld is not None:
        try:
            cat.create_style(name, sld)
        except geoserver.catalog.ConflictingDataError as e:
            msg = 'There was already a style named %s in GeoServer, cannot overwrite: "%s"' % (
                name, str(e))
            # what are we doing with this var?
            # style = cat.get_style(name)
            logger.warn(msg)
            e.args = (msg,)

    # FIXME: Should we use the fully qualified typename?
    publishing.default_style = cat.get_style(name)
    _log('default style set to %s', name)
    cat.save(publishing)

    _log('Creating Django record for [%s]', name)
    target = task.target
    typename = task.get_target_layer_name()
    layer_uuid = str(uuid.uuid1())
    title = upload_session.layer_title
    abstract = upload_session.layer_abstract

    # @todo hacking - any cached layers might cause problems (maybe
    # delete hook on layer should fix this?)
    cat._cache.clear()
    defaults = dict(store=target.name,
                    storeType=target.store_type,
                    typename=typename,
                    workspace=target.workspace_name,
                    title=title,
                    uuid=layer_uuid,
                    abstract=abstract or '',
                    owner=user,)
    _log('record defaults: %s', defaults)
    saved_layer, created = Layer.objects.get_or_create(
        name=task.layer.name,
        defaults=defaults
    )

    # Should we throw a clearer error here?
    assert saved_layer is not None

    # @todo if layer was not created, need to ensure upload target is
    # same as existing target
    _log('layer was created : %s', created)

    if created:
        saved_layer.set_default_permissions()

    # Create the points of contact records for the layer
    _log('Creating points of contact records for [%s]', name)
    saved_layer.poc = user
    saved_layer.metadata_author = user

    # look for xml
    xml_file = upload_session.base_file[0].xml_files
    if xml_file:
        saved_layer.metadata_uploaded = True
        # get model properties from XML
        # BUGFIX: close the XML file handle (original open(...).read() leaked it).
        with open(xml_file[0]) as f:
            vals, regions, keywords = set_metadata(f.read())
        regions_resolved, regions_unresolved = resolve_regions(regions)
        keywords.extend(regions_unresolved)
        # set regions (a truthy list always has len > 0, so the original
        # nested length check was redundant)
        regions_resolved = list(set(regions_resolved))
        if regions:
            saved_layer.regions.add(*regions_resolved)
        # set taggit keywords
        keywords = list(set(keywords))
        saved_layer.keywords.add(*keywords)
        # set model properties
        for key, value in vals.items():
            if key == "spatial_representation_type":
                # value = SpatialRepresentationType.objects.get(identifier=value)
                pass
            else:
                setattr(saved_layer, key, value)
        saved_layer.save()

    # Set default permissions on the newly created layer
    # FIXME: Do this as part of the post_save hook
    permissions = upload_session.permissions
    _log('Setting default permissions for [%s]', name)
    if permissions is not None:
        saved_layer.set_permissions(permissions)

    if upload_session.tempdir and os.path.exists(upload_session.tempdir):
        shutil.rmtree(upload_session.tempdir)

    upload = Upload.objects.get(import_id=import_session.id)
    upload.layer = saved_layer
    upload.complete = True
    upload.save()

    if upload_session.time_info:
        set_time_info(saved_layer, **upload_session.time_info)

    signals.upload_complete.send(sender=final_step, layer=saved_layer)
    return saved_layer
def _enrich_layer_metadata(self, geonode_layer):
    """Fetch the remote GeoNode's record for ``geonode_layer`` via the
    ``/api/layers/`` endpoint (using ``requests``) and copy plain fields,
    thumbnail, keywords, regions and topic category onto the local layer.

    Enrichment is best-effort: failures are logged and the layer is still
    saved at the end of a usable response.
    """
    # "workspace:name" -> (workspace, name); bare names have no workspace.
    workspace, layername = geonode_layer.name.split(
        ":") if ":" in geonode_layer.name else (None, geonode_layer.name)
    url = urlsplit(self.url)
    base_url = '%s://%s/' % (url.scheme, url.netloc)
    # NOTE(review): verify=False disables TLS certificate validation; this
    # trusts the remote service blindly — confirm this is intentional.
    response = requests.get('%sapi/layers/?name=%s' % (base_url, layername),
                            {},
                            timeout=10,
                            verify=False)
    content = response.content
    status = response.status_code
    # BUGFIX: use .get() so a missing Content-Type header does not raise
    # KeyError; an absent header simply fails the check below.
    content_type = response.headers.get('Content-Type', '')
    if status == 200 and 'application/json' == content_type:
        try:
            if isinstance(content, bytes):
                content = content.decode('UTF-8')
            _json_obj = json.loads(content)
            if _json_obj['meta']['total_count'] == 1:
                _layer = _json_obj['objects'][0]
                if _layer:
                    r_fields = {}
                    # Update plain fields
                    for field in GeoNodeServiceHandler.LAYER_FIELDS:
                        if field in _layer and _layer[field]:
                            r_fields[field] = _layer[field]
                    if r_fields:
                        Layer.objects.filter(id=geonode_layer.id).update(
                            **r_fields)
                        geonode_layer.refresh_from_db()
                    # Update Thumbnail
                    if "thumbnail_url" in _layer and _layer["thumbnail_url"]:
                        thumbnail_remote_url = _layer["thumbnail_url"]
                        _url = urlsplit(thumbnail_remote_url)
                        if not _url.scheme:
                            # Relative URL: anchor it on the remote service.
                            thumbnail_remote_url = "{}{}".format(
                                geonode_layer.remote_service.service_url,
                                _url.path)
                        resp, image = http_client.request(
                            thumbnail_remote_url)
                        if 'ServiceException' in str(image) or \
                                resp.status_code < 200 or resp.status_code > 299:
                            msg = 'Unable to obtain thumbnail: %s' % image
                            logger.debug(msg)
                            # Replace error message with None.
                            image = None
                        if image is not None:
                            thumbnail_name = 'layer-%s-thumb.png' % geonode_layer.uuid
                            geonode_layer.save_thumbnail(thumbnail_name,
                                                         image=image)
                        else:
                            self._create_layer_thumbnail(geonode_layer)
                    else:
                        self._create_layer_thumbnail(geonode_layer)
                    # Add Keywords
                    if "keywords" in _layer and _layer["keywords"]:
                        keywords = _layer["keywords"]
                        if keywords:
                            geonode_layer.keywords.clear()
                            geonode_layer.keywords.add(*keywords)
                    # Add Regions
                    if "regions" in _layer and _layer["regions"]:
                        (regions_resolved, regions_unresolved) = resolve_regions(
                            _layer["regions"])
                        if regions_resolved:
                            geonode_layer.regions.clear()
                            geonode_layer.regions.add(*regions_resolved)
                    # Add Topic Category
                    if "category__gn_description" in _layer and _layer["category__gn_description"]:
                        try:
                            categories = TopicCategory.objects.filter(
                                Q(gn_description__iexact=_layer["category__gn_description"]))
                            if categories:
                                geonode_layer.category = categories[0]
                        except Exception:
                            traceback.print_exc()
        except Exception:
            traceback.print_exc()
        finally:
            try:
                geonode_layer.save(notify=True)
            except Exception as e:
                logger.error(e)
def final_step(upload_session, user):
    """Finalize an importer session (mosaic-aware variant).

    Commits the import when needed, creates/recovers the GeoServer style,
    creates or updates the Django ``Layer`` record (regular file or
    ImageMosaic, including temporal-extent maintenance when appending to a
    mosaic), applies XML metadata, permissions and time info, cleans the
    temporary directory and emits ``upload_complete``.

    Returns the saved ``Layer``.
    """
    from geonode.geoserver.helpers import get_sld_for
    import_session = upload_session.import_session
    _log('Reloading session %s to check validity', import_session.id)
    import_session = import_session.reload()
    upload_session.import_session = import_session

    # the importer chooses an available featuretype name late in the game need
    # to verify the resource.name otherwise things will fail. This happens
    # when the same data is uploaded a second time and the default name is
    # chosen
    cat = gs_catalog
    cat._cache.clear()

    # Create the style and assign it to the created resource
    # FIXME: Put this in gsconfig.py
    task = import_session.tasks[0]
    # @todo see above in save_step, regarding computed unique name
    name = task.layer.name
    _log('Getting from catalog [%s]', name)
    publishing = cat.get_layer(name)

    if import_session.state == 'INCOMPLETE':
        if task.state != 'ERROR':
            raise Exception('unknown item state: %s' % task.state)
    elif import_session.state == 'PENDING':
        # Shapefiles are committed elsewhere; everything else commits here.
        if task.state == 'READY' and task.data.format != 'Shapefile':
            import_session.commit()

    if not publishing:
        raise LayerNotReady("Expected to find layer named '%s' in geoserver" % name)

    _log('Creating style for [%s]', name)
    # get_files will not find the sld if it doesn't match the base name
    # so we've worked around that in the view - if provided, it will be here
    if upload_session.import_sld_file:
        _log('using provided sld file')
        base_file = upload_session.base_file
        sld_file = base_file[0].sld_files[0]
        # BUGFIX: context manager guarantees the handle is closed even if
        # read() raises (original used open/read/close).
        with open(sld_file, 'r') as f:
            sld = f.read()
    else:
        sld = get_sld_for(cat, publishing)

    style = None
    if sld is not None:
        try:
            cat.create_style(name, sld)
        except geoserver.catalog.ConflictingDataError as e:
            msg = 'There was already a style named %s in GeoServer, try using another name: "%s"' % (
                name, str(e))
            try:
                cat.create_style(name + '_layer', sld)
            except geoserver.catalog.ConflictingDataError as e:
                msg = 'There was already a style named %s in GeoServer, cannot overwrite: "%s"' % (
                    name, str(e))
                logger.error(msg)
                e.args = (msg,)
        if style is None:
            try:
                style = cat.get_style(name)
            except Exception:
                # BUGFIX: was a bare `except:` (also caught SystemExit and
                # KeyboardInterrupt).
                logger.warn('Could not retreive the Layer default Style name')
                # what are we doing with this var?
                msg = 'No style could be created for the layer, falling back to POINT default one'
                try:
                    style = cat.get_style(name + '_layer')
                except Exception:
                    # BUGFIX: bare `except:` narrowed; also removed the
                    # trailing `e.args = (msg,)` — `e` is unbound on this
                    # path and raised NameError.
                    style = cat.get_style('point')
                    logger.warn(msg)

    if style:
        publishing.default_style = style
        _log('default style set to %s', name)
        cat.save(publishing)

    _log('Creating Django record for [%s]', name)
    target = task.target
    typename = task.get_target_layer_name()
    layer_uuid = str(uuid.uuid1())
    title = upload_session.layer_title
    abstract = upload_session.layer_abstract

    # @todo hacking - any cached layers might cause problems (maybe
    # delete hook on layer should fix this?)
    cat._cache.clear()

    # Is it a regular file or an ImageMosaic?
    # if upload_session.mosaic_time_regex and upload_session.mosaic_time_value:
    if upload_session.mosaic:
        import pytz
        import datetime
        from geonode.layers.models import TIME_REGEX_FORMAT

        # llbbox = publishing.resource.latlon_bbox
        start = None
        end = None
        if upload_session.mosaic_time_regex and upload_session.mosaic_time_value:
            has_time = True
            start = datetime.datetime.strptime(
                upload_session.mosaic_time_value,
                TIME_REGEX_FORMAT[upload_session.mosaic_time_regex])
            start = pytz.utc.localize(start, is_dst=False)
            end = start
        else:
            has_time = False

        if not upload_session.append_to_mosaic_opts:
            saved_layer, created = Layer.objects.get_or_create(
                name=task.layer.name,
                defaults=dict(store=target.name,
                              storeType=target.store_type,
                              typename=typename,
                              workspace=target.workspace_name,
                              title=title,
                              uuid=layer_uuid,
                              abstract=abstract or '',
                              owner=user,),
                temporal_extent_start=start,
                temporal_extent_end=end,
                is_mosaic=True,
                has_time=has_time,
                has_elevation=False,
                time_regex=upload_session.mosaic_time_regex
            )
        else:
            # saved_layer = Layer.objects.filter(name=upload_session.append_to_mosaic_name)
            # created = False
            saved_layer, created = Layer.objects.get_or_create(
                name=upload_session.append_to_mosaic_name)
            try:
                # Widen the mosaic's temporal extent to include this granule.
                if saved_layer.temporal_extent_start and end:
                    if pytz.utc.localize(saved_layer.temporal_extent_start,
                                         is_dst=False) < end:
                        saved_layer.temporal_extent_end = end
                        Layer.objects.filter(
                            name=upload_session.append_to_mosaic_name).update(
                            temporal_extent_end=end)
                    else:
                        saved_layer.temporal_extent_start = end
                        Layer.objects.filter(
                            name=upload_session.append_to_mosaic_name).update(
                            temporal_extent_start=end)
            except Exception as e:
                _log('There was an error updating the mosaic temporal extent: ' + str(e))
    else:
        saved_layer, created = Layer.objects.get_or_create(
            name=task.layer.name,
            defaults=dict(store=target.name,
                          storeType=target.store_type,
                          typename=typename,
                          workspace=target.workspace_name,
                          title=title,
                          uuid=layer_uuid,
                          abstract=abstract or '',
                          owner=user,)
        )

    # Should we throw a clearer error here?
    assert saved_layer is not None

    # @todo if layer was not created, need to ensure upload target is
    # same as existing target
    _log('layer was created : %s', created)

    if created:
        saved_layer.set_default_permissions()

    # Create the points of contact records for the layer
    _log('Creating points of contact records for [%s]', name)
    saved_layer.poc = user
    saved_layer.metadata_author = user

    # look for xml
    xml_file = upload_session.base_file[0].xml_files
    if xml_file:
        saved_layer.metadata_uploaded = True
        # get model properties from XML
        # If it's contained within a zip, need to extract it
        if upload_session.base_file.archive:
            archive = upload_session.base_file.archive
            # BUGFIX: close the archive when done (original leaked the handle).
            with zipfile.ZipFile(archive, 'r') as zf:
                zf.extract(xml_file[0], os.path.dirname(archive))
            # Assign the absolute path to this file
            xml_file[0] = os.path.dirname(archive) + '/' + xml_file[0]
        # BUGFIX: close the XML file handle (original open(...).read() leaked it).
        with open(xml_file[0]) as f:
            identifier, vals, regions, keywords = set_metadata(f.read())
        regions_resolved, regions_unresolved = resolve_regions(regions)
        keywords.extend(regions_unresolved)
        # set regions (a truthy list always has len > 0, so the original
        # nested length check was redundant)
        regions_resolved = list(set(regions_resolved))
        if regions:
            saved_layer.regions.add(*regions_resolved)
        # set taggit keywords
        keywords = list(set(keywords))
        saved_layer.keywords.add(*keywords)
        # set model properties
        for key, value in vals.items():
            if key == "spatial_representation_type":
                # value = SpatialRepresentationType.objects.get(identifier=value)
                pass
            else:
                setattr(saved_layer, key, value)
        saved_layer.save()

    # Set default permissions on the newly created layer
    # FIXME: Do this as part of the post_save hook
    permissions = upload_session.permissions
    if created and permissions is not None:
        _log('Setting default permissions for [%s]', name)
        saved_layer.set_permissions(permissions)

    if upload_session.tempdir and os.path.exists(upload_session.tempdir):
        shutil.rmtree(upload_session.tempdir)

    upload = Upload.objects.get(import_id=import_session.id)
    upload.layer = saved_layer
    upload.complete = True
    upload.save()

    if upload_session.time_info:
        set_time_info(saved_layer, **upload_session.time_info)

    signals.upload_complete.send(sender=final_step, layer=saved_layer)
    return saved_layer
def geoserver_finalize_upload(self, import_id, instance_id, permissions,
                              created, xml_file, sld_file, sld_uploaded,
                              tempdir):
    """
    Finalize Layer and GeoServer configuration:
     - Sets Layer Metadata from XML and updates GeoServer Layer accordingly.
     - Sets Default Permissions

    Runs under a per-task lock keyed on the Celery request id; the
    ``Upload`` record is marked complete even if temp-dir cleanup fails.
    """
    instance = None
    try:
        instance = Layer.objects.get(id=instance_id)
    except Layer.DoesNotExist:
        logger.debug(f"Layer id {instance_id} does not exist yet!")
        raise

    lock_id = f'{self.request.id}'
    with AcquireLock(lock_id) as lock:
        if lock.acquire() is True:
            from geonode.upload.models import Upload
            upload = Upload.objects.get(import_id=import_id)
            upload.layer = instance
            upload.save()

            # Sanity checks: normalize xml_file to a single path or None.
            if isinstance(xml_file, list):
                if len(xml_file) > 0:
                    xml_file = xml_file[0]
                else:
                    xml_file = None
            elif not isinstance(xml_file, six.string_types):
                xml_file = None

            if xml_file and os.path.exists(xml_file) and os.access(
                    xml_file, os.R_OK):
                instance.metadata_uploaded = True
                # BUGFIX: close the XML file handle (original
                # open(...).read() leaked it).
                with open(xml_file) as f:
                    identifier, vals, regions, keywords = set_metadata(
                        f.read())

                # Try progressively looser lookups for the GeoServer resource.
                try:
                    gs_resource = gs_catalog.get_resource(
                        name=instance.name,
                        store=instance.store,
                        workspace=instance.workspace)
                except Exception:
                    try:
                        gs_resource = gs_catalog.get_resource(
                            name=instance.alternate,
                            store=instance.store,
                            workspace=instance.workspace)
                    except Exception:
                        try:
                            gs_resource = gs_catalog.get_resource(
                                name=instance.alternate or instance.typename)
                        except Exception:
                            gs_resource = None

                # NOTE(review): gs_resource may still be None here, in which
                # case the attribute accesses below raise AttributeError —
                # confirm whether an explicit guard/error is wanted.
                if vals:
                    title = vals.get('title', '')
                    abstract = vals.get('abstract', '')
                    # Updating GeoServer resource
                    gs_resource.title = title
                    gs_resource.abstract = abstract
                    gs_catalog.save(gs_resource)
                else:
                    vals = {}

                vals.update(
                    dict(uuid=instance.uuid,
                         name=instance.name,
                         owner=instance.owner,
                         store=gs_resource.store.name,
                         storeType=gs_resource.store.resource_type,
                         alternate=gs_resource.store.workspace.name + ':' + gs_resource.name,
                         title=gs_resource.title or gs_resource.store.name,
                         abstract=gs_resource.abstract or ''))

                instance.metadata_xml = xml_file
                regions_resolved, regions_unresolved = resolve_regions(regions)
                keywords.extend(regions_unresolved)

                # Assign the regions (needs to be done after saving)
                regions_resolved = list(set(regions_resolved))
                if regions_resolved:
                    if len(regions_resolved) > 0:
                        if not instance.regions:
                            instance.regions = regions_resolved
                        else:
                            instance.regions.clear()
                            instance.regions.add(*regions_resolved)

                # Assign the keywords (needs to be done after saving)
                keywords = list(set(keywords))
                if keywords:
                    if len(keywords) > 0:
                        if not instance.keywords:
                            instance.keywords = keywords
                        else:
                            instance.keywords.add(*keywords)

                # set model properties
                defaults = {}
                for key, value in vals.items():
                    if key == 'spatial_representation_type':
                        # NOTE(review): this value is built but never stored
                        # in `defaults` — the field is effectively dropped.
                        # Preserved as-is; confirm whether it should be kept.
                        value = SpatialRepresentationType(identifier=value)
                    elif key == 'topic_category':
                        value, created = TopicCategory.objects.get_or_create(
                            identifier=value.lower(),
                            defaults={
                                'description': '',
                                'gn_description': value
                            })
                        key = 'category'
                        defaults[key] = value
                    else:
                        defaults[key] = value

                # Save all the modified information in the instance without
                # triggering signals.
                try:
                    if not defaults.get('title', title):
                        defaults['title'] = instance.title or instance.name
                    if not defaults.get('abstract', abstract):
                        defaults['abstract'] = instance.abstract or ''

                    to_update = {}
                    to_update['charset'] = defaults.pop(
                        'charset', instance.charset)
                    to_update['storeType'] = defaults.pop(
                        'storeType', instance.storeType)
                    for _key in ('name', 'workspace', 'store', 'storeType',
                                 'alternate', 'typename'):
                        if _key in defaults:
                            to_update[_key] = defaults.pop(_key)
                        else:
                            to_update[_key] = getattr(instance, _key)
                    to_update.update(defaults)
                    with transaction.atomic():
                        ResourceBase.objects.filter(
                            id=instance.resourcebase_ptr.id).update(**defaults)
                        Layer.objects.filter(id=instance.id).update(
                            **to_update)
                    # Refresh from DB
                    instance.refresh_from_db()
                except IntegrityError:
                    raise

            if sld_uploaded:
                geoserver_set_style(instance.id, sld_file)
            else:
                geoserver_create_style(instance.id, instance.name, sld_file,
                                       tempdir)

            logger.debug(
                'Finalizing (permissions and notifications) Layer {0}'.format(
                    instance))
            instance.handle_moderated_uploads()

            if permissions is not None:
                logger.debug(
                    f'Setting permissions {permissions} for {instance.name}')
                instance.set_permissions(permissions, created=created)
            elif created:
                logger.debug(
                    f'Setting default permissions for {instance.name}')
                instance.set_default_permissions()

            try:
                # Update the upload sessions
                geonode_upload_sessions = UploadSession.objects.filter(
                    resource=instance)
                geonode_upload_sessions.update(processed=False)
                instance.upload_session = geonode_upload_sessions.first()
            except Exception as e:
                logger.exception(e)

            instance.save(notify=not created)

            try:
                logger.debug(
                    f"... Cleaning up the temporary folders {tempdir}")
                if tempdir and os.path.exists(tempdir):
                    shutil.rmtree(tempdir)
            finally:
                # Always mark the upload complete, even if cleanup failed.
                upload.complete = True
                upload.save()

            signals.upload_complete.send(sender=geoserver_finalize_upload,
                                         layer=instance)
def final_step(upload_session, user):
    """Finalize an importer session (mosaic-aware variant with POINT-style
    fallback).

    Commits the import when needed, creates the GeoServer style (falling
    back to the built-in ``point`` style on conflicts), creates or updates
    the Django ``Layer`` record (regular or ImageMosaic), applies XML
    metadata, permissions and time info, then emits ``upload_complete``.

    Returns the saved ``Layer``.
    """
    from geonode.geoserver.helpers import get_sld_for
    import_session = upload_session.import_session
    _log('Reloading session %s to check validity', import_session.id)
    import_session = import_session.reload()
    upload_session.import_session = import_session

    # the importer chooses an available featuretype name late in the game need
    # to verify the resource.name otherwise things will fail. This happens
    # when the same data is uploaded a second time and the default name is
    # chosen
    cat = gs_catalog
    cat._cache.clear()

    # Create the style and assign it to the created resource
    # FIXME: Put this in gsconfig.py
    task = import_session.tasks[0]
    # @todo see above in save_step, regarding computed unique name
    name = task.layer.name
    _log('Getting from catalog [%s]', name)
    publishing = cat.get_layer(name)

    if import_session.state == 'INCOMPLETE':
        if task.state != 'ERROR':
            raise Exception('unknown item state: %s' % task.state)
    elif import_session.state == 'PENDING':
        # Shapefiles are committed elsewhere; everything else commits here.
        if task.state == 'READY' and task.data.format != 'Shapefile':
            import_session.commit()

    if not publishing:
        raise LayerNotReady("Expected to find layer named '%s' in geoserver" % name)

    _log('Creating style for [%s]', name)
    # get_files will not find the sld if it doesn't match the base name
    # so we've worked around that in the view - if provided, it will be here
    if upload_session.import_sld_file:
        _log('using provided sld file')
        base_file = upload_session.base_file
        sld_file = base_file[0].sld_files[0]
        # BUGFIX: context manager guarantees the handle is closed even if
        # read() raises (original used open/read/close).
        with open(sld_file, 'r') as f:
            sld = f.read()
    else:
        sld = get_sld_for(publishing)

    style = None
    # BUGFIX: removed leftover Python-2 debug statement
    # `print " **************************************** "` — a syntax error
    # under Python 3 and stray noise on stdout.
    if sld is not None:
        try:
            cat.create_style(name, sld)
            style = cat.get_style(name)
        except geoserver.catalog.ConflictingDataError as e:
            msg = 'There was already a style named %s in GeoServer, try using another name: "%s"' % (
                name, str(e))
            try:
                cat.create_style(name + '_layer', sld)
                style = cat.get_style(name + '_layer')
            except geoserver.catalog.ConflictingDataError as e:
                msg = 'There was already a style named %s in GeoServer, cannot overwrite: "%s"' % (
                    name, str(e))
                logger.error(msg)
                e.args = (msg,)
                # what are we doing with this var?
                msg = 'No style could be created for the layer, falling back to POINT default one'
                style = cat.get_style('point')
                logger.warn(msg)
                e.args = (msg,)

    # FIXME: Should we use the fully qualified typename?
    publishing.default_style = style
    _log('default style set to %s', name)
    cat.save(publishing)

    _log('Creating Django record for [%s]', name)
    target = task.target
    typename = task.get_target_layer_name()
    layer_uuid = str(uuid.uuid1())
    title = upload_session.layer_title
    abstract = upload_session.layer_abstract

    # @todo hacking - any cached layers might cause problems (maybe
    # delete hook on layer should fix this?)
    cat._cache.clear()

    # Is it a regular file or an ImageMosaic?
    # if upload_session.mosaic_time_regex and upload_session.mosaic_time_value:
    if upload_session.mosaic:
        import pytz
        import datetime
        from geonode.layers.models import TIME_REGEX_FORMAT

        # llbbox = publishing.resource.latlon_bbox
        start = None
        end = None
        if upload_session.mosaic_time_regex and upload_session.mosaic_time_value:
            has_time = True
            start = datetime.datetime.strptime(
                upload_session.mosaic_time_value,
                TIME_REGEX_FORMAT[upload_session.mosaic_time_regex])
            start = pytz.utc.localize(start, is_dst=False)
            end = start
        else:
            has_time = False

        if not upload_session.append_to_mosaic_opts:
            saved_layer, created = Layer.objects.get_or_create(
                name=task.layer.name,
                defaults=dict(store=target.name,
                              storeType=target.store_type,
                              typename=typename,
                              workspace=target.workspace_name,
                              title=title,
                              uuid=layer_uuid,
                              abstract=abstract or '',
                              owner=user,),
                temporal_extent_start=start,
                temporal_extent_end=end,
                is_mosaic=True,
                has_time=has_time,
                has_elevation=False,
                time_regex=upload_session.mosaic_time_regex
            )
        else:
            # saved_layer = Layer.objects.filter(name=upload_session.append_to_mosaic_name)
            # created = False
            saved_layer, created = Layer.objects.get_or_create(
                name=upload_session.append_to_mosaic_name)
            try:
                # Widen the mosaic's temporal extent to include this granule.
                if saved_layer.temporal_extent_start and end:
                    if pytz.utc.localize(saved_layer.temporal_extent_start,
                                         is_dst=False) < end:
                        saved_layer.temporal_extent_end = end
                        Layer.objects.filter(
                            name=upload_session.append_to_mosaic_name).update(
                            temporal_extent_end=end)
                    else:
                        saved_layer.temporal_extent_start = end
                        Layer.objects.filter(
                            name=upload_session.append_to_mosaic_name).update(
                            temporal_extent_start=end)
            except Exception as e:
                _log('There was an error updating the mosaic temporal extent: ' + str(e))
    else:
        saved_layer, created = Layer.objects.get_or_create(
            name=task.layer.name,
            defaults=dict(store=target.name,
                          storeType=target.store_type,
                          typename=typename,
                          workspace=target.workspace_name,
                          title=title,
                          uuid=layer_uuid,
                          abstract=abstract or '',
                          owner=user,)
        )

    # Should we throw a clearer error here?
    assert saved_layer is not None

    # @todo if layer was not created, need to ensure upload target is
    # same as existing target
    _log('layer was created : %s', created)

    if created:
        saved_layer.set_default_permissions()

    # Create the points of contact records for the layer
    _log('Creating points of contact records for [%s]', name)
    saved_layer.poc = user
    saved_layer.metadata_author = user

    # look for xml
    xml_file = upload_session.base_file[0].xml_files
    if xml_file:
        saved_layer.metadata_uploaded = True
        # get model properties from XML
        # If it's contained within a zip, need to extract it
        if upload_session.base_file.archive:
            archive = upload_session.base_file.archive
            # BUGFIX: close the archive when done (original leaked the handle).
            with zipfile.ZipFile(archive, 'r') as zf:
                zf.extract(xml_file[0], os.path.dirname(archive))
            # Assign the absolute path to this file
            xml_file[0] = os.path.dirname(archive) + '/' + xml_file[0]
        # BUGFIX: close the XML file handle (original open(...).read() leaked it).
        with open(xml_file[0]) as f:
            identifier, vals, regions, keywords = set_metadata(f.read())
        regions_resolved, regions_unresolved = resolve_regions(regions)
        keywords.extend(regions_unresolved)
        # set regions (a truthy list always has len > 0, so the original
        # nested length check was redundant)
        regions_resolved = list(set(regions_resolved))
        if regions:
            saved_layer.regions.add(*regions_resolved)
        # set taggit keywords
        keywords = list(set(keywords))
        saved_layer.keywords.add(*keywords)
        # set model properties
        for key, value in vals.items():
            if key == "spatial_representation_type":
                # value = SpatialRepresentationType.objects.get(identifier=value)
                pass
            else:
                setattr(saved_layer, key, value)
        saved_layer.save()

    # Set default permissions on the newly created layer
    # FIXME: Do this as part of the post_save hook
    permissions = upload_session.permissions
    if created and permissions is not None:
        _log('Setting default permissions for [%s]', name)
        saved_layer.set_permissions(permissions)

    if upload_session.tempdir and os.path.exists(upload_session.tempdir):
        shutil.rmtree(upload_session.tempdir)

    upload = Upload.objects.get(import_id=import_session.id)
    upload.layer = saved_layer
    upload.complete = True
    upload.save()

    if upload_session.time_info:
        set_time_info(saved_layer, **upload_session.time_info)

    signals.upload_complete.send(sender=final_step, layer=saved_layer)
    return saved_layer
def final_step(upload_session, user):
    """Finalize an upload: publish the layer in GeoServer and create/refresh
    the corresponding Django ``Layer`` record.

    Steps, in order: reload the importer session, commit it if pending,
    create/assign an SLD style, create the ``Layer`` row, apply XML metadata
    (if an XML sidecar file was uploaded), set permissions, clean the temp
    dir, mark the ``Upload`` complete, apply time info and fire the
    ``upload_complete`` signal.

    :param upload_session: the in-progress upload session object.
    :param user: the Django user owning the uploaded layer.
    :returns: the saved ``Layer`` instance.
    :raises Exception: if the import session is INCOMPLETE with an
        unexpected task state.
    :raises LayerNotReady: if GeoServer does not report the layer yet.
    """
    from geonode.geoserver.helpers import get_sld_for
    import_session = upload_session.import_session
    _log('Reloading session %s to check validity', import_session.id)
    import_session = import_session.reload()
    upload_session.import_session = import_session

    # the importer chooses an available featuretype name late in the game need
    # to verify the resource.name otherwise things will fail. This happens
    # when the same data is uploaded a second time and the default name is
    # chosen
    cat = gs_catalog
    cat._cache.clear()

    # Create the style and assign it to the created resource
    # FIXME: Put this in gsconfig.py
    task = import_session.tasks[0]

    # @todo see above in save_step, regarding computed unique name
    name = task.layer.name

    _log('Getting from catalog [%s]', name)
    publishing = cat.get_layer(name)

    if import_session.state == 'INCOMPLETE':
        if task.state != 'ERROR':
            raise Exception('unknown item state: %s' % task.state)
    elif import_session.state == 'PENDING':
        # Shapefiles are committed elsewhere; everything else is committed here.
        if task.state == 'READY' and task.data.format != 'Shapefile':
            import_session.commit()

    if not publishing:
        raise LayerNotReady(
            "Expected to find layer named '%s' in geoserver" % name)

    _log('Creating style for [%s]', name)
    # get_files will not find the sld if it doesn't match the base name
    # so we've worked around that in the view - if provided, it will be here
    if upload_session.import_sld_file:
        _log('using provided sld file')
        base_file = upload_session.base_file
        sld_file = base_file[0].sld_files[0]
        # Use a context manager so the file handle is always released.
        with open(sld_file, 'r') as f:
            sld = f.read()
    else:
        sld = get_sld_for(publishing)

    if sld is not None:
        try:
            cat.create_style(name, sld)
        except geoserver.catalog.ConflictingDataError as e:
            msg = 'There was already a style named %s in GeoServer, cannot overwrite: "%s"' % (
                name, str(e))
            # what are we doing with this var?
            # style = cat.get_style(name)
            logger.warning(msg)
            e.args = (msg,)

        # FIXME: Should we use the fully qualified typename?
        publishing.default_style = cat.get_style(name)
        _log('default style set to %s', name)
        cat.save(publishing)

    _log('Creating Django record for [%s]', name)
    target = task.target
    typename = task.get_target_layer_name()
    layer_uuid = str(uuid.uuid1())

    title = upload_session.layer_title
    abstract = upload_session.layer_abstract

    # @todo hacking - any cached layers might cause problems (maybe
    # delete hook on layer should fix this?)
    cat._cache.clear()

    defaults = dict(store=target.name,
                    storeType=target.store_type,
                    typename=typename,
                    workspace=target.workspace_name,
                    title=title,
                    uuid=layer_uuid,
                    abstract=abstract or '',
                    owner=user,)
    _log('record defaults: %s', defaults)
    saved_layer, created = Layer.objects.get_or_create(
        name=task.layer.name,
        defaults=defaults
    )

    # Should we throw a clearer error here?
    assert saved_layer is not None

    # @todo if layer was not created, need to ensure upload target is
    # same as existing target

    _log('layer was created : %s', created)

    if created:
        saved_layer.set_default_permissions()

    # Create the points of contact records for the layer
    _log('Creating points of contact records for [%s]', name)
    saved_layer.poc = user
    saved_layer.metadata_author = user

    # look for xml
    xml_file = upload_session.base_file[0].xml_files
    if xml_file:
        saved_layer.metadata_uploaded = True
        # get model properties from XML.
        # NOTE: set_metadata returns a 4-tuple (identifier, vals, regions,
        # keywords) — every other call site in this module unpacks four
        # values; unpacking three here raised ValueError.
        with open(xml_file[0]) as xf:
            identifier, vals, regions, keywords = set_metadata(xf.read())

        regions_resolved, regions_unresolved = resolve_regions(regions)
        keywords.extend(regions_unresolved)

        # set regions (deduplicated)
        regions_resolved = list(set(regions_resolved))
        if regions:
            saved_layer.regions.add(*regions_resolved)

        # set taggit keywords (deduplicated)
        keywords = list(set(keywords))
        saved_layer.keywords.add(*keywords)

        # set model properties
        for (key, value) in vals.items():
            if key == "spatial_representation_type":
                # value = SpatialRepresentationType.objects.get(identifier=value)
                pass
            else:
                setattr(saved_layer, key, value)

        saved_layer.save()

    # Set default permissions on the newly created layer
    # FIXME: Do this as part of the post_save hook
    permissions = upload_session.permissions
    if created and permissions is not None:
        _log('Setting default permissions for [%s]', name)
        saved_layer.set_permissions(permissions)

    if upload_session.tempdir and os.path.exists(upload_session.tempdir):
        shutil.rmtree(upload_session.tempdir)

    upload = Upload.objects.get(import_id=import_session.id)
    upload.layer = saved_layer
    upload.complete = True
    upload.save()

    if upload_session.time_info:
        set_time_info(saved_layer, **upload_session.time_info)

    signals.upload_complete.send(sender=final_step, layer=saved_layer)

    return saved_layer
def _enrich_layer_metadata(self, geonode_service, geonode_layer):
    """Enrich a local remote-service layer with metadata fetched from the
    remote GeoNode's ``/api/layers/`` endpoint.

    Queries the remote API by layer name; on a single JSON match it copies
    the plain ``LAYER_FIELDS`` onto the local record, updates/creates the
    thumbnail, and replaces keywords and regions. The layer is saved in the
    ``finally`` clause, so it is persisted even when enrichment fails
    partway — but only when the HTTP response was a JSON 200; otherwise the
    method silently does nothing.

    :param geonode_service: unused in this body — presumably kept for
        signature compatibility with other handlers; TODO confirm.
    :param geonode_layer: the local ``Layer`` instance to enrich.
    """
    # Remote layer names may be "workspace:name"; only the bare name is
    # used for the API lookup.
    workspace, layername = geonode_layer.name.split(
        ":") if ":" in geonode_layer.name else (None, geonode_layer.name)
    url = urlsplit(self.url)
    base_url = f'{url.scheme}://{url.netloc}/'
    # NOTE(review): verify=False disables TLS certificate validation —
    # confirm this is intentional for the remote services use case.
    response = requests.get(
        f'{base_url}api/layers/?name={layername}', {},
        timeout=10,
        verify=False)
    content = response.content
    status = response.status_code
    content_type = response.headers['Content-Type']
    # NOTE(review): strict equality fails if the server appends a charset
    # (e.g. "application/json; charset=utf-8") — verify against the remote.
    if status == 200 and 'application/json' == content_type:
        try:
            if isinstance(content, bytes):
                content = content.decode('UTF-8')
            _json_obj = json.loads(content)
            # Only act on an unambiguous single match by name.
            if _json_obj['meta']['total_count'] == 1:
                _layer = _json_obj['objects'][0]
                if _layer:
                    r_fields = {}
                    # Update plain fields
                    for field in GeoNodeServiceHandler.LAYER_FIELDS:
                        if field in _layer and _layer[field]:
                            r_fields[field] = _layer[field]
                    if r_fields:
                        # Bulk update bypasses model save(); refresh so the
                        # in-memory instance sees the new values.
                        Layer.objects.filter(id=geonode_layer.id).update(
                            **r_fields)
                        geonode_layer.refresh_from_db()
                    # Update Thumbnail
                    if "thumbnail_url" in _layer and _layer[
                            "thumbnail_url"]:
                        thumbnail_remote_url = _layer["thumbnail_url"]
                        _url = urlsplit(thumbnail_remote_url)
                        if not _url.scheme:
                            # Relative thumbnail path: resolve against the
                            # remote service's base URL.
                            thumbnail_remote_url = f"{geonode_layer.remote_service.service_url}{_url.path}"
                        resp, image = http_client.request(
                            thumbnail_remote_url)
                        if 'ServiceException' in str(image) or \
                           resp.status_code < 200 or resp.status_code > 299:
                            msg = f'Unable to obtain thumbnail: {image}'
                            logger.debug(msg)
                            # Replace error message with None.
                            image = None
                        if image is not None:
                            thumbnail_name = f'layer-{geonode_layer.uuid}-thumb.png'
                            geonode_layer.save_thumbnail(
                                thumbnail_name, image=image)
                        else:
                            # Remote thumbnail fetch failed: generate locally.
                            self._create_layer_thumbnail(geonode_layer)
                    else:
                        # No remote thumbnail advertised: generate locally.
                        self._create_layer_thumbnail(geonode_layer)
                    # Add Keywords (full replacement, not a merge)
                    if "keywords" in _layer and _layer["keywords"]:
                        keywords = _layer["keywords"]
                        if keywords:
                            geonode_layer.keywords.clear()
                            geonode_layer.keywords.add(*keywords)
                    # Add Regions (full replacement, only if any resolved)
                    if "regions" in _layer and _layer["regions"]:
                        (regions_resolved, regions_unresolved) = resolve_regions(
                            _layer["regions"])
                        if regions_resolved:
                            geonode_layer.regions.clear()
                            geonode_layer.regions.add(*regions_resolved)
                    # Remove temporary topic category here as the field
                    # converted into m2m field
                    # any idea how to setup?
                    # Add Topic Category
                    # if "category" in _layer and _layer["category"]:
                    #     (categories_resolved, categories_unresolved) = resolve_categories(_layer["category"])
                    #     if categories_resolved:
                    #         geonode_layer.category.clear()
                    #         geonode_layer.category.add(*categories_resolved)
                    # if "category__gn_description" in _layer and _layer["category__gn_description"]:
                    #     try:
                    #         categories = TopicCategory.objects.filter(
                    #             Q(gn_description__iexact=_layer["category__gn_description"]))
                    #         if categories:
                    #             geonode_layer.category = categories[0]
                    #     except Exception:
                    #         traceback.print_exc()
        except Exception:
            # Best-effort enrichment: log and fall through to the save below.
            traceback.print_exc()
        finally:
            # Persist whatever was applied, even after a partial failure.
            try:
                geonode_layer.save(notify=True)
            except Exception as e:
                logger.error(e)