def csv_step(upload_session, lat_field, lng_field):
    """Wire up the lat/lng-to-point-geometry transform for a CSV upload.

    Replaces any existing ``AttributesToPointGeometryTransform`` on the first
    import task with one built from the selected columns, persists the
    transforms, then reloads the importer session and mirrors it back onto
    the Upload record.
    """
    importer_session = upload_session.import_session
    first_task = importer_session.tasks[0]
    point_transform = dict(
        type='AttributesToPointGeometryTransform',
        latField=lat_field,
        lngField=lng_field,
    )
    # Swap out any stale transform of the same type before adding the new
    # one; both edits are buffered locally and pushed in a single save call.
    first_task.remove_transforms([point_transform], by_field='type', save=False)
    first_task.add_transforms([point_transform], save=False)
    first_task.save_transforms()
    try:
        importer_session = importer_session.reload()
    except gsimporter.api.NotFound as e:
        Upload.objects.invalidate_from_session(upload_session)
        raise UploadException.from_exc(
            _("The GeoServer Import Session is no more available"), e)
    upload_session.import_session = importer_session
    Upload.objects.update_from_session(upload_session)
def srs_step(upload_session, source, target):
    """Apply the declared SRS and register a reprojection transform.

    When *source* is given it becomes the task SRS; a ``ReprojectTransform``
    from *source* to *target* then replaces any previous one. Transforms are
    saved, the importer session is reloaded, and the Upload record is synced.
    """
    importer_session = upload_session.import_session
    first_task = importer_session.tasks[0]
    if source:
        logger.debug('Setting SRS to %s', source)
        first_task.set_srs(source)
    reproject = dict(
        type='ReprojectTransform',
        source=source,
        target=target,
    )
    # Remove-by-type first so repeated visits to this step do not stack
    # duplicate reprojection transforms; push both edits in one save.
    first_task.remove_transforms([reproject], by_field='type', save=False)
    first_task.add_transforms([reproject], save=False)
    first_task.save_transforms()
    try:
        importer_session = importer_session.reload()
    except gsimporter.api.NotFound as e:
        Upload.objects.invalidate_from_session(upload_session)
        raise UploadException.from_exc(
            _("The GeoServer Import Session is no more available"), e)
    upload_session.import_session = importer_session
    Upload.objects.update_from_session(upload_session)
def time_step_view(request, upload_session):
    """Upload-wizard step configuring the time dimension of a dataset.

    GET renders the time-configuration form, or skips straight to the next
    step when the layer has no usable time attributes (or ``force_ajax`` is
    requested).  POST validates the submitted form, clears any previously
    configured time transforms on the import task and applies the new ones
    via ``time_step``.

    :param request: Django request; only GET and POST are accepted.
    :param upload_session: current upload session; resolved from the
        request when falsy.
    :return: a response from ``render``, ``next_step_response`` or
        ``error_response``.
    :raises UploadException: when the GeoServer import session disappeared.
    """
    if not upload_session:
        upload_session = _get_upload_session(request)
    import_session = upload_session.import_session
    assert import_session is not None
    # Truthy (non-empty string) when the client asked to skip the interactive form.
    force_ajax = '&force_ajax=true' if request and 'force_ajax' in request.GET and request.GET[
        'force_ajax'] == 'true' else ''
    if request.method == 'GET':
        layer = check_import_session_is_valid(
            request, upload_session, import_session)
        if layer:
            (has_time_dim, layer_values) = layer_eligible_for_time_dimension(
                request, layer, upload_session=upload_session)
            if has_time_dim and layer_values:
                upload_session.completed_step = 'check'
                if not force_ajax:
                    context = {
                        'time_form': create_time_form(request, upload_session, None),
                        'layer_name': layer.name,
                        'layer_values': layer_values,
                        'layer_attributes': list(layer_values[0].keys()),
                        'async_upload': is_async_step(upload_session)
                    }
                    return render(request, 'upload/layer_upload_time.html', context=context)
            else:
                # No time dimension available: mark this step done (or skipped
                # entirely when the time step is disabled).
                upload_session.completed_step = 'time' if _ALLOW_TIME_STEP else 'check'
        return next_step_response(request, upload_session)
    elif request.method != 'POST':
        raise Exception()
    form = create_time_form(request, upload_session, request.POST)
    if not form.is_valid():
        logger.warning('Invalid upload form: %s', form.errors)
        return error_response(request, errors=["Invalid Submission"])
    cleaned = form.cleaned_data
    start_attribute_and_type = cleaned.get('start_attribute', None)
    if upload_session.time_transforms:
        # Drop transforms configured by a previous visit to this step.
        upload_session.import_session.tasks[0].remove_transforms(
            upload_session.time_transforms, save=True)
        upload_session.import_session.tasks[0].save_transforms()
        upload_session.time_transforms = None
    if upload_session.import_session.tasks[0].transforms:
        # Also purge stale date/index transforms left on the task itself.
        # NOTE(review): this removes elements while iterating the task's
        # transform list — presumably `transforms` yields a fresh snapshot;
        # confirm against the gsimporter client.
        for transform in upload_session.import_session.tasks[0].transforms:
            if 'type' in transform and \
                (str(transform['type']) == 'DateFormatTransform' or
                    str(transform['type']) == 'CreateIndexTransform'):
                upload_session.import_session.tasks[0].remove_transforms(
                    [transform], save=True)
                upload_session.import_session.tasks[0].save_transforms()
    try:
        upload_session.import_session = import_session.reload()
    except gsimporter.api.NotFound as e:
        Upload.objects.invalidate_from_session(upload_session)
        raise UploadException.from_exc(
            _("The GeoServer Import Session is no more available"), e)
    if start_attribute_and_type:
        def tx(type_name):
            # Map any non-None attribute type to a DateFormatTransform.
            # return None if type_name is None or type_name == 'Date' \
            return None if type_name is None \
                else 'DateFormatTransform'
        end_attribute, end_type = cleaned.get('end_attribute', (None, None))
        time_step(
            upload_session,
            time_attribute=start_attribute_and_type[0],
            time_transform_type=tx(start_attribute_and_type[1]),
            time_format=cleaned.get('attribute_format', None),
            end_time_attribute=end_attribute,
            end_time_transform_type=tx(end_type),
            end_time_format=cleaned.get('end_attribute_format', None),
            presentation_strategy=cleaned['presentation_strategy'],
            precision_value=cleaned['precision_value'],
            precision_step=cleaned['precision_step'],
        )
    upload_session.completed_step = 'check'
    return next_step_response(request, upload_session)
def final_step(upload_session, user, charset="UTF-8", dataset_id=None):
    """Finalize a GeoServer import: commit the importer session and
    create/refresh the matching GeoNode ``Dataset`` together with its
    metadata, style, time info and thumbnail.

    :param upload_session: session wrapper holding the gsimporter session,
        the uploaded files and the user-selected options.
    :param user: owner assigned to a newly created dataset.
    :param charset: charset applied to the import task (default ``UTF-8``).
    :param dataset_id: optional ``resourcebase_ptr_id`` of an existing
        Dataset whose name overrides the importer-computed one.
    :return: the saved ``Dataset``; ``None`` when the Upload record is
        already in the RUNNING state (someone else is finalizing it).
    :raises UploadException: when the importer session disappeared or the
        dataset could not be configured.
    :raises LayerNotReady: when the layer is not present in GeoServer.
    :raises GeoNodeException: on metadata-parsing errors, duplicate UUIDs
        or failures while finalizing the resource.
    """
    import_session = upload_session.import_session
    import_id = import_session.id
    _log(f'Reloading session {import_id} to check validity')
    try:
        import_session = import_session.reload()
    except gsimporter.api.NotFound as e:
        Upload.objects.invalidate_from_session(upload_session)
        raise UploadException.from_exc(
            _("The GeoServer Import Session is no more available"), e)
    if Upload.objects.filter(import_id=import_id).count():
        Upload.objects.filter(import_id=import_id).update(complete=False)
        upload = Upload.objects.filter(import_id=import_id).get()
        # Another worker already owns this import; bail out quietly.
        if upload.state == enumerations.STATE_RUNNING:
            return
    upload_session.import_session = import_session
    Upload.objects.update_from_session(upload_session)
    # Create the style and assign it to the created resource
    # FIXME: Put this in gsconfig.py
    task = import_session.tasks[0]
    task.set_charset(charset)
    # @todo see above in save_step, regarding computed unique name
    name = task.layer.name
    if dataset_id:
        name = Dataset.objects.get(resourcebase_ptr_id=dataset_id).name
    _log(f'Getting from catalog [{name}]')
    try:
        # the importer chooses an available featuretype name late in the game need
        # to verify the resource.name otherwise things will fail.  This happens
        # when the same data is uploaded a second time and the default name is
        # chosen
        gs_catalog.get_layer(name)
    except Exception:
        Upload.objects.invalidate_from_session(upload_session)
        raise LayerNotReady(
            _(f"Expected to find layer named '{name}' in geoserver"))
    # Commit the import when GeoServer reports it ready; an INCOMPLETE
    # session with a task state other than ERROR is unexpected.
    if import_session.state == 'READY' or (import_session.state == 'PENDING' and task.state == 'READY'):
        import_session.commit()
    elif import_session.state == 'INCOMPLETE' and task.state != 'ERROR':
        Upload.objects.invalidate_from_session(upload_session)
        raise Exception(f'unknown item state: {task.state}')
    try:
        import_session = import_session.reload()
    except gsimporter.api.NotFound as e:
        Upload.objects.invalidate_from_session(upload_session)
        raise UploadException.from_exc(
            _("The GeoServer Import Session is no more available"), e)
    upload_session.import_session = import_session
    Upload.objects.update_from_session(upload_session)
    _log(f'Creating Django record for [{name}]')
    target = task.target
    alternate = task.get_target_layer_name()
    dataset_uuid = None
    title = upload_session.dataset_title
    abstract = upload_session.dataset_abstract
    # look for xml and finalize the metadata of the dataset
    metadata_uploaded = False
    xml_file = upload_session.base_file[0].xml_files
    if xml_file:
        try:
            # get model properties from XML
            # If it's contained within a zip, need to extract it
            if upload_session.base_file.archive:
                archive = upload_session.base_file.archive
                zf = zipfile.ZipFile(archive, 'r', allowZip64=True)
                zf.extract(xml_file[0], os.path.dirname(archive))
                # Assign the absolute path to this file
                xml_file = f"{os.path.dirname(archive)}/{xml_file[0]}"
            # Sanity checks
            if isinstance(xml_file, list):
                if len(xml_file) > 0:
                    xml_file = xml_file[0]
                else:
                    xml_file = None
            elif not isinstance(xml_file, str):
                xml_file = None
            if xml_file and os.path.exists(xml_file) and os.access(
                    xml_file, os.R_OK):
                dataset_uuid, vals, regions, keywords, custom = parse_metadata(
                    open(xml_file).read())
                metadata_uploaded = True
        except Exception as e:
            Upload.objects.invalidate_from_session(upload_session)
            logger.error(e)
            raise GeoNodeException(
                _("Exception occurred while parsing the provided Metadata file."
                  ), e)
    # look for SLD
    sld_file = upload_session.base_file[0].sld_files
    sld_uploaded = False
    if sld_file:
        # If it's contained within a zip, need to extract it
        if upload_session.base_file.archive:
            archive = upload_session.base_file.archive
            logger.error(f'using uploaded sld file from {archive}')
            zf = zipfile.ZipFile(archive, 'r', allowZip64=True)
            # NOTE(review): both a positional directory and keyword `path`
            # are passed to ZipFile.extract here; the stdlib signature is
            # extract(member, path=None, pwd=None) — confirm this call is
            # intended and actually exercised.
            zf.extract(sld_file[0], os.path.dirname(archive),
                       path=upload_session.tempdir)
            # Assign the absolute path to this file
            sld_file[0] = f"{os.path.dirname(archive)}/{sld_file[0]}"
        else:
            _sld_file = f"{os.path.dirname(upload_session.tempdir)}/{os.path.basename(sld_file[0])}"
            logger.error(f"copying [{sld_file[0]}] to [{_sld_file}]")
            try:
                shutil.copyfile(sld_file[0], _sld_file)
                sld_file = _sld_file
            except (IsADirectoryError, shutil.SameFileError) as e:
                logger.exception(e)
                sld_file = sld_file[0]
            except Exception as e:
                raise UploadException.from_exc(_('Error uploading Dataset'), e)
        sld_uploaded = True
    else:
        # get_files will not find the sld if it doesn't match the base name
        # so we've worked around that in the view - if provided, it will be here
        if upload_session.import_sld_file:
            logger.error('using provided sld file from importer')
            base_file = upload_session.base_file
            sld_file = base_file[0].sld_files[0]
        sld_uploaded = False
    logger.error(f'[sld_uploaded: {sld_uploaded}] sld_file: {sld_file}')
    # Make sure the layer does not exists already
    if dataset_uuid and Dataset.objects.filter(uuid=dataset_uuid).count():
        Upload.objects.invalidate_from_session(upload_session)
        logger.error(
            "The UUID identifier from the XML Metadata is already in use in this system."
        )
        raise GeoNodeException(
            _("The UUID identifier from the XML Metadata is already in use in this system."
              ))
    # Is it a regular file or an ImageMosaic?
    # if upload_session.mosaic_time_regex and upload_session.mosaic_time_value:
    saved_dataset = None
    has_time = has_elevation = False
    start = end = None
    if upload_session.mosaic_time_regex and upload_session.mosaic_time_value:
        has_elevation = True
        start = datetime.datetime.strptime(
            upload_session.mosaic_time_value,
            TIME_REGEX_FORMAT[upload_session.mosaic_time_regex])
        start = pytz.utc.localize(start, is_dst=False)
        end = start
    if upload_session.time and upload_session.time_info and upload_session.time_transforms:
        has_time = True
    if upload_session.append_to_mosaic_opts:
        # Appending a granule to an existing mosaic: only update the
        # dataset's temporal extent.
        try:
            with transaction.atomic():
                saved_dataset_filter = Dataset.objects.filter(
                    name=upload_session.append_to_mosaic_name)
                if not saved_dataset_filter.exists():
                    saved_dataset = resource_manager.create(
                        name=upload_session.append_to_mosaic_name)
                    created = True
                else:
                    saved_dataset = saved_dataset_filter.get()
                    created = False
                saved_dataset.set_dirty_state()
                if saved_dataset.temporal_extent_start and end:
                    if pytz.utc.localize(saved_dataset.temporal_extent_start,
                                         is_dst=False) < end:
                        saved_dataset.temporal_extent_end = end
                        Dataset.objects.filter(
                            name=upload_session.append_to_mosaic_name).update(
                                temporal_extent_end=end)
                    else:
                        saved_dataset.temporal_extent_start = end
                        Dataset.objects.filter(
                            name=upload_session.append_to_mosaic_name).update(
                                temporal_extent_start=end)
        except Exception as e:
            _log(
                f"There was an error updating the mosaic temporal extent: {str(e)}"
            )
    else:
        # Regular upload: create the Dataset if no matching one exists yet.
        try:
            with transaction.atomic():
                saved_dataset_filter = Dataset.objects.filter(
                    store=target.name,
                    alternate=alternate,
                    workspace=target.workspace_name,
                    name=task.layer.name)
                if not saved_dataset_filter.exists():
                    saved_dataset = resource_manager.create(
                        dataset_uuid,
                        resource_type=Dataset,
                        defaults=dict(
                            store=target.name,
                            subtype=get_dataset_storetype(target.store_type),
                            alternate=alternate,
                            workspace=target.workspace_name,
                            title=title,
                            name=task.layer.name,
                            abstract=abstract or _('No abstract provided'),
                            owner=user,
                            temporal_extent_start=start,
                            temporal_extent_end=end,
                            is_mosaic=has_elevation,
                            has_time=has_time,
                            has_elevation=has_elevation,
                            time_regex=upload_session.mosaic_time_regex))
                    created = True
                else:
                    saved_dataset = saved_dataset_filter.get()
                    created = False
                saved_dataset.set_dirty_state()
        except Exception as e:
            Upload.objects.invalidate_from_session(upload_session)
            raise UploadException.from_exc(_('Error configuring Dataset'), e)
    assert saved_dataset
    # Pre-existing dataset: nothing left to finalize.
    if not created:
        return saved_dataset
    try:
        saved_dataset.set_dirty_state()
        with transaction.atomic():
            Upload.objects.update_from_session(upload_session, resource=saved_dataset)
            # Set default permissions on the newly created layer and send notifications
            permissions = upload_session.permissions
            # Finalize Upload
            resource_manager.set_permissions(
                None, instance=saved_dataset, permissions=permissions, created=created)
            resource_manager.update(
                None, instance=saved_dataset, xml_file=xml_file,
                metadata_uploaded=metadata_uploaded)
            resource_manager.exec(
                'set_style', None, instance=saved_dataset,
                sld_uploaded=sld_uploaded, sld_file=sld_file,
                tempdir=upload_session.tempdir)
            resource_manager.exec(
                'set_time_info', None, instance=saved_dataset,
                time_info=upload_session.time_info)
            resource_manager.set_thumbnail(None, instance=saved_dataset)
        saved_dataset.set_processing_state(enumerations.STATE_PROCESSED)
    except Exception as e:
        saved_dataset.set_processing_state(enumerations.STATE_INVALID)
        raise GeoNodeException(e)
    finally:
        saved_dataset.clear_dirty_state()
    try:
        logger.debug(
            f"... Cleaning up the temporary folders {upload_session.tempdir}")
        if upload_session.tempdir and os.path.exists(upload_session.tempdir):
            shutil.rmtree(upload_session.tempdir)
    except Exception as e:
        logger.warning(e)
    finally:
        # Whatever happened during cleanup, the Upload is done.
        Upload.objects.filter(import_id=import_id).update(complete=True)
    return saved_dataset
def time_step(upload_session, time_attribute, time_transform_type,
              presentation_strategy, precision_value, precision_step,
              end_time_attribute=None,
              end_time_transform_type=None,
              end_time_format=None,
              time_format=None):
    '''
    time_attribute - name of attribute to use as time
    time_transform_type - name of transform. either
        DateFormatTransform or IntegerFieldToDateTransform
    time_format - optional string format
    end_time_attribute - optional name of attribute to use as end time
    end_time_transform_type - optional name of transform. either
        DateFormatTransform or IntegerFieldToDateTransform
    end_time_format - optional string format
    presentation_strategy - LIST, DISCRETE_INTERVAL, CONTINUOUS_INTERVAL
    precision_value - number
    precision_step - year, month, day, week, etc.
    '''
    transforms = []

    def build_time_transform(att, type, format, end_time_attribute, presentation_strategy):
        # Descriptor for a single date-handling transform on the import task.
        trans = {'type': type, 'field': att}
        if format:
            trans['format'] = format
        if end_time_attribute:
            trans['enddate'] = end_time_attribute
        if presentation_strategy:
            trans['presentation'] = presentation_strategy
        return trans

    def build_att_remap_transform(att):
        # @todo the target is so ugly it should be obvious
        return {
            'type': 'AttributeRemapTransform',
            'field': att,
            'target': 'org.geotools.data.postgis.PostGISDialect$XDate'
        }

    use_big_date = getattr(settings, 'USE_BIG_DATE', False)
    if time_attribute:
        if time_transform_type:
            transforms.append(
                build_time_transform(time_attribute, time_transform_type,
                                     time_format, end_time_attribute,
                                     presentation_strategy))
        # this must go after the remapping transform to ensure the
        # type change is applied
        if use_big_date:
            transforms.append(build_att_remap_transform(time_attribute))
            if end_time_attribute:
                transforms.append(
                    build_att_remap_transform(end_time_attribute))
        transforms.append({
            'type': 'CreateIndexTransform',
            'field': time_attribute
        })
        # the time_info will be used in the last step to configure the
        # layer in geoserver - the dict matches the arguments of the
        # set_time_info helper function
        upload_session.time_info = dict(attribute=time_attribute,
                                        end_attribute=end_time_attribute,
                                        presentation=presentation_strategy,
                                        precision_value=precision_value,
                                        precision_step=precision_step)
    if upload_session.time_transforms:
        upload_session.import_session.tasks[0].remove_transforms(
            upload_session.time_transforms)
    if transforms:
        logger.debug(f'Setting transforms {transforms}')
        try:
            # add_transforms/save_transforms talk to the importer REST API
            # and are the calls that can raise BadRequest, so they must sit
            # inside this try block.  Previously only the two plain
            # attribute assignments were guarded, which made the except
            # handler unreachable.
            upload_session.import_session.tasks[0].add_transforms(transforms)
            upload_session.import_session.tasks[0].save_transforms()
            upload_session.time_transforms = transforms
            upload_session.time = True
        except gsimporter.BadRequest as br:
            Upload.objects.invalidate_from_session(upload_session)
            raise UploadException.from_exc(_('Error configuring time:'), br)
    else:
        upload_session.time = False
def final_step(upload_session, user, charset="UTF-8"):
    """Finalize a GeoServer import and create the matching GeoNode ``Layer``.

    Commits the importer session, creates (or fetches) the ``Layer`` and its
    ``UploadSession`` record, stores the uploaded files against it, extracts
    optional XML metadata and SLD style, then hands the remaining work to
    the async ``geoserver_finalize_upload`` task.

    :param upload_session: session wrapper holding the gsimporter session,
        the uploaded files and the user-selected options.
    :param user: owner / point of contact for the new layer.
    :param charset: charset applied to the import task (default ``UTF-8``).
    :return: the saved ``Layer``.
    :raises UploadException: when the importer session disappeared or the
        Layer could not be configured.
    :raises LayerNotReady: when the layer is not published in GeoServer.
    :raises GeoNodeException: when the metadata UUID is already in use.
    """
    import_session = upload_session.import_session
    _log('Reloading session %s to check validity', import_session.id)
    try:
        import_session = import_session.reload()
    except gsimporter.api.NotFound as e:
        Upload.objects.invalidate_from_session(upload_session)
        raise UploadException.from_exc(
            _("The GeoServer Import Session is no more available"), e)
    upload_session.import_session = import_session
    Upload.objects.update_from_session(upload_session)
    # the importer chooses an available featuretype name late in the game need
    # to verify the resource.name otherwise things will fail. This happens
    # when the same data is uploaded a second time and the default name is
    # chosen
    cat = gs_catalog
    # Create the style and assign it to the created resource
    # FIXME: Put this in gsconfig.py
    task = import_session.tasks[0]
    task.set_charset(charset)
    # @todo see above in save_step, regarding computed unique name
    name = task.layer.name
    _log('Getting from catalog [%s]', name)
    publishing = cat.get_layer(name)
    if import_session.state == 'INCOMPLETE':
        if task.state != 'ERROR':
            Upload.objects.invalidate_from_session(upload_session)
            raise Exception(f'unknown item state: {task.state}')
    elif import_session.state == 'READY':
        import_session.commit()
    elif import_session.state == 'PENDING':
        if task.state == 'READY':
            # if not task.data.format or task.data.format != 'Shapefile':
            import_session.commit()
    try:
        import_session = import_session.reload()
    except gsimporter.api.NotFound as e:
        Upload.objects.invalidate_from_session(upload_session)
        raise UploadException.from_exc(
            _("The GeoServer Import Session is no more available"), e)
    upload_session.import_session = import_session
    Upload.objects.update_from_session(upload_session)
    if not publishing:
        Upload.objects.invalidate_from_session(upload_session)
        raise LayerNotReady(
            _(f"Expected to find layer named '{name}' in geoserver"))
    _log('Creating Django record for [%s]', name)
    target = task.target
    alternate = task.get_target_layer_name()
    layer_uuid = str(uuid.uuid1())
    title = upload_session.layer_title
    abstract = upload_session.layer_abstract
    # look for xml and finalize Layer metadata
    metadata_uploaded = False
    # Default regions/keywords so the region/keyword handling further down
    # cannot raise NameError when no XML metadata file was uploaded.
    regions = []
    keywords = []
    xml_file = upload_session.base_file[0].xml_files
    if xml_file:
        # get model properties from XML
        # If it's contained within a zip, need to extract it
        if upload_session.base_file.archive:
            archive = upload_session.base_file.archive
            zf = zipfile.ZipFile(archive, 'r', allowZip64=True)
            zf.extract(xml_file[0], os.path.dirname(archive))
            # Assign the absolute path to this file
            xml_file = f"{os.path.dirname(archive)}/{xml_file[0]}"
        # Sanity checks
        if isinstance(xml_file, list):
            if len(xml_file) > 0:
                xml_file = xml_file[0]
            else:
                xml_file = None
        elif not isinstance(xml_file, str):
            xml_file = None
        if xml_file and os.path.exists(xml_file) and os.access(
                xml_file, os.R_OK):
            metadata_uploaded = True
            layer_uuid, vals, regions, keywords = set_metadata(
                open(xml_file).read())
    # Make sure the layer does not exists already
    if Layer.objects.filter(uuid=layer_uuid).count():
        Upload.objects.invalidate_from_session(upload_session)
        logger.error(
            "The UUID identifier from the XML Metadata is already in use in this system."
        )
        raise GeoNodeException(
            _("The UUID identifier from the XML Metadata is already in use in this system."
              ))
    # Is it a regular file or an ImageMosaic?
    # if upload_session.mosaic_time_regex and upload_session.mosaic_time_value:
    saved_layer = None
    if upload_session.mosaic:
        import pytz
        import datetime
        from geonode.layers.models import TIME_REGEX_FORMAT
        # llbbox = publishing.resource.latlon_bbox
        start = None
        end = None
        if upload_session.mosaic_time_regex and upload_session.mosaic_time_value:
            has_time = True
            start = datetime.datetime.strptime(
                upload_session.mosaic_time_value,
                TIME_REGEX_FORMAT[upload_session.mosaic_time_regex])
            start = pytz.utc.localize(start, is_dst=False)
            end = start
        else:
            has_time = False
        if not upload_session.append_to_mosaic_opts:
            try:
                with transaction.atomic():
                    saved_layer, created = Layer.objects.get_or_create(
                        uuid=layer_uuid,
                        defaults=dict(
                            store=target.name,
                            storeType=target.store_type,
                            alternate=alternate,
                            workspace=target.workspace_name,
                            title=title,
                            name=task.layer.name,
                            abstract=abstract or '',
                            owner=user,
                            temporal_extent_start=start,
                            temporal_extent_end=end,
                            is_mosaic=True,
                            has_time=has_time,
                            has_elevation=False,
                            time_regex=upload_session.mosaic_time_regex))
            except IntegrityError as e:
                Upload.objects.invalidate_from_session(upload_session)
                raise UploadException.from_exc(_('Error configuring Layer'), e)
            assert saved_layer
        else:
            # Appending a granule to an existing mosaic: only update the
            # layer's temporal extent.
            # saved_layer = Layer.objects.filter(name=upload_session.append_to_mosaic_name)
            # created = False
            saved_layer, created = Layer.objects.get_or_create(
                name=upload_session.append_to_mosaic_name)
            assert saved_layer
            try:
                if saved_layer.temporal_extent_start and end:
                    if pytz.utc.localize(saved_layer.temporal_extent_start,
                                         is_dst=False) < end:
                        saved_layer.temporal_extent_end = end
                        Layer.objects.filter(
                            name=upload_session.append_to_mosaic_name).update(
                                temporal_extent_end=end)
                    else:
                        saved_layer.temporal_extent_start = end
                        Layer.objects.filter(
                            name=upload_session.append_to_mosaic_name).update(
                                temporal_extent_start=end)
            except Exception as e:
                _log(
                    f"There was an error updating the mosaic temporal extent: {str(e)}"
                )
    else:
        _has_time = (True if upload_session.time and upload_session.time_info
                     and upload_session.time_transforms else False)
        try:
            with transaction.atomic():
                saved_layer, created = Layer.objects.get_or_create(
                    uuid=layer_uuid,
                    defaults=dict(store=target.name,
                                  storeType=target.store_type,
                                  alternate=alternate,
                                  workspace=target.workspace_name,
                                  title=title,
                                  name=task.layer.name,
                                  abstract=abstract or '',
                                  owner=user,
                                  has_time=_has_time))
        except IntegrityError as e:
            Upload.objects.invalidate_from_session(upload_session)
            raise UploadException.from_exc(_('Error configuring Layer'), e)
        assert saved_layer
    # Create a new upload session
    try:
        with transaction.atomic():
            geonode_upload_session, created = UploadSession.objects.get_or_create(
                resource=saved_layer, user=user)
            geonode_upload_session.processed = False
            geonode_upload_session.save()
            Upload.objects.update_from_session(upload_session, layer=saved_layer)
    except IntegrityError as e:
        Upload.objects.invalidate_from_session(upload_session)
        raise UploadException.from_exc(_('Error configuring Layer'), e)
    # Add them to the upload session (new file fields are created).
    assigned_name = None

    def _store_file(saved_layer, geonode_upload_session, base_file,
                    assigned_name, base=False):
        # Attach one uploaded file to the session; returns the name GeoNode
        # assigned so sibling files can reuse it.
        with open(base_file, 'rb') as f:
            file_name, type_name = os.path.splitext(
                os.path.basename(base_file))
            geonode_upload_session.layerfile_set.create(
                name=file_name,
                base=base,
                file=File(
                    f, name=f'{assigned_name or saved_layer.name}{type_name}'))
            # save the system assigned name for the remaining files
            if not assigned_name:
                the_file = geonode_upload_session.layerfile_set.all(
                )[0].file.name
                assigned_name = os.path.splitext(os.path.basename(the_file))[0]
        return assigned_name

    if upload_session.base_file:
        uploaded_files = upload_session.base_file[0]
        base_file = uploaded_files.base_file
        aux_files = uploaded_files.auxillary_files
        sld_files = uploaded_files.sld_files
        xml_files = uploaded_files.xml_files
        assigned_name = _store_file(saved_layer, geonode_upload_session,
                                    base_file, assigned_name, base=True)
        for _f in aux_files:
            _store_file(saved_layer, geonode_upload_session, _f, assigned_name)
        for _f in sld_files:
            _store_file(saved_layer, geonode_upload_session, _f, assigned_name)
        for _f in xml_files:
            _store_file(saved_layer, geonode_upload_session, _f, assigned_name)
    # @todo if layer was not created, need to ensure upload target is
    # same as existing target
    # Create the points of contact records for the layer
    _log(f'Creating points of contact records for {name}')
    saved_layer.poc = user
    saved_layer.metadata_author = user
    saved_layer.metadata_uploaded = metadata_uploaded
    _log('Creating style for [%s]', name)
    # look for SLD
    sld_file = upload_session.base_file[0].sld_files
    sld_uploaded = False
    if sld_file:
        # If it's contained within a zip, need to extract it
        if upload_session.base_file.archive:
            archive = upload_session.base_file.archive
            zf = zipfile.ZipFile(archive, 'r', allowZip64=True)
            zf.extract(sld_file[0], os.path.dirname(archive))
            # Assign the absolute path to this file
            sld_file[0] = f"{os.path.dirname(archive)}/{sld_file[0]}"
        sld_file = sld_file[0]
        sld_uploaded = True
        # geoserver_set_style.apply_async((saved_layer.id, sld_file))
    else:
        # get_files will not find the sld if it doesn't match the base name
        # so we've worked around that in the view - if provided, it will be here
        if upload_session.import_sld_file:
            _log('using provided sld file')
            base_file = upload_session.base_file
            sld_file = base_file[0].sld_files[0]
        sld_uploaded = False
        # geoserver_create_style.apply_async((saved_layer.id, name, sld_file, upload_session.tempdir))
    if upload_session.time_info:
        set_time_info(saved_layer, **upload_session.time_info)
    # saved keywords and thesaurus for the uploaded layer
    regions_resolved, regions_unresolved = resolve_regions(regions)
    if keywords and regions_unresolved:
        keywords.extend(convert_keyword(regions_unresolved))
    saved_layer = utils.KeywordHandler(saved_layer, keywords).set_keywords()
    regions_resolved = list(set(regions_resolved))
    if regions_resolved:
        if len(regions_resolved) > 0:
            if not saved_layer.regions:
                saved_layer.regions = regions_resolved
            else:
                saved_layer.regions.clear()
                saved_layer.regions.add(*regions_resolved)
    # Set default permissions on the newly created layer and send notifications
    permissions = upload_session.permissions
    geoserver_finalize_upload.apply_async(
        (import_session.id, saved_layer.id, permissions, created,
         xml_file, sld_file, sld_uploaded, upload_session.tempdir))
    return saved_layer
def time_step(upload_session, time_attribute, time_transform_type,
              presentation_strategy, precision_value, precision_step,
              end_time_attribute=None,
              end_time_transform_type=None,
              end_time_format=None,
              time_format=None):
    '''
    time_attribute - name of attribute to use as time
    time_transform_type - name of transform. either
        DateFormatTransform or IntegerFieldToDateTransform
    time_format - optional string format
    end_time_attribute - optional name of attribute to use as end time
    end_time_transform_type - optional name of transform. either
        DateFormatTransform or IntegerFieldToDateTransform
    end_time_format - optional string format
    presentation_strategy - LIST, DISCRETE_INTERVAL, CONTINUOUS_INTERVAL
    precision_value - number
    precision_step - year, month, day, week, etc.
    '''
    transforms = []

    def build_time_transform(att, type, format, end_time_attribute, presentation_strategy):
        # Descriptor for a single date-handling transform on the import task.
        trans = {'type': type, 'field': att}
        if format:
            trans['format'] = format
        if end_time_attribute:
            trans['enddate'] = end_time_attribute
        if presentation_strategy:
            trans['presentation'] = presentation_strategy
        return trans

    def build_att_remap_transform(att):
        # @todo the target is so ugly it should be obvious
        return {'type': 'AttributeRemapTransform',
                'field': att,
                'target': 'org.geotools.data.postgis.PostGISDialect$XDate'}

    # Big-date remapping is disabled for geogig-backed uploads.
    use_big_date = getattr(
        settings, 'USE_BIG_DATE', False) and not upload_session.geogig
    if time_attribute:
        if time_transform_type:
            transforms.append(
                build_time_transform(
                    time_attribute,
                    time_transform_type,
                    time_format,
                    end_time_attribute,
                    presentation_strategy
                )
            )
        # this must go after the remapping transform to ensure the
        # type change is applied
        if use_big_date:
            transforms.append(build_att_remap_transform(time_attribute))
            if end_time_attribute:
                transforms.append(
                    build_att_remap_transform(end_time_attribute)
                )
        transforms.append({
            'type': 'CreateIndexTransform',
            'field': time_attribute
        })
        # the time_info will be used in the last step to configure the
        # layer in geoserver - the dict matches the arguments of the
        # set_time_info helper function
        upload_session.time_info = dict(
            attribute=time_attribute,
            end_attribute=end_time_attribute,
            presentation=presentation_strategy,
            precision_value=precision_value,
            precision_step=precision_step
        )
    if upload_session.time_transforms:
        upload_session.import_session.tasks[0].remove_transforms(
            upload_session.time_transforms
        )
    if transforms:
        logger.debug('Setting transforms %s' % transforms)
        upload_session.import_session.tasks[0].add_transforms(transforms)
        # NOTE(review): the guarded statements below are plain attribute
        # assignments and cannot raise BadRequest; add_transforms above and
        # save_transforms below (the REST calls that can) sit outside this
        # try — confirm whether they were meant to be inside it.
        try:
            upload_session.time_transforms = transforms
            upload_session.time = True
        except BadRequest as br:
            raise UploadException.from_exc('Error configuring time:', br)
        upload_session.import_session.tasks[0].save_transforms()
    else:
        upload_session.time = False
def final_step(upload_session, user, charset="UTF-8", layer_id=None):
    """Finalize an upload: commit the GeoServer import session, create or
    look up the Django ``Layer`` record, store the uploaded files, handle
    optional XML metadata and SLD style files, and kick off the async
    GeoServer finalization task.

    Params:
        upload_session: the in-progress upload session wrapper (holds the
            gsimporter import session, base files, time/mosaic options).
        user: owner for the created layer and related records.
        charset: charset to set on the importer task (default "UTF-8").
        layer_id: optional existing Layer pk; when given, that layer's
            name overrides the importer-computed name.

    Returns the saved ``Layer`` (early, without re-processing, when the
    layer already existed). Raises UploadException / LayerNotReady /
    GeoNodeException on the various failure paths; most error paths also
    invalidate the Upload record for this session.
    """
    import_session = upload_session.import_session
    import_id = import_session.id

    _log(f'Reloading session {import_id} to check validity')
    try:
        import_session = import_session.reload()
    except gsimporter.api.NotFound as e:
        # Session vanished on the GeoServer side; mark our record invalid.
        Upload.objects.invalidate_from_session(upload_session)
        raise UploadException.from_exc(
            _("The GeoServer Import Session is no more available"), e)

    if Upload.objects.filter(import_id=import_id).count():
        Upload.objects.filter(import_id=import_id).update(complete=False)
        upload = Upload.objects.filter(import_id=import_id).get()
        if upload.state == Upload.STATE_RUNNING:
            # Another worker is already finalizing this import; bail out.
            return

        # WAITING state is set when lat and lng are not selected for a csv upload
        # In the final_step everything is ok, the state value can return to PENDING
        # During the final_step the state will change again to complete the operation
        if upload.state == Upload.STATE_WAITING:
            upload.set_processing_state(Upload.STATE_PENDING)

    upload_session.import_session = import_session
    Upload.objects.update_from_session(upload_session)

    # Create the style and assign it to the created resource
    # FIXME: Put this in gsconfig.py
    task = import_session.tasks[0]
    task.set_charset(charset)

    # @todo see above in save_step, regarding computed unique name
    name = task.layer.name
    if layer_id:
        # An explicit target layer was given; use its name instead of the
        # importer-computed one.
        name = Layer.objects.get(resourcebase_ptr_id=layer_id).name

    _log(f'Getting from catalog [{name}]')
    try:
        # the importer chooses an available featuretype name late in the game need
        # to verify the resource.name otherwise things will fail. This happens
        # when the same data is uploaded a second time and the default name is
        # chosen
        gs_catalog.get_layer(name)
    except Exception:
        Upload.objects.invalidate_from_session(upload_session)
        raise LayerNotReady(
            _(f"Expected to find layer named '{name}' in geoserver"))

    # Commit the import when both the session and its task are ready.
    if import_session.state == 'READY' or (import_session.state == 'PENDING' and task.state == 'READY'):
        import_session.commit()
    elif import_session.state == 'INCOMPLETE' and task.state != 'ERROR':
        Upload.objects.invalidate_from_session(upload_session)
        raise Exception(f'unknown item state: {task.state}')

    try:
        import_session = import_session.reload()
    except gsimporter.api.NotFound as e:
        Upload.objects.invalidate_from_session(upload_session)
        raise UploadException.from_exc(
            _("The GeoServer Import Session is no more available"), e)

    upload_session.import_session = import_session
    Upload.objects.update_from_session(upload_session)

    _log(f'Creating Django record for [{name}]')
    target = task.target
    alternate = task.get_target_layer_name()
    layer_uuid = None
    title = upload_session.layer_title
    abstract = upload_session.layer_abstract
    regions = []
    keywords = []
    vals = {}
    custom = {}

    # look for xml and finalize Layer metadata
    metadata_uploaded = False
    xml_file = upload_session.base_file[0].xml_files
    if xml_file:
        try:
            # get model properties from XML
            # If it's contained within a zip, need to extract it
            if upload_session.base_file.archive:
                archive = upload_session.base_file.archive
                zf = zipfile.ZipFile(archive, 'r', allowZip64=True)
                zf.extract(xml_file[0], os.path.dirname(archive))
                # Assign the absolute path to this file
                xml_file = f"{os.path.dirname(archive)}/{xml_file[0]}"

            # Sanity checks: normalize xml_file to a single path string
            # (it may arrive as a list of paths) or None.
            if isinstance(xml_file, list):
                if len(xml_file) > 0:
                    xml_file = xml_file[0]
                else:
                    xml_file = None
            elif not isinstance(xml_file, str):
                xml_file = None

            if xml_file and os.path.exists(xml_file) and os.access(xml_file, os.R_OK):
                layer_uuid, vals, regions, keywords, custom = parse_metadata(
                    open(xml_file).read())
                metadata_uploaded = True
        except Exception as e:
            Upload.objects.invalidate_from_session(upload_session)
            logger.error(e)
            raise GeoNodeException(
                _("Exception occurred while parsing the provided Metadata file."), e)

    # Make sure the layer does not exists already
    if layer_uuid and Layer.objects.filter(uuid=layer_uuid).count():
        Upload.objects.invalidate_from_session(upload_session)
        logger.error("The UUID identifier from the XML Metadata is already in use in this system.")
        raise GeoNodeException(
            _("The UUID identifier from the XML Metadata is already in use in this system."))

    # Is it a regular file or an ImageMosaic?
    # NOTE(review): has_elevation is never set True here — presumably
    # handled elsewhere; confirm before documenting further.
    has_time = has_elevation = False
    start = end = None
    if upload_session.mosaic_time_regex and upload_session.mosaic_time_value:
        has_time = True
        start = datetime.datetime.strptime(upload_session.mosaic_time_value,
                                           TIME_REGEX_FORMAT[upload_session.mosaic_time_regex])
        start = pytz.utc.localize(start, is_dst=False)
        end = start
    if upload_session.time and upload_session.time_info and upload_session.time_transforms:
        has_time = True

    saved_layer = None
    if upload_session.mosaic:
        if not upload_session.append_to_mosaic_opts:
            # New mosaic: create the Layer record unless one with the same
            # store/alternate/workspace/name already exists.
            saved_dataset_filter = Layer.objects.filter(
                store=target.name,
                alternate=alternate,
                workspace=target.workspace_name,
                name=task.layer.name)
            if not saved_dataset_filter.exists():
                saved_layer = Layer.objects.create(
                    uuid=layer_uuid or str(uuid.uuid1()),
                    store=target.name,
                    storeType=target.store_type,
                    alternate=alternate,
                    workspace=target.workspace_name,
                    title=title,
                    name=task.layer.name,
                    abstract=abstract or '',
                    owner=user,
                    temporal_extent_start=start,
                    temporal_extent_end=end,
                    is_mosaic=True,
                    has_time=has_time,
                    has_elevation=has_elevation,
                    time_regex=upload_session.mosaic_time_regex)
                created = True
            else:
                saved_layer = saved_dataset_filter.get()
                created = False
        else:
            # Appending a granule to an existing mosaic: widen its temporal
            # extent to include the new granule's timestamp.
            saved_layer, created = Layer.objects.get_or_create(
                name=upload_session.append_to_mosaic_name)
            try:
                if saved_layer.temporal_extent_start and end:
                    if pytz.utc.localize(
                            saved_layer.temporal_extent_start,
                            is_dst=False) < end:
                        saved_layer.temporal_extent_end = end
                        Layer.objects.filter(
                            name=upload_session.append_to_mosaic_name).update(
                            temporal_extent_end=end)
                    else:
                        saved_layer.temporal_extent_start = end
                        Layer.objects.filter(
                            name=upload_session.append_to_mosaic_name).update(
                            temporal_extent_start=end)
            except Exception as e:
                # Best-effort: a failed extent update is logged, not fatal.
                _log(
                    f"There was an error updating the mosaic temporal extent: {str(e)}")
    else:
        # Regular (non-mosaic) upload.
        saved_dataset_filter = Layer.objects.filter(
            store=target.name,
            alternate=alternate,
            workspace=target.workspace_name,
            name=task.layer.name)
        if not saved_dataset_filter.exists():
            saved_layer = Layer.objects.create(
                uuid=layer_uuid or str(uuid.uuid1()),
                store=target.name,
                storeType=target.store_type,
                alternate=alternate,
                workspace=target.workspace_name,
                title=title,
                name=task.layer.name,
                abstract=abstract or '',
                owner=user,
                temporal_extent_start=start,
                temporal_extent_end=end,
                is_mosaic=False,
                has_time=has_time,
                has_elevation=has_elevation,
                time_regex=upload_session.mosaic_time_regex)
            created = True
        else:
            saved_layer = saved_dataset_filter.get()
            created = False

    assert saved_layer

    if not created:
        # Existing layer: nothing more to finalize here.
        return saved_layer

    # Hide the resource until finished
    saved_layer.set_dirty_state()

    # Create a new upload session
    geonode_upload_session, created = UploadSession.objects.get_or_create(
        resource=saved_layer, user=user
    )
    geonode_upload_session.processed = False
    geonode_upload_session.save()
    Upload.objects.update_from_session(upload_session, layer=saved_layer)

    # Add them to the upload session (new file fields are created).
    assigned_name = None

    # Update Layer with information coming from XML File if available
    saved_layer = _update_layer_with_xml_info(saved_layer, xml_file, regions, keywords, vals)

    def _store_file(saved_layer,
                    geonode_upload_session,
                    base_file,
                    assigned_name,
                    base=False):
        # Attach one on-disk file to the upload session's layerfile_set and
        # return the (possibly newly) assigned base name.
        if os.path.exists(base_file):
            with open(base_file, 'rb') as f:
                file_name, type_name = os.path.splitext(os.path.basename(base_file))
                geonode_upload_session.layerfile_set.create(
                    name=file_name,
                    base=base,
                    file=File(
                        f, name=f'{assigned_name or saved_layer.name}{type_name}'))
                # save the system assigned name for the remaining files
                if not assigned_name:
                    the_file = geonode_upload_session.layerfile_set.all()[0].file.name
                    assigned_name = os.path.splitext(os.path.basename(the_file))[0]
        return assigned_name

    if upload_session.base_file:
        uploaded_files = upload_session.base_file[0]
        base_file = uploaded_files.base_file
        aux_files = uploaded_files.auxillary_files
        sld_files = uploaded_files.sld_files
        xml_files = uploaded_files.xml_files
        # Store the primary file first so it fixes the assigned name used
        # by all the sidecar files.
        assigned_name = _store_file(
            saved_layer,
            geonode_upload_session,
            base_file,
            assigned_name,
            base=True)
        for _f in aux_files:
            _store_file(saved_layer,
                        geonode_upload_session,
                        _f,
                        assigned_name)
        for _f in sld_files:
            _store_file(saved_layer,
                        geonode_upload_session,
                        _f,
                        assigned_name)
        for _f in xml_files:
            _store_file(saved_layer,
                        geonode_upload_session,
                        _f,
                        assigned_name)

    # @todo if layer was not created, need to ensure upload target is
    # same as existing target

    # Create the points of contact records for the layer
    _log(f'Creating points of contact records for {name}')
    saved_layer.poc = user
    saved_layer.metadata_author = user
    saved_layer.metadata_uploaded = metadata_uploaded

    _log('Creating style for [%s]', name)
    # look for SLD
    sld_file = upload_session.base_file[0].sld_files
    sld_uploaded = False
    if sld_file:
        # If it's contained within a zip, need to extract it
        if upload_session.base_file.archive:
            archive = upload_session.base_file.archive
            _log(f'using uploaded sld file from {archive}')
            zf = zipfile.ZipFile(archive, 'r', allowZip64=True)
            zf.extract(sld_file[0], os.path.dirname(archive), path=upload_session.tempdir)
            # Assign the absolute path to this file
            sld_file[0] = f"{os.path.dirname(archive)}/{sld_file[0]}"
        else:
            # Not zipped: copy the SLD next to the session tempdir so the
            # async finalize task can read it.
            _sld_file = f"{os.path.dirname(upload_session.tempdir)}/{os.path.basename(sld_file[0])}"
            _log(f"copying [{sld_file[0]}] to [{_sld_file}]")
            try:
                shutil.copyfile(sld_file[0], _sld_file)
                sld_file = _sld_file
            except (IsADirectoryError, shutil.SameFileError) as e:
                # Source and target resolve to the same file (or a dir);
                # fall back to the original path.
                logger.exception(e)
                sld_file = sld_file[0]
            except Exception as e:
                raise UploadException.from_exc(_('Error uploading Dataset'), e)
        sld_uploaded = True
    else:
        # get_files will not find the sld if it doesn't match the base name
        # so we've worked around that in the view - if provided, it will be here
        if upload_session.import_sld_file:
            _log('using provided sld file')
            base_file = upload_session.base_file
            sld_file = base_file[0].sld_files[0]
        sld_uploaded = False
    _log(f'[sld_uploaded: {sld_uploaded}] sld_file: {sld_file}')

    if upload_session.time_info:
        set_time_info(saved_layer, **upload_session.time_info)

    # Set default permissions on the newly created layer and send notifications
    permissions = upload_session.permissions
    geoserver_finalize_upload.apply_async(
        (import_session.id, saved_layer.id, permissions, created,
         xml_file, sld_file, sld_uploaded, upload_session.tempdir))

    saved_layer = utils.metadata_storers(saved_layer, custom)
    return saved_layer