def test_get_or_create_datastore(self):
    # --- Check that it creates a missing datastore
    # No importer needed
    importer = None
    gph = GeoserverPublishHandler(importer)
    connection_string = gph.get_default_store()
    ds_name = connection_string['name']
    # FailedRequestError indicates the data store couldn't be found
    self.assertRaises(FailedRequestError, gs_catalog.get_store, ds_name)
    layer_config = {'geoserver_store': connection_string}
    gph.get_or_create_datastore(layer_config)
    ds2 = gs_catalog.get_store(ds_name)
    self.assertNotEqual(ds2, None)
def test_get_or_create_datastore(self):
    # --- Check that it creates a missing datastore
    # No importer needed
    importer = None
    gph = GeoserverPublishHandler(importer)
    connection_string = gph.get_default_store()
    ds_name = connection_string['name']
    # FailedRequestError indicates the data store couldn't be found
    logging.disable(logging.ERROR)
    self.assertRaises(FailedRequestError, gs_catalog.get_store, ds_name)
    logging.disable(logging.NOTSET)
    layer_config = {'geoserver_store': connection_string}
    gph.get_or_create_datastore(layer_config, None)
    ds2 = gs_catalog.get_store(ds_name)
    self.assertNotEqual(ds2, None)
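# The updated test wraps the expected-failure assertion in logging.disable()
# calls so the FailedRequestError raised by gs_catalog.get_store() does not
# spam the test output. A minimal sketch of the same idea as a reusable,
# exception-safe context manager follows; suppress_logging is a hypothetical
# helper, not part of the codebase above.
import logging
from contextlib import contextmanager


@contextmanager
def suppress_logging(level=logging.ERROR):
    # Raise the global logging threshold for the duration of the block,
    # then restore normal logging even if the block raises.
    logging.disable(level)
    try:
        yield
    finally:
        logging.disable(logging.NOTSET)


# Possible usage inside the test:
#     with suppress_logging():
#         self.assertRaises(FailedRequestError, gs_catalog.get_store, ds_name)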
def save_step(user, layer, spatial_files, overwrite=True):
    _log('Uploading layer: [%s], files [%s]', layer, spatial_files)
    if len(spatial_files) > 1:
        # we only support more than one file if they're rasters for mosaicing
        if not all([f.file_type.layer_type == 'coverage' for f in spatial_files]):
            raise UploadException("Please upload only one type of file at a time")
    name = get_valid_layer_name(layer, overwrite)
    _log('Name for layer: [%s]', name)
    if not spatial_files:
        raise UploadException("Unable to recognize the uploaded file(s)")
    the_layer_type = spatial_files[0].file_type.layer_type
    # Check if the store exists in geoserver
    try:
        store = gs_catalog.get_store(name)
    except geoserver.catalog.FailedRequestError, e:
        # There is no store, ergo the road is clear
        pass
def save_step(user, layer, spatial_files, overwrite=True):
    _log('Uploading layer: [%s], files [%s]', layer, spatial_files)
    if len(spatial_files) > 1:
        # we only support more than one file if they're rasters for mosaicing
        if not all(
                [f.file_type.layer_type == 'coverage' for f in spatial_files]):
            raise UploadException(
                "Please upload only one type of file at a time")
    name = get_valid_layer_name(layer, overwrite)
    _log('Name for layer: [%s]', name)
    if not spatial_files:
        raise UploadException("Unable to recognize the uploaded file(s)")
    the_layer_type = spatial_files[0].file_type.layer_type
    # Check if the store exists in geoserver
    try:
        store = gs_catalog.get_store(name)
    except geoserver.catalog.FailedRequestError as e:
        # There is no store, ergo the road is clear
        pass
    else:
        # If we get a store, we do the following:
        resources = store.get_resources()
        # Is it empty?
        if len(resources) == 0:
            # What should we do about that empty store?
            if overwrite:
                # We can just delete it and recreate it later.
                store.delete()
            else:
                msg = (
                    'The layer exists and the overwrite parameter is %s' % overwrite)
                raise GeoNodeException(msg)
        else:
            # If our resource is already configured in the store it
            # needs to have the right resource type
            for resource in resources:
                if resource.name == name:
                    assert overwrite, "Name already in use and overwrite is False"
                    existing_type = resource.resource_type
                    if existing_type != the_layer_type:
                        msg = (
                            'Type of uploaded file %s (%s) does not match type '
                            'of existing resource type %s' % (
                                name, the_layer_type, existing_type))
                        _log(msg)
                        raise GeoNodeException(msg)

    if the_layer_type not in (
            FeatureType.resource_type, Coverage.resource_type):
        raise Exception(
            'Expected the layer type to be a FeatureType or Coverage, not %s' % the_layer_type)
    _log('Uploading %s', the_layer_type)

    error_msg = None
    try:
        # importer tracks ids by autoincrement but is prone to corruption
        # which potentially may reset the id - hopefully prevent this...
        next_id = Upload.objects.all().aggregate(Max('import_id')).values()[0]
        next_id = next_id + 1 if next_id else 1

        # save record of this whether valid or not - will help w/ debugging
        upload = Upload.objects.create(
            user=user,
            name=name,
            state=Upload.STATE_INVALID,
            upload_dir=spatial_files.dirname)

        # @todo settings for use_url or auto detection if geoserver is
        # on same host
        import_session = gs_uploader.upload_files(
            spatial_files.all_files(),
            use_url=False,
            import_id=next_id,
            mosaic=len(spatial_files) > 1)

        upload.import_id = import_session.id
        upload.save()

        # any unrecognized tasks/files must be deleted or we can't proceed
        import_session.delete_unrecognized_tasks()

        if not import_session.tasks:
            error_msg = 'No valid upload files could be found'
        elif import_session.tasks[0].state == 'NO_FORMAT':
            error_msg = 'There may be a problem with the data provided - ' \
                        'we could not identify it'

        if len(import_session.tasks) > 1:
            error_msg = "Only a single upload is supported at the moment"

        if not error_msg and import_session.tasks:
            task = import_session.tasks[0]
            # single file tasks will have just a file entry
            if hasattr(task, 'files'):
                # @todo gsimporter - test this
                if not all([hasattr(f, 'timestamp')
                            for f in task.source.files]):
                    error_msg = (
                        "Not all timestamps could be recognized. "
                        "Please ensure your files contain the correct formats.")

        if error_msg:
            upload.state = upload.STATE_INVALID
            upload.save()

        # @todo once the random tmp9723481758915 type of name is not
        # around, need to track the name computed above, for now, the
        # target store name can be used
    except Exception as e:
        logger.exception('Error creating import session')
        raise e

    if error_msg:
        raise UploadException(error_msg)
    else:
        _log("Finished upload of [%s] to GeoServer without errors.", name)

    return import_session
def save_step(user, layer, spatial_files, overwrite=True, mosaic=False,
              append_to_mosaic_opts=None, append_to_mosaic_name=None,
              mosaic_time_regex=None, mosaic_time_value=None,
              time_presentation=None, time_presentation_res=None,
              time_presentation_default_value=None,
              time_presentation_reference_value=None):
    _log('Uploading layer: [%s], files [%s]', layer, spatial_files)
    if len(spatial_files) > 1:
        # we only support more than one file if they're rasters for mosaicing
        if not all(
                [f.file_type.layer_type == 'coverage' for f in spatial_files]):
            raise UploadException(
                "Please upload only one type of file at a time")
    name = get_valid_layer_name(layer, overwrite)
    _log('Name for layer: [%s]', name)
    if not spatial_files:
        raise UploadException("Unable to recognize the uploaded file(s)")
    the_layer_type = spatial_files[0].file_type.layer_type
    # Check if the store exists in geoserver
    try:
        store = gs_catalog.get_store(name)
    except geoserver.catalog.FailedRequestError as e:
        # There is no store, ergo the road is clear
        pass
    else:
        # If we get a store, we do the following:
        resources = store.get_resources()
        # Is it empty?
        if len(resources) == 0:
            # What should we do about that empty store?
            if overwrite:
                # We can just delete it and recreate it later.
                store.delete()
            else:
                msg = (
                    'The layer exists and the overwrite parameter is %s' % overwrite)
                raise GeoNodeException(msg)
        else:
            # If our resource is already configured in the store it
            # needs to have the right resource type
            for resource in resources:
                if resource.name == name:
                    assert overwrite, "Name already in use and overwrite is False"
                    existing_type = resource.resource_type
                    if existing_type != the_layer_type:
                        msg = (
                            'Type of uploaded file %s (%s) does not match type '
                            'of existing resource type %s' % (
                                name, the_layer_type, existing_type))
                        _log(msg)
                        raise GeoNodeException(msg)

    if the_layer_type not in (
            FeatureType.resource_type, Coverage.resource_type):
        raise Exception(
            'Expected the layer type to be a FeatureType or Coverage, not %s' % the_layer_type)
    _log('Uploading %s', the_layer_type)

    error_msg = None
    try:
        # importer tracks ids by autoincrement but is prone to corruption
        # which potentially may reset the id - hopefully prevent this...
        upload_next_id = Upload.objects.all().aggregate(
            Max('import_id')).values()[0]
        upload_next_id = upload_next_id if upload_next_id else 0
        # next_id = next_id + 1 if next_id else 1
        importer_sessions = gs_uploader.get_sessions()
        last_importer_session = importer_sessions[
            len(importer_sessions) - 1].id if importer_sessions else 0
        next_id = max(int(last_importer_session), int(upload_next_id)) + 1

        # Truncate name to maximum length defined by the field.
        max_length = Upload._meta.get_field('name').max_length
        name = name[:max_length]

        # save record of this whether valid or not - will help w/ debugging
        upload = Upload.objects.create(
            user=user,
            name=name,
            state=Upload.STATE_INVALID,
            upload_dir=spatial_files.dirname)

        # @todo settings for use_url or auto detection if geoserver is
        # on same host

        # Is it a regular file or an ImageMosaic?
        # if mosaic_time_regex and mosaic_time_value:
        if mosaic:  # we want to ingest as ImageMosaic
            target_store = import_imagemosaic_granules(
                spatial_files,
                append_to_mosaic_opts,
                append_to_mosaic_name,
                mosaic_time_regex,
                mosaic_time_value,
                time_presentation,
                time_presentation_res,
                time_presentation_default_value,
                time_presentation_reference_value)

            # moving forward with a regular Importer session
            import_session = gs_uploader.upload_files(
                spatial_files.all_files(),
                use_url=False,
                import_id=next_id,
                mosaic=len(spatial_files) > 1,
                target_store=target_store)

            upload.mosaic = mosaic
            upload.append_to_mosaic_opts = append_to_mosaic_opts
            upload.append_to_mosaic_name = append_to_mosaic_name
            upload.mosaic_time_regex = mosaic_time_regex
            upload.mosaic_time_value = mosaic_time_value
        else:
            # moving forward with a regular Importer session
            import_session = gs_uploader.upload_files(
                spatial_files.all_files(),
                use_url=False,
                import_id=next_id,
                mosaic=len(spatial_files) > 1)

        upload.import_id = import_session.id
        upload.save()

        # any unrecognized tasks/files must be deleted or we can't proceed
        import_session.delete_unrecognized_tasks()

        if not import_session.tasks:
            error_msg = 'No valid upload files could be found'
        elif import_session.tasks[0].state == 'NO_FORMAT':
            error_msg = 'There may be a problem with the data provided - ' \
                        'we could not identify it'

        if len(import_session.tasks) > 1:
            error_msg = "Only a single upload is supported at the moment"

        if not error_msg and import_session.tasks:
            task = import_session.tasks[0]
            # single file tasks will have just a file entry
            if hasattr(task, 'files'):
                # @todo gsimporter - test this
                if not all([hasattr(f, 'timestamp')
                            for f in task.source.files]):
                    error_msg = (
                        "Not all timestamps could be recognized. "
                        "Please ensure your files contain the correct formats.")

        if error_msg:
            upload.state = upload.STATE_INVALID
            upload.save()

        # @todo once the random tmp9723481758915 type of name is not
        # around, need to track the name computed above, for now, the
        # target store name can be used
    except Exception as e:
        logger.exception('Error creating import session')
        raise e

    if error_msg:
        raise UploadException(error_msg)
    else:
        _log("Finished upload of [%s] to GeoServer without errors.", name)

    return import_session
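# A hedged sketch of driving the mosaic branch added above, reusing the
# FakeSpatialFiles stand-ins from the earlier sketch. The layer name, granule
# file names, time regex and presentation settings are illustrative only, and
# valid time_presentation values depend on what import_imagemosaic_granules
# ultimately passes to GeoServer.
granules = FakeSpatialFiles([
    FakeSpatialFile('/tmp/uploads/temp/temp_20180101.tif', 'coverage'),
    FakeSpatialFile('/tmp/uploads/temp/temp_20180102.tif', 'coverage'),
])
granules.dirname = '/tmp/uploads/temp'
# import_session = save_step(
#     request.user, 'temperature', granules,
#     overwrite=True,
#     mosaic=True,
#     mosaic_time_regex=r'_(\d{8})',   # timestamp embedded in granule names
#     time_presentation='LIST',        # one of GeoServer's dimension presentations
#     time_presentation_res=0)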