def delete(self, cascade=True):
    models.Model.delete(self)
    if cascade:
        try:
            session = gs_uploader().get_session(self.import_id)
        except NotFound:
            session = None
        if session:
            try:
                session.delete()
            except Exception:
                logging.exception('error deleting upload session')
        # remove any files staged on disk for this upload
        if self.upload_dir and path.exists(self.upload_dir):
            shutil.rmtree(self.upload_dir)
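
# For illustration only: a minimal sketch of how the cascade delete above
# might be driven from a cleanup job. purge_invalid_uploads() is a
# hypothetical helper, not part of this module; it assumes the Upload
# model above and its STATE_INVALID marker (set further below when an
# import session produces no tasks or items).
def purge_invalid_uploads():
    for upload in Upload.objects.filter(state=Upload.STATE_INVALID):
        # cascade=True also deletes the remote importer session and
        # removes upload_dir from disk, per the implementation above
        upload.delete(cascade=True)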
# note: 'async' is a reserved word in Python 3, hence the trailing underscore
def run_import(upload_session, async_):
    """Run the import, possibly asynchronously.

    Returns the target datastore.
    """
    # re-fetch the session from the importer so we see its current state
    import_session = upload_session.import_session
    import_session = gs_uploader().get_session(import_session.id)
    if import_session.state == 'INCOMPLETE':
        item = upload_session.import_session.tasks[0].items[0]
        if item.state == 'NO_CRS':
            err = 'No projection found'
        else:
            err = item.state or 'Session not ready for import.'
        if err:
            raise Exception(err)

    # if a target datastore is configured, ensure the datastore exists
    # in geoserver and set the uploader target appropriately
    if (hasattr(settings, 'GEOGIT_DATASTORE') and
            settings.GEOGIT_DATASTORE and
            upload_session.geogit and
            import_session.tasks[0].items[0].layer.layer_type != 'RASTER'):
        target = create_geoserver_db_featurestore(
            store_type='geogit', store_name=upload_session.geogit_store)
        _log('setting target datastore %s %s',
             target.name, target.workspace.name)
        import_session.tasks[0].set_target(
            target.name, target.workspace.name)
    elif (settings.OGC_SERVER['default']['OPTIONS']['DATASTORE'] != '' and
            import_session.tasks[0].items[0].layer.layer_type != 'RASTER'):
        target = create_geoserver_db_featurestore(
            store_type='postgis', store_name=upload_session.geogit_store)
        _log('setting target datastore %s %s',
             target.name, target.workspace.name)
        import_session.tasks[0].set_target(
            target.name, target.workspace.name)
    else:
        target = import_session.tasks[0].target

    if upload_session.update_mode:
        _log('setting updateMode to %s', upload_session.update_mode)
        import_session.tasks[0].set_update_mode(upload_session.update_mode)

    _log('running import session')
    # run async if using a database
    import_session.commit(async_)

    # @todo check status of import session - it may fail, but due to
    # protocol, this will not be reported during the commit
    return target
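
# Sketch of a caller that commits asynchronously and then polls for the
# outcome, since (per the @todo above) a failure during an async commit is
# not reported by commit() itself. This helper and its polling loop are
# assumptions for illustration; the 'COMPLETE' state name is inferred from
# the importer's 'INCOMPLETE' state checked in run_import above.
import time

def wait_for_import(upload_session, timeout=300, interval=2):
    target = run_import(upload_session, True)
    deadline = time.time() + timeout
    while time.time() < deadline:
        # re-fetch the session to refresh its state, as run_import does
        session = gs_uploader().get_session(
            upload_session.import_session.id)
        if session.state == 'COMPLETE':
            return target
        if session.state == 'INCOMPLETE':
            raise Exception('import finished INCOMPLETE')
        time.sleep(interval)
    raise Exception('timed out waiting for import to complete')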
        raise GeoNodeException(msg)

    if the_layer_type not in (FeatureType.resource_type,
                              Coverage.resource_type):
        raise Exception('Expected the layer type to be a FeatureType or '
                        'Coverage, not %s' % the_layer_type)
    _log('Uploading %s', the_layer_type)

    error_msg = None
    try:
        # importer tracks ids by autoincrement but is prone to corruption
        # which potentially may reset the id - hopefully prevent this...
        next_id = Upload.objects.all().aggregate(
            Max('import_id'))['import_id__max']
        next_id = next_id + 1 if next_id else 1

        # @todo settings for use_url or auto detection if geoserver is
        # on same host
        import_session = gs_uploader().upload(
            base_file, use_url=False, import_id=next_id)

        # save record of this whether valid or not - will help w/ debugging
        upload = Upload.objects.create_from_session(user, import_session)

        if not import_session.tasks:
            error_msg = 'No upload tasks were created'
        elif not import_session.tasks[0].items:
            error_msg = 'No upload items found for task'

        if error_msg:
            upload.state = upload.STATE_INVALID
            upload.save()

        # @todo once the random tmp9723481758915 type of name is not
        # around, need to track the name computed above, for now, the