def get_or_create_datastore(cat, workspace=None, charset="UTF-8"):
    """
    Get a PostGIS database store or create it in GeoServer if does not exist.

    The store name comes from the configured ``datastore_db`` settings; the
    ``cat`` and ``charset`` arguments are accepted for interface
    compatibility but are not consulted here.
    """
    # create_geoserver_db_featurestore already implements get-or-create
    # semantics, so simply delegate to it with the configured store name.
    return create_geoserver_db_featurestore(
        store_name=ogc_server_settings.datastore_db['NAME'],
        workspace=workspace)
def run_import(upload_session, async_upload=_ASYNC_UPLOAD): """Run the import, possibly asynchronously. Returns the target datastore. """ # run_import can raise an exception which callers should handle import_session = upload_session.import_session import_session = gs_uploader.get_session(import_session.id) task = import_session.tasks[0] import_execution_requested = False if import_session.state == 'INCOMPLETE': if task.state != 'ERROR': raise Exception(_('unknown item state: %s' % task.state)) elif import_session.state == 'PENDING' and task.target.store_type == 'coverageStore': if task.state == 'READY': import_session.commit(async_upload) import_execution_requested = True if task.state == 'ERROR': progress = task.get_progress() raise Exception(_( 'error during import: %s' % progress.get('message'))) # if a target datastore is configured, ensure the datastore exists # in geoserver and set the uploader target appropriately if ogc_server_settings.datastore_db and task.target.store_type != 'coverageStore': target = create_geoserver_db_featurestore( # store_name=ogc_server_settings.DATASTORE, store_name=ogc_server_settings.datastore_db['NAME'], workspace=settings.DEFAULT_WORKSPACE ) _log( 'setting target datastore %s %s', target.name, target.workspace.name) task.set_target(target.name, target.workspace.name) else: target = task.target if upload_session.update_mode: _log('setting updateMode to %s', upload_session.update_mode) task.set_update_mode(upload_session.update_mode) _log('running import session') # run async if using a database if not import_execution_requested: import_session.commit(async_upload) # @todo check status of import session - it may fail, but due to protocol, # this will not be reported during the commit return target
def run_import(upload_session, async_upload=_ASYNC_UPLOAD):
    """Run the import, possibly asynchronously. Returns the target datastore.

    Raises an exception (which callers should handle) when the import
    session is in an unknown state or the uploader reports an error.
    """
    import_session = upload_session.import_session
    # Re-fetch the session by id so we operate on fresh uploader state
    # rather than the possibly-stale object held by upload_session.
    import_session = gs_uploader.get_session(import_session.id)
    task = import_session.tasks[0]
    # Tracks whether commit() was already issued in the PENDING branch below,
    # so we do not commit twice.
    import_execution_requested = False
    if import_session.state == 'INCOMPLETE':
        if task.state != 'ERROR':
            raise Exception(_('unknown item state: %s' % task.state))
    elif import_session.state == 'PENDING' and task.target.store_type == 'coverageStore':
        if task.state == 'READY':
            import_session.commit(async_upload)
            import_execution_requested = True
        if task.state == 'ERROR':
            progress = task.get_progress()
            raise Exception(_(
                'error during import: %s' % progress.get('message')))

    # if a target datastore is configured, ensure the datastore exists
    # in geoserver and set the uploader target appropriately
    if ogc_server_settings.datastore_db and task.target.store_type != 'coverageStore':
        # BUGFIX: pass the configured workspace explicitly; without it the
        # featurestore could be created in GeoServer's default workspace,
        # making the set_target() call below point at the wrong workspace.
        # This also makes this definition consistent with the earlier
        # (shadowed) duplicate of run_import in this file.
        target = create_geoserver_db_featurestore(
            store_name=ogc_server_settings.datastore_db['NAME'],
            workspace=settings.DEFAULT_WORKSPACE
        )
        _log(
            'setting target datastore %s %s',
            target.name,
            target.workspace.name)
        task.set_target(target.name, target.workspace.name)
    else:
        # Coverage stores (or no configured datastore): keep the target the
        # uploader already assigned to the task.
        target = task.target

    if upload_session.update_mode:
        _log('setting updateMode to %s', upload_session.update_mode)
        task.set_update_mode(upload_session.update_mode)

    _log('running import session')
    # run async if using a database
    if not import_execution_requested:
        import_session.commit(async_upload)

    # @todo check status of import session - it may fail, but due to protocol,
    # this will not be reported during the commit
    return target