def _fixup_base_file(absolute_base_file, tempdir=None): tempdir_was_created = False if not tempdir or not os.path.exists(tempdir): tempdir = mkdtemp() tempdir_was_created = True try: if not os.path.isfile(absolute_base_file): tmp_files = [f for f in os.listdir(tempdir) if os.path.isfile(os.path.join(tempdir, f))] for f in tmp_files: if zipfile.is_zipfile(os.path.join(tempdir, f)): absolute_base_file = unzip_file(os.path.join(tempdir, f), '.shp', tempdir=tempdir) absolute_base_file = os.path.join(tempdir, absolute_base_file) elif zipfile.is_zipfile(absolute_base_file): absolute_base_file = unzip_file(absolute_base_file, '.shp', tempdir=tempdir) absolute_base_file = os.path.join(tempdir, absolute_base_file) if os.path.exists(absolute_base_file): return absolute_base_file else: raise Exception(_(f'File does not exist: {absolute_base_file}')) finally: if tempdir_was_created: # Get rid if temporary files that have been uploaded via Upload form logger.debug(f"... Cleaning up the temporary folders {tempdir}") shutil.rmtree(tempdir, ignore_errors=True)
def _get_layer_values(layer, upload_session, expand=0):
    """Collect attribute values of up to 100 features from the uploaded layer.

    Returns a list of per-feature dicts. With ``expand > 0`` each value is
    wrapped as ``{'value': ..., 'binding': <OGR field type name>}``.
    """
    layer_values = []
    if not upload_session:
        return layer_values
    absolute_base_file = upload_session.base_file[0].base_file
    tempdir = upload_session.tempdir
    if not os.path.isfile(absolute_base_file):
        # The base file may still be zipped inside the upload tempdir.
        for entry in os.listdir(tempdir):
            full_path = os.path.join(tempdir, entry)
            if os.path.isfile(full_path) and zipfile.is_zipfile(full_path):
                extracted = unzip_file(full_path, '.shp', tempdir=tempdir)
                absolute_base_file = os.path.join(tempdir, extracted)
    elif zipfile.is_zipfile(absolute_base_file):
        extracted = unzip_file(
            upload_session.base_file[0].base_file, '.shp', tempdir=tempdir)
        absolute_base_file = os.path.join(tempdir, extracted)
    inDataSource = ogr.Open(absolute_base_file)
    lyr = inDataSource.GetLayer(str(layer.name))
    limit = 100
    for feat in islice(lyr, 0, limit):
        feat_values = json_loads_byteified(feat.ExportToJson()).get('properties')
        for k in feat_values.keys():
            type_code = feat.GetFieldDefnRef(k).GetType()
            binding = feat.GetFieldDefnRef(k).GetFieldTypeName(type_code)
            # Normalise OGR's textual 'None' to 0 so the UI gets a value.
            feat_value = 0 if str(feat_values[k]) == 'None' else feat_values[k]
            if expand > 0:
                feat_values[k] = {'value': feat_value, 'binding': binding}
            else:
                feat_values[k] = feat_value
        layer_values.append(feat_values)
    return layer_values
def test_fixup_shp_columnnames(self):
    """fixup_shp_columnnames must sanitise the non-ASCII / invalid DBF column
    names of the test shapefile into the expected safe field names."""
    project_root = os.path.abspath(os.path.dirname(__file__))
    layer_zip = os.path.join(project_root, "data", "ming_female_1.zip")

    # BUG FIX: `failUnless` is a deprecated unittest alias (removed in
    # Python 3.12) — use the canonical `assertTrue` instead.
    self.assertTrue(zipfile.is_zipfile(layer_zip))

    layer_shp = unzip_file(layer_zip)

    expected_fieldnames = [
        "ID", "_f", "__1", "__2", "m", "_", "_M2", "_M2_1",
        "l", "x", "y", "_WU", "_1",
    ]
    # Charset of the source DBF is windows-1258 (Vietnamese).
    _, _, fieldnames = fixup_shp_columnnames(layer_shp, "windows-1258")

    inDriver = ogr.GetDriverByName("ESRI Shapefile")
    inDataSource = inDriver.Open(layer_shp, 0)
    inLayer = inDataSource.GetLayer()
    inLayerDefn = inLayer.GetLayerDefn()

    self.assertEqual(inLayerDefn.GetFieldCount(), len(expected_fieldnames))
    for i, fn in enumerate(expected_fieldnames):
        self.assertEqual(inLayerDefn.GetFieldDefn(i).GetName(), fn)
    inDataSource.Destroy()

    # Cleanup temp dir created by unzip_file, but only if it really is
    # inside the system temp location.
    shp_parent = os.path.dirname(layer_shp)
    if shp_parent.startswith(tempfile.gettempdir()):
        shutil.rmtree(shp_parent)
def _get_layer_values(layer, upload_session, expand=0):
    """Collect attribute values of up to 100 features from the uploaded layer.

    Returns a list of per-feature dicts decoded with the upload session's
    charset. With ``expand > 0`` each value is wrapped as
    ``{'value': ..., 'binding': <OGR field type name>}``.
    Features that fail to decode are logged and skipped (best-effort).
    """
    layer_values = []
    if upload_session:
        absolute_base_file = upload_session.base_file[0].base_file
        tempdir = upload_session.tempdir
        if not os.path.isfile(absolute_base_file):
            # The base file may still be zipped inside the upload tempdir.
            tmp_files = [
                f for f in os.listdir(tempdir)
                if os.path.isfile(os.path.join(tempdir, f))
            ]
            for f in tmp_files:
                if zipfile.is_zipfile(os.path.join(tempdir, f)):
                    absolute_base_file = unzip_file(
                        os.path.join(tempdir, f), '.shp', tempdir=tempdir)
                    absolute_base_file = os.path.join(tempdir, absolute_base_file)
        elif zipfile.is_zipfile(absolute_base_file):
            absolute_base_file = unzip_file(
                upload_session.base_file[0].base_file, '.shp', tempdir=tempdir)
            absolute_base_file = os.path.join(tempdir, absolute_base_file)
        inDataSource = ogr.Open(absolute_base_file)
        lyr = inDataSource.GetLayer(str(layer.name))
        limit = 100
        for feat in islice(lyr, 0, limit):
            try:
                feat_values = json_loads_byteified(
                    feat.ExportToJson(),
                    upload_session.charset).get('properties')
                for k in feat_values.keys():
                    type_code = feat.GetFieldDefnRef(k).GetType()
                    binding = feat.GetFieldDefnRef(k).GetFieldTypeName(type_code)
                    # Normalise OGR's textual 'None' to 0.
                    feat_value = feat_values[k] if str(
                        feat_values[k]) != 'None' else 0
                    if expand > 0:
                        ff = {'value': feat_value, 'binding': binding}
                        feat_values[k] = ff
                    else:
                        feat_values[k] = feat_value
                layer_values.append(feat_values)
            except Exception as e:
                # BUG FIX: was `except BaseException`, which would also
                # swallow KeyboardInterrupt/SystemExit. Exception is the
                # correct bound for a best-effort skip-and-log.
                logger.exception(e)
    return layer_values
def _fixup_base_file(absolute_base_file, tempdir=None): if not tempdir: tempdir = tempfile.mkdtemp() if not os.path.isfile(absolute_base_file): tmp_files = [f for f in os.listdir(tempdir) if os.path.isfile(os.path.join(tempdir, f))] for f in tmp_files: if zipfile.is_zipfile(os.path.join(tempdir, f)): absolute_base_file = unzip_file(os.path.join(tempdir, f), '.shp', tempdir=tempdir) absolute_base_file = os.path.join(tempdir, absolute_base_file) elif zipfile.is_zipfile(absolute_base_file): absolute_base_file = unzip_file(absolute_base_file, '.shp', tempdir=tempdir) absolute_base_file = os.path.join(tempdir, absolute_base_file) if os.path.exists(absolute_base_file): return absolute_base_file else: raise Exception(_('File does not exist: %s' % absolute_base_file))
def write_files(self):
    """Persist the validated upload into a fresh temp dir under STATIC_ROOT.

    A zipped base file is extracted in place; otherwise every spatial file
    is written chunk by chunk. Returns ``(tempdir, absolute_base_file)``.
    """
    absolute_base_file = None
    tempdir = tempfile.mkdtemp(dir=settings.STATIC_ROOT)
    if zipfile.is_zipfile(self.cleaned_data['base_file']):
        absolute_base_file = unzip_file(
            self.cleaned_data['base_file'], '.shp', tempdir=tempdir)
    else:
        for field in self.spatial_files:
            uploaded = self.cleaned_data[field]
            if uploaded is None:
                continue
            destination = os.path.join(tempdir, uploaded.name)
            with open(destination, 'wb') as writable:
                for chunk in uploaded.chunks():
                    writable.write(chunk)
        absolute_base_file = os.path.join(
            tempdir, self.cleaned_data["base_file"].name)
    return tempdir, absolute_base_file
def write_files(self):
    """Persist the validated upload into a fresh temporary directory.

    A zipped base file is extracted in place; otherwise every spatial file
    is written chunk by chunk. Returns ``(tempdir, absolute_base_file)``.
    """
    absolute_base_file = None
    tempdir = tempfile.mkdtemp()
    if zipfile.is_zipfile(self.cleaned_data['base_file']):
        absolute_base_file = unzip_file(
            self.cleaned_data['base_file'], '.shp', tempdir=tempdir)
    else:
        for field in self.spatial_files:
            uploaded = self.cleaned_data[field]
            if uploaded is None:
                continue
            destination = os.path.join(tempdir, uploaded.name)
            with open(destination, 'wb') as writable:
                for chunk in uploaded.chunks():
                    writable.write(chunk)
        absolute_base_file = os.path.join(
            tempdir, self.cleaned_data["base_file"].name)
    return tempdir, absolute_base_file
def get_files(filename):
    """Converts the data to Shapefiles or Geotiffs and returns
       a dictionary with all the required files.

    NOTE(review): this variant is Python-2 only (`str.decode`, `iteritems`,
    `logger.warn`). Keys of the returned dict are extensions without the dot
    ('shp', 'dbf', 'shx', 'prj', 'sld', 'xml', 'qml', 'json', or a coverage
    extension); values are absolute paths.

    :raises GeoNodeException: on non-ASCII filenames, missing files, missing
        shapefile helpers, or ambiguous (case-duplicate) helper matches.
    """
    files = {}

    # Verify if the filename is in ascii format.
    try:
        filename.decode('ascii')
    except UnicodeEncodeError:
        msg = "Please use only characters from the english alphabet for the filename. '%s' is not yet supported." \
            % os.path.basename(filename).encode('UTF-8')
        raise GeoNodeException(msg)

    # Let's unzip the filename in case it is a ZIP file
    import tempfile
    import zipfile
    from geonode.utils import unzip_file
    if zipfile.is_zipfile(filename):
        tempdir = tempfile.mkdtemp()
        filename = unzip_file(filename, '.shp', tempdir=tempdir)
        if not filename:
            # We need to iterate files as filename could be the zipfile
            import ntpath
            from geonode.upload.utils import _SUPPORTED_EXT
            file_basename, file_ext = ntpath.splitext(filename)
            for item in os.listdir(tempdir):
                item_basename, item_ext = ntpath.splitext(item)
                # Pick the extracted file whose basename matches the archive's.
                if ntpath.basename(item_basename) == ntpath.basename(file_basename) and (
                        item_ext.lower() in _SUPPORTED_EXT):
                    filename = os.path.join(tempdir, item)
                    break

    # Make sure the file exists.
    if not os.path.exists(filename):
        msg = ('Could not open %s. Make sure you are using a '
               'valid file' % filename)
        logger.warn(msg)
        raise GeoNodeException(msg)

    base_name, extension = os.path.splitext(filename)
    # Replace special characters in filenames - []{}()
    # (escape glob metacharacters so literal brackets/braces match).
    glob_name = re.sub(r'([\[\]\(\)\{\}])', r'[\g<1>]', base_name)

    if extension.lower() == '.shp':
        # A shapefile is only valid with its .dbf and .shx companions;
        # match them case-insensitively via glob character classes.
        required_extensions = dict(
            shp='.[sS][hH][pP]', dbf='.[dD][bB][fF]', shx='.[sS][hH][xX]')
        for ext, pattern in required_extensions.iteritems():
            matches = glob.glob(glob_name + pattern)
            if len(matches) == 0:
                msg = ('Expected helper file %s does not exist; a Shapefile '
                       'requires helper files with the following extensions: '
                       '%s') % (base_name + "." + ext, required_extensions.keys())
                raise GeoNodeException(msg)
            elif len(matches) > 1:
                msg = ('Multiple helper files for %s exist; they need to be '
                       'distinct by spelling and not just case.') % filename
                raise GeoNodeException(msg)
            else:
                files[ext] = matches[0]

        # The projection file is optional.
        matches = glob.glob(glob_name + ".[pP][rR][jJ]")
        if len(matches) == 1:
            files['prj'] = matches[0]
        elif len(matches) > 1:
            msg = ('Multiple helper files for %s exist; they need to be '
                   'distinct by spelling and not just case.') % filename
            raise GeoNodeException(msg)

    elif extension.lower() in cov_exts:
        # Coverage (raster) upload: the file itself is the only payload.
        files[extension.lower().replace('.', '')] = filename

    # Only for GeoServer
    if check_ogc_backend(geoserver.BACKEND_PACKAGE):
        matches = glob.glob(glob_name + ".[sS][lL][dD]")
        if len(matches) == 1:
            files['sld'] = matches[0]
        elif len(matches) > 1:
            msg = ('Multiple style files (sld) for %s exist; they need to be '
                   'distinct by spelling and not just case.') % filename
            raise GeoNodeException(msg)

        matches = glob.glob(glob_name + ".[xX][mM][lL]")
        # shapefile XML metadata is sometimes named base_name.shp.xml
        # try looking for filename.xml if base_name.xml does not exist
        if len(matches) == 0:
            matches = glob.glob(filename + ".[xX][mM][lL]")
        if len(matches) == 1:
            files['xml'] = matches[0]
        elif len(matches) > 1:
            msg = ('Multiple XML files for %s exist; they need to be '
                   'distinct by spelling and not just case.') % filename
            raise GeoNodeException(msg)

    # Only for QGIS Server
    if check_ogc_backend(qgis_server.BACKEND_PACKAGE):
        matches = glob.glob(glob_name + ".[qQ][mM][lL]")
        logger.debug('Checking QML file')
        logger.debug('Number of matches QML file : %s' % len(matches))
        logger.debug('glob name: %s' % glob_name)
        if len(matches) == 1:
            files['qml'] = matches[0]
        elif len(matches) > 1:
            msg = ('Multiple style files (qml) for %s exist; they need to be '
                   'distinct by spelling and not just case.') % filename
            raise GeoNodeException(msg)

        # Provides json files for additional extra data
        matches = glob.glob(glob_name + ".[jJ][sS][oO][nN]")
        logger.debug('Checking JSON File')
        logger.debug(
            'Number of matches JSON file : %s' % len(matches))
        # NOTE(review): this logs the `glob` MODULE, not `glob_name` —
        # looks like a typo, but left as-is (log-only impact).
        logger.debug('glob name: %s' % glob)
        if len(matches) == 1:
            files['json'] = matches[0]
        elif len(matches) > 1:
            msg = ('Multiple json files (json) for %s exist; they need to be '
                   'distinct by spelling and not just case.') % filename
            raise GeoNodeException(msg)

    return files
def upload(incoming, user=None, overwrite=False, name=None,
           title=None, abstract=None, date=None, license=None,
           category=None, keywords=None, regions=None, skip=True,
           ignore_errors=True, verbosity=1, console=None,
           private=False, metadata_uploaded_preserve=False,
           charset='UTF-8'):
    """Upload a directory of spatial data files to GeoNode

       This function also verifies that each layer is in GeoServer.

       Supported extensions are: .shp, .tif, .tar, .tar.gz, and .zip (of a shapefile).
       It catches GeoNodeExceptions and gives a report per file.

       NOTE(review): Python-2 only (`print >>` statement, three-expression
       `raise`). Returns a list of per-file status dicts; with
       ``ignore_errors`` set, failures are reported instead of raised.
    """
    if verbosity > 1:
        print >> console, "Verifying that GeoNode is running ..."

    if console is None:
        console = open(os.devnull, 'w')

    # Gather (basename, path) candidates: a single file, or a directory walk.
    potential_files = []
    if os.path.isfile(incoming):
        ___, short_filename = os.path.split(incoming)
        basename, extension = os.path.splitext(short_filename)
        filename = incoming

        if extension in ['.tif', '.shp', '.tar', '.zip']:
            potential_files.append((basename, filename))
        elif short_filename.endswith('.tar.gz'):
            potential_files.append((basename, filename))

    elif not os.path.isdir(incoming):
        msg = ('Please pass a filename or a directory name as the "incoming" '
               'parameter, instead of %s: %s' % (incoming, type(incoming)))
        logger.exception(msg)
        raise GeoNodeException(msg)
    else:
        datadir = incoming
        for root, dirs, files in os.walk(datadir):
            for short_filename in files:
                basename, extension = os.path.splitext(short_filename)
                filename = os.path.join(root, short_filename)
                if extension in ['.tif', '.shp', '.tar', '.zip']:
                    potential_files.append((basename, filename))
                elif short_filename.endswith('.tar.gz'):
                    potential_files.append((basename, filename))

    # After gathering the list of potential files,
    # let's process them one by one.
    number = len(potential_files)
    if verbosity > 1:
        msg = "Found %d potential layers." % number
        print >> console, msg

    # An explicit layer name only makes sense for a single import.
    if (number > 1) and (name is not None):
        msg = 'Failed to process.  Cannot specify name with multiple imports.'
        raise Exception(msg)

    output = []
    for i, file_pair in enumerate(potential_files):
        basename, filename = file_pair

        existing_layers = Layer.objects.filter(name=basename)

        if existing_layers.count() > 0:
            existed = True
        else:
            existed = False

        if existed and skip:
            # Skip existing layers unless overwrite was requested.
            save_it = False
            status = 'skipped'
            layer = existing_layers[0]
            if verbosity > 0:
                msg = ('Stopping process because '
                       '--overwrite was not set '
                       'and a layer with this name already exists.')
                print >> sys.stderr, msg
        else:
            save_it = True

        if save_it:
            try:
                # Unpack containers before handing the file to file_upload.
                if is_zipfile(filename):
                    filename = unzip_file(filename)

                if tarfile.is_tarfile(filename):
                    filename = extract_tarfile(filename)

                layer = file_upload(
                    filename,
                    name=name,
                    title=title,
                    abstract=abstract,
                    date=date,
                    user=user,
                    overwrite=overwrite,
                    license=license,
                    category=category,
                    keywords=keywords,
                    regions=regions,
                    metadata_uploaded_preserve=metadata_uploaded_preserve,
                    charset=charset)
                if not existed:
                    status = 'created'
                else:
                    status = 'updated'
                if private and user:
                    # Restrict all permissions to the uploading user.
                    perm_spec = {
                        "users": {
                            "AnonymousUser": [],
                            user.username: [
                                "change_resourcebase_metadata",
                                "change_layer_data",
                                "change_layer_style",
                                "change_resourcebase",
                                "delete_resourcebase",
                                "change_resourcebase_permissions",
                                "publish_resourcebase"]},
                        "groups": {}}
                    layer.set_permissions(perm_spec)

                if getattr(settings, 'SLACK_ENABLED', False):
                    # Best-effort notification; never fail the upload over it.
                    try:
                        from geonode.contrib.slack.utils import build_slack_message_layer, send_slack_messages
                        send_slack_messages(
                            build_slack_message_layer(
                                ("layer_new" if status == "created" else "layer_edit"),
                                layer))
                    except BaseException:
                        print "Could not send slack message."
            except Exception as e:
                if ignore_errors:
                    # Record the failure details for the report below.
                    status = 'failed'
                    exception_type, error, traceback = sys.exc_info()
                else:
                    if verbosity > 0:
                        msg = ('Stopping process because '
                               '--ignore-errors was not set '
                               'and an error was found.')
                        print >> sys.stderr, msg
                    msg = 'Failed to process %s' % filename
                    # Python-2 re-raise preserving the original traceback.
                    raise Exception(msg, e), None, sys.exc_info()[2]

        msg = "[%s] Layer for '%s' (%d/%d)" % (status, filename, i + 1, number)
        info = {'file': filename, 'status': status}
        if status == 'failed':
            info['traceback'] = traceback
            info['exception_type'] = exception_type
            info['error'] = error
        else:
            info['name'] = layer.name

        output.append(info)
        if verbosity > 0:
            print >> console, msg
    return output
def upload(incoming, user=None, overwrite=False, name=None,
           title=None, abstract=None, date=None, license=None,
           category=None, keywords=None, regions=None, skip=True,
           ignore_errors=True, verbosity=1, console=None,
           private=False, metadata_uploaded_preserve=False,
           charset='UTF-8'):
    """Upload a directory of spatial data files to GeoNode

       This function also verifies that each layer is in GeoServer.

       Supported extensions are: .shp, .tif, .tar, .tar.gz, and .zip (of a shapefile).
       It catches GeoNodeExceptions and gives a report per file.

       NOTE(review): Python-2 only (`print >>` statement, three-expression
       `raise`). Variant of the function above that logs (rather than prints)
       Slack notification failures. Returns a list of per-file status dicts.
    """
    if verbosity > 1:
        print >> console, "Verifying that GeoNode is running ..."

    if console is None:
        console = open(os.devnull, 'w')

    # Gather (basename, path) candidates: a single file, or a directory walk.
    potential_files = []
    if os.path.isfile(incoming):
        ___, short_filename = os.path.split(incoming)
        basename, extension = os.path.splitext(short_filename)
        filename = incoming

        if extension in ['.tif', '.shp', '.tar', '.zip']:
            potential_files.append((basename, filename))
        elif short_filename.endswith('.tar.gz'):
            potential_files.append((basename, filename))

    elif not os.path.isdir(incoming):
        msg = ('Please pass a filename or a directory name as the "incoming" '
               'parameter, instead of %s: %s' % (incoming, type(incoming)))
        logger.exception(msg)
        raise GeoNodeException(msg)
    else:
        datadir = incoming
        for root, dirs, files in os.walk(datadir):
            for short_filename in files:
                basename, extension = os.path.splitext(short_filename)
                filename = os.path.join(root, short_filename)
                if extension in ['.tif', '.shp', '.tar', '.zip']:
                    potential_files.append((basename, filename))
                elif short_filename.endswith('.tar.gz'):
                    potential_files.append((basename, filename))

    # After gathering the list of potential files,
    # let's process them one by one.
    number = len(potential_files)
    if verbosity > 1:
        msg = "Found %d potential layers." % number
        print >> console, msg

    # An explicit layer name only makes sense for a single import.
    if (number > 1) and (name is not None):
        msg = 'Failed to process.  Cannot specify name with multiple imports.'
        raise Exception(msg)

    output = []
    for i, file_pair in enumerate(potential_files):
        basename, filename = file_pair

        existing_layers = Layer.objects.filter(name=basename)

        if existing_layers.count() > 0:
            existed = True
        else:
            existed = False

        if existed and skip:
            # Skip existing layers unless overwrite was requested.
            save_it = False
            status = 'skipped'
            layer = existing_layers[0]
            if verbosity > 0:
                msg = ('Stopping process because '
                       '--overwrite was not set '
                       'and a layer with this name already exists.')
                print >> sys.stderr, msg
        else:
            save_it = True

        if save_it:
            try:
                # Unpack containers before handing the file to file_upload.
                if is_zipfile(filename):
                    filename = unzip_file(filename)

                if tarfile.is_tarfile(filename):
                    filename = extract_tarfile(filename)

                layer = file_upload(
                    filename,
                    name=name,
                    title=title,
                    abstract=abstract,
                    date=date,
                    user=user,
                    overwrite=overwrite,
                    license=license,
                    category=category,
                    keywords=keywords,
                    regions=regions,
                    metadata_uploaded_preserve=metadata_uploaded_preserve,
                    charset=charset)
                if not existed:
                    status = 'created'
                else:
                    status = 'updated'
                if private and user:
                    # Restrict all permissions to the uploading user.
                    perm_spec = {
                        "users": {
                            "AnonymousUser": [],
                            user.username: [
                                "change_resourcebase_metadata",
                                "change_layer_data",
                                "change_layer_style",
                                "change_resourcebase",
                                "delete_resourcebase",
                                "change_resourcebase_permissions",
                                "publish_resourcebase"]},
                        "groups": {}}
                    layer.set_permissions(perm_spec)

                if getattr(settings, 'SLACK_ENABLED', False):
                    # Best-effort notification; never fail the upload over it.
                    try:
                        from geonode.contrib.slack.utils import build_slack_message_layer, send_slack_messages
                        send_slack_messages(
                            build_slack_message_layer(
                                ("layer_new" if status == "created" else "layer_edit"),
                                layer))
                    except BaseException:
                        logger.error("Could not send slack message.")
            except Exception as e:
                if ignore_errors:
                    # Record the failure details for the report below.
                    status = 'failed'
                    exception_type, error, traceback = sys.exc_info()
                else:
                    if verbosity > 0:
                        msg = ('Stopping process because '
                               '--ignore-errors was not set '
                               'and an error was found.')
                        print >> sys.stderr, msg
                    msg = 'Failed to process %s' % filename
                    # Python-2 re-raise preserving the original traceback.
                    raise Exception(msg, e), None, sys.exc_info()[2]

        msg = "[%s] Layer for '%s' (%d/%d)" % (status, filename, i + 1, number)
        info = {'file': filename, 'status': status}
        if status == 'failed':
            info['traceback'] = traceback
            info['exception_type'] = exception_type
            info['error'] = error
        else:
            info['name'] = layer.name

        output.append(info)
        if verbosity > 0:
            print >> console, msg
    return output
def save_step_view(req, session):
    """Handle the 'save' step of the layer upload wizard.

    GET renders the upload form; POST validates it, writes the uploaded
    files to a temp dir, kicks off the importer session and stores an
    UploaderSession in the Django session keyed by the import session id.

    NOTE(review): Python-2 era (`req.FILES.itervalues()`).
    """
    if req.method == 'GET':
        return render(
            req,
            'upload/layer_upload.html',
            {
                'async_upload': _ASYNC_UPLOAD,
                'incomplete': Upload.objects.get_incomplete_uploads(req.user),
                'charsets': CHARSETS
            })
    form = LayerUploadForm(req.POST, req.FILES)
    if form.is_valid():
        tempdir = tempfile.mkdtemp(dir=settings.FILE_UPLOAD_TEMP_DIR)
        # Keep only the uploaded files whose extensions the form accepts.
        relevant_files = _select_relevant_files(
            form.cleaned_data["valid_extensions"],
            req.FILES.itervalues())
        _write_uploaded_files_to_disk(tempdir, relevant_files)
        base_file = os.path.join(tempdir, form.cleaned_data["base_file"].name)
        name, ext = os.path.splitext(os.path.basename(base_file))
        logger.debug('Name: {0}, ext: {1}'.format(name, ext))
        logger.debug("base_file: {}".format(base_file))
        spatial_files = scan_file(
            base_file,
            scan_hint=get_scan_hint(form.cleaned_data["valid_extensions"]))
        logger.debug("spatial_files: {}".format(spatial_files))
        # Start the gsimporter session with all the mosaic/time options
        # collected by the form.
        import_session = save_step(
            req.user,
            name,
            spatial_files,
            overwrite=False,
            mosaic=form.cleaned_data['mosaic'],
            append_to_mosaic_opts=form.cleaned_data['append_to_mosaic_opts'],
            append_to_mosaic_name=form.cleaned_data['append_to_mosaic_name'],
            mosaic_time_regex=form.cleaned_data['mosaic_time_regex'],
            mosaic_time_value=form.cleaned_data['mosaic_time_value'],
            time_presentation=form.cleaned_data['time_presentation'],
            time_presentation_res=form.cleaned_data['time_presentation_res'],
            time_presentation_default_value=form.
            cleaned_data['time_presentation_default_value'],
            time_presentation_reference_value=form.
            cleaned_data['time_presentation_reference_value'])
        sld = None

        if spatial_files[0].sld_files:
            sld = spatial_files[0].sld_files[0]
        if not os.path.isfile(os.path.join(tempdir, spatial_files[0].base_file)):
            # The base file may still be zipped in the tempdir: extract the
            # shapefile so later steps can find it on disk.
            tmp_files = [
                f for f in os.listdir(tempdir)
                if os.path.isfile(os.path.join(tempdir, f))
            ]
            for f in tmp_files:
                if zipfile.is_zipfile(os.path.join(tempdir, f)):
                    unzip_file(os.path.join(tempdir, f), '.shp', tempdir=tempdir)

        _log('provided sld is %s' % sld)
        # upload_type = get_upload_type(base_file)
        upload_session = UploaderSession(
            tempdir=tempdir,
            base_file=spatial_files,
            name=name,
            import_session=import_session,
            layer_abstract=form.cleaned_data["abstract"],
            layer_title=form.cleaned_data["layer_title"],
            permissions=form.cleaned_data["permissions"],
            import_sld_file=sld,
            upload_type=spatial_files[0].file_type.code,
            geogig=form.cleaned_data['geogig'],
            geogig_store=form.cleaned_data['geogig_store'],
            time=form.cleaned_data['time'],
            mosaic=form.cleaned_data['mosaic'],
            append_to_mosaic_opts=form.cleaned_data['append_to_mosaic_opts'],
            append_to_mosaic_name=form.cleaned_data['append_to_mosaic_name'],
            mosaic_time_regex=form.cleaned_data['mosaic_time_regex'],
            mosaic_time_value=form.cleaned_data['mosaic_time_value'],
            user=req.user
        )
        # Persist the uploader session, keyed by the importer session id,
        # so the subsequent wizard steps can pick it up.
        req.session[str(upload_session.import_session.id)] = upload_session
        _log('saved session : %s',
             req.session[str(upload_session.import_session.id)])
        return next_step_response(req, upload_session, force_ajax=True)
    else:
        errors = []
        for e in form.errors.values():
            errors.extend([escape(v) for v in e])
        return error_response(req, errors=errors)
def get_files(filename):
    """Converts the data to Shapefiles or Geotiffs and returns
       a dictionary with all the required files.

    Keys of the returned dict are extensions without the dot ('shp', 'dbf',
    'shx', 'prj', 'sld', 'xml', or a coverage extension); values are paths.

    :raises GeoNodeException: on non-ASCII filenames, missing files, missing
        shapefile helpers, or ambiguous (case-duplicate) helper matches.
    """
    files = {}

    # Verify if the filename is in ascii format.
    try:
        filename.encode('ascii')
    except UnicodeEncodeError:
        msg = f"Please use only characters from the english alphabet for the filename. '{os.path.basename(filename).encode('UTF-8', 'strict')}' is not yet supported."
        raise GeoNodeException(msg)

    # Let's unzip the filename in case it is a ZIP file
    import tempfile
    from geonode.utils import unzip_file
    if is_zipfile(filename):
        tempdir = tempfile.mkdtemp(dir=settings.STATIC_ROOT)
        _filename = unzip_file(filename, '.shp', tempdir=tempdir)
        if not _filename:
            # We need to iterate files as filename could be the zipfile
            import ntpath
            from geonode.upload.utils import _SUPPORTED_EXT
            file_basename, file_ext = ntpath.splitext(filename)
            for item in os.listdir(tempdir):
                item_basename, item_ext = ntpath.splitext(item)
                # Pick the extracted file whose basename matches the archive's.
                if ntpath.basename(item_basename) == ntpath.basename(
                        file_basename) and (item_ext.lower() in _SUPPORTED_EXT):
                    filename = os.path.join(tempdir, item)
                    break
        else:
            filename = _filename

    # Make sure the file exists.
    if not os.path.exists(filename):
        # BUG FIX: the message had lost the offending path (it read
        # "Could not open (unknown)."); interpolate the actual filename
        # as the legacy variant of this function does.
        msg = (f'Could not open {filename}. Make sure you are using a '
               'valid file')
        logger.debug(msg)
        raise GeoNodeException(msg)

    base_name, extension = os.path.splitext(filename)
    # Replace special characters in filenames - []{}()
    # (escape glob metacharacters so literal brackets/braces match).
    glob_name = re.sub(r'([\[\]\(\)\{\}])', r'[\g<1>]', base_name)

    if extension.lower() == '.shp':
        # A shapefile is only valid with its .dbf and .shx companions;
        # match them case-insensitively via glob character classes.
        required_extensions = dict(shp='.[sS][hH][pP]',
                                   dbf='.[dD][bB][fF]',
                                   shx='.[sS][hH][xX]')
        for ext, pattern in required_extensions.items():
            matches = glob.glob(glob_name + pattern)
            if len(matches) == 0:
                msg = (
                    f'Expected helper file {base_name}.{ext} does not exist; a Shapefile '
                    'requires helper files with the following extensions: '
                    f'{list(required_extensions.keys())}')
                raise GeoNodeException(msg)
            elif len(matches) > 1:
                msg = ('Multiple helper files for %s exist; they need to be '
                       'distinct by spelling and not just case.') % filename
                raise GeoNodeException(msg)
            else:
                files[ext] = matches[0]

        # The projection file is optional.
        matches = glob.glob(f"{glob_name}.[pP][rR][jJ]")
        if len(matches) == 1:
            files['prj'] = matches[0]
        elif len(matches) > 1:
            msg = ('Multiple helper files for %s exist; they need to be '
                   'distinct by spelling and not just case.') % filename
            raise GeoNodeException(msg)

    elif extension.lower() in cov_exts:
        # Coverage (raster) upload: the file itself is the only payload.
        files[extension.lower().replace('.', '')] = filename

    # Only for GeoServer
    if check_ogc_backend(geoserver.BACKEND_PACKAGE):
        # First try an .sld next to the parent directory name, then one
        # matching the base name itself.
        matches = glob.glob(f"{os.path.dirname(glob_name)}.[sS][lL][dD]")
        if len(matches) == 1:
            files['sld'] = matches[0]
        else:
            matches = glob.glob(f"{glob_name}.[sS][lL][dD]")
            if len(matches) == 1:
                files['sld'] = matches[0]
            elif len(matches) > 1:
                msg = (
                    'Multiple style files (sld) for %s exist; they need to be '
                    'distinct by spelling and not just case.') % filename
                raise GeoNodeException(msg)

        matches = glob.glob(f"{glob_name}.[xX][mM][lL]")
        # shapefile XML metadata is sometimes named base_name.shp.xml
        # try looking for filename.xml if base_name.xml does not exist
        if len(matches) == 0:
            # BUG FIX: the glob pattern had lost the filename (it read
            # "(unknown).[xX][mM][lL]"); use the actual filename as the
            # legacy variant of this function does.
            matches = glob.glob(f"{filename}.[xX][mM][lL]")
        if len(matches) == 1:
            files['xml'] = matches[0]
        elif len(matches) > 1:
            msg = ('Multiple XML files for %s exist; they need to be '
                   'distinct by spelling and not just case.') % filename
            raise GeoNodeException(msg)

    return files