Example #1
def csv_step(upload_session, lat_field, lng_field):
    import_session = upload_session.import_session
    task = import_session.tasks[0]

    transform = {'type': 'AttributesToPointGeometryTransform',
                 'latField': lat_field,
                 'lngField': lng_field,
                 }
    task.remove_transforms([transform], by_field='type', save=False)
    task.add_transforms([transform], save=False)
    task.save_transforms()
    try:
        import_session = import_session.reload()
    except gsimporter.api.NotFound as e:
        Upload.objects.invalidate_from_session(upload_session)
        raise UploadException.from_exc(
            _("The GeoServer Import Session is no more available"), e)
    upload_session.import_session = import_session
    Upload.objects.update_from_session(upload_session)
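
For reference, a minimal sketch of the transform payload csv_step registers with the GeoServer Importer. The field names come from the snippet above; the 'lat'/'lon' column names are illustrative.

import json

# The AttributesToPointGeometryTransform dict that csv_step builds; gsimporter
# carries it into the Importer task's transform chain.
transform = {
    'type': 'AttributesToPointGeometryTransform',
    'latField': 'lat',   # illustrative CSV column holding latitudes
    'lngField': 'lon',   # illustrative CSV column holding longitudes
}
print(json.dumps(transform, indent=2))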
Example #2
def srs_step(upload_session, source, target):
    import_session = upload_session.import_session
    task = import_session.tasks[0]
    if source:
        logger.debug('Setting SRS to %s', source)
        task.set_srs(source)

    transform = {'type': 'ReprojectTransform',
                 'source': source,
                 'target': target,
                 }
    task.remove_transforms([transform], by_field='type', save=False)
    task.add_transforms([transform], save=False)
    task.save_transforms()
    try:
        import_session = import_session.reload()
    except gsimporter.api.NotFound as e:
        Upload.objects.invalidate_from_session(upload_session)
        raise UploadException.from_exc(
            _("The GeoServer Import Session is no more available"), e)
    upload_session.import_session = import_session
    Upload.objects.update_from_session(upload_session)
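
The remove-then-add pattern above keeps the task from accumulating duplicate transforms. Below is a plain-Python sketch of those semantics (not the gsimporter API itself), assuming transforms are matched on their 'type' field, as remove_transforms(by_field='type') does.

# In-memory stand-in for a task's transform chain.
transforms = [{'type': 'ReprojectTransform',
               'source': 'EPSG:32633', 'target': 'EPSG:4326'}]

new = {'type': 'ReprojectTransform', 'source': 'EPSG:3857', 'target': 'EPSG:4326'}
# remove_transforms([...], by_field='type'): drop any transform of the same type.
transforms = [t for t in transforms if t['type'] != new['type']]
# add_transforms([...]): append the fresh definition.
transforms.append(new)
print(transforms)  # exactly one ReprojectTransform survives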
Example #3
def import_imagemosaic_granules(spatial_files, append_to_mosaic_opts,
                                append_to_mosaic_name, mosaic_time_regex,
                                mosaic_time_value, time_presentation,
                                time_presentation_res,
                                time_presentation_default_value,
                                time_presentation_reference_value):

    # The very first step is to rename the granule by adding the selected regex
    #  matching value to the filename.

    f = spatial_files[0].base_file
    dirname = os.path.dirname(f)
    basename = os.path.basename(f)
    head, tail = os.path.splitext(basename)

    if not mosaic_time_regex:
        mosaic_time_regex, mosaic_time_format = _get_time_regex(
            spatial_files, basename)

    # 0. A Time Regex is mandatory to validate the files
    if not mosaic_time_regex:
        raise UploadException(
            _("Could not find any valid Time Regex for the Mosaic files."))

    for spatial_file in spatial_files:
        f = spatial_file.base_file
        basename = os.path.basename(f)
        head, tail = os.path.splitext(basename)
        regexp = re.compile(mosaic_time_regex)
        match = regexp.match(head)
        if match and match.groups():
            mosaic_time_value = match.groups()[0]
            head = head.replace(match.groups()[0], '{mosaic_time_value}')
        if mosaic_time_value:
            dst_file = os.path.join(
                dirname,
                head.replace('{mosaic_time_value}', mosaic_time_value) + tail)
            os.rename(f, dst_file)
            spatial_file.base_file = dst_file

    # We use the GeoServer REST APIs in order to create the ImageMosaic
    #  and later add the granule through the GeoServer Importer.
    head = head.replace('{mosaic_time_value}', '')
    head = re.sub('^[^a-zA-Z]*|[^a-zA-Z]*$', '', head)

    # 1. Create a zip file containing the ImageMosaic .properties files
    # 1a. Let's check and prepare the DB based DataStore
    cat = gs_catalog
    workspace = cat.get_workspace(settings.DEFAULT_WORKSPACE)
    db = ogc_server_settings.datastore_db
    db_engine = 'postgis' if 'postgis' in db['ENGINE'] else db['ENGINE']

    if db_engine != 'postgis':
        raise UploadException(_("Unsupported DataBase for Mosaics!"))

    # dsname = ogc_server_settings.DATASTORE
    dsname = db['NAME']

    ds_exists = False
    try:
        ds = get_store(cat, dsname, workspace=workspace)
        ds_exists = (ds is not None)
    except FailedRequestError:
        ds = cat.create_datastore(dsname, workspace=workspace)
        db = ogc_server_settings.datastore_db
        db_engine = 'postgis' if 'postgis' in db['ENGINE'] else db['ENGINE']
        ds.connection_parameters.update({
            'validate connections': 'true',
            'max connections': '10',
            'min connections': '1',
            'fetch size': '1000',
            'host': db['HOST'],
            'port': db['PORT'] if isinstance(db['PORT'], str)
                    else str(db['PORT']) or '5432',
            'database': db['NAME'],
            'user': db['USER'],
            'passwd': db['PASSWORD'],
            'dbtype': db_engine
        })
        cat.save(ds)
        ds = get_store(cat, dsname, workspace=workspace)
        ds_exists = (ds is not None)

    if not ds_exists:
        raise UploadException(_("Unsupported DataBase for Mosaics!"))

    context = {
        "abs_path_flag": "True",
        "time_attr": "time",
        "aux_metadata_flag": "False",
        "mosaic_time_regex": mosaic_time_regex,
        "db_host": db['HOST'],
        "db_port": db['PORT'],
        "db_name": db['NAME'],
        "db_user": db['USER'],
        "db_password": db['PASSWORD'],
        "db_conn_timeout": db.get('CONN_TOUT', "10"),
        "db_conn_min": db.get('CONN_MIN', "1"),
        "db_conn_max": db.get('CONN_MAX', "5"),
        "db_conn_validate": db.get('CONN_VALIDATE', "true"),
    }

    if mosaic_time_regex:
        indexer_template = """AbsolutePath={abs_path_flag}
TimeAttribute={time_attr}
Schema= the_geom:Polygon,location:String,{time_attr}:java.util.Date
PropertyCollectors=TimestampFileNameExtractorSPI[timeregex]({time_attr})
CheckAuxiliaryMetadata={aux_metadata_flag}
SuggestedSPI=it.geosolutions.imageioimpl.plugins.tiff.TIFFImageReaderSpi"""

        timeregex_template = """regex=(?<=_)({mosaic_time_regex})"""

        if not os.path.exists(dirname + '/timeregex.properties'):
            with open(dirname + '/timeregex.properties',
                      'w') as timeregex_prop_file:
                timeregex_prop_file.write(timeregex_template.format(**context))
    else:
        indexer_template = """AbsolutePath={abs_path_flag}
Schema= the_geom:Polygon,location:String,{time_attr}
CheckAuxiliaryMetadata={aux_metadata_flag}
SuggestedSPI=it.geosolutions.imageioimpl.plugins.tiff.TIFFImageReaderSpi"""

    datastore_template = r"""SPI=org.geotools.data.postgis.PostgisNGDataStoreFactory
host={db_host}
port={db_port}
database={db_name}
user={db_user}
passwd={db_password}
Loose\ bbox=true
Estimated\ extends=false
validate\ connections={db_conn_validate}
Connection\ timeout={db_conn_timeout}
min\ connections={db_conn_min}
max\ connections={db_conn_max}"""

    if not os.path.exists(dirname + '/indexer.properties'):
        with open(dirname + '/indexer.properties', 'w') as indexer_prop_file:
            indexer_prop_file.write(indexer_template.format(**context))

    if not os.path.exists(dirname + '/datastore.properties'):
        with open(dirname + '/datastore.properties',
                  'w') as datastore_prop_file:
            datastore_prop_file.write(datastore_template.format(**context))

    files_to_upload = []
    if not append_to_mosaic_opts and spatial_files:
        z = zipfile.ZipFile(dirname + '/' + head + '.zip', "w")
        for spatial_file in spatial_files:
            f = spatial_file.base_file
            dst_basename = os.path.basename(f)
            dst_head, dst_tail = os.path.splitext(dst_basename)
            if not files_to_upload:
                # Let's import only the first granule
                z.write(spatial_file.base_file, arcname=dst_head + dst_tail)
            files_to_upload.append(spatial_file.base_file)
        if os.path.exists(dirname + '/indexer.properties'):
            z.write(dirname + '/indexer.properties',
                    arcname='indexer.properties')
        if os.path.exists(dirname + '/datastore.properties'):
            z.write(dirname + '/datastore.properties',
                    arcname='datastore.properties')
        if mosaic_time_regex:
            z.write(dirname + '/timeregex.properties',
                    arcname='timeregex.properties')
        z.close()

        # 2. Send a "create ImageMosaic" request to GeoServer through gs_config
        cat._cache.clear()
        # - name = name of the ImageMosaic (equal to the base_name)
        # - data = abs path to the zip file
        # - configure = parameter allows for future configuration after harvesting
        name = head
        with open(dirname + '/' + head + '.zip', 'rb') as data:
            try:
                cat.create_imagemosaic(name, data)
            except ConflictingDataError:
                # Trying to append granules to an existing mosaic
                pass

        # configure time as LIST
        if mosaic_time_regex:
            set_time_dimension(cat, name, workspace, time_presentation,
                               time_presentation_res,
                               time_presentation_default_value,
                               time_presentation_reference_value)

        # - since GeoNode will upload the first granule again through the Importer, we need to /
        #   delete the one created by the gs_config
        # mosaic_delete_first_granule(cat, name)
        if len(spatial_files) > 1:
            spatial_files = spatial_files[0]
        return head, files_to_upload
    else:
        cat._cache.clear()
        cat.reset()
        # cat.reload()
        return append_to_mosaic_name, files_to_upload
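
To see what this function writes to disk, here is the indexer rendering in isolation; the context values are illustrative stand-ins for the ones assembled above.

# Rendering the indexer.properties template from the snippet with sample values.
indexer_template = """AbsolutePath={abs_path_flag}
TimeAttribute={time_attr}
Schema= the_geom:Polygon,location:String,{time_attr}:java.util.Date
PropertyCollectors=TimestampFileNameExtractorSPI[timeregex]({time_attr})
CheckAuxiliaryMetadata={aux_metadata_flag}
SuggestedSPI=it.geosolutions.imageioimpl.plugins.tiff.TIFFImageReaderSpi"""

context = {'abs_path_flag': 'True', 'time_attr': 'time', 'aux_metadata_flag': 'False'}
print(indexer_template.format(**context))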
Example #4
def time_step_view(request, upload_session):
    if not upload_session:
        upload_session = _get_upload_session(request)
    import_session = upload_session.import_session
    assert import_session is not None

    force_ajax = '&force_ajax=true' \
        if request and request.GET.get('force_ajax') == 'true' else ''
    if request.method == 'GET':
        layer = check_import_session_is_valid(request, upload_session,
                                              import_session)
        if layer:
            (has_time_dim, layer_values) = layer_eligible_for_time_dimension(
                request, layer, upload_session=upload_session)
            if has_time_dim and layer_values:
                upload_session.completed_step = 'check'
                if not force_ajax:
                    context = {
                        'time_form':
                        create_time_form(request, upload_session, None),
                        'layer_name':
                        layer.name,
                        'layer_values':
                        layer_values,
                        'layer_attributes':
                        list(layer_values[0].keys()),
                        'async_upload':
                        is_async_step(upload_session)
                    }
                    return render(request,
                                  'upload/layer_upload_time.html',
                                  context=context)
            else:
                upload_session.completed_step = 'time' if _ALLOW_TIME_STEP else 'check'
        return next_step_response(request, upload_session)
    elif request.method != 'POST':
        raise Exception("time_step_view only supports GET and POST requests")

    form = create_time_form(request, upload_session, request.POST)
    if not form.is_valid():
        logger.warning('Invalid upload form: %s', form.errors)
        return error_response(request, errors=["Invalid Submission"])

    cleaned = form.cleaned_data
    start_attribute_and_type = cleaned.get('start_attribute', None)
    if upload_session.time_transforms:
        upload_session.import_session.tasks[0].remove_transforms(
            upload_session.time_transforms, save=True)
        upload_session.import_session.tasks[0].save_transforms()
        upload_session.time_transforms = None

    if upload_session.import_session.tasks[0].transforms:
        for transform in upload_session.import_session.tasks[0].transforms:
            if 'type' in transform and \
                    str(transform['type']) in ('DateFormatTransform',
                                               'CreateIndexTransform'):
                upload_session.import_session.tasks[0].remove_transforms(
                    [transform], save=True)
                upload_session.import_session.tasks[0].save_transforms()

    try:
        upload_session.import_session = import_session.reload()
    except gsimporter.api.NotFound as e:
        Upload.objects.invalidate_from_session(upload_session)
        raise UploadException.from_exc(
            _("The GeoServer Import Session is no more available"), e)

    if start_attribute_and_type:

        def tx(type_name):
            # return None if type_name is None or type_name == 'Date' \
            return None if type_name is None \
                else 'DateFormatTransform'

        end_attribute, end_type = cleaned.get('end_attribute', (None, None))
        time_step(
            upload_session,
            time_attribute=start_attribute_and_type[0],
            time_transform_type=tx(start_attribute_and_type[1]),
            time_format=cleaned.get('attribute_format', None),
            end_time_attribute=end_attribute,
            end_time_transform_type=tx(end_type),
            end_time_format=cleaned.get('end_attribute_format', None),
            presentation_strategy=cleaned['presentation_strategy'],
            precision_value=cleaned['precision_value'],
            precision_step=cleaned['precision_step'],
        )

    upload_session.completed_step = 'check'
    return next_step_response(request, upload_session)
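
As a quick illustration of the POST branch, this is the shape of the cleaned form data time_step_view expects and how tx() maps it; the attribute names and the 'attribute_format' value are hypothetical.

def tx(type_name):
    # Same decision as above: any non-None attribute type requests a
    # DateFormatTransform.
    return None if type_name is None else 'DateFormatTransform'

cleaned = {
    'start_attribute': ('obs_date', 'String'),  # hypothetical (name, type) pair
    'attribute_format': 'yyyy-MM-dd',
    'presentation_strategy': 'LIST',
    'precision_value': 1,
    'precision_step': 'days',
}
start_attr, start_type = cleaned['start_attribute']
print(start_attr, tx(start_type))  # obs_date DateFormatTransform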
Example #5
def final_step(upload_session, user, charset="UTF-8", dataset_id=None):
    import_session = upload_session.import_session
    import_id = import_session.id

    _log(f'Reloading session {import_id} to check validity')
    try:
        import_session = import_session.reload()
    except gsimporter.api.NotFound as e:
        Upload.objects.invalidate_from_session(upload_session)
        raise UploadException.from_exc(
            _("The GeoServer Import Session is no more available"), e)

    upload_qs = Upload.objects.filter(import_id=import_id)
    if upload_qs.exists():
        upload_qs.update(complete=False)
        upload = upload_qs.get()
        if upload.state == enumerations.STATE_RUNNING:
            return

    upload_session.import_session = import_session
    Upload.objects.update_from_session(upload_session)

    # Create the style and assign it to the created resource
    # FIXME: Put this in gsconfig.py
    task = import_session.tasks[0]
    task.set_charset(charset)

    # @todo see above in save_step, regarding computed unique name
    name = task.layer.name

    if dataset_id:
        name = Dataset.objects.get(resourcebase_ptr_id=dataset_id).name

    _log(f'Getting from catalog [{name}]')
    try:
        # The importer chooses an available featuretype name late in the game,
        # so we need to verify the resource name; otherwise things will fail.
        # This happens when the same data is uploaded a second time and the
        # default name is chosen.
        gs_catalog.get_layer(name)
    except Exception:
        Upload.objects.invalidate_from_session(upload_session)
        raise LayerNotReady(
            _(f"Expected to find layer named '{name}' in geoserver"))

    if import_session.state == 'READY' or (import_session.state == 'PENDING'
                                           and task.state == 'READY'):
        import_session.commit()
    elif import_session.state == 'INCOMPLETE' and task.state != 'ERROR':
        Upload.objects.invalidate_from_session(upload_session)
        raise Exception(f'unknown item state: {task.state}')
    try:
        import_session = import_session.reload()
    except gsimporter.api.NotFound as e:
        Upload.objects.invalidate_from_session(upload_session)
        raise UploadException.from_exc(
            _("The GeoServer Import Session is no more available"), e)
    upload_session.import_session = import_session
    Upload.objects.update_from_session(upload_session)

    _log(f'Creating Django record for [{name}]')
    target = task.target
    alternate = task.get_target_layer_name()
    dataset_uuid = None
    title = upload_session.dataset_title
    abstract = upload_session.dataset_abstract

    metadata_uploaded = False
    xml_file = upload_session.base_file[0].xml_files
    if xml_file:
        try:
            # get model properties from XML
            # If it's contained within a zip, need to extract it
            if upload_session.base_file.archive:
                archive = upload_session.base_file.archive
                zf = zipfile.ZipFile(archive, 'r', allowZip64=True)
                zf.extract(xml_file[0], os.path.dirname(archive))
                # Assign the absolute path to this file
                xml_file = f"{os.path.dirname(archive)}/{xml_file[0]}"

            # Sanity checks
            if isinstance(xml_file, list):
                if len(xml_file) > 0:
                    xml_file = xml_file[0]
                else:
                    xml_file = None
            elif not isinstance(xml_file, str):
                xml_file = None

            if xml_file and os.path.exists(xml_file) and os.access(
                    xml_file, os.R_OK):
                dataset_uuid, vals, regions, keywords, custom = parse_metadata(
                    open(xml_file).read())
                metadata_uploaded = True
        except Exception as e:
            Upload.objects.invalidate_from_session(upload_session)
            logger.error(e)
            raise GeoNodeException(
                _("Exception occurred while parsing the provided Metadata file."
                  ), e)

    # look for SLD
    sld_file = upload_session.base_file[0].sld_files
    sld_uploaded = False
    if sld_file:
        # If it's contained within a zip, need to extract it
        if upload_session.base_file.archive:
            archive = upload_session.base_file.archive
            logger.error(f'using uploaded sld file from {archive}')
            zf = zipfile.ZipFile(archive, 'r', allowZip64=True)
            zf.extract(sld_file[0],
                       os.path.dirname(archive),
                       path=upload_session.tempdir)
            # Assign the absolute path to this file
            sld_file[0] = f"{os.path.dirname(archive)}/{sld_file[0]}"
        else:
            _sld_file = f"{os.path.dirname(upload_session.tempdir)}/{os.path.basename(sld_file[0])}"
            logger.error(f"copying [{sld_file[0]}] to [{_sld_file}]")
            try:
                shutil.copyfile(sld_file[0], _sld_file)
                sld_file = _sld_file
            except (IsADirectoryError, shutil.SameFileError) as e:
                logger.exception(e)
                sld_file = sld_file[0]
            except Exception as e:
                raise UploadException.from_exc(_('Error uploading Dataset'), e)
        sld_uploaded = True
    else:
        # get_files will not find the sld if it doesn't match the base name
        # so we've worked around that in the view - if provided, it will be here
        if upload_session.import_sld_file:
            logger.error('using provided sld file from importer')
            base_file = upload_session.base_file
            sld_file = base_file[0].sld_files[0]
        sld_uploaded = False
    logger.error(f'[sld_uploaded: {sld_uploaded}] sld_file: {sld_file}')

    # Make sure the layer does not already exist
    if dataset_uuid and Dataset.objects.filter(uuid=dataset_uuid).count():
        Upload.objects.invalidate_from_session(upload_session)
        logger.error(
            "The UUID identifier from the XML Metadata is already in use in this system."
        )
        raise GeoNodeException(
            _("The UUID identifier from the XML Metadata is already in use in this system."
              ))

    # Is it a regular file or an ImageMosaic?
    # if upload_session.mosaic_time_regex and upload_session.mosaic_time_value:
    saved_dataset = None
    has_time = has_elevation = False
    start = end = None
    if upload_session.mosaic_time_regex and upload_session.mosaic_time_value:
        has_elevation = True
        start = datetime.datetime.strptime(
            upload_session.mosaic_time_value,
            TIME_REGEX_FORMAT[upload_session.mosaic_time_regex])
        start = pytz.utc.localize(start, is_dst=False)
        end = start
    if upload_session.time and upload_session.time_info and upload_session.time_transforms:
        has_time = True

    if upload_session.append_to_mosaic_opts:
        try:
            with transaction.atomic():
                saved_dataset_filter = Dataset.objects.filter(
                    name=upload_session.append_to_mosaic_name)
                if not saved_dataset_filter.exists():
                    saved_dataset = resource_manager.create(
                        name=upload_session.append_to_mosaic_name)
                    created = True
                else:
                    saved_dataset = saved_dataset_filter.get()
                    created = False
                saved_dataset.set_dirty_state()
                if saved_dataset.temporal_extent_start and end:
                    if pytz.utc.localize(saved_dataset.temporal_extent_start,
                                         is_dst=False) < end:
                        saved_dataset.temporal_extent_end = end
                        Dataset.objects.filter(
                            name=upload_session.append_to_mosaic_name).update(
                                temporal_extent_end=end)
                    else:
                        saved_dataset.temporal_extent_start = end
                        Dataset.objects.filter(
                            name=upload_session.append_to_mosaic_name).update(
                                temporal_extent_start=end)
        except Exception as e:
            _log(
                f"There was an error updating the mosaic temporal extent: {str(e)}"
            )
    else:
        try:
            with transaction.atomic():
                saved_dataset_filter = Dataset.objects.filter(
                    store=target.name,
                    alternate=alternate,
                    workspace=target.workspace_name,
                    name=task.layer.name)
                if not saved_dataset_filter.exists():
                    saved_dataset = resource_manager.create(
                        dataset_uuid,
                        resource_type=Dataset,
                        defaults=dict(
                            store=target.name,
                            subtype=get_dataset_storetype(target.store_type),
                            alternate=alternate,
                            workspace=target.workspace_name,
                            title=title,
                            name=task.layer.name,
                            abstract=abstract or _('No abstract provided'),
                            owner=user,
                            temporal_extent_start=start,
                            temporal_extent_end=end,
                            is_mosaic=has_elevation,
                            has_time=has_time,
                            has_elevation=has_elevation,
                            time_regex=upload_session.mosaic_time_regex))
                    created = True
                else:
                    saved_dataset = saved_dataset_filter.get()
                    created = False
                saved_dataset.set_dirty_state()
        except Exception as e:
            Upload.objects.invalidate_from_session(upload_session)
            raise UploadException.from_exc(_('Error configuring Dataset'), e)

    assert saved_dataset

    if not created:
        return saved_dataset

    try:
        saved_dataset.set_dirty_state()
        with transaction.atomic():
            Upload.objects.update_from_session(upload_session,
                                               resource=saved_dataset)

            # Set default permissions on the newly created layer and send notifications
            permissions = upload_session.permissions

            # Finalize Upload
            resource_manager.set_permissions(None,
                                             instance=saved_dataset,
                                             permissions=permissions,
                                             created=created)
            resource_manager.update(None,
                                    instance=saved_dataset,
                                    xml_file=xml_file,
                                    metadata_uploaded=metadata_uploaded)
            resource_manager.exec('set_style',
                                  None,
                                  instance=saved_dataset,
                                  sld_uploaded=sld_uploaded,
                                  sld_file=sld_file,
                                  tempdir=upload_session.tempdir)
            resource_manager.exec('set_time_info',
                                  None,
                                  instance=saved_dataset,
                                  time_info=upload_session.time_info)
            resource_manager.set_thumbnail(None, instance=saved_dataset)
            saved_dataset.set_processing_state(enumerations.STATE_PROCESSED)
    except Exception as e:
        saved_dataset.set_processing_state(enumerations.STATE_INVALID)
        raise GeoNodeException(e)
    finally:
        saved_dataset.clear_dirty_state()

    try:
        logger.debug(
            f"... Cleaning up the temporary folders {upload_session.tempdir}")
        if upload_session.tempdir and os.path.exists(upload_session.tempdir):
            shutil.rmtree(upload_session.tempdir)
    except Exception as e:
        logger.warning(e)
    finally:
        Upload.objects.filter(import_id=import_id).update(complete=True)

    return saved_dataset
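
The temporal-extent handling above hinges on TIME_REGEX_FORMAT mapping each supported regex to a strptime format. A self-contained sketch of that conversion, assuming '%Y%m%d' as a representative format entry:

import datetime
import pytz

# Turn a regex-extracted granule timestamp into the tz-aware datetime used for
# temporal_extent_start/end; '20240115' and '%Y%m%d' are illustrative.
start = datetime.datetime.strptime('20240115', '%Y%m%d')
start = pytz.utc.localize(start, is_dst=False)
end = start
print(start.isoformat())  # 2024-01-15T00:00:00+00:00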
Example #6
def time_step(upload_session,
              time_attribute,
              time_transform_type,
              presentation_strategy,
              precision_value,
              precision_step,
              end_time_attribute=None,
              end_time_transform_type=None,
              end_time_format=None,
              time_format=None):
    '''
    time_attribute - name of attribute to use as time

    time_transform_type - name of transform. either
    DateFormatTransform or IntegerFieldToDateTransform

    time_format - optional string format
    end_time_attribute - optional name of attribute to use as end time

    end_time_transform_type - optional name of transform. either
    DateFormatTransform or IntegerFieldToDateTransform

    end_time_format - optional string format
    presentation_strategy - LIST, DISCRETE_INTERVAL, CONTINUOUS_INTERVAL
    precision_value - number
    precision_step - year, month, day, week, etc.
    '''
    transforms = []

    def build_time_transform(att, type, format, end_time_attribute,
                             presentation_strategy):
        trans = {'type': type, 'field': att}
        if format:
            trans['format'] = format
        if end_time_attribute:
            trans['enddate'] = end_time_attribute
        if presentation_strategy:
            trans['presentation'] = presentation_strategy
        return trans

    def build_att_remap_transform(att):
        # @todo the target is so ugly it should be obvious
        return {
            'type': 'AttributeRemapTransform',
            'field': att,
            'target': 'org.geotools.data.postgis.PostGISDialect$XDate'
        }

    use_big_date = getattr(settings, 'USE_BIG_DATE', False)

    if time_attribute:
        if time_transform_type:
            transforms.append(
                build_time_transform(time_attribute, time_transform_type,
                                     time_format, end_time_attribute,
                                     presentation_strategy))

        # this must go after the remapping transform to ensure the
        # type change is applied
        if use_big_date:
            transforms.append(build_att_remap_transform(time_attribute))
            if end_time_attribute:

                transforms.append(
                    build_att_remap_transform(end_time_attribute))

        transforms.append({
            'type': 'CreateIndexTransform',
            'field': time_attribute
        })
        # the time_info will be used in the last step to configure the
        # layer in geoserver - the dict matches the arguments of the
        # set_time_info helper function
        upload_session.time_info = dict(attribute=time_attribute,
                                        end_attribute=end_time_attribute,
                                        presentation=presentation_strategy,
                                        precision_value=precision_value,
                                        precision_step=precision_step)

    if upload_session.time_transforms:
        upload_session.import_session.tasks[0].remove_transforms(
            upload_session.time_transforms)

    if transforms:
        logger.debug(f'Setting transforms {transforms}')
        upload_session.import_session.tasks[0].add_transforms(transforms)
        try:
            upload_session.time_transforms = transforms
            upload_session.time = True
        except gsimporter.BadRequest as br:
            Upload.objects.invalidate_from_session(upload_session)
            raise UploadException.from_exc(_('Error configuring time:'), br)
        upload_session.import_session.tasks[0].save_transforms()
    else:
        upload_session.time = False
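
Putting the helpers together, this is the transform chain time_step assembles for a date-typed start attribute; 'obs_date' and the format value are illustrative.

transforms = [
    # built by build_time_transform
    {'type': 'DateFormatTransform', 'field': 'obs_date',
     'format': 'yyyy-MM-dd', 'presentation': 'LIST'},
    # always appended so GeoServer indexes the time column
    {'type': 'CreateIndexTransform', 'field': 'obs_date'},
]
for t in transforms:
    print(t)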
Example #7
def save_step(user,
              layer,
              spatial_files,
              overwrite=True,
              mosaic=False,
              append_to_mosaic_opts=None,
              append_to_mosaic_name=None,
              mosaic_time_regex=None,
              mosaic_time_value=None,
              time_presentation=None,
              time_presentation_res=None,
              time_presentation_default_value=None,
              time_presentation_reference_value=None,
              charset_encoding="UTF-8",
              target_store=None):
    logger.debug(f'Uploading layer: {layer}, files {spatial_files}')
    if len(spatial_files) > 1:
        # we only support more than one file if they're rasters for mosaicking
        if not all(
            [f.file_type.dataset_type == 'coverage' for f in spatial_files]):
            msg = "Please upload only one type of file at a time"
            raise UploadException(msg)
    name = get_valid_dataset_name(layer, overwrite)
    logger.debug(f'Name for layer: {name}')
    if not any(spatial_files.all_files()):
        msg = "Unable to recognize the uploaded file(s)"
        raise UploadException(msg)
    the_dataset_type = get_dataset_type(spatial_files)
    _check_geoserver_store(name, the_dataset_type, overwrite)
    if the_dataset_type not in (FeatureType.resource_type,
                                Coverage.resource_type):
        msg = f"Expected layer type to FeatureType or Coverage, not {the_dataset_type}"
        raise RuntimeError(msg)
    files_to_upload = preprocess_files(spatial_files)
    logger.debug(f"files_to_upload: {files_to_upload}")
    logger.debug(f'Uploading {the_dataset_type}')
    error_msg = None
    try:
        next_id = _get_next_id()
        # Truncate name to maximum length defined by the field.
        max_length = Upload._meta.get_field('name').max_length
        name = name[:max_length]
        # save record of this whether valid or not - will help w/ debugging
        upload, _ = Upload.objects.get_or_create(
            user=user,
            name=name,
            state=enumerations.STATE_READY,
            upload_dir=spatial_files.dirname)

        # @todo settings for use_url or auto detection if geoserver is
        # on same host

        # Is it a regular file or an ImageMosaic?
        # if mosaic_time_regex and mosaic_time_value:
        if mosaic:  # we want to ingest as ImageMosaic
            target_store, files_to_upload = utils.import_imagemosaic_granules(
                spatial_files, append_to_mosaic_opts, append_to_mosaic_name,
                mosaic_time_regex, mosaic_time_value, time_presentation,
                time_presentation_res, time_presentation_default_value,
                time_presentation_reference_value)
            upload.mosaic = mosaic
            upload.append_to_mosaic_opts = append_to_mosaic_opts
            upload.append_to_mosaic_name = append_to_mosaic_name
            upload.mosaic_time_regex = mosaic_time_regex
            upload.mosaic_time_value = mosaic_time_value
            # moving forward with a regular Importer session
            if len(files_to_upload) > 1:
                import_session = gs_uploader.upload_files(
                    files_to_upload[1:],
                    use_url=False,
                    # import_id=next_id,
                    target_store=target_store,
                    charset_encoding=charset_encoding)
            else:
                import_session = gs_uploader.upload_files(
                    files_to_upload,
                    use_url=False,
                    # import_id=next_id,
                    target_store=target_store,
                    charset_encoding=charset_encoding)
            next_id = import_session.id if import_session else None
            if not next_id:
                error_msg = 'No valid Importer Session could be found'
        else:
            # moving forward with a regular Importer session
            import_session = gs_uploader.upload_files(
                files_to_upload,
                use_url=False,
                import_id=next_id,
                mosaic=False,
                target_store=target_store,
                name=name,
                charset_encoding=charset_encoding)
        upload.import_id = import_session.id
        upload.save()

        # any unrecognized tasks/files must be deleted or we can't proceed
        import_session.delete_unrecognized_tasks()

        if not mosaic:
            if not import_session.tasks:
                error_msg = 'No valid upload files could be found'
        if import_session.tasks:
            if import_session.tasks[0].state == 'NO_FORMAT' \
                    or import_session.tasks[0].state == 'BAD_FORMAT':
                error_msg = 'There may be a problem with the data provided - ' \
                            'we could not identify its format'

        if not mosaic and len(import_session.tasks) > 1:
            error_msg = "Only a single upload is supported at the moment"

        if not error_msg and import_session.tasks:
            task = import_session.tasks[0]
            # single file tasks will have just a file entry
            if hasattr(task, 'files'):
                # @todo gsimporter - test this
                if not all(
                    [hasattr(f, 'timestamp') for f in task.source.files]):
                    error_msg = (
                        "Not all timestamps could be recognized. "
                        "Please ensure your files contain the correct formats."
                    )

        if error_msg:
            upload.set_processing_state(enumerations.STATE_INVALID)

        # @todo once the random tmp9723481758915 type of name is not
        # around, need to track the name computed above, for now, the
        # target store name can be used
    except Exception as e:
        tb = traceback.format_exc()
        logger.debug(tb)
        logger.exception('Error creating import session')
        raise e

    if error_msg:
        raise UploadException(error_msg)
    else:
        _log("Finished upload of [%s] to GeoServer without errors.", name)

    return import_session, upload
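
One easily missed detail above is the truncation of the computed name against the Upload model's field limit. A plain-Python equivalent, with 64 standing in for Upload._meta.get_field('name').max_length:

max_length = 64  # stand-in for Upload._meta.get_field('name').max_length
name = 'landsat8_scene_' * 10  # an over-long computed layer name
name = name[:max_length]
print(len(name), name)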
Example #8
def final_step(upload_session, user, charset="UTF-8"):
    import_session = upload_session.import_session
    _log('Reloading session %s to check validity', import_session.id)
    try:
        import_session = import_session.reload()
    except gsimporter.api.NotFound as e:
        Upload.objects.invalidate_from_session(upload_session)
        raise UploadException.from_exc(
            _("The GeoServer Import Session is no more available"), e)
    upload_session.import_session = import_session
    Upload.objects.update_from_session(upload_session)

    # The importer chooses an available featuretype name late in the game, so
    # we need to verify the resource name; otherwise things will fail. This
    # happens when the same data is uploaded a second time and the default
    # name is chosen.
    cat = gs_catalog

    # Create the style and assign it to the created resource
    # FIXME: Put this in gsconfig.py
    task = import_session.tasks[0]
    task.set_charset(charset)

    # @todo see above in save_step, regarding computed unique name
    name = task.layer.name

    _log('Getting from catalog [%s]', name)
    publishing = cat.get_layer(name)

    if import_session.state == 'INCOMPLETE':
        if task.state != 'ERROR':
            Upload.objects.invalidate_from_session(upload_session)
            raise Exception(f'unknown item state: {task.state}')
    elif import_session.state == 'READY':
        import_session.commit()
    elif import_session.state == 'PENDING':
        if task.state == 'READY':
            # if not task.data.format or task.data.format != 'Shapefile':
            import_session.commit()

    try:
        import_session = import_session.reload()
    except gsimporter.api.NotFound as e:
        Upload.objects.invalidate_from_session(upload_session)
        raise UploadException.from_exc(
            _("The GeoServer Import Session is no more available"), e)
    upload_session.import_session = import_session
    Upload.objects.update_from_session(upload_session)

    if not publishing:
        Upload.objects.invalidate_from_session(upload_session)
        raise LayerNotReady(
            _(f"Expected to find layer named '{name}' in geoserver"))

    _log('Creating Django record for [%s]', name)
    target = task.target
    alternate = task.get_target_layer_name()
    layer_uuid = str(uuid.uuid1())
    title = upload_session.layer_title
    abstract = upload_session.layer_abstract

    # look for xml and finalize Layer metadata
    metadata_uploaded = False
    xml_file = upload_session.base_file[0].xml_files
    if xml_file:
        # get model properties from XML
        # If it's contained within a zip, need to extract it
        if upload_session.base_file.archive:
            archive = upload_session.base_file.archive
            zf = zipfile.ZipFile(archive, 'r', allowZip64=True)
            zf.extract(xml_file[0], os.path.dirname(archive))
            # Assign the absolute path to this file
            xml_file = f"{os.path.dirname(archive)}/{xml_file[0]}"

        # Sanity checks
        if isinstance(xml_file, list):
            if len(xml_file) > 0:
                xml_file = xml_file[0]
            else:
                xml_file = None
        elif not isinstance(xml_file, str):
            xml_file = None

        if xml_file and os.path.exists(xml_file) and os.access(
                xml_file, os.R_OK):
            metadata_uploaded = True
            layer_uuid, vals, regions, keywords = set_metadata(
                open(xml_file).read())

    # Make sure the layer does not already exist
    if Layer.objects.filter(uuid=layer_uuid).count():
        Upload.objects.invalidate_from_session(upload_session)
        logger.error(
            "The UUID identifier from the XML Metadata is already in use in this system."
        )
        raise GeoNodeException(
            _("The UUID identifier from the XML Metadata is already in use in this system."
              ))

    # Is it a regular file or an ImageMosaic?
    # if upload_session.mosaic_time_regex and upload_session.mosaic_time_value:
    saved_layer = None
    if upload_session.mosaic:
        import pytz
        import datetime
        from geonode.layers.models import TIME_REGEX_FORMAT

        # llbbox = publishing.resource.latlon_bbox
        start = None
        end = None
        if upload_session.mosaic_time_regex and upload_session.mosaic_time_value:
            has_time = True
            start = datetime.datetime.strptime(
                upload_session.mosaic_time_value,
                TIME_REGEX_FORMAT[upload_session.mosaic_time_regex])
            start = pytz.utc.localize(start, is_dst=False)
            end = start
        else:
            has_time = False

        if not upload_session.append_to_mosaic_opts:
            try:
                with transaction.atomic():
                    saved_layer, created = Layer.objects.get_or_create(
                        uuid=layer_uuid,
                        defaults=dict(
                            store=target.name,
                            storeType=target.store_type,
                            alternate=alternate,
                            workspace=target.workspace_name,
                            title=title,
                            name=task.layer.name,
                            abstract=abstract or '',
                            owner=user,
                            temporal_extent_start=start,
                            temporal_extent_end=end,
                            is_mosaic=True,
                            has_time=has_time,
                            has_elevation=False,
                            time_regex=upload_session.mosaic_time_regex))
            except IntegrityError as e:
                Upload.objects.invalidate_from_session(upload_session)
                raise UploadException.from_exc(_('Error configuring Layer'), e)
            assert saved_layer
        else:
            # saved_layer = Layer.objects.filter(name=upload_session.append_to_mosaic_name)
            # created = False
            saved_layer, created = Layer.objects.get_or_create(
                name=upload_session.append_to_mosaic_name)
            assert saved_layer
            try:
                if saved_layer.temporal_extent_start and end:
                    if pytz.utc.localize(saved_layer.temporal_extent_start,
                                         is_dst=False) < end:
                        saved_layer.temporal_extent_end = end
                        Layer.objects.filter(
                            name=upload_session.append_to_mosaic_name).update(
                                temporal_extent_end=end)
                    else:
                        saved_layer.temporal_extent_start = end
                        Layer.objects.filter(
                            name=upload_session.append_to_mosaic_name).update(
                                temporal_extent_start=end)
            except Exception as e:
                _log(
                    f"There was an error updating the mosaic temporal extent: {str(e)}"
                )
    else:
        _has_time = bool(upload_session.time and upload_session.time_info
                         and upload_session.time_transforms)
        try:
            with transaction.atomic():
                saved_layer, created = Layer.objects.get_or_create(
                    uuid=layer_uuid,
                    defaults=dict(store=target.name,
                                  storeType=target.store_type,
                                  alternate=alternate,
                                  workspace=target.workspace_name,
                                  title=title,
                                  name=task.layer.name,
                                  abstract=abstract or '',
                                  owner=user,
                                  has_time=_has_time))
        except IntegrityError as e:
            Upload.objects.invalidate_from_session(upload_session)
            raise UploadException.from_exc(_('Error configuring Layer'), e)
        assert saved_layer

    # Create a new upload session
    try:
        with transaction.atomic():
            geonode_upload_session, created = UploadSession.objects.get_or_create(
                resource=saved_layer, user=user)
            geonode_upload_session.processed = False
            geonode_upload_session.save()
            Upload.objects.update_from_session(upload_session,
                                               layer=saved_layer)
    except IntegrityError as e:
        Upload.objects.invalidate_from_session(upload_session)
        raise UploadException.from_exc(_('Error configuring Layer'), e)

    # Add them to the upload session (new file fields are created).
    assigned_name = None

    def _store_file(saved_layer,
                    geonode_upload_session,
                    base_file,
                    assigned_name,
                    base=False):
        with open(base_file, 'rb') as f:
            file_name, type_name = os.path.splitext(
                os.path.basename(base_file))
            geonode_upload_session.layerfile_set.create(
                name=file_name,
                base=base,
                file=File(
                    f, name=f'{assigned_name or saved_layer.name}{type_name}'))
            # save the system assigned name for the remaining files
            if not assigned_name:
                the_file = geonode_upload_session.layerfile_set.all(
                )[0].file.name
                assigned_name = os.path.splitext(os.path.basename(the_file))[0]

            return assigned_name

    if upload_session.base_file:
        uploaded_files = upload_session.base_file[0]
        base_file = uploaded_files.base_file
        aux_files = uploaded_files.auxillary_files
        sld_files = uploaded_files.sld_files
        xml_files = uploaded_files.xml_files

        assigned_name = _store_file(saved_layer,
                                    geonode_upload_session,
                                    base_file,
                                    assigned_name,
                                    base=True)

        for _f in aux_files:
            _store_file(saved_layer, geonode_upload_session, _f, assigned_name)

        for _f in sld_files:
            _store_file(saved_layer, geonode_upload_session, _f, assigned_name)

        for _f in xml_files:
            _store_file(saved_layer, geonode_upload_session, _f, assigned_name)

    # @todo if layer was not created, need to ensure upload target is
    # same as existing target
    # Create the points of contact records for the layer
    _log(f'Creating points of contact records for {name}')
    saved_layer.poc = user
    saved_layer.metadata_author = user
    saved_layer.metadata_uploaded = metadata_uploaded

    _log('Creating style for [%s]', name)
    # look for SLD
    sld_file = upload_session.base_file[0].sld_files
    sld_uploaded = False
    if sld_file:
        # If it's contained within a zip, need to extract it
        if upload_session.base_file.archive:
            archive = upload_session.base_file.archive
            zf = zipfile.ZipFile(archive, 'r', allowZip64=True)
            zf.extract(sld_file[0], os.path.dirname(archive))
            # Assign the absolute path to this file
            sld_file[0] = f"{os.path.dirname(archive)}/{sld_file[0]}"
        sld_file = sld_file[0]
        sld_uploaded = True
        # geoserver_set_style.apply_async((saved_layer.id, sld_file))
    else:
        # get_files will not find the sld if it doesn't match the base name
        # so we've worked around that in the view - if provided, it will be here
        if upload_session.import_sld_file:
            _log('using provided sld file')
            base_file = upload_session.base_file
            sld_file = base_file[0].sld_files[0]
        sld_uploaded = False
        # geoserver_create_style.apply_async((saved_layer.id, name, sld_file, upload_session.tempdir))

    if upload_session.time_info:
        set_time_info(saved_layer, **upload_session.time_info)

    # saved keywords and thesaurus for the uploaded layer
    regions_resolved, regions_unresolved = resolve_regions(regions)
    if keywords and regions_unresolved:
        keywords.extend(convert_keyword(regions_unresolved))

    saved_layer = utils.KeywordHandler(saved_layer, keywords).set_keywords()

    regions_resolved = list(set(regions_resolved))
    if regions_resolved:
        if not saved_layer.regions:
            saved_layer.regions = regions_resolved
        else:
            saved_layer.regions.clear()
            saved_layer.regions.add(*regions_resolved)

    # Set default permissions on the newly created layer and send notifications
    permissions = upload_session.permissions
    geoserver_finalize_upload.apply_async(
        (import_session.id, saved_layer.id, permissions, created, xml_file,
         sld_file, sld_uploaded, upload_session.tempdir))

    return saved_layer
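
The XML sidecar handling above extracts the metadata file next to its archive before reading it. A self-contained sketch of that extraction step; the archive and entry names are illustrative.

import os
import zipfile

archive = 'upload.zip'
# Build a tiny archive so the sketch runs end to end.
with zipfile.ZipFile(archive, 'w', allowZip64=True) as zf:
    zf.writestr('metadata.xml', '<MD_Metadata/>')
# Extract the sidecar next to the archive, as final_step does.
with zipfile.ZipFile(archive, 'r', allowZip64=True) as zf:
    zf.extract('metadata.xml', os.path.dirname(archive) or '.')
print(os.path.exists('metadata.xml'))  # True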
Example #9
def time_step(upload_session, time_attribute, time_transform_type,
              presentation_strategy, precision_value, precision_step,
              end_time_attribute=None,
              end_time_transform_type=None,
              end_time_format=None,
              time_format=None):
    '''
    time_attribute - name of attribute to use as time

    time_transform_type - name of transform. either
    DateFormatTransform or IntegerFieldToDateTransform

    time_format - optional string format
    end_time_attribute - optional name of attribute to use as end time

    end_time_transform_type - optional name of transform. either
    DateFormatTransform or IntegerFieldToDateTransform

    end_time_format - optional string format
    presentation_strategy - LIST, DISCRETE_INTERVAL, CONTINUOUS_INTERVAL
    precision_value - number
    precision_step - year, month, day, week, etc.
    '''
    transforms = []

    def build_time_transform(att, type, format, end_time_attribute, presentation_strategy):
        trans = {'type': type, 'field': att}
        if format:
            trans['format'] = format
        if end_time_attribute:
            trans['enddate'] = end_time_attribute
        if presentation_strategy:
            trans['presentation'] = presentation_strategy
        return trans

    def build_att_remap_transform(att):
        # @todo the target is so ugly it should be obvious
        return {'type': 'AttributeRemapTransform',
                'field': att,
                'target': 'org.geotools.data.postgis.PostGISDialect$XDate'}

    use_big_date = getattr(
        settings,
        'USE_BIG_DATE',
        False) and not upload_session.geogig

    if time_attribute:
        if time_transform_type:
            transforms.append(
                build_time_transform(
                    time_attribute,
                    time_transform_type,
                    time_format,
                    end_time_attribute,
                    presentation_strategy
                )
            )

        # this must go after the remapping transform to ensure the
        # type change is applied
        if use_big_date:
            transforms.append(build_att_remap_transform(time_attribute))
            if end_time_attribute:

                transforms.append(
                    build_att_remap_transform(end_time_attribute)
                )

        transforms.append({
            'type': 'CreateIndexTransform',
            'field': time_attribute
        })
        # the time_info will be used in the last step to configure the
        # layer in geoserver - the dict matches the arguments of the
        # set_time_info helper function
        upload_session.time_info = dict(
            attribute=time_attribute,
            end_attribute=end_time_attribute,
            presentation=presentation_strategy,
            precision_value=precision_value,
            precision_step=precision_step
        )

    if upload_session.time_transforms:
        upload_session.import_session.tasks[0].remove_transforms(
            upload_session.time_transforms
        )

    if transforms:
        logger.debug('Setting transforms %s', transforms)
        upload_session.import_session.tasks[0].add_transforms(transforms)
        try:
            upload_session.time_transforms = transforms
            upload_session.time = True
        except BadRequest as br:
            raise UploadException.from_exc('Error configuring time:', br)
        upload_session.import_session.tasks[0].save_transforms()
    else:
        upload_session.time = False
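
When USE_BIG_DATE applies, the remap transform built above coerces the attribute to PostGIS's extended date type; its literal payload looks like this ('obs_date' is illustrative):

remap = {
    'type': 'AttributeRemapTransform',
    'field': 'obs_date',
    'target': 'org.geotools.data.postgis.PostGISDialect$XDate',
}
print(remap)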
Example #10
def final_step(upload_session, user, charset="UTF-8", layer_id=None):
    import_session = upload_session.import_session
    import_id = import_session.id

    _log(f'Reloading session {import_id} to check validity')
    try:
        import_session = import_session.reload()
    except gsimporter.api.NotFound as e:
        Upload.objects.invalidate_from_session(upload_session)
        raise UploadException.from_exc(
            _("The GeoServer Import Session is no more available"), e)

    upload_qs = Upload.objects.filter(import_id=import_id)
    if upload_qs.exists():
        upload_qs.update(complete=False)
        upload = upload_qs.get()
        if upload.state == Upload.STATE_RUNNING:
            return
        # The WAITING state is set when lat and lng have not been selected for
        # a CSV upload. If everything is OK at final_step, the state can return
        # to PENDING; during final_step it will change again to complete the
        # operation.
        if upload.state == Upload.STATE_WAITING:
            upload.set_processing_state(Upload.STATE_PENDING)

    upload_session.import_session = import_session
    Upload.objects.update_from_session(upload_session)

    # Create the style and assign it to the created resource
    # FIXME: Put this in gsconfig.py
    task = import_session.tasks[0]
    task.set_charset(charset)

    # @todo see above in save_step, regarding computed unique name
    name = task.layer.name

    if layer_id:
        name = Layer.objects.get(resourcebase_ptr_id=layer_id).name

    _log(f'Getting from catalog [{name}]')
    try:
        # The importer chooses an available featuretype name late in the game,
        # so we need to verify the resource name; otherwise things will fail.
        # This happens when the same data is uploaded a second time and the
        # default name is chosen.
        gs_catalog.get_layer(name)
    except Exception:
        Upload.objects.invalidate_from_session(upload_session)
        raise LayerNotReady(
            _(f"Expected to find layer named '{name}' in geoserver"))

    if import_session.state == 'READY' or (import_session.state == 'PENDING' and task.state == 'READY'):
        import_session.commit()
    elif import_session.state == 'INCOMPLETE' and task.state != 'ERROR':
        Upload.objects.invalidate_from_session(upload_session)
        raise Exception(f'unknown item state: {task.state}')
    try:
        import_session = import_session.reload()
    except gsimporter.api.NotFound as e:
        Upload.objects.invalidate_from_session(upload_session)
        raise UploadException.from_exc(
            _("The GeoServer Import Session is no more available"), e)
    upload_session.import_session = import_session
    Upload.objects.update_from_session(upload_session)

    _log(f'Creating Django record for [{name}]')
    target = task.target
    alternate = task.get_target_layer_name()
    layer_uuid = None
    title = upload_session.layer_title
    abstract = upload_session.layer_abstract
    regions = []
    keywords = []
    vals = {}
    custom = {}
    # look for xml and finalize Layer metadata
    metadata_uploaded = False
    xml_file = upload_session.base_file[0].xml_files
    if xml_file:
        try:
            # get model properties from XML
            # If it's contained within a zip, need to extract it
            if upload_session.base_file.archive:
                archive = upload_session.base_file.archive
                zf = zipfile.ZipFile(archive, 'r', allowZip64=True)
                zf.extract(xml_file[0], os.path.dirname(archive))
                # Assign the absolute path to this file
                xml_file = f"{os.path.dirname(archive)}/{xml_file[0]}"

            # Sanity checks
            if isinstance(xml_file, list):
                if len(xml_file) > 0:
                    xml_file = xml_file[0]
                else:
                    xml_file = None
            elif not isinstance(xml_file, str):
                xml_file = None

            if xml_file and os.path.exists(xml_file) and os.access(xml_file, os.R_OK):
                # Read and parse the XML metadata; use a context manager so the
                # file handle is closed promptly.
                with open(xml_file) as xml_fh:
                    layer_uuid, vals, regions, keywords, custom = parse_metadata(
                        xml_fh.read())
                metadata_uploaded = True
        except Exception as e:
            Upload.objects.invalidate_from_session(upload_session)
            logger.error(e)
            raise GeoNodeException(
                _("Exception occurred while parsing the provided Metadata file."), e)

    # Make sure the layer does not already exist
    if layer_uuid and Layer.objects.filter(uuid=layer_uuid).count():
        Upload.objects.invalidate_from_session(upload_session)
        logger.error("The UUID identifier from the XML Metadata is already in use in this system.")
        raise GeoNodeException(
            _("The UUID identifier from the XML Metadata is already in use in this system."))

    # Is it a regular file or an ImageMosaic?
    has_time = has_elevation = False
    start = end = None
    if upload_session.mosaic_time_regex and upload_session.mosaic_time_value:
        has_time = True
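        # Illustration (assumed values, not from the snippet): with a time regex
        # such as '([0-9]{8})', TIME_REGEX_FORMAT would map to a strptime pattern
        # like '%Y%m%d', so a granule value of '20150101' parses to
        # 2015-01-01 00:00:00 UTC below.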
        start = datetime.datetime.strptime(upload_session.mosaic_time_value,
                                           TIME_REGEX_FORMAT[upload_session.mosaic_time_regex])
        start = pytz.utc.localize(start, is_dst=False)
        end = start
    if upload_session.time and upload_session.time_info and upload_session.time_transforms:
        has_time = True

    saved_layer = None
    if upload_session.mosaic:
        if not upload_session.append_to_mosaic_opts:
            saved_dataset_filter = Layer.objects.filter(
                store=target.name,
                alternate=alternate,
                workspace=target.workspace_name,
                name=task.layer.name)
            if not saved_dataset_filter.exists():
                saved_layer = Layer.objects.create(
                    uuid=layer_uuid or str(uuid.uuid1()),
                    store=target.name,
                    storeType=target.store_type,
                    alternate=alternate,
                    workspace=target.workspace_name,
                    title=title,
                    name=task.layer.name,
                    abstract=abstract or '',
                    owner=user,
                    temporal_extent_start=start,
                    temporal_extent_end=end,
                    is_mosaic=True,
                    has_time=has_time,
                    has_elevation=has_elevation,
                    time_regex=upload_session.mosaic_time_regex)
                created = True
            else:
                saved_layer = saved_dataset_filter.get()
                created = False
        else:
            saved_layer, created = Layer.objects.get_or_create(
                name=upload_session.append_to_mosaic_name)
            try:
                if saved_layer.temporal_extent_start and end:
                    if pytz.utc.localize(
                            saved_layer.temporal_extent_start,
                            is_dst=False) < end:
                        saved_layer.temporal_extent_end = end
                        Layer.objects.filter(
                            name=upload_session.append_to_mosaic_name).update(
                            temporal_extent_end=end)
                    else:
                        saved_layer.temporal_extent_start = end
                        Layer.objects.filter(
                            name=upload_session.append_to_mosaic_name).update(
                            temporal_extent_start=end)
            except Exception as e:
                _log(
                    f"There was an error updating the mosaic temporal extent: {str(e)}")
    else:
        saved_dataset_filter = Layer.objects.filter(
            store=target.name,
            alternate=alternate,
            workspace=target.workspace_name,
            name=task.layer.name)
        if not saved_dataset_filter.exists():
            saved_layer = Layer.objects.create(
                uuid=layer_uuid or str(uuid.uuid1()),
                store=target.name,
                storeType=target.store_type,
                alternate=alternate,
                workspace=target.workspace_name,
                title=title,
                name=task.layer.name,
                abstract=abstract or '',
                owner=user,
                temporal_extent_start=start,
                temporal_extent_end=end,
                is_mosaic=False,
                has_time=has_time,
                has_elevation=has_elevation,
                time_regex=upload_session.mosaic_time_regex)
            created = True
        else:
            saved_layer = saved_dataset_filter.get()
            created = False

    assert saved_layer

    if not created:
        return saved_layer

    # Hide the resource until finished
    saved_layer.set_dirty_state()

    # Create a new upload session
    geonode_upload_session, created = UploadSession.objects.get_or_create(
        resource=saved_layer, user=user
    )
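    # Note: get_or_create rebinds `created`; from here on it reflects whether the
    # UploadSession (not the Layer) was newly created.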
    geonode_upload_session.processed = False
    geonode_upload_session.save()
    Upload.objects.update_from_session(upload_session, layer=saved_layer)

    # Add the uploaded files to the upload session (new file fields are created).
    assigned_name = None

    # Update Layer with information coming from XML File if available
    saved_layer = _update_layer_with_xml_info(saved_layer, xml_file, regions, keywords, vals)

    def _store_file(saved_layer,
                    geonode_upload_session,
                    base_file,
                    assigned_name,
                    base=False):
        if os.path.exists(base_file):
            with open(base_file, 'rb') as f:
                file_name, type_name = os.path.splitext(os.path.basename(base_file))
                geonode_upload_session.layerfile_set.create(
                    name=file_name,
                    base=base,
                    file=File(
                        f, name=f'{assigned_name or saved_layer.name}{type_name}'))
                # save the system assigned name for the remaining files
                if not assigned_name:
                    the_file = geonode_upload_session.layerfile_set.all()[0].file.name
                    assigned_name = os.path.splitext(os.path.basename(the_file))[0]

            return assigned_name

    if upload_session.base_file:
        uploaded_files = upload_session.base_file[0]
        base_file = uploaded_files.base_file
        aux_files = uploaded_files.auxillary_files
        sld_files = uploaded_files.sld_files
        xml_files = uploaded_files.xml_files

        assigned_name = _store_file(
            saved_layer,
            geonode_upload_session,
            base_file,
            assigned_name,
            base=True)

        for _f in aux_files:
            _store_file(saved_layer,
                        geonode_upload_session,
                        _f,
                        assigned_name)

        for _f in sld_files:
            _store_file(saved_layer,
                        geonode_upload_session,
                        _f,
                        assigned_name)

        for _f in xml_files:
            _store_file(saved_layer,
                        geonode_upload_session,
                        _f,
                        assigned_name)

    # @todo if layer was not created, need to ensure upload target is
    # same as existing target
    # Create the points of contact records for the layer
    _log(f'Creating points of contact records for {name}')
    saved_layer.poc = user
    saved_layer.metadata_author = user
    saved_layer.metadata_uploaded = metadata_uploaded

    _log(f'Creating style for [{name}]')
    # look for SLD
    sld_file = upload_session.base_file[0].sld_files
    sld_uploaded = False
    if sld_file:
        # If it's contained within a zip, need to extract it
        if upload_session.base_file.archive:
            archive = upload_session.base_file.archive
            _log(f'using uploaded sld file from {archive}')
            zf = zipfile.ZipFile(archive, 'r', allowZip64=True)
            zf.extract(sld_file[0], path=os.path.dirname(archive))
            # Assign the absolute path to this file
            sld_file[0] = f"{os.path.dirname(archive)}/{sld_file[0]}"
        else:
            _sld_file = f"{os.path.dirname(upload_session.tempdir)}/{os.path.basename(sld_file[0])}"
            _log(f"copying [{sld_file[0]}] to [{_sld_file}]")
            try:
                shutil.copyfile(sld_file[0], _sld_file)
                sld_file = _sld_file
            except (IsADirectoryError, shutil.SameFileError) as e:
                logger.exception(e)
                sld_file = sld_file[0]
            except Exception as e:
                raise UploadException.from_exc(_('Error uploading Dataset'), e)
        sld_uploaded = True
    else:
        # get_files will not find the SLD if it doesn't match the base name, so
        # we've worked around that in the view - if provided, it will be here
        if upload_session.import_sld_file:
            _log('using provided sld file')
            base_file = upload_session.base_file
            sld_file = base_file[0].sld_files[0]
        sld_uploaded = False
    _log(f'[sld_uploaded: {sld_uploaded}] sld_file: {sld_file}')

    if upload_session.time_info:
        set_time_info(saved_layer, **upload_session.time_info)

    # Set default permissions on the newly created layer and send notifications
    permissions = upload_session.permissions
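    # geoserver_finalize_upload is dispatched asynchronously (Celery); it gets the
    # importer session id, the new layer id, the requested permissions, the
    # `created` flag, the extracted XML/SLD files and whether an SLD was uploaded,
    # plus the upload tempdir for any final cleanup.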
    geoserver_finalize_upload.apply_async(
        (import_session.id, saved_layer.id, permissions, created,
         xml_file, sld_file, sld_uploaded, upload_session.tempdir))

    saved_layer = utils.metadata_storers(saved_layer, custom)

    return saved_layer
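
# Usage sketch (illustrative, not part of the original example): a minimal,
# hypothetical driver showing how final_step might be invoked once the earlier
# steps have configured the GeoServer import session. `upload_session` and
# `user` are assumed to come from the surrounding upload workflow.
def run_final_step_example(upload_session, user):
    try:
        # final_step commits the import session and creates the Django record
        saved_layer = final_step(upload_session, user, charset="UTF-8")
    except (LayerNotReady, UploadException) as e:
        # final_step has already invalidated the Upload; just surface the error
        logger.error(e)
        raise
    _log(f'Finalized upload for [{saved_layer.alternate}]')
    return saved_layer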