Example #1
# NOTE: this function is an excerpt from GeoNode's upload pipeline; the
# imports below cover the standard-library and third-party names used in the
# body. Upload, Dataset, enumerations, resource_manager, gs_catalog,
# parse_metadata, TIME_REGEX_FORMAT, get_dataset_storetype, the exception
# classes and the _log/logger helpers come from GeoNode-internal modules and
# are assumed to be importable from the surrounding package.
import datetime
import os
import shutil
import zipfile

import gsimporter
import pytz
from django.db import transaction
from django.utils.translation import gettext as _


def final_step(upload_session, user, charset="UTF-8", dataset_id=None):
    import_session = upload_session.import_session
    import_id = import_session.id

    _log(f'Reloading session {import_id} to check validity')
    try:
        import_session = import_session.reload()
    except gsimporter.api.NotFound as e:
        Upload.objects.invalidate_from_session(upload_session)
        raise UploadException.from_exc(
            _("The GeoServer Import Session is no more available"), e)

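    # If an Upload record for this import already exists, flag it as not yet
    # complete, and bail out early when another worker is still running it.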
    upload_qs = Upload.objects.filter(import_id=import_id)
    if upload_qs.exists():
        upload_qs.update(complete=False)
        upload = upload_qs.get()
        if upload.state == enumerations.STATE_RUNNING:
            return

    upload_session.import_session = import_session
    Upload.objects.update_from_session(upload_session)

    # Create the style and assign it to the created resource
    # FIXME: Put this in gsconfig.py
    task = import_session.tasks[0]
    task.set_charset(charset)

    # @todo see above in save_step, regarding computed unique name
    name = task.layer.name

    if dataset_id:
        name = Dataset.objects.get(resourcebase_ptr_id=dataset_id).name

    _log(f'Getting from catalog [{name}]')
    try:
        # The importer chooses an available feature type name late in the
        # game, so we need to verify the resource name here; otherwise things
        # will fail. This happens when the same data is uploaded a second
        # time and the default name is already taken.
        gs_catalog.get_layer(name)
    except Exception:
        Upload.objects.invalidate_from_session(upload_session)
        raise LayerNotReady(
            _(f"Expected to find layer named '{name}' in geoserver"))

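    # Importer state machine: commit when the session is READY (or PENDING
    # with a READY task); an INCOMPLETE session whose task is not in ERROR
    # is unexpected and aborts the upload.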
    if import_session.state == 'READY' or (import_session.state == 'PENDING'
                                           and task.state == 'READY'):
        import_session.commit()
    elif import_session.state == 'INCOMPLETE' and task.state != 'ERROR':
        Upload.objects.invalidate_from_session(upload_session)
        raise Exception(f'unknown item state: {task.state}')
    try:
        import_session = import_session.reload()
    except gsimporter.api.NotFound as e:
        Upload.objects.invalidate_from_session(upload_session)
        raise UploadException.from_exc(
            _("The GeoServer Import Session is no more available"), e)
    upload_session.import_session = import_session
    Upload.objects.update_from_session(upload_session)

    _log(f'Creating Django record for [{name}]')
    target = task.target
    alternate = task.get_target_layer_name()
    dataset_uuid = None
    title = upload_session.dataset_title
    abstract = upload_session.dataset_abstract

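    # If an XML metadata sidecar was uploaded with the data, parse it to
    # pre-populate the resource's identifier, fields, regions and keywords.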
    metadata_uploaded = False
    xml_file = upload_session.base_file[0].xml_files
    if xml_file:
        try:
            # get model properties from XML
            # If it's contained within a zip, need to extract it
            if upload_session.base_file.archive:
                archive = upload_session.base_file.archive
                zf = zipfile.ZipFile(archive, 'r', allowZip64=True)
                zf.extract(xml_file[0], os.path.dirname(archive))
                # Assign the absolute path to this file
                xml_file = f"{os.path.dirname(archive)}/{xml_file[0]}"

            # Sanity checks
            if isinstance(xml_file, list):
                if len(xml_file) > 0:
                    xml_file = xml_file[0]
                else:
                    xml_file = None
            elif not isinstance(xml_file, str):
                xml_file = None

            if xml_file and os.path.exists(xml_file) and os.access(
                    xml_file, os.R_OK):
                with open(xml_file) as f:
                    dataset_uuid, vals, regions, keywords, custom = \
                        parse_metadata(f.read())
                metadata_uploaded = True
        except Exception as e:
            Upload.objects.invalidate_from_session(upload_session)
            logger.error(e)
            raise GeoNodeException(
                _("Exception occurred while parsing the provided metadata file."), e)

    # look for SLD
    sld_file = upload_session.base_file[0].sld_files
    sld_uploaded = False
    if sld_file:
        # If it's contained within a zip, need to extract it
        if upload_session.base_file.archive:
            archive = upload_session.base_file.archive
            logger.debug(f'using uploaded sld file from {archive}')
            zf = zipfile.ZipFile(archive, 'r', allowZip64=True)
            # zipfile's extract() already takes the destination directory as
            # its second argument, so it must not also be passed as `path=`
            zf.extract(sld_file[0], os.path.dirname(archive))
            # Assign the absolute path to this file
            sld_file[0] = f"{os.path.dirname(archive)}/{sld_file[0]}"
        else:
            _sld_file = f"{os.path.dirname(upload_session.tempdir)}/{os.path.basename(sld_file[0])}"
            logger.error(f"copying [{sld_file[0]}] to [{_sld_file}]")
            try:
                shutil.copyfile(sld_file[0], _sld_file)
                sld_file = _sld_file
            except (IsADirectoryError, shutil.SameFileError) as e:
                logger.exception(e)
                sld_file = sld_file[0]
            except Exception as e:
                raise UploadException.from_exc(_('Error uploading Dataset'), e)
        sld_uploaded = True
    else:
        # get_files will not find the SLD if it doesn't match the base name,
        # so we've worked around that in the view: if one was provided, it
        # will be available here
        if upload_session.import_sld_file:
            logger.debug('using provided sld file from importer')
            base_file = upload_session.base_file
            sld_file = base_file[0].sld_files[0]
    logger.debug(f'[sld_uploaded: {sld_uploaded}] sld_file: {sld_file}')

    # Make sure the layer does not already exist
    if dataset_uuid and Dataset.objects.filter(uuid=dataset_uuid).count():
        Upload.objects.invalidate_from_session(upload_session)
        logger.error(
            "The UUID identifier from the XML Metadata is already in use in this system."
        )
        raise GeoNodeException(
            _("The UUID identifier from the XML Metadata is already in use in this system."
              ))

    # Is it a regular file or an ImageMosaic?
    saved_dataset = None
    has_time = has_elevation = False
    start = end = None
    if upload_session.mosaic_time_regex and upload_session.mosaic_time_value:
        has_elevation = True
        start = datetime.datetime.strptime(
            upload_session.mosaic_time_value,
            TIME_REGEX_FORMAT[upload_session.mosaic_time_regex])
        start = pytz.utc.localize(start, is_dst=False)
        end = start
    if upload_session.time and upload_session.time_info and upload_session.time_transforms:
        has_time = True

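    # Appending to an existing mosaic only widens its temporal extent; any
    # other upload resolves, or creates, the matching Dataset record.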
    if upload_session.append_to_mosaic_opts:
        try:
            with transaction.atomic():
                saved_dataset_filter = Dataset.objects.filter(
                    name=upload_session.append_to_mosaic_name)
                if not saved_dataset_filter.exists():
                    saved_dataset = resource_manager.create(
                        name=upload_session.append_to_mosaic_name)
                    created = True
                else:
                    saved_dataset = saved_dataset_filter.get()
                    created = False
                saved_dataset.set_dirty_state()
                if saved_dataset.temporal_extent_start and end:
                    if pytz.utc.localize(saved_dataset.temporal_extent_start,
                                         is_dst=False) < end:
                        saved_dataset.temporal_extent_end = end
                        Dataset.objects.filter(
                            name=upload_session.append_to_mosaic_name).update(
                                temporal_extent_end=end)
                    else:
                        saved_dataset.temporal_extent_start = end
                        Dataset.objects.filter(
                            name=upload_session.append_to_mosaic_name).update(
                                temporal_extent_start=end)
        except Exception as e:
            _log(
                f"There was an error updating the mosaic temporal extent: {str(e)}"
            )
    else:
        try:
            with transaction.atomic():
                saved_dataset_filter = Dataset.objects.filter(
                    store=target.name,
                    alternate=alternate,
                    workspace=target.workspace_name,
                    name=task.layer.name)
                if not saved_dataset_filter.exists():
                    saved_dataset = resource_manager.create(
                        dataset_uuid,
                        resource_type=Dataset,
                        defaults=dict(
                            store=target.name,
                            subtype=get_dataset_storetype(target.store_type),
                            alternate=alternate,
                            workspace=target.workspace_name,
                            title=title,
                            name=task.layer.name,
                            abstract=abstract or _('No abstract provided'),
                            owner=user,
                            temporal_extent_start=start,
                            temporal_extent_end=end,
                            is_mosaic=has_elevation,
                            has_time=has_time,
                            has_elevation=has_elevation,
                            time_regex=upload_session.mosaic_time_regex))
                    created = True
                else:
                    saved_dataset = saved_dataset_filter.get()
                    created = False
                saved_dataset.set_dirty_state()
        except Exception as e:
            Upload.objects.invalidate_from_session(upload_session)
            raise UploadException.from_exc(_('Error configuring Dataset'), e)

    assert saved_dataset

    if not created:
        return saved_dataset

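    # Finalize the new resource: link the Upload record, apply permissions,
    # metadata, style and time info, generate a thumbnail, and mark the
    # dataset processed; the dirty state is cleared whatever happens.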
    try:
        saved_dataset.set_dirty_state()
        with transaction.atomic():
            Upload.objects.update_from_session(upload_session,
                                               resource=saved_dataset)

            # Set default permissions on the newly created layer and send notifications
            permissions = upload_session.permissions

            # Finalize Upload
            resource_manager.set_permissions(None,
                                             instance=saved_dataset,
                                             permissions=permissions,
                                             created=created)
            resource_manager.update(None,
                                    instance=saved_dataset,
                                    xml_file=xml_file,
                                    metadata_uploaded=metadata_uploaded)
            resource_manager.exec('set_style',
                                  None,
                                  instance=saved_dataset,
                                  sld_uploaded=sld_uploaded,
                                  sld_file=sld_file,
                                  tempdir=upload_session.tempdir)
            resource_manager.exec('set_time_info',
                                  None,
                                  instance=saved_dataset,
                                  time_info=upload_session.time_info)
            resource_manager.set_thumbnail(None, instance=saved_dataset)
            saved_dataset.set_processing_state(enumerations.STATE_PROCESSED)
    except Exception as e:
        saved_dataset.set_processing_state(enumerations.STATE_INVALID)
        raise GeoNodeException(e)
    finally:
        saved_dataset.clear_dirty_state()

    try:
        logger.debug(
            f"... Cleaning up the temporary folders {upload_session.tempdir}")
        if upload_session.tempdir and os.path.exists(upload_session.tempdir):
            shutil.rmtree(upload_session.tempdir)
    except Exception as e:
        logger.warning(e)
    finally:
        Upload.objects.filter(import_id=import_id).update(complete=True)

    return saved_dataset
Example #2
def test_remoteStore_should_return_remote(self):
    el = get_dataset_storetype('remoteStore')
    self.assertEqual('remote', el)
Example #3
def test_datastore_should_return_vector(self):
    el = get_dataset_storetype('dataStore')
    self.assertEqual('vector', el)
Example #4
def test_coverageStore_should_return_raster(self):
    el = get_dataset_storetype('coverageStore')
    self.assertEqual('raster', el)
Example #5
def test_return_element_if_not_exists_in_the_subtypes(self):
    el = get_dataset_storetype('not-existing-type')
    self.assertEqual('not-existing-type', el)
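
Taken together, Examples #2-#5 pin down the contract of get_dataset_storetype: known GeoServer store types map onto GeoNode subtypes, and any unknown value passes through unchanged. Below is a minimal sketch that satisfies these tests, assuming a plain dictionary lookup; the upstream GeoNode implementation may differ.

# Mapping inferred from Examples #2-#5; an assumption, not the upstream source.
_STORE_SUBTYPES = {
    'dataStore': 'vector',
    'coverageStore': 'raster',
    'remoteStore': 'remote',
}


def get_dataset_storetype(element):
    # Fall back to the input itself when the store type has no mapping.
    return _STORE_SUBTYPES.get(element, element)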