Example #1
def csv_step(upload_session, lat_field, lng_field):
    import_session = upload_session.import_session
    task = import_session.tasks[0]

    transform = {'type': 'AttributesToPointGeometryTransform',
                 'latField': lat_field,
                 'lngField': lng_field,
                 }
    task.remove_transforms([transform], by_field='type', save=False)
    task.add_transforms([transform], save=False)
    task.save_transforms()
    try:
        import_session = import_session.reload()
    except gsimporter.api.NotFound as e:
        logger.exception(e)
        Upload.objects.invalidate_from_session(upload_session)
        raise GeneralUploadException(detail=_("The GeoServer Import Session is no longer available ") + e.args[0])
    upload_session.import_session = import_session
    Upload.objects.update_from_session(upload_session)
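A minimal usage sketch, assuming an upload_session whose Importer session was created from a CSV upload; the column names 'lat' and 'lon' are hypothetical:

# Hypothetical call: have the Importer build point geometries from the
# 'lat'/'lon' columns of the uploaded CSV.
csv_step(upload_session, lat_field='lat', lng_field='lon')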
Example #2
def srs_step(upload_session, source, target):
    import_session = upload_session.import_session
    task = import_session.tasks[0]
    if source:
        logger.debug('Setting SRS to %s', source)
        task.set_srs(source)

    transform = {'type': 'ReprojectTransform',
                 'source': source,
                 'target': target,
                 }
    task.remove_transforms([transform], by_field='type', save=False)
    task.add_transforms([transform], save=False)
    task.save_transforms()
    try:
        import_session = import_session.reload()
    except gsimporter.api.NotFound as e:
        logger.exception(e)
        Upload.objects.invalidate_from_session(upload_session)
        raise GeneralUploadException(detail=_("The GeoServer Import Session is no longer available ") + e.args[0])
    upload_session.import_session = import_session
    Upload.objects.update_from_session(upload_session)
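A minimal usage sketch, assuming the uploaded data needs its CRS declared; the EPSG codes below are illustrative:

# Hypothetical call: declare the source CRS and add a ReprojectTransform
# to Web Mercator, applied when the session is committed.
srs_step(upload_session, source='EPSG:4326', target='EPSG:3857')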
Example #3
File: utils.py Project: pasing/geonode
def import_imagemosaic_granules(spatial_files, append_to_mosaic_opts,
                                append_to_mosaic_name, mosaic_time_regex,
                                mosaic_time_value, time_presentation,
                                time_presentation_res,
                                time_presentation_default_value,
                                time_presentation_reference_value):

    # The very first step is to rename the granules by adding the selected
    #  regex match value to the filename.

    f = spatial_files[0].base_file
    dirname = os.path.dirname(f)
    basename = os.path.basename(f)
    head, tail = os.path.splitext(basename)

    if not mosaic_time_regex:
        mosaic_time_regex, mosaic_time_format = _get_time_regex(
            spatial_files, basename)

    # 0. A Time Regex is mandatory to validate the files
    if not mosaic_time_regex:
        raise GeneralUploadException(detail=_(
            "Could not find any valid Time Regex for the Mosaic files."))

    for spatial_file in spatial_files:
        f = spatial_file.base_file
        basename = os.path.basename(f)
        head, tail = os.path.splitext(basename)
        regexp = re.compile(mosaic_time_regex)
        match = regexp.match(head)
        if match and match.groups():
            mosaic_time_value = match.groups()[0]
            head = head.replace(match.groups()[0], '{mosaic_time_value}')
        if mosaic_time_value:
            dst_file = os.path.join(
                dirname,
                head.replace('{mosaic_time_value}', mosaic_time_value) + tail)
            os.rename(f, dst_file)
            spatial_file.base_file = dst_file

    # We use the GeoServer REST APIs in order to create the ImageMosaic
    #  and later add the granule through the GeoServer Importer.
    head = head.replace('{mosaic_time_value}', '')
    head = re.sub('^[^a-zA-Z]*|[^a-zA-Z]*$', '', head)

    # 1. Create a zip file containing the ImageMosaic .properties files
    # 1a. Let's check and prepare the DB based DataStore
    cat = gs_catalog
    workspace = cat.get_workspace(settings.DEFAULT_WORKSPACE)
    db = ogc_server_settings.datastore_db
    db_engine = 'postgis' if 'postgis' in db['ENGINE'] else db['ENGINE']

    if not db_engine == 'postgis':
        raise GeneralUploadException(
            detail=_("Unsupported DataBase for Mosaics!"))

    # dsname = ogc_server_settings.DATASTORE
    dsname = db['NAME']

    ds_exists = False
    try:
        ds = get_store(cat, dsname, workspace=workspace)
        ds_exists = (ds is not None)
    except FailedRequestError:
        ds = cat.create_datastore(dsname, workspace=workspace)
        db = ogc_server_settings.datastore_db
        db_engine = 'postgis' if 'postgis' in db['ENGINE'] else db['ENGINE']
        ds.connection_parameters.update({
            'validate connections': 'true',
            'max connections': '10',
            'min connections': '1',
            'fetch size': '1000',
            'host': db['HOST'],
            'port': db['PORT'] if isinstance(db['PORT'], str) else str(db['PORT']) or '5432',
            'database': db['NAME'],
            'user': db['USER'],
            'passwd': db['PASSWORD'],
            'dbtype': db_engine
        })
        cat.save(ds)
        ds = get_store(cat, dsname, workspace=workspace)
        ds_exists = (ds is not None)

    if not ds_exists:
        raise GeneralUploadException(
            detail=_("Unsupported DataBase for Mosaics!"))

    context = {
        "abs_path_flag": "True",
        "time_attr": "time",
        "aux_metadata_flag": "False",
        "mosaic_time_regex": mosaic_time_regex,
        "db_host": db['HOST'],
        "db_port": db['PORT'],
        "db_name": db['NAME'],
        "db_user": db['USER'],
        "db_password": db['PASSWORD'],
        "db_conn_timeout": db.get('CONN_TOUT', "10"),
        "db_conn_min": db.get('CONN_MIN', "1"),
        "db_conn_max": db.get('CONN_MAX', "5"),
        "db_conn_validate": db.get('CONN_VALIDATE', "true"),
    }

    indexer_template = """AbsolutePath={abs_path_flag}
Schema= the_geom:Polygon,location:String,{time_attr}
CheckAuxiliaryMetadata={aux_metadata_flag}
SuggestedSPI=it.geosolutions.imageioimpl.plugins.tiff.TIFFImageReaderSpi"""
    if mosaic_time_regex:
        indexer_template = """AbsolutePath={abs_path_flag}
TimeAttribute={time_attr}
Schema= the_geom:Polygon,location:String,{time_attr}:java.util.Date
PropertyCollectors=TimestampFileNameExtractorSPI[timeregex]({time_attr})
CheckAuxiliaryMetadata={aux_metadata_flag}
SuggestedSPI=it.geosolutions.imageioimpl.plugins.tiff.TIFFImageReaderSpi"""

        timeregex_template = """regex=(?<=_)({mosaic_time_regex})"""

        if not os.path.exists(f"{dirname}/timeregex.properties"):
            with open(f"{dirname}/timeregex.properties",
                      'w') as timeregex_prop_file:
                timeregex_prop_file.write(timeregex_template.format(**context))

    datastore_template = r"""SPI=org.geotools.data.postgis.PostgisNGDataStoreFactory
host={db_host}
port={db_port}
database={db_name}
user={db_user}
passwd={db_password}
Loose\ bbox=true
Estimated\ extends=false
validate\ connections={db_conn_validate}
Connection\ timeout={db_conn_timeout}
min\ connections={db_conn_min}
max\ connections={db_conn_max}"""

    if not os.path.exists(f"{dirname}/indexer.properties"):
        with open(f"{dirname}/indexer.properties", 'w') as indexer_prop_file:
            indexer_prop_file.write(indexer_template.format(**context))

    if not os.path.exists(f"{dirname}/datastore.properties"):
        with open(f"{dirname}/datastore.properties",
                  'w') as datastore_prop_file:
            datastore_prop_file.write(datastore_template.format(**context))

    files_to_upload = []
    if not append_to_mosaic_opts and spatial_files:
        z = zipfile.ZipFile(f"{dirname}/{head}.zip", "w", allowZip64=True)
        for spatial_file in spatial_files:
            f = spatial_file.base_file
            dst_basename = os.path.basename(f)
            dst_head, dst_tail = os.path.splitext(dst_basename)
            if not files_to_upload:
                # Let's import only the first granule
                z.write(spatial_file.base_file, arcname=dst_head + dst_tail)
            files_to_upload.append(spatial_file.base_file)
        if os.path.exists(f"{dirname}/indexer.properties"):
            z.write(f"{dirname}/indexer.properties",
                    arcname='indexer.properties')
        if os.path.exists(f"{dirname}/datastore.properties"):
            z.write(f"{dirname}/datastore.properties",
                    arcname='datastore.properties')
        if mosaic_time_regex:
            z.write(f"{dirname}/timeregex.properties",
                    arcname='timeregex.properties')
        z.close()

        # 2. Send a "create ImageMosaic" request to GeoServer through gs_config
        # - name = name of the ImageMosaic (equal to the base_name)
        # - data = abs path to the zip file
        # - configure = parameter allows for future configuration after harvesting
        name = head

        with open(f"{dirname}/{head}.zip", 'rb') as data:
            try:
                cat.create_imagemosaic(name, data)
            except ConflictingDataError:
                # Trying to append granules to an existing mosaic
                pass

        # configure time as LIST
        if mosaic_time_regex:
            set_time_dimension(cat, name, workspace, time_presentation,
                               time_presentation_res,
                               time_presentation_default_value,
                               time_presentation_reference_value)

        # - since GeoNode will upload the first granule again through the
        #   Importer, we need to delete the one created by the gs_config
        # mosaic_delete_first_granule(cat, name)
        if len(spatial_files) > 1:
            # keep only the first granule, preserving the list type
            spatial_files = spatial_files[:1]
        return head, files_to_upload
    else:
        cat._cache.clear()
        cat.reset()
        # cat.reload()
        return append_to_mosaic_name, files_to_upload
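The renaming step above is easiest to see in isolation. A self-contained sketch with a hypothetical daily-timestamp regex (re.search is used here so the sketch works regardless of where the timestamp sits in the name):

import os
import re

# Hypothetical granule name and time regex, for illustration only.
head, tail = os.path.splitext('granule_20200101.tif')
match = re.search('[0-9]{8}', head)
if match:
    templated = head.replace(match.group(0), '{mosaic_time_value}') + tail
    print(templated)  # granule_{mosaic_time_value}.tif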
Example #4
def final_step(upload_session, user, charset="UTF-8", dataset_id=None):
    import_session = upload_session.import_session
    import_id = import_session.id

    _log(f'Reloading session {import_id} to check validity')
    try:
        import_session = import_session.reload()
    except gsimporter.api.NotFound as e:
        logger.exception(e)
        Upload.objects.invalidate_from_session(upload_session)
        raise GeneralUploadException(detail=_("The GeoServer Import Session is no longer available ") + e.args[0])

    if Upload.objects.filter(import_id=import_id).count():
        Upload.objects.filter(import_id=import_id).update(complete=False)
        upload = Upload.objects.filter(import_id=import_id).get()
        if upload.state == enumerations.STATE_RUNNING:
            return

    upload_session.import_session = import_session
    Upload.objects.update_from_session(upload_session)

    # Create the style and assign it to the created resource
    # FIXME: Put this in gsconfig.py
    task = import_session.tasks[0]
    task.set_charset(charset)

    # @todo see above in save_step, regarding computed unique name
    name = task.layer.name

    if dataset_id:
        name = Dataset.objects.get(resourcebase_ptr_id=dataset_id).name

    _log(f'Getting from catalog [{name}]')
    try:
        # The importer chooses an available featuretype name late in the game,
        # so we need to verify the resource name here or things will fail.
        # This happens when the same data is uploaded a second time and the
        # default name is chosen.
        gs_catalog.get_layer(name)
    except Exception:
        Upload.objects.invalidate_from_session(upload_session)
        raise LayerNotReady(
            _(f"Expected to find layer named '{name}' in geoserver"))

    if import_session.state == 'READY' or (import_session.state == 'PENDING' and task.state == 'READY'):
        import_session.commit()
    elif import_session.state == 'INCOMPLETE' and task.state != 'ERROR':
        Upload.objects.invalidate_from_session(upload_session)
        raise Exception(f'unknown item state: {task.state}')
    try:
        import_session = import_session.reload()
    except gsimporter.api.NotFound as e:
        logger.exception(e)
        Upload.objects.invalidate_from_session(upload_session)
        raise GeneralUploadException(detail=_("The GeoServer Import Session is no longer available ") + e.args[0])
    upload_session.import_session = import_session
    Upload.objects.update_from_session(upload_session)

    _log(f'Creating Django record for [{name}]')
    target = task.target
    alternate = task.get_target_layer_name()
    dataset_uuid = None
    title = upload_session.dataset_title
    abstract = upload_session.dataset_abstract

    metadata_uploaded = False
    xml_file = upload_session.base_file[0].xml_files
    if xml_file:
        try:
            # get model properties from XML
            # If it's contained within a zip, need to extract it
            if upload_session.base_file.archive:
                archive = upload_session.base_file.archive
                zf = zipfile.ZipFile(archive, 'r', allowZip64=True)
                zf.extract(xml_file[0], os.path.dirname(archive))
                # Assign the absolute path to this file
                xml_file = f"{os.path.dirname(archive)}/{xml_file[0]}"

            # Sanity checks
            if isinstance(xml_file, list):
                if len(xml_file) > 0:
                    xml_file = xml_file[0]
                else:
                    xml_file = None
            elif not isinstance(xml_file, str):
                xml_file = None

            if xml_file and os.path.exists(xml_file) and os.access(xml_file, os.R_OK):
                with open(xml_file) as xml_fh:
                    dataset_uuid, vals, regions, keywords, custom = parse_metadata(
                        xml_fh.read())
                metadata_uploaded = True
        except Exception as e:
            Upload.objects.invalidate_from_session(upload_session)
            logger.error(e)
            raise GeoNodeException(
                _("Exception occurred while parsing the provided Metadata file."), e)

    # look for SLD
    sld_file = upload_session.base_file[0].sld_files
    sld_uploaded = False
    if sld_file:
        # If it's contained within a zip, need to extract it
        if upload_session.base_file.archive:
            archive = upload_session.base_file.archive
            logger.debug(f'using uploaded sld file from {archive}')
            zf = zipfile.ZipFile(archive, 'r', allowZip64=True)
            zf.extract(sld_file[0], os.path.dirname(archive))
            # Assign the absolute path to this file
            sld_file[0] = f"{os.path.dirname(archive)}/{sld_file[0]}"
        else:
            _sld_file = f"{os.path.dirname(upload_session.tempdir)}/{os.path.basename(sld_file[0])}"
            logger.debug(f"copying [{sld_file[0]}] to [{_sld_file}]")
            try:
                shutil.copyfile(sld_file[0], _sld_file)
                sld_file = _sld_file
            except (IsADirectoryError, shutil.SameFileError) as e:
                logger.exception(e)
                sld_file = sld_file[0]
            except Exception as e:
                logger.exception(e)
                raise GeneralUploadException(detail=_('Error uploading Dataset') + e.args[0])
        sld_uploaded = True
    else:
        # get_files will not find the sld if it doesn't match the base name
        # so we've worked around that in the view - if provided, it will be here
        if upload_session.import_sld_file:
            logger.debug('using provided sld file from importer')
            base_file = upload_session.base_file
            sld_file = base_file[0].sld_files[0]
        sld_uploaded = False
    logger.debug(f'[sld_uploaded: {sld_uploaded}] sld_file: {sld_file}')

    # Make sure the layer does not already exist
    if dataset_uuid and Dataset.objects.filter(uuid=dataset_uuid).count():
        Upload.objects.invalidate_from_session(upload_session)
        logger.error("The UUID identifier from the XML Metadata is already in use in this system.")
        raise GeoNodeException(
            _("The UUID identifier from the XML Metadata is already in use in this system."))

    # Is it a regular file or an ImageMosaic?
    saved_dataset = None
    is_mosaic = False
    has_time = has_elevation = False
    start = end = None
    if upload_session.mosaic_time_regex and upload_session.mosaic_time_value:
        has_time = True
        is_mosaic = True
        start = datetime.datetime.strptime(upload_session.mosaic_time_value,
                                           TIME_REGEX_FORMAT[upload_session.mosaic_time_regex])
        start = pytz.utc.localize(start, is_dst=False)
        end = start
    if upload_session.time and upload_session.time_info and upload_session.time_transforms:
        has_time = True

    if upload_session.append_to_mosaic_opts:
        # Is it a mosaic or a granule that must be added to an Image Mosaic?
        saved_dataset_filter = Dataset.objects.filter(
            name=upload_session.append_to_mosaic_name)
        if not saved_dataset_filter.exists():
            saved_dataset = resource_manager.create(
                name=upload_session.append_to_mosaic_name,
                defaults=dict(
                    dirty_state=True,
                    state=enumerations.STATE_READY)
            )
            created = True
        else:
            saved_dataset = saved_dataset_filter.get()
            created = False
        saved_dataset.set_dirty_state()
        if saved_dataset.temporal_extent_start and end:
            if pytz.utc.localize(
                    saved_dataset.temporal_extent_start,
                    is_dst=False) < end:
                saved_dataset.temporal_extent_end = end
                Dataset.objects.filter(
                    name=upload_session.append_to_mosaic_name).update(
                    temporal_extent_end=end)
            else:
                saved_dataset.temporal_extent_start = end
                Dataset.objects.filter(
                    name=upload_session.append_to_mosaic_name).update(
                    temporal_extent_start=end)
    else:
        # The dataset is a standard one, no mosaic options enabled...
        saved_dataset_filter = Dataset.objects.filter(
            store=target.name,
            alternate=alternate,
            workspace=target.workspace_name,
            name=task.layer.name)
        if not saved_dataset_filter.exists():
            saved_dataset = resource_manager.create(
                dataset_uuid,
                resource_type=Dataset,
                defaults=dict(
                    store=target.name,
                    subtype=get_dataset_storetype(target.store_type),
                    alternate=alternate,
                    workspace=target.workspace_name,
                    title=title,
                    name=task.layer.name,
                    abstract=abstract or _('No abstract provided'),
                    owner=user,
                    dirty_state=True,
                    state=enumerations.STATE_READY,
                    temporal_extent_start=start,
                    temporal_extent_end=end,
                    is_mosaic=is_mosaic,
                    has_time=has_time,
                    has_elevation=has_elevation,
                    time_regex=upload_session.mosaic_time_regex))
            created = True
        else:
            saved_dataset = saved_dataset_filter.get()
            created = False

    assert saved_dataset

    if not created:
        return saved_dataset

    try:
        # Update the state from session...
        Upload.objects.update_from_session(upload_session, resource=saved_dataset)

        # Hide the dataset until the upload process finishes...
        saved_dataset.set_dirty_state()

        # Finalize the upload...
        with transaction.atomic():
            # Set default permissions on the newly created layer and send notifications
            permissions = upload_session.permissions

            # Finalize Upload
            resource_manager.set_permissions(
                None, instance=saved_dataset, permissions=permissions, created=created)
            resource_manager.update(
                None, instance=saved_dataset, xml_file=xml_file, metadata_uploaded=metadata_uploaded)
            resource_manager.exec(
                'set_style', None, instance=saved_dataset, sld_uploaded=sld_uploaded, sld_file=sld_file, tempdir=upload_session.tempdir)
            resource_manager.exec(
                'set_time_info', None, instance=saved_dataset, time_info=upload_session.time_info)
            resource_manager.set_thumbnail(
                None, instance=saved_dataset)

            Upload.objects.filter(resource=saved_dataset.get_self_resource()).update(complete=True)
            Upload.objects.get(resource=saved_dataset.get_self_resource()).set_processing_state(enumerations.STATE_PROCESSED)
    except Exception as e:
        raise GeoNodeException(e)
    finally:
        if upload.state in (enumerations.STATE_PROCESSED, enumerations.STATE_INVALID):
            # Get rid of temporary files that have been uploaded via the Upload form
            logger.debug(f"... Cleaning up the temporary folders {upload_session.tempdir}")
            shutil.rmtree(upload_session.tempdir, ignore_errors=True)

    return saved_dataset
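A minimal sketch of the final call, assuming every earlier step (save, srs/csv, time) has already run against the same upload_session and that user is the uploading Django user:

# Hypothetical call: commits the Importer session, creates or updates the
# Django Dataset record, then applies permissions, style and thumbnail.
saved_dataset = final_step(upload_session, user, charset='UTF-8')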
Example #5
def time_step(upload_session, time_attribute, time_transform_type,
              presentation_strategy, precision_value, precision_step,
              end_time_attribute=None,
              end_time_transform_type=None,
              end_time_format=None,
              time_format=None):
    '''
    time_attribute - name of attribute to use as time

    time_transform_type - name of transform. either
    DateFormatTransform or IntegerFieldToDateTransform

    time_format - optional string format
    end_time_attribute - optional name of attribute to use as end time

    end_time_transform_type - optional name of transform. either
    DateFormatTransform or IntegerFieldToDateTransform

    end_time_format - optional string format
    presentation_strategy - LIST, DISCRETE_INTERVAL, CONTINUOUS_INTERVAL
    precision_value - number
    precision_step - year, month, day, week, etc.
    '''
    transforms = []

    def build_time_transform(att, type, format, end_time_attribute, presentation_strategy):
        trans = {'type': type, 'field': att}
        if format:
            trans['format'] = format
        if end_time_attribute:
            trans['enddate'] = end_time_attribute
        if presentation_strategy:
            trans['presentation'] = presentation_strategy
        return trans

    def build_att_remap_transform(att):
        # @todo the target is so ugly it should be obvious
        return {'type': 'AttributeRemapTransform',
                'field': att,
                'target': 'org.geotools.data.postgis.PostGISDialect$XDate'}

    use_big_date = getattr(
        settings,
        'USE_BIG_DATE',
        False)

    if time_attribute:
        if time_transform_type:
            transforms.append(
                build_time_transform(
                    time_attribute,
                    time_transform_type,
                    time_format,
                    end_time_attribute,
                    presentation_strategy
                )
            )

        # this must go after the remapping transform to ensure the
        # type change is applied
        if use_big_date:
            transforms.append(build_att_remap_transform(time_attribute))
            if end_time_attribute:

                transforms.append(
                    build_att_remap_transform(end_time_attribute)
                )

        transforms.append({
            'type': 'CreateIndexTransform',
            'field': time_attribute
        })
        # the time_info will be used in the last step to configure the
        # layer in geoserver - the dict matches the arguments of the
        # set_time_info helper function
        upload_session.time_info = dict(
            attribute=time_attribute,
            end_attribute=end_time_attribute,
            presentation=presentation_strategy,
            precision_value=precision_value,
            precision_step=precision_step
        )

    if upload_session.time_transforms:
        upload_session.import_session.tasks[0].remove_transforms(
            upload_session.time_transforms
        )

    if transforms:
        logger.debug(f'Setting transforms {transforms}')
        try:
            # add_transforms/save_transforms are the calls that can actually
            # raise BadRequest, so they belong inside the try block
            upload_session.import_session.tasks[0].add_transforms(transforms)
            upload_session.import_session.tasks[0].save_transforms()
            upload_session.time_transforms = transforms
            upload_session.time = True
        except gsimporter.BadRequest as br:
            logger.exception(br)
            Upload.objects.invalidate_from_session(upload_session)
            raise GeneralUploadException(detail=_('Error configuring time: ') + str(br))
    else:
        upload_session.time = False
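A minimal usage sketch; the attribute name 'event_date' and the date format are hypothetical:

# Hypothetical call: expose 'event_date' as the time dimension,
# presented as a LIST of distinct values at day precision.
time_step(
    upload_session,
    time_attribute='event_date',
    time_transform_type='DateFormatTransform',
    presentation_strategy='LIST',
    precision_value=1,
    precision_step='day',
    time_format='yyyyMMdd',
)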
Example #6
def save_step(user, layer, spatial_files, overwrite=True, store_spatial_files=True,
              mosaic=False, append_to_mosaic_opts=None, append_to_mosaic_name=None,
              mosaic_time_regex=None, mosaic_time_value=None,
              time_presentation=None, time_presentation_res=None,
              time_presentation_default_value=None,
              time_presentation_reference_value=None,
              charset_encoding="UTF-8", target_store=None):
    logger.debug(
        f'Uploading layer: {layer}, files {spatial_files}')
    if len(spatial_files) > 1:
        # we only support more than one file if they're rasters for mosaicing
        if not all(
                [f.file_type.dataset_type == 'coverage' for f in spatial_files]):
            msg = "Please upload only one type of file at a time"
            logger.exception(Exception(msg))
            raise GeneralUploadException(detail=msg)
    name = get_valid_dataset_name(layer, overwrite)
    logger.debug(f'Name for layer: {name}')
    if not any(spatial_files.all_files()):
        msg = "Unable to recognize the uploaded file(s)"
        logger.exception(Exception(msg))
        raise GeneralUploadException(detail=msg)
    the_dataset_type = get_dataset_type(spatial_files)
    _check_geoserver_store(name, the_dataset_type, overwrite)
    if the_dataset_type not in (
            FeatureType.resource_type,
            Coverage.resource_type):
        msg = f"Expected layer type to FeatureType or Coverage, not {the_dataset_type}"
        logger.exception(Exception(msg))
        raise GeneralUploadException(msg)
    files_to_upload = preprocess_files(spatial_files)
    logger.debug(f"files_to_upload: {files_to_upload}")
    logger.debug(f'Uploading {the_dataset_type}')
    error_msg = None
    try:
        next_id = _get_next_id()
        # Truncate name to maximum length defined by the field.
        max_length = Upload._meta.get_field('name').max_length
        name = name[:max_length]
        # save record of this whether valid or not - will help w/ debugging
        upload, _ = Upload.objects.get_or_create(
            user=user,
            name=name,
            state=enumerations.STATE_READY,
            upload_dir=spatial_files.dirname
        )
        upload.store_spatial_files = store_spatial_files

        # @todo settings for use_url or auto detection if geoserver is
        # on same host

        # Is it a regular file or an ImageMosaic?
        # if mosaic_time_regex and mosaic_time_value:
        if mosaic:  # we want to ingest as ImageMosaic
            target_store, files_to_upload = utils.import_imagemosaic_granules(
                spatial_files,
                append_to_mosaic_opts,
                append_to_mosaic_name,
                mosaic_time_regex,
                mosaic_time_value,
                time_presentation,
                time_presentation_res,
                time_presentation_default_value,
                time_presentation_reference_value)
            upload.mosaic = mosaic
            upload.append_to_mosaic_opts = append_to_mosaic_opts
            upload.append_to_mosaic_name = append_to_mosaic_name
            upload.mosaic_time_regex = mosaic_time_regex
            upload.mosaic_time_value = mosaic_time_value
            # moving forward with a regular Importer session; skip the first
            # granule, which was already ingested when the mosaic was created
            _files = files_to_upload[1:] if len(files_to_upload) > 1 else files_to_upload
            import_session = gs_uploader.upload_files(
                _files,
                use_url=False,
                # import_id=next_id,
                target_store=target_store,
                charset_encoding=charset_encoding
            )
            next_id = import_session.id if import_session else None
            if not next_id:
                error_msg = 'No valid Importer Session could be found'
        else:
            # moving forward with a regular Importer session
            import_session = gs_uploader.upload_files(
                files_to_upload,
                use_url=False,
                import_id=next_id,
                mosaic=False,
                target_store=target_store,
                name=name,
                charset_encoding=charset_encoding
            )
        upload.import_id = import_session.id
        upload.save()

        # any unrecognized tasks/files must be deleted or we can't proceed
        import_session.delete_unrecognized_tasks()

        if not mosaic:
            if not import_session.tasks:
                error_msg = 'No valid upload files could be found'
        if import_session.tasks:
            if import_session.tasks[0].state in ('NO_FORMAT', 'BAD_FORMAT'):
                error_msg = 'There may be a problem with the data provided - ' \
                            'we could not identify its format'

        if not mosaic and len(import_session.tasks) > 1:
            error_msg = "Only a single upload is supported at the moment"

        if not error_msg and import_session.tasks:
            task = import_session.tasks[0]
            # single file tasks will have just a file entry
            if hasattr(task, 'files'):
                # @todo gsimporter - test this
                if not all([hasattr(f, 'timestamp')
                            for f in task.source.files]):
                    error_msg = (
                        "Not all timestamps could be recognized. "
                        "Please ensure your files contain the correct formats.")

        if error_msg:
            upload.set_processing_state(enumerations.STATE_INVALID)

        # @todo once the random tmp9723481758915 type of name is not
        # around, need to track the name computed above, for now, the
        # target store name can be used
    except Exception as e:
        logger.exception(e)
        raise e

    if error_msg:
        logger.exception(Exception(error_msg))
        raise GeneralUploadException(detail=error_msg)
    else:
        _log("Finished upload of [%s] to GeoServer without errors.", name)

    return import_session, upload
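A minimal usage sketch for a plain (non-mosaic) upload, assuming spatial_files was produced by the project's file scanning step; the layer name 'roads' is hypothetical:

# Hypothetical call: starts a GeoServer Importer session for the files
# and returns it together with the Upload tracking record.
import_session, upload = save_step(
    user,
    layer='roads',
    spatial_files=spatial_files,
    overwrite=False,
)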
Example #7
File: views.py Project: pasing/geonode
def view(req, step=None):
    """Main uploader view"""

    config = Configuration.load()
    if config.read_only or config.maintenance:
        raise AuthenticationFailed()

    upload_session = None
    upload_id = req.GET.get('id', None)

    if step is None:
        if upload_id:
            # upload recovery
            upload_obj = get_object_or_404(
                Upload,
                import_id=upload_id,
                user=req.user)
            session = upload_obj.get_session
            if session:
                return next_step_response(req, session)
        step = 'save'

        # delete existing session
        if upload_id and upload_id in req.session:
            del req.session[upload_id]
            req.session.modified = True
    else:
        if not upload_id:
            return render(
                req,
                "upload/dataset_upload_invalid.html",
                context={})

        upload_obj = get_object_or_404(
            Upload, import_id=upload_id, user=req.user)
        session = upload_obj.get_session
        try:
            if session:
                upload_session = session
            else:
                upload_session = _get_upload_session(req)
        except Exception as e:
            logger.exception(e)
    try:
        if req.method == 'GET' and upload_session:
            # set the current step to match the requested page - this
            # could happen if the form is ajax w/ progress monitoring as
            # the advance would have already happened @hacky
            _completed_step = upload_session.completed_step
            try:
                _completed_step = get_previous_step(
                    upload_session,
                    step)
                upload_session.completed_step = _completed_step
            except Exception as e:
                logger.exception(e)
                if isinstance(e, APIException):
                    raise e
                raise GeneralUploadException(detail=traceback.format_exc())

        resp = _steps[step](req, upload_session)
        resp_js = None
        try:
            if 'json' in resp.headers.get('Content-Type', ''):
                content = resp.content
                if isinstance(content, bytes):
                    content = content.decode('UTF-8')
                resp_js = json.loads(content)
        except Exception as e:
            logger.exception(e)
            if isinstance(e, APIException):
                raise e
            raise GeneralUploadException(detail=traceback.format_exc())

        # must be put back to update object in session
        if upload_session:
            if resp_js and step == 'final':
                try:
                    delete_session = resp_js.get('status') != 'pending'
                    if delete_session:
                        # we're done with this session, wax it
                        upload_session = None
                        del req.session[upload_id]
                        req.session.modified = True
                except Exception:
                    pass
        else:
            upload_session = _get_upload_session(req)
        if upload_session:
            Upload.objects.update_from_session(upload_session)
        if resp_js:
            _success = resp_js.get('success', False)
            _redirect_to = resp_js.get('redirect_to', '')
            _required_input = resp_js.get('required_input', False)
            if _success and (_required_input or 'upload/final' in _redirect_to):
                from geonode.upload.tasks import finalize_incomplete_session_uploads
                finalize_incomplete_session_uploads.apply_async()
        return resp
    except BadStatusLine:
        logger.exception('bad status line, geoserver down?')
        raise GeneralUploadException(detail=_geoserver_down_error_msg)
    except gsimporter.RequestFailed as e:
        logger.exception(e)
        errors = e.args
        # http bad gateway or service unavailable
        if int(errors[0]) in (502, 503):
            errors = [_geoserver_down_error_msg]
        raise GeneralUploadException(detail=errors)
    except gsimporter.BadRequest as e:
        logger.exception(e)
        raise GeneralUploadException(detail=e.args[0])
    except Exception as e:
        logger.exception(e)
        if isinstance(e, APIException):
            raise e
        raise GeneralUploadException(detail=traceback.format_exc())
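A sketch of how a step-based view like this could be routed in a Django URLconf; the prefix and pattern are illustrative, not the project's actual urls.py:

# Hypothetical routing: the optional 'step' segment selects the handler
# from the _steps mapping inside view().
from django.urls import re_path

urlpatterns = [
    re_path(r'^upload/(?P<step>\w+)?$', view, name='data_upload'),
]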
Example #8
File: views.py Project: pasing/geonode
def time_step_view(request, upload_session):
    if not upload_session:
        upload_session = _get_upload_session(request)
    import_session = upload_session.import_session
    assert import_session is not None

    force_ajax = '&force_ajax=true' if request and 'force_ajax' in request.GET and request.GET['force_ajax'] == 'true' else ''
    if request.method == 'GET':
        layer = check_import_session_is_valid(
            request, upload_session, import_session)
        if layer:
            (has_time_dim, dataset_values) = dataset_eligible_for_time_dimension(request, layer, upload_session=upload_session)
            if has_time_dim and dataset_values:
                upload_session.completed_step = 'check'
                if not force_ajax:
                    context = {
                        'time_form': create_time_form(request, upload_session, None),
                        'dataset_name': layer.name,
                        'dataset_values': dataset_values,
                        'dataset_attributes': list(dataset_values[0].keys()),
                        'async_upload': is_async_step(upload_session)
                    }
                    return render(request, 'upload/dataset_upload_time.html', context=context)
            else:
                upload_session.completed_step = 'time' if _ALLOW_TIME_STEP else 'check'
        return next_step_response(request, upload_session)
    elif request.method != 'POST':
        raise Exception()

    form = create_time_form(request, upload_session, request.POST)
    if not form.is_valid():
        logger.exception('Invalid upload form: %s', form.errors)
        raise GeneralUploadException(detail="Invalid Submission")

    cleaned = form.cleaned_data
    start_attribute_and_type = cleaned.get('start_attribute', None)
    if upload_session.time_transforms:
        upload_session.import_session.tasks[0].remove_transforms(
            upload_session.time_transforms,
            save=True
        )
        upload_session.import_session.tasks[0].save_transforms()
        upload_session.time_transforms = None

    if upload_session.import_session.tasks[0].transforms:
        for transform in upload_session.import_session.tasks[0].transforms:
            if str(transform.get('type')) in ('DateFormatTransform',
                                              'CreateIndexTransform'):
                upload_session.import_session.tasks[0].remove_transforms(
                    [transform],
                    save=True
                )
                upload_session.import_session.tasks[0].save_transforms()

    try:
        upload_session.import_session = import_session.reload()
    except gsimporter.api.NotFound as e:
        logger.exception(e)
        Upload.objects.invalidate_from_session(upload_session)
        raise GeneralUploadException(detail=_("The GeoServer Import Session is no longer available ") + e.args[0])

    if start_attribute_and_type:
        def tx(type_name):
            # return None if type_name is None or type_name == 'Date' \
            return None if type_name is None \
                else 'DateFormatTransform'
        end_attribute, end_type = cleaned.get('end_attribute', (None, None))
        time_step(
            upload_session,
            time_attribute=start_attribute_and_type[0],
            time_transform_type=tx(start_attribute_and_type[1]),
            time_format=cleaned.get('attribute_format', None),
            end_time_attribute=end_attribute,
            end_time_transform_type=tx(end_type),
            end_time_format=cleaned.get('end_attribute_format', None),
            presentation_strategy=cleaned['presentation_strategy'],
            precision_value=cleaned['precision_value'],
            precision_step=cleaned['precision_step'],
        )

    upload_session.completed_step = 'check'
    return next_step_response(request, upload_session)
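For orientation, the cleaned form data this view consumes looks roughly like the dict below; all values are illustrative, and 'start_attribute'/'end_attribute' arrive as (attribute_name, attribute_type) pairs:

# Illustrative shape only, not produced by real code.
cleaned = {
    'start_attribute': ('event_date', 'Date'),
    'end_attribute': (None, None),
    'attribute_format': 'yyyyMMdd',
    'end_attribute_format': None,
    'presentation_strategy': 'LIST',
    'precision_value': 1,
    'precision_step': 'day',
}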
Example #9
File: views.py Project: pasing/geonode
def save_step_view(req, session):
    form = LayerUploadForm(req.POST, req.FILES, user=req.user)

    overwrite = req.path_info.endswith('/replace')
    target_store = None
    if form.is_valid():
        logger.debug(f"valid_extensions: {form.cleaned_data['valid_extensions']}")
        data_retriever = form.cleaned_data["data_retriever"]
        relevant_files = select_relevant_files(
            form.cleaned_data["valid_extensions"],
            data_retriever.get_paths(allow_transfer=False).values()
        )
        logger.debug(f"relevant_files: {relevant_files}")
        base_file = data_retriever.get("base_file").get_path(allow_transfer=False)
        name, ext = os.path.splitext(os.path.basename(base_file))
        logger.debug(f'Name: {name}, ext: {ext}')
        logger.debug(f"base_file: {base_file}")
        scan_hint = get_scan_hint(form.cleaned_data["valid_extensions"])
        spatial_files = scan_file(
            base_file,
            scan_hint=scan_hint,
            charset=form.cleaned_data["charset"]
        )
        logger.debug(f"spatial_files: {spatial_files}")

        if overwrite:
            dataset = Dataset.objects.filter(id=req.GET['dataset_id'])
            if dataset.exists():
                name = dataset.first().name
                target_store = dataset.first().store

        import_session, upload = save_step(
            req.user,
            name,
            spatial_files,
            overwrite=overwrite,
            store_spatial_files=form.cleaned_data.get('store_spatial_files', True),
            mosaic=form.cleaned_data['mosaic'] or scan_hint == 'zip-mosaic',
            append_to_mosaic_opts=form.cleaned_data['append_to_mosaic_opts'],
            append_to_mosaic_name=form.cleaned_data['append_to_mosaic_name'],
            mosaic_time_regex=form.cleaned_data['mosaic_time_regex'],
            mosaic_time_value=form.cleaned_data['mosaic_time_value'],
            time_presentation=form.cleaned_data['time_presentation'],
            time_presentation_res=form.cleaned_data['time_presentation_res'],
            time_presentation_default_value=form.cleaned_data['time_presentation_default_value'],
            time_presentation_reference_value=form.cleaned_data['time_presentation_reference_value'],
            charset_encoding=form.cleaned_data["charset"],
            target_store=target_store
        )
        import_session.tasks[0].set_charset(form.cleaned_data["charset"])
        sld = None
        if spatial_files[0].sld_files:
            sld = spatial_files[0].sld_files[0]
        if os.path.exists(data_retriever.temporary_folder):
            tmp_files = [f for f in data_retriever.get_paths().values() if os.path.exists(f)]
            for f in tmp_files:
                if zipfile.is_zipfile(os.path.join(data_retriever.temporary_folder, f)):
                    fixup_shp_columnnames(os.path.join(data_retriever.temporary_folder, f),
                                          form.cleaned_data["charset"],
                                          tempdir=data_retriever.temporary_folder)

        _log(f'provided sld is {sld}')
        # upload_type = get_upload_type(base_file)
        upload_session = UploaderSession(
            tempdir=data_retriever.temporary_folder,
            base_file=spatial_files,
            name=upload.name,
            charset=form.cleaned_data["charset"],
            import_session=import_session,
            dataset_abstract=form.cleaned_data["abstract"],
            dataset_title=form.cleaned_data["dataset_title"],
            permissions=form.cleaned_data["permissions"],
            import_sld_file=sld,
            spatial_files_uploaded=form.cleaned_data['uploaded'],
            upload_type=spatial_files[0].file_type.code,
            time=form.cleaned_data['time'],
            mosaic=form.cleaned_data['mosaic'],
            append_to_mosaic_opts=form.cleaned_data['append_to_mosaic_opts'],
            append_to_mosaic_name=form.cleaned_data['append_to_mosaic_name'],
            mosaic_time_regex=form.cleaned_data['mosaic_time_regex'],
            mosaic_time_value=form.cleaned_data['mosaic_time_value'],
            user=upload.user
        )
        Upload.objects.update_from_session(upload_session)
        return next_step_response(req, upload_session, force_ajax=True)
    else:
        if hasattr(form, "data_retriever"):
            form.data_retriever.delete_files()
        errors = []
        for e in form.errors.values():
            errors.extend([escape(v) for v in e])
        raise GeneralUploadException(detail=errors)
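A sketch of exercising this view with Django's test client, assuming the uploader is mounted at /upload/ (a hypothetical URL) and that the form accepts the base_file and charset fields used above:

# Hypothetical request: POST a zipped shapefile to the save step.
from django.test import Client

client = Client()
with open('roads.zip', 'rb') as data:
    response = client.post('/upload/', {'base_file': data, 'charset': 'UTF-8'})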