# Example no. 1
def save_step_view(req, session):
    """Handle the initial "save" step of a dataset upload.

    GET renders the upload form page (including the user's incomplete
    uploads and the available charsets). POST validates the submitted
    files, writes them to a fresh temp directory, scans them for
    spatial content, starts an importer session and answers with the
    next-step response.

    :param req: the incoming ``HttpRequest``
    :param session: unused here; kept for upload-step signature parity
    :return: an ``HttpResponse`` (form page, next-step JSON, or error)
    """
    if req.method == 'GET':
        return render(
            req, 'upload/dataset_upload.html', {
                'async_upload': _ASYNC_UPLOAD,
                'incomplete': Upload.objects.get_incomplete_uploads(req.user),
                'charsets': enumerations.CHARSETS
            })
    form = LayerUploadForm(req.POST, req.FILES)

    # A POST to ".../replace" means we overwrite an existing dataset.
    overwrite = req.path_info.endswith('/replace')
    target_store = None
    if form.is_valid():
        tempdir = tempfile.mkdtemp(dir=settings.STATIC_ROOT)
        logger.debug(
            f"valid_extensions: {form.cleaned_data['valid_extensions']}")
        relevant_files = select_relevant_files(
            form.cleaned_data["valid_extensions"], iter(req.FILES.values()))
        logger.debug(f"relevant_files: {relevant_files}")
        write_uploaded_files_to_disk(tempdir, relevant_files)
        base_file = os.path.join(tempdir, form.cleaned_data["base_file"].name)
        name, ext = os.path.splitext(os.path.basename(base_file))
        logger.debug(f'Name: {name}, ext: {ext}')
        logger.debug(f"base_file: {base_file}")
        scan_hint = get_scan_hint(form.cleaned_data["valid_extensions"])
        spatial_files = scan_file(base_file,
                                  scan_hint=scan_hint,
                                  charset=form.cleaned_data["charset"])
        logger.debug(f"spatial_files: {spatial_files}")

        if overwrite:
            # Reuse the existing dataset's name and store so the
            # importer replaces it instead of creating a new layer.
            # Fetch once: the original called .first() twice, issuing
            # two identical DB queries.
            existing = Dataset.objects.filter(
                id=req.GET['dataset_id']).first()
            if existing is not None:
                name = existing.name
                target_store = existing.store

        import_session, upload = save_step(
            req.user,
            name,
            spatial_files,
            overwrite=overwrite,
            mosaic=form.cleaned_data['mosaic'] or scan_hint == 'zip-mosaic',
            append_to_mosaic_opts=form.cleaned_data['append_to_mosaic_opts'],
            append_to_mosaic_name=form.cleaned_data['append_to_mosaic_name'],
            mosaic_time_regex=form.cleaned_data['mosaic_time_regex'],
            mosaic_time_value=form.cleaned_data['mosaic_time_value'],
            time_presentation=form.cleaned_data['time_presentation'],
            time_presentation_res=form.cleaned_data['time_presentation_res'],
            time_presentation_default_value=form.
            cleaned_data['time_presentation_default_value'],
            time_presentation_reference_value=form.
            cleaned_data['time_presentation_reference_value'],
            charset_encoding=form.cleaned_data["charset"],
            target_store=target_store)
        import_session.tasks[0].set_charset(form.cleaned_data["charset"])
        sld = None
        if spatial_files[0].sld_files:
            sld = spatial_files[0].sld_files[0]
        # When the scanned base file is not on disk (e.g. it was inside
        # an archive), fix up column names in any zip found in tempdir.
        if not os.path.isfile(os.path.join(tempdir,
                                           spatial_files[0].base_file)):
            tmp_files = [
                f for f in os.listdir(tempdir)
                if os.path.isfile(os.path.join(tempdir, f))
            ]
            for f in tmp_files:
                if zipfile.is_zipfile(os.path.join(tempdir, f)):
                    fixup_shp_columnnames(os.path.join(tempdir, f),
                                          form.cleaned_data["charset"],
                                          tempdir=tempdir)

        _log(f'provided sld is {sld}')
        # upload_type = get_upload_type(base_file)
        upload_session = UploaderSession(
            tempdir=tempdir,
            base_file=spatial_files,
            name=upload.name,
            charset=form.cleaned_data["charset"],
            import_session=import_session,
            dataset_abstract=form.cleaned_data["abstract"],
            dataset_title=form.cleaned_data["dataset_title"],
            permissions=form.cleaned_data["permissions"],
            import_sld_file=sld,
            upload_type=spatial_files[0].file_type.code,
            time=form.cleaned_data['time'],
            mosaic=form.cleaned_data['mosaic'],
            append_to_mosaic_opts=form.cleaned_data['append_to_mosaic_opts'],
            append_to_mosaic_name=form.cleaned_data['append_to_mosaic_name'],
            mosaic_time_regex=form.cleaned_data['mosaic_time_regex'],
            mosaic_time_value=form.cleaned_data['mosaic_time_value'],
            user=upload.user)
        Upload.objects.update_from_session(upload_session)
        return next_step_response(req, upload_session, force_ajax=True)
    else:
        # Collect every form error (escaped) into a flat error response.
        errors = []
        for e in form.errors.values():
            errors.extend([escape(v) for v in e])
        return error_response(req, errors=errors)
# Example no. 2
def dataset_upload_metadata(request):
    """Update an existing dataset's metadata from an uploaded XML file.

    Validates the upload form, writes the XML to a temp directory,
    parses it and — when the targeted dataset exists and the XML's UUID
    matches — applies the parsed metadata to the dataset.

    :param request: the incoming ``HttpRequest`` (POST with files)
    :return: a JSON ``HttpResponse`` — 200 on success, 404 when the
        dataset is missing or the UUID mismatches, 500 on form errors
    """
    out = {}
    errormsgs = []

    form = NewLayerUploadForm(request.POST, request.FILES)

    if form.is_valid():

        tempdir = tempfile.mkdtemp(dir=settings.STATIC_ROOT)

        relevant_files = select_relevant_files(['xml'],
                                               iter(request.FILES.values()))

        logger.debug(f"relevant_files: {relevant_files}")

        write_uploaded_files_to_disk(tempdir, relevant_files)

        base_file = os.path.join(tempdir, form.cleaned_data["base_file"].name)

        name = form.cleaned_data['dataset_title']
        layer = Dataset.objects.filter(typename=name)
        # Fetch once: the original called .first() twice, issuing two
        # identical DB queries.
        target = layer.first()
        if target is not None:
            # Use a context manager so the XML file handle is closed
            # (the original leaked it via open(...).read()).
            with open(base_file) as metadata_file:
                dataset_uuid, vals, regions, keywords, _ = parse_metadata(
                    metadata_file.read())
            if dataset_uuid and target.uuid != dataset_uuid:
                out['success'] = False
                out['errors'] = "The UUID identifier from the XML Metadata, is different from the one saved"
                return HttpResponse(json.dumps(out),
                                    content_type='application/json',
                                    status=404)
            updated_dataset = update_resource(target, base_file,
                                              regions, keywords, vals)
            updated_dataset.save()
            out['status'] = ['finished']
            out['url'] = updated_dataset.get_absolute_url()
            out['bbox'] = updated_dataset.bbox_string
            out['crs'] = {'type': 'name', 'properties': updated_dataset.srid}
            out['ogc_backend'] = settings.OGC_SERVER['default']['BACKEND']
            # Mark any attached upload session as fully processed.
            if hasattr(updated_dataset, 'upload_session'):
                upload_session = updated_dataset.upload_session
                upload_session.processed = True
                upload_session.save()
            status_code = 200
            out['success'] = True
            return HttpResponse(json.dumps(out),
                                content_type='application/json',
                                status=status_code)
        else:
            out['success'] = False
            out['errors'] = "Dataset selected does not exists"
            status_code = 404
        return HttpResponse(json.dumps(out),
                            content_type='application/json',
                            status=status_code)
    else:
        # Report escaped form errors alongside the raw error dict.
        for e in form.errors.values():
            errormsgs.extend([escape(v) for v in e])
        out['errors'] = form.errors
        out['errormsgs'] = errormsgs

    return HttpResponse(json.dumps(out),
                        content_type='application/json',
                        status=500)