Example #1
def create_gn_dataset(workspace, datastore, name, title, owner_name):
    """
    Create the GeoNode Dataset associated with a given GeoServer layer.
    """
    owner = get_user_model().objects.get(username=owner_name)

    layer = resource_manager.create(
        str(uuid.uuid4()),
        resource_type=Dataset,
        defaults=dict(
            name=name,
            workspace=workspace.name,
            store=datastore.name,
            subtype='vector',
            alternate=f'{workspace.name}:{name}',
            title=title,
            owner=owner,
            srid='EPSG:4326',
            bbox_polygon=Polygon.from_bbox(BBOX),
            ll_bbox_polygon=Polygon.from_bbox(BBOX),
            data_quality_statement=DATA_QUALITY_MESSAGE
        ))

    to_update = {}
    if settings.ADMIN_MODERATE_UPLOADS:
        to_update['is_approved'] = to_update['was_approved'] = False
    if settings.RESOURCE_PUBLISHING:
        to_update['is_published'] = to_update['was_published'] = False

    resource_manager.update(layer.uuid, instance=layer, vals=to_update)
    resource_manager.set_thumbnail(None, instance=layer)
    return layer
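
A minimal usage sketch for the helper above, assuming GeoNode's gsconfig catalog is available as gs_catalog; the workspace and store names are illustrative, not taken from the example:

# Usage sketch -- illustrative only; the workspace/store names are assumptions.
from geonode.geoserver.helpers import gs_catalog

workspace = gs_catalog.get_workspace('geonode')            # assumed workspace name
datastore = gs_catalog.get_store('datastore', workspace)   # assumed store name

layer = create_gn_dataset(workspace, datastore,
                          name='roads', title='Roads', owner_name='admin')
print(layer.alternate)  # e.g. 'geonode:roads'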
Example #2
    def perform_create(self, serializer):
        # Thumbnail will be handled later
        post_creation_data = {
            "thumbnail": serializer.validated_data.pop("thumbnail_url", "")
        }

        instance = serializer.save(
            owner=self.request.user,
            resource_type="map",
            uuid=str(uuid4()),
        )

        # events and resource routines
        self._post_change_routines(
            instance=instance,
            create_action_perfomed=True,
            additional_data=post_creation_data,
        )
        # Handle thumbnail generation
        resource_manager.set_thumbnail(instance.uuid,
                                       instance=instance,
                                       overwrite=False)
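
The method pops "thumbnail_url" out of validated_data before saving, which implies the serializer accepts it as a write-only, optional input. A sketch of such a serializer, assuming Django REST Framework; only the "thumbnail_url" field name comes from the snippet, the rest is illustrative:

# Illustrative serializer sketch; not GeoNode's actual MapSerializer.
from rest_framework import serializers

from geonode.maps.models import Map

class MapSerializer(serializers.ModelSerializer):
    # Accepted on input, then removed from validated_data in perform_create above.
    thumbnail_url = serializers.URLField(write_only=True, required=False, allow_blank=True)

    class Meta:
        model = Map
        fields = ('pk', 'title', 'thumbnail_url')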
Example #3
def final_step(upload_session, user, charset="UTF-8", dataset_id=None):
    import_session = upload_session.import_session
    import_id = import_session.id

    _log(f'Reloading session {import_id} to check validity')
    try:
        import_session = import_session.reload()
    except gsimporter.api.NotFound as e:
        Upload.objects.invalidate_from_session(upload_session)
        raise UploadException.from_exc(
            _("The GeoServer Import Session is no more available"), e)

    if Upload.objects.filter(import_id=import_id).count():
        Upload.objects.filter(import_id=import_id).update(complete=False)
        upload = Upload.objects.filter(import_id=import_id).get()
        if upload.state == enumerations.STATE_RUNNING:
            return

    upload_session.import_session = import_session
    Upload.objects.update_from_session(upload_session)

    # Create the style and assign it to the created resource
    # FIXME: Put this in gsconfig.py
    task = import_session.tasks[0]
    task.set_charset(charset)

    # @todo see above in save_step, regarding computed unique name
    name = task.layer.name

    if dataset_id:
        name = Dataset.objects.get(resourcebase_ptr_id=dataset_id).name

    _log(f'Getting from catalog [{name}]')
    try:
        # The importer chooses an available featuretype name late in the game,
        # so the resource name needs to be verified here; otherwise things will
        # fail. This happens when the same data is uploaded a second time and
        # the default name is chosen.
        gs_catalog.get_layer(name)
    except Exception:
        Upload.objects.invalidate_from_session(upload_session)
        raise LayerNotReady(
            _(f"Expected to find layer named '{name}' in geoserver"))

    if import_session.state == 'READY' or (import_session.state == 'PENDING'
                                           and task.state == 'READY'):
        import_session.commit()
    elif import_session.state == 'INCOMPLETE' and task.state != 'ERROR':
        Upload.objects.invalidate_from_session(upload_session)
        raise Exception(f'unknown item state: {task.state}')
    try:
        import_session = import_session.reload()
    except gsimporter.api.NotFound as e:
        Upload.objects.invalidate_from_session(upload_session)
        raise UploadException.from_exc(
            _("The GeoServer Import Session is no more available"), e)
    upload_session.import_session = import_session
    Upload.objects.update_from_session(upload_session)

    _log(f'Creating Django record for [{name}]')
    target = task.target
    alternate = task.get_target_layer_name()
    dataset_uuid = None
    title = upload_session.dataset_title
    abstract = upload_session.dataset_abstract

    metadata_uploaded = False
    xml_file = upload_session.base_file[0].xml_files
    if xml_file:
        try:
            # get model properties from XML
            # If it's contained within a zip, need to extract it
            if upload_session.base_file.archive:
                archive = upload_session.base_file.archive
                zf = zipfile.ZipFile(archive, 'r', allowZip64=True)
                zf.extract(xml_file[0], os.path.dirname(archive))
                # Assign the absolute path to this file
                xml_file = f"{os.path.dirname(archive)}/{xml_file[0]}"

            # Sanity checks
            if isinstance(xml_file, list):
                if len(xml_file) > 0:
                    xml_file = xml_file[0]
                else:
                    xml_file = None
            elif not isinstance(xml_file, str):
                xml_file = None

            if xml_file and os.path.exists(xml_file) and os.access(
                    xml_file, os.R_OK):
                dataset_uuid, vals, regions, keywords, custom = parse_metadata(
                    open(xml_file).read())
                metadata_uploaded = True
        except Exception as e:
            Upload.objects.invalidate_from_session(upload_session)
            logger.error(e)
            raise GeoNodeException(
                _("Exception occurred while parsing the provided Metadata file."
                  ), e)

    # look for SLD
    sld_file = upload_session.base_file[0].sld_files
    sld_uploaded = False
    if sld_file:
        # If it's contained within a zip, need to extract it
        if upload_session.base_file.archive:
            archive = upload_session.base_file.archive
            logger.error(f'using uploaded sld file from {archive}')
            zf = zipfile.ZipFile(archive, 'r', allowZip64=True)
            zf.extract(sld_file[0],
                       os.path.dirname(archive),
                       path=upload_session.tempdir)
            # Assign the absolute path to this file
            sld_file[0] = f"{os.path.dirname(archive)}/{sld_file[0]}"
        else:
            _sld_file = f"{os.path.dirname(upload_session.tempdir)}/{os.path.basename(sld_file[0])}"
            logger.error(f"copying [{sld_file[0]}] to [{_sld_file}]")
            try:
                shutil.copyfile(sld_file[0], _sld_file)
                sld_file = _sld_file
            except (IsADirectoryError, shutil.SameFileError) as e:
                logger.exception(e)
                sld_file = sld_file[0]
            except Exception as e:
                raise UploadException.from_exc(_('Error uploading Dataset'), e)
        sld_uploaded = True
    else:
        # get_files will not find the sld if it doesn't match the base name
        # so we've worked around that in the view - if provided, it will be here
        if upload_session.import_sld_file:
            logger.error('using provided sld file from importer')
            base_file = upload_session.base_file
            sld_file = base_file[0].sld_files[0]
        sld_uploaded = False
    logger.error(f'[sld_uploaded: {sld_uploaded}] sld_file: {sld_file}')

    # Make sure the layer does not exist already
    if dataset_uuid and Dataset.objects.filter(uuid=dataset_uuid).count():
        Upload.objects.invalidate_from_session(upload_session)
        logger.error(
            "The UUID identifier from the XML Metadata is already in use in this system."
        )
        raise GeoNodeException(
            _("The UUID identifier from the XML Metadata is already in use in this system."
              ))

    # Is it a regular file or an ImageMosaic?
    # if upload_session.mosaic_time_regex and upload_session.mosaic_time_value:
    saved_dataset = None
    has_time = has_elevation = False
    start = end = None
    if upload_session.mosaic_time_regex and upload_session.mosaic_time_value:
        has_elevation = True
        start = datetime.datetime.strptime(
            upload_session.mosaic_time_value,
            TIME_REGEX_FORMAT[upload_session.mosaic_time_regex])
        start = pytz.utc.localize(start, is_dst=False)
        end = start
    if upload_session.time and upload_session.time_info and upload_session.time_transforms:
        has_time = True

    if upload_session.append_to_mosaic_opts:
        try:
            with transaction.atomic():
                saved_dataset_filter = Dataset.objects.filter(
                    name=upload_session.append_to_mosaic_name)
                if not saved_dataset_filter.exists():
                    saved_dataset = resource_manager.create(
                        name=upload_session.append_to_mosaic_name)
                    created = True
                else:
                    saved_dataset = saved_dataset_filter.get()
                    created = False
                saved_dataset.set_dirty_state()
                if saved_dataset.temporal_extent_start and end:
                    if pytz.utc.localize(saved_dataset.temporal_extent_start,
                                         is_dst=False) < end:
                        saved_dataset.temporal_extent_end = end
                        Dataset.objects.filter(
                            name=upload_session.append_to_mosaic_name).update(
                                temporal_extent_end=end)
                    else:
                        saved_dataset.temporal_extent_start = end
                        Dataset.objects.filter(
                            name=upload_session.append_to_mosaic_name).update(
                                temporal_extent_start=end)
        except Exception as e:
            _log(
                f"There was an error updating the mosaic temporal extent: {str(e)}"
            )
    else:
        try:
            with transaction.atomic():
                saved_dataset_filter = Dataset.objects.filter(
                    store=target.name,
                    alternate=alternate,
                    workspace=target.workspace_name,
                    name=task.layer.name)
                if not saved_dataset_filter.exists():
                    saved_dataset = resource_manager.create(
                        dataset_uuid,
                        resource_type=Dataset,
                        defaults=dict(
                            store=target.name,
                            subtype=get_dataset_storetype(target.store_type),
                            alternate=alternate,
                            workspace=target.workspace_name,
                            title=title,
                            name=task.layer.name,
                            abstract=abstract or _('No abstract provided'),
                            owner=user,
                            temporal_extent_start=start,
                            temporal_extent_end=end,
                            is_mosaic=has_elevation,
                            has_time=has_time,
                            has_elevation=has_elevation,
                            time_regex=upload_session.mosaic_time_regex))
                    created = True
                else:
                    saved_dataset = saved_dataset_filter.get()
                    created = False
                saved_dataset.set_dirty_state()
        except Exception as e:
            Upload.objects.invalidate_from_session(upload_session)
            raise UploadException.from_exc(_('Error configuring Dataset'), e)

    assert saved_dataset

    if not created:
        return saved_dataset

    try:
        saved_dataset.set_dirty_state()
        with transaction.atomic():
            Upload.objects.update_from_session(upload_session,
                                               resource=saved_dataset)

            # Set default permissions on the newly created layer and send notifications
            permissions = upload_session.permissions

            # Finalize Upload
            resource_manager.set_permissions(None,
                                             instance=saved_dataset,
                                             permissions=permissions,
                                             created=created)
            resource_manager.update(None,
                                    instance=saved_dataset,
                                    xml_file=xml_file,
                                    metadata_uploaded=metadata_uploaded)
            resource_manager.exec('set_style',
                                  None,
                                  instance=saved_dataset,
                                  sld_uploaded=sld_uploaded,
                                  sld_file=sld_file,
                                  tempdir=upload_session.tempdir)
            resource_manager.exec('set_time_info',
                                  None,
                                  instance=saved_dataset,
                                  time_info=upload_session.time_info)
            resource_manager.set_thumbnail(None, instance=saved_dataset)
            saved_dataset.set_processing_state(enumerations.STATE_PROCESSED)
    except Exception as e:
        saved_dataset.set_processing_state(enumerations.STATE_INVALID)
        raise GeoNodeException(e)
    finally:
        saved_dataset.clear_dirty_state()

    try:
        logger.debug(
            f"... Cleaning up the temporary folders {upload_session.tempdir}")
        if upload_session.tempdir and os.path.exists(upload_session.tempdir):
            shutil.rmtree(upload_session.tempdir)
    except Exception as e:
        logger.warning(e)
    finally:
        Upload.objects.filter(import_id=import_id).update(complete=True)

    return saved_dataset
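
A condensed sketch of driving final_step once the import session has been prepared; upload_session and the requesting user are assumed to come from the earlier upload steps, and the exception types mirror the ones raised above:

# Illustrative call; upload_session/request come from the surrounding upload flow.
try:
    dataset = final_step(upload_session, request.user, charset='UTF-8')
except (UploadException, LayerNotReady, GeoNodeException) as e:
    logger.error(f"Upload finalization failed: {e}")
    raise
else:
    # final_step returns early (None) while the Upload is still STATE_RUNNING.
    if dataset is not None:
        logger.info(f"Finalized dataset {dataset.alternate}")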
Example #4
def document_metadata(request,
                      docid,
                      template='documents/document_metadata.html',
                      ajax=True):
    document = None
    try:
        document = _resolve_document(request, docid,
                                     'base.change_resourcebase_metadata',
                                     _PERMISSION_MSG_METADATA)
    except PermissionDenied:
        return HttpResponse(_("Not allowed"), status=403)
    except Exception:
        raise Http404(_("Not found"))
    if not document:
        raise Http404(_("Not found"))

    # Add metadata_author or poc if missing
    document.add_missing_metadata_author_or_poc()
    poc = document.poc
    metadata_author = document.metadata_author
    topic_category = document.category
    current_keywords = [keyword.name for keyword in document.keywords.all()]

    if request.method == "POST":
        document_form = DocumentForm(request.POST,
                                     instance=document,
                                     prefix="resource")
        category_form = CategoryForm(
            request.POST,
            prefix="category_choice_field",
            initial=int(request.POST["category_choice_field"])
            if "category_choice_field" in request.POST
            and request.POST["category_choice_field"] else None)

        if hasattr(settings, 'THESAURUS'):
            tkeywords_form = TKeywordForm(request.POST)
        else:
            tkeywords_form = ThesaurusAvailableForm(request.POST,
                                                    prefix='tkeywords')

    else:
        document_form = DocumentForm(instance=document, prefix="resource")
        document_form.disable_keywords_widget_for_non_superuser(request.user)
        category_form = CategoryForm(
            prefix="category_choice_field",
            initial=topic_category.id if topic_category else None)

        # Keywords from THESAURUS management
        doc_tkeywords = document.tkeywords.all()
        if hasattr(settings, 'THESAURUS') and settings.THESAURUS:
            warnings.warn(
                'The settings for Thesaurus has been moved to Model, \
            this feature will be removed in next releases', DeprecationWarning)
            tkeywords_list = ''
            lang = 'en'  # TODO: use user's language
            if doc_tkeywords and len(doc_tkeywords) > 0:
                tkeywords_ids = doc_tkeywords.values_list('id', flat=True)
                if hasattr(settings, 'THESAURUS') and settings.THESAURUS:
                    el = settings.THESAURUS
                    thesaurus_name = el['name']
                    try:
                        t = Thesaurus.objects.get(identifier=thesaurus_name)
                        for tk in t.thesaurus.filter(pk__in=tkeywords_ids):
                            tkl = tk.keyword.filter(lang=lang)
                            if len(tkl) > 0:
                                tkl_ids = ",".join(
                                    map(str, tkl.values_list('id', flat=True)))
                                tkeywords_list += f",{tkl_ids}" if len(
                                    tkeywords_list) > 0 else tkl_ids
                    except Exception:
                        tb = traceback.format_exc()
                        logger.error(tb)

            tkeywords_form = TKeywordForm(instance=document)
        else:
            tkeywords_form = ThesaurusAvailableForm(prefix='tkeywords')
            #  set initial values for thesaurus form
            for tid in tkeywords_form.fields:
                values = []
                values = [
                    keyword.id for keyword in doc_tkeywords
                    if int(tid) == keyword.thesaurus.id
                ]
                tkeywords_form.fields[tid].initial = values

    if request.method == "POST" and document_form.is_valid(
    ) and category_form.is_valid() and tkeywords_form.is_valid():
        new_poc = document_form.cleaned_data['poc']
        new_author = document_form.cleaned_data['metadata_author']
        new_keywords = current_keywords if request.keyword_readonly \
            else document_form.cleaned_data['keywords']
        new_regions = document_form.cleaned_data['regions']

        new_category = None
        if category_form and 'category_choice_field' in category_form.cleaned_data and \
                category_form.cleaned_data['category_choice_field']:
            new_category = TopicCategory.objects.get(
                id=int(category_form.cleaned_data['category_choice_field']))

        if new_poc is None:
            if poc is None:
                poc_form = ProfileForm(request.POST,
                                       prefix="poc",
                                       instance=poc)
            else:
                poc_form = ProfileForm(request.POST, prefix="poc")
            if poc_form.is_valid():
                if len(poc_form.cleaned_data['profile']) == 0:
                    # FIXME use form.add_error in django > 1.7
                    errors = poc_form._errors.setdefault(
                        'profile', ErrorList())
                    errors.append(
                        _('You must set a point of contact for this resource'))
            if poc_form.has_changed() and poc_form.is_valid():
                new_poc = poc_form.save()

        if new_author is None:
            if metadata_author is None:
                author_form = ProfileForm(request.POST,
                                          prefix="author",
                                          instance=metadata_author)
            else:
                author_form = ProfileForm(request.POST, prefix="author")
            if author_form.is_valid():
                if len(author_form.cleaned_data['profile']) == 0:
                    # FIXME use form.add_error in django > 1.7
                    errors = author_form._errors.setdefault(
                        'profile', ErrorList())
                    errors.append(
                        _('You must set an author for this resource'))
            if author_form.has_changed() and author_form.is_valid():
                new_author = author_form.save()

        document = document_form.instance
        resource_manager.update(document.uuid,
                                instance=document,
                                keywords=new_keywords,
                                regions=new_regions,
                                vals=dict(poc=new_poc or document.poc,
                                          metadata_author=new_author
                                          or document.metadata_author,
                                          category=new_category),
                                notify=True)
        resource_manager.set_thumbnail(document.uuid,
                                       instance=document,
                                       overwrite=False)
        document_form.save_many2many()

        register_event(request, EventType.EVENT_CHANGE_METADATA, document)
        url = hookset.document_detail_url(document)
        if not ajax:
            return HttpResponseRedirect(url)
        message = document.id

        try:
            # Keywords from THESAURUS management
            # Rewritten to work with updated autocomplete
            if not tkeywords_form.is_valid():
                return HttpResponse(
                    json.dumps({'message': "Invalid thesaurus keywords"}),
                    status=400)

            thesaurus_setting = getattr(settings, 'THESAURUS', None)
            if thesaurus_setting:
                tkeywords_data = tkeywords_form.cleaned_data['tkeywords']
                tkeywords_data = tkeywords_data.filter(
                    thesaurus__identifier=thesaurus_setting['name'])
                document.tkeywords.set(tkeywords_data)
            elif Thesaurus.objects.all().exists():
                fields = tkeywords_form.cleaned_data
                document.tkeywords.set(tkeywords_form.cleanx(fields))

        except Exception:
            tb = traceback.format_exc()
            logger.error(tb)

        return HttpResponse(json.dumps({'message': message}))

    # - POST Request Ends here -

    # Request.GET
    if poc is not None:
        document_form.fields['poc'].initial = poc.id
        poc_form = ProfileForm(prefix="poc")
        poc_form.hidden = True

    if metadata_author is not None:
        document_form.fields['metadata_author'].initial = metadata_author.id
        author_form = ProfileForm(prefix="author")
        author_form.hidden = True

    metadata_author_groups = get_user_visible_groups(request.user)

    if settings.ADMIN_MODERATE_UPLOADS:
        if not request.user.is_superuser:
            can_change_metadata = request.user.has_perm(
                'change_resourcebase_metadata', document.get_self_resource())
            try:
                is_manager = request.user.groupmember_set.all().filter(
                    role='manager').exists()
            except Exception:
                is_manager = False
            if not is_manager or not can_change_metadata:
                if settings.RESOURCE_PUBLISHING:
                    document_form.fields['is_published'].widget.attrs.update(
                        {'disabled': 'true'})
                document_form.fields['is_approved'].widget.attrs.update(
                    {'disabled': 'true'})

    register_event(request, EventType.EVENT_VIEW_METADATA, document)
    return render(
        request,
        template,
        context={
            "resource": document,
            "document": document,
            "document_form": document_form,
            "poc_form": poc_form,
            "author_form": author_form,
            "category_form": category_form,
            "tkeywords_form": tkeywords_form,
            "metadata_author_groups": metadata_author_groups,
            "TOPICCATEGORY_MANDATORY": getattr(settings, 'TOPICCATEGORY_MANDATORY', False),
            "GROUP_MANDATORY_RESOURCES": getattr(settings, 'GROUP_MANDATORY_RESOURCES', False),
            "UI_MANDATORY_FIELDS": list(
                set(getattr(settings, 'UI_DEFAULT_MANDATORY_FIELDS', []))
                | set(getattr(settings, 'UI_REQUIRED_FIELDS', [])))
        })
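
For context, a view like this would typically be wired up with a URL pattern along the following lines; the route and name are assumptions rather than GeoNode's actual urlconf:

# Illustrative urlconf entry; route and name are assumptions.
from django.urls import path

from . import views

urlpatterns = [
    path('documents/<int:docid>/metadata',
         views.document_metadata,
         name='document_metadata'),
]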
Example #5
    def form_valid(self, form):
        """
        If the form is valid, save the associated model.
        """
        doc_form = form.cleaned_data

        file = doc_form.pop('doc_file', None)
        if file:
            tempdir = tempfile.mkdtemp(dir=settings.STATIC_ROOT)
            dirname = os.path.basename(tempdir)
            filepath = storage_manager.save(f"{dirname}/{file.name}", file)
            storage_path = storage_manager.path(filepath)
            self.object = resource_manager.create(
                None,
                resource_type=Document,
                defaults=dict(owner=self.request.user,
                              doc_url=doc_form.pop('doc_url', None),
                              title=doc_form.pop('title', file.name),
                              files=[storage_path]))
            if tempdir != os.path.dirname(storage_path):
                shutil.rmtree(tempdir, ignore_errors=True)
        else:
            self.object = resource_manager.create(
                None,
                resource_type=Document,
                defaults=dict(owner=self.request.user,
                              doc_url=doc_form.pop('doc_url', None),
                              title=doc_form.pop('title', None)))

        if settings.ADMIN_MODERATE_UPLOADS:
            self.object.is_approved = False
        if settings.RESOURCE_PUBLISHING:
            self.object.is_published = False

        resource_manager.set_permissions(
            None,
            instance=self.object,
            permissions=form.cleaned_data["permissions"],
            created=True)

        abstract = None
        date = None
        regions = []
        keywords = []
        bbox = None
        url = hookset.document_detail_url(self.object)

        out = {'success': False}

        if getattr(settings, 'EXIF_ENABLED', False):
            try:
                from geonode.documents.exif.utils import exif_extract_metadata_doc
                exif_metadata = exif_extract_metadata_doc(self.object)
                if exif_metadata:
                    date = exif_metadata.get('date', None)
                    keywords.extend(exif_metadata.get('keywords', []))
                    bbox = exif_metadata.get('bbox', None)
                    abstract = exif_metadata.get('abstract', None)
            except Exception:
                logger.debug("Exif extraction failed.")

        resource_manager.update(
            self.object.uuid,
            instance=self.object,
            keywords=keywords,
            regions=regions,
            vals=dict(abstract=abstract,
                      date=date,
                      date_type="Creation",
                      bbox_polygon=BBOXHelper.from_xy(bbox).as_polygon()
                      if bbox else None),
            notify=True)
        resource_manager.set_thumbnail(self.object.uuid,
                                       instance=self.object,
                                       overwrite=False)

        register_event(self.request, EventType.EVENT_UPLOAD, self.object)

        if self.request.GET.get('no__redirect', False):
            out['success'] = True
            out['url'] = url
            if out['success']:
                status_code = 200
            else:
                status_code = 400
            return HttpResponse(json.dumps(out),
                                content_type='application/json',
                                status=status_code)
        else:
            return HttpResponseRedirect(url)
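
form_valid belongs to a Django class-based view; a minimal sketch of the surrounding class, assuming a standard CreateView (the view name, form location, and template path are assumptions):

# Illustrative wrapper; the form import location and template name are assumptions.
from django.views.generic.edit import CreateView

from geonode.documents.models import Document
from geonode.documents.forms import DocumentCreateForm  # assumed location of the upload form

class DocumentUploadSketchView(CreateView):
    model = Document
    form_class = DocumentCreateForm  # expected to expose doc_file, doc_url, title, permissions
    template_name = 'documents/document_upload.html'

    def form_valid(self, form):
        ...  # body as in the example above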
Example #6
def geoapp_edit(request, geoappid, template='apps/app_edit.html'):
    """
    The view that returns the app composer opened to
    the app with the given app ID.
    """
    try:
        geoapp_obj = _resolve_geoapp(request, geoappid,
                                     'base.view_resourcebase',
                                     _PERMISSION_MSG_VIEW)
    except PermissionDenied:
        return HttpResponse(_("Not allowed"), status=403)
    except Exception:
        raise Http404(_("Not found"))
    if not geoapp_obj:
        raise Http404(_("Not found"))

    # Call this first in order to be sure "perms_list" is correct
    permissions_json = _perms_info_json(geoapp_obj)

    perms_list = list(geoapp_obj.get_self_resource().get_user_perms(
        request.user).union(geoapp_obj.get_user_perms(request.user)))

    group = None
    if geoapp_obj.group:
        try:
            group = GroupProfile.objects.get(slug=geoapp_obj.group.name)
        except GroupProfile.DoesNotExist:
            group = None

    r = geoapp_obj
    if request.method in ('POST', 'PATCH', 'PUT'):
        r = resource_manager.update(geoapp_obj.uuid,
                                    instance=geoapp_obj,
                                    notify=True)

        resource_manager.set_permissions(
            geoapp_obj.uuid,
            instance=geoapp_obj,
            permissions=ast.literal_eval(permissions_json))

        resource_manager.set_thumbnail(geoapp_obj.uuid,
                                       instance=geoapp_obj,
                                       overwrite=False)

    access_token = None
    if request and request.user:
        access_token = get_or_create_token(request.user)
        if access_token and not access_token.is_expired():
            access_token = access_token.token
        else:
            access_token = None

    _config = json.dumps(r.blob)
    _ctx = {
        'appId': geoappid,
        'appType': geoapp_obj.resource_type,
        'config': _config,
        'user': request.user,
        'access_token': access_token,
        'resource': geoapp_obj,
        'group': group,
        'perms_list': perms_list,
        "permissions_json": permissions_json,
        'preview': getattr(settings, 'GEONODE_CLIENT_LAYER_PREVIEW_LIBRARY', 'mapstore')
    }

    return render(request, template, context=_ctx)
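
A quick way to exercise this view, assuming Django's test client; the login credentials and the URL route are placeholders, not GeoNode's actual configuration:

# Illustrative smoke test; credentials and route are assumptions.
from django.test import Client

client = Client()
client.login(username='admin', password='admin')  # assumed credentials
response = client.get('/apps/1/edit')             # assumed route mapped to geoapp_edit
assert response.status_code == 200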
Example #7
    def update_from_viewer(self, conf, context=None):
        """
        Update this Map's details by parsing a JSON object as produced by
        a GXP Viewer.

        This method automatically persists to the database!
        """

        template_name = hookset.update_from_viewer(conf, context=context)
        if not isinstance(context, dict):
            try:
                context = json.loads(ensure_string(context))
            except Exception:
                pass

        conf = context.get("config", {})
        if not isinstance(conf, dict) or isinstance(conf, bytes):
            try:
                conf = json.loads(ensure_string(conf))
            except Exception:
                conf = {}

        about = conf.get("about", {})
        self.title = conf.get("title", about.get("title", ""))
        self.abstract = conf.get("abstract", about.get("abstract", ""))

        _map = conf.get("map", {})
        center = _map.get("center", settings.DEFAULT_MAP_CENTER)
        self.zoom = _map.get("zoom", settings.DEFAULT_MAP_ZOOM)

        if isinstance(center, dict):
            self.center_x = center.get('x')
            self.center_y = center.get('y')
        else:
            self.center_x, self.center_y = center

        projection = _map.get("projection", settings.DEFAULT_MAP_CRS)
        bbox = _map.get("bbox", None)

        if bbox:
            self.set_bounds_from_bbox(bbox, projection)
        else:
            self.set_bounds_from_center_and_zoom(self.center_x, self.center_y,
                                                 self.zoom)

        if self.projection is None or self.projection == '':
            self.projection = projection

        if self.uuid is None or self.uuid == '':
            self.uuid = str(uuid.uuid1())

        def source_for(layer):
            try:
                return conf["sources"][layer["source"]]
            except Exception:
                if 'url' in layer:
                    return {'url': layer['url']}
                else:
                    return {}

        layers = [lyr for lyr in _map.get("layers", [])]
        dataset_names = {lyr.alternate for lyr in self.local_datasets}

        self.dataset_set.all().delete()
        self.keywords.add(*_map.get('keywords', []))

        for ordering, layer in enumerate(layers):
            self.dataset_set.add(
                dataset_from_viewer_config(self.id, MapLayer, layer,
                                           source_for(layer), ordering))

        from geonode.resource.manager import resource_manager
        resource_manager.update(self.uuid, instance=self, notify=True)
        resource_manager.set_thumbnail(self.uuid,
                                       instance=self,
                                       overwrite=False)

        if dataset_names != {lyr.alternate for lyr in self.local_datasets}:
            map_changed_signal.send_robust(sender=self,
                                           what_changed='datasets')

        return template_name
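
A minimal call sketch, assuming an existing Map instance and a GXP-style viewer blob; the dict keys mirror what update_from_viewer reads above, while the values are placeholders:

# Illustrative config; keys mirror the method's reads, values are placeholders.
from geonode.maps.models import Map

map_obj = Map.objects.first()  # any existing Map instance
viewer_context = {
    "config": {
        "about": {"title": "Demo map", "abstract": "Example abstract"},
        "map": {
            "center": {"x": 0, "y": 0},
            "zoom": 3,
            "projection": "EPSG:3857",
            "layers": [],
            "keywords": ["demo"],
        },
        "sources": {},
    }
}
template_name = map_obj.update_from_viewer(viewer_context["config"],
                                           context=viewer_context)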