Example #1
0
def get_layer_capabilities(layer,
                           version='1.3.0',
                           access_token=None,
                           tolerant=False):
    """
    Retrieve a layer-specific GetCapabilities document
    """
    # Split "<workspace>:<layername>" alternates; plain names have no workspace.
    if ":" in layer.alternate:
        workspace, layername = layer.alternate.split(":")
    else:
        workspace, layername = None, layer.alternate

    # Remote services expose their own WMS endpoint; local layers go
    # through the configured OGC server.
    if layer.remote_service:
        wms_url = f'{layer.remote_service.service_url}?service=wms&version={version}&request=GetCapabilities'
    else:
        wms_url = f'{ogc_server_settings.LOCATION}{workspace}/{layername}/wms?service=wms&version={version}&request=GetCapabilities'  # noqa
        if access_token:
            wms_url += f'&access_token={access_token}'

    _user, _password = ogc_server_settings.credentials
    req, content = http_client.get(wms_url, user=_user)
    getcap = ensure_string(content)

    if not getattr(settings, 'DELAYED_SECURITY_SIGNALS', False):
        first_try_failed = ('ServiceException' in getcap
                            or req.status_code == 404)
        if tolerant and first_try_failed:
            # WARNING Please make sure to have enabled DJANGO CACHE as per
            # https://docs.djangoproject.com/en/2.0/topics/cache/#filesystem-caching
            wms_url = f'{ogc_server_settings.public_url}{workspace}/ows?service=wms&version={version}&request=GetCapabilities&layers={layer}'  # noqa
            if access_token:
                wms_url += f'&access_token={access_token}'
            req, content = http_client.get(wms_url, user=_user)
            getcap = ensure_string(content)

    # Give up when the (possibly retried) response still signals failure.
    if 'ServiceException' in getcap or req.status_code == 404:
        return None
    return getcap.encode('UTF-8')
Example #2
0
def layer_batch_download(request):
    """
    batch download a set of layers

    POST - begin download
    GET?id=<download_id> monitor status
    """

    from geonode.utils import http_client
    # currently this just piggy-backs on the map download backend
    # by specifying an ad hoc map that contains all layers requested
    # for download. assumes all layers are hosted locally.
    # status monitoring is handled slightly differently.

    if request.method == 'POST':
        # Resolve the posted "alternate" identifiers to Layer records.
        requested = request.POST.getlist("layer")
        layers = Layer.objects.filter(alternate__in=list(requested))

        def layer_son(layer):
            # Minimal layer descriptor understood by the batchDownload process.
            return {
                "name": layer.alternate,
                "service": layer.service_type,
                "metadataURL": "",
                "serviceURL": ""
            }

        def list_item(lyr):
            # One README line per layer: "<title> - <name>.*"
            return "%s - %s.*" % (lyr.title, lyr.name)

        # Fixed header followed by one line per requested layer.
        readme = """This data is provided by GeoNode.\n\nContents:"""
        readme = "\n".join([readme] + [list_item(lyr) for lyr in layers])

        fake_map = {
            "map": {
                "readme": readme
            },
            "layers": [layer_son(lyr) for lyr in layers]
        }

        url = "%srest/process/batchDownload/launch/" % ogc_server_settings.LOCATION
        resp = http_client.post(url, data=json.dumps(fake_map))
        # Relay GeoServer's body and status straight back to the client.
        return HttpResponse(resp.content, status=resp.status_code)

    if request.method == 'GET':
        # essentially, this just proxies back to geoserver
        download_id = request.GET.get('id', None)
        if download_id is None:
            return HttpResponse(status=404)

        url = "%srest/process/batchDownload/status/%s" % (
            ogc_server_settings.LOCATION, download_id)
        resp = http_client.get(url)
        return HttpResponse(resp.content, status=resp.status_code)
Example #3
0
File: views.py  Project: MapStory/geonode
def layer_batch_download(request):
    """
    batch download a set of layers

    POST - begin download
    GET?id=<download_id> monitor status
    """

    from geonode.utils import http_client
    # currently this just piggy-backs on the map download backend
    # by specifying an ad hoc map that contains all layers requested
    # for download. assumes all layers are hosted locally.
    # status monitoring is handled slightly differently.

    if request.method == 'POST':
        # Resolve the posted "alternate" identifiers to Layer records;
        # unknown names are silently dropped by the filter.
        layers = request.POST.getlist("layer")
        layers = Layer.objects.filter(alternate__in=list(layers))

        def layer_son(layer):
            # Minimal per-layer descriptor consumed by the GeoServer
            # batchDownload process.
            return {
                "name": layer.alternate,
                "service": layer.service_type,
                "metadataURL": "",
                "serviceURL": ""
            }

        readme = """This data is provided by GeoNode.\n\nContents:"""

        def list_item(lyr):
            # One README line per layer: "<title> - <name>.*"
            return "%s - %s.*" % (lyr.title, lyr.name)

        readme = "\n".join([readme] + [list_item(l) for l in layers])

        # Ad hoc "map" payload wrapping the requested layers, since the
        # download backend only understands map-shaped requests.
        fake_map = {
            "map": {"readme": readme},
            "layers": [layer_son(lyr) for lyr in layers]
        }

        url = "%srest/process/batchDownload/launch/" % ogc_server_settings.LOCATION
        req = http_client.post(url, data=json.dumps(fake_map))
        content = req.content
        # Relay GeoServer's body and status straight back to the client.
        return HttpResponse(content, status=req.status_code)

    if request.method == 'GET':
        # essentially, this just proxies back to geoserver
        download_id = request.GET.get('id', None)
        if download_id is None:
            return HttpResponse(status=404)

        url = "%srest/process/batchDownload/status/%s" % (
            ogc_server_settings.LOCATION, download_id)
        req = http_client.get(url)
        content = req.content
        return HttpResponse(content, status=req.status_code)
Example #4
0
def download(request, resourceid, sender=Layer):
    """
    Zip up every file belonging to a resource and stream the archive back.

    The archive contains the layer's uploaded files, any associated SLD
    styles (local body plus, when reachable, the remote copy) and a
    ``.metadata`` folder with a JSON dump of the instance and its
    metadata/image/OGC links.

    Returns a rendered 404 page when no files can be found and a rendered
    403 page when the resource is not a downloadable Layer.
    """
    _not_authorized = _("You are not authorized to download this resource.")
    _not_permitted = _("You are not permitted to save or edit this resource.")
    _no_files_found = _(
        "No files have been found for this resource. Please, contact a system administrator."
    )

    instance = resolve_object(request,
                              sender, {'pk': resourceid},
                              permission='base.download_resourcebase',
                              permission_msg=_not_permitted)

    def _no_files_response():
        # Shared 404 response for every "nothing to download" case
        # (the original repeated this block three times verbatim).
        return HttpResponse(loader.render_to_string(
            '401.html',
            context={
                'error_title': _("No files found."),
                'error_message': _no_files_found
            },
            request=request),
            status=404)

    if isinstance(instance, Layer):
        # Create Target Folder
        dirpath = tempfile.mkdtemp()
        dir_time_suffix = get_dir_time_suffix()
        target_folder = os.path.join(dirpath, dir_time_suffix)
        if not os.path.exists(target_folder):
            os.makedirs(target_folder)

        layer_files = []
        try:
            upload_session = instance.get_upload_session()
            if upload_session:
                layer_files = list(
                    LayerFile.objects.filter(upload_session=upload_session))
                if layer_files:
                    # Copy all Layer related files into a temporary folder
                    for lyr in layer_files:
                        if storage.exists(str(lyr.file)):
                            geonode_layer_path = storage.path(str(lyr.file))
                            shutil.copy2(geonode_layer_path, target_folder)
                        else:
                            return _no_files_response()

            # Check we can access the original files
            if not layer_files:
                return _no_files_response()

            # Let's check for associated SLD files (if any)
            try:
                for s in instance.styles.all():
                    sld_file_path = os.path.join(target_folder,
                                                 "".join([s.name, ".sld"]))
                    with open(sld_file_path, "w") as sld_file:
                        sld_file.write(s.sld_body.strip())
                    try:
                        # Collecting headers and cookies
                        headers, access_token = get_headers(
                            request, urlsplit(s.sld_url), s.sld_url)

                        response, content = http_client.get(s.sld_url,
                                                            headers=headers,
                                                            timeout=TIMEOUT,
                                                            user=request.user)
                        sld_remote_content = response.text
                        sld_file_path = os.path.join(
                            target_folder, "".join([s.name, "_remote.sld"]))
                        with open(sld_file_path, "w") as sld_file:
                            sld_file.write(sld_remote_content.strip())
                    except Exception:
                        # Best-effort: a failed remote SLD fetch must not
                        # abort the whole download.
                        traceback.print_exc()
                        logger.debug(traceback.format_exc())
            except Exception:
                traceback.print_exc()
                logger.debug(traceback.format_exc())

            # Let's dump metadata
            target_md_folder = os.path.join(target_folder, ".metadata")
            if not os.path.exists(target_md_folder):
                os.makedirs(target_md_folder)

            try:
                dump_file = os.path.join(target_md_folder,
                                         "".join([instance.name, ".dump"]))
                with open(dump_file, 'w') as outfile:
                    serialized_obj = json_serializer_producer(
                        model_to_dict(instance))
                    json.dump(serialized_obj, outfile)

                links = Link.objects.filter(resource=instance.resourcebase_ptr)
                for link in links:
                    link_name = slugify(link.name)
                    link_file = os.path.join(
                        target_md_folder,
                        "".join([link_name, ".%s" % link.extension]))
                    # BUG FIX: the original ``link.link_type in ('data')``
                    # was a substring test against the *string* 'data'.
                    if link.link_type == 'data':
                        # Skipping 'data' download links
                        continue
                    elif link.link_type in ('metadata', 'image'):
                        # Dumping metadata files and images
                        try:
                            # Collecting headers and cookies
                            headers, access_token = get_headers(
                                request, urlsplit(link.url), link.url)

                            response, raw = http_client.get(
                                link.url,
                                stream=True,
                                headers=headers,
                                timeout=TIMEOUT,
                                user=request.user)
                            raw.decode_content = True
                            # BUG FIX: the original used a bare
                            # ``with open(link_file, "wb"):`` (no ``as``) and
                            # then passed the *path string* to copyfileobj,
                            # which raised for every link and wrote nothing.
                            with open(link_file, "wb") as out_file:
                                shutil.copyfileobj(raw, out_file)
                        except Exception:
                            traceback.print_exc()
                            logger.debug(traceback.format_exc())
                    elif link.link_type.startswith('OGC'):
                        # Dumping OGC/OWS links
                        with open(link_file, "w") as ogc_file:
                            ogc_file.write(link.url.strip())
            except Exception:
                traceback.print_exc()
                logger.debug(traceback.format_exc())

            # ZIP everything and return
            target_file_name = "".join([instance.name, ".zip"])
            target_file = os.path.join(dirpath, target_file_name)
            zip_dir(target_folder, target_file)
            register_event(request, 'download', instance)
            response = HttpResponse(content=open(target_file, mode='rb'),
                                    status=200,
                                    content_type="application/zip")
            response[
                'Content-Disposition'] = 'attachment; filename="%s"' % target_file_name
            return response
        except NotImplementedError:
            traceback.print_exc()
            logger.debug(traceback.format_exc())
            return _no_files_response()
    return HttpResponse(loader.render_to_string('401.html',
                                                context={
                                                    'error_title':
                                                    _("Not Authorized"),
                                                    'error_message':
                                                    _not_authorized
                                                },
                                                request=request),
                        status=403)
Example #5
0
def download(request, resourceid, sender=Layer):
    """
    Zip a Layer's files, styles and metadata links and return the archive.

    Returns a JSON 404 error when the layer files cannot be located and a
    JSON 403 error when the resource is not a downloadable Layer.
    """
    instance = resolve_object(
        request,
        sender, {'pk': resourceid},
        permission='base.download_resourcebase',
        permission_msg=_(
            "You are not permitted to save or edit this resource."))

    if isinstance(instance, Layer):
        try:
            upload_session = instance.get_upload_session()
            layer_files = list(
                LayerFile.objects.filter(upload_session=upload_session))

            # Create Target Folder
            dirpath = tempfile.mkdtemp()
            dir_time_suffix = get_dir_time_suffix()
            target_folder = os.path.join(dirpath, dir_time_suffix)
            if not os.path.exists(target_folder):
                os.makedirs(target_folder)

            # Copy all Layer related files into a temporary folder
            for layer_file in layer_files:
                if storage.exists(layer_file.file):
                    geonode_layer_path = storage.path(layer_file.file)
                    shutil.copy2(geonode_layer_path, target_folder)

            # Let's check for associated SLD files (if any)
            try:
                for s in instance.styles.all():
                    sld_file_path = os.path.join(target_folder,
                                                 "".join([s.name, ".sld"]))
                    # Context managers replace the original open()/close()
                    # pairs; the original also leaked a never-used, never
                    # closed read-mode handle on this path.
                    with open(sld_file_path, "w") as sld_file:
                        sld_file.write(s.sld_body.strip())

                    try:
                        # Collecting headers and cookies
                        headers, access_token = get_headers(
                            request, urlsplit(s.sld_url), s.sld_url)

                        response, content = http_client.get(s.sld_url,
                                                            headers=headers,
                                                            timeout=TIMEOUT,
                                                            user=request.user)
                        sld_remote_content = response.text
                        sld_file_path = os.path.join(
                            target_folder, "".join([s.name, "_remote.sld"]))
                        with open(sld_file_path, "w") as sld_file:
                            sld_file.write(sld_remote_content.strip())
                    except Exception:
                        # Narrowed from BaseException so SystemExit and
                        # KeyboardInterrupt still propagate.
                        traceback.print_exc()
                        logger.debug(traceback.format_exc())

            except Exception:
                traceback.print_exc()
                logger.debug(traceback.format_exc())

            # Let's dump metadata
            target_md_folder = os.path.join(target_folder, ".metadata")
            if not os.path.exists(target_md_folder):
                os.makedirs(target_md_folder)

            try:
                links = Link.objects.filter(resource=instance.resourcebase_ptr)
                for link in links:
                    link_name = slugify(link.name)
                    link_file = os.path.join(
                        target_md_folder,
                        "".join([link_name, ".%s" % link.extension]))
                    # BUG FIX: ``link.link_type in ('data')`` was a substring
                    # test against the string 'data', not a tuple membership.
                    if link.link_type == 'data':
                        # Skipping 'data' download links
                        continue
                    elif link.link_type in ('metadata', 'image'):
                        # Dumping metadata files and images
                        with open(link_file, "wb") as out_file:
                            try:
                                # Collecting headers and cookies
                                headers, access_token = get_headers(
                                    request, urlsplit(link.url), link.url)

                                response, raw = http_client.get(
                                    link.url,
                                    stream=True,
                                    headers=headers,
                                    timeout=TIMEOUT,
                                    user=request.user)
                                raw.decode_content = True
                                shutil.copyfileobj(raw, out_file)
                            except Exception:
                                traceback.print_exc()
                                logger.debug(traceback.format_exc())
                    elif link.link_type.startswith('OGC'):
                        # Dumping OGC/OWS links
                        with open(link_file, "w") as out_file:
                            out_file.write(link.url.strip())
            except Exception:
                traceback.print_exc()
                logger.debug(traceback.format_exc())

            # ZIP everything and return
            target_file_name = "".join([instance.name, ".zip"])
            target_file = os.path.join(dirpath, target_file_name)
            zip_dir(target_folder, target_file)
            # BUG FIX: the zip must be opened in binary mode; the original
            # text-mode ``open(target_file)`` raises UnicodeDecodeError on
            # Python 3 as soon as the response body is read.
            response = HttpResponse(content=open(target_file, mode='rb'),
                                    status=200,
                                    content_type="application/zip")
            response[
                'Content-Disposition'] = 'attachment; filename="%s"' % target_file_name
            return response
        except NotImplementedError:
            traceback.print_exc()
            logger.debug(traceback.format_exc())
            return HttpResponse(json.dumps({'error': 'file_not_found'}),
                                status=404,
                                content_type="application/json")

    return HttpResponse(json.dumps({'error': 'unauthorized_request'}),
                        status=403,
                        content_type="application/json")
Example #6
0
def download(request, resourceid, sender=Layer):
    """
    Zip a Layer's files, styles and metadata links and return the archive.

    Returns a JSON 404 error when the layer files cannot be located and a
    JSON 403 error when the resource is not a downloadable Layer.
    """
    instance = resolve_object(request,
                              sender,
                              {'pk': resourceid},
                              permission='base.download_resourcebase',
                              permission_msg=_("You are not permitted to save or edit this resource."))

    if isinstance(instance, Layer):
        try:
            upload_session = instance.get_upload_session()
            layer_files = list(LayerFile.objects.filter(upload_session=upload_session))

            # Create Target Folder
            dirpath = tempfile.mkdtemp()
            dir_time_suffix = get_dir_time_suffix()
            target_folder = os.path.join(dirpath, dir_time_suffix)
            if not os.path.exists(target_folder):
                os.makedirs(target_folder)

            # Copy all Layer related files into a temporary folder
            for layer_file in layer_files:
                if storage.exists(layer_file.file):
                    geonode_layer_path = storage.path(layer_file.file)
                    shutil.copy2(geonode_layer_path, target_folder)

            # Let's check for associated SLD files (if any)
            try:
                for s in instance.styles.all():
                    sld_file_path = os.path.join(target_folder, "".join([s.name, ".sld"]))
                    # Context managers replace the original open()/close()
                    # pairs; the original also leaked a never-used, never
                    # closed read-mode handle on this path.
                    with open(sld_file_path, "w") as sld_file:
                        sld_file.write(s.sld_body.strip())

                    try:
                        # Collecting headers and cookies
                        headers, access_token = get_headers(request, urlsplit(s.sld_url), s.sld_url)

                        response, content = http_client.get(
                            s.sld_url,
                            headers=headers,
                            timeout=TIMEOUT,
                            user=request.user)
                        sld_remote_content = response.text
                        sld_file_path = os.path.join(target_folder, "".join([s.name, "_remote.sld"]))
                        with open(sld_file_path, "w") as sld_file:
                            sld_file.write(sld_remote_content.strip())
                    except Exception:
                        # Narrowed from BaseException so SystemExit and
                        # KeyboardInterrupt still propagate.
                        traceback.print_exc()
                        logger.debug(traceback.format_exc())

            except Exception:
                traceback.print_exc()
                logger.debug(traceback.format_exc())

            # Let's dump metadata
            target_md_folder = os.path.join(target_folder, ".metadata")
            if not os.path.exists(target_md_folder):
                os.makedirs(target_md_folder)

            try:
                links = Link.objects.filter(resource=instance.resourcebase_ptr)
                for link in links:
                    link_name = slugify(link.name)
                    link_file = os.path.join(target_md_folder, "".join([link_name, ".%s" % link.extension]))
                    # BUG FIX: ``link.link_type in ('data')`` was a substring
                    # test against the string 'data', not a tuple membership.
                    if link.link_type == 'data':
                        # Skipping 'data' download links
                        continue
                    elif link.link_type in ('metadata', 'image'):
                        # Dumping metadata files and images
                        with open(link_file, "wb") as out_file:
                            try:
                                # Collecting headers and cookies
                                headers, access_token = get_headers(request, urlsplit(link.url), link.url)

                                response, raw = http_client.get(
                                    link.url,
                                    stream=True,
                                    headers=headers,
                                    timeout=TIMEOUT,
                                    user=request.user)
                                raw.decode_content = True
                                shutil.copyfileobj(raw, out_file)
                            except Exception:
                                traceback.print_exc()
                                logger.debug(traceback.format_exc())
                    elif link.link_type.startswith('OGC'):
                        # Dumping OGC/OWS links
                        with open(link_file, "w") as out_file:
                            out_file.write(link.url.strip())
            except Exception:
                traceback.print_exc()
                logger.debug(traceback.format_exc())

            # ZIP everything and return
            target_file_name = "".join([instance.name, ".zip"])
            target_file = os.path.join(dirpath, target_file_name)
            zip_dir(target_folder, target_file)
            # BUG FIX: the zip must be opened in binary mode; the original
            # text-mode ``open(target_file)`` raises UnicodeDecodeError on
            # Python 3 as soon as the response body is read.
            response = HttpResponse(
                content=open(target_file, mode='rb'),
                status=200,
                content_type="application/zip")
            response['Content-Disposition'] = 'attachment; filename="%s"' % target_file_name
            return response
        except NotImplementedError:
            traceback.print_exc()
            logger.debug(traceback.format_exc())
            return HttpResponse(
                json.dumps({
                    'error': 'file_not_found'
                }),
                status=404,
                content_type="application/json"
            )

    return HttpResponse(
        json.dumps({
            'error': 'unauthorized_request'
        }),
        status=403,
        content_type="application/json"
    )
Example #7
0
def _get_wcs_axis_labels(coverage_id):
    """
    Fetch the WCS "DescribeCoverage" document for ``coverage_id`` through
    the GeoNode proxy and return the two "axisLabels" declared on the
    coverage's "gml:Envelope" (e.g. ``['x', 'y']``), reordered to Lon/Lat
    when the WCS reported them Lat-first.

    The relevant fragment of the response looks like::

        <wcs:CoverageDescriptions xmlns:wcs="http://www.opengis.net/wcs/2.0"
                                  xmlns:gml="http://www.opengis.net/gml/3.2">
          <wcs:CoverageDescription gml:id="...">
            <gml:boundedBy>
              <gml:Envelope srsName="http://www.opengis.net/def/crs/EPSG/0/404000"
                            axisLabels="x y" uomLabels="m m" srsDimension="2">
                <gml:lowerCorner>-0.5 -0.5</gml:lowerCorner>
                <gml:upperCorner>580.5 924.5</gml:upperCorner>
              </gml:Envelope>
            </gml:boundedBy>
            ...
          </wcs:CoverageDescription>
        </wcs:CoverageDescriptions>

    Returns ``None`` when the request fails or no two-element label pair
    can be extracted.
    """
    def _swap(_axis_labels):
        # WCS 2.0.1 swaps the order of the Lon/Lat axis to Lat/Lon.
        # BUG FIX: the original test was ``_axis_labels[0].lower() in 'lat'``,
        # a substring check the wrong way round -- it matched stray labels
        # such as 't' or 'a' while missing 'latitude'. A prefix match
        # expresses the intent ('lat', 'Lat', 'latitude', ...).
        if _axis_labels[0].lower().startswith('lat'):
            return [_axis_labels[1], _axis_labels[0]]
        return _axis_labels

    try:
        _describe_coverage_response, _content = http_client.get(
            _wcs_describe_coverage(coverage_id))
        _describe_coverage_response.raise_for_status()
        _root = etree.fromstring(_content.encode('UTF-8'), parser=XML_PARSER)
        _nsmap = _get_nsmap(_root.nsmap, 'wcs')
        _coverage_descriptions = _root.xpath("//wcs:CoverageDescription",
                                             namespaces=_nsmap)
        for _coverage_description in _coverage_descriptions:
            _envelope = _coverage_description.xpath(
                "gml:boundedBy/gml:Envelope", namespaces=_nsmap)
            # axisLabels is a space-separated pair, e.g. "x y" or "Lat Long".
            _axis_labels = _envelope[0].attrib["axisLabels"].split(" ")
            if _axis_labels and isinstance(_axis_labels,
                                           list) and len(_axis_labels) == 2:
                return _swap(_axis_labels)
    except Exception as e:
        logger.error(e)
    return None
Example #8
0
def _render_error_page(request, title, message, status):
    """Render the shared '401.html' error template with the given HTTP status."""
    return HttpResponse(loader.render_to_string(
        '401.html',
        context={
            'error_title': title,
            'error_message': message
        },
        request=request),
        status=status)


def download(request, resourceid, sender=Layer):
    """Stream a ZIP archive with all files belonging to a Layer resource.

    The archive bundles the originally uploaded files, any associated SLD
    styles (local body plus a best-effort remote copy), and a ``.metadata/``
    folder containing a JSON dump of the resource and its metadata/image/OGC
    links.

    Returns a ``StreamingHttpResponse`` carrying the ZIP on success, a 404
    error page when no files can be found in storage, or a 403 error page
    when the resolved resource is not a ``Layer``.
    """
    _not_authorized = _("You are not authorized to download this resource.")
    _not_permitted = _("You are not permitted to save or edit this resource.")
    _no_files_found = _(
        "No files have been found for this resource. Please, contact a system administrator."
    )

    # Permission check happens inside resolve_object ('download_resourcebase').
    instance = resolve_object(request,
                              sender, {'pk': resourceid},
                              permission='base.download_resourcebase',
                              permission_msg=_not_permitted)

    if isinstance(instance, Layer):
        layer_files = []
        file_list = []  # Descriptors of everything that goes into the ZIP
        try:
            upload_session = instance.get_upload_session()
            if upload_session:
                layer_files = list(
                    LayerFile.objects.filter(upload_session=upload_session))
                # Collect every Layer-related file from storage; a registered
                # file missing on disk aborts the whole download.
                for lyr in layer_files:
                    if not storage.exists(str(lyr.file)):
                        return _render_error_page(request,
                                                  _("No files found."),
                                                  _no_files_found, 404)
                    geonode_layer_path = storage.path(str(lyr.file))
                    file_list.append({
                        "zip_folder": "",
                        "name": lyr.file.name.split('/')[-1],
                        "data_src_file": geonode_layer_path,
                    })

            # Check we can access the original files
            if not layer_files:
                return _render_error_page(request, _("No files found."),
                                          _no_files_found, 404)

            # Let's check for associated SLD files (if any)
            try:
                for s in instance.styles.all():
                    sld_file_name = "".join([s.name, ".sld"])
                    file_list.append({
                        "zip_folder": "",
                        "name": sld_file_name,
                        "data_str": s.sld_body.strip(),
                    })
                    try:
                        # Collecting headers and cookies
                        headers, access_token = get_headers(
                            request, urlsplit(s.sld_url), s.sld_url)

                        response, content = http_client.get(s.sld_url,
                                                            headers=headers,
                                                            timeout=TIMEOUT,
                                                            user=request.user)
                        remote_sld_file_name = "".join([s.name, "_remote.sld"])
                        file_list.append({
                            "zip_folder": "",
                            "name": remote_sld_file_name,
                            "data_str": response.text,
                        })
                    except Exception:
                        # Best effort: a failing remote SLD fetch must not
                        # abort the download.
                        traceback.print_exc()
                        logger.debug(traceback.format_exc())
            except Exception:
                traceback.print_exc()
                logger.debug(traceback.format_exc())

            # Let's dump metadata
            try:
                dump_file_name = "".join([instance.name, ".dump"])
                serialized_obj = json_serializer_producer(
                    model_to_dict(instance))
                file_list.append({
                    "zip_folder": ".metadata/",
                    "name": dump_file_name,
                    "data_str": json.dumps(serialized_obj),
                })
                links = Link.objects.filter(resource=instance.resourcebase_ptr)
                for link in links:
                    link_name = slugify(link.name)
                    link_file_name = "".join([link_name, f".{link.extension}"])
                    link_file_obj = None

                    # BUGFIX: `in ('data')` was a substring test against the
                    # string 'data' (e.g. 'at' matched); use a real tuple.
                    if link.link_type in ('data',):
                        # Skipping 'data' download links
                        continue
                    elif link.link_type in ('metadata', 'image'):
                        # Dumping metadata files and images
                        try:
                            # Collecting headers and cookies
                            headers, access_token = get_headers(
                                request, urlsplit(link.url), link.url)

                            response, raw = http_client.get(link.url,
                                                            stream=True,
                                                            headers=headers,
                                                            timeout=TIMEOUT,
                                                            user=request.user)
                            raw.decode_content = True
                            if raw:  # equivalent to `raw and raw is not None`
                                link_file_obj = {
                                    "zip_folder": ".metadata/",
                                    "name": link_file_name,
                                    "data_iter": raw,
                                }
                        except Exception:
                            traceback.print_exc()
                            logger.debug(traceback.format_exc())
                    elif link.link_type.startswith('OGC'):
                        # Dumping OGC/OWS links
                        link_file_obj = {
                            "zip_folder": ".metadata/",
                            "name": link_file_name,
                            "data_str": link.url.strip(),
                        }
                    # Add file_info to the file list
                    if link_file_obj is not None:
                        file_list.append(link_file_obj)
            except Exception:
                traceback.print_exc()
                logger.debug(traceback.format_exc())

            # ZIP everything and return
            target_file_name = "".join([instance.name, ".zip"])

            target_zip = zipstream.ZipFile(mode='w',
                                           compression=zipstream.ZIP_DEFLATED,
                                           allowZip64=True)

            # Iterable: needed when the file_info carries its data as a stream
            def _iterable(source_iter):
                while True:
                    buf = source_iter.read(BUFFER_CHUNK_SIZE)
                    if not buf:
                        break
                    yield buf

            # Add files to zip. Each entry is fed from exactly one of three
            # sources: an in-memory string, a stream, or a file path.
            for file_info in file_list:
                zip_file_name = "".join(
                    [file_info['zip_folder'], file_info['name']])
                if file_info.get('data_str') is not None:
                    target_zip.writestr(arcname=zip_file_name,
                                        data=bytes(file_info['data_str'],
                                                   'utf-8'))
                elif file_info.get('data_iter') is not None:
                    target_zip.write_iter(arcname=zip_file_name,
                                          iterable=_iterable(
                                              file_info['data_iter']))
                elif file_info.get('data_src_file') is not None:
                    target_zip.write(filename=file_info['data_src_file'],
                                     arcname=zip_file_name)

            register_event(request, 'download', instance)

            # Streaming content response
            response = StreamingHttpResponse(target_zip,
                                             content_type='application/zip')
            response[
                'Content-Disposition'] = f'attachment; filename="{target_file_name}"'
            return response
        except NotImplementedError:
            traceback.print_exc()
            logger.debug(traceback.format_exc())
            return _render_error_page(request, _("No files found."),
                                      _no_files_found, 404)
    return _render_error_page(request, _("Not Authorized"), _not_authorized,
                              403)