Example #1
0
    def _apply_subsets(self, coverage, subsets):
        """Validate the supplied subsets against *coverage* and reduce them
        to a single x-axis ``Trim``.

        With no subsets given, a trim spanning the coverage's full x-extent
        is returned. A temporal trim is converted to pixel offsets along the
        x-axis using the coverage's time resolution.

        :raises InvalidSubsettingException: for more than one subset, a
            non-trim subset, or bounds outside the coverage extent.
        """
        if len(subsets) > 1:
            raise InvalidSubsettingException("Too many subsets supplied")

        if not subsets:
            # Nothing requested: cover the whole x-axis.
            return Trim("x", 0, coverage.size_x)

        subset = subsets[0]
        if not isinstance(subset, Trim):
            raise InvalidSubsettingException(
                "Invalid subsetting method: only trims are allowed")

        if not subset.is_temporal:
            # Spatial trim: bounds must lie within the coverage width.
            if subset.low < 0 or subset.high > coverage.size_x:
                raise InvalidSubsettingException(
                    "Subset size does not match coverage size.")
            return subset

        # Temporal trim: validate against the coverage's temporal extent,
        # then translate time bounds into pixel offsets on the x-axis.
        begin_time, end_time = coverage.time_extent
        if subset.low < begin_time or subset.high > end_time:
            raise InvalidSubsettingException(
                "Temporal subset does not match coverage temporal "
                "extent.")

        resolution = get_total_seconds(coverage.resolution_time)
        low = get_total_seconds(subset.low - begin_time) / resolution
        high = get_total_seconds(subset.high - begin_time) / resolution
        return Trim("x", low, high)
Example #2
0
    def __init__(self, axis, low=None, high=None):
        """Create a trim subset for *axis* with optional bounds.

        :raises InvalidSubsettingException: when both bounds are supplied
            and the lower bound exceeds the upper one.
        """
        super(Trim, self).__init__(axis)

        # Only validate ordering when both bounds are actually present;
        # an open-ended trim (one bound left as None) is always legal.
        bounded = low is not None and high is not None
        if bounded and low > high:
            raise InvalidSubsettingException(
                "Invalid bounds: lower bound greater than upper bound.")

        self.low = low
        self.high = high
Example #3
0
    def _check_subset(self, subset):
        """Ensure *subset* is a valid, allowed and non-duplicate subset.

        :raises ValueError: when the argument is not a ``Subset`` at all.
        :raises InvalidSubsettingException: when its type is not among the
            allowed ones or its axis is already subsetted.
        """
        if not isinstance(subset, Subset):
            raise ValueError("Supplied argument is not a subset.")

        if not isinstance(subset, self.allowed_types):
            raise InvalidSubsettingException("Supplied subset is not allowed.")

        # Reject a second subset for any axis that is already covered.
        axis_clashes = (
            (self.has_x and subset.is_x, "X-axis"),
            (self.has_y and subset.is_y, "Y-axis"),
            (self.has_t and subset.is_temporal, "time-axis"),
        )
        for clashes, label in axis_clashes:
            if clashes:
                raise InvalidSubsettingException(
                    "Multiple subsets for %s given." % label)
Example #4
0
def parse_subset_xml(elem):
    """ Parse one subset from the WCS 2.0 XML notation. Expects an lxml.etree
        Element as parameter.

        Returns a ``Trim`` for ``DimensionTrim`` elements, a ``Slice`` for
        ``DimensionSlice`` elements and ``None`` for any other tag.

        :raises InvalidAxisLabelException: for an unknown axis label.
        :raises InvalidSubsettingException: for any other parsing error.
    """

    try:
        dimension = elem.findtext(ns_wcs("Dimension"))
        parser = get_parser_for_axis(dimension)
        if elem.tag == ns_wcs("DimensionTrim"):
            return Trim(dimension, parser(elem.findtext(ns_wcs("TrimLow"))),
                        parser(elem.findtext(ns_wcs("TrimHigh"))))
        elif elem.tag == ns_wcs("DimensionSlice"):
            return Slice(dimension,
                         parser(elem.findtext(ns_wcs("SlicePoint"))))
    except InvalidAxisLabelException:
        # Preserve the specific exception type instead of re-wrapping it,
        # mirroring the behavior of parse_subset_kvp.
        raise
    except Exception as e:
        raise InvalidSubsettingException(str(e))
Example #5
0
def parse_subset_kvp(string):
    """ Parse one subset from the WCS 2.0 KVP notation.

        Returns a ``Trim`` when the input contains both bounds and a
        ``Slice`` when only a single point is present.
    """

    try:
        match = SUBSET_RE.match(string)
        if not match:
            raise Exception("Could not parse input subset string.")

        axis = match.group(1)
        low = match.group(2)
        high = match.group(4)
        parser = get_parser_for_axis(axis)

        if high is None:
            # A single value denotes a slice at that point.
            return Slice(axis, parser(low))
        return Trim(axis, parser(low), parser(high))
    except InvalidAxisLabelException:
        raise
    except Exception as e:
        raise InvalidSubsettingException(str(e))
Example #6
0
    def handle(self, request):
        """Decode the request, normalize the containment and count
        parameters, and build the ``Subsets`` used for filtering.

        :raises InvalidSubsettingException: re-raised from the ``Subsets``
            constructor when the decoded subsets are invalid.
        """
        decoder = self.get_decoder(request)
        eo_ids = decoder.eo_ids

        # Default to the most permissive containment mode.
        containment = decoder.containment
        if not containment:
            containment = "overlaps"

        # Never exceed the service-wide default item count.
        count_default = self.constraints["CountDefault"]
        count = decoder.count
        if count_default is not None:
            count = min(count, count_default)

        try:
            subsets = Subsets(
                decoder.subsets,
                crs="http://www.opengis.net/def/crs/EPSG/0/4326",
                allowed_types=Trim
            )
        # 'except ... as e' replaces the legacy Python-2-only
        # 'except ValueError, e' syntax, consistent with the other handlers.
        except ValueError as e:
            raise InvalidSubsettingException(str(e))
Example #7
0
    def handle(self, request):
        """Render all requested coverages into a package (e.g. an archive)
        and stream it back as the HTTP response.

        Resolves the requested EO IDs to collections and coverages
        (recursively descending into sub-collections), applies the decoded
        subsets, renders each coverage, writes the results into a package
        file and returns a ``StreamingHttpResponse`` over that file.
        """
        decoder = self.get_decoder(request)
        eo_ids = decoder.eo_ids

        format, format_params = decoder.format
        # NOTE(review): 'get_pacakge_writer' looks like a typo of
        # 'get_package_writer' — confirm against the class definition before
        # renaming, since the call must match the defined method name.
        writer = self.get_pacakge_writer(format, format_params)

        containment = decoder.containment

        # Clamp the requested count to the service-wide default, if set.
        count_default = self.constraints["CountDefault"]
        count = decoder.count
        if count_default is not None:
            count = min(count, count_default)

        try:
            subsets = Subsets(decoder.subsets,
                              crs="http://www.opengis.net/def/crs/EPSG/0/4326",
                              allowed_types=Trim)
        except ValueError as e:
            raise InvalidSubsettingException(str(e))

        # NOTE(review): bare 'raise' with no active exception raises a
        # RuntimeError at runtime — presumably a specific "no EO IDs given"
        # exception was intended here; confirm and replace.
        if len(eo_ids) == 0:
            raise

        # fetch a list of all requested EOObjects
        available_ids = models.EOObject.objects.filter(
            identifier__in=eo_ids).values_list("identifier", flat=True)

        # match the requested EOIDs against the available ones. If any are
        # requested, that are not available, raise and exit.
        failed = [eo_id for eo_id in eo_ids if eo_id not in available_ids]
        if failed:
            raise NoSuchDatasetSeriesOrCoverageException(failed)

        collections_qs = subsets.filter(
            models.Collection.objects.filter(identifier__in=eo_ids),
            containment="overlaps")

        # create a set of all indirectly referenced containers by iterating
        # recursively. The containment is set to "overlaps", to also include
        # collections that might have been excluded with "contains" but would
        # have matching coverages inserted.

        def recursive_lookup(super_collection, collection_set):
            # Find sub-collections of super_collection not yet visited
            # (excluding already-collected PKs prevents infinite recursion
            # on cyclic collection graphs).
            sub_collections = models.Collection.objects.filter(
                collections__in=[super_collection.pk]).exclude(
                    pk__in=map(lambda c: c.pk, collection_set))
            sub_collections = subsets.filter(sub_collections, "overlaps")

            # Add all to the set
            collection_set |= set(sub_collections)

            for sub_collection in sub_collections:
                recursive_lookup(sub_collection, collection_set)

        # Iterate over a snapshot, since recursive_lookup mutates
        # collection_set while we loop.
        collection_set = set(collections_qs)
        for collection in set(collection_set):
            recursive_lookup(collection, collection_set)

        collection_pks = map(lambda c: c.pk, collection_set)

        # Get all either directly referenced coverages or coverages that are
        # within referenced containers. Full subsetting is applied here.

        coverages_qs = models.Coverage.objects.filter(
            Q(identifier__in=eo_ids) | Q(collections__in=collection_pks))
        coverages_qs = subsets.filter(coverages_qs, containment=containment)

        # save a reference before limits are applied to obtain the full number
        # of matched coverages.
        coverages_no_limit_qs = coverages_qs

        # compute how many (if any) coverages can be retrieved. This depends on
        # the "count" parameter and default setting. Also, if we already
        # exceeded the count, limit the number of dataset series aswell
        """
        if inc_dss_section:
            num_collections = len(collection_set)
        else:
            num_collections = 0

        if num_collections < count and inc_cov_section:
            coverages_qs = coverages_qs.order_by("identifier")[:count - num_collections]
        elif num_collections == count or not inc_cov_section:
            coverages_qs = []
        else:
            coverages_qs = []
            collection_set = sorted(collection_set, key=lambda c: c.identifier)[:count]
        """

        # get a number of coverages that *would* have been included, but are not
        # because of the count parameter
        # count_all_coverages = coverages_no_limit_qs.count()

        # TODO: if containment is "within" we need to check all collections
        # again
        if containment == "within":
            collection_set = filter(lambda c: subsets.matches(c),
                                    collection_set)

        # NOTE(review): 'dataset_series' is never used below — dead variable?
        coverages = []
        dataset_series = []

        # finally iterate over everything that has been retrieved and get
        # a list of dataset series and coverages to be encoded into the response
        for eo_object in chain(coverages_qs, collection_set):
            if issubclass(eo_object.real_type, models.Coverage):
                coverages.append(eo_object.cast())

        # Reserve a unique temp file path; the fd is closed immediately
        # because only the filename is handed to the package writer.
        fd, pkg_filename = tempfile.mkstemp()
        tmp = os.fdopen(fd)
        tmp.close()
        package = writer.create_package(pkg_filename, format, format_params)

        for coverage in coverages:
            params = self.get_params(coverage, decoder, request)
            renderer = self.get_renderer(params)
            result_set = renderer.render(params)
            all_filenames = set()
            for result_item in result_set:
                # Derive a filename from the content type when the renderer
                # did not supply one.
                if not result_item.filename:
                    ext = mimetypes.guess_extension(result_item.content_type)
                    filename = coverage.identifier + ext
                else:
                    filename = result_item.filename
                if filename in all_filenames:
                    continue  # TODO: create new filename
                all_filenames.add(filename)
                location = "%s/%s" % (coverage.identifier, filename)
                writer.add_to_package(package, result_item.data_file,
                                      result_item.size, location)

        mime_type = writer.get_mime_type(package, format, format_params)
        ext = writer.get_file_extension(package, format, format_params)
        writer.cleanup(package)

        # Stream the package file back; Content-Length is set from the
        # finished file on disk.
        response = StreamingHttpResponse(tempfile_iterator(pkg_filename),
                                         mime_type)
        response["Content-Disposition"] = 'inline; filename="ows%s"' % ext
        response["Content-Length"] = str(os.path.getsize(pkg_filename))

        return response
Example #8
0
    def handle(self, request):
        """Describe an EO coverage set: resolve the requested EO IDs into
        dataset series and coverages, apply subset filters and the count
        limit, and return the XML-encoded description with its content type.

        Returns a ``(serialized_xml, content_type)`` tuple.
        """
        decoder = self.get_decoder(request)
        eo_ids = decoder.eo_ids

        # Default to the most permissive containment mode.
        containment = decoder.containment
        if not containment:
            containment = "overlaps"

        # Clamp the requested count to the service-wide default, if set.
        count_default = self.constraints["CountDefault"]
        count = decoder.count
        if count_default is not None:
            count = min(count, count_default)

        try:
            subsets = Subsets(
                decoder.subsets,
                crs="http://www.opengis.net/def/crs/EPSG/0/4326",
                allowed_types=Trim
            )
        except ValueError as e:
            raise InvalidSubsettingException(str(e))

        # check whether the DatasetSeries and CoverageDescriptions sections are
        # included
        inc_dss_section = decoder.section_included("DatasetSeriesDescriptions")
        inc_cov_section = decoder.section_included("CoverageDescriptions")

        # NOTE(review): bare 'raise' with no active exception raises a
        # RuntimeError at runtime — presumably a specific "no EO IDs given"
        # exception was intended here; confirm and replace.
        if len(eo_ids) == 0:
            raise

        # fetch the objects directly referenced by EOID
        eo_objects = models.EOObject.objects.filter(
            identifier__in=eo_ids
        ).select_subclasses()

        # check if all EOIDs are available
        available_ids = set(eo_object.identifier for eo_object in eo_objects)
        failed = [
            eo_id for eo_id in eo_ids if eo_id not in available_ids
        ]

        # fail when some objects are not available
        if failed:
            raise NoSuchDatasetSeriesOrCoverageException(failed)

        # split list of objects into Collections, Products and Coverages
        collections = []
        mosaics = []
        products = []
        coverages = []

        # Order of isinstance checks matters: e.g. a Mosaic must be caught
        # before a more general type it may also inherit from.
        for eo_object in eo_objects:
            if isinstance(eo_object, models.Collection):
                collections.append(eo_object)
            elif isinstance(eo_object, models.Mosaic):
                mosaics.append(eo_object)
            elif isinstance(eo_object, models.Product):
                products.append(eo_object)
            elif isinstance(eo_object, models.Coverage):
                coverages.append(eo_object)

        filters = subsets.get_filters(containment=containment)

        # get a QuerySet of all dataset series, directly or indirectly referenced
        all_dataset_series_qs = models.EOObject.objects.filter(
            Q(  # directly referenced Collections
                collection__isnull=False,
                identifier__in=[
                    collection.identifier for collection in collections
                ],
            ) |
            Q(  # directly referenced Products
                product__isnull=False,
                identifier__in=[product.identifier for product in products],
            ) |
            Q(  # Products within Collections
                product__isnull=False,
                product__collections__in=collections,
                **filters
            )
        )

        if inc_dss_section:
            dataset_series_qs = all_dataset_series_qs[:count]
        else:
            dataset_series_qs = models.EOObject.objects.none()

        # Allow metadata queries on coverage itself or on the
        # parent product if available
        parent_product_filters = []
        for key, value in filters.items():
            # Take the field name before the first lookup separator, e.g.
            # 'begin_time' from 'begin_time__lte'.
            prop = key.partition('__')[0]
            parent_product_filters.append(
                Q(**{
                    key: value
                }) | Q(**{
                    '%s__isnull' % prop: True,
                    'coverage__parent_product__%s' % key: value
                })
            )

        # get a QuerySet for all Coverages, directly or indirectly referenced
        all_coverages_qs = models.EOObject.objects.filter(
            *parent_product_filters
        ).filter(
            Q(  # directly referenced Coverages
                identifier__in=[
                    coverage.identifier for coverage in coverages
                ]
            ) |
            Q(  # Coverages within directly referenced Products
                coverage__parent_product__in=products,
            ) |
            Q(  # Coverages within indirectly referenced Products
                coverage__parent_product__collections__in=collections
            ) |
            Q(  # Coverages within directly referenced Collections
                coverage__collections__in=collections
            ) |
            Q(  # Coverages within directly referenced Collections
                coverage__mosaics__in=mosaics
            ) |
            Q(  # directly referenced Mosaics
                identifier__in=[
                    mosaic.identifier for mosaic in mosaics
                ]
            ) |
            Q(  # Mosaics within directly referenced Collections
                mosaic__collections__in=collections
            )
        ).select_subclasses(models.Coverage, models.Mosaic)

        all_coverages_qs = all_coverages_qs.order_by('identifier')

        # check if the CoverageDescriptions section is included. If not, use an
        # empty queryset
        if inc_cov_section:
            coverages_qs = all_coverages_qs
        else:
            coverages_qs = models.Coverage.objects.none()

        # limit coverages according to the number of dataset series
        coverages_qs = coverages_qs[:max(
            0, count - dataset_series_qs.count() - len(mosaics)
        )]

        # compute the number of all items that would match
        number_matched = all_coverages_qs.count() + all_dataset_series_qs.count()

        # create an encoder and encode the result
        encoder = WCS20EOXMLEncoder()
        return (
            encoder.serialize(
                encoder.encode_eo_coverage_set_description(
                    dataset_series_set=[
                        objects.DatasetSeries.from_model(eo_object)
                        for eo_object in dataset_series_qs
                    ],
                    coverages=[
                        objects.from_model(coverage)
                        for coverage in coverages_qs
                    ],
                    number_matched=number_matched
                ), pretty_print=True
            ),
            encoder.content_type
        )