Example #1
    def encode_contents(self, coverages_qs, dataset_series_qs):
        contents = []

        if coverages_qs:
            coverages = []

            # reduce data transfer by only selecting required elements
            # TODO: currently runs into a bug
            #coverages_qs = coverages_qs.only(
            #    "identifier", "real_content_type"
            #)

            for coverage in coverages_qs:
                coverages.append(
                    WCS("CoverageSummary",
                        WCS("CoverageId", coverage.identifier),
                        WCS("CoverageSubtype", coverage.real_type.__name__)
                    )
                )
            contents.extend(coverages)

        if dataset_series_qs:
            dataset_series_set = []

            # reduce data transfer by only selecting required elements
            # TODO: currently runs into a bug
            #dataset_series_qs = dataset_series_qs.only(
            #    "identifier", "begin_time", "end_time", "footprint"
            #)

            for dataset_series in dataset_series_qs:
                minx, miny, maxx, maxy = dataset_series.extent_wgs84

                dataset_series_set.append(
                    EOWCS("DatasetSeriesSummary",
                        OWS("WGS84BoundingBox",
                            OWS("LowerCorner", "%f %f" % (miny, minx)),
                            OWS("UpperCorner", "%f %f" % (maxy, maxx)),
                        ),
                        EOWCS("DatasetSeriesId", dataset_series.identifier),
                        GML("TimePeriod",
                            GML(
                                "beginPosition",
                                isoformat(dataset_series.begin_time)
                            ),
                            GML(
                                "endPosition",
                                isoformat(dataset_series.end_time)
                            ),
                            **{
                                ns_gml("id"): dataset_series.identifier
                                + "_timeperiod"
                            }
                        )
                    )
                )

            contents.append(WCS("Extension", *dataset_series_set))

        return WCS("Contents", *contents)
def coverage_serialize(obj):
    """ Serialize Coverage object to a JSON serializable dictionary """
    lon_min, lat_min, lon_max, lat_max = obj.extent_wgs84
    return dict(zip(COVERAGE_KEYS, (
        obj.identifier, isoformat(obj.begin_time), isoformat(obj.end_time),
        lon_min, lon_max, lat_min, lat_max,
    )))
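Note: COVERAGE_KEYS is not shown in this example. Given the value order it is presumably a tuple of dictionary keys; a minimal, self-contained sketch of how the serializer could be exercised (the key names, the stub isoformat helper, and the stand-in coverage object are all assumptions):

from collections import namedtuple
from datetime import datetime

# hypothetical key tuple matching the value order used above
COVERAGE_KEYS = (
    "identifier", "begin_time", "end_time",
    "lon_min", "lon_max", "lat_min", "lat_max",
)

# stand-in for the isoformat helper these examples import
def isoformat(dt):
    return dt.isoformat() + "Z"

Cov = namedtuple("Cov", "identifier begin_time end_time extent_wgs84")
cov = Cov("COV_A", datetime(2014, 1, 1), datetime(2014, 1, 2),
          (-10.0, 30.0, 34.0, 72.0))
print(coverage_serialize(cov))
# {'identifier': 'COV_A', 'begin_time': '2014-01-01T00:00:00Z', ...}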
Example #3
    def write(self, values, file_obj, format=None, encoding=None, pretty=False):
        def flip(point):
            return point[1], point[0]

        # ignore format
        tree = E.Metadata(
            E.EOID(values["identifier"]),
            E.BeginTime(isoformat(values["begin_time"])),
            E.EndTime(isoformat(values["end_time"])),
            E.Footprint(
                *map(lambda polygon: 
                    E.Polygon(
                        E.Exterior(
                            " ".join([
                                "%f %f" % flip(point)
                                for point in polygon.exterior_ring
                            ])
                        ),
                        *[E.Interior(
                            " ".join([
                                "%f %f" % flip(point)
                                for point in interior
                            ])
                        ) for interior in polygon[1:]]
                    ),
                    values["footprint"]
                )
            )
        )

        file_obj.write(
            etree.tostring(tree, pretty_print=pretty, encoding=encoding)
        )
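E here is an lxml-style element maker. A small sketch of the same build-and-write pattern with plain lxml (using an unqualified ElementMaker; the real E in EOxServer is namespace-aware):

from io import BytesIO

from lxml import etree
from lxml.builder import ElementMaker

E = ElementMaker()
tree = E.Metadata(
    E.EOID("COV_A"),
    E.BeginTime("2014-01-01T00:00:00Z"),
)
buf = BytesIO()
# pretty_print/encoding map directly to the keyword arguments used above
buf.write(etree.tostring(tree, pretty_print=True, encoding="UTF-8"))
print(buf.getvalue())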
Example #4
    def encode_item(self, request, item, search_context):
        link_url = request.build_absolute_uri(
            "%s?service=WCS&version=2.0.1&request=DescribeCoverage&coverageId=%s"
            % (reverse("ows"), item.identifier)
        )

        rss_item = RSS("item",
            RSS("title", item.identifier),
            RSS("description", CDATA(item.identifier)),
            RSS("link", link_url),
        )

        if "geo" in search_context.parameters:
            rss_item.append(RSS("guid", request.build_absolute_uri()))
        else:
            rss_item.append(RSS("guid", item.identifier, isPermaLink="false"))

        rss_item.extend(self.encode_item_links(request, item))

        # TODO: remove this for the general dc:date?
        if item.begin_time and item.end_time:
            rss_item.append(
                GML("TimePeriod",
                    GML("beginPosition", isoformat(item.begin_time)),
                    GML("endPosition", isoformat(item.end_time)),
                    **{ns_gml("id"): item.identifier}
                )
            )

        rss_item.extend(self.encode_spatio_temporal(item))
        return rss_item
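The RSS, GML, and ns_gml helpers follow a common lxml element-maker pattern; a hedged sketch of what they presumably look like (GML 3.2 namespace URI assumed):

from lxml.builder import ElementMaker

GML_NS = "http://www.opengis.net/gml/3.2"

RSS = ElementMaker()  # RSS item elements live in the null namespace
GML = ElementMaker(namespace=GML_NS, nsmap={"gml": GML_NS})

def ns_gml(name):
    # fully qualified attribute name, e.g. "{http://...gml/3.2}id"
    return "{%s}%s" % (GML_NS, name)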
Example #5
    def encode_spatio_temporal(self, item):
        entries = []
        if item.footprint:
            extent = item.extent_wgs84
            entries.append(
                GEORSS("box",
                    "%f %f %f %f" % (extent[1], extent[0], extent[3], extent[2])
                )
            )
            entries.append(
                GEORSS("where",
                    GML32Encoder().encode_multi_surface(
                        item.footprint, item.identifier
                    )
                )
            )

        begin_time, end_time = item.time_extent
        if begin_time and end_time:
            if begin_time != end_time:
                entries.append(
                    DC("date", "%s/%s" % (
                        isoformat(begin_time), isoformat(end_time)
                    ))
                )
            else:
                entries.append(DC("date", isoformat(begin_time)))

        return entries
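The dc:date value falls back to ISO 8601 interval notation ("start/end") whenever begin and end differ. A tiny standalone sketch of that formatting step:

from datetime import datetime

def dc_date(begin_time, end_time):
    # single instant vs. ISO 8601 interval "start/end"
    if begin_time != end_time:
        return "%s/%s" % (begin_time.isoformat(), end_time.isoformat())
    return begin_time.isoformat()

print(dc_date(datetime(2014, 1, 1), datetime(2014, 1, 2)))
# 2014-01-01T00:00:00/2014-01-02T00:00:00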
Example #6
    def encode_eo_metadata(self, coverage, request=None, subset_polygon=None):
        data_items = list(coverage.data_items.filter(
            semantic="metadata", format="eogml"
        ))
        if len(data_items) >= 1:
            with open(retrieve(data_items[0])) as f:
                earth_observation = etree.parse(f).getroot()

            if subset_polygon:
                try:
                    feature = earth_observation.xpath(
                        "om:featureOfInterest", namespaces=nsmap
                    )[0]
                    feature[0] = self.encode_footprint(
                        coverage.footprint.intersection(subset_polygon),
                        coverage.identifier
                    )
                except IndexError:
                    pass  # no featureOfInterest

        else:
            earth_observation = self.encode_earth_observation(
                coverage, subset_polygon=subset_polygon
            )

        if not request:
            lineage = None

        elif request.method == "GET":
            lineage = EOWCS("lineage",
                EOWCS("referenceGetCoverage",
                    self.encode_reference("Reference",
                        request.build_absolute_uri().replace("&", "&amp;"),
                        False
                    )
                ), GML("timePosition", isoformat(now()))
            )
        elif request.method == "POST":  # TODO: better way to do this
            href = request.build_absolute_uri().replace("&", "&amp;")
            lineage = EOWCS("lineage",
                EOWCS("referenceGetCoverage",
                    OWS("ServiceReference",
                        OWS("RequestMessage",
                            etree.parse(request).getroot()
                        ), **{ns_xlink("href"): href}
                    )
                ), GML("timePosition", isoformat(now()))
            )

        return GMLCOV("metadata",
            GMLCOV("Extension",
                EOWCS("EOMetadata",
                    earth_observation,
                    *[lineage] if lineage is not None else []
                )
            )
        )
Example #7
 def set_feature_values(self, feature, eo_object):
     """ Set the values and the geometry of the feature. This needs to be
         inline with the :meth:`create_fields` method.
     """
     feature.SetGeometry(
         ogr.CreateGeometryFromWkb(str(eo_object.footprint.wkb))
     )
     feature.SetField("id", eo_object.identifier.encode("utf-8"))
     feature.SetField("begin_time", isoformat(eo_object.begin_time))
     feature.SetField("end_time", isoformat(eo_object.end_time))
    def execute(collection, begin_time, end_time, **kwarg):
        """ The main execution function for the process.
        """

        # get the dataset series matching the requested ID
        try:
            model = models.EOObject.objects.get(identifier=collection)
        except models.EOObject.DoesNotExist:
            raise InvalidInputValueError(
                "collection", "Invalid collection name '%s'!" % collection
            )

        if models.iscollection(model):
            model = model.cast()

            # recursive dataset series lookup
            def _get_children_ids(ds):
                ds_rct = ds.real_content_type
                id_list = [ds.id]
                for child in model.eo_objects.filter(real_content_type=ds_rct):
                    id_list.extend(_get_children_ids(child))
                return id_list

            collection_ids = _get_children_ids(model)

            # prepare coverage query set
            coverages_qs = models.Coverage.objects.filter(
                collections__id__in=collection_ids
            )
            if end_time is not None:
                coverages_qs = coverages_qs.filter(begin_time__lte=end_time)
            if begin_time is not None:
                coverages_qs = coverages_qs.filter(end_time__gte=begin_time)
            coverages_qs = coverages_qs.order_by('begin_time', 'end_time')
            coverages_qs = coverages_qs.values_list(
                "begin_time", "end_time", "identifier",
                "min_x", "min_y", "max_x", "max_y"
            )

        else:
            min_x, min_y, max_x, max_y = model.extent_wgs84
            coverages_qs = ((
                model.begin_time, model.end_time, model.identifier,
                min_x, min_y, max_x, max_y
            ),)

        # create the output
        output = CDAsciiTextBuffer()
        writer = csv.writer(output, quoting=csv.QUOTE_ALL)
        header = ["starttime", "endtime", "bbox", "identifier"]
        writer.writerow(header)

        for starttime, endtime, identifier, min_x, min_y, max_x, max_y in coverages_qs:
            bbox = (min_x, min_y, max_x, max_y)
            writer.writerow([isoformat(starttime), isoformat(endtime), bbox, identifier])

        return output
Example #9
    def get_schema(self, collection=None):
        minmax = {}
        if collection:
            if collection.begin_time:
                minmax["minimum"] = isoformat(collection.begin_time)
            if collection.end_time:
                minmax["maximum"] = isoformat(collection.end_time)

        return (
            dict(name="start", type="start", **minmax),
            dict(name="end", type="end", **minmax),
            dict(name="timerel", type="relation",
                options=["intersects", "contains", "disjoint", "equals"]
            )
        )
Example #10
    def encode_execute_response(self, process, inputs, results, lineage=False):
        response_elem = WPS("ExecuteResponse",
            self.encode_process_brief(process),
            WPS("Status",
                WPS("ProcessSucceded"), # TODO: other states
                creationTime=isoformat(now())
            )
        )

        if lineage:
            # TODO: encode the actual input values for full lineage reporting
            response_elem.append(WPS("DataInputs"))

        response_elem.extend((
            WPS("OutputDefinitions", *[
                self.encode_parameter(name, parameter, False)
                for name, parameter in process.outputs.items()
            ]),
            WPS("ProcessOutputs", *[
                self.encode_output(name, process.outputs[name], data)
                for name, data in results.items()
            ])
        ))
        return response_elem
Example #11
def webclient(request, identifier):
    """
    View for webclient interface.
    
    Uses `webclient.preview_service`, `webclient.outline_service`,
    `webclient.preview_url`
    """
    
    try:
        eo_object = models.Collection.objects.get(identifier=identifier)
    
    except models.Collection.DoesNotExist:
        raise Http404("No such collection.")
    
    begin = eo_object.begin_time
    end = eo_object.end_time
    
    extent = eo_object.extent_wgs84
    # zoom to Europe if we don't have a proper extent
    if extent == (0, 0, 1, 1):
        extent = (-10, 30, 34, 72)
    reader = WebclientConfigReader(get_eoxserver_config())

    return render_to_response(
        'webclient/webclient.html', {
            "eoid": identifier,
            "ows_url": reverse("eoxserver.services.views.ows"), #reader.http_service_url,
            "preview_service": reader.preview_service,
            "outline_service": reader.outline_service,
            "preview_url": reader.preview_url or reader.http_service_url,
            "outline_url": reader.outline_url or reader.http_service_url,
            #"begin": {"date": begin.strftime("%Y-%m-%d"),
            #          "time": begin.strftime("%H:%M")},
            #"end": {"date": end.strftime("%Y-%m-%d"),
            #        "time": end.strftime("%H:%M")},
            "begin": isoformat(begin),
            "end": isoformat(end),
            "extent": "%f,%f,%f,%f" % extent,
            "debug": settings.DEBUG
        },
        context_instance=RequestContext(request)
    )
Example #12
    def seed(self, layer, grid, level_range, time):

        print layer, grid, level_range, time

        host = "http://localhost/browse/ows"

        level_0_num_tiles_y = 2  # rows
        level_0_num_tiles_x = 4  # cols

        mesh_cache = MeshCache()

        for tileLevel in level_range:

            tiles_x = level_0_num_tiles_x * pow(2, tileLevel)
            tiles_y = level_0_num_tiles_y * pow(2, tileLevel)

            # find which tiles are crossed by the extent; use float division
            # to avoid Python 2 integer truncation at higher levels
            tile_width = 360.0 / tiles_x
            tile_height = 180.0 / tiles_y

            #coverage = eoxs_models.Coverage.objects.get(identifier=result.identifier)
            layer_obj = models.DatasetSeries.objects.get(identifier=layer)

            # Go through all coverages in layer and seed only the tiles they cross and their time
            for coverage in models.CurtainCoverage.objects.filter(collections__in=[layer_obj.pk]):

                #cycle through tiles
                for col in range(tiles_x):
                    for row in range(tiles_y):

                        west = -180 + (col * tile_width)
                        east = west + tile_width
                        north = 90 - (row * tile_height)
                        south = north - tile_height

                        if coverage.footprint.intersects(Polygon.from_bbox((west, south, east, north))):
                            # don't clobber the ``time`` argument of this method
                            time_span = isoformat(coverage.begin_time) + "/" + isoformat(coverage.end_time)
                            print ('[W3DSGetTileHandler::seed] processing %s / %s / %s / %s / %s / %s' % (layer, grid, tileLevel, col, row, time_span))
                            logger.debug('[W3DSGetTileHandler::seed] processing %s / %s / %s / %s / %s / %s' % (layer, grid, tileLevel, col, row, time_span))
                            if not mesh_cache.lookup(layer, grid, tileLevel, col, row, time_span):
                                mesh_cache.request_and_store(layer, grid, tileLevel, col, row, time_span)
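A standalone sketch of the tile-grid arithmetic used above, for a level-0 grid of 4x2 tiles covering the whole globe:

def tile_bbox(col, row, level, level_0_tiles_x=4, level_0_tiles_y=2):
    tiles_x = level_0_tiles_x * 2 ** level
    tiles_y = level_0_tiles_y * 2 ** level
    tile_width = 360.0 / tiles_x
    tile_height = 180.0 / tiles_y
    west = -180 + col * tile_width
    north = 90 - row * tile_height
    # (minx, miny, maxx, maxy) as expected by Polygon.from_bbox
    return (west, north - tile_height, west + tile_width, north)

print(tile_bbox(0, 0, 0))  # (-180.0, 0.0, -90.0, 90.0)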
Example #13
def configuration(request):
    # select collections or coverages not contained in collections that are
    # visible
    qs = models.EOObject.objects.filter(
        Q(collection__isnull=False) |
        Q(
            coverage__isnull=False, coverage__visible=True,
            collections__isnull=True, collection__isnull=True
        )
    )

    # get the min/max values for begin and end time
    values = qs.aggregate(Min("begin_time"), Max("end_time"))
    start_time = values["begin_time__min"] or now() - timedelta(days=5)
    end_time = values["end_time__max"] or now()
    start_time_full = start_time - timedelta(days=5)
    end_time_full = end_time + timedelta(days=5)

    try:
        # get only coverages that are in a collection or are visible
        # limit them to 10 and select the first time, so that we can limit the
        # initial brush
        coverages_qs = models.EOObject.objects.filter(
            Q(collection__isnull=True),
            Q(collections__isnull=False) | Q(coverage__visible=True)
        )
        first = list(coverages_qs.order_by("-begin_time")[:10])[-1]
        start_time = first.begin_time
    except (models.EOObject.DoesNotExist, IndexError):
        pass

    return render(
        request, 'webclient/config.json', {
            "layers": qs,
            "start_time_full": isoformat(start_time_full),
            "end_time_full": isoformat(end_time_full),
            "start_time": isoformat(start_time),
            "end_time": isoformat(end_time)
        }
    )
Example #14
 def test_seed_merge(self):
     """ Checks the `Time` models. Checks that the tilesets only contain 
     tiles that are in the correct time span.
     """
     
     from ngeo_browse_server.mapcache.models import Time
     
     times = [(t.start_time, t.end_time) for t in Time.objects.all()]
     
     self.assertItemsEqual(self.expected_seeded_areas, times)
 
     db_filename = join(self.temp_mapcache_dir, 
                    self.expected_inserted_into_series + ".sqlite")
     
     expected_timespans = ["%s/%s" % (isoformat(area[-2]), isoformat(area[-1]))
                           for area in self.expected_seeded_areas]
     
     with sqlite3.connect(db_filename) as connection:
         cur = connection.cursor()
         cur.execute("SELECT DISTINCT dim FROM tiles;")
         timespans = [row[0] for row in cur.fetchall()]
         self.assertItemsEqual(expected_timespans, timespans)
Example #15
def configuration(request):
    collections = models.Collection.objects.all()
    coverages = filter(
        lambda c: not models.iscollection(c),
        models.Coverage.objects.filter(
            visible=True, collections__isnull=True
        )
    )

    all_objects = list(chain(collections, coverages))
    start_time = min(o.begin_time for o in all_objects)
    end_time = max(o.end_time for o in all_objects)

    return render_to_response(
        'webclient/config.json', {
            "layers": all_objects,
            "start_time_full": isoformat(start_time - timedelta(days=5)),
            "end_time_full": isoformat(end_time + timedelta(days=5)),
            "start_time": isoformat(start_time),
            "end_time": isoformat(end_time)
        },
        context_instance=RequestContext(request)
    )
Example #16
    def encode_item(self, request, item, search_context):
        link_url = request.build_absolute_uri(
            "%s?service=WCS&version=2.0.1&request=DescribeCoverage&coverageId=%s"
            % (reverse("ows"), item.identifier)
        )

        rss_item = E("item",
            E("title", item.identifier),
            # RSS("description", ), # TODO
            E("link", link_url),
        )

        if "geo" in search_context.parameters:
            rss_item.append(E("guid", request.build_absolute_uri()))
        else:
            rss_item.append(E("guid", item.identifier, isPermaLink="false"))

        rss_item.extend(self.encode_item_links(request, item))

        if item.footprint:
            extent = item.extent_wgs84
            rss_item.append(
                GEORSS("box",
                    "%f %f %f %f" % (extent[1], extent[0], extent[3], extent[2])
                )
            )

        if item.begin_time and item.end_time:
            rss_item.append(
                GML("TimePeriod",
                    GML("beginPosition", isoformat(item.begin_time)),
                    GML("endPosition", isoformat(item.end_time)),
                    **{ns_gml("id"): item.identifier}
                )
            )
        return rss_item
Example #17
    def subset_to_kvp(self, subset):
        temporal_format = lambda v: ('"%s"' % isoformat(v) if v else "*")
        spatial_format = lambda v: (str(v) if v is not None else "*")

        frmt = temporal_format if subset.is_temporal else spatial_format

        if isinstance(subset, Slice):
            value = frmt(subset.value)
        else:
            value = "%s,%s" % (frmt(subset.low), frmt(subset.high))

        crs = self.subsets.crs
        if crs:
            return "subset", "%s,%s(%s)" % (subset.axis, crs, value)
        else:
            return "subset", "%s(%s)" % (subset.axis, value)
Example #18
def _encode_common_response(process, status_elem, inputs, raw_inputs, resp_doc):
    """Encode common execute response part shared by all specific responses."""
    inputs = inputs or {}
    conf = CapabilitiesConfigReader(get_eoxserver_config())
    url = conf.http_service_url
    if url[-1] == "?":
        url = url[:-1]
    elem = WPS("ExecuteResponse",
        encode_process_brief(process),
        WPS("Status", status_elem, creationTime=isoformat(now())),
        {
            "service": "WPS",
            "version": "1.0.0",
            ns_xml("lang"): "en-US",
            "serviceInstance": (
                "%s?service=WPS&version=1.0.0&request=GetCapabilities" % url
            )
        },
    )

    if resp_doc.lineage:
        inputs_data = []
        for id_, prm in process.inputs:
            if isinstance(prm, RequestParameter):
                continue
            prm = fix_parameter(id_, prm)
            data = inputs.get(id_)
            rawinp = raw_inputs.get(prm.identifier)
            if rawinp is not None:
                inputs_data.append(_encode_input(data, prm, rawinp))
        elem.append(WPS("DataInputs", *inputs_data))

        outputs_def = []
        for id_, prm in process.outputs:
            prm = fix_parameter(id_, prm)
            outdef = resp_doc.get(prm.identifier)
            if outdef is not None:
                outputs_def.append(encode_output_def(outdef))
        elem.append(WPS("OutputDefinitions", *outputs_def))

    return elem
Example #19
 def encode_time_instant(self, time, identifier):
     return GML("TimeInstant",
         GML("timePosition", isoformat(time)),
         **{ns_gml("id"): identifier}   
     )
Example #20
 def encode_time_period(self, begin_time, end_time, identifier):
     return GML("TimePeriod",
         GML("beginPosition", isoformat(begin_time)),
         GML("endPosition", isoformat(end_time)),
         **{ns_gml("id"): identifier}
     )
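A round-trip sketch of the XML this encoder produces, with a plain lxml element maker standing in for GML/ns_gml:

from lxml import etree
from lxml.builder import ElementMaker

GML_NS = "http://www.opengis.net/gml/3.2"
GML = ElementMaker(namespace=GML_NS, nsmap={"gml": GML_NS})

tp = GML("TimePeriod",
    GML("beginPosition", "2014-01-01T00:00:00Z"),
    GML("endPosition", "2014-01-02T00:00:00Z"),
    **{"{%s}id" % GML_NS: "tp_collection_a"}
)
print(etree.tostring(tp, pretty_print=True))
# <gml:TimePeriod gml:id="tp_collection_a">
#   <gml:beginPosition>2014-01-01T00:00:00Z</gml:beginPosition>
#   <gml:endPosition>2014-01-02T00:00:00Z</gml:endPosition>
# </gml:TimePeriod>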
Example #21
                maxy = max(maxy, time_model.maxy)
                start_time = min(start_time, time_model.start_time)
                end_time = max(end_time, time_model.end_time)
                
                seed_mapcache(tileset=browse_layer_model.id, 
                              grid=browse_layer_model.grid, 
                              minx=time_model.minx, miny=time_model.miny,
                              maxx=time_model.maxx, maxy=time_model.maxy, 
                              minzoom=browse_layer_model.lowest_map_level, 
                              maxzoom=browse_layer_model.highest_map_level,
                              start_time=time_model.start_time,
                              end_time=time_model.end_time,
                              delete=True,
                              **get_mapcache_seed_config(config))
        
            logger.info("Result time span is %s/%s." % (isoformat(start_time),
                                                        isoformat(end_time)))
            times_qs.delete()
        
        time_model = mapcache_models.Time(start_time=start_time, end_time=end_time,
                                          minx=minx, miny=miny, 
                                          maxx=maxx, maxy=maxy,
                                          source=source)
        
        time_model.full_clean()
        time_model.save()
        
        seed_areas.append((minx, miny, maxx, maxy, start_time, end_time))
    
    return extent, (browse.start_time, browse.end_time)
Example #22
    def execute(self, collections, begin_time, end_time, coord_list, srid):
        """ The main execution function for the process.
        """
        eo_ids = collections.split(',')

        containment = "overlaps"

        subsets = Subsets((Trim("t", begin_time, end_time),))

        if len(eo_ids) == 0:
            # a bare ``raise`` outside an except block is itself an error
            raise ValueError("No collection identifiers given.")

        # fetch a list of all requested EOObjects
        available_ids = models.EOObject.objects.filter(
            identifier__in=eo_ids
        ).values_list("identifier", flat=True)

        # match the requested EOIDs against the available ones. If any are
        # requested, that are not available, raise and exit.
        failed = [eo_id for eo_id in eo_ids if eo_id not in available_ids]
        if failed:
            raise NoSuchDatasetSeriesOrCoverageException(failed)

        collections_qs = subsets.filter(models.Collection.objects.filter(
            identifier__in=eo_ids
        ), containment="overlaps")

        # create a set of all indirectly referenced containers by iterating
        # recursively. The containment is set to "overlaps", to also include 
        # collections that might have been excluded with "contains" but would 
        # have matching coverages inserted.

        def recursive_lookup(super_collection, collection_set):
            sub_collections = models.Collection.objects.filter(
                collections__in=[super_collection.pk]
            ).exclude(
                pk__in=map(lambda c: c.pk, collection_set)
            )
            sub_collections = subsets.filter(sub_collections, "overlaps")

            # Add all to the set
            collection_set |= set(sub_collections)

            for sub_collection in sub_collections:
                recursive_lookup(sub_collection, collection_set)

        collection_set = set(collections_qs)
        for collection in set(collection_set):
            recursive_lookup(collection, collection_set)

        collection_pks = map(lambda c: c.pk, collection_set)

        # Get all either directly referenced coverages or coverages that are
        # within referenced containers. Full subsetting is applied here.

        coverages_qs = subsets.filter(models.Coverage.objects.filter(
            Q(identifier__in=eo_ids) | Q(collections__in=collection_pks)
        ), containment=containment)


        coordinates = coord_list.split(';')

        points = []
        for coordinate in coordinates:
            x, y = coordinate.split(',')
            # parameter parsing
            point = Point(float(x), float(y))
            point.srid = srid
            points.append(point)

        points = MultiPoint(points)
        points.srid = srid


        eo_objects = coverages_qs.filter(
            footprint__intersects=points
        ).order_by('begin_time')

        output = StringIO()
        writer = csv.writer(output, quoting=csv.QUOTE_NONE)
        header = ["id", "time", "val"]
        writer.writerow(header)

        for eo_object in eo_objects:

            coverage = eo_object.cast()

            #layer = models.DatasetSeries.objects.get(identifier__in=coverage.identifier)
            layer = eo_object.collections.all()[0]

            time = isoformat(coverage.begin_time)

            data_item = coverage.data_items.get(semantic__startswith="bands")
            filename = connect(data_item)
            ds = gdal.Open(filename)

            if ds.GetProjection():
                gt = ds.GetGeoTransform()
                sr = SpatialReference(ds.GetProjection())
                points_t = points.transform(sr, clone=True)
            else:
                # no projection available: derive the geotransform from the
                # footprint and leave the points untransformed
                points_t = points
                bbox = coverage.footprint.extent
                gt = [
                    bbox[0], (bbox[2] - bbox[0]) / ds.RasterXSize, 0,
                    bbox[3], 0, (bbox[1] - bbox[3]) / ds.RasterYSize
                ]

            for index, (point, point_t) in enumerate(zip(points, points_t), start=1):
                print index, point

                if not coverage.footprint.contains(point):
                    continue

                # use the transformed point; works only if gt[2] and gt[4]
                # equal zero!
                px = int((point_t[0] - gt[0]) / gt[1])  # x pixel
                py = int((point_t[1] - gt[3]) / gt[5])  # y pixel

                pixelVal = ds.GetRasterBand(1).ReadAsArray(px, py, 1, 1)[0, 0]
                if pixelVal != -9999:
                    writer.writerow([str(layer.identifier), time, pixelVal])

        return {
            "processed": output.getvalue()
        }
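The point-to-pixel math above holds only for axis-aligned geotransforms (gt[2] == gt[4] == 0). A standalone sketch of that step:

def world_to_pixel(gt, x, y):
    # gt is a GDAL geotransform: (origin_x, pixel_w, 0, origin_y, 0, pixel_h)
    # valid only for north-up images where gt[2] == gt[4] == 0
    px = int((x - gt[0]) / gt[1])
    py = int((y - gt[3]) / gt[5])
    return px, py

gt = (-180.0, 0.1, 0.0, 90.0, 0.0, -0.1)
print(world_to_pixel(gt, 16.37, 48.21))  # (1963, 417)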
Example #23
def create_diff_label(self, master_id, slave_id, bbox, num_bands, crs, unit):
    """ The main execution function for the process.
    """

    #srid = crss.parseEPSGCode(str(crs), (crss.fromShortCode, crss.fromURN, crss.fromURL))

    master = models.RectifiedDataset.objects.get(identifier=master_id)
    slave = models.RectifiedDataset.objects.get(identifier=slave_id)

    filename_master = connect(master.data_items.get(semantic__startswith="bands"))
    filename_slave = connect(slave.data_items.get(semantic__startswith="bands"))

    ds_master = gdal.Open(filename_master, gdalconst.GA_ReadOnly)
    ds_slave = gdal.Open(filename_slave, gdalconst.GA_ReadOnly)

    master_bbox = master.footprint.extent
    slave_bbox = slave.footprint.extent

    res_x_master = (master_bbox[2] - master_bbox[0]) / ds_master.RasterXSize
    res_y_master = (master_bbox[3] - master_bbox[1]) / ds_master.RasterYSize

    res_x_slave = (slave_bbox[2] - slave_bbox[0]) / ds_slave.RasterXSize
    res_y_slave = (slave_bbox[3] - slave_bbox[1]) / ds_slave.RasterYSize

    size_x = int((bbox[2]-bbox[0])/res_x_master)
    size_y = int((bbox[3]-bbox[1])/res_y_master)

    builder = VRTBuilder(
        size_x, size_y, num_bands * 2,
        master.range_type.bands.all()[0].data_type
    )

    dst_rect_master = (
        int( math.floor((master_bbox[0] - bbox[0]) / res_x_master) ), # x offset
        int( math.floor((bbox[3] - master_bbox[3]) / res_y_master) ), # y offset
        ds_master.RasterXSize, # x size
        ds_master.RasterYSize  # y size
    )

    dst_rect_slave = (
        int( math.floor((slave_bbox[0] - bbox[0]) / res_x_slave) ), # x offset
        int( math.floor((bbox[3] - slave_bbox[3]) / res_y_slave) ), # y offset
        ds_slave.RasterXSize, # x size
        ds_slave.RasterYSize  # y size
    )

    for i in range(1, num_bands + 1):
        builder.add_simple_source(
            i, str(filename_master), i,
            src_rect=(0, 0, ds_master.RasterXSize, ds_master.RasterYSize),
            dst_rect=dst_rect_master
        )
        builder.add_simple_source(
            num_bands + i, str(filename_slave), i,
            src_rect=(0, 0, ds_slave.RasterXSize, ds_slave.RasterYSize),
            dst_rect=dst_rect_slave
        )
    

    ext = Rect(0, 0, size_x, size_y)

    pix_master = builder.dataset.GetRasterBand(1).ReadAsArray()
    pix_slave = builder.dataset.GetRasterBand(num_bands + 1).ReadAsArray()

    if num_bands == 1:
        pix_master = np.dstack((pix_master, builder.dataset.GetRasterBand(1).ReadAsArray()))
        pix_slave = np.dstack((pix_slave, builder.dataset.GetRasterBand(2).ReadAsArray()))
    else:
        for i in range(2, num_bands+1):
            pix_master = np.dstack((pix_master, builder.dataset.GetRasterBand(i).ReadAsArray()))
            pix_slave = np.dstack((pix_slave, builder.dataset.GetRasterBand(num_bands+i).ReadAsArray()))


    def _diff(a,b):
        d = np.array(a[:,:,0],'float32') - np.array(b[:,:,0],'float32')
        return d

    pix_res = _diff(pix_master, pix_slave)
    
    res_max = np.max(pix_res)
    res_min = np.min(pix_res)

    
    # Make a figure and axes with dimensions as desired.
    fig = pyplot.figure(figsize=(8,1))
    fig.patch.set_alpha(0.8)
    ax1 = fig.add_axes([0.05, 0.75, 0.9, 0.15])

    def savefig_pix(fig,fname,width,height,dpi=100):
        rdpi = 1.0/float(dpi)  
        fig.set_size_inches(width*rdpi,height*rdpi)
        fig.savefig(fname,dpi=dpi)

    # Set the colormap and norm to correspond to the data for which
    # the colorbar will be used.
    cmap = mpl.cm.RdBu
    #norm = mpl.colors.Normalize(vmin=res_min, vmax=res_max)
    res_ = max(abs(res_max), abs(res_min))
    norm = mpl.colors.Normalize(vmin=-res_, vmax=res_)

    cb1 = mpl.colorbar.ColorbarBase(ax1, cmap=cmap,
                                       norm=norm,
                                       orientation='horizontal')
    
    mis = master_id.split("_")
    master_id_label = " ".join((mis[0], mis[1], mis[2], isoformat(master.begin_time)))

    sis = slave_id.split("_")
    slave_id_label = " ".join((sis[0], sis[1], sis[2], isoformat(slave.begin_time)))


    if unit:
        label = "Difference from %s \n to %s; Unit: %s" % (
            slave_id_label, master_id_label, unit
        )
    else:
        label = "Difference from %s \n to %s" % (slave_id_label, master_id_label)

    cb1.set_label(label)


    # the output image
    basename = "%s_%s" % (self.identifier, uuid4().hex)
    filename_png = "/tmp/%s.png" % basename

    try:
        fig.savefig(filename_png, dpi=80)

        # read the rendered PNG back as bytes
        with open(filename_png, "rb") as f:
            output = f.read()

    except Exception:
        if os.path.isfile(filename_png):
            os.remove(filename_png)
        # re-raise without losing the original traceback
        raise

    else:
        os.remove(filename_png)

    return output
Example #25
    def _eop2html(coverage):
        # the extent is used below but never assigned in the original;
        # presumably the WGS 84 extent of the coverage
        ext = coverage.extent_wgs84

        yield _lb("Earth Observation:", level=0)
        yield _lb("Phenomenon Time:", level=1)
        yield _kv("start:", isoformat(coverage.begin_time), 2)
        yield _kv("stop:", isoformat(coverage.end_time), 2)
        yield _lb("Spatial Metadata:", level=1)
        yield _kv("CRS:", "EPSG:%d" % coverage.srid, 2)
        yield _lb("Extent:", level=2)
        yield _kv("north:", "%.3f deg" % ext[3], 3)
        yield _kv("west:", "%.3f deg" % ext[0], 3)
        yield _kv("east:", "%.3f deg" % ext[2], 3)
        yield _kv("south:", "%.3f deg" % ext[1], 3)


        data_items = coverage.data_items
        data_items = data_items.filter(semantic="metadata", format="eogml")
        data_items = list(data_items)
        if len(data_items) < 1:
            return

        with open(retrieve(data_items[0])) as fid:
            eop = etree.parse(fid)

        # extract metadata
        md = eop_extract(eop)

        if md.get("center"):
            yield _lb("Center:", level=2)
            yield _kv("latitude:", "%.3f dg"%md["center"][0], 3)
            yield _kv("longitude:", "%.3f dg"%md["center"][1], 3)

        def _md(key, label, level=2):
            if md.get(key) is not None:
                return _kv(label, md[key], level)
            return ""

        yield _lb("Platform:", level=1)
        yield _md("platformName", "short name:")
        yield _md("platformSID", "serial identifier:")
        yield _md("platformOrbitType", "orbit type:")

        yield _lb("Instrument:", level=1)
        yield _md("instrumentName", "short name:")
        yield _md("instrumentDescription", "description:")
        yield _md("instrumentType", "type:")

        yield _lb("Sensor:", level=1)
        yield _md("sensorType", "type:")
        yield _md("sensorResolution", "resolution:")
        yield _md("sensorOpMode", "operational mode:")
        yield _md("sensorSwathId", "swath:")

        yield _lb("Acquisition:", level=1)
        yield _md("orbitDirection", "orbit direction:")
        yield _md("orbitDuration", "orbit duration:")
        yield _md("orbitNumber", "orbit number:")
        yield _md("lastOrbitNumber", "last orbit number:")
        yield _md("ascNodeDate", "asc.node date:")
        yield _md("ascNodeLongitude", "asc.node longitude:")
        yield _md("startTimeFromAscNode", "start time from asc.node:")
        yield _md("complTimeFromAscNode", "compl.time from asc.node:")
        yield _md("wrsLatitudeGrid", "WRS latitude:")
        yield _md("wrsLongitudeGrid", "WRS longitude:")
        yield _md("sunAzimut", "sun azimut:")
        yield _md("sunElevation", "sun elevation:")
        yield _md("sunZenit", "sun zenit:")
        yield _md("intrAzimut", "instrument azimut:")
        yield _md("intrElevation", "instrument elevation:")
        yield _md("intrZenit", "instrument zenit:")
        yield _md("incidence", "incidence angle:")
        yield _md("acrossTrackIncidence", "across track inc.:")
        yield _md("alongTrackIncidence", "along track inc.:")
        yield _md("pitch", "pitch")
        yield _md("roll", "roll")
        yield _md("yaw", "yaw")
        yield _md("antennaLookDir", "antenna look dir.:")
        yield _md("dopplerFreq", "doppler frequency")
        yield _md("incidenceVariation", "incidence ang.variation:")
        yield _md("maxIncidence", "max.incidence angle")
        yield _md("minIncidence", "min.incidence angle")
        yield _md("polarChannels", "polarisation channels:")
        yield _md("polarMode", "polarisation mode:")

        if md.get("cloudCovPercent") is not None:
            yield _lb("Cloud Cover:")
            yield _kv("percentage:", md["cloudCovPercent"], 2)
            yield _md("cloudCovAsConfidence", "assessment confidence:")
            yield _md("cloudCovQuotationMode", "quotation mode:")

        if md.get("snowCovPercent") is not None:
            yield _lb("Snow Cover:")
            yield _kv("percentage:", md["snowCovPercent"], 2)
            yield _md("snowCovAsConfidence", "assessment confidence:")
            yield _md("snowCovQuotationMode", "quotation mode:")

        if len(md.get("resultQuality")) > 0:
            yield _lb("Result Quality:")
            for item in md.get("resultQuality", []):
                text = html_escape(item)
                yield '<tr><td colspan="2"><pre style="width:25em;overflow-x:scroll;">%s</pre></td></tr>'%text
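The _lb, _kv, and html_escape helpers are not shown; judging by the raw <tr> row at the end, they presumably emit HTML table rows. Hypothetical stand-ins:

import cgi

def _lb(label, level=0):
    # hypothetical: a section label spanning both table columns
    return '<tr><th colspan="2" style="padding-left:%dem">%s</th></tr>' % (level, label)

def _kv(key, value, level=0):
    # hypothetical: a key/value row
    return '<tr><td style="padding-left:%dem">%s</td><td>%s</td></tr>' % (level, key, value)

def html_escape(text):
    return cgi.escape(text)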
Example #26
 def handle(self, *args, **kwargs):
     # parse command arguments
     self.verbosity = int(kwargs.get("verbosity", 1))
     traceback = kwargs.get("traceback", False)
     self.set_up_logging(["ngeo_browse_server"], self.verbosity, traceback)
     
     browse_layer_id = kwargs.get("browse_layer_id")
     browse_type = kwargs.get("browse_type")
     if not browse_layer_id and not browse_type:
         raise CommandError("No browse layer or browse type was specified.")
     elif browse_layer_id and browse_type:
         raise CommandError("Both browse layer and browse type were specified.")
     
     start = kwargs.get("start")
     end = kwargs.get("end")
     compression = kwargs.get("compression")
     export_cache = kwargs["export_cache"]
     output_path = kwargs.get("output_path")
     
     # parse start/end if given
     if start: 
         start = getDateTime(start)
     if end:
         end = getDateTime(end)
     
     if not output_path:
         output_path = package.generate_filename(compression)
     
     with package.create(output_path, compression) as p:
         # query the browse layer
         if browse_layer_id:
             try:
                 browse_layer_model = BrowseLayer.objects.get(id=browse_layer_id)
             except BrowseLayer.DoesNotExist:
                 raise CommandError("Browse layer '%s' does not exist" 
                                    % browse_layer_id)
         else:
             try:
                 browse_layer_model = BrowseLayer.objects.get(browse_type=browse_type)
             except BrowseLayer.DoesNotExist:
                 raise CommandError("Browse layer with browse type '%s' does "
                                    "not exist" % browse_type)
         
         browse_layer = browselayer_data.BrowseLayer.from_model(browse_layer_model)
         p.set_browse_layer(
             serialize_browse_layers((browse_layer,), pretty_print=True)
         )
         
         # query browse reports; optionally filter for start/end time
         browse_reports_qs = BrowseReport.objects.all()
         
         # apply start/end filter
         if start and not end:
             browse_reports_qs = browse_reports_qs.filter(browses__start_time__gte=start)
         elif end and not start:
             browse_reports_qs = browse_reports_qs.filter(browses__end_time__lte=end)
         elif start and end:
             browse_reports_qs = browse_reports_qs.filter(browses__start_time__gte=start, 
                                                          browses__end_time__lte=end)
         
         # use count annotation to exclude all browse reports with no browses
         browse_reports_qs = browse_reports_qs.annotate(
             browse_count=Count('browses')
         ).filter(browse_layer=browse_layer_model, browse_count__gt=0)
         
         # iterate over all browse reports
         for browse_report_model in browse_reports_qs:
             browses_qs = Browse.objects.filter(
                 browse_report=browse_report_model
             )
             if start:
                 browses_qs = browses_qs.filter(start_time__gte=start)
             if end:
                 browses_qs = browses_qs.filter(end_time__lte=end)
             
             browse_report = browsereport_data.BrowseReport.from_model(
                 browse_report_model, browses_qs
             )
             
             # iterate over all browses in the query
             for browse, browse_model in izip(browse_report, browses_qs):
                 coverage = eoxs_models.RectifiedDataset.objects.get(
                     identifier=browse_model.coverage_id
                 )
                 
                 # set the file names used for the package contents
                 base_filename = browse_model.coverage_id
                 data_filename = base_filename + ".tif"
                 md_filename = base_filename + ".xml"
                 footprint_filename = base_filename + ".wkb"
                 
                 browse._file_name = data_filename
                 
                 # add optimized browse image to package
                 data_item = coverage.data_items.get(
                     semantic__startswith="bands"
                 )
                 browse_file_path = data_item.location
                 
                 with open(browse_file_path) as f:
                     p.add_browse(f, data_filename)
                     wkb = coverage.footprint.wkb
                     p.add_footprint(footprint_filename, wkb)
                 
                 if export_cache:
                     time_model = mapcache_models.Time.objects.get(
                         start_time__lte=browse_model.start_time,
                         end_time__gte=browse_model.end_time,
                         source__name=browse_layer_model.id
                     )
                     
                     # get "dim" parameter
                     dim = (isoformat(time_model.start_time) + "/" +
                            isoformat(time_model.end_time))
                     
                     # exit if a merged browse is found
                     if dim != (isoformat(browse_model.start_time) + "/" +
                            isoformat(browse_model.end_time)):
                         raise CommandError("Browse layer '%s' contains "
                                            "merged browses and exporting "
                                            "of cache is requested. Try "
                                            "without exporting the cache."
                                            % browse_layer_model.id)
                     
                     # get path to sqlite tileset and open it
                     ts = tileset.open(
                         get_tileset_path(browse_layer.id)
                     )
                     
                     for tile_desc in ts.get_tiles(
                         browse_layer.id, 
                         URN_TO_GRID[browse_layer.grid], dim=dim,
                         minzoom=browse_layer.highest_map_level,
                         maxzoom=browse_layer.lowest_map_level
                     ):
                         p.add_cache_file(*tile_desc)

             # save browse report xml and add it to the package
             p.add_browse_report(
                 serialize_browse_report(browse_report, pretty_print=True),
                 name="%s_%s_%s_%s.xml" % (
                     browse_report.browse_type,
                     browse_report.responsible_org_name,
                     browse_report.date_time.strftime("%Y%m%d%H%M%S%f"),
                     uuid.uuid4().hex
                 )
             )
Example #27
 def encode(cls, value):
     if isinstance(value, cls.dtype):
         return unicode(isoformat(value))
     raise ValueError("Invalid value type '%s'!"%type(value))
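A usage sketch of this encoder pattern (assuming cls.dtype is datetime.datetime; unicode is the Python 2 string type used throughout these examples):

from datetime import datetime

class DateTimeEncoder(object):
    dtype = datetime

    @classmethod
    def encode(cls, value):
        if isinstance(value, cls.dtype):
            return unicode(value.isoformat())
        raise ValueError("Invalid value type '%s'!" % type(value))

print(DateTimeEncoder.encode(datetime(2014, 1, 1)))  # 2014-01-01T00:00:00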
Example #28
    def get_layer_description(self, eo_object, raster_styles, geometry_styles):
        if isinstance(eo_object, models.Coverage):
            coverage = RenderCoverage.from_model(eo_object)
            return LayerDescription.from_coverage(coverage, raster_styles)
        elif isinstance(eo_object, models.Mosaic):
            coverage = RenderCoverage.from_model(eo_object)
            return LayerDescription.from_mosaic(coverage, raster_styles)
        elif isinstance(eo_object, (models.Product, models.Collection)):
            if getattr(eo_object, "product_type", None):
                browse_type_qs = eo_object.product_type.browse_types.all()
                mask_type_qs = eo_object.product_type.mask_types.all()
            elif getattr(eo_object, "collection_type", None):
                browse_type_qs = models.BrowseType.objects.filter(
                    product_type__allowed_collection_types__collections=eo_object
                )
                mask_type_qs = models.MaskType.objects.filter(
                    product_type__allowed_collection_types__collections=eo_object
                )
            else:
                browse_type_qs = models.BrowseType.objects.none()
                mask_type_qs = models.MaskType.objects.none()

            browse_types_name_and_is_gray = browse_type_qs.annotate(
                is_gray=Case(When(green_expression__isnull=True,
                                  then=Value(True)),
                             default=Value(False),
                             output_field=BooleanField())).values_list(
                                 'name', 'is_gray').distinct()
            mask_type_names = mask_type_qs.values_list('name',
                                                       flat=True).distinct()

            sub_layers = [
                LayerDescription("%s%soutlines" %
                                 (eo_object.identifier, self.suffix_separator),
                                 styles=geometry_styles,
                                 queryable=True),
                LayerDescription("%s%soutlined" %
                                 (eo_object.identifier, self.suffix_separator),
                                 styles=geometry_styles,
                                 queryable=True)
            ]
            for name, is_gray in browse_types_name_and_is_gray:
                sub_layers.append(
                    LayerDescription(
                        "%s%s%s" %
                        (eo_object.identifier, self.suffix_separator, name)
                        if name else eo_object.identifier,
                        styles=raster_styles if is_gray else []))

            for mask_type_name in mask_type_names:
                sub_layers.append(
                    LayerDescription("%s%s%s" %
                                     (eo_object.identifier,
                                      self.suffix_separator, mask_type_name),
                                     styles=geometry_styles))
                sub_layers.append(
                    LayerDescription("%s%smasked_%s" %
                                     (eo_object.identifier,
                                      self.suffix_separator, mask_type_name)))

            dimensions = {}
            if eo_object.begin_time and eo_object.end_time:
                dimensions["time"] = {
                    'min': isoformat(eo_object.begin_time),
                    'max': isoformat(eo_object.end_time),
                    'step': 'PT1S',
                    'default': isoformat(eo_object.end_time),
                    'units': 'ISO8601'
                }

            return LayerDescription(name=eo_object.identifier,
                                    bbox=eo_object.footprint.extent
                                    if eo_object.footprint else None,
                                    dimensions=dimensions,
                                    sub_layers=sub_layers)

        raise UnsupportedObject("Object %r cannot be mapped to a layer." %
                                eo_object)
Example #29
    def encode_capabilities(self, sections, coverages_qs=None, dataset_series_qs=None):
        conf = CapabilitiesConfigReader(get_eoxserver_config())

        all_sections = "all" in sections
        caps = []
        if all_sections or "serviceidentification" in sections:
            caps.append(
                OWS("ServiceIdentification",
                    OWS("Title", conf.title),
                    OWS("Abstract", conf.abstract),
                    OWS("Keywords", *[
                        OWS("Keyword", keyword) for keyword in conf.keywords
                    ]),
                    OWS("ServiceType", "OGC WCS", codeSpace="OGC"),
                    OWS("ServiceTypeVersion", "2.0.1"),
                    OWS("Profile", "http://www.opengis.net/spec/WCS_application-profile_earth-observation/1.0/conf/eowcs"),
                    OWS("Profile", "http://www.opengis.net/spec/WCS_application-profile_earth-observation/1.0/conf/eowcs_get-kvp"),
                    OWS("Profile", "http://www.opengis.net/spec/WCS_service-extension_crs/1.0/conf/crs"),
                    OWS("Profile", "http://www.opengis.net/spec/WCS/2.0/conf/core"),
                    OWS("Profile", "http://www.opengis.net/spec/WCS_protocol-binding_get-kvp/1.0/conf/get-kvp"),
                    OWS("Profile", "http://www.opengis.net/spec/WCS_protocol-binding_post-xml/1.0/conf/post-xml"),
                    OWS("Profile", "http://www.opengis.net/spec/GMLCOV/1.0/conf/gml-coverage"),
                    OWS("Profile", "http://www.opengis.net/spec/GMLCOV/1.0/conf/multipart"),
                    OWS("Profile", "http://www.opengis.net/spec/GMLCOV/1.0/conf/special-format"),
                    OWS("Profile", "http://www.opengis.net/spec/GMLCOV_geotiff-coverages/1.0/conf/geotiff-coverage"),
                    OWS("Profile", "http://www.opengis.net/spec/WCS_geotiff-coverages/1.0/conf/geotiff-coverage"),
                    OWS("Profile", "http://www.opengis.net/spec/WCS_service-model_crs-predefined/1.0/conf/crs-predefined"),
                    OWS("Profile", "http://www.opengis.net/spec/WCS_service-model_scaling+interpolation/1.0/conf/scaling+interpolation"),
                    OWS("Profile", "http://www.opengis.net/spec/WCS_service-model_band-subsetting/1.0/conf/band-subsetting"),
                    OWS("Fees", conf.fees),
                    OWS("AccessConstraints", conf.access_constraints)
                )
            )

        if all_sections or "serviceprovider" in sections:
            caps.append(
                OWS("ServiceProvider",
                    OWS("ProviderName", conf.provider_name),
                    self.encode_reference("ProviderSite", conf.provider_site),
                    OWS("ServiceContact",
                        OWS("IndividualName", conf.individual_name),
                        OWS("PositionName", conf.position_name),
                        OWS("ContactInfo",
                            OWS("Phone",
                                OWS("Voice", conf.phone_voice),
                                OWS("Facsimile", conf.phone_facsimile)
                            ),
                            OWS("Address",
                                OWS("DeliveryPoint", conf.delivery_point),
                                OWS("City", conf.city),
                                OWS("AdministrativeArea", conf.administrative_area),
                                OWS("PostalCode", conf.postal_code),
                                OWS("Country", conf.country),
                                OWS("ElectronicMailAddress", conf.electronic_mail_address)
                            ),
                            self.encode_reference(
                                "OnlineResource", conf.onlineresource
                            ),
                            OWS("HoursOfService", conf.hours_of_service),
                            OWS("ContactInstructions", conf.contact_instructions)
                        ),
                        OWS("Role", conf.role)
                    )
                )
            )


        if all_sections or "operationsmetadata" in sections:
            component = ServiceComponent(env)
            versions = ("2.0.0", "2.0.1")
            get_handlers = component.query_service_handlers(
                service="WCS", versions=versions, method="GET"
            )
            post_handlers = component.query_service_handlers(
                service="WCS", versions=versions, method="POST"
            )
            all_handlers = sorted(
                set(get_handlers + post_handlers), key=lambda h: h.request
            )

            operations = []
            for handler in all_handlers:
                methods = []
                if handler in get_handlers:
                    methods.append(
                        self.encode_reference("Get", conf.http_service_url)
                    )
                if handler in post_handlers:
                    post = self.encode_reference("Post", conf.http_service_url)
                    post.append(
                        OWS("Constraint", 
                            OWS("AllowedValues", 
                                OWS("Value", "XML")
                            ), name="PostEncoding"
                        )
                    )
                    methods.append(post)

                operations.append(
                    OWS("Operation",
                        OWS("DCP",
                            OWS("HTTP", *methods)
                        ), 
                        # apply default values as constraints
                        *[
                            OWS("Constraint",
                                OWS("NoValues"),
                                OWS("DefaultValue", str(default)),
                                name=name
                            ) for name, default 
                            in getattr(handler, "constraints", {}).items()
                        ],
                        name=handler.request
                    )
                )
            caps.append(OWS("OperationsMetadata", *operations))


        if all_sections or "servicemetadata" in sections:
            service_metadata = WCS("ServiceMetadata")

            # get the list of enabled formats from the format registry
            formats = filter(
                lambda f: f, getFormatRegistry().getSupportedFormatsWCS()
            )
            service_metadata.extend(
                map(lambda f: WCS("formatSupported", f.mimeType), formats)
            )

            # get a list of supported CRSs from the CRS registry
            supported_crss = crss.getSupportedCRS_WCS(format_function=crss.asURL)
            extension = WCS("Extension")
            service_metadata.append(extension)
            extension.extend(
                map(lambda c: CRS("crsSupported", c), supported_crss)
            )

            caps.append(service_metadata)

        inc_contents = all_sections or "contents" in sections
        inc_coverage_summary = inc_contents or "coveragesummary" in sections
        inc_dataset_series_summary = inc_contents or "datasetseriessummary" in sections
        inc_contents = inc_contents or inc_coverage_summary or inc_dataset_series_summary

        if inc_contents:
            contents = []

            if inc_coverage_summary:
                coverages = []

                # reduce data transfer by only selecting required elements
                # TODO: currently runs into a bug
                #coverages_qs = coverages_qs.only(
                #    "identifier", "real_content_type"
                #)

                for coverage in coverages_qs:
                    coverages.append(
                        WCS("CoverageSummary",
                            WCS("CoverageId", coverage.identifier),
                            WCS("CoverageSubtype", coverage.real_type.__name__)
                        )
                    )
                contents.extend(coverages)

            if inc_dataset_series_summary:
                dataset_series_set = []
                
                # reduce data transfer by only selecting required elements
                # TODO: currently runs into a bug
                #dataset_series_qs = dataset_series_qs.only(
                #    "identifier", "begin_time", "end_time", "footprint"
                #)
                
                for dataset_series in dataset_series_qs:
                    minx, miny, maxx, maxy = dataset_series.extent_wgs84

                    dataset_series_set.append(
                        EOWCS("DatasetSeriesSummary",
                            OWS("WGS84BoundingBox",
                                OWS("LowerCorner", "%f %f" % (miny, minx)),
                                OWS("UpperCorner", "%f %f" % (maxy, maxx)),
                            ),
                            EOWCS("DatasetSeriesId", dataset_series.identifier),
                            GML("TimePeriod",
                                GML("beginPosition", isoformat(dataset_series.begin_time)),
                                GML("endPosition", isoformat(dataset_series.end_time)),
                                **{ns_gml("id"): dataset_series.identifier + "_timeperiod"}
                            )
                        )
                    )

                contents.append(WCS("Extension", *dataset_series_set))

            caps.append(WCS("Contents", *contents))

        root = WCS("Capabilities", *caps, version="2.0.1", updateSequence=conf.update_sequence)
        return root
Example #30
0
    def execute(self, collection, begin_time, end_time):
        """ The main execution function for the process.
        """

        eo_ids = [collection]
        containment = "overlaps"

        subsets = Subsets((Trim("t", begin_time, end_time),))
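        # the single temporal trim on axis "t" constrains every lookup below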


        if len(eo_ids) == 0:
            raise Exception("No collection identifier given.")

        # fetch a list of all requested EOObjects
        available_ids = models.EOObject.objects.filter(
            identifier__in=eo_ids
        ).values_list("identifier", flat=True)

        # match the requested EO IDs against the available ones; if any
        # requested ID is not available, raise and exit.
        failed = [eo_id for eo_id in eo_ids if eo_id not in available_ids]
        if failed:
            raise NoSuchDatasetSeriesOrCoverageException(failed)

        collections_qs = subsets.filter(models.Collection.objects.filter(
            identifier__in=eo_ids
        ), containment="overlaps")

        # create a set of all indirectly referenced containers by iterating
        # recursively. The containment is set to "overlaps", to also include 
        # collections that might have been excluded with "contains" but would 
        # have matching coverages inserted.

        def recursive_lookup(super_collection, collection_set):
            sub_collections = models.Collection.objects.filter(
                collections__in=[super_collection.pk]
            ).exclude(
                pk__in=map(lambda c: c.pk, collection_set)
            )
            sub_collections = subsets.filter(sub_collections, "overlaps")

            # Add all to the set
            collection_set |= set(sub_collections)

            for sub_collection in sub_collections:
                recursive_lookup(sub_collection, collection_set)

        collection_set = set(collections_qs)
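        # iterate over a snapshot, since recursive_lookup mutates collection_set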
        for collection in set(collection_set):
            recursive_lookup(collection, collection_set)

        collection_pks = map(lambda c: c.pk, collection_set)

        # Get all either directly referenced coverages or coverages that are
        # within referenced containers. Full subsetting is applied here.

        coverages_qs = subsets.filter(models.Coverage.objects.filter(
            Q(identifier__in=eo_ids) | Q(collections__in=collection_pks)
        ), containment=containment)
        output = StringIO()
        writer = csv.writer(output, quoting=csv.QUOTE_ALL)
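        # quote every field; the bbox column is a stringified tuple containing commas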
        header = ["starttime", "endtime", "bbox", "identifier"]
        writer.writerow(header)

        for coverage in coverages_qs:
            starttime = coverage.begin_time
            endtime = coverage.end_time
            identifier = coverage.identifier
            bbox = coverage.extent_wgs84
            writer.writerow([isoformat(starttime), isoformat(endtime), bbox, identifier])


        return output.getvalue()
Example #31
0
def import_browse_report(p, browse_report_file, browse_layer_model, crs,
                         seed_cache_levels, import_cache_levels, config):
    """ 
    """
    
    seed_areas = []
    
    report_result = IngestBrowseReportResult()
    
    browse_report = decode_browse_report(etree.parse(browse_report_file))
    browse_report_model = create_browse_report(browse_report,
                                               browse_layer_model)
    for browse in browse_report:
        with transaction.commit_manually():
            with transaction.commit_manually(using="mapcache"):
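                # manual transactions on the default and "mapcache" databases
                # are committed or rolled back together per browse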
                try:
                    
                    result = import_browse(p, browse, browse_report_model,
                                           browse_layer_model, crs, seed_areas,
                                           config)
                    report_result.add(result)
                    
                    transaction.commit() 
                    transaction.commit(using="mapcache")
                    
                except Exception, e:
                    logger.error("Failure during import of browse '%s'." %
                                 browse.browse_identifier)
                    logger.debug(traceback.format_exc() + "\n")
                    transaction.rollback()
                    transaction.rollback(using="mapcache")
                    
                    report_result.add(IngestBrowseFailureResult(
                        browse.browse_identifier, 
                        type(e).__name__, str(e))
                    )
                    
                    continue
        
        tileset_name = browse_layer_model.id
        dim = isoformat(browse.start_time) + "/" + isoformat(browse.end_time)
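        # the tile time dimension is the browse's time span as an ISO 8601 interval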
        ts = tileset.open(get_tileset_path(tileset_name, config), mode="w")
        
        grid = URN_TO_GRID[browse_layer_model.grid]
        tile_num = 0
        
        # import cache
        for minzoom, maxzoom in import_cache_levels:
            logger.info("Importing cached tiles from zoom level %d to %d." 
                        % (minzoom, maxzoom))
            
            for x, y, z, f in p.get_cache_files(tileset_name, grid, dim):
                if z < minzoom or z > maxzoom:
                    continue
                
                ts.add_tile(tileset_name, grid, dim, x, y, z, f)
                tile_num += 1

        logger.info("Imported %d cached tiles." % tile_num)
        
        # seed cache
        for minzoom, maxzoom in seed_cache_levels:
            logger.info("Re-seeding tile cache from zoom level %d to %d."
                        % (minzoom, maxzoom))
            
            seed_mapcache(tileset=browse_layer_model.id,
                          grid=browse_layer_model.grid, 
                          minx=result.extent[0], miny=result.extent[1],
                          maxx=result.extent[2], maxy=result.extent[3], 
                          minzoom=minzoom, 
                          maxzoom=maxzoom,
                          start_time=result.time_interval[0],
                          end_time=result.time_interval[1],
                          delete=False,
                          **get_mapcache_seed_config(config))
        
            logger.info("Successfully finished seeding.")
Example #32
0
    def encode_time_instant(self, time, identifier):
        return GML("TimeInstant",
            GML("timePosition", isoformat(time)),
            **{ns_gml("id"): identifier}
        )
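        # hypothetical usage: encode_time_instant(datetime(2013, 1, 1), "t0")
        # would yield roughly:
        #   <gml:TimeInstant gml:id="t0">
        #     <gml:timePosition>2013-01-01T00:00:00Z</gml:timePosition>
        #   </gml:TimeInstant>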
Example #33
0
    def encode_summary(self, request, collection_id, item):
        template_name = getattr(
            settings, 'EOXS_OPENSEARCH_SUMMARY_TEMPLATE',
            DEFAULT_EOXS_OPENSEARCH_SUMMARY_TEMPLATE
        )

        metadata = []
        coverages = []

        if isinstance(item, models.Coverage):
            coverages = [item]
        elif isinstance(item, models.Product):
            coverages = item.coverages.all()
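            # humanize metadata keys and ISO-format datetime values for display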
            metadata = [
                (
                    name.replace('_', ' ').title(),
                    isoformat(value) if isinstance(value, datetime) else str(value)
                )
                for name, value in models.product_get_metadata(item)
            ]

        eo_om_item = item.metadata_items.filter(
            format__in=['eogml', 'eoom', 'text/xml'],
            semantic__isnull=False
        ).first()
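        # use the first matching EO O&M metadata item, if any, for the link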
        if eo_om_item is not None:
            eo_om_link = self._make_metadata_href(request, item, eo_om_item)
        else:
            eo_om_link = None

        template_params = {
            'item': item,
            'metadata': metadata,
            'atom': self._create_self_link(request, collection_id, item),
            'wms_capabilities': self._create_wms_capabilities_link(request, item),
            'map_small': self._create_map_link(request, item, 100),
            'map_large': self._create_map_link(request, item, 500),
            'eocoveragesetdescription': self._create_eo_coverage_set_description(
                request, item
            ),
            'coverages': [{
                'identifier': coverage.identifier,
                'description': self._create_coverage_description_link(
                    request, coverage
                ),
                'coverage': self._create_coverage_link(
                    request, coverage
                )}
                for coverage in coverages
            ],
            'download_link': self._create_download_link(
                request, item
            ) if isinstance(item, models.Product) else None,
            'eo_om_link': eo_om_link,
        }

        return ATOM("summary",
            CDATA(
                render_to_string(
                    template_name, template_params,
                    request=request
                )
            ),
            type="html"
        )
Example #34
0
def ingest_browse_report(parsed_browse_report, do_preprocessing=True, config=None):
    """ Ingests a browse report. reraise_exceptions if errors shall be handled 
    externally
    """
    
    try:
        # get the according browse layer
        browse_type = parsed_browse_report.browse_type
        browse_layer = models.BrowseLayer.objects.get(browse_type=browse_type)
    except models.BrowseLayer.DoesNotExist:
        raise IngestionException("Browse layer with browse type '%s' does not "
                                 "exist." % parsed_browse_report.browse_type)
    
    # generate a browse report model
    browse_report = create_browse_report(parsed_browse_report, browse_layer)
    
    # initialize the preprocessor with configuration values
    crs = None
    if browse_layer.grid == "urn:ogc:def:wkss:OGC:1.0:GoogleMapsCompatible":
        crs = "EPSG:3857"
    elif browse_layer.grid == "urn:ogc:def:wkss:OGC:1.0:GoogleCRS84Quad":
        crs = "EPSG:4326"
        
    logger.debug("Using CRS '%s' ('%s')." % (crs, browse_layer.grid))
    
    # create the required preprocessor/format selection
    format_selection = get_format_selection("GTiff",
                                            **get_format_config(config))
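    # browses are preprocessed to GTiff; format options come from the configuration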

    if do_preprocessing and not browse_layer.contains_vertical_curtains \
        and not browse_layer.contains_volumes:
        # add config parameters and custom params
        params = get_optimization_config(config)
        
        # add radiometric interval
        rad_min = browse_layer.radiometric_interval_min
        if rad_min is not None:
            params["radiometric_interval_min"] = rad_min
        else:
            rad_min = "min"
        rad_max = browse_layer.radiometric_interval_max
        if rad_max is not None:
            params["radiometric_interval_max"] = rad_max
        else:
            rad_max = "max"
        
        # add band selection
        if (browse_layer.r_band is not None and 
            browse_layer.g_band is not None and 
            browse_layer.b_band is not None):
            
            bands = [(browse_layer.r_band, rad_min, rad_max), 
                     (browse_layer.g_band, rad_min, rad_max), 
                     (browse_layer.b_band, rad_min, rad_max)]
            
            if params["bandmode"] == RGBA:
                # RGBA
                bands.append((0, 0, 0))
            
            params["bands"] = bands
        
        preprocessor = NGEOPreProcessor(format_selection, crs=crs, **params)

    elif browse_layer.contains_vertical_curtains:

        logger.info("Preparing Vertical Curtain Pre-Processor")

        params = {}

        # add radiometric interval
        rad_min = browse_layer.radiometric_interval_min
        if rad_min is not None:
            params["radiometric_interval_min"] = rad_min
        else:
            rad_min = "min"
        rad_max = browse_layer.radiometric_interval_max
        if rad_max is not None:
            params["radiometric_interval_max"] = rad_max
        else:
            rad_max = "max"

        preprocessor = VerticalCurtainPreprocessor(**params)

    elif browse_layer.contains_volumes:
        preprocessor = VolumePreProcessor()

    else:
        preprocessor = None # TODO: CopyPreprocessor
    
    report_result = IngestBrowseReportResult()
    
    succeded = []
    failed = []
    
    timestamp = datetime.utcnow().strftime("%Y%m%d%H%M%S%f")
    browse_dirname = _valid_path("%s_%s_%s_%s" % (
        browse_type, browse_report.responsible_org_name,
        browse_report.date_time.strftime("%Y%m%d%H%M%S%f"),
        timestamp
    ))
    success_dir = join(get_success_dir(config), browse_dirname)
    failure_dir = join(get_failure_dir(config), browse_dirname)
    
    if exists(success_dir):
        logger.warn("Success directory '%s' already exists." % success_dir)
    else:
        makedirs(success_dir)
    if exists(failure_dir):
        logger.warn("Failure directory '%s' already exists." % failure_dir)
    else:
        makedirs(failure_dir)
    
    # iterate over all browses in the browse report
    for parsed_browse in parsed_browse_report:
        # transaction management per browse
        with transaction.commit_manually():
            with transaction.commit_manually(using="mapcache"):
                try:
                    seed_areas = []
                    # try to ingest a single browse and record the result
                    result = ingest_browse(parsed_browse, browse_report,
                                           browse_layer, preprocessor, crs,
                                           success_dir, failure_dir,
                                           seed_areas, config=config)
                    
                    report_result.add(result)
                    succeded.append(parsed_browse)
                    
                    # commit here to allow seeding
                    transaction.commit() 
                    transaction.commit(using="mapcache")
                    
                    
                    logger.info("Commited changes to database.")

                    if not browse_layer.contains_vertical_curtains and not browse_layer.contains_volumes:
                    
                        for minx, miny, maxx, maxy, start_time, end_time in seed_areas:
                            try:
                                
                                # seed MapCache synchronously
                                # TODO: maybe replace this with an async solution
                                seed_mapcache(tileset=browse_layer.id, 
                                              grid=browse_layer.grid, 
                                              minx=minx, miny=miny, 
                                              maxx=maxx, maxy=maxy, 
                                              minzoom=browse_layer.lowest_map_level, 
                                              maxzoom=browse_layer.highest_map_level,
                                              start_time=start_time,
                                              end_time=end_time,
                                              delete=False,
                                              **get_mapcache_seed_config(config))
                                logger.info("Successfully finished seeding.")
                                
                            except Exception, e:
                                logger.warn("Seeding failed: %s" % str(e))
                    
                    elif not browse_layer.contains_volumes:

                        host = "http://localhost/browse/ows"

                        level_0_num_tiles_y = 2  # rows
                        level_0_num_tiles_x = 4  # cols

                        seed_level = range(browse_layer.lowest_map_level, browse_layer.highest_map_level)

                        for tileLevel in seed_level:

                            tiles_x = level_0_num_tiles_x * pow(2, tileLevel)
                            tiles_y = level_0_num_tiles_y * pow(2, tileLevel)

                            # find which tiles the extent crosses; float
                            # division avoids Python 2 integer truncation
                            tile_width = 360.0 / tiles_x
                            tile_height = 180.0 / tiles_y
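                            # e.g. tileLevel 2: 16 x 8 tiles of 22.5 x 22.5 degrees each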

                            coverage = eoxs_models.Coverage.objects.get(identifier=result.identifier)

                            # cycle through all tiles of this level
                            for col in range(tiles_x):
                                for row in range(tiles_y):

                                    west = -180 + (col * tile_width)
                                    east = west + tile_width
                                    north = 90 - (row * tile_height)
                                    south = north - tile_height
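                                    # tile indices count from the north-west corner (row 0 at 90N)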

                                    if coverage.footprint.intersects(Polygon.from_bbox((west, south, east, north))):

                                        try:
                                            # NOTE: The MeshFactory ignores time
                                            time = (isoformat(result.time_interval[0]) + "/" + isoformat(result.time_interval[1]))
                                            
                                            baseurl = host + '?service=W3DS&request=GetTile&version=1.0.0&crs=EPSG:4326&layer={0}&style=default&format=model/gltf'.format(browse_layer.id)
                                            url = '{0}&tileLevel={1}&tilecol={2}&tilerow={3}&time={4}'.format(baseurl, tileLevel, col, row, time)

                                            logger.info('Seeding call to URL: %s' % (url,))

                                            response = urllib2.urlopen(url)
                                            response.close()

                                        except Exception, e:
                                            logger.warn("Seeding failed: %s" % str(e))

                        transaction.commit() 

                    else:
                        pass