Example #1
    def encodeEarthObservation(self, eo_id, begin_time, end_time, footprint):
        begin_time_iso = isotime(begin_time)
        end_time_iso = isotime(end_time)
        result_time_iso = isotime(end_time)  # result time equals the acquisition end time

        return self._makeElement(
            "eop", "EarthObservation", [
                ("@gml", "id", "eop_%s" % eo_id),
                ("om", "phenomenonTime", [
                    ("gml", "TimePeriod", [
                        ("@gml", "id", "phen_time_%s" % eo_id),
                        ("gml", "beginPosition", begin_time_iso),
                        ("gml", "endPosition", end_time_iso)
                    ])
                ]),
                ("om", "resultTime", [
                    ("gml", "TimeInstant", [
                        ("@gml", "id", "res_time_%s" % eo_id),
                        ("gml", "timePosition", result_time_iso)
                    ])
                ]),
                ("om", "procedure", []),
                ("om", "observedProperty", []),
                ("om", "featureOfInterest", [
                    (self.encodeFootprint(footprint, eo_id),)
                ]),
                ("om", "result", []),
                (self.encodeMetadataProperty(eo_id),)
            ]
        )
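
A hedged usage sketch for the encoder above; the enclosing encoder class, its
_makeElement helper and the footprint object are not shown in the snippet, so
their setup here is an assumption:

# Hedged usage sketch; `encoder` and `footprint` are assumptions, as the
# enclosing class and the footprint type are not part of the snippet.
from datetime import datetime

begin = datetime(2012, 1, 1, 0, 0, 0)
end = datetime(2012, 1, 1, 0, 5, 0)
eo_element = encoder.encodeEarthObservation("b_id_1", begin, end, footprint)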
Example #2
def notify(summary, message, urgency=None, ip_address=None, config=None):
    config = config or get_ngeo_config()

    urgency = urgency or "INFO"
    if urgency not in ("INFO", "CRITICAL", "BLOCK"):
        raise ValueError("Invalid urgency value '%s'." % urgency)

    try:
        if not ip_address:
            # get the value for "notification_url" and fall back to
            # "address"
            ip_address = safe_get(config, "control", "notification_url")

            if not ip_address:
                ctrl_config = get_controller_config(
                    get_controller_config_path(config))

                logger.debug(
                    "No 'notification_url' present. Trying to fall back to "
                    "registered IP address.")
                ip_address = safe_get(ctrl_config, CONTROLLER_SERVER_SECTION,
                                      "address")

    except (IOError, NoSectionError):
        # probably no config file present, so IP cannot be determined.
        pass

    if not ip_address:
        # cannot log this error as we would run into an endless loop
        logger.info("Cannot send notification to CTRL.")
        return

    tree = E(
        "notifyControllerServer",
        E("header", E("timestamp", isotime(now())),
          E("instance", get_instance_id(config)), E("subsystem", "BROW"),
          E("urgency", urgency)),
        E("body", E("summary", summary), E("message", message)))

    if not ip_address.startswith(("http://", "https://")):
        ip_address = "http://%s" % ip_address

    if not ip_address.endswith("/notify"):
        ip_address += "/notify"

    logger.info("Sending notification to CTRL at IP '%s'." % ip_address)

    req = urllib2.Request(url=ip_address,
                          data=etree.tostring(tree, pretty_print=True),
                          headers={'Content-Type': 'application/xml'})
    try:
        urllib2.urlopen(req, timeout=1)
    except (urllib2.HTTPError, urllib2.URLError) as e:
        logger.info("Error sending notification: %s" % e)
        logger.debug(traceback.format_exc() + "\n")
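
A minimal usage sketch for notify(); resolution of the CTRL address is left to
the configuration lookups shown above, so only the message fields are passed:

# Minimal usage sketch; urgency must be "INFO", "CRITICAL" or "BLOCK".
notify(
    summary="Ingestion failed",
    message="Browse report could not be ingested.",
    urgency="CRITICAL",
)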
Example #3
def notify(summary, message, urgency=None, ip_address=None, config=None):
    config = config or get_ngeo_config()

    urgency = urgency or "INFO"
    if urgency not in ("INFO", "CRITICAL", "BLOCK"):
        raise ValueError("Invalid urgency value '%s'." % urgency)

    try:
        if not ip_address:
            ctrl_config = get_controller_config(get_controller_config_path(config))
            ip_address = safe_get(ctrl_config, CONTROLLER_SERVER_SECTION, "address")
    except IOError:
        # probably no config file present, so IP cannot be determined.
        pass

    if not ip_address:
        return

    tree = E("notifyControllerServer",
        E("header",
            E("timestamp", isotime(now())),
            E("instance", get_instance_id(config)),
            E("subsystem", "BROW"),
            E("urgency", urgency)
        ),
        E("body",
            E("summary", summary),
            E("message", message)
        )
    )

    req = urllib2.Request(
        url="http://%s/notify" % ip_address,
        data=etree.tostring(tree, pretty_print=True),
        headers={'Content-Type': 'text/xml'}
    )
    try:
        urllib2.urlopen(req, timeout=1)
    except (urllib2.HTTPError, urllib2.URLError):
        # could not send notification. Out of options
        pass
Example #4
    def handle(self, *args, **kwargs):
        System.init()
        
        # parse command arguments
        self.verbosity = int(kwargs.get("verbosity", 1))
        traceback = kwargs.get("traceback", False)
        self.set_up_logging(["ngeo_browse_server"], self.verbosity, traceback)

        logger.info("Starting browse export from command line.")

        browse_layer_id = kwargs.get("browse_layer_id")
        browse_type = kwargs.get("browse_type")
        if not browse_layer_id and not browse_type:
            logger.error("No browse layer or browse type was specified.")
            raise CommandError("No browse layer or browse type was specified.")
        elif browse_layer_id and browse_type:
            logger.error("Both browse layer and browse type were specified.")
            raise CommandError("Both browse layer and browse type were specified.")
        
        start = kwargs.get("start")
        end = kwargs.get("end")
        compression = kwargs.get("compression")
        export_cache = kwargs["export_cache"]
        output_path = kwargs.get("output_path")
        
        # parse start/end if given
        if start: 
            start = getDateTime(start)
        if end:
            end = getDateTime(end)
        
        if not output_path:
            output_path = package.generate_filename(compression)
        
        with package.create(output_path, compression) as p:
            # query the browse layer
            if browse_layer_id:
                try:
                    browse_layer_model = BrowseLayer.objects.get(id=browse_layer_id)
                except BrowseLayer.DoesNotExist:
                    logger.error("Browse layer '%s' does not exist" 
                                 % browse_layer_id)
                    raise CommandError("Browse layer '%s' does not exist" 
                                       % browse_layer_id)
            else:
                try:
                    browse_layer_model = BrowseLayer.objects.get(browse_type=browse_type)
                except BrowseLayer.DoesNotExist:
                    logger.error("Browse layer with browse type '%s' does "
                                 "not exist" % browse_type)
                    raise CommandError("Browse layer with browse type '%s' does "
                                       "not exist" % browse_type)
            
            browse_layer = browselayer_data.BrowseLayer.from_model(browse_layer_model)
            p.set_browse_layer(
                serialize_browse_layers((browse_layer,), pretty_print=True)
            )
            
            # query browse reports; optionally filter for start/end time
            browse_reports_qs = BrowseReport.objects.all()
            
            # apply start/end filter
            if start and not end:
                browse_reports_qs = browse_reports_qs.filter(browses__start_time__gte=start)
            elif end and not start:
                browse_reports_qs = browse_reports_qs.filter(browses__end_time__lte=end)
            elif start and end:
                browse_reports_qs = browse_reports_qs.filter(browses__start_time__gte=start, 
                                                             browses__end_time__lte=end)
            
            # use count annotation to exclude all browse reports with no browses
            browse_reports_qs = browse_reports_qs.annotate(
                browse_count=Count('browses')
            ).filter(browse_layer=browse_layer_model, browse_count__gt=0)
            
            # iterate over all browse reports
            for browse_report_model in browse_reports_qs:
                browses_qs = Browse.objects.filter(
                    browse_report=browse_report_model
                )
                if start:
                    browses_qs = browses_qs.filter(start_time__gte=start)
                if end:
                    browses_qs = browses_qs.filter(end_time__lte=end)
                
                browse_report = browsereport_data.BrowseReport.from_model(
                    browse_report_model, browses_qs
                )
                
                # iterate over all browses in the query
                for browse, browse_model in izip(browse_report, browses_qs):
                    coverage_wrapper = System.getRegistry().getFromFactory(
                        "resources.coverages.wrappers.EOCoverageFactory",
                        {"obj_id": browse_model.coverage_id}
                    )
                    
                    # set the filenames of the package entries
                    base_filename = browse_model.coverage_id
                    data_filename = base_filename + ".tif"
                    md_filename = base_filename + ".xml"
                    footprint_filename = base_filename + ".wkb"
                    
                    browse._file_name = data_filename
                    
                    # add optimized browse image to package
                    data_package = coverage_wrapper.getData()
                    data_package.prepareAccess()
                    browse_file_path = data_package.getGDALDatasetIdentifier()
                    with open(browse_file_path, "rb") as f:
                        p.add_browse(f, data_filename)
                        wkb = coverage_wrapper.getFootprint().wkb
                        p.add_footprint(footprint_filename, wkb)
                    
                    if export_cache:
                        time_model = mapcache_models.Time.objects.get(
                            start_time__lte=browse_model.start_time,
                            end_time__gte=browse_model.end_time,
                            source__name=browse_layer_model.id
                        )
                        
                        # get "dim" parameter
                        dim = (isotime(time_model.start_time) + "/" +
                               isotime(time_model.end_time))
                        
                        # exit if a merged browse is found
                        if dim != (isotime(browse_model.start_time) + "/" +
                               isotime(browse_model.end_time)):
                            logger.error("Browse layer '%s' contains "
                                         "merged browses and exporting "
                                         "of cache is requested. Try "
                                         "without exporting the cache."
                                         % browse_layer_model.id)
                            raise CommandError("Browse layer '%s' contains "
                                               "merged browses and exporting "
                                               "of cache is requested. Try "
                                               "without exporting the cache."
                                               % browse_layer_model.id)
                        
                        # get path to sqlite tileset and open it
                        ts = tileset.open(
                            get_tileset_path(browse_layer.browse_type)
                        )
                        
                        for tile_desc in ts.get_tiles(
                            browse_layer.id, 
                            URN_TO_GRID[browse_layer.grid], dim=dim,
                            minzoom=browse_layer.highest_map_level,
                            maxzoom=browse_layer.lowest_map_level
                        ):
                            p.add_cache_file(*tile_desc)

                # save browse report xml and add it to the package
                p.add_browse_report(
                    serialize_browse_report(browse_report, pretty_print=True),
                    name="%s_%s_%s_%s.xml" % (
                        browse_report.browse_type,
                        browse_report.responsible_org_name,
                        browse_report.date_time.strftime("%Y%m%d%H%M%S%f"),
                        uuid.uuid4().hex
                    )
                )

        logger.info("Successfully finished browse export from command line.")
Example #5
            maxy = max(maxy, time_model.maxy)
            start_time = min(start_time, time_model.start_time)
            end_time = max(end_time, time_model.end_time)

            seed_mapcache(tileset=browse_layer_model.id,
                          grid=browse_layer_model.grid,
                          minx=time_model.minx, miny=time_model.miny,
                          maxx=time_model.maxx, maxy=time_model.maxy,
                          minzoom=browse_layer_model.lowest_map_level,
                          maxzoom=browse_layer_model.highest_map_level,
                          start_time=time_model.start_time,
                          end_time=time_model.end_time,
                          delete=True,
                          **get_mapcache_seed_config(config))

        logger.info("Result time span is %s/%s." % (isotime(start_time),
                                                    isotime(end_time)))
        times_qs.delete()

    time_model = mapcache_models.Time(start_time=start_time, end_time=end_time,
                                      minx=minx, miny=miny,
                                      maxx=maxx, maxy=maxy,
                                      source=source)

    time_model.full_clean()
    time_model.save()

    seed_areas.append((minx, miny, maxx, maxy, start_time, end_time))

    return extent, (browse.start_time, browse.end_time)
Example #6
def notify(summary, message, urgency=None, ip_address=None, config=None):
    config = config or get_ngeo_config()

    urgency = urgency or "INFO"
    if urgency not in ("INFO", "CRITICAL", "BLOCK"):
        raise ValueError("Invalid urgency value '%s'." % urgency)

    try:
        if not ip_address:
            # get the value for "notification_url" and fall back to
            # "address"
            ip_address = safe_get(
                config, "control", "notification_url"
            )

            if not ip_address:
                ctrl_config = get_controller_config(
                    get_controller_config_path(config)
                )

                logger.debug(
                    "No 'notification_url' present. Trying to fall back to "
                    "registered IP address."
                )
                ip_address = safe_get(
                    ctrl_config, CONTROLLER_SERVER_SECTION, "address"
                )

    except (IOError, NoSectionError):
        # probably no config file present, so IP cannot be determined.
        pass

    if not ip_address:
        # cannot log this error as we would run into an endless loop
        logger.info("Cannot send notification to CTRL.")
        return

    tree = E("notifyControllerServer",
        E("header",
            E("timestamp", isotime(now())),
            E("instance", get_instance_id(config)),
            E("subsystem", "BROW"),
            E("urgency", urgency)
        ),
        E("body",
            E("summary", summary),
            E("message", message)
        )
    )

    if not ip_address.startswith(("http://", "https://")):
        ip_address = "http://%s" % ip_address

    if not ip_address.endswith("/notify"):
        ip_address += "/notify"

    logger.info("Sending notification to CTRL at IP '%s'." % ip_address)

    req = urllib2.Request(
        url=ip_address,
        data=etree.tostring(tree, pretty_print=True),
        headers={'Content-Type': 'application/xml'}
    )
    try:
        urllib2.urlopen(req, timeout=1)
    except (urllib2.HTTPError, urllib2.URLError) as e:
        logger.info("Error sending notification: %s" % e)
        logger.debug(traceback.format_exc() + "\n")
Example #7
File: imp.py Project: EOX-A/ngeo-b
def import_browse_report(p, browse_report_file, browse_layer_model, crs,
                         seed_cache_levels, import_cache_levels, config):
    """
    """

    seed_areas = []

    report_result = IngestBrowseReportResult()

    browse_report = decode_browse_report(etree.parse(browse_report_file))
    browse_report_model = create_browse_report(browse_report,
                                               browse_layer_model)
    for browse in browse_report:
        with transaction.commit_manually():
            with transaction.commit_manually(using="mapcache"):
                try:

                    result = import_browse(p, browse, browse_report_model,
                                           browse_layer_model, crs, seed_areas,
                                           config)
                    report_result.add(result)

                    transaction.commit()
                    transaction.commit(using="mapcache")

                except Exception as e:
                    logger.error("Failure during import of browse '%s'." %
                                 browse.browse_identifier)
                    logger.debug(traceback.format_exc() + "\n")
                    transaction.rollback()
                    transaction.rollback(using="mapcache")

                    report_result.add(IngestBrowseFailureResult(
                        browse.browse_identifier,
                        type(e).__name__, str(e))
                    )

                    continue

        tileset_name = browse_layer_model.id
        dim = isotime(browse.start_time) + "/" + isotime(browse.end_time)
        ts = tileset.open(
            get_tileset_path(browse_layer_model.browse_type, config), mode="w"
        )

        grid = URN_TO_GRID[browse_layer_model.grid]
        tile_num = 0

        # import cache
        for minzoom, maxzoom in import_cache_levels:
            logger.info("Importing cached tiles from zoom level %d to %d."
                        % (minzoom, maxzoom))

            for x, y, z, f in p.get_cache_files(tileset_name, grid, dim):
                if z < minzoom or z > maxzoom:
                    continue

                ts.add_tile(tileset_name, grid, dim, x, y, z, f)
                tile_num += 1

        logger.info("Imported %d cached tiles." % tile_num)

        # seed cache
        for minzoom, maxzoom in seed_cache_levels:
            logger.info("Re-seeding tile cache from zoom level %d to %d."
                        % (minzoom, maxzoom))

            seed_mapcache(tileset=browse_layer_model.id,
                          grid=browse_layer_model.grid,
                          minx=result.extent[0], miny=result.extent[1],
                          maxx=result.extent[2], maxy=result.extent[3],
                          minzoom=minzoom,
                          maxzoom=maxzoom,
                          start_time=result.time_interval[0],
                          end_time=result.time_interval[1],
                          delete=False,
                          **get_mapcache_seed_config(config))

            logger.info("Successfully finished seeding.")
Example #8
    def handle(self, *browse_layer_id, **kwargs):
        # parse command arguments
        self.verbosity = int(kwargs.get("verbosity", 1))
        traceback = kwargs.get("traceback", False)
        self.set_up_logging(["ngeo_browse_server"], self.verbosity, traceback)

        # check consistency
        if not len(browse_layer_id):
            logger.error("No browse layer given.")
            raise CommandError("No browse layer given.")
        elif len(browse_layer_id) > 1:
            logger.error("Too many browse layers given.")
            raise CommandError("Too many browse layers given.")
        else:
            browse_layer_id = browse_layer_id[0]

        try:
            # get the according browse layer
            browse_layer = models.BrowseLayer.objects.get(id=browse_layer_id)
        except models.BrowseLayer.DoesNotExist:
            logger.error("Browse layer '%s' does not exist." % browse_layer_id)
            raise CommandError("Browse layer '%s' does not exist." %
                               browse_layer_id)

        start = kwargs.get("start")
        end = kwargs.get("end")
        dry_run = kwargs.get("dry_run")
        force = kwargs.get("force")

        # parse start/end if given
        if start:
            start = getDateTime(start)
        if end:
            end = getDateTime(end)

        if force:
            logger.info("Starting reseeding browse layer '%s'." %
                        browse_layer_id)
        else:
            logger.info("Starting seeding browse layer '%s'." %
                        browse_layer_id)

        times_qs = mapcache_models.Time.objects.filter(source=browse_layer.id)

        # apply start/end filter
        if start and not end:
            times_qs = times_qs.filter(start_time__gte=start)
        elif end and not start:
            times_qs = times_qs.filter(end_time__lte=end)
        elif start and end:
            times_qs = times_qs.filter(start_time__gte=start,
                                       end_time__lte=end)

        for time_model in times_qs:

            if dry_run:
                logger.info("Time span to (re)seed is %s/%s." % (isotime(
                    time_model.start_time), isotime(time_model.end_time)))
            else:
                try:
                    logger.info("(Re)seeding time span %s/%s." % (isotime(
                        time_model.start_time), isotime(time_model.end_time)))
                    seed_mapcache(tileset=browse_layer.id,
                                  grid=browse_layer.grid,
                                  minx=time_model.minx,
                                  miny=time_model.miny,
                                  maxx=time_model.maxx,
                                  maxy=time_model.maxy,
                                  minzoom=browse_layer.lowest_map_level,
                                  maxzoom=browse_layer.highest_map_level,
                                  start_time=time_model.start_time,
                                  end_time=time_model.end_time,
                                  delete=False,
                                  force=force,
                                  **get_mapcache_seed_config())
                    logger.info("Successfully finished (re)seeding time span.")
                except Exception as e:
                    logger.warn("(Re)seeding failed: %s" % str(e))
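
A hedged invocation sketch for the seeding command above; the command name
"ngeo_mapcache_seed" is an assumption, as the snippet does not show how the
command is registered:

# Hedged invocation sketch; the command name is hypothetical.
from django.core.management import call_command

call_command("ngeo_mapcache_seed", "TEST_SAR",
             start="2012-01-01T00:00:00Z", end="2012-12-31T23:59:59Z",
             dry_run=True)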
Example #9
            seed_mapcache(tileset=browse_layer_model.id,
                          grid=browse_layer_model.grid,
                          minx=time_model.minx,
                          miny=time_model.miny,
                          maxx=time_model.maxx,
                          maxy=time_model.maxy,
                          minzoom=browse_layer_model.lowest_map_level,
                          maxzoom=browse_layer_model.highest_map_level,
                          start_time=time_model.start_time,
                          end_time=time_model.end_time,
                          delete=True,
                          **get_mapcache_seed_config(config))

        logger.info("Result time span is %s/%s." %
                    (isotime(start_time), isotime(end_time)))
        times_qs.delete()

    time_model = mapcache_models.Time(start_time=start_time,
                                      end_time=end_time,
                                      minx=minx,
                                      miny=miny,
                                      maxx=maxx,
                                      maxy=maxy,
                                      source=source)

    time_model.full_clean()
    time_model.save()

    seed_areas.append((minx, miny, maxx, maxy, start_time, end_time))
Example #10
def import_browse_report(p, browse_report_file, browse_layer_model, crs,
                         seed_cache_levels, import_cache_levels, config):
    """
    """

    seed_areas = []

    report_result = IngestBrowseReportResult()

    browse_report = decode_browse_report(etree.parse(browse_report_file))
    browse_report_model = create_browse_report(browse_report,
                                               browse_layer_model)
    for browse in browse_report:
        with transaction.commit_manually():
            with transaction.commit_manually(using="mapcache"):
                try:

                    result = import_browse(p, browse, browse_report_model,
                                           browse_layer_model, crs, seed_areas,
                                           config)
                    report_result.add(result)

                    transaction.commit()
                    transaction.commit(using="mapcache")

                except Exception as e:
                    logger.error("Failure during import of browse '%s'." %
                                 browse.browse_identifier)
                    logger.debug(traceback.format_exc() + "\n")
                    transaction.rollback()
                    transaction.rollback(using="mapcache")

                    report_result.add(
                        IngestBrowseFailureResult(browse.browse_identifier,
                                                  type(e).__name__, str(e)))

                    continue

        tileset_name = browse_layer_model.id
        dim = isotime(browse.start_time) + "/" + isotime(browse.end_time)
        ts = tileset.open(get_tileset_path(browse_layer_model.browse_type,
                                           config),
                          mode="w")

        grid = URN_TO_GRID[browse_layer_model.grid]
        tile_num = 0

        # import cache
        for minzoom, maxzoom in import_cache_levels:
            logger.info("Importing cached tiles from zoom level %d to %d." %
                        (minzoom, maxzoom))

            for x, y, z, f in p.get_cache_files(tileset_name, grid, dim):
                if z < minzoom or z > maxzoom:
                    continue

                ts.add_tile(tileset_name, grid, dim, x, y, z, f)
                tile_num += 1

        logger.info("Imported %d cached tiles." % tile_num)

        # seed cache
        for minzoom, maxzoom in seed_cache_levels:
            logger.info("Re-seeding tile cache from zoom level %d to %d." %
                        (minzoom, maxzoom))

            seed_mapcache(tileset=browse_layer_model.id,
                          grid=browse_layer_model.grid,
                          minx=result.extent[0],
                          miny=result.extent[1],
                          maxx=result.extent[2],
                          maxy=result.extent[3],
                          minzoom=minzoom,
                          maxzoom=maxzoom,
                          start_time=result.time_interval[0],
                          end_time=result.time_interval[1],
                          delete=False,
                          **get_mapcache_seed_config(config))

            logger.info("Successfully finished seeding.")
Example #11
    def handle(self, *browse_layer_id, **kwargs):
        # parse command arguments
        self.verbosity = int(kwargs.get("verbosity", 1))
        traceback = kwargs.get("traceback", False)
        self.set_up_logging(["ngeo_browse_server"], self.verbosity, traceback)

        # check consistency
        if not len(browse_layer_id):
            logger.error("No browse layer given.")
            raise CommandError("No browse layer given.")
        elif len(browse_layer_id) > 1:
            logger.error("Too many browse layers given.")
            raise CommandError("Too many browse layers given.")
        else:
            browse_layer_id = browse_layer_id[0]

        try:
            # get the according browse layer
            browse_layer = models.BrowseLayer.objects.get(id=browse_layer_id)
        except models.BrowseLayer.DoesNotExist:
            logger.error("Browse layer '%s' does not exist."
                         % browse_layer_id)
            raise CommandError("Browse layer '%s' does not exist."
                               % browse_layer_id)

        start = kwargs.get("start")
        end = kwargs.get("end")
        dry_run = kwargs.get("dry_run")
        force = kwargs.get("force")

        # parse start/end if given
        if start:
            start = getDateTime(start)
        if end:
            end = getDateTime(end)

        if force:
            logger.info("Starting reseeding browse layer '%s'." % browse_layer_id)
        else:
            logger.info("Starting seeding browse layer '%s'." % browse_layer_id)

        times_qs = mapcache_models.Time.objects.filter(
            source=browse_layer.id
        )

        # apply start/end filter
        if start and not end:
            times_qs = times_qs.filter(start_time__gte=start)
        elif end and not start:
            times_qs = times_qs.filter(end_time__lte=end)
        elif start and end:
            times_qs = times_qs.filter(start_time__gte=start,
                                       end_time__lte=end)

        for time_model in times_qs:

            if dry_run:
                logger.info("Time span to (re)seed is %s/%s."
                            % (isotime(time_model.start_time),
                               isotime(time_model.end_time)))
            else:
                try:
                    logger.info("(Re)seeding time span %s/%s."
                                % (isotime(time_model.start_time),
                                   isotime(time_model.end_time)))
                    seed_mapcache(tileset=browse_layer.id,
                                  grid=browse_layer.grid,
                                  minx=time_model.minx, miny=time_model.miny,
                                  maxx=time_model.maxx, maxy=time_model.maxy,
                                  minzoom=browse_layer.lowest_map_level,
                                  maxzoom=browse_layer.highest_map_level,
                                  start_time=time_model.start_time,
                                  end_time=time_model.end_time,
                                  delete=False, force=force,
                                  **get_mapcache_seed_config())
                    logger.info("Successfully finished (re)seeding time span.")
                except Exception as e:
                    logger.warn("(Re)seeding failed: %s" % str(e))