Example #1
def add_mapcache_layer_xml(browse_layer, config=None):
    name = browse_layer.id

    config = config or get_ngeo_config()

    root = read_mapcache_xml(config)

    if len(root.xpath("cache[@name='%s']|source[@name='%s']|tileset[@name='%s']" % (name, name, name))):
        raise Exception(
            "Cannot add browse layer to mapcache config, because a layer with "
            "the name '%s' is already inserted." % name
        )

    tileset_path = get_tileset_path(browse_layer.browse_type)

    root.extend([
        E("cache", 
            E("dbfile", tileset_path),
            E("detect_blank", "true"),
            name=name, type="sqlite3"
        ),
        E("source",
            E("getmap", 
                E("params",
                    E("LAYERS", name),
                    E("TRANSPARENT", "true")
                )
            ),
            E("http", 
                E("url", "http://localhost/browse/ows?")
            ),
            name=name, type="wms"
        ),
        E("tileset",
            E("source", name),
            E("cache", name),
            E("grid", 
                URN_TO_GRID[browse_layer.grid], **{
                    "max-cached-zoom": str(browse_layer.highest_map_level),
                    "out-of-zoom-strategy": "reassemble"
                }
            ),
            E("format", "mixed"),
            E("metatile", "8 8"),
            E("expires", "3600"),
            E("read-only", "true"),
            E("timedimension",
                E("dbfile", settings.DATABASES["mapcache"]["NAME"]),
                E("query", "select strftime('%Y-%m-%dT%H:%M:%SZ',start_time)||'/'||strftime('%Y-%m-%dT%H:%M:%SZ',end_time) from time where source_id=:tileset and start_time<=datetime(:end_timestamp,'unixepoch') and end_time>=datetime(:start_timestamp,'unixepoch') and maxx>=:minx and maxy>=:miny and minx<=:maxx and miny<=:maxy order by end_time desc limit 100"),
                type="sqlite", default="2010" # TODO: default year into layer definition
            ),
            name=name
        )
    ])

    write_mapcache_xml(root, config)
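For orientation, the tileset element appended by the E() calls above renders roughly as follows; the layer name, grid name, and zoom level are illustrative, and the timedimension children are elided:

<tileset name="my_layer">
  <source>my_layer</source>
  <cache>my_layer</cache>
  <grid max-cached-zoom="10" out-of-zoom-strategy="reassemble">WGS84</grid>
  <format>mixed</format>
  <metatile>8 8</metatile>
  <expires>3600</expires>
  <read-only>true</read-only>
  <timedimension type="sqlite" default="2010">...</timedimension>
</tileset>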
Example #2
def delete_browse_layer(browse_layer, config=None):
    config = config or get_ngeo_config()

    # remove browse layer model. This should also delete all related browses
    # and browse reports
    models.BrowseLayer.objects.get(id=browse_layer.id).delete()
    eoxs_models.DatasetSeries.objects.get(identifier=browse_layer.id).delete()

    if not browse_layer.contains_volumes and not browse_layer.contains_vertical_curtains:
        # remove source from mapcache sqlite
        mapcache_models.Source.objects.get(name=browse_layer.id).delete()

        # remove browse layer from mapcache XML
        remove_mapcache_layer_xml(browse_layer, config)

        # delete browse layer cache
        try:
            os.remove(get_tileset_path(browse_layer.browse_type))
        except OSError:
            # when no browse was ingested, the sqlite file does not exist, so just
            # issue a warning
            logger.warning(
                "Could not remove tileset '%s'." 
                % get_tileset_path(browse_layer.browse_type)
            )

    # delete all optimized files by deleting the whole directory of the layer
    optimized_dir = get_project_relative_path(join(
        config.get(INGEST_SECTION, "optimized_files_dir"), browse_layer.id
    ))
    try:
        shutil.rmtree(optimized_dir)
    except OSError:
        logger.error(
            "Could not remove directory for optimzed files: '%s'." 
            % optimized_dir
        )
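A minimal usage sketch, assuming a configured ngeo_browse_server Django environment; the stub class carries exactly the attributes the function reads, with purely illustrative values:

# Hypothetical caller: attribute names mirror what delete_browse_layer reads;
# a real caller passes the browse layer object being deleted.
class StubLayer(object):
    id = "MER_FRS_1P_RGB_reduced"
    browse_type = "MER_FRS_1P_RGB_reduced"
    contains_volumes = False
    contains_vertical_curtains = False

delete_browse_layer(StubLayer())  # falls back to get_ngeo_config() for config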
Example #3
    def handle(self, *args, **kwargs):
        System.init()
        
        # parse command arguments
        self.verbosity = int(kwargs.get("verbosity", 1))
        traceback = kwargs.get("traceback", False)
        self.set_up_logging(["ngeo_browse_server"], self.verbosity, traceback)

        logger.info("Starting browse export from command line.")

        browse_layer_id = kwargs.get("browse_layer_id")
        browse_type = kwargs.get("browse_type")
        if not browse_layer_id and not browse_type:
            logger.error("No browse layer or browse type was specified.")
            raise CommandError("No browse layer or browse type was specified.")
        elif browse_layer_id and browse_type:
            logger.error("Both browse layer and browse type were specified.")
            raise CommandError("Both browse layer and browse type were specified.")
        
        start = kwargs.get("start")
        end = kwargs.get("end")
        compression = kwargs.get("compression")
        export_cache = kwargs["export_cache"]
        output_path = kwargs.get("output_path")
        
        # parse start/end if given
        if start: 
            start = getDateTime(start)
        if end:
            end = getDateTime(end)
        
        if not output_path:
            output_path = package.generate_filename(compression)
        
        with package.create(output_path, compression) as p:
            # query the browse layer
            if browse_layer_id:
                try:
                    browse_layer_model = BrowseLayer.objects.get(id=browse_layer_id)
                except BrowseLayer.DoesNotExist:
                    logger.error("Browse layer '%s' does not exist" 
                                 % browse_layer_id)
                    raise CommandError("Browse layer '%s' does not exist" 
                                       % browse_layer_id)
            else:
                try:
                    browse_layer_model = BrowseLayer.objects.get(browse_type=browse_type)
                except BrowseLayer.DoesNotExist:
                    logger.error("Browse layer with browse type '%s' does "
                                 "not exist" % browse_type)
                    raise CommandError("Browse layer with browse type '%s' does "
                                       "not exist" % browse_type)
            
            browse_layer = browselayer_data.BrowseLayer.from_model(browse_layer_model)
            p.set_browse_layer(
                serialize_browse_layers((browse_layer,), pretty_print=True)
            )
            
            # query browse reports; optionally filter for start/end time
            browse_reports_qs = BrowseReport.objects.all()
            
            # apply start/end filter
            if start and not end:
                browse_reports_qs = browse_reports_qs.filter(browses__start_time__gte=start)
            elif end and not start:
                browse_reports_qs = browse_reports_qs.filter(browses__end_time__lte=end)
            elif start and end:
                browse_reports_qs = browse_reports_qs.filter(browses__start_time__gte=start, 
                                                             browses__end_time__lte=end)
            
            # use count annotation to exclude all browse reports with no browses
            browse_reports_qs = browse_reports_qs.annotate(
                browse_count=Count('browses')
            ).filter(browse_layer=browse_layer_model, browse_count__gt=0)
            
            # iterate over all browse reports
            for browse_report_model in browse_reports_qs:
                browses_qs = Browse.objects.filter(
                    browse_report=browse_report_model
                )
                if start:
                    browses_qs = browses_qs.filter(start_time__gte=start)
                if end:
                    browses_qs = browses_qs.filter(end_time__lte=end)
                
                browse_report = browsereport_data.BrowseReport.from_model(
                    browse_report_model, browses_qs
                )
                
                # iterate over all browses in the query
                for browse, browse_model in izip(browse_report, browses_qs):
                    coverage_wrapper = System.getRegistry().getFromFactory(
                        "resources.coverages.wrappers.EOCoverageFactory",
                        {"obj_id": browse_model.coverage_id}
                    )
                    
                    # set the output filenames derived from the coverage ID
                    base_filename = browse_model.coverage_id
                    data_filename = base_filename + ".tif"
                    md_filename = base_filename + ".xml"
                    footprint_filename = base_filename + ".wkb"
                    
                    browse._file_name = data_filename
                    
                    # add optimized browse image to package
                    data_package = coverage_wrapper.getData()
                    data_package.prepareAccess()
                    browse_file_path = data_package.getGDALDatasetIdentifier()
                    with open(browse_file_path, "rb") as f:  # open the GeoTIFF in binary mode
                        p.add_browse(f, data_filename)
                        wkb = coverage_wrapper.getFootprint().wkb
                        p.add_footprint(footprint_filename, wkb)
                    
                    if export_cache:
                        time_model = mapcache_models.Time.objects.get(
                            start_time__lte=browse_model.start_time,
                            end_time__gte=browse_model.end_time,
                            source__name=browse_layer_model.id
                        )
                        
                        # get "dim" parameter
                        dim = (isotime(time_model.start_time) + "/" +
                               isotime(time_model.end_time))
                        
                        # abort with an error if a merged browse is found
                        if dim != (isotime(browse_model.start_time) + "/" +
                               isotime(browse_model.end_time)):
                            logger.error("Browse layer '%s' contains "
                                         "merged browses and exporting "
                                         "of cache is requested. Try "
                                         "without exporting the cache."
                                         % browse_layer_model.id)
                            raise CommandError("Browse layer '%s' contains "
                                               "merged browses and exporting "
                                               "of cache is requested. Try "
                                               "without exporting the cache."
                                               % browse_layer_model.id)
                        
                        # get path to sqlite tileset and open it
                        ts = tileset.open(
                            get_tileset_path(browse_layer.browse_type)
                        )
                        
                        for tile_desc in ts.get_tiles(
                            browse_layer.id, 
                            URN_TO_GRID[browse_layer.grid], dim=dim,
                            minzoom=browse_layer.highest_map_level,
                            maxzoom=browse_layer.lowest_map_level
                        ):
                            p.add_cache_file(*tile_desc)

                # save browse report xml and add it to the package
                p.add_browse_report(
                    serialize_browse_report(browse_report, pretty_print=True),
                    name="%s_%s_%s_%s.xml" % (
                        browse_report.browse_type,
                        browse_report.responsible_org_name,
                        browse_report.date_time.strftime("%Y%m%d%H%M%S%f"),
                        uuid.uuid4().hex
                    )
                )

        logger.info("Successfully finished browse export from command line.")
Example #4
def add_mapcache_layer_xml(browse_layer, config=None):
    name = browse_layer.id

    config = config or get_ngeo_config()

    root = read_mapcache_xml(config)

    if len(root.xpath("cache[@name='%s']|source[@name='%s']|tileset[@name='%s']" % (name, name, name))):
        raise LayerException(
            "Cannot add browse layer to mapcache config, because a layer with "
            "the name '%s' is already inserted." % name
        )

    tileset_path = get_tileset_path(browse_layer.browse_type)

    bounds = CRS_BOUNDS[GRID_TO_SRID[URN_TO_GRID[browse_layer.grid]]]
    full = float(abs(bounds[0]) + abs(bounds[2]))

    root.extend([
        E("cache",
            E("dbfile", tileset_path),
            E("detect_blank", "true"),
            E("pragma", "2147483646", name="max_page_count"),
            E("pragma", "2048", name="page_size"),
            name=name, type="sqlite3"
        ),
        E("source",
            E("getmap",
                E("params",
                    E("LAYERS", name),
                    E("TRANSPARENT", "true")
                )
            ),
            E("http",
                E("url", "http://localhost/browse/ows?")
            ),
            name=name, type="wms"
        ),
        E("tileset",
            E("metadata",
                E("title", str(browse_layer.title)),
                *([
                    E("abstract", str(browse_layer.description))]
                    if browse_layer.description
                    else []
                )
            ),
            E("source", name),
            E("cache", name),
            E("grid",
                URN_TO_GRID[browse_layer.grid], **{
                    "max-cached-zoom": str(browse_layer.highest_map_level),
                    "out-of-zoom-strategy": "reassemble"
                }
            ),
            E("format", "mixed"),
            E("metatile", "8 8"),
            E("expires", "3600"),
            E("read-only", "true"),
            E("timedimension",
                E("dbfile", settings.DATABASES["mapcache"]["NAME"]),
                E("query", "select * from (select strftime('%Y-%m-%dT%H:%M:%SZ',start_time)||'/'||strftime('%Y-%m-%dT%H:%M:%SZ',end_time) as interval from time where source_id=:tileset and (start_time<datetime(:end_timestamp,'unixepoch') and (end_time>datetime(:start_timestamp,'unixepoch')) or (start_time=end_time and start_time<datetime(:end_timestamp,'unixepoch') and end_time>=datetime(:start_timestamp,'unixepoch'))) and ((maxx>=:minx and minx<=:maxx) or (maxx>"+str(bounds[2])+" and (maxx-"+str(full)+")>=:minx and (minx-"+str(full)+")<=:maxx)) and maxy>=:miny and miny<=:maxy order by end_time desc limit "+str(browse_layer.tile_query_limit)+") order by interval asc"),
                type="sqlite", default=str(browse_layer.timedimension_default)),
            *([
                E("auth_method", "cmdlineauth")]
                if browse_layer.browse_access_policy in ("RESTRICTED", "PRIVATE")
                else []
            ),
            name=name
        )
    ])

    logger.info("Adding cache, source, and tileset for '%s' to mapcache "
                "config." % name)
    write_mapcache_xml(root, config)
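The bounds and full values computed above feed the dateline wrap-around terms of the timedimension query; a self-contained illustration with WGS84-style bounds (CRS_BOUNDS is assumed to yield (minx, miny, maxx, maxy)):

# Illustrative arithmetic only.
bounds = (-180.0, -90.0, 180.0, 90.0)
full = float(abs(bounds[0]) + abs(bounds[2]))  # 360.0: one full world width
# A browse crossing the dateline may be stored with maxx > 180; the query also
# matches it after shifting the extent one world width to the west:
minx, maxx = 170.0, 190.0
assert (minx - full, maxx - full) == (-190.0, -170.0)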
Example #5
def delete_browse_layer(browse_layer, purge=False, config=None):
    config = config or get_ngeo_config()

    # only remove MapCache configuration in order to allow a roll-back
    # without data loss
    if models.BrowseLayer.objects.filter(id=browse_layer.id).exists():
        logger.info("Starting disabling of browse layer '%s'." % browse_layer.id)
    else:
        raise Exception(
            "Could not disable browse layer '%s' as it does not exist."
            % browse_layer.id
        )

    # remove browse layer from MapCache XML
    remove_mapcache_layer_xml(browse_layer, config)

    logger.info("Finished disabling of browse layer '%s'." % browse_layer.id)

    if purge:
        logger.info("Starting purging of browse layer '%s'." % browse_layer.id)
        # remove browse layer model. This should also delete all related browses
        # and browse reports
        models.BrowseLayer.objects.get(id=browse_layer.id).delete()

        # delete EOxServer layer metadata
        dss = System.getRegistry().getFromFactory(
            "resources.coverages.wrappers.DatasetSeriesFactory",
            {"obj_id": browse_layer.id}
        )
        dss._DatasetSeriesWrapper__model.layer_metadata.all().delete()
        # delete EOxServer dataset series
        dss_mgr = System.getRegistry().findAndBind(
            intf_id="resources.coverages.interfaces.Manager",
            params={
                "resources.coverages.interfaces.res_type": "eo.dataset_series"
            }
        )
        dss_mgr.delete(browse_layer.id)

        # remove source from mapcache sqlite
        mapcache_models.Source.objects.get(name=browse_layer.id).delete()

        # delete browse layer cache
        try:
            logger.info(
                "Deleting tileset for browse layer '%s'." % browse_layer.id
            )
            os.remove(get_tileset_path(browse_layer.browse_type))
        except OSError:
            # when no browse was ingested, the sqlite file does not exist, so
            # just issue a warning
            logger.warning(
                "Could not remove tileset '%s'."
                % get_tileset_path(browse_layer.browse_type)
            )

        # delete all optimized files by deleting the whole directory of the layer
        optimized_dir = get_project_relative_path(join(
            config.get(INGEST_SECTION, "optimized_files_dir"), browse_layer.id
        ))
        try:
            logger.info(
                "Deleting optimized images for browse layer '%s'."
                % browse_layer.id
            )
            shutil.rmtree(optimized_dir)
        except OSError:
            logger.error(
                "Could not remove directory for optimized files: '%s'."
                % optimized_dir
            )

        logger.info("Finished purging of browse layer '%s'." % browse_layer.id)
Example #6
def add_mapcache_layer_xml(browse_layer, config=None):
    name = browse_layer.id

    config = config or get_ngeo_config()

    root = read_mapcache_xml(config)

    if len(
            root.xpath(
                "cache[@name='%s']|source[@name='%s']|tileset[@name='%s']" %
                (name, name, name))):
        raise LayerException(
            "Cannot add browse layer to mapcache config, because a layer with "
            "the name '%s' is already inserted." % name)

    tileset_path = get_tileset_path(browse_layer.browse_type)

    bounds = CRS_BOUNDS[GRID_TO_SRID[URN_TO_GRID[browse_layer.grid]]]
    full = float(abs(bounds[0]) + abs(bounds[2]))

    root.extend([
        E("cache",
          E("dbfile", tileset_path),
          E("detect_blank", "true"),
          E("pragma", "2147483646", name="max_page_count"),
          E("pragma", "2048", name="page_size"),
          name=name,
          type="sqlite3"),
        E("source",
          E("getmap", E("params", E("LAYERS", name), E("TRANSPARENT",
                                                       "true"))),
          E("http", E("url", "http://localhost/browse/ows?")),
          name=name,
          type="wms"),
        E("tileset",
          E(
              "metadata", E("title", str(browse_layer.title)),
              *([E("abstract", str(browse_layer.description))]
                if browse_layer.description else [])),
          E("source", name),
          E("cache", name),
          E(
              "grid", URN_TO_GRID[browse_layer.grid], **{
                  "max-cached-zoom": str(browse_layer.highest_map_level),
                  "out-of-zoom-strategy": "reassemble"
              }),
          E("format", "mixed"),
          E("metatile", "8 8"),
          E("expires", "3600"),
          E("read-only", "true"),
          E("timedimension",
            E("dbfile", settings.DATABASES["mapcache"]["NAME"]),
            E(
                "query",
                "select * from (select strftime('%Y-%m-%dT%H:%M:%SZ',start_time)||'/'||strftime('%Y-%m-%dT%H:%M:%SZ',end_time) as interval from time where source_id=:tileset and (start_time<datetime(:end_timestamp,'unixepoch') and (end_time>datetime(:start_timestamp,'unixepoch')) or (start_time=end_time and start_time<datetime(:end_timestamp,'unixepoch') and end_time>=datetime(:start_timestamp,'unixepoch'))) and ((maxx>=:minx and minx<=:maxx) or (maxx>"
                + str(bounds[2]) + " and (maxx-" + str(full) +
                ")>=:minx and (minx-" + str(full) +
                ")<=:maxx)) and maxy>=:miny and miny<=:maxy order by end_time desc limit "
                + str(browse_layer.tile_query_limit) +
                ") order by interval asc"),
            type="sqlite",
            default=str(browse_layer.timedimension_default)),
          *([E("auth_method", "cmdlineauth")]
            if browse_layer.browse_access_policy in ("RESTRICTED",
                                                     "PRIVATE") else []),
          name=name)
    ])

    logger.info("Adding cache, source, and tileset for '%s' to mapcache "
                "config." % name)
    write_mapcache_xml(root, config)
Example #7
File: imp.py Project: EOX-A/ngeo-b
def import_browse_report(p, browse_report_file, browse_layer_model, crs,
                         seed_cache_levels, import_cache_levels, config):
    """
    """

    seed_areas = []

    report_result = IngestBrowseReportResult()

    browse_report = decode_browse_report(etree.parse(browse_report_file))
    browse_report_model = create_browse_report(browse_report,
                                               browse_layer_model)
    for browse in browse_report:
        with transaction.commit_manually():
            with transaction.commit_manually(using="mapcache"):
                try:

                    result = import_browse(p, browse, browse_report_model,
                                           browse_layer_model, crs, seed_areas,
                                           config)
                    report_result.add(result)

                    transaction.commit()
                    transaction.commit(using="mapcache")

                except Exception as e:
                    logger.error("Failure during import of browse '%s'." %
                                 browse.browse_identifier)
                    logger.debug(traceback.format_exc() + "\n")
                    transaction.rollback()
                    transaction.rollback(using="mapcache")

                    report_result.add(IngestBrowseFailureResult(
                        browse.browse_identifier,
                        type(e).__name__, str(e))
                    )

                    continue

        tileset_name = browse_layer_model.id
        dim = isotime(browse.start_time) + "/" + isotime(browse.end_time)
        ts = tileset.open(
            get_tileset_path(browse_layer_model.browse_type, config),
            mode="w"
        )

        grid = URN_TO_GRID[browse_layer_model.grid]
        tile_num = 0

        # import cache
        for minzoom, maxzoom in import_cache_levels:
            logger.info("Importing cached tiles from zoom level %d to %d."
                        % (minzoom, maxzoom))

            for x, y, z, f in p.get_cache_files(tileset_name, grid, dim):
                if z < minzoom or z > maxzoom:
                    continue

                ts.add_tile(tileset_name, grid, dim, x, y, z, f)
                tile_num += 1

        logger.info("Imported %d cached tiles." % tile_num)

        # seed cache
        for minzoom, maxzoom in seed_cache_levels:
            logger.info("Re-seeding tile cache from zoom level %d to %d."
                        % (minzoom, maxzoom))

            seed_mapcache(tileset=browse_layer_model.id,
                          grid=browse_layer_model.grid,
                          minx=result.extent[0], miny=result.extent[1],
                          maxx=result.extent[2], maxy=result.extent[3],
                          minzoom=minzoom,
                          maxzoom=maxzoom,
                          start_time=result.time_interval[0],
                          end_time=result.time_interval[1],
                          delete=False,
                          **get_mapcache_seed_config(config))

            logger.info("Successfully finished seeding.")
Example #8
def delete_browse_layer(browse_layer, purge=False, config=None):
    config = config or get_ngeo_config()

    # only remove MapCache configuration in order to allow a roll-back
    # without data loss
    if models.BrowseLayer.objects.filter(id=browse_layer.id).exists():
        logger.info("Starting disabling of browse layer '%s'." %
                    browse_layer.id)
    else:
        raise Exception(
            "Could not disable browse layer '%s' as it does not exist." %
            browse_layer.id)

    # remove browse layer from MapCache XML
    remove_mapcache_layer_xml(browse_layer, config)

    # disable SxCat harvesting for collection
    harvesting_via_sxcat = False
    try:
        harvesting_via_sxcat = config.getboolean("control",
                                                 "harvesting_via_sxcat")
    except Exception:
        # option not present in the configuration: keep the default False
        pass
    if harvesting_via_sxcat and browse_layer.harvesting_source:
        disable_collection(browse_layer)

    logger.info("Finished disabling of browse layer '%s'." % browse_layer.id)

    if purge:
        logger.info("Starting purging of browse layer '%s'." % browse_layer.id)
        # remove browse layer model. This should also delete all related browses
        # and browse reports
        models.BrowseLayer.objects.get(id=browse_layer.id).delete()

        # delete EOxServer layer metadata
        dss = System.getRegistry().getFromFactory(
            "resources.coverages.wrappers.DatasetSeriesFactory",
            {"obj_id": browse_layer.id})
        dss._DatasetSeriesWrapper__model.layer_metadata.all().delete()
        # delete EOxServer dataset series
        dss_mgr = System.getRegistry().findAndBind(
            intf_id="resources.coverages.interfaces.Manager",
            params={
                "resources.coverages.interfaces.res_type": "eo.dataset_series"
            })
        dss_mgr.delete(browse_layer.id)

        # remove source from mapcache sqlite
        mapcache_models.Source.objects.get(name=browse_layer.id).delete()

        # delete browse layer cache
        try:
            logger.info("Deleting tileset for browse layer '%s'." %
                        browse_layer.id)
            os.remove(get_tileset_path(browse_layer.browse_type))
        except OSError:
            # when no browse was ingested, the sqlite file does not exist, so
            # just issue a warning
            logger.warning("Could not remove tileset '%s'." %
                           get_tileset_path(browse_layer.browse_type))

        # delete all optimized files by deleting the whole directory of the layer
        optimized_dir = get_project_relative_path(
            join(config.get(INGEST_SECTION, "optimized_files_dir"),
                 browse_layer.id))
        try:
            logger.info("Deleting optimized images for browse layer '%s'." %
                        browse_layer.id)
            shutil.rmtree(optimized_dir)
        except OSError:
            logger.error(
                "Could not remove directory for optimized files: '%s'." %
                optimized_dir)

        if harvesting_via_sxcat and browse_layer.harvesting_source:
            remove_collection(browse_layer)

        logger.info("Finished purging of browse layer '%s'." % browse_layer.id)
Example #9
def import_browse_report(p, browse_report_file, browse_layer_model, crs,
                         seed_cache_levels, import_cache_levels, config):
    """
    """

    seed_areas = []

    report_result = IngestBrowseReportResult()

    browse_report = decode_browse_report(etree.parse(browse_report_file))
    browse_report_model = create_browse_report(browse_report,
                                               browse_layer_model)
    for browse in browse_report:
        with transaction.commit_manually():
            with transaction.commit_manually(using="mapcache"):
                try:

                    result = import_browse(p, browse, browse_report_model,
                                           browse_layer_model, crs, seed_areas,
                                           config)
                    report_result.add(result)

                    transaction.commit()
                    transaction.commit(using="mapcache")

                except Exception as e:
                    logger.error("Failure during import of browse '%s'." %
                                 browse.browse_identifier)
                    logger.debug(traceback.format_exc() + "\n")
                    transaction.rollback()
                    transaction.rollback(using="mapcache")

                    report_result.add(
                        IngestBrowseFailureResult(browse.browse_identifier,
                                                  type(e).__name__, str(e)))

                    continue

        tileset_name = browse_layer_model.id
        dim = isotime(browse.start_time) + "/" + isotime(browse.end_time)
        ts = tileset.open(get_tileset_path(browse_layer_model.browse_type,
                                           config),
                          mode="w")

        grid = URN_TO_GRID[browse_layer_model.grid]
        tile_num = 0

        # import cache
        for minzoom, maxzoom in import_cache_levels:
            logger.info("Importing cached tiles from zoom level %d to %d." %
                        (minzoom, maxzoom))

            for x, y, z, f in p.get_cache_files(tileset_name, grid, dim):
                if z < minzoom or z > maxzoom:
                    continue

                ts.add_tile(tileset_name, grid, dim, x, y, z, f)
                tile_num += 1

        logger.info("Imported %d cached tiles." % tile_num)

        # seed cache
        for minzoom, maxzoom in seed_cache_levels:
            logger.info("Re-seeding tile cache from zoom level %d to %d." %
                        (minzoom, maxzoom))

            seed_mapcache(tileset=browse_layer_model.id,
                          grid=browse_layer_model.grid,
                          minx=result.extent[0],
                          miny=result.extent[1],
                          maxx=result.extent[2],
                          maxy=result.extent[3],
                          minzoom=minzoom,
                          maxzoom=maxzoom,
                          start_time=result.time_interval[0],
                          end_time=result.time_interval[1],
                          delete=False,
                          **get_mapcache_seed_config(config))

            logger.info("Successfully finished seeding.")
Example #10
 def handle(self, *args, **kwargs):
     # parse command arguments
     self.verbosity = int(kwargs.get("verbosity", 1))
     traceback = kwargs.get("traceback", False)
     self.set_up_logging(["ngeo_browse_server"], self.verbosity, traceback)
     
     browse_layer_id = kwargs.get("browse_layer_id")
     browse_type = kwargs.get("browse_type")
     if not browse_layer_id and not browse_type:
         raise CommandError("No browse layer or browse type was specified.")
     elif browse_layer_id and browse_type:
         raise CommandError("Both browse layer and browse type were specified.")
     
     start = kwargs.get("start")
     end = kwargs.get("end")
     compression = kwargs.get("compression")
     export_cache = kwargs["export_cache"]
     output_path = kwargs.get("output_path")
     
     # parse start/end if given
     if start: 
         start = getDateTime(start)
     if end:
         end = getDateTime(end)
     
     if not output_path:
         output_path = package.generate_filename(compression)
     
     with package.create(output_path, compression) as p:
         # query the browse layer
         if browse_layer_id:
             try:
                 browse_layer_model = BrowseLayer.objects.get(id=browse_layer_id)
             except BrowseLayer.DoesNotExist:
                 raise CommandError("Browse layer '%s' does not exist" 
                                    % browse_layer_id)
         else:
             try:
                 browse_layer_model = BrowseLayer.objects.get(browse_type=browse_type)
             except BrowseLayer.DoesNotExist:
                 raise CommandError("Browse layer with browse type '%s' does "
                                    "not exist" % browse_type)
         
         browse_layer = browselayer_data.BrowseLayer.from_model(browse_layer_model)
         p.set_browse_layer(
             serialize_browse_layers((browse_layer,), pretty_print=True)
         )
         
         # query browse reports; optionally filter for start/end time
         browse_reports_qs = BrowseReport.objects.all()
         
         # apply start/end filter
         if start and not end:
             browse_reports_qs = browse_reports_qs.filter(browses__start_time__gte=start)
         elif end and not start:
             browse_reports_qs = browse_reports_qs.filter(browses__end_time__lte=end)
         elif start and end:
             browse_reports_qs = browse_reports_qs.filter(browses__start_time__gte=start, 
                                                          browses__end_time__lte=end)
         
         # use count annotation to exclude all browse reports with no browses
         browse_reports_qs = browse_reports_qs.annotate(
             browse_count=Count('browses')
         ).filter(browse_layer=browse_layer_model, browse_count__gt=0)
         
         # iterate over all browse reports
         for browse_report_model in browse_reports_qs:
             browses_qs = Browse.objects.filter(
                 browse_report=browse_report_model
             )
             if start:
                 browses_qs = browses_qs.filter(start_time__gte=start)
             if end:
                 browses_qs = browses_qs.filter(end_time__lte=end)
             
             browse_report = browsereport_data.BrowseReport.from_model(
                 browse_report_model, browses_qs
             )
             
             # iterate over all browses in the query
             for browse, browse_model in izip(browse_report, browses_qs):
                 coverage = eoxs_models.RectifiedDataset.objects.get(
                     identifier=browse_model.coverage_id
                 )
                 
                 # set the output filenames derived from the coverage ID
                 base_filename = browse_model.coverage_id
                 data_filename = base_filename + ".tif"
                 md_filename = base_filename + ".xml"
                 footprint_filename = base_filename + ".wkb"
                 
                 browse._file_name = data_filename
                 
                 # add optimized browse image to package
                 data_item = coverage.data_items.get(
                     semantic__startswith="bands"
                 )
                 browse_file_path = data_item.location
                 
                 with open(browse_file_path, "rb") as f:  # open the GeoTIFF in binary mode
                     p.add_browse(f, data_filename)
                     wkb = coverage.footprint.wkb
                     p.add_footprint(footprint_filename, wkb)
                 
                 if export_cache:
                     time_model = mapcache_models.Time.objects.get(
                         start_time__lte=browse_model.start_time,
                         end_time__gte=browse_model.end_time,
                         source__name=browse_layer_model.id
                     )
                     
                     # get "dim" parameter
                     dim = (isoformat(time_model.start_time) + "/" +
                            isoformat(time_model.end_time))
                     
                     # abort with an error if a merged browse is found
                     if dim != (isoformat(browse_model.start_time) + "/" +
                            isoformat(browse_model.end_time)):
                         raise CommandError("Browse layer '%s' contains "
                                            "merged browses and exporting "
                                            "of cache is requested. Try "
                                            "without exporting the cache."
                                            % browse_layer_model.id)
                     
                     # get path to sqlite tileset and open it
                     ts = tileset.open(
                         get_tileset_path(browse_layer.id)
                     )
                     
                     for tile_desc in ts.get_tiles(
                         browse_layer.id, 
                         URN_TO_GRID[browse_layer.grid], dim=dim,
                         minzoom=browse_layer.highest_map_level,
                         maxzoom=browse_layer.lowest_map_level
                     ):
                         p.add_cache_file(*tile_desc)

             # save browse report xml and add it to the package
             p.add_browse_report(
                 serialize_browse_report(browse_report, pretty_print=True),
                 name="%s_%s_%s_%s.xml" % (
                     browse_report.browse_type,
                     browse_report.responsible_org_name,
                     browse_report.date_time.strftime("%Y%m%d%H%M%S%f"),
                     uuid.uuid4().hex
                 )
             )
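The dim key that ties browses to cached tiles is just the ISO 8601 start/end interval; a self-contained illustration, with a stand-in for the project's isoformat helper:

from datetime import datetime

def isoformat(dt):
    # stand-in for the project's isoformat helper; assumed to render UTC "Z" times
    return dt.strftime("%Y-%m-%dT%H:%M:%SZ")

start = datetime(2012, 1, 1, 10, 30)
end = datetime(2012, 1, 1, 10, 35)
dim = isoformat(start) + "/" + isoformat(end)
assert dim == "2012-01-01T10:30:00Z/2012-01-01T10:35:00Z"

The export above aborts when this per-browse interval differs from the interval stored in the mapcache Time model, since merged time entries cannot be unambiguously split back into per-browse tile sets.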