Example #1
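The handle() method of a Django management command that exports the browses of one browse layer, selected either by ID or by browse type and optionally restricted to a start/end time window, into a package: the browse layer definition, the optimized browse images and their footprints, the browse reports, and, if requested, the cached tiles.

The excerpt assumes several module-level names. A sketch of the likely imports, based on the EOX-A/ngeo-b project layout (the exact module paths are an assumption, and helpers such as getDateTime, package, the serializers, isotime, get_tileset_path, and URN_TO_GRID come from project modules not reproduced here):

import uuid
import logging
from itertools import izip                      # Python 2 only

from django.core.management.base import CommandError
from django.db.models import Count

# ngEO Browse Server modules; these paths are an assumption:
from ngeo_browse_server.config.models import Browse, BrowseLayer, BrowseReport
from ngeo_browse_server.mapcache import models as mapcache_models
from ngeo_browse_server.mapcache import tileset

logger = logging.getLogger(__name__)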
    def handle(self, *args, **kwargs):
        System.init()
        
        # parse command arguments
        self.verbosity = int(kwargs.get("verbosity", 1))
        traceback = kwargs.get("traceback", False)
        self.set_up_logging(["ngeo_browse_server"], self.verbosity, traceback)

        logger.info("Starting browse export from command line.")

        browse_layer_id = kwargs.get("browse_layer_id")
        browse_type = kwargs.get("browse_type")
        if not browse_layer_id and not browse_type:
            logger.error("No browse layer or browse type was specified.")
            raise CommandError("No browse layer or browse type was specified.")
        elif browse_layer_id and browse_type:
            logger.error("Both browse layer and browse type were specified.")
            raise CommandError("Both browse layer and browse type were specified.")
        
        start = kwargs.get("start")
        end = kwargs.get("end")
        compression = kwargs.get("compression")
        export_cache = kwargs["export_cache"]
        output_path = kwargs.get("output_path")
        
        # parse start/end if given
        if start: 
            start = getDateTime(start)
        if end:
            end = getDateTime(end)
        
        if not output_path:
            output_path = package.generate_filename(compression)
        
        with package.create(output_path, compression) as p:
            # query the browse layer
            if browse_layer_id:
                try:
                    browse_layer_model = BrowseLayer.objects.get(id=browse_layer_id)
                except BrowseLayer.DoesNotExist:
                    logger.error("Browse layer '%s' does not exist" 
                                 % browse_layer_id)
                    raise CommandError("Browse layer '%s' does not exist" 
                                       % browse_layer_id)
            else:
                try:
                    browse_layer_model = BrowseLayer.objects.get(browse_type=browse_type)
                except BrowseLayer.DoesNotExist:
                    logger.error("Browse layer with browse type '%s' does "
                                 "not exist" % browse_type)
                    raise CommandError("Browse layer with browse type '%s' does "
                                       "not exist" % browse_type)
            
            browse_layer = browselayer_data.BrowseLayer.from_model(browse_layer_model)
            p.set_browse_layer(
                serialize_browse_layers((browse_layer,), pretty_print=True)
            )
            
            # query browse reports; optionally filter for start/end time
            browse_reports_qs = BrowseReport.objects.all()
            
            # apply start/end filter
            if start and not end:
                browse_reports_qs = browse_reports_qs.filter(browses__start_time__gte=start)
            elif end and not start:
                browse_reports_qs = browse_reports_qs.filter(browses__end_time__lte=end)
            elif start and end:
                browse_reports_qs = browse_reports_qs.filter(browses__start_time__gte=start, 
                                                             browses__end_time__lte=end)
            
            # use count annotation to exclude all browse reports with no browses
            browse_reports_qs = browse_reports_qs.annotate(
                browse_count=Count('browses')
            ).filter(browse_layer=browse_layer_model, browse_count__gt=0)
            
            # iterate over all browse reports
            for browse_report_model in browse_reports_qs:
                browses_qs = Browse.objects.filter(
                    browse_report=browse_report_model
                )
                if start:
                    browses_qs = browses_qs.filter(start_time__gte=start)
                if end:
                    browses_qs = browses_qs.filter(end_time__lte=end)
                
                browse_report = browsereport_data.BrowseReport.from_model(
                    browse_report_model, browses_qs
                )
                
                # iterate over all browses in the query
                for browse, browse_model in izip(browse_report, browses_qs):
                    coverage_wrapper = System.getRegistry().getFromFactory(
                        "resources.coverages.wrappers.EOCoverageFactory",
                        {"obj_id": browse_model.coverage_id}
                    )
                    
                    # set the file names for the package entries, derived
                    # from the coverage ID
                    base_filename = browse_model.coverage_id
                    data_filename = base_filename + ".tif"
                    md_filename = base_filename + ".xml"
                    footprint_filename = base_filename + ".wkb"
                    
                    # reference the packaged file name in the browse report
                    # serialized further below
                    browse._file_name = data_filename
                    
                    # add optimized browse image to package
                    data_package = coverage_wrapper.getData()
                    data_package.prepareAccess()
                    browse_file_path = data_package.getGDALDatasetIdentifier()
                    with open(browse_file_path, "rb") as f:  # browse image (GeoTIFF) is binary
                        p.add_browse(f, data_filename)
                        wkb = coverage_wrapper.getFootprint().wkb
                        p.add_footprint(footprint_filename, wkb)
                    
                    if export_cache:
                        time_model = mapcache_models.Time.objects.get(
                            start_time__lte=browse_model.start_time,
                            end_time__gte=browse_model.end_time,
                            source__name=browse_layer_model.id
                        )
                        
                        # get "dim" parameter
                        dim = (isotime(time_model.start_time) + "/" +
                               isotime(time_model.end_time))
                        
                        # exit if a merged browse is found
                        if dim != (isotime(browse_model.start_time) + "/" +
                               isotime(browse_model.end_time)):
                            logger.error("Browse layer '%s' contains "
                                         "merged browses and exporting "
                                         "of cache is requested. Try "
                                         "without exporting the cache."
                                         % browse_layer_model.id)
                            raise CommandError("Browse layer '%s' contains "
                                               "merged browses and exporting "
                                               "of cache is requested. Try "
                                               "without exporting the cache."
                                               % browse_layer_model.id)
                        
                        # get path to sqlite tileset and open it
                        ts = tileset.open(
                            get_tileset_path(browse_layer.browse_type)
                        )
                        
                        for tile_desc in ts.get_tiles(
                            browse_layer.id, 
                            URN_TO_GRID[browse_layer.grid], dim=dim,
                            minzoom=browse_layer.highest_map_level,
                            maxzoom=browse_layer.lowest_map_level
                        ):
                            p.add_cache_file(*tile_desc)
                            
                        
                
                # save browse report xml and add it to the package
                p.add_browse_report(
                    serialize_browse_report(browse_report, pretty_print=True),
                    name="%s_%s_%s_%s.xml" % (
                        browse_report.browse_type,
                        browse_report.responsible_org_name,
                        browse_report.date_time.strftime("%Y%m%d%H%M%S%f"),
                        uuid.uuid4().hex
                    )
                )

        logger.info("Successfully finished browse export from command line.")
Example #2
File: imp.py Project: EOX-A/ngeo-b
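The ingest counterpart to the export command: it decodes a browse report file, imports each browse inside its own pair of database transactions (default and "mapcache" databases), and then imports packaged cache tiles and/or re-seeds the MapCache tileset for the configured zoom levels.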
def import_browse_report(p, browse_report_file, browse_layer_model, crs,
                         seed_cache_levels, import_cache_levels, config):
    """
    """

    seed_areas = []

    report_result = IngestBrowseReportResult()

    browse_report = decode_browse_report(etree.parse(browse_report_file))
    browse_report_model = create_browse_report(browse_report,
                                               browse_layer_model)
    for browse in browse_report:
        with transaction.commit_manually():
            with transaction.commit_manually(using="mapcache"):
                try:

                    result = import_browse(p, browse, browse_report_model,
                                           browse_layer_model, crs, seed_areas,
                                           config)
                    report_result.add(result)

                    transaction.commit()
                    transaction.commit(using="mapcache")

                except Exception, e:
                    logger.error("Failure during import of browse '%s'." %
                                 browse.browse_identifier)
                    logger.debug(traceback.format_exc() + "\n")
                    transaction.rollback()
                    transaction.rollback(using="mapcache")

                    report_result.add(
                        IngestBrowseFailureResult(browse.browse_identifier,
                                                  type(e).__name__, str(e)))

                    continue

        tileset_name = browse_layer_model.id
        dim = isotime(browse.start_time) + "/" + isotime(browse.end_time)
        ts = tileset.open(get_tileset_path(browse_layer_model.browse_type,
                                           config),
                          mode="w")

        grid = URN_TO_GRID[browse_layer_model.grid]
        tile_num = 0

        # import cache
        for minzoom, maxzoom in import_cache_levels:
            logger.info("Importing cached tiles from zoom level %d to %d." %
                        (minzoom, maxzoom))

            for x, y, z, f in p.get_cache_files(tileset_name, grid, dim):
                if z < minzoom or z > maxzoom:
                    continue

                ts.add_tile(tileset_name, grid, dim, x, y, z, f)
                tile_num += 1

        logger.info("Imported %d cached tiles." % tile_num)

        # seed cache
        for minzoom, maxzoom in seed_cache_levels:
            logger.info("Re-seeding tile cache from zoom level %d to %d." %
                        (minzoom, maxzoom))

            seed_mapcache(tileset=browse_layer_model.id,
                          grid=browse_layer_model.grid,
                          minx=result.extent[0],
                          miny=result.extent[1],
                          maxx=result.extent[2],
                          maxy=result.extent[3],
                          minzoom=minzoom,
                          maxzoom=maxzoom,
                          start_time=result.time_interval[0],
                          end_time=result.time_interval[1],
                          delete=False,
                          **get_mapcache_seed_config(config))

            logger.info("Successfully finished seeding.")
Example #3
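A later revision of the export command from Example #1: the coverage is fetched directly as an EOxServer RectifiedDataset instead of going through the registry/factory, the browse image path is read from the dataset's "bands" data item, and isoformat() replaces isotime() for the time-dimension strings.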
 def handle(self, *args, **kwargs):
     # parse command arguments
     self.verbosity = int(kwargs.get("verbosity", 1))
     traceback = kwargs.get("traceback", False)
     self.set_up_logging(["ngeo_browse_server"], self.verbosity, traceback)
     
     browse_layer_id = kwargs.get("browse_layer_id")
     browse_type = kwargs.get("browse_type")
     if not browse_layer_id and not browse_type:
         raise CommandError("No browse layer or browse type was specified.")
     elif browse_layer_id and browse_type:
         raise CommandError("Both browse layer and browse type were specified.")
     
     start = kwargs.get("start")
     end = kwargs.get("end")
     compression = kwargs.get("compression")
     export_cache = kwargs["export_cache"]
     output_path = kwargs.get("output_path")
     
     # parse start/end if given
     if start: 
         start = getDateTime(start)
     if end:
         end = getDateTime(end)
     
     if not output_path:
         output_path = package.generate_filename(compression)
     
     with package.create(output_path, compression) as p:
         # query the browse layer
         if browse_layer_id:
             try:
                 browse_layer_model = BrowseLayer.objects.get(id=browse_layer_id)
             except BrowseLayer.DoesNotExist:
                 raise CommandError("Browse layer '%s' does not exist" 
                                    % browse_layer_id)
         else:
             try:
                 browse_layer_model = BrowseLayer.objects.get(browse_type=browse_type)
             except BrowseLayer.DoesNotExist:
                 raise CommandError("Browse layer with browse type '%s' does "
                                    "not exist" % browse_type)
         
         browse_layer = browselayer_data.BrowseLayer.from_model(browse_layer_model)
         p.set_browse_layer(
             serialize_browse_layers((browse_layer,), pretty_print=True)
         )
         
         # query browse reports; optionally filter for start/end time
         browse_reports_qs = BrowseReport.objects.all()
         
         # apply start/end filter
         if start and not end:
             browse_reports_qs = browse_reports_qs.filter(browses__start_time__gte=start)
         elif end and not start:
             browse_reports_qs = browse_reports_qs.filter(browses__end_time__lte=end)
         elif start and end:
             browse_reports_qs = browse_reports_qs.filter(browses__start_time__gte=start, 
                                                          browses__end_time__lte=end)
         
         # use count annotation to exclude all browse reports with no browses
         browse_reports_qs = browse_reports_qs.annotate(
             browse_count=Count('browses')
         ).filter(browse_layer=browse_layer_model, browse_count__gt=0)
         
         # iterate over all browse reports
         for browse_report_model in browse_reports_qs:
             browses_qs = Browse.objects.filter(
                 browse_report=browse_report_model
             )
             if start:
                 browses_qs = browses_qs.filter(start_time__gte=start)
             if end:
                 browses_qs = browses_qs.filter(end_time__lte=end)
             
             browse_report = browsereport_data.BrowseReport.from_model(
                 browse_report_model, browses_qs
             )
             
             # iterate over all browses in the query
             for browse, browse_model in izip(browse_report, browses_qs):
                 coverage = eoxs_models.RectifiedDataset.objects.get(
                     identifier=browse_model.coverage_id
                 )
                 
                 # set the file names for the package entries, derived
                 # from the coverage ID
                 base_filename = browse_model.coverage_id
                 data_filename = base_filename + ".tif"
                 md_filename = base_filename + ".xml"
                 footprint_filename = base_filename + ".wkb"
                 
                 browse._file_name = data_filename
                 
                 # add optimized browse image to package
                 data_item = coverage.data_items.get(
                     semantic__startswith="bands"
                 )
                 browse_file_path = data_item.location
                 
                 with open(browse_file_path, "rb") as f:  # browse image (GeoTIFF) is binary
                     p.add_browse(f, data_filename)
                     wkb = coverage.footprint.wkb
                     p.add_footprint(footprint_filename, wkb)
                 
                 if export_cache:
                     time_model = mapcache_models.Time.objects.get(
                         start_time__lte=browse_model.start_time,
                         end_time__gte=browse_model.end_time,
                         source__name=browse_layer_model.id
                     )
                     
                     # get "dim" parameter
                     dim = (isoformat(time_model.start_time) + "/" +
                            isoformat(time_model.end_time))
                     
                     # exit if a merged browse is found
                     if dim != (isoformat(browse_model.start_time) + "/" +
                            isoformat(browse_model.end_time)):
                         raise CommandError("Browse layer '%s' contains "
                                            "merged browses and exporting "
                                            "of cache is requested. Try "
                                            "without exporting the cache."
                                            % browse_layer_model.id)
                     
                     # get path to sqlite tileset and open it
                     ts = tileset.open(
                         get_tileset_path(browse_layer.id)
                     )
                     
                     for tile_desc in ts.get_tiles(
                         browse_layer.id, 
                         URN_TO_GRID[browse_layer.grid], dim=dim,
                         minzoom=browse_layer.highest_map_level,
                         maxzoom=browse_layer.lowest_map_level
                     ):
                         p.add_cache_file(*tile_desc)
                         
                     
             
             # save browse report xml and add it to the package
             p.add_browse_report(
                 serialize_browse_report(browse_report, pretty_print=True),
                 name="%s_%s_%s_%s.xml" % (
                     browse_report.browse_type,
                     browse_report.responsible_org_name,
                     browse_report.date_time.strftime("%Y%m%d%H%M%S%f"),
                     uuid.uuid4().hex
                 )
             )