Example #1
0
File: __init__.py  Project: EOX-A/ngeo-b
def _save_result_browse_report(browse_report, path):
    """Serialize the browse report as XML and save it under the given path.

    If ``path`` is a directory, a unique filename is generated inside it
    from the report metadata plus the current UTC timestamp. If the target
    file already exists, a warning is logged and nothing is written.
    """
    if isdir(path):
        # generate a filename
        path = join(
            path,
            _valid_path(
                "%s_%s_%s_%s.xml"
                % (
                    browse_report.browse_type,
                    browse_report.responsible_org_name,
                    browse_report.date_time.strftime("%Y%m%d%H%M%S%f"),
                    datetime.utcnow().strftime("%Y%m%d%H%M%S%f"),
                )
            ),
        )

    if exists(path):
        # Bug fix: the '%s' placeholder previously had no argument, so the
        # logging call itself failed internally; pass the path lazily.
        # Also use warning() — warn() is the deprecated alias.
        logger.warning(
            "Could not write result browse report as the file '%s' "
            "already exists.", path
        )
        return

    safe_makedirs(dirname(path))

    with open(path, "w+") as f:
        serialize_browse_report(browse_report, f)
Example #2
0
def _save_result_browse_report(browse_report, path):
    """Serialize the browse report as XML and save it under the given path.

    If ``path`` is a directory, a unique filename is generated inside it
    from the report metadata plus the current UTC timestamp. If the target
    file already exists, a warning is logged and nothing is written.
    """
    if isdir(path):
        # generate a filename
        path = join(path, _valid_path("%s_%s_%s_%s.xml" % (
            browse_report.browse_type, browse_report.responsible_org_name,
            browse_report.date_time.strftime("%Y%m%d%H%M%S%f"),
            datetime.utcnow().strftime("%Y%m%d%H%M%S%f")
        )))

    if exists(path):
        # Bug fix: the '%s' placeholder previously had no argument, so the
        # logging call itself failed internally; pass the path lazily.
        # Also use warning() — warn() is the deprecated alias.
        logger.warning("Could not write result browse report as the file '%s' "
                       "already exists.", path)
        return

    safe_makedirs(dirname(path))

    with open(path, "w+") as f:
        serialize_browse_report(browse_report, f)
def handle_file(xml_filename, image_filename, output_filename, pretty_print):
    """Convert a GSC report XML plus its image file into a browse report.

    Parses and decodes ``xml_filename``, attaches ``image_filename`` to each
    decoded browse, and serializes the resulting browse report to
    ``output_filename`` (pretty-printed if ``pretty_print`` is true).
    """
    # parse and decode input GSC file
    doc = etree.parse(xml_filename)
    decoded = gsc_report_decoder(doc)

    # prepare and initialize browse report and browse
    def _prepare_browse(decoded_browse):
        # map the image extension (e.g. ".tif") to the report's image type
        base, ext = splitext(image_filename)
        decoded_browse["image_type"] = EXT_TO_IMAGE_TYPE[ext]
        decoded_browse["file_name"] = image_filename

        # default the identifier to the image basename when absent
        if "browse_identifier" not in decoded_browse:
            decoded_browse["browse_identifier"] = base

        return ModelInGeotiffBrowse(**decoded_browse)

    # Materialize as a list: on Python 3, map() returns a one-shot iterator,
    # which would be exhausted after a single pass; identical on Python 2.
    decoded["browses"] = [_prepare_browse(b) for b in decoded["browses"]]

    browse_report = BrowseReport(**decoded)

    # serialize the report to the file
    with open(output_filename, "w+") as f:
        serialize_browse_report(browse_report, f, pretty_print=pretty_print)
Example #4
0
    def handle(self, *args, **kwargs):
        """Export the browses of one browse layer into a package archive.

        Exactly one of ``browse_layer_id`` / ``browse_type`` selects the
        layer; ``start``/``end`` optionally restrict the exported time
        interval; ``export_cache`` additionally exports mapcache tiles.

        Raises CommandError on invalid arguments, a missing browse layer,
        or when merged browses prevent a cache export.
        """
        System.init()
        
        # parse command arguments
        self.verbosity = int(kwargs.get("verbosity", 1))
        traceback = kwargs.get("traceback", False)
        self.set_up_logging(["ngeo_browse_server"], self.verbosity, traceback)

        logger.info("Starting browse export from command line.")

        # exactly one of the two layer selectors must be given
        browse_layer_id = kwargs.get("browse_layer_id")
        browse_type = kwargs.get("browse_type")
        if not browse_layer_id and not browse_type:
            logger.error("No browse layer or browse type was specified.")
            raise CommandError("No browse layer or browse type was specified.")
        elif browse_layer_id and browse_type:
            logger.error("Both browse layer and browse type were specified.")
            raise CommandError("Both browse layer and browse type were specified.")
        
        start = kwargs.get("start")
        end = kwargs.get("end")
        compression = kwargs.get("compression")
        export_cache = kwargs["export_cache"]
        output_path = kwargs.get("output_path")
        
        # parse start/end if given
        if start: 
            start = getDateTime(start)
        if end:
            end = getDateTime(end)
        
        # derive a default output filename for the chosen compression
        if not output_path:
            output_path = package.generate_filename(compression)
        
        with package.create(output_path, compression) as p:
            # query the browse layer
            if browse_layer_id:
                try:
                    browse_layer_model = BrowseLayer.objects.get(id=browse_layer_id)
                except BrowseLayer.DoesNotExist:
                    logger.error("Browse layer '%s' does not exist" 
                                 % browse_layer_id)
                    raise CommandError("Browse layer '%s' does not exist" 
                                       % browse_layer_id)
            else:
                try:
                    browse_layer_model = BrowseLayer.objects.get(browse_type=browse_type)
                except BrowseLayer.DoesNotExist:
                    logger.error("Browse layer with browse type '%s' does "
                                 "not exist" % browse_type)
                    raise CommandError("Browse layer with browse type '%s' does "
                                       "not exist" % browse_type)
            
            # store the serialized browse layer definition in the package
            browse_layer = browselayer_data.BrowseLayer.from_model(browse_layer_model)
            p.set_browse_layer(
                serialize_browse_layers((browse_layer,), pretty_print=True)
            )
            
            # query browse reports; optionally filter for start/end time
            browse_reports_qs = BrowseReport.objects.all()
            
            # apply start/end filter
            if start and not end:
                browse_reports_qs = browse_reports_qs.filter(browses__start_time__gte=start)
            elif end and not start:
                browse_reports_qs = browse_reports_qs.filter(browses__end_time__lte=end)
            elif start and end:
                browse_reports_qs = browse_reports_qs.filter(browses__start_time__gte=start, 
                                                             browses__end_time__lte=end)
            
            # use count annotation to exclude all browse reports with no browses
            browse_reports_qs = browse_reports_qs.annotate(
                browse_count=Count('browses')
            ).filter(browse_layer=browse_layer_model, browse_count__gt=0)
            
            # iterate over all browse reports
            for browse_report_model in browse_reports_qs:
                # re-apply the time filter on the browses of this report
                browses_qs = Browse.objects.filter(
                    browse_report=browse_report_model
                )
                if start:
                    browses_qs = browses_qs.filter(start_time__gte=start)
                if end:
                    browses_qs = browses_qs.filter(end_time__lte=end)
                
                browse_report = browsereport_data.BrowseReport.from_model(
                    browse_report_model, browses_qs
                )
                
                # iterate over all browses in the query
                # NOTE(review): izip assumes browse_report iterates in the
                # same order as browses_qs — confirm this pairing is stable
                for browse, browse_model in izip(browse_report, browses_qs):
                    coverage_wrapper = System.getRegistry().getFromFactory(
                        "resources.coverages.wrappers.EOCoverageFactory",
                        {"obj_id": browse_model.coverage_id}
                    )
                    
                    # derive package-internal filenames from the coverage id
                    # NOTE(review): md_filename is computed but never used here
                    base_filename = browse_model.coverage_id
                    data_filename = base_filename + ".tif"
                    md_filename = base_filename + ".xml"
                    footprint_filename = base_filename + ".wkb"
                    
                    browse._file_name = data_filename
                    
                    # add optimized browse image to package
                    data_package = coverage_wrapper.getData()
                    data_package.prepareAccess()
                    browse_file_path = data_package.getGDALDatasetIdentifier()
                    # NOTE(review): the image is opened in text mode; "rb"
                    # would be safer for TIFF data — confirm
                    with open(browse_file_path) as f:
                        p.add_browse(f, data_filename)
                        wkb = coverage_wrapper.getFootprint().wkb
                        p.add_footprint(footprint_filename, wkb)
                    
                    if export_cache:
                        # look up the mapcache time entry covering this browse
                        time_model = mapcache_models.Time.objects.get(
                            start_time__lte=browse_model.start_time,
                            end_time__gte=browse_model.end_time,
                            source__name=browse_layer_model.id
                        )
                        
                        # get "dim" parameter
                        dim = (isotime(time_model.start_time) + "/" +
                               isotime(time_model.end_time))
                        
                        # exit if a merged browse is found
                        if dim != (isotime(browse_model.start_time) + "/" +
                               isotime(browse_model.end_time)):
                            logger.error("Browse layer '%s' contains "
                                         "merged browses and exporting "
                                         "of cache is requested. Try "
                                         "without exporting the cache."
                                         % browse_layer_model.id)
                            raise CommandError("Browse layer '%s' contains "
                                               "merged browses and exporting "
                                               "of cache is requested. Try "
                                               "without exporting the cache."
                                               % browse_layer_model.id)
                        
                        # get path to sqlite tileset and open it
                        # NOTE(review): keyed on browse_type here, while a
                        # sibling implementation keys on browse_layer.id —
                        # confirm which is intended
                        ts = tileset.open(
                            get_tileset_path(browse_layer.browse_type)
                        )
                        
                        # copy every cached tile of this time slice
                        for tile_desc in ts.get_tiles(
                            browse_layer.id, 
                            URN_TO_GRID[browse_layer.grid], dim=dim,
                            minzoom=browse_layer.highest_map_level,
                            maxzoom=browse_layer.lowest_map_level
                        ):
                            p.add_cache_file(*tile_desc)
                            
                        
                
                # save browse report xml and add it to the package
                p.add_browse_report(
                    serialize_browse_report(browse_report, pretty_print=True),
                    name="%s_%s_%s_%s.xml" % (
                        browse_report.browse_type,
                        browse_report.responsible_org_name,
                        browse_report.date_time.strftime("%Y%m%d%H%M%S%f"),
                        uuid.uuid4().hex
                    )
                )

        logger.info("Successfully finished browse export from command line.")
Example #5
0
 def handle(self, *args, **kwargs):
     """Export the browses of one browse layer into a package archive.

     Exactly one of ``browse_layer_id`` / ``browse_type`` selects the
     layer; ``start``/``end`` optionally restrict the exported time
     interval; ``export_cache`` additionally exports mapcache tiles.

     Raises CommandError on invalid arguments, a missing browse layer,
     or when merged browses prevent a cache export.
     """
     # parse command arguments
     self.verbosity = int(kwargs.get("verbosity", 1))
     traceback = kwargs.get("traceback", False)
     self.set_up_logging(["ngeo_browse_server"], self.verbosity, traceback)
     
     # exactly one of the two layer selectors must be given
     browse_layer_id = kwargs.get("browse_layer_id")
     browse_type = kwargs.get("browse_type")
     if not browse_layer_id and not browse_type:
         raise CommandError("No browse layer or browse type was specified.")
     elif browse_layer_id and browse_type:
         raise CommandError("Both browse layer and browse type were specified.")
     
     start = kwargs.get("start")
     end = kwargs.get("end")
     compression = kwargs.get("compression")
     export_cache = kwargs["export_cache"]
     output_path = kwargs.get("output_path")
     
     # parse start/end if given
     if start: 
         start = getDateTime(start)
     if end:
         end = getDateTime(end)
     
     # derive a default output filename for the chosen compression
     if not output_path:
         output_path = package.generate_filename(compression)
     
     with package.create(output_path, compression) as p:
         # query the browse layer
         if browse_layer_id:
             try:
                 browse_layer_model = BrowseLayer.objects.get(id=browse_layer_id)
             except BrowseLayer.DoesNotExist:
                 raise CommandError("Browse layer '%s' does not exist" 
                                    % browse_layer_id)
         else:
             try:
                 browse_layer_model = BrowseLayer.objects.get(browse_type=browse_type)
             except BrowseLayer.DoesNotExist:
                 raise CommandError("Browse layer with browse type '%s' does "
                                    "not exist" % browse_type)
         
         # store the serialized browse layer definition in the package
         browse_layer = browselayer_data.BrowseLayer.from_model(browse_layer_model)
         p.set_browse_layer(
             serialize_browse_layers((browse_layer,), pretty_print=True)
         )
         
         # query browse reports; optionally filter for start/end time
         browse_reports_qs = BrowseReport.objects.all()
         
         # apply start/end filter
         if start and not end:
             browse_reports_qs = browse_reports_qs.filter(browses__start_time__gte=start)
         elif end and not start:
             browse_reports_qs = browse_reports_qs.filter(browses__end_time__lte=end)
         elif start and end:
             browse_reports_qs = browse_reports_qs.filter(browses__start_time__gte=start, 
                                                          browses__end_time__lte=end)
         
         # use count annotation to exclude all browse reports with no browses
         browse_reports_qs = browse_reports_qs.annotate(
             browse_count=Count('browses')
         ).filter(browse_layer=browse_layer_model, browse_count__gt=0)
         
         # iterate over all browse reports
         for browse_report_model in browse_reports_qs:
             # re-apply the time filter on the browses of this report
             browses_qs = Browse.objects.filter(
                 browse_report=browse_report_model
             )
             if start:
                 browses_qs = browses_qs.filter(start_time__gte=start)
             if end:
                 browses_qs = browses_qs.filter(end_time__lte=end)
             
             browse_report = browsereport_data.BrowseReport.from_model(
                 browse_report_model, browses_qs
             )
             
             # iterate over all browses in the query
             # NOTE(review): izip assumes browse_report iterates in the
             # same order as browses_qs — confirm this pairing is stable
             for browse, browse_model in izip(browse_report, browses_qs):
                 coverage = eoxs_models.RectifiedDataset.objects.get(
                     identifier=browse_model.coverage_id
                 )
                 
                 # derive package-internal filenames from the coverage id
                 # NOTE(review): md_filename is computed but never used here
                 base_filename = browse_model.coverage_id
                 data_filename = base_filename + ".tif"
                 md_filename = base_filename + ".xml"
                 footprint_filename = base_filename + ".wkb"
                 
                 browse._file_name = data_filename
                 
                 # add optimized browse image to package
                 data_item = coverage.data_items.get(
                     semantic__startswith="bands"
                 )
                 browse_file_path = data_item.location
                 
                 # NOTE(review): the image is opened in text mode; "rb"
                 # would be safer for TIFF data — confirm
                 with open(browse_file_path) as f:
                     p.add_browse(f, data_filename)
                     wkb = coverage.footprint.wkb
                     p.add_footprint(footprint_filename, wkb)
                 
                 if export_cache:
                     # look up the mapcache time entry covering this browse
                     time_model = mapcache_models.Time.objects.get(
                         start_time__lte=browse_model.start_time,
                         end_time__gte=browse_model.end_time,
                         source__name=browse_layer_model.id
                     )
                     
                     # get "dim" parameter
                     dim = (isoformat(time_model.start_time) + "/" +
                            isoformat(time_model.end_time))
                     
                     # exit if a merged browse is found
                     if dim != (isoformat(browse_model.start_time) + "/" +
                            isoformat(browse_model.end_time)):
                         raise CommandError("Browse layer '%s' contains "
                                            "merged browses and exporting "
                                            "of cache is requested. Try "
                                            "without exporting the cache."
                                            % browse_layer_model.id)
                     
                     # get path to sqlite tileset and open it
                     # NOTE(review): keyed on browse_layer.id here, while a
                     # sibling implementation keys on browse_type — confirm
                     # which is intended
                     ts = tileset.open(
                         get_tileset_path(browse_layer.id)
                     )
                     
                     # copy every cached tile of this time slice
                     for tile_desc in ts.get_tiles(
                         browse_layer.id, 
                         URN_TO_GRID[browse_layer.grid], dim=dim,
                         minzoom=browse_layer.highest_map_level,
                         maxzoom=browse_layer.lowest_map_level
                     ):
                         p.add_cache_file(*tile_desc)
                         
                     
             
             # save browse report xml and add it to the package
             p.add_browse_report(
                 serialize_browse_report(browse_report, pretty_print=True),
                 name="%s_%s_%s_%s.xml" % (
                     browse_report.browse_type,
                     browse_report.responsible_org_name,
                     browse_report.date_time.strftime("%Y%m%d%H%M%S%f"),
                     uuid.uuid4().hex
                 )
             )