Example #1
def main(args):
    # create a parser instance
    parser = argparse.ArgumentParser(
        add_help=True,
        fromfile_prefix_chars="@",
        argument_default=argparse.SUPPRESS,
        formatter_class=argparse.RawDescriptionHelpFormatter,
    )

    parser.description = textwrap.dedent(
        """\
    Takes <infile> raster data and pre-processes it into
    <outfiles_basename>.tif, a GeoTIFF converted to RGB using default internal
    tiling, internal overviews, no compression, and 0 as no-data value, and
    <outfiles_basename>.xml, an EOxServer simple XML EO metadata file.

    The output files are ready to be used with the eoxs_register command of an
    EOxServer instance.

    !!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!
    !!!Caution, you will lose data quality!!!
    !!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!

    The data quality of <outfiles_basename>.tif is most likely different from 
    <infile> because of the pre-processing applied. Reasons include 
    re-projection, compression, conversion to RGB, indexing, etc.
    
    Examples:
    
    # basic usage with no creation of metadata
    eoxserver-preprocess.py --no-metadata input.tif
    
    # basic usage with creation of metadata with specific basename
    eoxserver-preprocess.py --coverage-id=a --begin-time=2008-03-01T13:00:00Z \\
                            --end-time=2008-03-01T13:00:00Z input.tif output
                            
    # with RGBA band selection
    eoxserver-preprocess.py --bands 1:0:255,2:0:255,3:0:100,4 --rgba \\
                            --no-metadata input.tif

    # with DEFLATE compression and color index from a palette file
    eoxserver-preprocess.py --compression=DEFLATE --zlevel=2 --indexed \\
                            --pct palette.vrt --no-metadata input.tif
    
    # (re-)setting the extent of the file
    eoxserver-preprocess.py --extent 0,0,10,10,4326 --no-metadata input.tif
    
    # Using GCPs
    eoxserver-preprocess.py --no-metadata --gcp 0,0,10,10 --gcp 2560,0,50,10 \\
                            --gcp 0,1920,10,50 --gcp 2560,1920,50,50 \\
                            --georef-crs 4326 input.tif
                            
    # reading arguments from a file (1 line per argument), with overrides
    eoxserver-preprocess.py @args.txt --crs=3035 --no-tiling input.tif
    """
    )

    # ===========================================================================
    # Metadata parsing group
    # ===========================================================================

    # TODO: won't work with mutually exclusive groups. Bug in argparse?
    # md_g = parser.add_mutually_exclusive_group(required=True)

    # md_g.add_argument("--no-metadata", dest="generate_metadata",
    #                  action="store_false",
    #                  help="Explicitly turn off the creation of a metadata "
    #                       "file.")

    # md_g_data = md_g.add_argument_group()

    # md_g_data.add_argument("--begin-time", dest="begin_time",
    #                       type=_parse_datetime,
    #                       help="The ISO 8601 timestamp of the begin time.")
    # md_g_data.add_argument("--end-time", dest="end_time",
    #                       type=_parse_datetime,
    #                       help="The ISO 8601 timestamp of the end time.")
    # md_g_data.add_argument("--coverage-id", dest="coverage_id",
    #                       type=_parse_coverage_id,
    #                       help="The ID of the coverage, must be a valid "
    #                            "NCName.")

    # should be mutually exclusive
    parser.add_argument(
        "--no-metadata",
        dest="generate_metadata",
        action="store_false",
        help="Explicitly turn off the creation of a metadata " "file.",
    )
    parser.add_argument(
        "--begin-time", dest="begin_time", type=_parse_datetime, help="The ISO 8601 timestamp of the begin time."
    )
    parser.add_argument(
        "--end-time", dest="end_time", type=_parse_datetime, help="The ISO 8601 timestamp of the end time."
    )
    parser.add_argument(
        "--coverage-id",
        dest="coverage_id",
        type=_parse_coverage_id,
        help="The ID of the coverage, must be a valid NCName.",
    )

    # ===========================================================================
    # Georeference group
    # ===========================================================================

    georef_g = parser.add_mutually_exclusive_group()
    georef_g.add_argument(
        "--extent", dest="extent", type=_parse_extent, help="The extent of the dataset, as a 4-tuple of " "floats."
    )
    # georef_g.add_argument("--footprint", dest="footprint",
    #                      type=_parse_footprint,
    #                      help="The footprint of the dataset, as a Polygon WKT.")
    georef_g.add_argument(
        "--gcp",
        dest="gcps",
        type=_parse_gcp,
        action="append",
        help="A Ground Control Point in the format: " "'pixel,line,easting,northing[,elevation]'.",
    )

    parser.add_argument("--georef-crs", dest="georef_crs")

    # ===========================================================================
    # Arbitraries
    # ===========================================================================

    parser.add_argument("--crs", dest="crs", type=int, help="The desired output CRS ID of the dataset.")
    parser.add_argument(
        "--no-tiling",
        dest="tiling",
        action="store_false",
        default=True,
        help="Explicitly turn of the tiling of the output " "dataset.",
    )
    parser.add_argument(
        "--no-overviews",
        dest="overviews",
        action="store_false",
        default=True,
        help="Explicitly turn of the creation of overview " "images of the output dataset.",
    )

    # ===========================================================================
    # Bands group
    # ===========================================================================

    bands_g = parser.add_mutually_exclusive_group()

    bands_g.add_argument(
        "--rgba",
        dest="bandmode",
        action="store_const",
        const=RGBA,
        help="Convert the image to RGBA, using the first four " "bands.",
    )
    bands_g.add_argument(
        "--orig-bands",
        dest="bandmode",
        action="store_const",
        const=ORIG_BANDS,
        help="Explicitly keep all original bands.",
    )

    parser.add_argument(
        "--bands",
        dest="bands",
        type=_parse_bands,
        help="A comma separated list of bands with optional "
        "subsets in the form: 'no[:low:high]'. Either "
        "three bands, or four when --rgba is requested.",
    )

    parser.add_argument(
        "--footprint-alpha",
        dest="footprint_alpha",
        action="store_true",
        default=False,
        help="A comma separated list of bands with optional "
        "subsets in the form: 'no[:low:high]'. Either "
        "three bands, or four when --rgba is requested.",
    )

    parser.add_argument(
        "--compression",
        dest="compression",
        choices=GeoTIFFFormatSelection.SUPPORTED_COMPRESSIONS,
        help="The desired compression technique.",
    )
    parser.add_argument(
        "--jpeg-quality",
        dest="jpeg_quality",
        type=int,
        help="The JPEG algorithm quality when JPEG compression " "is requested. Default is 75.",
    )
    parser.add_argument(
        "--zlevel", dest="zlevel", type=int, help="The zlevel quality setting when DEFLATE " "compression is requested."
    )

    indexed_group = parser.add_argument_group()

    # TODO: pct depends on indexed

    parser.add_argument("--indexed", dest="color_index", action="store_true", help="Create a paletted (indexed) image.")
    parser.add_argument(
        "--pct",
        dest="palette_file",
        help="Use the given file as color palette. If not " "given, a new palette will be calculated.",
    )

    parser.add_argument(
        "--no-data-value",
        dest="no_data_value",
        type=_parse_nodata_values,
        help="Either one, or a list of no-data values.",
    )

    parser.add_argument(
        "--co",
        dest="creation_options",
        action="append",
        help="Additional GDAL dataset creation options. See http://www.gdal.org/frmt_gtiff.html",
    )

    parser.add_argument("--traceback", action="store_true", default=False)

    parser.add_argument(
        "--force", "-f", dest="force", action="store_true", default=False, help="Override files, if they already exist."
    )

    parser.add_argument(
        "--verbosity", "-v", dest="verbosity", type=int, default=1, help="Set the verbosity (0, 1, 2). Default is 1."
    )

    parser.add_argument("input_filename", metavar="infile", nargs=1, help="The input raster file to be processed.")
    parser.add_argument(
        "output_basename",
        metavar="outfiles_basename",
        nargs="?",
        help="The base name of the outputfile(s) to be " "generated.",
    )

    values = vars(parser.parse_args(args))

    # check metadata values
    if "generate_metadata" in values and ("begin_time" in values or "end_time" in values or "coverage_id" in values):
        parser.error("--no-metadata is mutually exclusive with --begin-time, " "--end-time and --coverage-id.")

    elif "generate_metadata" not in values and not (
        "begin_time" in values and "end_time" in values and "coverage_id" in values
    ):
        parser.error("Enter the full metadata with --begin-time, --end-time " "and --coverage-id.")

    # hack to flatten the list
    values["input_filename"] = values["input_filename"][0]

    georef_crs = values.pop("georef_crs", None)

    if "extent" in values:
        values["geo_reference"] = Extent(*values.pop("extent"), srid=georef_crs or 4326)

    if "gcps" in values:
        values["geo_reference"] = GCPList(values.pop("gcps"), georef_crs or 4326)

    if "palette_file" in values and not "color_index" in values:
        parser.error("--pct can only be used with --indexed")

    # Extract format and execution specific values
    format_values = _extract(values, ("tiling", "compression", "jpeg_quality", "zlevel", "creation_options"))
    exec_values = _extract(values, ("input_filename", "geo_reference", "generate_metadata"))
    other_values = _extract(values, ("traceback",))
    metadata_values = _extract(values, ("coverage_id", "begin_time", "end_time"))

    force = values.pop("force", True)
    verbosity = values.pop("verbosity")
    output_basename = values.pop("output_basename", None)
    input_filename = exec_values.get("input_filename")

    # setup logging
    if verbosity > 0:
        if verbosity == 1:
            level = logging.WARN
        elif verbosity == 2:
            level = logging.INFO
        elif verbosity >= 3:
            level = logging.DEBUG
        logging.basicConfig(format="%(levelname)s: %(message)s", stream=sys.stderr, level=level)

    try:
        # create a format selection
        format_selection = get_format_selection("GTiff", **format_values)

        # TODO: make 'tif' dependent on format selection
        # check files exist
        if not output_basename:
            output_filename = splitext(input_filename)[0] + "_proc" + format_selection.extension
            output_md_filename = splitext(input_filename)[0] + "_proc.xml"

        else:
            output_filename = output_basename + format_selection.extension
            output_md_filename = output_basename + ".xml"

        if not force:
            check_file_existence(output_filename)

        exec_values["output_filename"] = output_filename

        # create and run the preprocessor
        preprocessor = WMSPreProcessor(format_selection, **values)
        result = preprocessor.process(**exec_values)

        if exec_values.get("generate_metadata", True):
            if not force:
                check_file_existence(output_md_filename)

            encoder = NativeMetadataFormatEncoder()
            xml = DOMElementToXML(
                encoder.encodeMetadata(
                    metadata_values["coverage_id"],
                    metadata_values["begin_time"],
                    metadata_values["end_time"],
                    result.footprint_raw,
                )
            )

            with open(output_md_filename, "w+") as f:
                f.write(xml)

    except Exception as e:
        # error wrapping
        if other_values["traceback"]:
            traceback.print_exc()
        sys.stderr.write("%s: %s\n" % (type(e).__name__, str(e)))
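
The script references several helper functions (_parse_datetime, _parse_extent, _parse_gcp, _extract, ...) that are defined elsewhere in the module. As a rough illustration of how the parsed values are split up above, here is a minimal sketch of what _extract might look like; this is a hypothetical reconstruction from its call sites, not the project's actual implementation:

def _extract(values, keys):
    # Pop each requested key out of the parsed-values dict and collect the
    # ones that were actually present. Because the parser is created with
    # argument_default=argparse.SUPPRESS, unused options never appear in
    # `values` at all.
    extracted = {}
    for key in keys:
        if key in values:
            extracted[key] = values.pop(key)
    return extracted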
Example #2
File: __init__.py Project: EOX-A/ngeo-b
def ingest_browse_report(parsed_browse_report, do_preprocessing=True, config=None):
    """ Ingests a browse report. reraise_exceptions if errors shall be handled
    externally
    """

    # initialize the EOxServer system/registry/configuration
    System.init()

    try:
        # get the according browse layer
        browse_type = parsed_browse_report.browse_type
        browse_layer = models.BrowseLayer.objects.get(browse_type=browse_type)
    except models.BrowseLayer.DoesNotExist:
        logger.warn("Browse layer with browse type '%s' does not " "exist." % parsed_browse_report.browse_type)
        raise IngestionException(
            "Browse layer with browse type '%s' does not " "exist." % parsed_browse_report.browse_type
        )

    # generate a browse report model
    browse_report = create_browse_report(parsed_browse_report, browse_layer)

    # initialize the preprocessor with configuration values
    crs = None
    if browse_layer.grid == "urn:ogc:def:wkss:OGC:1.0:GoogleMapsCompatible":
        crs = "EPSG:3857"
    elif browse_layer.grid == "urn:ogc:def:wkss:OGC:1.0:GoogleCRS84Quad":
        crs = "EPSG:4326"

    logger.debug("Using CRS '%s' ('%s')." % (crs, browse_layer.grid))

    # create the required preprocessor/format selection
    format_selection = get_format_selection("GTiff", **get_format_config(config))
    if do_preprocessing:
        # add config parameters and custom params
        params = get_optimization_config(config)

        # add radiometric interval
        rad_min = browse_layer.radiometric_interval_min
        if rad_min is not None:
            params["radiometric_interval_min"] = rad_min
        else:
            rad_min = "min"
        rad_max = browse_layer.radiometric_interval_max
        if rad_max is not None:
            params["radiometric_interval_max"] = rad_max
        else:
            rad_max = "max"

        # add band selection
        if browse_layer.r_band is not None and browse_layer.g_band is not None and browse_layer.b_band is not None:

            bands = [
                (browse_layer.r_band, rad_min, rad_max),
                (browse_layer.g_band, rad_min, rad_max),
                (browse_layer.b_band, rad_min, rad_max),
            ]

            if params["bandmode"] == RGBA:
                # RGBA
                bands.append((0, 0, 0))

            params["bands"] = bands

        preprocessor = NGEOPreProcessor(format_selection, crs=crs, **params)
    else:
        preprocessor = None  # TODO: CopyPreprocessor

    report_result = IngestBrowseReportResult()

    succeded = []
    failed = []

    timestamp = datetime.utcnow().strftime("%Y%m%d%H%M%S%f")
    browse_dirname = _valid_path(
        "%s_%s_%s_%s"
        % (
            browse_type,
            browse_report.responsible_org_name,
            browse_report.date_time.strftime("%Y%m%d%H%M%S%f"),
            timestamp,
        )
    )
    success_dir = join(get_success_dir(config), browse_dirname)
    failure_dir = join(get_failure_dir(config), browse_dirname)

    if exists(success_dir):
        logger.warn("Success directory '%s' already exists.")
    else:
        makedirs(success_dir)
    if exists(failure_dir):
        logger.warn("Failure directory '%s' already exists.")
    else:
        makedirs(failure_dir)

    # iterate over all browses in the browse report
    for parsed_browse in parsed_browse_report:
        # transaction management per browse
        with transaction.commit_manually():
            with transaction.commit_manually(using="mapcache"):
                try:
                    seed_areas = []
                    # try ingest a single browse and log success
                    result = ingest_browse(
                        parsed_browse,
                        browse_report,
                        browse_layer,
                        preprocessor,
                        crs,
                        success_dir,
                        failure_dir,
                        seed_areas,
                        config=config,
                    )

                    report_result.add(result)
                    succeded.append(parsed_browse)

                    # commit here to allow seeding
                    transaction.commit()
                    transaction.commit(using="mapcache")

                    logger.info("Committed changes to database.")

                    for minx, miny, maxx, maxy, start_time, end_time in seed_areas:
                        try:

                            # seed MapCache synchronously
                            # TODO: maybe replace this with an async solution
                            seed_mapcache(
                                tileset=browse_layer.id,
                                grid=browse_layer.grid,
                                minx=minx,
                                miny=miny,
                                maxx=maxx,
                                maxy=maxy,
                                minzoom=browse_layer.lowest_map_level,
                                maxzoom=browse_layer.highest_map_level,
                                start_time=start_time,
                                end_time=end_time,
                                delete=False,
                                **get_mapcache_seed_config(config)
                            )
                            logger.info("Successfully finished seeding.")

                        except Exception as e:
                            logger.warn("Seeding failed: %s" % str(e))

                    # log ingestions for report generation
                    # date/browseType/browseLayerId/start/end
                    report_logger.info(
                        "/\\/\\".join(
                            (
                                datetime.utcnow().isoformat("T") + "Z",
                                parsed_browse_report.browse_type,
                                browse_layer.id,
                                (
                                    parsed_browse.start_time.replace(tzinfo=None) - parsed_browse.start_time.utcoffset()
                                ).isoformat("T")
                                + "Z",
                                (
                                    parsed_browse.end_time.replace(tzinfo=None) - parsed_browse.end_time.utcoffset()
                                ).isoformat("T")
                                + "Z",
                            )
                        )
                    )

                except Exception as e:
                    # report error
                    logger.error("Failure during ingestion of browse '%s'." % parsed_browse.browse_identifier)
                    logger.error("Exception was '%s': %s" % (type(e).__name__, str(e)))
                    logger.debug(traceback.format_exc() + "\n")

                    # undo latest changes, append the failure and continue
                    report_result.add(
                        IngestBrowseFailureResult(
                            parsed_browse.browse_identifier, getattr(e, "code", None) or type(e).__name__, str(e)
                        )
                    )
                    failed.append(parsed_browse)

                    transaction.rollback()
                    transaction.rollback(using="mapcache")
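
The loop above wraps every browse in its own pair of manual transactions, one on the default database and one on the "mapcache" database, so a failed browse is rolled back without affecting the rest of the report. A stripped-down sketch of that pattern, assuming Django < 1.6 (where transaction.commit_manually still exists); handle() is a placeholder for the actual ingestion work:

from django.db import transaction

def process_items(items):
    for item in items:
        # one manual transaction per item, on both databases
        with transaction.commit_manually():
            with transaction.commit_manually(using="mapcache"):
                try:
                    handle(item)  # placeholder
                    transaction.commit()
                    transaction.commit(using="mapcache")
                except Exception:
                    # undo this item's changes and continue with the next
                    transaction.rollback()
                    transaction.rollback(using="mapcache")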
Example #3
File: __init__.py Project: baloola/ngeo-b
def ingest_browse_report(parsed_browse_report, do_preprocessing=True, config=None):
    """ Ingests a browse report. reraise_exceptions if errors shall be handled
    externally
    """

    # initialize the EOxServer system/registry/configuration
    System.init()

    try:
        # get the according browse layer
        browse_type = parsed_browse_report.browse_type
        browse_layer = models.BrowseLayer.objects.get(browse_type=browse_type)
    except models.BrowseLayer.DoesNotExist:
        logger.warn("Browse layer with browse type '%s' does not "
                    "exist." % parsed_browse_report.browse_type)
        raise IngestionException("Browse layer with browse type '%s' does not "
                                 "exist." % parsed_browse_report.browse_type)

    # generate a browse report model
    browse_report = create_browse_report(parsed_browse_report, browse_layer)

    # initialize the preprocessor with configuration values
    crs = None
    if browse_layer.grid == "urn:ogc:def:wkss:OGC:1.0:GoogleMapsCompatible":
        crs = "EPSG:3857"
    elif browse_layer.grid == "urn:ogc:def:wkss:OGC:1.0:GoogleCRS84Quad":
        crs = "EPSG:4326"

    logger.debug("Using CRS '%s' ('%s')." % (crs, browse_layer.grid))

    # create the required preprocessor/format selection
    format_selection = get_format_selection("GTiff",
                                            **get_format_config(config))
    if do_preprocessing:
        # add config parameters and custom params
        params = get_optimization_config(config)

        # add radiometric interval
        rad_min = browse_layer.radiometric_interval_min
        if rad_min is not None:
            params["radiometric_interval_min"] = rad_min
        else:
            rad_min = "min"
        rad_max = browse_layer.radiometric_interval_max
        if rad_max is not None:
            params["radiometric_interval_max"] = rad_max
        else:
            rad_max = "max"

        # add band selection
        if (browse_layer.r_band is not None and
            browse_layer.g_band is not None and
            browse_layer.b_band is not None):

            bands = [(browse_layer.r_band, rad_min, rad_max),
                     (browse_layer.g_band, rad_min, rad_max),
                     (browse_layer.b_band, rad_min, rad_max)]

            if params["bandmode"] == RGBA:
                # RGBA
                bands.append((0, 0, 0))

            params["bands"] = bands

        preprocessor = NGEOPreProcessor(format_selection, crs=crs, **params)
    else:
        preprocessor = None # TODO: CopyPreprocessor

    report_result = IngestBrowseReportResult()

    succeded = []
    failed = []

    timestamp = datetime.utcnow().strftime("%Y%m%d%H%M%S%f")
    browse_dirname = _valid_path("%s_%s_%s_%s" % (
        browse_type, browse_report.responsible_org_name,
        browse_report.date_time.strftime("%Y%m%d%H%M%S%f"),
        timestamp
    ))
    success_dir = join(get_success_dir(config), browse_dirname)
    failure_dir = join(get_failure_dir(config), browse_dirname)

    if exists(success_dir):
        logger.warn("Success directory '%s' already exists.")
    else:
        makedirs(success_dir)
    if exists(failure_dir):
        logger.warn("Failure directory '%s' already exists.")
    else:
        makedirs(failure_dir)

    # iterate over all browses in the browse report
    for parsed_browse in parsed_browse_report:
        # transaction management per browse
        with transaction.commit_manually():
            with transaction.commit_manually(using="mapcache"):
                try:
                    seed_areas = []
                    # try ingest a single browse and log success
                    result = ingest_browse(parsed_browse, browse_report,
                                           browse_layer, preprocessor, crs,
                                           success_dir, failure_dir,
                                           seed_areas, config=config)

                    report_result.add(result)
                    succeded.append(parsed_browse)

                    # commit here to allow seeding
                    transaction.commit()
                    transaction.commit(using="mapcache")

                    logger.info("Committed changes to database.")

                    for minx, miny, maxx, maxy, start_time, end_time in seed_areas:
                        try:

                            # seed MapCache synchronously
                            # TODO: maybe replace this with an async solution
                            seed_mapcache(tileset=browse_layer.id,
                                          grid=browse_layer.grid,
                                          minx=minx, miny=miny,
                                          maxx=maxx, maxy=maxy,
                                          minzoom=browse_layer.lowest_map_level,
                                          maxzoom=browse_layer.highest_map_level,
                                          start_time=start_time,
                                          end_time=end_time,
                                          delete=False,
                                          **get_mapcache_seed_config(config))
                            logger.info("Successfully finished seeding.")

                        except Exception as e:
                            logger.warn("Seeding failed: %s" % str(e))

                    # log ingestions for report generation
                    # date/browseType/browseLayerId/start/end
                    report_logger.info("/\\/\\".join((
                        datetime.utcnow().isoformat("T") + "Z",
                        parsed_browse_report.browse_type,
                        browse_layer.id,
                        (parsed_browse.start_time.replace(tzinfo=None)-parsed_browse.start_time.utcoffset()).isoformat("T") + "Z",
                        (parsed_browse.end_time.replace(tzinfo=None)-parsed_browse.end_time.utcoffset()).isoformat("T") + "Z"
                    )))

                except Exception as e:
                    # report error
                    logger.error("Failure during ingestion of browse '%s'." %
                                 parsed_browse.browse_identifier)
                    logger.error("Exception was '%s': %s" % (type(e).__name__, str(e)))
                    logger.debug(traceback.format_exc() + "\n")

                    # undo latest changes, append the failure and continue
                    report_result.add(IngestBrowseFailureResult(
                        parsed_browse.browse_identifier,
                        getattr(e, "code", None) or type(e).__name__, str(e))
                    )
                    failed.append(parsed_browse)

                    transaction.rollback()
                    transaction.rollback(using="mapcache")
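
The report-logger line above normalizes timezone-aware datetimes to naive UTC with the replace(tzinfo=None) - utcoffset() idiom before appending "Z". A small self-contained illustration of that idiom (the fixed-offset tzinfo class exists only for this demonstration):

from datetime import datetime, timedelta, tzinfo

class FixedOffset(tzinfo):
    # minimal UTC+02:00 timezone, just for this example
    def utcoffset(self, dt):
        return timedelta(hours=2)
    def dst(self, dt):
        return timedelta(0)

aware = datetime(2012, 3, 1, 13, 0, 0, tzinfo=FixedOffset())
# strip the tzinfo and subtract the offset to obtain naive UTC
naive_utc = aware.replace(tzinfo=None) - aware.utcoffset()
print(naive_utc.isoformat("T") + "Z")  # 2012-03-01T11:00:00Z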
Example #4
def main(args):
    # create a parser instance
    parser = argparse.ArgumentParser(add_help=True, fromfile_prefix_chars='@',
                                     argument_default=argparse.SUPPRESS,
                                     formatter_class=argparse.RawDescriptionHelpFormatter)
    
    parser.description = textwrap.dedent("""\
    Takes <infile> raster data and pre-processes it into
    <outfiles_basename>.tif, a GeoTIFF converted to RGB using default internal
    tiling, internal overviews, no compression, and 0 as no-data value, and
    <outfiles_basename>.xml, an EOxServer simple XML EO metadata file.

    The output files are ready to be used with the eoxs_register command of an
    EOxServer instance.

    !!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!
    !!!Caution, you will lose data quality!!!
    !!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!

    The data quality of <outfiles_basename>.tif is most likely different from 
    <infile> because of the pre-processing applied. Reasons include 
    re-projection, compression, conversion to RGB, indexing, etc.
    
    Examples:
    
    # basic usage with no creation of metadata
    eoxserver-preprocess.py --no-metadata input.tif
    
    # basic usage with creation of metadata with specific basename
    eoxserver-preprocess.py --coverage-id=a --begin-time=2008-03-01T13:00:00Z \\
                            --end-time=2008-03-01T13:00:00Z input.tif output
                            
    # with RGBA band selection
    eoxserver-preprocess.py --bands 1:0:255,2:0:255,3:0:100,4 --rgba \\
                            --no-metadata input.tif

    # with DEFLATE compression and color index from a palette file
    eoxserver-preprocess.py --compression=DEFLATE --zlevel=2 --indexed \\
                            --pct palette.vrt --no-metadata input.tif
    
    # (re-)setting the extent of the file
    eoxserver-preprocess.py --extent 0,0,10,10,4326 --no-metadata input.tif
    
    # Using GCPs
    eoxserver-preprocess.py --no-metadata --gcp 0,0,10,10 --gcp 2560,0,50,10 \\
                            --gcp 0,1920,10,50 --gcp 2560,1920,50,50 \\
                            --georef-crs 4326 input.tif
                            
    # reading arguments from a file (1 line per argument), with overrides
    eoxserver-preprocess.py @args.txt --crs=3035 --no-tiling input.tif
    """)
    
    #===========================================================================
    # Metadata parsing group
    #===========================================================================
    
    # TODO: won't work with mutually exclusive groups. Bug in argparse?
    #md_g = parser.add_mutually_exclusive_group(required=True)
    
    #md_g.add_argument("--no-metadata", dest="generate_metadata",
    #                  action="store_false",
    #                  help="Explicitly turn off the creation of a metadata " 
    #                       "file.")
    
    #md_g_data = md_g.add_argument_group()
    
    #md_g_data.add_argument("--begin-time", dest="begin_time", 
    #                       type=_parse_datetime,
    #                       help="The ISO 8601 timestamp of the begin time.")
    #md_g_data.add_argument("--end-time", dest="end_time",
    #                       type=_parse_datetime,
    #                       help="The ISO 8601 timestamp of the end time.")
    #md_g_data.add_argument("--coverage-id", dest="coverage_id",
    #                       type=_parse_coverage_id, 
    #                       help="The ID of the coverage, must be a valid "
    #                            "NCName.")
    
    # should be mutually exclusive
    parser.add_argument("--no-metadata", dest="generate_metadata",
                        action="store_false",
                        help="Explicitly turn off the creation of a metadata " 
                             "file.")
    parser.add_argument("--begin-time", dest="begin_time", type=_parse_datetime,
                        help="The ISO 8601 timestamp of the begin time.")
    parser.add_argument("--end-time", dest="end_time", type=_parse_datetime,
                        help="The ISO 8601 timestamp of the end time.")
    parser.add_argument("--coverage-id", dest="coverage_id",
                        type=_parse_coverage_id, 
                        help="The ID of the coverage, must be a valid NCName.")
    
    #===========================================================================
    # Georeference group
    #===========================================================================
    
    georef_g = parser.add_mutually_exclusive_group()
    georef_g.add_argument("--extent", dest="extent", type=_parse_extent,
                          help="The extent of the dataset, as a 4-tuple of "
                               "floats.")
    #georef_g.add_argument("--footprint", dest="footprint", 
    #                      type=_parse_footprint,
    #                      help="The footprint of the dataset, as a Polygon WKT.")
    georef_g.add_argument("--gcp", dest="gcps", type=_parse_gcp,
                          action="append",
                          help="A Ground Control Point in the format: "
                               "'pixel,line,easting,northing[,elevation]'.")
    
    parser.add_argument("--georef-crs", dest="georef_crs")
    
    #===========================================================================
    # Arbitraries
    #===========================================================================
    
    parser.add_argument("--crs", dest="crs", type=int, 
                        help="The desired output CRS ID of the dataset.")
    parser.add_argument("--no-tiling", dest="tiling", action="store_false",
                        default=True, 
                        help="Explicitly turn of the tiling of the output "
                             "dataset.")
    parser.add_argument("--no-overviews", dest="overviews",
                        action="store_false", default=True,
                        help="Explicitly turn of the creation of overview "
                             "images of the output dataset.")
    
    #===========================================================================
    # Bands group
    #===========================================================================
    
    bands_g = parser.add_mutually_exclusive_group()
    
    bands_g.add_argument("--rgba", dest="bandmode", action="store_const",
                         const=RGBA,
                         help="Convert the image to RGBA, using the first four "
                              "bands.")
    bands_g.add_argument("--orig-bands", dest="bandmode", action="store_const",
                         const=ORIG_BANDS,
                         help="Explicitly keep all original bands.")
    
    parser.add_argument("--bands", dest="bands", type=_parse_bands, 
                        help="A comma separated list of bands with optional "
                             "subsets in the form: 'no[:low:high]'. Either "
                             "three bands, or four when --rgba is requested.")

    parser.add_argument("--footprint-alpha", dest="footprint_alpha",
                        action="store_true", default=False,
                        help="A comma separated list of bands with optional "
                             "subsets in the form: 'no[:low:high]'. Either "
                             "three bands, or four when --rgba is requested.")
    
    parser.add_argument("--compression", dest="compression",
                        choices=GeoTIFFFormatSelection.SUPPORTED_COMPRESSIONS,
                        help="The desired compression technique.")
    parser.add_argument("--jpeg-quality", dest="jpeg_quality", type=int,
                        help="The JPEG algorithm quality when JPEG compression "
                             "is requested. Default is 75.")
    parser.add_argument("--zlevel", dest="zlevel", type=int,
                        help="The zlevel quality setting when DEFLATE "
                             "compression is requested.")
    
    indexed_group = parser.add_argument_group()
    
    # TODO: pct depends on indexed
    
    parser.add_argument("--indexed", dest="color_index", action="store_true",
                        help="Create a paletted (indexed) image.")
    parser.add_argument("--pct", dest="palette_file",
                        help="Use the given file as color palette. If not "
                             "given, a new palette will be calculated.")
    
    parser.add_argument("--no-data-value", dest="no_data_value", 
                        type=_parse_nodata_values,
                        help="Either one, or a list of no-data values.")
    
    parser.add_argument("--co", dest="creation_options", type=int, 
                        action="append",
                        help="Additional GDAL dataset creation options. "
                             "See http://www.gdal.org/frmt_gtiff.html")
    
    parser.add_argument("--traceback", action="store_true", default=False)
    
    parser.add_argument("--force", "-f", dest="force", action="store_true",
                        default=False,
                        help="Override files, if they already exist.")

    parser.add_argument("--verbosity", "-v", dest="verbosity", type=int,
                        default=1,
                        help="Set the verbosity (0, 1, 2). Default is 1.")
    
    parser.add_argument("input_filename", metavar="infile", nargs=1,
                        help="The input raster file to be processed.")
    parser.add_argument("output_basename", metavar="outfiles_basename",
                        nargs="?", 
                        help="The base name of the outputfile(s) to be "
                             "generated.")
    
    values = vars(parser.parse_args(args))
    
    
    # check metadata values
    if "generate_metadata" in values and ("begin_time" in values 
                                          or "end_time" in values
                                          or "coverage_id" in values):
        parser.error("--no-metadata is mutually exclusive with --begin-time, "
                     "--end-time and --coverage-id.")

    elif "generate_metadata" not in values and not ("begin_time" in values 
                                                    and "end_time" in values
                                                    and "coverage_id" in values):
        parser.error("Enter the full metadata with --begin-time, --end-time "
                     "and --coverage-id.")
    
    # hack to flatten the list
    values["input_filename"] = values["input_filename"][0]
    
    georef_crs = values.pop("georef_crs", None)
    
    if "extent" in values:
        values["geo_reference"] = Extent(*values.pop("extent"), srid=georef_crs or 4326)
    
    if "gcps" in values:
        values["geo_reference"] = GCPList(values.pop("gcps"), georef_crs or 4326)
    
    if "palette_file" in values and not "color_index" in values:
        parser.error("--pct can only be used with --indexed")
    
    # Extract format and execution specific values
    format_values = _extract(values, ("tiling", "compression", "jpeg_quality", 
                                      "zlevel", "creation_options"))
    exec_values = _extract(values, ("input_filename", "geo_reference",
                                    "generate_metadata"))
    other_values = _extract(values, ("traceback", ))
    metadata_values = _extract(values, ("coverage_id", "begin_time",
                                         "end_time"))

    force = values.pop("force", True)
    verbosity = values.pop("verbosity")
    output_basename = values.pop("output_basename", None)
    input_filename = exec_values.get("input_filename")

    # setup logging
    if verbosity > 0:
        if verbosity == 1: level = logging.WARN
        elif verbosity == 2: level = logging.INFO
        elif verbosity >= 3: level = logging.DEBUG
        logging.basicConfig(format="%(levelname)s: %(message)s", stream=sys.stderr,
                            level=level)
        

    try:
        # create a format selection
        format_selection = get_format_selection("GTiff", **format_values)


        # TODO: make 'tif' dependent on format selection
        # check files exist
        if not output_basename:
            output_filename = splitext(input_filename)[0] + "_proc" + format_selection.extension
            output_md_filename = splitext(input_filename)[0] + "_proc.xml"
        
        else:
            output_filename = output_basename + format_selection.extension
            output_md_filename = output_basename + ".xml"

        if not force:
            check_file_existence(output_filename)

        exec_values["output_filename"] = output_filename

        # create and run the preprocessor
        preprocessor = WMSPreProcessor(format_selection, **values)
        result = preprocessor.process(**exec_values)

        if exec_values.get("generate_metadata", True):
            if not force:
                check_file_existence(output_md_filename)

            encoder = NativeMetadataFormatEncoder()
            xml = DOMElementToXML(encoder.encodeMetadata(metadata_values["coverage_id"],
                                                         metadata_values["begin_time"],
                                                         metadata_values["end_time"],
                                                         result.footprint_raw))
            
            with open(output_md_filename, "w+") as f:
                f.write(xml)
        
    except Exception as e:
        # error wrapping
        if other_values["traceback"]:
            traceback.print_exc()
        sys.stderr.write("%s: %s\n" % (type(e).__name__, str(e)))
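
Example #4 is the same script as Example #1 before reformatting. Because the parser is created with fromfile_prefix_chars="@", arguments can also be read from a file with one argument per line. A minimal, hypothetical entry point for running the script directly (the module-level imports are assumed to be in place):

import sys

if __name__ == "__main__":
    # an argument such as "@args.txt" makes argparse read additional
    # arguments from that file, one per line
    main(sys.argv[1:])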
Example #5
File: __init__.py Project: v-manip/ngeo-b
def ingest_browse_report(parsed_browse_report, do_preprocessing=True, config=None):
    """ Ingests a browse report. reraise_exceptions if errors shall be handled 
    externally
    """
    
    try:
        # get the according browse layer
        browse_type = parsed_browse_report.browse_type
        browse_layer = models.BrowseLayer.objects.get(browse_type=browse_type)
    except models.BrowseLayer.DoesNotExist:
        raise IngestionException("Browse layer with browse type '%s' does not "
                                 "exist." % parsed_browse_report.browse_type)
    
    # generate a browse report model
    browse_report = create_browse_report(parsed_browse_report, browse_layer)
    
    # initialize the preprocessor with configuration values
    crs = None
    if browse_layer.grid == "urn:ogc:def:wkss:OGC:1.0:GoogleMapsCompatible":
        crs = "EPSG:3857"
    elif browse_layer.grid == "urn:ogc:def:wkss:OGC:1.0:GoogleCRS84Quad":
        crs = "EPSG:4326"
        
    logger.debug("Using CRS '%s' ('%s')." % (crs, browse_layer.grid))
    
    # create the required preprocessor/format selection
    format_selection = get_format_selection("GTiff",
                                            **get_format_config(config))

    if do_preprocessing and not browse_layer.contains_vertical_curtains \
        and not browse_layer.contains_volumes:
        # add config parameters and custom params
        params = get_optimization_config(config)
        
        # add radiometric interval
        rad_min = browse_layer.radiometric_interval_min
        if rad_min is not None:
            params["radiometric_interval_min"] = rad_min
        else:
            rad_min = "min"
        rad_max = browse_layer.radiometric_interval_max
        if rad_max is not None:
            params["radiometric_interval_max"] = rad_max
        else:
            rad_max = "max"
        
        # add band selection
        if (browse_layer.r_band is not None and 
            browse_layer.g_band is not None and 
            browse_layer.b_band is not None):
            
            bands = [(browse_layer.r_band, rad_min, rad_max), 
                     (browse_layer.g_band, rad_min, rad_max), 
                     (browse_layer.b_band, rad_min, rad_max)]
            
            if params["bandmode"] == RGBA:
                # RGBA
                bands.append((0, 0, 0))
            
            params["bands"] = bands
        
        preprocessor = NGEOPreProcessor(format_selection, crs=crs, **params)

    elif browse_layer.contains_vertical_curtains:

        logger.info("Preparing Vertical Curtain Pre-Processor")

        params = {}

        # add radiometric interval
        rad_min = browse_layer.radiometric_interval_min
        if rad_min is not None:
            params["radiometric_interval_min"] = rad_min
        else:
            rad_min = "min"
        rad_max = browse_layer.radiometric_interval_max
        if rad_max is not None:
            params["radiometric_interval_max"] = rad_max
        else:
            rad_max = "max"

        preprocessor = VerticalCurtainPreprocessor(**params)

    elif browse_layer.contains_volumes:
        preprocessor = VolumePreProcessor()

    else:
        preprocessor = None # TODO: CopyPreprocessor
    
    report_result = IngestBrowseReportResult()
    
    succeded = []
    failed = []
    
    timestamp = datetime.utcnow().strftime("%Y%m%d%H%M%S%f")
    browse_dirname = _valid_path("%s_%s_%s_%s" % (
        browse_type, browse_report.responsible_org_name,
        browse_report.date_time.strftime("%Y%m%d%H%M%S%f"),
        timestamp
    ))
    success_dir = join(get_success_dir(config), browse_dirname)
    failure_dir = join(get_failure_dir(config), browse_dirname)
    
    if exists(success_dir): 
        logger.warn("Success directory '%s' already exists.")
    else:
        makedirs(success_dir)
    if exists(failure_dir): 
        logger.warn("Failure directory '%s' already exists.")
    else:
        makedirs(failure_dir)
    
    # iterate over all browses in the browse report
    for parsed_browse in parsed_browse_report:
        # transaction management per browse
        with transaction.commit_manually():
            with transaction.commit_manually(using="mapcache"):
                try:
                    seed_areas = []
                    # try ingest a single browse and log success
                    result = ingest_browse(parsed_browse, browse_report,
                                           browse_layer, preprocessor, crs,
                                           success_dir, failure_dir,
                                           seed_areas, config=config)
                    
                    report_result.add(result)
                    succeded.append(parsed_browse)
                    
                    # commit here to allow seeding
                    transaction.commit() 
                    transaction.commit(using="mapcache")
                    
                    
                    logger.info("Commited changes to database.")

                    if not browse_layer.contains_vertical_curtains and not browse_layer.contains_volumes:
                    
                        for minx, miny, maxx, maxy, start_time, end_time in seed_areas:
                            try:
                                
                                # seed MapCache synchronously
                                # TODO: maybe replace this with an async solution
                                seed_mapcache(tileset=browse_layer.id, 
                                              grid=browse_layer.grid, 
                                              minx=minx, miny=miny, 
                                              maxx=maxx, maxy=maxy, 
                                              minzoom=browse_layer.lowest_map_level, 
                                              maxzoom=browse_layer.highest_map_level,
                                              start_time=start_time,
                                              end_time=end_time,
                                              delete=False,
                                              **get_mapcache_seed_config(config))
                                logger.info("Successfully finished seeding.")
                                
                            except Exception as e:
                                logger.warn("Seeding failed: %s" % str(e))
                    
                    elif not browse_layer.contains_volumes:

                        host = "http://localhost/browse/ows"

                        level_0_num_tiles_y = 2  # rows
                        level_0_num_tiles_x = 4  # cols

                        # note: range() is exclusive at the top, so highest_map_level itself is not seeded here
                        seed_level = range(browse_layer.lowest_map_level, browse_layer.highest_map_level)

                        for tileLevel in seed_level:

                            tiles_x = level_0_num_tiles_x * pow(2, tileLevel)
                            tiles_y = level_0_num_tiles_y * pow(2, tileLevel)

                            # find which tiles are crossed by the extent;
                            # float division, since Python 2 integer division
                            # would truncate the tile size at higher levels
                            tile_width = 360.0 / tiles_x
                            tile_height = 180.0 / tiles_y

                            coverage = eoxs_models.Coverage.objects.get(identifier=result.identifier)

                            # cycle through tiles
                            for col in range(tiles_x):
                                for row in range(tiles_y):

                                    west = -180 + (col * tile_width)
                                    east = west + tile_width
                                    north = 90 - (row * tile_height)
                                    south = north - tile_height

                                    if coverage.footprint.intersects(Polygon.from_bbox((west, south, east, north))):

                                        try:
                                            # NOTE: The MeshFactory ignores time
                                            time = (isoformat(result.time_interval[0]) + "/" + isoformat(result.time_interval[1]))
                                            
                                            baseurl = host + '?service=W3DS&request=GetTile&version=1.0.0&crs=EPSG:4326&layer={0}&style=default&format=model/gltf'.format(browse_layer.id)
                                            url = '{0}&tileLevel={1}&tilecol={2}&tilerow={3}&time={4}'.format(baseurl, tileLevel, col, row, time)

                                            logger.info('Seeding call to URL: %s' % (url,))

                                            response = urllib2.urlopen(url)
                                            response.close()

                                        except Exception as e:
                                            logger.warn("Seeding failed: %s" % str(e))

                        transaction.commit() 

                    else:
                        pass
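
The W3DS seeding branch above derives each tile's bounding box from a fixed 4x2 level-0 grid that doubles in both axes with every tile level. A small self-contained check of that arithmetic; the function name and defaults are ours, for illustration only:

def tile_bbox(level, col, row, level0_cols=4, level0_rows=2):
    # WGS84 bounding box (west, south, east, north) of a tile in a grid
    # that starts as level0_cols x level0_rows and doubles per level
    cols = level0_cols * 2 ** level
    rows = level0_rows * 2 ** level
    tile_width = 360.0 / cols
    tile_height = 180.0 / rows
    west = -180 + col * tile_width
    north = 90 - row * tile_height
    return (west, north - tile_height, west + tile_width, north)

print(tile_bbox(0, 0, 0))  # (-180.0, 0.0, -90.0, 90.0)
print(tile_bbox(1, 3, 2))  # (-45.0, -45.0, 0.0, 0.0)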