Example #1
def main():
    from polar2grid.core.script_utils import create_basic_parser, create_exc_handler, setup_logging
    from polar2grid.core.containers import GriddedScene, GriddedProduct
    parser = create_basic_parser(description="Create HDF5 files from provided gridded scene or product data")
    subgroup_titles = add_backend_argument_groups(parser)
    parser.add_argument("--scene", required=True, help="JSON SwathScene filename to be remapped")
    global_keywords = ("keep_intermediate", "overwrite_existing", "exit_on_error")
    args = parser.parse_args(subgroup_titles=subgroup_titles, global_keywords=global_keywords)

    # Logs are renamed once the start date of the provided data is known
    levels = [logging.ERROR, logging.WARN, logging.INFO, logging.DEBUG]
    setup_logging(console_level=levels[min(3, args.verbosity)], log_filename=args.log_fn)
    sys.excepthook = create_exc_handler(LOG.name)

    LOG.info("Loading scene or product...")
    gridded_scene = GriddedScene.load(args.scene)

    LOG.info("Initializing backend...")
    backend = Backend(**args.subgroup_args["Backend Initialization"])
    if isinstance(gridded_scene, GriddedScene):
        backend.create_output_from_scene(gridded_scene, **args.subgroup_args["Backend Output Creation"])
    elif isinstance(gridded_scene, GriddedProduct):
        backend.create_output_from_product(gridded_scene, **args.subgroup_args["Backend Output Creation"])
    else:
        raise ValueError("Unknown Polar2Grid object provided")
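All of the main()-style excerpts in this collection assume some module-level names that are not shown: the logging, os, and sys imports and a LOG logger object. A minimal sketch of that assumed scaffolding, including the usual script entry-point guard, might look like the following (the names mirror the excerpts; none of this is confirmed by the source):

import logging
import os
import sys

# Assumed module-level scaffolding (not shown in the excerpts):
# the logger referenced as LOG inside the main() functions.
LOG = logging.getLogger(__name__)

if __name__ == "__main__":
    sys.exit(main())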
Example #2
def main():
    from polar2grid.core.script_utils import create_basic_parser, create_exc_handler, setup_logging
    from polar2grid.core.containers import GriddedScene, GriddedProduct
    parser = create_basic_parser(description="Create NinJo files from provided gridded scene or product data")
    subgroup_titles = add_backend_argument_groups(parser)
    parser.add_argument("--scene", required=True, help="JSON SwathScene filename to be remapped")
    parser.add_argument("-t", "--test", dest="run_test", default=None,
                        help="Run specified test [test_write, test_write_tags, etc]")
    global_keywords = ("keep_intermediate", "overwrite_existing", "exit_on_error")
    args = parser.parse_args(subgroup_titles=subgroup_titles, global_keywords=global_keywords)

    # Logs are renamed once the start date of the provided data is known
    levels = [logging.ERROR, logging.WARN, logging.INFO, logging.DEBUG]
    setup_logging(console_level=levels[min(3, args.verbosity)], log_filename=args.log_fn)
    sys.excepthook = create_exc_handler(LOG.name)

    if args.run_test is not None:
        if args.run_test not in TESTS:
            parser.print_usage()
            print "Available tests:\n\t%s" % ("\n\t".join(TESTS.keys()))
            return -1
        return TESTS[args.run_test](*args)

    LOG.info("Loading scene or product...")
    gridded_scene = GriddedScene.load(args.scene)

    LOG.info("Initializing backend...")
    backend = Backend(**args.subgroup_args["Backend Initialization"])
    if isinstance(gridded_scene, GriddedScene):
        backend.create_output_from_scene(gridded_scene, **args.subgroup_args["Backend Output Creation"])
    elif isinstance(gridded_scene, GriddedProduct):
        backend.create_output_from_product(gridded_scene, **args.subgroup_args["Backend Output Creation"])
    else:
        raise ValueError("Unknown Polar2Grid object provided")
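The TESTS mapping consulted by the --test handling above is not part of the excerpt; presumably it maps test names to callables. A hypothetical sketch of that kind of registry and the lookup that mirrors the run_test branch:

# Hypothetical stand-in for the TESTS registry assumed above.
def test_write(*args):
    print("test_write called")
    return 0

TESTS = {
    "test_write": test_write,
}

# Dispatch by name, as in main(): reject unknown names, otherwise call the test.
requested = "test_write"
if requested not in TESTS:
    print("Available tests:\n\t%s" % ("\n\t".join(TESTS.keys())))
else:
    TESTS[requested]()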
Example #3
def main():
    from polar2grid.core.script_utils import create_basic_parser, create_exc_handler, setup_logging
    from polar2grid.core.containers import GriddedScene, GriddedProduct
    parser = create_basic_parser(
        description=
        "Create HDF5 files from provided gridded scene or product data")
    subgroup_titles = add_backend_argument_groups(parser)
    parser.add_argument("--scene",
                        required=True,
                        help="JSON SwathScene filename to be remapped")
    global_keywords = ("keep_intermediate", "overwrite_existing",
                       "exit_on_error")
    args = parser.parse_args(subgroup_titles=subgroup_titles,
                             global_keywords=global_keywords)

    # Logs are renamed once the start date of the provided data is known
    levels = [logging.ERROR, logging.WARN, logging.INFO, logging.DEBUG]
    setup_logging(console_level=levels[min(3, args.verbosity)],
                  log_filename=args.log_fn)
    sys.excepthook = create_exc_handler(LOG.name)

    LOG.info("Loading scene or product...")
    gridded_scene = GriddedScene.load(args.scene)

    LOG.info("Initializing backend...")
    backend = Backend(**args.subgroup_args["Backend Initialization"])
    if isinstance(gridded_scene, GriddedScene):
        backend.create_output_from_scene(
            gridded_scene, **args.subgroup_args["Backend Output Creation"])
    elif isinstance(gridded_scene, GriddedProduct):
        backend.create_output_from_product(
            gridded_scene, **args.subgroup_args["Backend Output Creation"])
    else:
        raise ValueError("Unknown Polar2Grid object provided")
Example #4
def main(argv=sys.argv[1:]):
    from polar2grid.core.script_utils import setup_logging, create_basic_parser, create_exc_handler
    from polar2grid.core.containers import GriddedScene
    parser = create_basic_parser(description="Extract swath data, remap it, and write it to a new file format")
    parser.add_argument("--compositor-configs", nargs="*", default=None,
                        help="Specify alternative configuration file(s) for compositors")
    # don't include the help flag
    argv_without_help = [x for x in argv if x not in ["-h", "--help"]]
    args, remaining_args = parser.parse_known_args(argv_without_help)

    # Load compositor information (we can't know the compositor choices until we've loaded the configuration)
    compositor_manager = CompositorManager(config_files=args.compositor_configs)
    # Hack: argparse doesn't let you use choices and nargs=* on a positional argument
    parser.add_argument("compositors", choices=compositor_manager.keys() + [[]], nargs="*",
                        help="Specify the compositors to apply to the provided scene (additional arguments are determined after this is specified)")
    parser.add_argument("--scene", required=True, help="JSON SwathScene filename to be remapped")
    parser.add_argument("-o", dest="output_filename",
                        help="Specify the filename for the newly modified scene (default: original_fn + 'composite')")
    global_keywords = ("keep_intermediate", "overwrite_existing", "exit_on_error")
    args = parser.parse_args(argv, global_keywords=global_keywords)

    levels = [logging.ERROR, logging.WARN, logging.INFO, logging.DEBUG]
    setup_logging(console_level=levels[min(3, args.verbosity)], log_filename=args.log_fn)
    sys.excepthook = create_exc_handler(LOG.name)
    LOG.debug("Starting compositor script with arguments: %s", " ".join(sys.argv))

    # Compositor validation
    compositor_objects = {}
    for c in args.compositors:
        if c not in compositor_manager:
            LOG.error("Compositor '%s' is unknown" % (c,))
            raise RuntimeError("Compositor '%s' is unknown" % (c,))
        compositor_objects[c] = compositor_manager.get_compositor(c, **args.global_kwargs)

    scene = GriddedScene.load(args.scene)
    for c, comp in compositor_objects.items():
        try:
            scene = comp.modify_scene(scene, **args.subgroup_args[c + " Modification"])
        except StandardError:
            LOG.debug("Compositor Error: ", exc_info=True)
            LOG.error("Could not properly modify scene using compositor '%s'" % (c,))
            if args.exit_on_error:
                raise RuntimeError("Could not properly modify scene using compositor '%s'" % (c,))

    if args.output_filename is None:
        stem, ext = os.path.splitext(args.scene)
        args.output_filename = stem + "_composite" + ext
    scene.save(args.output_filename)
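The compositor script above (and the backend glue script in Example #6) relies on two-phase argument parsing: parse_known_args() is called first with --help stripped so that configuration-dependent choices can be loaded, the dependent arguments are then registered, and only then is the full command line parsed. A simplified sketch of that pattern with plain argparse, using a made-up --plugin-config option and load_choices() helper in place of the real compositor machinery:

import argparse
import sys

def load_choices(config_file):
    # Hypothetical stand-in for CompositorManager: the valid choices are not
    # known until a configuration file has been read.
    return ["true_color", "false_color"]

def main(argv=sys.argv[1:]):
    parser = argparse.ArgumentParser(description="two-phase parsing sketch")
    parser.add_argument("--plugin-config", default=None)

    # Phase 1: strip --help and parse only the arguments known so far.
    argv_without_help = [a for a in argv if a not in ("-h", "--help")]
    args, _remaining = parser.parse_known_args(argv_without_help)

    # Phase 2: register the configuration-dependent argument, then parse the
    # full command line (help included) now that all options exist.
    parser.add_argument("--composite", choices=load_choices(args.plugin_config))
    args = parser.parse_args(argv)
    return args

if __name__ == "__main__":
    print(main())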
Example #5
def main():
    from polar2grid.core.script_utils import create_basic_parser, create_exc_handler, setup_logging
    from polar2grid.core.containers import GriddedScene, GriddedProduct
    parser = create_basic_parser(
        description=
        "Create NinJo files from provided gridded scene or product data")
    subgroup_titles = add_backend_argument_groups(parser)
    parser.add_argument("--scene",
                        required=True,
                        help="JSON SwathScene filename to be remapped")
    parser.add_argument(
        "-t",
        "--test",
        dest="run_test",
        default=None,
        help="Run specified test [test_write, test_write_tags, etc]")
    global_keywords = ("keep_intermediate", "overwrite_existing",
                       "exit_on_error")
    args = parser.parse_args(subgroup_titles=subgroup_titles,
                             global_keywords=global_keywords)

    # Logs are renamed once the start date of the provided data is known
    levels = [logging.ERROR, logging.WARN, logging.INFO, logging.DEBUG]
    setup_logging(console_level=levels[min(3, args.verbosity)],
                  log_filename=args.log_fn)
    sys.excepthook = create_exc_handler(LOG.name)

    if args.run_test is not None:
        if args.run_test not in TESTS:
            parser.print_usage()
            print "Available tests:\n\t%s" % ("\n\t".join(TESTS.keys()))
            return -1
        return TESTS[args.run_test](*args)

    LOG.info("Loading scene or product...")
    gridded_scene = GriddedScene.load(args.scene)

    LOG.info("Initializing backend...")
    backend = Backend(**args.subgroup_args["Backend Initialization"])
    if isinstance(gridded_scene, GriddedScene):
        backend.create_output_from_scene(
            gridded_scene, **args.subgroup_args["Backend Output Creation"])
    elif isinstance(gridded_scene, GriddedProduct):
        backend.create_output_from_product(
            gridded_scene, **args.subgroup_args["Backend Output Creation"])
    else:
        raise ValueError("Unknown Polar2Grid object provided")
Example #6
def main_backend(argv=sys.argv[1:]):
    from polar2grid.core.script_utils import setup_logging, create_basic_parser, create_exc_handler, ExtendAction
    from polar2grid.core.containers import GriddedScene, GriddedProduct
    backends = available_backends()
    parser = create_basic_parser(description="Create image/output file from provided gridded scene using a typical Polar2Grid backend (see specific backend for other features)")
    parser.add_argument("backend", choices=sorted(backends.keys()),
                        help="Specify the output generator to use (additional arguments are determined after this is specified)")
    parser.add_argument("--scene", required=True, help="JSON GriddedScene filename")
    parser.add_argument('-o', dest="output_filename", default=None,
                        help="Output filename for JSON scene (default is to stdout)")
    parser.add_argument('-f', dest='data_files', nargs="+", default=[], action=ExtendAction,
                        help="List of files or directories to extract data from")
    global_keywords = ("keep_intermediate", "overwrite_existing", "exit_on_error")

    # don't include the help flag
    argv_without_help = [x for x in argv if x not in ["-h", "--help"]]
    args, remaining_args = parser.parse_known_args(argv_without_help)
    LOG = logging.getLogger(args.backend)
    barg_func = get_backend_argument_func(backends, args.backend)
    bcls = get_backend_class(backends, args.backend)

    subgroup_titles = []
    subgroup_titles += barg_func(parser)
    args = parser.parse_args(argv, global_keywords=global_keywords, subgroup_titles=subgroup_titles)

    levels = [logging.ERROR, logging.WARN, logging.INFO, logging.DEBUG]
    setup_logging(console_level=levels[min(3, args.verbosity)], log_filename=args.log_fn)
    sys.excepthook = create_exc_handler(LOG.name)
    LOG.debug("Starting script with arguments: %s", " ".join(sys.argv))

    LOG.info("Loading scene or product...")
    gridded_scene = GriddedScene.load(args.scene)

    LOG.info("Initializing writer...")
    backend = bcls(**args.subgroup_args["Backend Initialization"])
    if isinstance(gridded_scene, GriddedScene):
        result = backend.create_output_from_scene(gridded_scene, **args.subgroup_args["Backend Output Creation"])
    elif isinstance(gridded_scene, GriddedProduct):
        result = backend.create_output_from_product(gridded_scene, **args.subgroup_args["Backend Output Creation"])
    else:
        raise ValueError("Unknown Polar2Grid object provided")

    import json
    print(json.dumps(result))
    return 0
Example #7
    def _remap_scene_nearest(self,
                             swath_scene,
                             grid_def,
                             share_dynamic_grids=True,
                             share_remap_mask=True,
                             **kwargs):
        # TODO: Make methods more flexible than just a function call
        gridded_scene = GriddedScene()
        grid_name = grid_def["grid_name"]

        # Group together products that share the same geolocation
        product_groups = defaultdict(list)
        for product_name, swath_product in swath_scene.items():
            swath_def = swath_product["swath_definition"]
            geo_id = swath_def["swath_name"]
            product_groups[geo_id].append(product_name)

        grid_coverage = kwargs.get("grid_coverage", GRID_COVERAGE)
        orig_grid_def = grid_def
        for geo_id, product_names in product_groups.items():
            pp_names = "\n\t".join(product_names)
            LOG.debug(
                "Running ll2cr on the geolocation data for the following products:\n\t%s",
                pp_names)
            LOG.debug("Swath name: %s", geo_id)

            # TODO: Move into its own function if this gets complicated
            # TODO: Add some multiprocessing
            try:
                swath_def = swath_scene[product_names[0]]["swath_definition"]
                if not share_dynamic_grids:
                    grid_def = orig_grid_def.copy()
                cols_fn, rows_fn = self.run_ll2cr(swath_def, grid_def)
            except StandardError:
                LOG.error("Remapping error")
                if self.exit_on_error:
                    raise
                continue

            LOG.debug(
                "Running nearest neighbor for the following products:\n\t%s",
                "\n\t".join(product_names))
            edge_res = swath_def.get("limb_resolution", None)
            if kwargs.get("distance_upper_bound", None) is None:
                if edge_res is not None:
                    if grid_def.is_latlong:
                        distance_upper_bound = (edge_res /
                                                2) / grid_def.cell_width_meters
                    else:
                        distance_upper_bound = (edge_res /
                                                2) / grid_def["cell_width"]
                    LOG.debug("Distance upper bound dynamically set to %f",
                              distance_upper_bound)
                else:
                    distance_upper_bound = 3.0
                kwargs["distance_upper_bound"] = distance_upper_bound

            try:
                grid_x, grid_y = numpy.mgrid[:grid_def["height"], :
                                             grid_def["width"]]
                # we need flattened versions of these
                shape = (swath_def["swath_rows"] *
                         swath_def["swath_columns"], )
                cols_array = numpy.memmap(cols_fn,
                                          shape=shape,
                                          dtype=swath_def["data_type"])
                rows_array = numpy.memmap(rows_fn,
                                          shape=shape,
                                          dtype=swath_def["data_type"])
                good_mask = ~mask_helper(cols_array, swath_def["fill_value"])
                if share_remap_mask:
                    for product_name in product_names:
                        LOG.debug(
                            "Combining data masks before building KDTree for nearest neighbor: %s",
                            product_name)
                        good_mask &= ~swath_scene[product_name].get_data_mask(
                        ).ravel()
                x = _ndim_coords_from_arrays(
                    (cols_array[good_mask], rows_array[good_mask]))
                xi = _ndim_coords_from_arrays((grid_y, grid_x))
                dist, i = cKDTree(x).query(
                    xi, distance_upper_bound=kwargs["distance_upper_bound"])
            except StandardError:
                LOG.debug("Remapping exception: ", exc_info=True)
                LOG.error("Remapping error")
                if self.exit_on_error:
                    self._clear_ll2cr_cache()
                    raise
                continue

            product_filepaths = swath_scene.get_data_filepaths(product_names)
            output_filepaths = self._add_prefix("grid_%s_" % (grid_name, ),
                                                *product_filepaths)

            # Prepare the products
            fill_value = numpy.nan
            for product_name, output_fn in izip(product_names,
                                                output_filepaths):
                LOG.debug(
                    "Running nearest neighbor on '%s' with search distance %f",
                    product_name, kwargs["distance_upper_bound"])
                if os.path.isfile(output_fn):
                    if not self.overwrite_existing:
                        LOG.error(
                            "Intermediate remapping file already exists: %s" %
                            (output_fn, ))
                        raise RuntimeError(
                            "Intermediate remapping file already exists: %s" %
                            (output_fn, ))
                    else:
                        LOG.warning(
                            "Intermediate remapping file already exists, will overwrite: %s",
                            output_fn)

                try:
                    image_array = swath_scene[product_name].get_data_array(
                    ).ravel()
                    values = numpy.append(image_array[good_mask],
                                          image_array.dtype.type(fill_value))
                    output_array = values[i]
                    output_array.tofile(output_fn)

                    # Give the gridded product ownership of the remapped data
                    swath_product = swath_scene[product_name]
                    gridded_product = GriddedProduct()
                    gridded_product.from_swath_product(swath_product)
                    gridded_product["grid_definition"] = grid_def
                    gridded_product["fill_value"] = fill_value
                    gridded_product["grid_data"] = output_fn

                    # Check grid coverage
                    valid_points = numpy.count_nonzero(
                        ~gridded_product.get_data_mask())
                    grid_covered_ratio = valid_points / float(
                        grid_def["width"] * grid_def["height"])
                    grid_covered = grid_covered_ratio > grid_coverage
                    if not grid_covered:
                        msg = "Nearest neighbor resampling only found %f%% of the grid covered (need %f%%) for %s" % (
                            grid_covered_ratio * 100, grid_coverage * 100,
                            product_name)
                        LOG.warning(msg)
                        continue
                    LOG.debug(
                        "Nearest neighbor resampling found %f%% of the grid covered for %s"
                        % (grid_covered_ratio * 100, product_name))

                    gridded_scene[product_name] = gridded_product

                    # hopefully force garbage collection
                    del output_array
                except StandardError:
                    LOG.debug("Remapping exception: ", exc_info=True)
                    LOG.error("Remapping error")
                    self._safe_remove(output_fn)
                    if self.exit_on_error:
                        self._clear_ll2cr_cache()
                        raise
                    continue

                LOG.debug("Done running nearest neighbor on '%s'",
                          product_name)

        # Remove ll2cr files now that we are done with them
        self._clear_ll2cr_cache()

        if not gridded_scene:
            raise RuntimeError(
                "Nearest neighbor resampling could not remap any of the data to grid '%s'"
                % (grid_name, ))

        return gridded_scene
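The fill handling in the cKDTree branch above leans on a documented scipy behavior: query() with distance_upper_bound marks grid cells that have no input pixel within the bound by returning an index equal to the number of input points (and an infinite distance). Appending the fill value to the flattened input values therefore makes those cells pick up the fill automatically. A self-contained toy version of the trick, with simplified coordinates and numpy.column_stack in place of the private _ndim_coords_from_arrays helper:

import numpy
from scipy.spatial import cKDTree

# Toy version of the fill-value trick: a "missing" neighbor is reported with
# index == number of input points, so index n selects the appended fill value.
fill_value = numpy.nan
data_cols = numpy.array([0.0, 1.0, 2.0])   # column coordinate of each valid input pixel
data_rows = numpy.array([0.0, 0.0, 0.0])   # row coordinate of each valid input pixel
values = numpy.array([10.0, 20.0, 30.0])   # data value of each valid input pixel

grid_y, grid_x = numpy.mgrid[:1, :5]       # 1 row x 5 column output grid
tree = cKDTree(numpy.column_stack((data_cols, data_rows)))
dist, idx = tree.query(
    numpy.column_stack((grid_x.ravel(), grid_y.ravel())),
    distance_upper_bound=0.5)

values_with_fill = numpy.append(values, fill_value)
output = values_with_fill[idx]             # cells farther than 0.5 pixels get NaN
print(output)                              # [10. 20. 30. nan nan]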
Example #8
    def _remap_scene_ewa(self,
                         swath_scene,
                         grid_def,
                         share_dynamic_grids=True,
                         **kwargs):
        # TODO: Make methods more flexible than just a function call
        gridded_scene = GriddedScene()
        grid_name = grid_def["grid_name"]

        # Group together products that share the same geolocation
        product_groups = defaultdict(list)
        for product_name, swath_product in swath_scene.items():
            swath_def = swath_product["swath_definition"]
            is_cat = swath_product.get('flag_meanings') is not None
            geo_id = swath_def["swath_name"]
            product_groups[(is_cat, geo_id)].append(product_name)

        # keep a copy of the original grid definition
        # if a shared grid definition isn't used then
        # we start from the original
        orig_grid_def = grid_def
        for (is_cat, geo_id), product_names in product_groups.items():
            try:
                LOG.debug(
                    "Running ll2cr on the geolocation data for the following products:\n\t%s",
                    "\n\t".join(sorted(product_names)))
                swath_def = swath_scene[product_names[0]]["swath_definition"]
                if not share_dynamic_grids:
                    grid_def = orig_grid_def.copy()
                cols_fn, rows_fn = self.run_ll2cr(swath_def,
                                                  grid_def,
                                                  swath_usage=kwargs.get(
                                                      "swath_usage",
                                                      SWATH_USAGE))
            except StandardError:
                LOG.error("Remapping error")
                if self.exit_on_error:
                    raise
                continue

            # Run fornav for all of the products at once
            LOG.debug("Running fornav for the following products:\n\t%s",
                      "\n\t".join(sorted(product_names)))
            # XXX: May have to do something smarter if there are float products and integer products together (is_category property on SwathProduct?)
            product_filepaths = list(
                swath_scene.get_data_filepaths(product_names))
            fornav_filepaths = self._add_prefix("grid_%s_" % (grid_name, ),
                                                *product_filepaths)
            for fp in fornav_filepaths:
                if os.path.isfile(fp):
                    if not self.overwrite_existing:
                        LOG.error(
                            "Intermediate remapping file already exists: %s" %
                            (fp, ))
                        raise RuntimeError(
                            "Intermediate remapping file already exists: %s" %
                            (fp, ))
                    else:
                        LOG.warning(
                            "Intermediate remapping file already exists, will overwrite: %s",
                            fp)

            rows_per_scan = swath_def.get("rows_per_scan", 0)
            if rows_per_scan < 2:
                LOG.warning(
                    "Data has less than 2 rows per scan, this is not optimal for the EWA resampling algorithm. All rows will be used as one scan"
                )
                rows_per_scan = swath_def['swath_rows']
            edge_res = swath_def.get("limb_resolution", None)
            fornav_D = kwargs.get("fornav_D", None)
            if fornav_D is None:
                if edge_res is not None:
                    if grid_def.is_latlong:
                        fornav_D = (edge_res / 2) / grid_def.cell_width_meters
                    else:
                        fornav_D = (edge_res / 2) / grid_def["cell_width"]
                    LOG.debug("Fornav 'D' option dynamically set to %f",
                              fornav_D)
                else:
                    fornav_D = 10.0

            mwm = kwargs.get('maximum_weight_mode', False)
            if is_cat and not mwm:
                LOG.debug(
                    "Turning on maximum weight mode in EWA resampling for category products"
                )
                mwm = True

            try:
                # fornav.ms2gt_fornav(
                #     len(product_filepaths),
                #     swath_def["swath_columns"],
                #     swath_def["swath_rows"]/rows_per_scan,
                #     rows_per_scan,
                #     cols_fn,
                #     rows_fn,
                #     product_filepaths,
                #     grid_def["width"],
                #     grid_def["height"],
                #     fornav_filepaths,
                #     swath_data_type_1="f4",
                #     swath_fill_1=swath_scene.get_fill_value(product_names),
                #     grid_fill_1=numpy.nan,
                #     weight_delta_max=fornav_D,
                #     weight_distance_max=kwargs.get("fornav_d", None),
                #     maximum_weight_mode=kwargs.get("maximum_weight_mode", None),
                #     start_scan=(0, 0),
                # )
                cols_array = numpy.memmap(cols_fn,
                                          dtype=numpy.float32,
                                          mode='r',
                                          shape=(swath_def["swath_rows"],
                                                 swath_def["swath_columns"]))
                rows_array = numpy.memmap(rows_fn,
                                          dtype=numpy.float32,
                                          mode='r',
                                          shape=(swath_def["swath_rows"],
                                                 swath_def["swath_columns"]))
                # Collect each product's data type and fill value for fornav
                input_dtype = [
                    swath_scene[pn]["data_type"] for pn in product_names
                ]
                input_fill = [
                    swath_scene[pn]["fill_value"] for pn in product_names
                ]
                LOG.debug("Running fornav with D={} and d={}".format(
                    fornav_D, kwargs.get('fornav_d', 1.0)))
                valid_list = fornav.fornav(cols_array,
                                           rows_array,
                                           rows_per_scan,
                                           product_filepaths,
                                           input_dtype=input_dtype,
                                           input_fill=input_fill,
                                           output_arrays=fornav_filepaths,
                                           grid_cols=grid_def["width"],
                                           grid_rows=grid_def["height"],
                                           weight_delta_max=fornav_D,
                                           weight_distance_max=kwargs.get(
                                               "fornav_d", 1.0),
                                           maximum_weight_mode=mwm,
                                           use_group_size=True)
            except StandardError:
                LOG.debug("Remapping exception: ", exc_info=True)
                LOG.error("Remapping error")
                self._safe_remove(*fornav_filepaths)
                if self.exit_on_error:
                    self._clear_ll2cr_cache()
                    raise
                continue

            # Give the gridded product ownership of the remapped data
            for product_name, fornav_fp, valid_points in zip(
                    product_names, fornav_filepaths, valid_list):
                swath_product = swath_scene[product_name]
                gridded_product = GriddedProduct()
                gridded_product.from_swath_product(swath_product)
                gridded_product["grid_definition"] = grid_def
                gridded_product["fill_value"] = numpy.nan
                gridded_product["grid_data"] = fornav_fp

                grid_coverage = kwargs.get("grid_coverage", GRID_COVERAGE)
                grid_covered_ratio = valid_points / float(
                    grid_def["width"] * grid_def["height"])
                grid_covered = grid_covered_ratio > grid_coverage
                if not grid_covered:
                    msg = "EWA resampling only found %f%% of the grid covered (need %f%%) for %s" % (
                        grid_covered_ratio * 100, grid_coverage * 100,
                        product_name)
                    LOG.warning(msg)
                    continue
                LOG.debug(
                    "EWA resampling found %f%% of the grid covered for %s" %
                    (grid_covered_ratio * 100, product_name))
                gridded_scene[product_name] = gridded_product

        self._clear_ll2cr_cache()

        if not gridded_scene:
            self._safe_remove(*fornav_filepaths)
            raise RuntimeError(
                "EWA resampling could not remap any of the data to grid '%s'" %
                (grid_name, ))

        return gridded_scene
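Both remap methods pass data between steps as headerless flat binary files: run_ll2cr writes column/row arrays that are read back with numpy.memmap, and the resampled output is written with ndarray.tofile(). Because such files carry no metadata, the reader has to supply the dtype and shape itself, which is why the swath definition's "data_type", "swath_rows", and "swath_columns" keys appear in the memmap calls above. A minimal round-trip sketch with made-up sizes:

import os
import tempfile
import numpy

# Round trip of a headerless flat binary intermediate file: tofile() writes raw
# bytes only, so the reader must already know the dtype and shape (here taken
# from made-up swath metadata).
swath_rows, swath_cols, dtype = 3, 4, numpy.float32

fn = os.path.join(tempfile.mkdtemp(), "grid_example_p2g.dat")
numpy.arange(swath_rows * swath_cols, dtype=dtype).tofile(fn)

# Read it back lazily, as the ll2cr/fornav outputs are read in the methods above.
arr = numpy.memmap(fn, dtype=dtype, mode="r", shape=(swath_rows, swath_cols))
print(arr.sum())   # 66.0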
Example #9
def main(argv=sys.argv[1:]):
    from polar2grid.core.script_utils import setup_logging, create_basic_parser, create_exc_handler
    from polar2grid.core.containers import GriddedScene
    parser = create_basic_parser(
        description=
        "Extract swath data, remap it, and write it to a new file format")
    parser.add_argument(
        "--compositor-configs",
        nargs="*",
        default=None,
        help="Specify alternative configuration file(s) for compositors")
    # don't include the help flag
    argv_without_help = [x for x in argv if x not in ["-h", "--help"]]
    args, remaining_args = parser.parse_known_args(argv_without_help)

    # Load compositor information (we can't know the compositor choices until we've loaded the configuration)
    compositor_manager = CompositorManager(
        config_files=args.compositor_configs)
    # Hack: argparse doesn't let you use choices and nargs=* on a positional argument
    parser.add_argument(
        "compositors",
        choices=compositor_manager.keys() + [[]],
        nargs="*",
        help=
        "Specify the compositors to apply to the provided scene (additional arguments are determined after this is specified)"
    )
    parser.add_argument("--scene",
                        required=True,
                        help="JSON SwathScene filename to be remapped")
    parser.add_argument(
        "-o",
        dest="output_filename",
        help=
        "Specify the filename for the newly modified scene (default: original_fn + 'composite')"
    )
    global_keywords = ("keep_intermediate", "overwrite_existing",
                       "exit_on_error")
    args = parser.parse_args(argv, global_keywords=global_keywords)

    levels = [logging.ERROR, logging.WARN, logging.INFO, logging.DEBUG]
    setup_logging(console_level=levels[min(3, args.verbosity)],
                  log_filename=args.log_fn)
    sys.excepthook = create_exc_handler(LOG.name)
    LOG.debug("Starting compositor script with arguments: %s",
              " ".join(sys.argv))

    # Compositor validation
    compositor_objects = {}
    for c in args.compositors:
        if c not in compositor_manager:
            LOG.error("Compositor '%s' is unknown" % (c, ))
            raise RuntimeError("Compositor '%s' is unknown" % (c, ))
        compositor_objects[c] = compositor_manager.get_compositor(
            c, **args.global_kwargs)

    scene = GriddedScene.load(args.scene)
    for c, comp in compositor_objects.items():
        try:
            scene = comp.modify_scene(
                scene, **args.subgroup_args[c + " Modification"])
        except StandardError:
            LOG.debug("Compositor Error: ", exc_info=True)
            LOG.error("Could not properly modify scene using compositor '%s'" %
                      (c, ))
            if args.exit_on_error:
                raise RuntimeError(
                    "Could not properly modify scene using compositor '%s'" %
                    (c, ))

    if args.output_filename is None:
        stem, ext = os.path.splitext(args.scene)
        args.output_filename = stem + "_composite" + ext
    scene.save(args.output_filename)