Example #1
0
 def default(self, obj):
     """Encode polar2grid objects, datetimes, and numpy values as JSON-safe types.

     Falls back to the base encoder and prints the offending object before
     re-raising when no conversion is known.
     """
     if isinstance(obj, BaseP2GObject):
         # Record the fully-qualified class name so decoding can rebuild the object.
         mod_str = str(obj.__class__.__module__)
         mod_str = mod_str + "." if mod_str != __name__ else ""
         cls_str = str(obj.__class__.__name__)
         obj = obj.copy(as_dict=True)
         # object should now be a builtin dict
         obj["__class__"] = mod_str + cls_str
         return obj
     elif isinstance(obj, datetime):
         return obj.isoformat()
     elif (isinstance(obj, type) and issubclass(obj, numpy.number)) or isinstance(obj, numpy.dtype):
         # FIX: numpy.issubclass_ was deprecated and removed from NumPy.
         # issubclass_ returned False (instead of raising) for non-class
         # arguments, so guard with isinstance(obj, type) to keep that behavior.
         return dtype_to_str(obj)
     elif hasattr(obj, 'dtype'):
         # numpy array or scalar: arrays become lists, scalars plain numbers
         if obj.size > 1:
             return obj.tolist()
         return int(obj) if numpy.issubdtype(obj, numpy.integer) else float(obj)
     else:
         try:
             return super(P2GJSONEncoder, self).default(obj)
         except TypeError as e:
             # Leave a trace of what couldn't be serialized before re-raising.
             print("TypeError:", str(e), type(obj))
             print(obj)
             raise
Example #2
0
    def get_rescale_options(self, gridded_product, data_type, inc_by_one=False, fill_value=None):
        """Build the rescaling options dict for a gridded product.

        :param gridded_product: product providing metadata (including its grid definition)
        :param data_type: output data type used to look up the configured min/max range
        :param inc_by_one: whether output values will later be incremented by one
        :param fill_value: value written to the output for invalid/fill pixels
        :raises ValueError: if no rescaling method is configured for the product
        """
        all_meta = gridded_product["grid_definition"].copy(as_dict=True)
        all_meta.update(**gridded_product)
        kwargs = dict((k, all_meta.get(k, None)) for k in self.id_fields)
        # we don't want the product's current data_type, we want what the output will be
        kwargs["data_type"] = dtype_to_str(data_type)
        kwargs["inc_by_one"] = inc_by_one
        rescale_options = self.get_config_options(**kwargs)
        if "method" not in rescale_options:
            LOG.error("No rescaling method found and no default method configured for %s", gridded_product["product_name"])
            raise ValueError("No rescaling method configured for %s" % (gridded_product["product_name"],))
        LOG.debug("Product %s found in rescale config: %r", gridded_product["product_name"], rescale_options)

        min_out, max_out = dtype2range[kwargs["data_type"]]
        rescale_options.setdefault("min_out", min_out)
        # Reserve the top value when the caller will add one to every pixel.
        rescale_options.setdefault("max_out", max_out - 1 if rescale_options["inc_by_one"] else max_out)
        rescale_options.setdefault("units", gridded_product.get("units", "kelvin"))
        rescale_options["fill_out"] = fill_value

        # Parse out colormaps
        colormap = rescale_options.get('colormap')
        if colormap is not None:
            import trollimage.colormap as ticolormap
            from polar2grid.add_colormap import load_color_table_file_to_colormap
            if isinstance(colormap, str):
                # Prefer an on-disk color table; fall back to a named
                # trollimage colormap when the file cannot be read.
                try:
                    colormap = load_color_table_file_to_colormap(colormap)
                except OSError:
                    colormap = getattr(ticolormap, colormap)
            elif not isinstance(colormap, ticolormap.Colormap):
                # FIX: the message was never formatted -- the value was passed
                # as a second ValueError argument instead of via '%'.
                raise ValueError("Unknown 'colormap' type: %s" % (str(type(colormap)),))
            if 'min_in' in rescale_options:
                # NOTE(review): assumes 'max_in' is always present whenever
                # 'min_in' is configured -- confirm against the config schema.
                colormap.set_range(rescale_options['min_in'], rescale_options['max_in'])
            rescale_options['colormap'] = colormap
        return rescale_options
Example #3
0
 def default(self, obj):
     """Turn polar2grid objects, datetimes, and numpy values into JSON-serializable ones.

     Anything unrecognized is handed to the base encoder; on failure the
     object is printed for debugging and the TypeError re-raised.
     """
     if isinstance(obj, BaseP2GObject):
         # Keep the qualified class name so the matching decoder can
         # reconstruct the original type.
         mod_str = str(obj.__class__.__module__)
         mod_str = mod_str + "." if mod_str != __name__ else ""
         cls_str = str(obj.__class__.__name__)
         obj = obj.copy(as_dict=True)
         # object should now be a builtin dict
         obj["__class__"] = mod_str + cls_str
         return obj
     elif isinstance(obj, datetime):
         return obj.isoformat()
     elif (isinstance(obj, type) and issubclass(obj, numpy.number)) or isinstance(
             obj, numpy.dtype):
         # FIX: numpy.issubclass_ has been removed from NumPy; the
         # isinstance(obj, type) guard reproduces its old "False instead of
         # TypeError" behavior for non-class arguments.
         return dtype_to_str(obj)
     elif hasattr(obj, 'dtype'):
         # numpy array or scalar values
         if obj.size > 1:
             return obj.tolist()
         return int(obj) if numpy.issubdtype(obj,
                                             numpy.integer) else float(obj)
     else:
         try:
             return super(P2GJSONEncoder, self).default(obj)
         except TypeError as e:
             # Dump the unserializable object before propagating the error.
             print("TypeError:", str(e), type(obj))
             print(obj)
             raise
Example #4
0
    def get_rescale_options(self, gridded_product, data_type, inc_by_one=False, fill_value=None):
        """Assemble and return the rescaling options for *gridded_product*.

        The configured range for the output *data_type* supplies default
        ``min_out``/``max_out`` values; ``fill_out`` is always overwritten
        with *fill_value*.
        """
        meta = gridded_product["grid_definition"].copy(as_dict=True)
        meta.update(**gridded_product)
        config_kwargs = {field: meta.get(field) for field in self.id_fields}
        # Look up the config with the *output* data type, not the product's current one.
        config_kwargs["data_type"] = dtype_to_str(data_type)
        config_kwargs["inc_by_one"] = inc_by_one
        rescale_options = self.get_config_options(**config_kwargs)
        if "method" not in rescale_options:
            LOG.error(
                "No rescaling method found and no default method configured for %s",
                gridded_product["product_name"])
            raise ValueError("No rescaling method configured for %s" %
                             (gridded_product["product_name"], ))
        LOG.debug("Product %s found in rescale config: %r",
                  gridded_product["product_name"], rescale_options)

        min_out, max_out = dtype2range[config_kwargs["data_type"]]
        rescale_options.setdefault("min_out", min_out)
        # Leave room at the top of the range when values will be bumped by one.
        top_out = max_out - 1 if rescale_options["inc_by_one"] else max_out
        rescale_options.setdefault("max_out", top_out)
        rescale_options.setdefault("units", gridded_product.get("units", "kelvin"))
        rescale_options["fill_out"] = fill_value
        return rescale_options
Example #5
0
    def create_output_from_product(self, gridded_product, output_pattern=None,
                                   data_type=None, inc_by_one=None, fill_value=0, **kwargs):
        """Rescale a gridded product and write it out as a NinJo TIFF file.

        :param gridded_product: product supplying data and metadata (incl. grid definition)
        :param output_pattern: output filename, or a ``{}``-style format pattern;
            a default pattern is used when not provided
        :param data_type: output dtype (defaults to numpy.uint8)
        :param inc_by_one: add one to rescaled values, reserving 0 for fill
        :param fill_value: output value used for invalid pixels
        :returns: path of the file that was written
        :raises RuntimeError: if the file exists and overwriting is disabled
        """
        # FIXME: Previous version had -999.0 as the fill value...really?
        grid_def = gridded_product["grid_definition"]
        data_type = data_type or numpy.uint8
        inc_by_one = inc_by_one or False
        band_config_info = self.band_config_reader.get_config_options(
            product_name=gridded_product["product_name"],
            satellite=gridded_product["satellite"],
            instrument=gridded_product["instrument"],
            data_type=gridded_product["data_type"],
            data_kind=gridded_product["data_kind"],
            allow_default=False,
        )
        band_config_info["satellite_id"] = self.sat_config_reader.get_satellite_id(gridded_product)

        if not output_pattern:
            output_pattern = DEFAULT_OUTPUT_PATTERN
        if "{" in output_pattern:
            # format the filename from the product and grid metadata
            of_kwargs = gridded_product.copy(as_dict=True)
            of_kwargs["data_type"] = dtype_to_str(data_type)
            output_filename = self.create_output_filename(output_pattern,
                                                          grid_name=grid_def["grid_name"],
                                                          rows=grid_def["height"],
                                                          columns=grid_def["width"],
                                                          **of_kwargs)
        else:
            output_filename = output_pattern

        if os.path.isfile(output_filename):
            if not self.overwrite_existing:
                LOG.error("NinJo TIFF file already exists: %s", output_filename)
                raise RuntimeError("NinJo TIFF file already exists: %s" % (output_filename,))
            else:
                LOG.warning("NinJo TIFF file already exists, will overwrite: %s", output_filename)

        try:
            LOG.debug("Scaling %s data to fit in ninjotiff...", gridded_product["product_name"])
            data = self.rescaler.rescale_product(gridded_product, data_type,
                                                 inc_by_one=inc_by_one, fill_value=fill_value)

            # Create the geotiff
            save(data, grid_def, output_filename,
                 sat_id=band_config_info["satellite_id"],
                 chan_id=band_config_info["band_id"],
                 data_source=band_config_info["data_source"],
                 data_cat=band_config_info["data_category"],
                 fill_value=fill_value,
                 data_kind=gridded_product["data_kind"],
                 begin_time=gridded_product["begin_time"],
                 product_name=gridded_product["product_name"],
                 )
        except Exception:
            # FIX: StandardError no longer exists in Python 3; catch Exception
            # so partially written output is still removed before re-raising.
            if not self.keep_intermediate and os.path.isfile(output_filename):
                os.remove(output_filename)
            raise

        return output_filename
Example #6
0
    def save_image(self, img, filename=None, compute=True, dtype=None, fill_value=None, **kwargs):
        """Write the image's data to a flat binary file through a numpy memmap.

        When *compute* is False, returns the dask sources/targets pair so the
        caller can run the store step later; otherwise the store is executed
        immediately.
        """
        if not filename:
            filename = self.get_filename(
                data_type=dtype_to_str(dtype), rows=img.data.shape[0], columns=img.data.shape[1], **img.data.attrs
            )

        prepped = self._prep_data(img.data, dtype, fill_value)

        logger.info("Saving product %s to binary file %s", img.data.attrs["p2g_name"], filename)
        target = np.memmap(filename, shape=img.data.shape, dtype=dtype, mode="w+")
        if not compute:
            # Defer: hand back the (sources, targets) pair for a later da.store.
            return [[prepped], [target]]
        da.store(prepped, target)
        return None
Example #7
0
 def default(self, obj):
     """Convert polar2grid objects, datetimes, and numpy scalar types for JSON output."""
     if isinstance(obj, BaseP2GObject):
         # Stash the qualified class name so the decoder can restore the type.
         mod_str = str(obj.__class__.__module__)
         mod_str = mod_str + "." if mod_str != __name__ else ""
         cls_str = str(obj.__class__.__name__)
         obj = obj.copy(as_dict=True)
         # object should now be a builtin dict
         obj["__class__"] = mod_str + cls_str
         return obj
     elif isinstance(obj, datetime):
         return obj.isoformat()
     elif isinstance(obj, type) and issubclass(obj, numpy.number):
         # FIX: numpy.issubclass_ was removed from NumPy; the isinstance
         # guard keeps the old non-raising behavior for non-class arguments.
         return dtype_to_str(obj)
     else:
         return super(P2GJSONEncoder, self).default(obj)
Example #8
0
 def default(self, obj):
     """Serialize polar2grid objects, datetimes, and numpy number classes to JSON-safe values."""
     if isinstance(obj, BaseP2GObject):
         # Prefix the class name with its module unless it lives in this module.
         mod_str = str(obj.__class__.__module__)
         mod_str = mod_str + "." if mod_str != __name__ else ""
         cls_str = str(obj.__class__.__name__)
         obj = obj.copy(as_dict=True)
         # object should now be a builtin dict
         obj["__class__"] = mod_str + cls_str
         return obj
     elif isinstance(obj, datetime):
         return obj.isoformat()
     elif isinstance(obj, type) and issubclass(obj, numpy.number):
         # FIX: numpy.issubclass_ no longer exists in NumPy; guard with
         # isinstance(obj, type) to mirror its tolerant behavior.
         return dtype_to_str(obj)
     else:
         return super(P2GJSONEncoder, self).default(obj)
Example #9
0
    def get_rescale_options(self, gridded_product, data_type, inc_by_one=False, fill_value=None):
        """Return the configured rescaling options for one gridded product.

        Defaults for ``min_out``/``max_out`` come from the range table for the
        output data type; ``fill_out`` always reflects *fill_value*.
        """
        product_name = gridded_product["product_name"]
        combined_meta = gridded_product["grid_definition"].copy(as_dict=True)
        combined_meta.update(**gridded_product)
        lookup = dict((key, combined_meta.get(key, None)) for key in self.id_fields)
        # The config lookup needs the output data type, not the product's current one.
        lookup["data_type"] = dtype_to_str(data_type)
        lookup["inc_by_one"] = inc_by_one
        rescale_options = self.get_config_options(**lookup)
        if "method" not in rescale_options:
            LOG.error("No rescaling method found and no default method configured for %s", product_name)
            raise ValueError("No rescaling method configured for %s" % (product_name,))
        LOG.debug("Product %s found in rescale config: %r", product_name, rescale_options)

        min_out, max_out = dtype2range[lookup["data_type"]]
        rescale_options.setdefault("min_out", min_out)
        if rescale_options["inc_by_one"]:
            # Keep the top slot free for the later +1 shift.
            rescale_options.setdefault("max_out", max_out - 1)
        else:
            rescale_options.setdefault("max_out", max_out)
        rescale_options.setdefault("units", gridded_product.get("units", "kelvin"))
        rescale_options["fill_out"] = fill_value
        return rescale_options
Example #10
0
    def create_output_filename_old(self, pattern, satellite, instrument,
                                   product_name, grid_name, **kwargs):
        """Helper function that will take common meta data and put it into
        the output filename pattern provided. If either of the keyword arguments
        ``begin_time`` or ``end_time`` are not specified the other is used
        in its place.  If neither are specified the current time in UTC is
        taken.

        Some arguments are handled in special ways:
            - begin_time : begin_time is converted into 5 different strings
                that can each be individually specified in the pattern:
                    * begin_time     : YYYYMMDD_HHMMSS
                    * begin_YYYYMMDD : YYYYMMDD
                    * begin_YYMMDD   : YYMMDD
                    * begin_HHMMSS   : HHMMSS
                    * begin_HHMM     : HHMM
            - end_time   : Same as begin_time

        If a keyword is provided that is not recognized it will be provided
        to the pattern after running through a `str` filter.

        Possible pattern keywords (\*created internally in this function):
            - satellite       : identifier for the instrument's satellite
            - instrument      : name of the instrument
            - product_name    : name of the product in the output
            - data_kind       : kind of data (brightness temperature, radiance, reflectance, etc.)
            - data_type       : data type name of data in-memory (ex. uint1, int4, real4)
            - grid_name       : name of the grid the data was mapped to
            - columns         : number of columns in the data
            - rows            : number of rows in the data
            - begin_time      : begin time of the first scan (YYYYMMDD_HHMMSS)
            - begin_YYYYMMDD\* : begin date of the first scan
            - begin_YYMMDD\*   : begin date of the first scan
            - begin_HHMMSS\*   : begin time of the first scan
            - begin_HHMM\*     : begin time of the first scan
            - end_time        : end time of the first scan. Same keywords as start_time.

        >>> from datetime import datetime
        >>> pattern = "%(satellite)s_%(instrument)s_%(product_name)s_%(data_kind)s_%(grid_name)s_%(start_time)s.%(data_type)s.%(columns)s.%(rows)s"
        >>> class FakeBackend(BackendRole):
        ...     def create_output_from_product(self, gridded_product, **kwargs): pass
        ...     @property
        ...     def known_grids(self): return None
        >>> backend = FakeBackend()
        >>> filename = backend.create_output_filename_old(pattern,
        ...     "npp",
        ...     "viirs",
        ...     "i04",
        ...     data_kind="btemp",
        ...     grid_name="wgs84_fit",
        ...     data_type="uint1",
        ...     columns = 2500, rows=3000, begin_time=datetime(2012, 11, 10, 9, 8, 7))
        >>> print filename
        npp_viirs_i04_btemp_wgs84_fit_20121110_090807.uint1.2500.3000

        """
        # Keyword arguments
        data_type = kwargs.pop("data_type", None)
        data_kind = kwargs.pop("data_kind", None)
        columns = kwargs.pop("columns", None)
        rows = kwargs.pop("rows", None)
        begin_time_dt = kwargs.pop("begin_time", None)
        end_time_dt = kwargs.pop("end_time", None)

        if data_type and not isinstance(data_type, (str, unicode)):
            data_type = dtype_to_str(data_type)

        # Convert begin time and end time
        if begin_time_dt is None and end_time_dt is None:
            # FIX: datetime has no "utc_now" method -- the correct name is
            # utcnow() (this path previously raised AttributeError).
            begin_time_dt = end_time_dt = datetime.utcnow()
        elif begin_time_dt is None:
            begin_time_dt = end_time_dt
        elif end_time_dt is None:
            end_time_dt = begin_time_dt

        begin_time = begin_time_dt.strftime("%Y%m%d_%H%M%S")
        begin_YYYYMMDD = begin_time_dt.strftime("%Y%m%d")
        begin_YYMMDD = begin_time_dt.strftime("%y%m%d")
        begin_HHMMSS = begin_time_dt.strftime("%H%M%S")
        begin_HHMM = begin_time_dt.strftime("%H%M")
        end_time = end_time_dt.strftime("%Y%m%d_%H%M%S")
        end_YYYYMMDD = end_time_dt.strftime("%Y%m%d")
        end_YYMMDD = end_time_dt.strftime("%y%m%d")
        end_HHMMSS = end_time_dt.strftime("%H%M%S")
        end_HHMM = end_time_dt.strftime("%H%M")

        try:
            output_filename = pattern % dict(satellite=satellite,
                                             instrument=instrument,
                                             product_name=product_name,
                                             data_kind=data_kind,
                                             data_type=data_type,
                                             grid_name=grid_name,
                                             columns=columns,
                                             rows=rows,
                                             begin_time=begin_time,
                                             begin_YYYYMMDD=begin_YYYYMMDD,
                                             begin_YYMMDD=begin_YYMMDD,
                                             begin_HHMMSS=begin_HHMMSS,
                                             begin_HHMM=begin_HHMM,
                                             end_time=end_time,
                                             end_YYYYMMDD=end_YYYYMMDD,
                                             end_YYMMDD=end_YYMMDD,
                                             end_HHMMSS=end_HHMMSS,
                                             end_HHMM=end_HHMM,
                                             **kwargs)
        except KeyError as e:
            LOG.error("Unknown output pattern key: '%s'" % (e.message, ))
            raise

        return output_filename
Example #11
0
    def create_output_from_product(self,
                                   gridded_product,
                                   output_pattern=None,
                                   data_type=None,
                                   inc_by_one=None,
                                   fill_value=-999.0,
                                   **kwargs):
        """Rescale a gridded product and write it as a NinJo TIFF.

        Grid projection information (origin, radii, central meridian,
        reference latitude) is extracted from the product's grid definition
        and passed to ``create_ninjo_tiff``.

        :param gridded_product: product supplying data, metadata, and grid definition
        :param output_pattern: output filename or a ``{}``-style format pattern;
            a default pattern is used when not provided
        :param data_type: output dtype (defaults to numpy.uint8)
        :param inc_by_one: add one to rescaled values so 0 can act as fill
        :param fill_value: value written for invalid pixels
        :returns: the output filename written
        :raises RuntimeError: when the file exists and overwriting is disabled
        """
        # FIXME: Previous version had -999.0 as the fill value...really?
        grid_def = gridded_product["grid_definition"]
        grid_name = grid_def["grid_name"]
        data_type = data_type or numpy.uint8
        inc_by_one = inc_by_one or False
        grid_config_info = self.grid_config_reader.get_config_options(
            grid_name=grid_name, allow_default=False)
        band_config_info = self.band_config_reader.get_config_options(
            product_name=gridded_product["product_name"],
            satellite=gridded_product["satellite"],
            instrument=gridded_product["instrument"],
            data_type=gridded_product["data_type"],
            data_kind=gridded_product["data_kind"],
            allow_default=False,
        )
        band_config_info[
            "satellite_id"] = self.sat_config_reader.get_satellite_id(
                gridded_product)

        if not output_pattern:
            output_pattern = DEFAULT_OUTPUT_PATTERN
        if "{" in output_pattern:
            # format the filename from product and grid metadata
            of_kwargs = gridded_product.copy(as_dict=True)
            of_kwargs["data_type"] = dtype_to_str(data_type)
            output_filename = self.create_output_filename(
                output_pattern,
                grid_name=grid_def["grid_name"],
                rows=grid_def["height"],
                columns=grid_def["width"],
                **of_kwargs)
        else:
            output_filename = output_pattern

        if os.path.isfile(output_filename):
            if not self.overwrite_existing:
                LOG.error("NinJo TIFF file already exists: %s",
                          output_filename)
                raise RuntimeError("NinJo TIFF file already exists: %s" %
                                   (output_filename, ))
            else:
                LOG.warning(
                    "NinJo TIFF file already exists, will overwrite: %s",
                    output_filename)

        try:
            LOG.debug("Extracting additional information from grid projection")
            map_origin_lon, map_origin_lat = grid_def.lonlat_upperleft
            proj_dict = grid_def.proj4_dict
            equ_radius = proj_dict["a"]
            pol_radius = proj_dict["b"]
            central_meridian = proj_dict.get("lon_0", None)
            ref_lat1 = proj_dict.get("lat_ts", None)

            LOG.debug("Scaling %s data to fit in ninjotiff...",
                      gridded_product["product_name"])
            data = self.rescaler.rescale_product(gridded_product,
                                                 data_type,
                                                 inc_by_one=inc_by_one,
                                                 fill_value=fill_value)

            # Create the geotiff
            create_ninjo_tiff(data,
                              output_filename,
                              pixel_xres=grid_config_info["xres"],
                              pixel_yres=grid_config_info["yres"],
                              projection=grid_config_info["projection"],
                              origin_lat=map_origin_lat,
                              origin_lon=map_origin_lon,
                              radius_a=equ_radius,
                              radius_b=pol_radius,
                              central_meridian=central_meridian,
                              ref_lat1=ref_lat1,
                              is_calibrated=1,
                              sat_id=band_config_info["satellite_id"],
                              chan_id=band_config_info["band_id"],
                              data_source=band_config_info["data_source"],
                              data_cat=band_config_info["data_category"],
                              image_dt=gridded_product["begin_time"],
                              data_kind=gridded_product["data_kind"])
        except Exception:
            # FIX: StandardError does not exist in Python 3; catch Exception so
            # the incomplete output file is still removed before re-raising.
            if not self.keep_intermediate and os.path.isfile(output_filename):
                os.remove(output_filename)
            raise

        return output_filename
Example #12
0
    def rescale_product(self, gridded_product, data_type, inc_by_one=False, fill_value=None):
        """Rescale a gridded product based on how the rescaler is configured.

        The caller should know if it wants to increment the output data by 1 (`inc_by_one` keyword).

        :param data_type: Desired data type of the output data
        :param inc_by_one: After rescaling should 1 be added to all data values to leave the minumum value as the fill
        :param fill_value: Output value used for invalid/fill pixels
        :raises ValueError: if no rescaling method is configured for the product

        FUTURE: dec_by_one (mutually exclusive to inc_by_one)

        """
        all_meta = gridded_product["grid_definition"].copy(as_dict=True)
        all_meta.update(**gridded_product)
        kwargs = dict((k, all_meta.get(k, None)) for k in self.id_fields)
        # we don't want the product's current data_type, we want what the output will be
        kwargs["data_type"] = dtype_to_str(data_type)
        kwargs["inc_by_one"] = inc_by_one
        rescale_options = self.get_config_options(**kwargs)
        # The config may override the caller's inc_by_one preference.
        inc_by_one = rescale_options.pop("inc_by_one")
        if "method" not in rescale_options:
            LOG.error(
                "No rescaling method found and no default method configured for %s", gridded_product["product_name"]
            )
            raise ValueError("No rescaling method configured for %s" % (gridded_product["product_name"],))
        LOG.debug("Product %s found in rescale config: %r", gridded_product["product_name"], rescale_options)

        method = rescale_options.pop("method")
        # if the configuration file didn't force these then provide a logical default
        clip = rescale_options.pop("clip", True)
        min_out, max_out = dtype2range[kwargs["data_type"]]
        rescale_options.setdefault("min_out", min_out)
        rescale_options.setdefault("max_out", max_out - 1 if inc_by_one else max_out)
        rescale_options.setdefault("units", gridded_product.get("units", "kelvin"))
        rescale_options["fill_out"] = fill_value

        data = gridded_product.copy_array(read_only=False)
        good_data_mask = ~gridded_product.get_data_mask()
        if rescale_options.get("separate_rgb", True) and data.ndim == 3:
            # FIX: the three copy-pasted per-band calls are collapsed into one
            # loop; each RGB band is rescaled independently and re-stacked in
            # the original (0, 1, 2) order.
            data = numpy.concatenate(
                [
                    [
                        self._rescale_data(
                            method,
                            data[band],
                            good_data_mask[band],
                            rescale_options,
                            fill_value,
                            clip=clip,
                            inc_by_one=inc_by_one,
                        )
                    ]
                    for band in range(3)
                ]
            )
        else:
            data = self._rescale_data(
                method, data, good_data_mask, rescale_options, fill_value, clip=clip, inc_by_one=inc_by_one
            )

        # FIX: guard against a root logger with no handlers (previously raised
        # IndexError on handlers[0]).
        root_handlers = logging.getLogger("").handlers
        log_level = (root_handlers[0].level if root_handlers else logging.NOTSET) or 0
        # Only perform this calculation if it will be shown, its very time consuming
        if log_level <= logging.DEBUG:
            try:
                # assumes NaN fill value
                LOG.debug("Data min: %f, max: %f" % (numpy.nanmin(data), numpy.nanmax(data)))
            except Exception:
                # FIX: StandardError is Python 2-only; this is best-effort
                # debug logging, so any failure is reported and swallowed.
                LOG.debug("Couldn't get min/max values for %s (all fill data?)", gridded_product["product_name"])

        return data
Example #13
0
    def create_output_filename_old(self, pattern, satellite, instrument, product_name, grid_name, **kwargs):
        """Fill an old-style ``%(key)s`` filename *pattern* with product metadata.

        ``begin_time`` and ``end_time`` keyword arguments substitute for each
        other when only one is given; when neither is given the current UTC
        time is used for both. Each timestamp is expanded into five pattern
        keys: ``<prefix>_time`` (YYYYMMDD_HHMMSS), ``<prefix>_YYYYMMDD``,
        ``<prefix>_YYMMDD``, ``<prefix>_HHMMSS``, and ``<prefix>_HHMM``.

        Also recognized: ``data_type`` (converted from a dtype object to a
        string when needed), ``data_kind``, ``columns``, and ``rows``. Any
        other keyword is passed straight through to the pattern.

        :raises KeyError: when the pattern references an unknown key
        """
        # Pull the specially-handled keywords out of kwargs first.
        data_type = kwargs.pop("data_type", None)
        data_kind = kwargs.pop("data_kind", None)
        columns = kwargs.pop("columns", None)
        rows = kwargs.pop("rows", None)
        begin_dt = kwargs.pop("begin_time", None)
        end_dt = kwargs.pop("end_time", None)

        if data_type and not isinstance(data_type, str):
            data_type = dtype_to_str(data_type)

        # Fill in whichever timestamp is missing from the other (or "now").
        if begin_dt is None and end_dt is None:
            begin_dt = end_dt = datetime.utcnow()
        if begin_dt is None:
            begin_dt = end_dt
        if end_dt is None:
            end_dt = begin_dt

        # Expand both timestamps into their five pattern keys.
        time_parts = {}
        for prefix, stamp in (("begin", begin_dt), ("end", end_dt)):
            time_parts[prefix + "_time"] = stamp.strftime("%Y%m%d_%H%M%S")
            time_parts[prefix + "_YYYYMMDD"] = stamp.strftime("%Y%m%d")
            time_parts[prefix + "_YYMMDD"] = stamp.strftime("%y%m%d")
            time_parts[prefix + "_HHMMSS"] = stamp.strftime("%H%M%S")
            time_parts[prefix + "_HHMM"] = stamp.strftime("%H%M")

        try:
            output_filename = pattern % dict(
                satellite=satellite,
                instrument=instrument,
                product_name=product_name,
                data_kind=data_kind,
                data_type=data_type,
                grid_name=grid_name,
                columns=columns,
                rows=rows,
                **time_parts,
                **kwargs
            )
        except KeyError as e:
            LOG.error("Unknown output pattern key: '%s'" % (str(e),))
            raise

        return output_filename
Example #14
0
    def create_output_from_product(self, gridded_product, output_pattern=None,
                                   data_type=None, inc_by_one=None, fill_value=-999.0, **kwargs):
        """Rescale a gridded product and write it out as a NinJo TIFF file.

        :param gridded_product: gridded product object providing metadata
            (``product_name``, ``satellite``, ``begin_time``, ...) and the
            data to rescale — presumably a project ``GriddedProduct``-like mapping
        :param output_pattern: output filename, or a python ``{}``-format
            pattern filled from product metadata (defaults to
            ``DEFAULT_OUTPUT_PATTERN``)
        :param data_type: numpy data type of the output image
            (defaults to ``numpy.uint8``)
        :param inc_by_one: add 1 to rescaled data so the minimum value can be
            used as fill (defaults to False)
        :param fill_value: fill value passed to the rescaler (defaults to -999.0)
        :returns: filename of the NinJo TIFF that was created
        :raises RuntimeError: if the output file already exists and
            ``self.overwrite_existing`` is False
        """
        # FIXME: Previous version had -999.0 as the fill value...really?
        grid_def = gridded_product["grid_definition"]
        grid_name = grid_def["grid_name"]
        data_type = data_type or numpy.uint8
        inc_by_one = inc_by_one or False
        grid_config_info = self.grid_config_reader.get_config_options(grid_name=grid_name, allow_default=False)
        band_config_info = self.band_config_reader.get_config_options(
            product_name=gridded_product["product_name"],
            satellite=gridded_product["satellite"],
            instrument=gridded_product["instrument"],
            data_type=gridded_product["data_type"],
            data_kind=gridded_product["data_kind"],
            allow_default=False,
        )
        band_config_info["satellite_id"] = self.sat_config_reader.get_satellite_id(gridded_product)

        if not output_pattern:
            output_pattern = DEFAULT_OUTPUT_PATTERN
        if "{" in output_pattern:
            # treat the pattern as a format string and fill it from product metadata
            of_kwargs = gridded_product.copy(as_dict=True)
            # report the *output* data type, not the product's current one
            of_kwargs["data_type"] = dtype_to_str(data_type)
            output_filename = self.create_output_filename(output_pattern,
                                                          grid_name=grid_def["grid_name"],
                                                          rows=grid_def["height"],
                                                          columns=grid_def["width"],
                                                          **of_kwargs)
        else:
            output_filename = output_pattern

        if os.path.isfile(output_filename):
            if not self.overwrite_existing:
                LOG.error("NinJo TIFF file already exists: %s", output_filename)
                raise RuntimeError("NinJo TIFF file already exists: %s" % (output_filename,))
            else:
                LOG.warning("NinJo TIFF file already exists, will overwrite: %s", output_filename)

        try:
            LOG.debug("Extracting additional information from grid projection")
            map_origin_lon, map_origin_lat = grid_def.lonlat_upperleft
            proj_dict = grid_def.proj4_dict
            equ_radius = proj_dict["a"]
            pol_radius = proj_dict["b"]
            central_meridian = proj_dict.get("lon_0", None)
            ref_lat1 = proj_dict.get("lat_ts", None)

            LOG.debug("Scaling %s data to fit in ninjotiff...", gridded_product["product_name"])
            data = self.rescaler.rescale_product(gridded_product, data_type,
                                                 inc_by_one=inc_by_one, fill_value=fill_value)

            # Create the geotiff
            create_ninjo_tiff(data, output_filename,
                              pixel_xres=grid_config_info["xres"],
                              pixel_yres=grid_config_info["yres"],
                              projection=grid_config_info["projection"],
                              origin_lat=map_origin_lat,
                              origin_lon=map_origin_lon,
                              radius_a=equ_radius,
                              radius_b=pol_radius,
                              central_meridian=central_meridian,
                              ref_lat1=ref_lat1,
                              is_calibrated=1,
                              sat_id=band_config_info["satellite_id"],
                              chan_id=band_config_info["band_id"],
                              data_source=band_config_info["data_source"],
                              data_cat=band_config_info["data_category"],
                              image_dt=gridded_product["begin_time"],
                              data_kind=gridded_product["data_kind"]
                              )
        except Exception:
            # Was `except StandardError`, which no longer exists in Python 3 and
            # would raise NameError here, skipping cleanup. Catching Exception is
            # safe: we only remove the partial output file and re-raise.
            if not self.keep_intermediate and os.path.isfile(output_filename):
                os.remove(output_filename)
            raise

        return output_filename
Example #15
0
    def rescale_product(self,
                        gridded_product,
                        data_type,
                        inc_by_one=False,
                        fill_value=None):
        """Rescale a gridded product based on how the rescaler is configured.

        The caller should know if it wants to increment the output data by 1
        (`inc_by_one` keyword).

        :param gridded_product: gridded product object providing metadata and
            the data array (``copy_array`` / ``get_data_mask``)
        :param data_type: Desired data type of the output data
        :param inc_by_one: After rescaling should 1 be added to all data values
            to leave the minimum value as the fill
        :param fill_value: fill value written into the rescaled output
        :returns: numpy array of rescaled data
        :raises ValueError: if no rescaling method is configured for this product

        FUTURE: dec_by_one (mutually exclusive to inc_by_one)

        """
        all_meta = gridded_product["grid_definition"].copy(as_dict=True)
        all_meta.update(**gridded_product)
        kwargs = dict((k, all_meta.get(k, None)) for k in self.id_fields)
        # we don't want the product's current data_type, we want what the output will be
        kwargs["data_type"] = dtype_to_str(data_type)
        kwargs["inc_by_one"] = inc_by_one
        rescale_options = self.get_config_options(**kwargs)
        # the config may override the caller's inc_by_one choice
        inc_by_one = rescale_options.pop("inc_by_one")
        if "method" not in rescale_options:
            LOG.error(
                "No rescaling method found and no default method configured for %s",
                gridded_product["product_name"])
            raise ValueError("No rescaling method configured for %s" %
                             (gridded_product["product_name"], ))
        LOG.debug("Product %s found in rescale config: %r",
                  gridded_product["product_name"], rescale_options)

        method = rescale_options.pop("method")
        # if the configuration file didn't force these then provide a logical default
        clip = rescale_options.pop("clip", True)
        min_out, max_out = dtype2range[kwargs["data_type"]]
        rescale_options.setdefault("min_out", min_out)
        # reserve the top value so the +1 increment can't overflow the data type
        rescale_options.setdefault("max_out",
                                   max_out - 1 if inc_by_one else max_out)
        rescale_options.setdefault("units",
                                   gridded_product.get("units", "kelvin"))
        rescale_options["fill_out"] = fill_value

        data = gridded_product.copy_array(read_only=False)
        good_data_mask = ~gridded_product.get_data_mask()
        if rescale_options.get("separate_rgb", True) and data.ndim == 3:
            # Rescale each band independently. Previously three duplicated
            # calls hard-coded bands 0-2; looping over the first axis is
            # identical for 3 bands and also handles other band counts.
            data = numpy.concatenate([
                [self._rescale_data(method,
                                    data[band_idx],
                                    good_data_mask[band_idx],
                                    rescale_options,
                                    fill_value,
                                    clip=clip,
                                    inc_by_one=inc_by_one)]
                for band_idx in range(data.shape[0])
            ])
        else:
            data = self._rescale_data(method,
                                      data,
                                      good_data_mask,
                                      rescale_options,
                                      fill_value,
                                      clip=clip,
                                      inc_by_one=inc_by_one)

        # Guard against a root logger with no handlers (previously an IndexError)
        root_handlers = logging.getLogger('').handlers
        log_level = (root_handlers[0].level if root_handlers else 0) or 0
        # Only perform this calculation if it will be shown, its very time consuming
        if log_level <= logging.DEBUG:
            try:
                # assumes NaN fill value
                LOG.debug("Data min: %f, max: %f" %
                          (numpy.nanmin(data), numpy.nanmax(data)))
            except Exception:
                # Was `except StandardError` (removed in Python 3, would raise
                # NameError here). nanmin/nanmax raise on all-NaN input.
                LOG.debug(
                    "Couldn't get min/max values for %s (all fill data?)",
                    gridded_product["product_name"])

        return data