def create_swath_definition(self, lon_product, lat_product):
    """Create a SwathDefinition from a longitude product and a latitude product.

    Both products must agree on their structural attributes (data type,
    dimensions, rows per scan, fill value) or a ``RuntimeError`` is raised.
    The returned definition shares the products' swath data arrays, so both
    products are marked persistent to keep those arrays alive.
    """
    lon_reader = self.file_readers[PRODUCTS[lon_product["product_name"]].file_type]
    lat_reader = self.file_readers[PRODUCTS[lat_product["product_name"]].file_type]

    # Sanity check: the geolocation pair must describe identically-shaped swaths.
    for attr in ("data_type", "swath_rows", "swath_columns", "rows_per_scan", "fill_value"):
        lon_val = lon_product[attr]
        lat_val = lat_product[attr]
        if lon_val == lat_val:
            continue
        # NaN special case: NaNs can't be compared normally
        if attr == "fill_value" and numpy.isnan(lon_val) and numpy.isnan(lat_val):
            continue
        LOG.error("Longitude and latitude products do not have equal attributes: %s", attr)
        raise RuntimeError("Longitude and latitude products do not have equal attributes: %s" % (attr,))

    swath_definition = containers.SwathDefinition(
        swath_name=lon_product["product_name"] + "_" + lat_product["product_name"],
        longitude=lon_product["swath_data"],
        latitude=lat_product["swath_data"],
        data_type=lon_product["data_type"],
        swath_rows=lon_product["swath_rows"],
        swath_columns=lon_product["swath_columns"],
        rows_per_scan=lon_product["rows_per_scan"],
        source_filenames=sorted(set(lon_reader.filepaths + lat_reader.filepaths)),
        nadir_resolution=lon_reader.nadir_resolution,
        limb_resolution=lat_reader.limb_resolution,
        fill_value=lon_product["fill_value"],
    )

    # The swath definition now owns the data arrays: tell the lat and lon
    # products not to delete them on cleanup.
    lon_product.set_persist()
    lat_product.set_persist()

    # mmmmm, almost circular
    lon_product["swath_definition"] = swath_definition
    lat_product["swath_definition"] = swath_definition
    return swath_definition
def create_swath_definition(self, lon_product, lat_product):
    """Create a SwathDefinition from a longitude product and a latitude product.

    Both products must agree on their structural attributes (data type,
    dimensions, rows per scan, fill value) or a ``RuntimeError`` is raised.
    For VIIRS I-band/M-band longitude products the choice between
    terrain-corrected and non-corrected geolocation file types is driven by
    ``self.use_terrain_corrected``.
    """
    # Terrain-corrected and non-corrected geolocation share a product name;
    # the index selects which of the candidate file types was requested.
    index = None
    if lon_product in [viirs_module.PRODUCT_I_LON, viirs_module.PRODUCT_M_LON]:
        index = 0 if self.use_terrain_corrected else 1

    lon_def = PRODUCTS[lon_product["product_name"]]
    lon_file_reader = self.file_readers[lon_def.get_file_type(self.available_file_types, index=index)]
    lat_def = PRODUCTS[lat_product["product_name"]]
    lat_file_reader = self.file_readers[lat_def.get_file_type(self.available_file_types, index=index)]

    # Sanity check: the geolocation pair must describe identically-shaped swaths.
    for attr in ("data_type", "swath_rows", "swath_columns", "rows_per_scan", "fill_value"):
        lon_val = lon_product[attr]
        lat_val = lat_product[attr]
        if lon_val == lat_val:
            continue
        # NaN special case: NaNs can't be compared normally
        if attr == "fill_value" and numpy.isnan(lon_val) and numpy.isnan(lat_val):
            continue
        LOG.error("Longitude and latitude products do not have equal attributes: %s", attr)
        raise RuntimeError("Longitude and latitude products do not have equal attributes: %s" % (attr,))

    # Swath is named after the geolocation pair of the latitude product's
    # definition (matches the original's reuse of the last product_def).
    swath_name = GEO_PAIRS[lat_def.get_geo_pair_name(self.available_file_types)].name
    swath_definition = containers.SwathDefinition(
        swath_name=swath_name,
        longitude=lon_product["swath_data"],
        latitude=lat_product["swath_data"],
        data_type=lon_product["data_type"],
        swath_rows=lon_product["swath_rows"],
        swath_columns=lon_product["swath_columns"],
        rows_per_scan=lon_product["rows_per_scan"],
        source_filenames=sorted(set(lon_file_reader.filepaths + lat_file_reader.filepaths)),
        # nadir_resolution=lon_file_reader.nadir_resolution,
        limb_resolution=lat_file_reader.limb_resolution,
        fill_value=lon_product["fill_value"],
    )

    # The swath definition now owns the data arrays: tell the lat and lon
    # products not to delete them on cleanup.
    lon_product.set_persist()
    lat_product.set_persist()

    # mmmmm, almost circular
    lon_product["swath_definition"] = swath_definition
    lat_product["swath_definition"] = swath_definition
    return swath_definition
def _write_masked_binary(filename, masked_arr, overwrite_existing):
    """Write a masked array to a flat binary file; masked pixels become NaN.

    :raises RuntimeError: if ``filename`` exists and ``overwrite_existing``
        is False
    """
    if os.path.isfile(filename):
        if not overwrite_existing:
            LOG.error("Binary file already exists: %s" % (filename,))
            raise RuntimeError("Binary file already exists: %s" % (filename,))
        LOG.warning("Binary file already exists, will overwrite: %s", filename)
    out = np.memmap(filename, mode="w+", dtype=masked_arr.dtype, shape=masked_arr.shape)
    out[:] = masked_arr.data
    # Invalid (masked) pixels are stored as NaN, matching info["fill_value"]
    out[masked_arr.mask] = np.nan


def area_to_swath_def(area, overwrite_existing=False):
    """Convert an area's masked lon/lat arrays into a SwathDefinition.

    The longitude and latitude arrays are cached to flat binary files named
    after the area (``<name>_lon.dat`` / ``<name>_lat.dat``); masked pixels
    are written as NaN.

    :param area: object providing ``lons``/``lats`` masked arrays and a
        ``name`` attribute (optionally an ``info`` dict merged into the
        swath metadata)
    :param overwrite_existing: overwrite existing cache files instead of
        raising RuntimeError
    :raises RuntimeError: if a cache file exists and ``overwrite_existing``
        is False
    """
    lons = area.lons
    lats = area.lats
    # Colons are not safe in the cache filenames derived from the name
    name = area.name.replace(":", "")
    # 1-D swaths are treated as a single column
    if lons.ndim == 1:
        rows, cols = lons.shape[0], 1
    else:
        rows, cols = lons.shape
    info = {
        "swath_name": name,
        "longitude": name + "_lon.dat",
        "latitude": name + "_lat.dat",
        "swath_rows": rows,
        "swath_columns": cols,
        "data_type": lons.dtype,
        "fill_value": np.nan,
    }
    if hasattr(area, "info"):
        # Area-provided metadata may override the defaults above
        info.update(area.info)

    # Identical write logic for both coordinate arrays (was duplicated inline)
    _write_masked_binary(info["longitude"], lons, overwrite_existing)
    _write_masked_binary(info["latitude"], lats, overwrite_existing)
    return containers.SwathDefinition(**info)
def _cache_geo_binary(filename, arr, coord_name, overwrite_existing):
    """Stream a (dask) coordinate array into a flat binary cache file.

    :param coord_name: "longitude" or "latitude", used only for logging
    :raises RuntimeError: if ``filename`` exists and ``overwrite_existing``
        is False
    """
    if os.path.isfile(filename):
        if not overwrite_existing:
            LOG.error("Binary file already exists: %s" % (filename,))
            raise RuntimeError("Binary file already exists: %s" % (filename,))
        LOG.warning("Binary file already exists, will overwrite: %s", filename)
    LOG.info("Writing %s data to disk cache...", coord_name)
    out = np.memmap(filename, mode="w+", dtype=arr.dtype, shape=arr.shape)
    # Compute the dask graph and write directly into the memmap
    da.store(arr, out)


def area_to_swath_def(area, chunks=4096, overwrite_existing=False):
    """Convert an area's lon/lat arrays into a SwathDefinition.

    Coordinates are taken from ``area.lons``/``area.lats`` when present,
    otherwise computed via ``area.get_lonlats(chunks=chunks)``. They are
    cached to flat binary files named after the area
    (``<name>_lon.dat`` / ``<name>_lat.dat``).

    :param area: geolocation object (e.g. an area/swath definition) with a
        ``name`` attribute; ``attrs``, if present, is merged into the swath
        metadata
    :param chunks: dask chunk size used when coordinates must be computed
    :param overwrite_existing: overwrite existing cache files instead of
        raising RuntimeError
    :raises RuntimeError: if a cache file exists and ``overwrite_existing``
        is False
    """
    if hasattr(area, "lons") and area.lons is not None:
        lons = area.lons
        lats = area.lats
    else:
        lons, lats = area.get_lonlats(chunks=chunks)
    # Work with the dask arrays underneath any xarray wrappers
    if isinstance(lons, xr.DataArray):
        lons = lons.data
        lats = lats.data
    # Colons are not safe in the cache filenames derived from the name
    name = area.name.replace(":", "")
    # 1-D swaths are treated as a single column
    if lons.ndim == 1:
        rows, cols = lons.shape[0], 1
    else:
        rows, cols = lons.shape
    info = {
        "swath_name": name,
        "longitude": name + "_lon.dat",
        "latitude": name + "_lat.dat",
        "swath_rows": rows,
        "swath_columns": cols,
        "data_type": lons.dtype,
        "fill_value": np.nan,
    }
    if hasattr(area, "attrs"):
        # Area-provided metadata may override the defaults above
        info.update(area.attrs)

    # Identical write logic for both coordinate arrays (was duplicated inline)
    _cache_geo_binary(info["longitude"], lons, "longitude", overwrite_existing)
    _cache_geo_binary(info["latitude"], lats, "latitude", overwrite_existing)
    return containers.SwathDefinition(**info)