def test_def2yaml_converter(self):
    """Round-trip check: a legacy .cfg file converted to YAML parses to the same areas."""
    import tempfile

    from pyresample import utils

    legacy_path = os.path.join(os.path.dirname(__file__), 'test_files', 'areas.cfg')
    # mkstemp opens the file; close the descriptor so only the path is used.
    fd, yaml_path = tempfile.mkstemp()
    os.close(fd)
    try:
        utils.convert_def_to_yaml(legacy_path, yaml_path)
        converted = set(utils.parse_area_file(yaml_path))
        original = set(utils.parse_area_file(legacy_path))
        self.assertEqual(converted, original)
    finally:
        # Always clean up the temporary YAML file.
        os.remove(yaml_path)
def get_area_def(area_name):
    """Get the definition of *area_name* from file.

    The file is defined to use is to be placed in the $PPP_CONFIG_DIR
    directory, and its name is defined in satpy's configuration file.
    """
    from pyresample.utils import parse_area_file

    # parse_area_file returns a list even for a single requested area.
    matches = parse_area_file(get_area_file(), area_name)
    return matches[0]
def _get_legacy_and_yaml_areas(
        grid_configs: list[str]) -> tuple[GridManager, dict[str, AreaDefinition]]:
    """Split grid config paths into legacy P2G grids and pyresample YAML areas.

    Fix: the parameter annotation was ``list[str, ...]``, which is invalid —
    ``list`` is a single-parameter generic (``[str, ...]`` is tuple syntax);
    it is now ``list[str]``.

    Args:
        grid_configs: Paths to grid/area configuration files.  ``"grids.conf"``
            entries are replaced with the builtin YAML grid file.
            NOTE: the list is modified in place when that replacement happens.

    Returns:
        Tuple of (GridManager for legacy ``.conf`` files — or ``{}`` when there
        are none — and a mapping of area_id to AreaDefinition parsed from the
        remaining YAML files).
    """
    if "grids.conf" in grid_configs:
        logger.debug(
            "Replacing 'grids.conf' with builtin YAML grid configuration file."
        )
        grid_configs[grid_configs.index("grids.conf")] = GRIDS_YAML_FILEPATH
    if not grid_configs:
        # No configs given: fall back to the builtin YAML grid file.
        grid_configs = [GRIDS_YAML_FILEPATH]

    # Legacy polar2grid grids end in ".conf"; everything else is YAML.
    p2g_grid_configs = [x for x in grid_configs if x.endswith(".conf")]
    pyresample_area_configs = [
        x for x in grid_configs if not x.endswith(".conf")
    ]
    if p2g_grid_configs:
        grid_manager = GridManager(*p2g_grid_configs)
    else:
        grid_manager = {}

    if pyresample_area_configs:
        yaml_areas = parse_area_file(pyresample_area_configs)
        yaml_areas = {x.area_id: x for x in yaml_areas}
    else:
        yaml_areas = {}
    return grid_manager, yaml_areas
def test_area_parser_legacy(self):
    """Test legacy area parser."""
    from pyresample import utils
    cfg_path = os.path.join(os.path.dirname(__file__), 'test_files', 'areas.cfg')
    ease_nh, ease_sh = utils.parse_area_file(cfg_path, 'ease_nh', 'ease_sh')

    nh_str = """Area ID: ease_nh
Description: Arctic EASE grid
Projection ID: ease_nh
Projection: {'a': '6371228.0', 'lat_0': '90.0', 'lon_0': '0.0', 'proj': 'laea', 'units': 'm'}
Number of columns: 425
Number of rows: 425
Area extent: (-5326849.0625, -5326849.0625, 5326849.0625, 5326849.0625)"""
    self.assertEqual(str(ease_nh), nh_str)
    # Legacy parser must coerce numeric projection values to float.
    self.assertIsInstance(ease_nh.proj_dict['lat_0'], float)

    sh_str = """Area ID: ease_sh
Description: Antarctic EASE grid
Projection ID: ease_sh
Projection: {'a': '6371228.0', 'lat_0': '-90.0', 'lon_0': '0.0', 'proj': 'laea', 'units': 'm'}
Number of columns: 425
Number of rows: 425
Area extent: (-5326849.0625, -5326849.0625, 5326849.0625, 5326849.0625)"""
    self.assertEqual(str(ease_sh), sh_str)
    self.assertIsInstance(ease_sh.proj_dict['lat_0'], float)
def read_config(filename):
    """Read the config file *filename* and replace the values in global variables."""
    cfg = ConfigParser()
    cfg.read(filename)

    # Global scheduling options live in the [default] section.
    station = cfg.get("default", "station")
    satellites = cfg.get("default", "satellites").split(",")
    forward = cfg.getint("default", "forward")
    start = cfg.getfloat("default", "start")

    # Per-station section: name, position and area to use.
    station_name = cfg.get(station, "name")
    coords = (cfg.getfloat(station, "longitude"),
              cfg.getfloat(station, "latitude"),
              cfg.getfloat(station, "altitude"))

    # Per-satellite (night, day) score pairs.
    sat_scores = {sat: (cfg.getfloat(sat, "night"), cfg.getfloat(sat, "day"))
                  for sat in satellites}

    area = utils.parse_area_file(cfg.get(station, "area_file"),
                                 cfg.get(station, "area"))[0]

    return coords, sat_scores, station_name, area, forward, start
def test_area_def2basemap(self):
    """A Basemap built from 'ease_sh' carries the spherical 6371228.0 m radius."""
    cfg_path = os.path.join(os.path.dirname(__file__), 'test_files', 'areas.cfg')
    area_def = utils.parse_area_file(cfg_path, 'ease_sh')[0]
    bmap = plot.area_def2basemap(area_def)
    # Sphere: both semi-axes equal and match the EASE grid radius.
    is_expected_sphere = bmap.rmajor == bmap.rminor and bmap.rmajor == 6371228.0
    self.assertTrue(is_expected_sphere, 'Failed to create Basemap object')
def test_area_def2basemap(self):
    """Check that the 'ease_sh' area maps onto a spherical Basemap."""
    area_file = os.path.join(
        os.path.dirname(__file__), 'test_files', 'areas.cfg')
    area_def = utils.parse_area_file(area_file, 'ease_sh')[0]
    bmap = plot.area_def2basemap(area_def)
    self.assertTrue(
        bmap.rmajor == bmap.rminor == 6371228.0,
        'Failed to create Basemap object')
def test_orthoplot(self):
    """Smoke test: resample swath data and render a quicklook on 'ortho'."""
    cfg_path = os.path.join(os.path.dirname(__file__), 'test_files', 'areas.cfg')
    area_def = utils.parse_area_file(cfg_path, 'ortho')[0]
    swath_def = geometry.SwathDefinition(self.lons, self.lats)
    result = kd_tree.resample_nearest(swath_def, self.tb37v, area_def,
                                      radius_of_influence=20000,
                                      fill_value=None)
    plt = plot._get_quicklook(area_def, result)
def test_plate_carreeplot(self):
    """Smoke test: quicklook on 'pc_world' with gridlines disabled."""
    cfg_path = os.path.join(os.path.dirname(__file__), 'test_files', 'areas.cfg')
    area_def = utils.parse_area_file(cfg_path, 'pc_world')[0]
    swath_def = geometry.SwathDefinition(self.lons, self.lats)
    result = kd_tree.resample_nearest(swath_def, self.tb37v, area_def,
                                      radius_of_influence=20000,
                                      fill_value=None)
    # num_meridians/num_parallels of 0 suppresses graticule drawing.
    plt = plot._get_quicklook(area_def, result,
                              num_meridians=0, num_parallels=0)
def test_orthoplot(self):
    """Resample the test swath onto the 'ortho' area and build a quicklook."""
    area_file = os.path.join(
        os.path.dirname(__file__), 'test_files', 'areas.cfg')
    area_def = utils.parse_area_file(area_file, 'ortho')[0]
    swath_def = geometry.SwathDefinition(self.lons, self.lats)
    resampled = kd_tree.resample_nearest(
        swath_def, self.tb37v, area_def,
        radius_of_influence=20000, fill_value=None)
    plt = plot._get_quicklook(area_def, resampled)
def test_plate_carreeplot(self):
    """Resample onto 'pc_world' and build a quicklook without graticules."""
    area_file = os.path.join(
        os.path.dirname(__file__), 'test_files', 'areas.cfg')
    area_def = utils.parse_area_file(area_file, 'pc_world')[0]
    swath_def = geometry.SwathDefinition(self.lons, self.lats)
    resampled = kd_tree.resample_nearest(
        swath_def, self.tb37v, area_def,
        radius_of_influence=20000, fill_value=None)
    plt = plot._get_quicklook(area_def, resampled,
                              num_meridians=0, num_parallels=0)
def __init__(self, station_id, name, longitude, latitude, altitude, area,
             satellites, area_file=None):
    """Initialize a station with its position, area and satellite list.

    When *area_file* is given, *area* is treated as an area name and the
    actual definition is loaded from that file, replacing ``self.area``.
    """
    super(Station, self).__init__()
    self.id = station_id
    self.name = name
    self.longitude = longitude
    self.latitude = latitude
    self.altitude = altitude
    self.area = area
    self.satellites = satellites
    if area_file is None:
        return
    try:
        self.area = resample_utils.parse_area_file(area_file, area)[0]
    except TypeError:
        # Best effort: if the file/area cannot be parsed, keep the
        # *area* value that was passed in.
        pass
def _get_legacy_and_custom_areas(grid_configs):
    """Split config paths into a legacy GridManager and custom pyresample areas.

    Files ending in ``.conf`` feed the polar2grid GridManager; every other
    path is parsed by pyresample into a dict keyed by ``area_id``.
    """
    p2g_grid_configs = []
    pyresample_area_configs = []
    for path in grid_configs:
        if path.endswith('.conf'):
            p2g_grid_configs.append(path)
        else:
            pyresample_area_configs.append(path)

    if not grid_configs or p2g_grid_configs:
        # if we were given p2g grid configs or we weren't given any to choose from
        from polar2grid.grids import GridManager
        grid_manager = GridManager(*p2g_grid_configs)
    else:
        grid_manager = {}

    custom_areas = {}
    if pyresample_area_configs:
        from pyresample.utils import parse_area_file
        for area in parse_area_file(pyresample_area_configs):
            custom_areas[area.area_id] = area
    return grid_manager, custom_areas
def _remap_a_score_on_an_area(self, plot_area_name='npole', vmin=0.0, vmax=1.0, score='Kuipers'):
    """Resample the score attribute named *score* onto *plot_area_name* and save a quicklook PNG.

    Reads ``self.lons``/``self.lats``/``self.radius_km`` and the attribute
    ``self.<score>``; writes a PNG under ``self.PLOT_DIR_SCORE``.
    NOTE(review): mutates ``matplotlib.rcParams['image.cmap']`` globally.
    """
    from pyresample import image, geometry
    area_def = utils.parse_area_file(
        'reshaped_files_plotting/region_config_test.cfg',
        plot_area_name)[0]
    data = getattr(self, score)
    # Work on a copy so the stored score array is not clamped in place.
    data = data.copy()
    if np.ma.is_masked(data):
        # Clamp only unmasked values to [vmin, vmax]; assigning through the
        # boolean index keeps masked (nodata) elements untouched.
        data[np.logical_and(np.equal(data.mask,False),data>vmax)]=vmax
        data[np.logical_and(np.equal(data.mask,False),data<vmin)]=vmin #do not wan't low ex hitrates set to nodata!
    else:
        data[data>vmax]=vmax
        data[data<vmin]=vmin
    #lons = np.ma.masked_array(self.lons, mask=data.mask)
    #lats = np.ma.masked_array(self.lats, mask=data.mask)
    lons = self.lons
    lats = self.lats
    swath_def = geometry.SwathDefinition(lons=lons, lats=lats)
    # Search radius 2.5x the data footprint (radius_km is in km, hence *1000).
    swath_con = image.ImageContainerNearest(
        data, swath_def,
        radius_of_influence=self.radius_km*1000*2.5,
        epsilon=1.0)
    area_con = swath_con.resample(area_def)
    result = area_con.image_data
    #pr.plot.show_quicklook(area_def, result,
    #                       vmin=vmin, vmax=vmax, label=score)
    # Pick a colormap per score type; only "mae" differs (sequential Reds),
    # the remaining branches all fall back to the diverging BrBG map.
    if "FAR" in score:
        matplotlib.rcParams['image.cmap']= "BrBG"
    elif "diff" in score:
        matplotlib.rcParams['image.cmap']= "BrBG"
    elif "mae" in score:
        matplotlib.rcParams['image.cmap']= "Reds"
    else:
        matplotlib.rcParams['image.cmap']= "BrBG"
    plot_label =score
    # "mae" plots are saved without a label.
    if "mae" in score:
        plot_label =""
    pr.plot.save_quicklook(self.PLOT_DIR_SCORE + self.PLOT_FILENAME_START+ plot_area_name +'.png',
                           area_def, result,
                           vmin=vmin, vmax=vmax, label=plot_label)
def test_multiple_file_content(self):
    """Parsing several in-memory YAML definitions yields one area per document."""
    from pyresample import utils
    yaml_ease_sh = """ease_sh:
  description: Antarctic EASE grid
  projection:
    a: 6371228.0
    units: m
    lon_0: 0
    proj: laea
    lat_0: -90
  shape:
    height: 425
    width: 425
  area_extent:
    lower_left_xy: [-5326849.0625, -5326849.0625]
    upper_right_xy: [5326849.0625, 5326849.0625]
    units: m
"""
    yaml_ease_sh2 = """ease_sh2:
  description: Antarctic EASE grid
  projection:
    a: 6371228.0
    units: m
    lon_0: 0
    proj: laea
    lat_0: -90
  shape:
    height: 425
    width: 425
  area_extent:
    lower_left_xy: [-5326849.0625, -5326849.0625]
    upper_right_xy: [5326849.0625, 5326849.0625]
    units: m
"""
    results = utils.parse_area_file([yaml_ease_sh, yaml_ease_sh2])
    self.assertEqual(len(results), 2)
    # Order of the parsed areas is not asserted, only membership.
    for parsed in results:
        self.assertIn(parsed.area_id, ('ease_sh', 'ease_sh2'))
def test_area_parser(self):
    """Both EASE areas from the legacy file stringify to the expected form."""
    cfg_path = os.path.join(os.path.dirname(__file__), 'test_files', 'areas.cfg')
    ease_nh, ease_sh = utils.parse_area_file(cfg_path, 'ease_nh', 'ease_sh')

    nh_found = (str(ease_nh) == """Area ID: ease_nh
Name: Arctic EASE grid
Projection ID: ease_nh
Projection: {'a': '6371228.0', 'lat_0': '90', 'lon_0': '0', 'proj': 'laea', 'units': 'm'}
Number of columns: 425
Number of rows: 425
Area extent: (-5326849.0625, -5326849.0625, 5326849.0625, 5326849.0625)""")

    sh_found = (str(ease_sh) == """Area ID: ease_sh
Name: Antarctic EASE grid
Projection ID: ease_sh
Projection: {'a': '6371228.0', 'lat_0': '-90', 'lon_0': '0', 'proj': 'laea', 'units': 'm'}
Number of columns: 425
Number of rows: 425
Area extent: (-5326849.0625, -5326849.0625, 5326849.0625, 5326849.0625)""")

    self.assertTrue(nh_found and sh_found,
                    msg='Failed to parse areas correctly')
def test_area_parser(self):
    """Legacy .cfg parsing produces the documented string representations."""
    area_file = os.path.join(
        os.path.dirname(__file__), 'test_files', 'areas.cfg')
    ease_nh, ease_sh = utils.parse_area_file(area_file, 'ease_nh', 'ease_sh')

    expected_nh = """Area ID: ease_nh
Name: Arctic EASE grid
Projection ID: ease_nh
Projection: {'a': '6371228.0', 'lat_0': '90', 'lon_0': '0', 'proj': 'laea', 'units': 'm'}
Number of columns: 425
Number of rows: 425
Area extent: (-5326849.0625, -5326849.0625, 5326849.0625, 5326849.0625)"""
    nh_found = ease_nh.__str__() == expected_nh

    expected_sh = """Area ID: ease_sh
Name: Antarctic EASE grid
Projection ID: ease_sh
Projection: {'a': '6371228.0', 'lat_0': '-90', 'lon_0': '0', 'proj': 'laea', 'units': 'm'}
Number of columns: 425
Number of rows: 425
Area extent: (-5326849.0625, -5326849.0625, 5326849.0625, 5326849.0625)"""
    sh_found = ease_sh.__str__() == expected_sh

    self.assertTrue(nh_found and sh_found,
                    msg='Failed to parse areas correctly')
# NOTE(review): this chunk begins mid-loop — the enclosing BUFR-subset loop
# headers (compare the complete variant elsewhere in this collection:
# `for k, m, (v, q) in subset.next_data(): if gotit: continue ...`) are
# outside this view.  Indentation of the fragment below is reconstructed
# and unverified; `lat`, `lon`, `pres`, `AREA` and `PNGFILE` are defined
# before this fragment.
        continue
    if k == 5001:
        # latitude descriptor
        lat.append((0, 0, v))
    if k == 6001:
        # longitude descriptor
        lon.append((0, 0, v))
    if k == 7004:
        # pressure descriptor; flag so only the first value per subset is kept
        pres.append((0, 0, v))
        gotit = 1

lons = np.concatenate(lon)
lats = np.concatenate(lat)
pres = np.concatenate(pres) / 100.0  # Pa -> hPa
# Mask fill/missing values (anything above 1e6 hPa is not physical).
pres = np.ma.masked_greater(pres, 1.0e+6)

import pyresample as pr
from pyresample import kd_tree, geometry
from pyresample import utils

swath_def = geometry.SwathDefinition(lons=lons, lats=lats)
area_def = utils.parse_area_file('region_config.cfg', AREA)[0]
# Nearest-neighbour resampling of cloud-top pressure onto the target area.
result = kd_tree.resample_nearest(swath_def, pres, area_def,
                                  radius_of_influence=12000, epsilon=100,
                                  fill_value=None)
pr.plot.save_quicklook(PNGFILE % AREA, area_def, result,
                       label='IASI - Cloud Top Pressure', coast_res='l')
def test_commented(self):
    """Areas commented out in the legacy file must not be parsed."""
    from pyresample import utils
    cfg_path = os.path.join(os.path.dirname(__file__), 'test_files', 'areas.cfg')
    parsed_names = [area.name for area in utils.parse_area_file(cfg_path)]
    self.assertNotIn('commented', parsed_names)
def get_area_def(area_name):
    """Get the definition of *area_name* from file.

    The file is defined to use is to be placed in the $PPP_CONFIG_DIR
    directory, and its name is defined in mpop's configuration file.
    """
    area_file = get_area_file()
    return utils.parse_area_file(area_file, area_name)[0]
def main(argv=sys.argv[1:]):
    """Load, composite, resample, and save satellite datasets (glue script entry point).

    Parses CLI arguments, builds a satpy Scene from the input files,
    resamples to the requested grids/areas and writes results with the
    selected writers.  Returns 0 on success, -1 on error (also exits via
    argparse for usage errors).

    NOTE(review): this function was reconstructed from whitespace-mangled
    source; the exact layout of the ``usage`` string is inferred.
    """
    global LOG
    from satpy import Scene
    from satpy.resample import get_area_def
    from satpy.writers import compute_writer_results
    from dask.diagnostics import ProgressBar
    from polar2grid.core.script_utils import (
        setup_logging, rename_log_file, create_exc_handler)
    import argparse
    prog = os.getenv('PROG_NAME', sys.argv[0])
    # "usage: " will be printed at the top of this:
    usage = """
%(prog)s -h
see available products:
    %(prog)s -r <reader> -w <writer> --list-products -f file1 [file2 ...]
basic processing:
    %(prog)s -r <reader> -w <writer> [options] -f file1 [file2 ...]
basic processing with limited products:
    %(prog)s -r <reader> -w <writer> [options] -p prod1 prod2 -f file1 [file2 ...]
"""
    parser = argparse.ArgumentParser(prog=prog, usage=usage,
                                     description="Load, composite, resample, and save datasets.")
    parser.add_argument('-v', '--verbose', dest='verbosity', action="count", default=0,
                        help='each occurrence increases verbosity 1 level through ERROR-WARNING-INFO-DEBUG (default INFO)')
    parser.add_argument('-l', '--log', dest="log_fn", default=None,
                        help="specify the log filename")
    parser.add_argument('--progress', action='store_true',
                        help="show processing progress bar (not recommended for logged output)")
    parser.add_argument('--num-workers', type=int, default=4,
                        help="specify number of worker threads to use (default: 4)")
    parser.add_argument('--match-resolution', dest='preserve_resolution', action='store_false',
                        help="When using the 'native' resampler for composites, don't save data "
                             "at its native resolution, use the resolution used to create the "
                             "composite.")
    parser.add_argument('-w', '--writers', nargs='+',
                        help='writers to save datasets with')
    parser.add_argument("--list-products", dest="list_products", action="store_true",
                        help="List available reader products and exit")
    subgroups = add_scene_argument_groups(parser)
    subgroups += add_resample_argument_groups(parser)
    # Strip help flags so a partial parse can run before writer args exist.
    argv_without_help = [x for x in argv if x not in ["-h", "--help"]]
    args, remaining_args = parser.parse_known_args(argv_without_help)

    # get the logger if we know the readers and writers that will be used
    if args.reader is not None and args.writers is not None:
        glue_name = args.reader + "_" + "-".join(args.writers or [])
        LOG = logging.getLogger(glue_name)
    # add writer arguments
    if args.writers is not None:
        for writer in (args.writers or []):
            parser_func = WRITER_PARSER_FUNCTIONS.get(writer)
            if parser_func is None:
                continue
            subgroups += parser_func(parser)
    # Full parse now that all writer-specific arguments are registered.
    args = parser.parse_args(argv)

    if args.reader is None:
        parser.print_usage()
        parser.exit(1, "\nERROR: Reader must be provided (-r flag).\n"
                       "Supported readers:\n\t{}\n".format('\n\t'.join(['abi_l1b', 'ahi_hsd', 'hrit_ahi'])))
    if args.writers is None:
        parser.print_usage()
        parser.exit(1, "\nERROR: Writer must be provided (-w flag) with one or more writer.\n"
                       "Supported writers:\n\t{}\n".format('\n\t'.join(['geotiff'])))

    def _args_to_dict(group_actions):
        # Collect only arguments that actually exist on the parsed namespace.
        return {ga.dest: getattr(args, ga.dest) for ga in group_actions if hasattr(args, ga.dest)}

    scene_args = _args_to_dict(subgroups[0]._group_actions)
    load_args = _args_to_dict(subgroups[1]._group_actions)
    resample_args = _args_to_dict(subgroups[2]._group_actions)
    writer_args = {}
    for idx, writer in enumerate(args.writers):
        # Each writer contributed two argument groups (registered above).
        sgrp1, sgrp2 = subgroups[3 + idx * 2: 5 + idx * 2]
        wargs = _args_to_dict(sgrp1._group_actions)
        if sgrp2 is not None:
            wargs.update(_args_to_dict(sgrp2._group_actions))
        writer_args[writer] = wargs
        # get default output filename
        if 'filename' in wargs and wargs['filename'] is None:
            wargs['filename'] = get_default_output_filename(args.reader, writer)

    if not args.filenames:
        parser.print_usage()
        parser.exit(1, "\nERROR: No data files provided (-f flag)\n")

    # Prepare logging
    rename_log = False
    if args.log_fn is None:
        rename_log = True
        args.log_fn = glue_name + "_fail.log"
    levels = [logging.ERROR, logging.WARN, logging.INFO, logging.DEBUG]
    setup_logging(console_level=levels[min(3, args.verbosity)], log_filename=args.log_fn)
    # rasterio is noisy; keep it one level quieter than the main logger.
    logging.getLogger('rasterio').setLevel(levels[min(2, args.verbosity)])
    sys.excepthook = create_exc_handler(LOG.name)
    if levels[min(3, args.verbosity)] > logging.DEBUG:
        import warnings
        warnings.filterwarnings("ignore")
    LOG.debug("Starting script with arguments: %s", " ".join(sys.argv))

    # Set up dask and the number of workers
    if args.num_workers:
        from multiprocessing.pool import ThreadPool
        dask.config.set(pool=ThreadPool(args.num_workers))

    # Parse provided files and search for files if provided directories
    scene_args['filenames'] = get_input_files(scene_args['filenames'])
    # Create a Scene, analyze the provided files
    LOG.info("Sorting and reading input files...")
    try:
        scn = Scene(**scene_args)
    except ValueError as e:
        LOG.error("{} | Enable debug message (-vvv) or see log file for details.".format(str(e)))
        LOG.debug("Further error information: ", exc_info=True)
        return -1
    except OSError:
        LOG.error("Could not open files. Enable debug message (-vvv) or see log file for details.")
        LOG.debug("Further error information: ", exc_info=True)
        return -1

    if args.list_products:
        print("\n".join(sorted(scn.available_dataset_names(composites=True))))
        return 0

    # Rename the log file
    if rename_log:
        rename_log_file(glue_name + scn.attrs['start_time'].strftime("_%Y%m%d_%H%M%S.log"))

    # Load the actual data arrays and metadata (lazy loaded as dask arrays)
    if load_args['products'] is None:
        try:
            # Fall back to the reader module's default product list.
            reader_mod = importlib.import_module('polar2grid.readers.' + scene_args['reader'])
            load_args['products'] = reader_mod.DEFAULT_PRODUCTS
            LOG.info("Using default product list: {}".format(load_args['products']))
        except (ImportError, AttributeError):
            LOG.error("No default products list set, please specify with `--products`.")
            return -1
    LOG.info("Loading product metadata from files...")
    scn.load(load_args['products'])

    resample_kwargs = resample_args.copy()
    areas_to_resample = resample_kwargs.pop('grids')
    grid_configs = resample_kwargs.pop('grid_configs')
    resampler = resample_kwargs.pop('resampler')

    if areas_to_resample is None and resampler in [None, 'native']:
        # no areas specified
        areas_to_resample = ['MAX']
    elif areas_to_resample is None:
        raise ValueError("Resampling method specified (--method) without any destination grid/area (-g flag).")
    elif not areas_to_resample:
        # they don't want any resampling (they used '-g' with no args)
        areas_to_resample = [None]

    has_custom_grid = any(g not in ['MIN', 'MAX', None] for g in areas_to_resample)
    if has_custom_grid and resampler == 'native':
        LOG.error("Resampling method 'native' can only be used with 'MIN' or 'MAX' grids "
                  "(use 'nearest' method instead).")
        return -1

    # Split grid configs: legacy polar2grid ".conf" vs pyresample YAML/cfg.
    p2g_grid_configs = [x for x in grid_configs if x.endswith('.conf')]
    pyresample_area_configs = [x for x in grid_configs if not x.endswith('.conf')]
    if not grid_configs or p2g_grid_configs:
        # if we were given p2g grid configs or we weren't given any to choose from
        from polar2grid.grids import GridManager
        grid_manager = GridManager(*p2g_grid_configs)
    else:
        grid_manager = {}

    if pyresample_area_configs:
        from pyresample.utils import parse_area_file
        custom_areas = parse_area_file(pyresample_area_configs)
        custom_areas = {x.area_id: x for x in custom_areas}
    else:
        custom_areas = {}

    ll_bbox = resample_kwargs.pop('ll_bbox')
    if ll_bbox:
        scn = scn.crop(ll_bbox=ll_bbox)

    wishlist = scn.wishlist.copy()
    preserve_resolution = get_preserve_resolution(args, resampler, areas_to_resample)
    if preserve_resolution:
        # Products already loaded at native resolution are written as-is.
        preserved_products = set(wishlist) & set(scn.datasets.keys())
        resampled_products = set(wishlist) - preserved_products

        # original native scene
        to_save = write_scene(scn, args.writers, writer_args, preserved_products)
    else:
        preserved_products = set()
        resampled_products = set(wishlist)
        to_save = []
    LOG.debug("Products to preserve resolution for: {}".format(preserved_products))
    LOG.debug("Products to use new resolution for: {}".format(resampled_products))

    for area_name in areas_to_resample:
        # Resolve the target area definition for this grid name.
        if area_name is None:
            # no resampling
            area_def = None
        elif area_name == 'MAX':
            area_def = scn.max_area()
        elif area_name == 'MIN':
            area_def = scn.min_area()
        elif area_name in custom_areas:
            area_def = custom_areas[area_name]
        elif area_name in grid_manager:
            from pyresample.geometry import DynamicAreaDefinition
            p2g_def = grid_manager[area_name]
            area_def = p2g_def.to_satpy_area()
            # Freeze dynamic areas using the scene extent and the grid's cell size.
            if isinstance(area_def, DynamicAreaDefinition) and p2g_def['cell_width'] is not None:
                area_def = area_def.freeze(scn.max_area(),
                                           resolution=(abs(p2g_def['cell_width']),
                                                       abs(p2g_def['cell_height'])))
        else:
            area_def = get_area_def(area_name)

        if resampler is None and area_def is not None:
            # Default resampler: 'native' for MIN/MAX, 'nearest' otherwise.
            rs = 'native' if area_name in ['MIN', 'MAX'] else 'nearest'
            LOG.debug("Setting default resampling to '{}' for grid '{}'".format(rs, area_name))
        else:
            rs = resampler

        if area_def is not None:
            LOG.info("Resampling data to '%s'", area_name)
            new_scn = scn.resample(area_def, resampler=rs, **resample_kwargs)
        elif not preserve_resolution:
            # the user didn't want to resample to any areas
            # the user also requested that we don't preserve resolution
            # which means we have to save this Scene's datasets
            # because they won't be saved
            new_scn = scn

        to_save = write_scene(new_scn, args.writers, writer_args, resampled_products, to_save=to_save)

    if args.progress:
        pbar = ProgressBar()
        pbar.register()

    LOG.info("Computing products and saving data to writers...")
    compute_writer_results(to_save)
    LOG.info("SUCCESS")
    return 0
# -*- coding: utf-8 -*- """ Created on Sun Nov 12 13:26:55 2017 @author: vkvalappil """ import pyresample as pr from pyresample import kd_tree, geometry from pyresample import utils swath_def = geometry.SwathDefinition(lons=lon1, lats=lat1) area_def = utils.parse_area_file( '/home/vkvalappil/Data/modelWRF/input/region_config.cfg', 'scan2')[0] result = kd_tree.resample_nearest(swath_def, brt, area_def, radius_of_influence=12000, epsilon=100, fill_value=None) pr.plot.save_quicklook( '/home/vkvalappil/Data/modelWRF/input/iasi_ctp_quick.png', area_def, result, label='AMSUA Brightness Temp', coast_res='h') area_id = 'uae' description = 'uae wrf domain' proj_id = 'uae' x_size = 425
# NOTE(review): this chunk begins mid-loop — the enclosing subset/data loop
# headers are outside this view; indentation of the fragment below is
# reconstructed and unverified.  The bare `print` statement means this is
# Python 2 code.
    if k == 6001:
        # longitude descriptor
        lon.append((0, 0, v))
    if k == 7004:
        # pressure descriptor; flag so only the first value per subset is kept
        pres.append((0, 0, v))
        gotit = 1

print len(lon), len(lat), len(pres)

lons = np.concatenate(lon)
lats = np.concatenate(lat)
pres = np.concatenate(pres) / 100.0  # Pa -> hPa
# Mask fill/missing values (anything above 1e6 hPa is not physical).
pres = np.ma.masked_greater(pres, 1.0e+6)

import pyresample as pr
from pyresample import kd_tree, geometry
from pyresample import utils

swath_def = geometry.SwathDefinition(lons=lons, lats=lats)
area_def = utils.parse_area_file('region_config.cfg', 'scanX')[0]
result = kd_tree.resample_nearest(swath_def, pres, area_def,
                                  radius_of_influence=12000, epsilon=100,
                                  fill_value=None)
pr.plot.save_quicklook(pngfile, area_def, result,
                       label='IASI - Cloud Top Pressure', coast_res='l')
# NOTE(review): `bfr`, `lat`, `lon`, `pres`, `AREA` and `PNGFILE` are defined
# before this chunk (outside this view).  Placement of `gotit = 1` inside the
# 7004 branch is reconstructed from the collapsed source — confirm.
for subset in bfr.next_subset():
    gotit = 0
    for k, m, (v, q) in subset.next_data():
        # Once a pressure value was captured, skip the rest of this subset.
        if gotit:
            continue
        if k == 5001:
            # latitude descriptor
            lat.append((0, 0, v))
        if k == 6001:
            # longitude descriptor
            lon.append((0, 0, v))
        if k == 7004:
            # pressure descriptor
            pres.append((0, 0, v))
            gotit = 1

lons = np.concatenate(lon)
lats = np.concatenate(lat)
pres = np.concatenate(pres) / 100.0  # Pa -> hPa
# Mask fill/missing values (anything above 1e6 hPa is not physical).
pres = np.ma.masked_greater(pres, 1.0e+6)

import pyresample as pr
from pyresample import kd_tree, geometry
from pyresample import utils

swath_def = geometry.SwathDefinition(lons=lons, lats=lats)
area_def = utils.parse_area_file('region_config.cfg', AREA)[0]
result = kd_tree.resample_nearest(swath_def, pres, area_def,
                                  radius_of_influence=12000, epsilon=100,
                                  fill_value=None)
pr.plot.save_quicklook(PNGFILE % AREA, area_def, result,
                       label='IASI - Cloud Top Pressure', coast_res='l')