def main():
    failures = open(r'D:\failures.txt', 'w')
    # Get list of downloaded files
    file_list = os.listdir(data_dir)
    print('Roughly {} data samples found'.format(len(file_list) / 3))
    for file in file_list:
        # Only run the loop body for geolocation files
        if not file.startswith('VNP03'):
            continue
        # Skip files that were already processed
        if os.path.exists(os.path.join(pp_dir, file[:-3] + '.png')):
            continue
        # Get the matching band file
        geo_path = os.path.join(data_dir, file)
        subs = file[:4] + '2' + file[5:24]
        band_path = [i for i in file_list if subs in i]
        if len(band_path) != 1:
            print('failure {} {}'.format(file, band_path))
            break
        band_path = os.path.join(data_dir, band_path[0])
        # Get a comparable GOES file (within 4 minutes of the VIIRS time)
        v_time = datetime.strptime(file[10:22], '%Y%j.%H%M')
        g_subs = 'C07_G17_s{}'.format(file[10:17])
        g_files = [i for i in file_list if g_subs in i]
        goes_file = None
        for g_file in g_files:
            g_time = datetime.strptime(g_file[27:38], '%Y%j%H%M')
            tm_delta = v_time - g_time
            if abs(tm_delta.total_seconds()) < 4 * 60:
                goes_file = g_file
        if not goes_file:
            print('No GOES file for {}'.format(file))
            failures.write("No match found for {}\n".format(file))
            continue
        # Load SatPy Scenes
        viirs_files = [band_path, geo_path]
        goes_files = [os.path.join(data_dir, goes_file)]
        viirs_scene = Scene(reader=v_reader, filenames=viirs_files)
        goes_scene = Scene(reader=g_reader, filenames=goes_files)
        viirs_scene.load(['I04'])
        goes_scene.load(['C07'])
        # Resample and save PNGs
        print(file)
        rs = viirs_scene.resample(viirs_scene['I04'].attrs['area'], resampler='nearest')
        rs.save_dataset('I04', os.path.join(pp_dir, file[:-3] + '.png'))
        rs_g = goes_scene.resample(viirs_scene['I04'].attrs['area'], resampler='nearest')
        rs_g.save_dataset('C07', os.path.join(pp_dir, goes_file[:-3] + '.png'))
    failures.close()
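The matching above hinges on the year/day-of-year timestamps embedded in the VIIRS and GOES filenames. A minimal, hedged sketch of that pairing check (the example timestamps below are made up for illustration, not taken from real files):

from datetime import datetime

# VIIRS filenames carry 'YYYYDDD.HHMM', GOES start times carry 'YYYYDDDHHMM'.
v_time = datetime.strptime("2019123.0836", "%Y%j.%H%M")
g_time = datetime.strptime("20191230834", "%Y%j%H%M")
matched = abs((v_time - g_time).total_seconds()) < 4 * 60  # within 4 minutes
print(matched)  # True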
def process_pair(pair, image_dir: Path, curr_idx, len_pairs):
    """Pair is a list of two parsed filenames (see the function parse_filename below).

    Given these two files, use Scene to load the appropriate channels.
    Then save these original channels (.png and optionally .nc files).
    Then resample (colocate) to make the sensor channels match up.
    Then save these colocated channels.
    Crop the NaN edges, tag with meta information (which files were used as input),
    and finally save the numpy arrays (so we don't need to recompute next time)."""
    log.info(f'{rgb(255,0,0)}Processing{reset} timestep {bold}{curr_idx + 1}/{len_pairs}{reset}')
    dt = pair[0]["datetime"]
    log.info(f'Colocating {blue}{dt}{reset}')
    scn = Scene(reader='viirs_sdr', filenames=[f['path'] for f in pair])
    scn.load(all_channels + lat_long_both + lunar_data)
    # save_datasets(scn, 'ORIGINAL_', str(image_dir))
    log.info(f'Resampling {blue}{dt}{reset}')
    resample_scn = scn.resample(scn['DNB'].attrs['area'], resampler='nearest')
    log.info(f'Saving images {blue}{dt}{reset}')
    t = time.time()
    save_datasets(resample_scn, 'COLOCATED_', str(image_dir))
    log.debug(f'Saving images took {rgb(255,0,0)}{time.time() - t:.2f}{reset} seconds')
    log.info(f'Cropping nan edges of {blue}{dt}{reset}')
    t = time.time()
    data = crop.crop_nan_edges(resample_scn)
    log.debug(f'Cropping nan edges took {rgb(255,0,0)}{time.time() - t:.2f}{reset} seconds')
    data['channels'] = list(data)
    data['filenames'] = [f['filename'] for f in pair]
    data["datetime"] = dt
    return data
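A hedged sketch of the input `process_pair` appears to expect: each element of `pair` looks like a dict produced by `parse_filename` with at least 'path', 'filename', and 'datetime' keys. The key names and file names below are assumptions for illustration, not taken from the original code:

from datetime import datetime
from pathlib import Path

pair = [
    {"path": "/data/SVDNB_npp_d20190201_t1030_example.h5",   # hypothetical DNB band file
     "filename": "SVDNB_npp_d20190201_t1030_example.h5",
     "datetime": datetime(2019, 2, 1, 10, 30)},
    {"path": "/data/GDNBO_npp_d20190201_t1030_example.h5",   # hypothetical geolocation file
     "filename": "GDNBO_npp_d20190201_t1030_example.h5",
     "datetime": datetime(2019, 2, 1, 10, 30)},
]
# data = process_pair(pair, Path("./images"), curr_idx=0, len_pairs=1)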
def EDR2Geotiff(HDF, output_dir, areaid, radius):
    QF1_VIIRSCMEDR, QF2_VIIRSCMEDR, lon_data, lat_data = readhdfDatasets(HDF)
    HDF = ntpath.basename(HDF)  # get filename without path
    # https://pytroll.slack.com/archives/C06GJFRN0/p1545083373181100
    lon_data[QF1_VIIRSCMEDR == 0] = np.nan
    lat_data[QF1_VIIRSCMEDR == 0] = np.nan
    mask = maskByte(byte1=QF1_VIIRSCMEDR, byte2=QF2_VIIRSCMEDR)
    mask = mask.astype(np.uint8)
    # fill_value is a parameter of save_datasets(...); satpy sets 255 for pixels not included in the AOI.
    fill_value = 255
    swath_def = geometry.SwathDefinition(
        xr.DataArray(da.from_array(lon_data, chunks=4096), dims=('y', 'x')),
        xr.DataArray(da.from_array(lat_data, chunks=4096), dims=('y', 'x')))
    metadata_dict = {'name': 'mask', 'area': swath_def}
    # https://satpy.readthedocs.io/en/latest/dev_guide/xarray_migration.html#id1
    scn = Scene()
    scn['mask'] = xr.DataArray(
        da.from_array(mask, chunks=4096), attrs=metadata_dict, dims=('y', 'x'))
    scn.load(["mask"])
    proj_scn = scn.resample(areaSettings.getarea(areaid), radius_of_influence=radius)
    proj_scn.save_datasets(writer='geotiff', base_dir=output_dir,
                           file_pattern="{}.{}.{}".format(HDF, "{name}", "tif"),
                           enhancement_config=False, dtype=np.uint8,
                           fill_value=fill_value)
def get_extent_in_coordinates(extent, area_def, files, composite='realistic_colors'):
    """Return lists of lats, lons corresponding to the pixel points of the given extent.

    Parameters:
    """
    # TODO: review this logic
    from satpy.scene import Scene
    # import math
    # from pyresample.geometry import AreaDefinition, SwathDefinition, create_area_def
    # files = return_files(time, hrit_files)
    scn = Scene(filenames=files)
    scn.load([composite])
    new_scn = scn
    local_scn = scn.resample(area_def, radius_of_influence=50000)
    lons, lats = local_scn[composite].attrs['area'].get_lonlats()
    ext_lats, ext_lons = ([], [])
    for i in range(len(extent)):
        x = extent[i][0][1].astype(int)
        y = extent[i][0][0].astype(int)
        # x indexes the row, y indexes the column
        if 0 <= x < lats.shape[0] and 0 <= y < lats.shape[1]:
            ext_lats.append(lats[x][y])
            ext_lons.append(lons[x][y])
    return ext_lats, ext_lons
def test_1258(fake_open_dataset):
    """Save true_color from abi with radiance doesn't need two resamplings."""
    from satpy import Scene
    fake_open_dataset.side_effect = generate_fake_abi_xr_dataset
    scene = Scene(abi_file_list, reader='abi_l1b')
    scene.load(['true_color_nocorr', 'C04'], calibration='radiance')
    resampled_scene = scene.resample(scene.coarsest_area(), resampler='native')
    assert len(resampled_scene.keys()) == 2
def plot_sat_img(datetime, wndw):
    '''
    Plots the original satellite image at the given time.
    Arguments:
        - datetime: pandas Timestamp of the image time.
        - wndw: scalar with the training window length (for validation purposes).
    Output:
        - Saves the satellite image.
    '''
    dirs = create_instant_filenames(datetime, wndw)
    filename = glob(dirs + '*.nat')
    if not filename:
        # If filename is empty, skip this timestamp.
        print("This satellite image is missing.")
    else:
        global_scene = Scene(reader="seviri_l1b_native", filenames=filename)
        # Load the HRV channel:
        global_scene.load(['HRV'])
        # Resample:
        local_scene = global_scene.resample(
            "scan1", radius_of_influence=50e3, resampler='nearest',
            neighbours=16)  # alternative: resampler='bilinear', cache_dir=REFLECTANCE_DATA_PATH
        # Get the coordinate reference system from the satellite
        crs = local_scene['HRV'].attrs['area'].to_cartopy_crs()
        PROJ = local_scene['HRV'].coords  # coordinates of the reflectance xarray.DataArray
        crs_4326 = proj.Proj(init='epsg:4326')  # assuming WGS84 geographic coordinates
        crs_proj = proj.Proj(str(PROJ['crs'].values))
        x, y = proj.transform(crs_4326, crs_proj,
                              list(longitudes.values()), list(latitudes.values()))
        # x, y = proj.transform(crs_4326, crs_proj, 18.6101, 67.9123)
        # img = calc_cloudIndex(datetime)
        ax = plt.axes(projection=crs)
        ax.scatter(x, y, zorder=1, alpha=1, c='b', s=15)
        ax.coastlines(color='grey')
        # ax.gridlines()
        ax.set_global()
        plt.imshow(local_scene['HRV'], transform=crs, extent=crs.bounds,
                   origin='upper', cmap='gist_gray')
        cbar = plt.colorbar(pad=0.025, orientation="horizontal", fraction=0.046)
        # cbar.ax.tick_params(labelsize="small")
        cbar.set_label("Reflectance (%)")  # , size="small"
        plt.show()
class FogCompositorDay(FogCompositor):

    def __init__(self, path_dem, *args, **kwargs):
        self.elevation = Scene(reader="generic_image", filenames=[path_dem])
        self.elevation.load(["image"])
        return super().__init__(*args, **kwargs)

    def __call__(self, projectables, *args, **kwargs):
        (area, lat, lon) = self._get_area_lat_lon(projectables)
        # fogpy is still working with masked arrays and does not yet support
        # xarray / dask (see #6).  For now, convert to masked arrays.
        maskproj = self._convert_projectables(projectables)
        elev = self.elevation.resample(area)
        flsinput = {
            'vis006': maskproj[0],
            'vis008': maskproj[1],
            'ir108': maskproj[5],
            'nir016': maskproj[2],
            'ir039': maskproj[3],
            'ir120': maskproj[6],
            'ir087': maskproj[4],
            'lat': lat,
            'lon': lon,
            'time': projectables[0].start_time,
            'elev': numpy.ma.masked_invalid(
                elev["image"].sel(bands="L").values, copy=False),
            'cot': maskproj[7],
            'reff': maskproj[9],
            'lwp': maskproj[8],
            "cwp": maskproj[8],
        }
        # Compute fog mask
        flsalgo = DayFogLowStratusAlgorithm(**flsinput)
        fls, mask = flsalgo.run()
        (xrfls, xrmsk) = self._convert_to_xr(projectables, fls, mask)
        return super().__call__((xrfls, xrmsk), *args, **kwargs)
def test_1088(fake_open_dataset):
    """Check that copied arrays get resampled."""
    from satpy import Scene
    fake_open_dataset.side_effect = generate_fake_abi_xr_dataset
    scene = Scene(abi_file_list, reader='abi_l1b')
    scene.load(['C04'], calibration='radiance')
    my_id = make_dataid(name='my_name', wavelength=(10, 11, 12))
    scene[my_id] = scene['C04'].copy()
    resampled = scene.resample('eurol')
    assert resampled[my_id].shape == (2048, 2560)
def process_set(grouped_files, curr_idx, total_groups):
    """grouped_files maps sensor keys ('viirs', 'abi', 'reflectance') to parsed filenames (DNB, M-band, ABI, C-band).

    Given these files, use Scene to load the appropriate channels.
    Then resample (colocate) to make the channels match up.
    Then save these colocated channels.
    Crop the NaN edges, tag with meta information (which files were used as input),
    and finally save the numpy arrays (so we don't need to recompute next time)."""
    log.info(
        f'{rgb(255,0,0)}Processing{reset} timestep {bold}{curr_idx + 1}/{total_groups}{reset}'
    )
    dt = grouped_files['viirs'][0]["datetime"]
    viirsfiles = [f["path"] for f in grouped_files['viirs']]
    abifiles = [f["path"] for f in grouped_files['abi']]
    master_scene = Scene(filenames={
        'viirs_sdr': viirsfiles,
        'abi_l1b': abifiles
    })
    master_scene.load(VIIRS_channels + ABI_channels + lat_long_both)
    # load and pair the reflectance
    reflectfile = grouped_files['reflectance']['path']
    Reflectance = xarray.open_dataset(reflectfile)
    swath_def = SwathDefinition(Reflectance['longitude'], Reflectance['latitude'])
    sm_refl = Reflectance['SM_Reflectance']
    sm_refl.attrs['area'] = swath_def
    # bring the reflectance back into the satpy Scene
    master_scene['SM_Reflectance'] = sm_refl
    resample_scn = master_scene.resample(master_scene['DNB'].attrs['area'],
                                         resampler='nearest')
    log.info(f'Cropping nan edges of {blue}{dt}{reset}')
    t = time.time()
    data = crop.crop_nan_edges(resample_scn, crop_channels, all_channels)
    log.debug(
        f'Cropping nan edges took {rgb(255,0,0)}{time.time() - t:.2f}{reset} seconds'
    )
    data['channels'] = list(data)
    data['filenames'] = viirsfiles + abifiles + [reflectfile]
    data["datetime"] = dt
    return data
def process_trio(trio, curr_idx, len_trio):
    """trio is a list of three parsed filenames (see the function parse_filename below).

    Given these three files, use Scene to load the appropriate channels.
    Then resample (colocate) to make the channels match up.
    Then save these colocated channels.
    Crop the NaN edges, tag with meta information (which files were used as input),
    and finally save the numpy arrays (so we don't need to recompute next time)."""
    dt = trio[0]["datetime"]
    log.info(
        f'{rgb(255,0,0)}Processing{reset} timestep {bold}{curr_idx + 1}/{len_trio}{reset} {blue}{dt}{reset} '
    )
    # load the sat data
    scn = Scene(
        reader='viirs_sdr',
        filenames=[f['path'] for f in trio if f['filename'].endswith(".h5")])
    scn.load(viirs_channels + lat_long_both)
    # load and pair the reflectance
    Reflectance = xarray.open_dataset(find_ncfile(trio)['path'])
    swath_def = SwathDefinition(Reflectance['longitude'], Reflectance['latitude'])
    sm_refl = Reflectance['SM_Reflectance']
    sm_refl.attrs['area'] = swath_def
    # bring the reflectance back into the satpy Scene
    scn['SM_reflectance'] = sm_refl
    log.info(f'Resampling {blue}{dt}{reset}')
    resample_scn = scn.resample(scn['DNB'].attrs['area'], resampler='nearest')
    log.info(f'Cropping nan edges of {blue}{dt}{reset}')
    t = time.time()
    data = crop.crop_nan_edges(resample_scn, all_channels)
    log.debug(
        f'Cropping nan edges took {rgb(255,0,0)}{time.time() - t:.2f}{reset} seconds'
    )
    data['channels'] = list(data)
    data['filenames'] = [f['filename'] for f in trio]
    data["datetime"] = dt
    return data
def colorplot_with_band(self, band, HSD_Dir, imgFile, *args,
                        axLatRange=[20, 60], axLonRange=[90, 140],
                        cmap=None, pixels=100, **kwargs):
    """
    Colorplot the variables together with radiance data.

    Parameters
    ----------
    band: int
        band number [1-16]. See band specification in `../doc/2018_A_Yamashita.md`
    HSD_Dir: str
        path hosting the HSD files.
    imgFile: str
        filename of the exported image.

    Keywords
    --------
    axLatRange: list
        latitude range of the plot (default: [20, 60]). [degree]
    axLonRange: list
        longitude range of the plot (default: [90, 140]). [degree]
    cmap: str
        colormap name.
    pixels: int
        resampled pixels of the band data (default: 100).
        Take care of time consumption when pixels > 1000!

    History
    -------
    2020-02-24 First version.
    """
    files = find_files_and_readers(
        start_time=(self.mTime - dt.timedelta(seconds=300)),
        end_time=(self.mTime + dt.timedelta(seconds=300)),
        base_dir=HSD_Dir,
        reader='ahi_hsd')

    matched_files = []
    for file in files['ahi_hsd']:
        if fnmatch.fnmatch(os.path.basename(file),
                           'HS_H08_*_B{0:02d}_FLDK_*_S0[1234]*DAT*'.format(band)):
            matched_files.append(file)

    h8_scene = Scene(filenames=matched_files, reader='ahi_hsd', sensor='ahi')
    band_label = 'B{0:02d}'.format(band)
    h8_scene.load([band_label])

    roi = create_area_def('roi',
                          {'proj': 'eqc', 'ellps': 'WGS84'},
                          width=pixels, height=pixels,
                          area_extent=[axLonRange[0], axLatRange[0],
                                       axLonRange[1], axLatRange[1]],
                          units='degrees')
    roi_scene = h8_scene.resample(roi)

    # read China boundaries
    with open(os.path.join(PROJECTDIR, 'include', 'CN-border-La.dat'), 'r') as fd:
        context = fd.read()
        blocks = [cnt for cnt in context.split('>') if len(cnt) > 0]
        borders = [np.fromstring(block, dtype=float, sep=' ') for block in blocks]

    LON, LAT = np.meshgrid(self.lon, self.lat)

    fig = plt.figure(figsize=[8, 8])
    plt.tight_layout(False)

    # Set projection and plot the main figure
    ax1 = plt.axes([0.1, 0.1, 0.8, 0.8], projection=ccrs.PlateCarree())
    # Add ocean, land, rivers and lakes
    ax1.add_feature(cfeature.OCEAN.with_scale('50m'))
    ax1.add_feature(cfeature.LAND.with_scale('50m'))
    ax1.add_feature(cfeature.RIVERS.with_scale('50m'))
    ax1.add_feature(cfeature.LAKES.with_scale('50m'))
    # Plot border lines
    for line in borders:
        ax1.plot(line[0::2], line[1::2], '-', lw=1, color='k',
                 transform=ccrs.Geodetic())

    # loading colormap
    if cmap is None:
        cmap = chiljet_colormap()

    # Plot gridlines
    crs = roi.to_cartopy_crs()
    pcmesh_band = ax1.imshow(roi_scene[band_label], transform=crs,
                             origin='upper', extent=crs.bounds, cmap='Greys')
    pcmesh = ax1.pcolormesh(LON, LAT, self.data,
                            vmin=kwargs['vmin'], vmax=kwargs['vmax'],
                            cmap=cmap, transform=ccrs.PlateCarree())

    ax1.set_xticks(np.linspace(axLonRange[0], axLonRange[1], 5, endpoint=True))
    ax1.set_yticks(np.linspace(axLatRange[0], axLatRange[1], 5, endpoint=True))
    lon_formatter = LongitudeFormatter(number_format='.1f',
                                       degree_symbol='',
                                       dateline_direction_label=True)
    lat_formatter = LatitudeFormatter(number_format='.1f', degree_symbol='')
    ax1.xaxis.set_major_formatter(lon_formatter)
    ax1.yaxis.set_major_formatter(lat_formatter)
    ax1.set_ylim(axLatRange)
    ax1.set_xlim(axLonRange)

    ax1.set_title('{0} {1}'.format(
        self.mTime.strftime('%Y-%m-%d %H:%M (Himawari-8)'), self.long_name))

    if 'cb_ticks' not in kwargs.keys():
        kwargs['cb_ticks'] = np.linspace(kwargs['vmin'], kwargs['vmax'], 5,
                                         endpoint=True)

    cbar = fig.colorbar(pcmesh, fraction=0.03, ticks=kwargs['cb_ticks'],
                        orientation='vertical')
    cbar.ax.tick_params(direction='out', labelsize=15, pad=5)
    cbar.ax.set_title(self.unit, fontsize=10)

    if 'cb_ticklabels' in kwargs.keys():
        cbar.ax.set_yticklabels(kwargs['cb_ticklabels'])

    # Show figure
    # plt.show()
    plt.savefig(imgFile)
    plt.close()
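A hedged usage sketch for the method above. The object name `h8_product` and its attributes (`mTime`, `lon`, `lat`, `data`, `unit`, `long_name`) are assumptions inferred from how `self` is used; the paths and value range are illustrative only:

# Hypothetical call; whatever class defines colorplot_with_band must provide
# self.mTime, self.lon, self.lat, self.data, self.unit and self.long_name.
h8_product.colorplot_with_band(
    13,                       # Himawari-8 band 13
    '/data/himawari8/HSD/',   # directory holding the HSD segment files
    'band13_overlay.png',     # output image file
    axLatRange=[20, 60], axLonRange=[90, 140],
    pixels=500, vmin=0.0, vmax=1.0)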
bands = [
    'M01', 'M02', 'M03', 'M04', 'M05', 'M06', 'M07', 'M08', 'M09', 'M10',
    'M11', 'M12', 'M13', 'M14', 'M15', 'M16'
]

# Reproject dust composite and visualize
# load VNP02 and VNP03 files together
scn = Scene(filenames=[viirs_folder + fn1, viirs_folder + fn2],
            reader='viirs_l1b')
scn.load(available_bands + ['dust'])
minlon, maxlon, minlat, maxlat = cal_lon.min(), cal_lon.max(), cal_lat.min(), cal_lat.max()
# width/height must be integer pixel counts (~0.0075 degrees per pixel)
dst_area = AreaDefinition('crop_area', 'crop_area', 'crop_latlong',
                          {'proj': 'latlong'},
                          int((maxlon - minlon) / 0.0075),
                          int((maxlat - minlat) / 0.0075),
                          [minlon, minlat, maxlon, maxlat])
local_scn = scn.resample(dst_area)
local_scn.show('dust')
local_scn.save_dataset('dust', test_plot_folder + grandule_dt + '_dust.png')

try:
    scn.load(['true_color_raw'])
    local_scn = scn.resample(dst_area)
    local_scn.show('true_color_raw')
    local_scn.save_dataset('true_color_raw',
                           test_plot_folder + grandule_dt + '_true_color.png')
except Exception:
    print('no true color')

# retrieve 16 bands
if path.exists(test_plot_folder + grandule_dt + '_predictors.npy'):
    predictors = np.load(test_plot_folder + grandule_dt + '_predictors.npy',
# 'start_time', 'end_time', 'area', 'name', 'resolution', 'calibration', 'polarization', 'level', 'modifiers',
# 'ancillary_variables'])
# print(global_scene['VIS006'].attrs["area"])           ## this is an area definition
# print(global_scene['VIS006'].attrs["area"].proj_str)  ## '+a=6378169 +b=6356583.8 +h=35785831 +lon_0=9.5 +no_defs +proj=geos +type=crs +units=m +x_0=0 +y_0=0'

global_scene["ndvi"] = (global_scene[0.8] - global_scene[0.6]) / (
    global_scene[0.8] + global_scene[0.6])

# from satpy import DatasetID
# my_channel_id = DatasetID(name='IR_016', calibration='radiance')
# global_scene.load([my_channel_id])
# print(scn['IR_016'])

# local_scene = global_scene.resample("eurol")
local_scene = global_scene.resample("EuropeCanaryS95")

# BUG: ndvi is not resampled from the global scene
local_scene["ndvi"] = (local_scene[0.8] - local_scene[0.6]) / (
    local_scene[0.8] + local_scene[0.6])

print(global_scene.available_composite_names())

# local_scene.show('overview')
print("display " + './local_overview.png')
local_scene.save_dataset('overview', './local_overview.png')
print("display " + './local_ndvi.png')
local_scene.save_dataset('ndvi', './local_ndvi.png')

plot_nwc = True
if plot_nwc:
    files_nwc = find_files_and_readers(
def main(argv=sys.argv[1:]):
    global LOG
    from satpy import Scene
    from satpy.resample import get_area_def
    from satpy.writers import compute_writer_results
    from dask.diagnostics import ProgressBar
    from polar2grid.core.script_utils import (
        setup_logging, rename_log_file, create_exc_handler)
    import argparse

    prog = os.getenv('PROG_NAME', sys.argv[0])
    # "usage: " will be printed at the top of this:
    usage = """
    %(prog)s -h
see available products:
    %(prog)s -r <reader> -w <writer> --list-products -f file1 [file2 ...]
basic processing:
    %(prog)s -r <reader> -w <writer> [options] -f file1 [file2 ...]
basic processing with limited products:
    %(prog)s -r <reader> -w <writer> [options] -p prod1 prod2 -f file1 [file2 ...]
"""
    parser = argparse.ArgumentParser(prog=prog, usage=usage,
                                     description="Load, composite, resample, and save datasets.")
    parser.add_argument('-v', '--verbose', dest='verbosity', action="count", default=0,
                        help='each occurrence increases verbosity 1 level through ERROR-WARNING-INFO-DEBUG (default INFO)')
    parser.add_argument('-l', '--log', dest="log_fn", default=None,
                        help="specify the log filename")
    parser.add_argument('--progress', action='store_true',
                        help="show processing progress bar (not recommended for logged output)")
    parser.add_argument('--num-workers', type=int, default=4,
                        help="specify number of worker threads to use (default: 4)")
    parser.add_argument('--match-resolution', dest='preserve_resolution', action='store_false',
                        help="When using the 'native' resampler for composites, don't save data "
                             "at its native resolution, use the resolution used to create the "
                             "composite.")
    parser.add_argument('-w', '--writers', nargs='+',
                        help='writers to save datasets with')
    parser.add_argument("--list-products", dest="list_products", action="store_true",
                        help="List available reader products and exit")
    subgroups = add_scene_argument_groups(parser)
    subgroups += add_resample_argument_groups(parser)

    argv_without_help = [x for x in argv if x not in ["-h", "--help"]]
    args, remaining_args = parser.parse_known_args(argv_without_help)

    # get the logger if we know the readers and writers that will be used
    if args.reader is not None and args.writers is not None:
        glue_name = args.reader + "_" + "-".join(args.writers or [])
        LOG = logging.getLogger(glue_name)
    # add writer arguments
    if args.writers is not None:
        for writer in (args.writers or []):
            parser_func = WRITER_PARSER_FUNCTIONS.get(writer)
            if parser_func is None:
                continue
            subgroups += parser_func(parser)
    args = parser.parse_args(argv)

    if args.reader is None:
        parser.print_usage()
        parser.exit(1, "\nERROR: Reader must be provided (-r flag).\n"
                       "Supported readers:\n\t{}\n".format('\n\t'.join(['abi_l1b', 'ahi_hsd', 'hrit_ahi'])))
    if args.writers is None:
        parser.print_usage()
        parser.exit(1, "\nERROR: Writer must be provided (-w flag) with one or more writer.\n"
                       "Supported writers:\n\t{}\n".format('\n\t'.join(['geotiff'])))

    def _args_to_dict(group_actions):
        return {ga.dest: getattr(args, ga.dest)
                for ga in group_actions if hasattr(args, ga.dest)}
    scene_args = _args_to_dict(subgroups[0]._group_actions)
    load_args = _args_to_dict(subgroups[1]._group_actions)
    resample_args = _args_to_dict(subgroups[2]._group_actions)
    writer_args = {}
    for idx, writer in enumerate(args.writers):
        sgrp1, sgrp2 = subgroups[3 + idx * 2: 5 + idx * 2]
        wargs = _args_to_dict(sgrp1._group_actions)
        if sgrp2 is not None:
            wargs.update(_args_to_dict(sgrp2._group_actions))
        writer_args[writer] = wargs
        # get default output filename
        if 'filename' in wargs and wargs['filename'] is None:
            wargs['filename'] = get_default_output_filename(args.reader, writer)

    if not args.filenames:
        parser.print_usage()
        parser.exit(1, "\nERROR: No data files provided (-f flag)\n")

    # Prepare logging
    rename_log = False
    if args.log_fn is None:
        rename_log = True
        args.log_fn = glue_name + "_fail.log"
    levels = [logging.ERROR, logging.WARN, logging.INFO, logging.DEBUG]
    setup_logging(console_level=levels[min(3, args.verbosity)], log_filename=args.log_fn)
    logging.getLogger('rasterio').setLevel(levels[min(2, args.verbosity)])
    sys.excepthook = create_exc_handler(LOG.name)
    if levels[min(3, args.verbosity)] > logging.DEBUG:
        import warnings
        warnings.filterwarnings("ignore")
    LOG.debug("Starting script with arguments: %s", " ".join(sys.argv))

    # Set up dask and the number of workers
    if args.num_workers:
        from multiprocessing.pool import ThreadPool
        dask.config.set(pool=ThreadPool(args.num_workers))

    # Parse provided files and search for files if provided directories
    scene_args['filenames'] = get_input_files(scene_args['filenames'])

    # Create a Scene, analyze the provided files
    LOG.info("Sorting and reading input files...")
    try:
        scn = Scene(**scene_args)
    except ValueError as e:
        LOG.error("{} | Enable debug message (-vvv) or see log file for details.".format(str(e)))
        LOG.debug("Further error information: ", exc_info=True)
        return -1
    except OSError:
        LOG.error("Could not open files. Enable debug message (-vvv) or see log file for details.")
        LOG.debug("Further error information: ", exc_info=True)
        return -1

    if args.list_products:
        print("\n".join(sorted(scn.available_dataset_names(composites=True))))
        return 0

    # Rename the log file
    if rename_log:
        rename_log_file(glue_name + scn.attrs['start_time'].strftime("_%Y%m%d_%H%M%S.log"))

    # Load the actual data arrays and metadata (lazy loaded as dask arrays)
    if load_args['products'] is None:
        try:
            reader_mod = importlib.import_module('polar2grid.readers.' + scene_args['reader'])
            load_args['products'] = reader_mod.DEFAULT_PRODUCTS
            LOG.info("Using default product list: {}".format(load_args['products']))
        except (ImportError, AttributeError):
            LOG.error("No default products list set, please specify with `--products`.")
            return -1
    LOG.info("Loading product metadata from files...")
    scn.load(load_args['products'])

    resample_kwargs = resample_args.copy()
    areas_to_resample = resample_kwargs.pop('grids')
    grid_configs = resample_kwargs.pop('grid_configs')
    resampler = resample_kwargs.pop('resampler')

    if areas_to_resample is None and resampler in [None, 'native']:
        # no areas specified
        areas_to_resample = ['MAX']
    elif areas_to_resample is None:
        raise ValueError("Resampling method specified (--method) without any destination grid/area (-g flag).")
    elif not areas_to_resample:
        # they don't want any resampling (they used '-g' with no args)
        areas_to_resample = [None]
    has_custom_grid = any(g not in ['MIN', 'MAX', None] for g in areas_to_resample)
    if has_custom_grid and resampler == 'native':
        LOG.error("Resampling method 'native' can only be used with 'MIN' or 'MAX' grids "
                  "(use 'nearest' method instead).")
        return -1

    p2g_grid_configs = [x for x in grid_configs if x.endswith('.conf')]
    pyresample_area_configs = [x for x in grid_configs if not x.endswith('.conf')]
    if not grid_configs or p2g_grid_configs:
        # if we were given p2g grid configs or we weren't given any to choose from
        from polar2grid.grids import GridManager
        grid_manager = GridManager(*p2g_grid_configs)
    else:
        grid_manager = {}
    if pyresample_area_configs:
        from pyresample.utils import parse_area_file
        custom_areas = parse_area_file(pyresample_area_configs)
        custom_areas = {x.area_id: x for x in custom_areas}
    else:
        custom_areas = {}

    ll_bbox = resample_kwargs.pop('ll_bbox')
    if ll_bbox:
        scn = scn.crop(ll_bbox=ll_bbox)

    wishlist = scn.wishlist.copy()
    preserve_resolution = get_preserve_resolution(args, resampler, areas_to_resample)
    if preserve_resolution:
        preserved_products = set(wishlist) & set(scn.datasets.keys())
        resampled_products = set(wishlist) - preserved_products
        # original native scene
        to_save = write_scene(scn, args.writers, writer_args, preserved_products)
    else:
        preserved_products = set()
        resampled_products = set(wishlist)
        to_save = []
    LOG.debug("Products to preserve resolution for: {}".format(preserved_products))
    LOG.debug("Products to use new resolution for: {}".format(resampled_products))

    for area_name in areas_to_resample:
        if area_name is None:
            # no resampling
            area_def = None
        elif area_name == 'MAX':
            area_def = scn.max_area()
        elif area_name == 'MIN':
            area_def = scn.min_area()
        elif area_name in custom_areas:
            area_def = custom_areas[area_name]
        elif area_name in grid_manager:
            from pyresample.geometry import DynamicAreaDefinition
            p2g_def = grid_manager[area_name]
            area_def = p2g_def.to_satpy_area()
            if isinstance(area_def, DynamicAreaDefinition) and p2g_def['cell_width'] is not None:
                area_def = area_def.freeze(scn.max_area(),
                                           resolution=(abs(p2g_def['cell_width']),
                                                       abs(p2g_def['cell_height'])))
        else:
            area_def = get_area_def(area_name)

        if resampler is None and area_def is not None:
            rs = 'native' if area_name in ['MIN', 'MAX'] else 'nearest'
            LOG.debug("Setting default resampling to '{}' for grid '{}'".format(rs, area_name))
        else:
            rs = resampler

        if area_def is not None:
            LOG.info("Resampling data to '%s'", area_name)
            new_scn = scn.resample(area_def, resampler=rs, **resample_kwargs)
        elif not preserve_resolution:
            # the user didn't want to resample to any areas
            # the user also requested that we don't preserve resolution
            # which means we have to save this Scene's datasets
            # because they won't be saved
            new_scn = scn

        to_save = write_scene(new_scn, args.writers, writer_args,
                              resampled_products, to_save=to_save)

    if args.progress:
        pbar = ProgressBar()
        pbar.register()

    LOG.info("Computing products and saving data to writers...")
    compute_writer_results(to_save)
    LOG.info("SUCCESS")
    return 0
import os
from satpy import Scene
from datetime import datetime
from satpy.utils import debug_on
import pyninjotiff
from glob import glob
from pyresample.utils import load_area
import copy

debug_on()

chn = "IR_108"
ninjoRegion = load_area("areas.def", "nrEURO3km")

filenames = glob("data/*__")
global_scene = Scene(reader="hrit_msg", filenames=filenames)
global_scene.load([chn])
local_scene = global_scene.resample(ninjoRegion)
local_scene.save_dataset(chn, filename="msg.tif", writer='ninjotiff',
                         # ninjo product name to look for in .cfg file
                         ninjo_product_name="IR_108",
                         # custom configuration file for ninjo tiff products;
                         # if not specified PPP_CONFIG_DIR is used as config file directory
                         ninjo_product_file="/config_dir/ninjotiff_products.cfg")
    end_time=datetime(2021, 7, 25, 11, 25),
    base_dir=sen3_data_l2,
    reader='olci_l2',
    sensor='olci',
)

"""Create Scene object"""
scn = Scene(filenames=filenames)

"""Load selected datasets
Available OLCI Level 2 datasets are:
'chl_nn.nc', 'chl_oc4me.nc',
'iop_nn.nc', 'iwv.nc', 'par.nc', 'trsp.nc', 'tsm_nn.nc', 'w_aer.nc',
'Oa01_reflectance.nc', 'Oa02_reflectance.nc', 'Oa03_reflectance.nc', 'Oa04_reflectance.nc',
'Oa05_reflectance.nc', 'Oa06_reflectance.nc', 'Oa07_reflectance.nc', 'Oa08_reflectance.nc',
'Oa09_reflectance.nc', 'Oa10_reflectance.nc', 'Oa11_reflectance.nc', 'Oa12_reflectance.nc',
'Oa16_reflectance.nc', 'Oa17_reflectance.nc', 'Oa18_reflectance.nc', 'Oa21_reflectance.nc',
'wqsf.nc'
"""
datasets = ['Oa08', 'Oa06', 'chl_nn', 'chl_oc4me', 'mask']
scn.load(datasets)

my_area = load_area(
    os.path.join(settings.base_directory, 'etc/areas/local_areas.yaml'),
    'baws300_sweref99tm')
scn = scn.resample(my_area, radius_of_influence=800)

"""Chlorophyll data are stored as logarithmic values. Convert to real values:"""
scn['chl_nn'] = np.power(10, scn['chl_nn'])

plt.imshow(scn['chl_nn'])
plt.colorbar()
plt.clim(0, 10)
def show_sat_perspective(hrit_files, central_lat, central_lon, elevation, time,
                         dpi, save_path, fov, composite=None):
    """Shows, in a Jupyter Notebook, the pictures as seen from the satellite.

    Parameters: array of files saved on disc.
    :param save_path:
    :param composite:
    :param dpi:
    :param time:
    :param elevation:
    :param central_lon:
    :param central_lat:
    :param hrit_files:
    """
    # TODO: Add local earth radius
    if composite is None:
        composite = 'realistic_colors'
    import datetime as dt
    from satpy.scene import Scene
    from satpy.resample import get_area_def
    from datetime import datetime
    import matplotlib.pyplot as plt
    import cartopy.crs as ccrs
    import cartopy
    import cartopy.feature as cfeature
    from skyfield.api import Topos, load
    import numpy as np
    from astropy import units as u
    from astropy.coordinates import Angle
    # %matplotlib inline
    import matplotlib.pyplot as plt
    from mpl_toolkits.basemap import Basemap
    from pyresample.geometry import AreaDefinition, create_area_def

    area_defs = []
    for i in range(0, len(central_lon)):
        area_id = 'ease_sh'
        center = (central_lat[i], central_lon[i])
        radius = satellite_info(6371228, elevation[i], fov[0], fov[1])[5]
        resolution = 2500
        proj_string = ('+proj=laea +lat_0=' + np.array2string(central_lat[i]) +
                       ' +lon_0=' + np.array2string(central_lon[i]) +
                       ' +a=6371228.0 +units=m')
        area_defs.append(
            create_area_def(area_id, proj_string, center=center,
                            radius=radius, resolution=resolution))

    files = return_files(time, hrit_files)
    scn = Scene(filenames=files)
    scn.load([composite])
    new_scn = scn
    for i, area_def in enumerate(area_defs, start=0):
        local_scn = scn.resample(area_def, radius_of_influence=50000)
        local_scn.show(composite)
        path = save_path + composite + '_' + str(i) + '.png'
        local_scn.save_dataset(composite, path, writer='simple_image', num_threads=8)

    if save_path:
        if isinstance(load_photo[0], float):
            photo_type = str(load_photo[0])
        else:
            photo_type = load_photo[0]
        name = photo_path + photo_type + pro_name + '_{date:%Y-%m-%d_%H_%M_%S}.png'.format(
            date=scn.start_time)
        plt.savefig(name, dpi=dpi)
    if not save_path:
        plt.show()
    return ()
swath_files = glob.glob('SVDNB_npp_d*.h5')
txt = open(os.path.join(BASEDIR, "errors.txt"), "a")

if not os.path.exists(OUTPUT_DIR):
    os.makedirs(OUTPUT_DIR)

for file in swath_files:
    try:
        scene = Scene(filenames=[file], reader='viirs_sdr')
        # Alternative: scene.load([0.7]). You can also load other composites like
        # "dynamic_dnb", "adaptive_dnb", "histogram_dnb", "hncc_dnb".
        # Check scene.available_composite_names().
        scene.load(["DNB"])
        print(scene)
        proj_scn = scene.resample(area_def)
        proj_scn.save_datasets(
            writer='geotiff',
            base_dir=OUTPUT_DIR,
            file_pattern='{name}_{start_time:%Y%m%d_%H%M%S}_{end_time:%Y%m%d_%H%M%S}_so{start_orbit}_eo{end_orbit}_epsg2100.tif',
            enhancement_config=False,
            dtype=np.float32)
    except Exception as e:
        msg = "File:{},Error:{}\n".format(file, e)
        txt.write(msg)

txt.close()
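The comment above points to `scene.available_composite_names()`. A hedged, minimal way to inspect what the loaded SDR files actually offer before deciding what to load (file names are illustrative; the DNB composites generally also need the matching GDNBO geolocation file):

from satpy import Scene

scene = Scene(filenames=['SVDNB_npp_d20181108_t2131_example.h5',
                         'GDNBO_npp_d20181108_t2131_example.h5'],
              reader='viirs_sdr')
print(scene.available_dataset_names())    # raw datasets, e.g. 'DNB'
print(scene.available_composite_names())  # e.g. 'dynamic_dnb', 'adaptive_dnb', 'hncc_dnb'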
Author(s): Daniel Hueholt @dhueholt GitHub
"""
from glob import glob
import matplotlib.pyplot as plt
from satpy import Scene
import cartopy.crs as ccrs
import pdb

FILENAMES = glob('/Users/dhueholt/Documents/Data/Sips/CLDMSK*.nc')
SCN = Scene(reader='viirs_l2', filenames=FILENAMES)
SCN.load(['geophysical_data/Integer_Cloud_Mask'])
MY_AREA = SCN['geophysical_data/Integer_Cloud_Mask'].attrs['area'].compute_optimal_bb_area(
    {'proj': 'lcc', 'lon_0': -96., 'lat_0': 39., 'lat_1': 25., 'lat_2': 25.})
NEW_SCN = SCN.resample(MY_AREA)
NEW_SCN.save_dataset('geophysical_data/Integer_Cloud_Mask',
                     '/Users/dhueholt/Documents/Hollings_2019/Images/l2/icm.tif')

CRS = NEW_SCN['geophysical_data/Integer_Cloud_Mask'].attrs['area'].to_cartopy_crs()
lambert_proj = ccrs.LambertConformal()
AX = plt.axes(projection=CRS)
AX.coastlines()
AX.gridlines()
AX.set_global()
plt.imshow(NEW_SCN['geophysical_data/Integer_Cloud_Mask'],
           transform=CRS, extent=CRS.bounds, origin='upper')
CBAR = plt.colorbar()
CBAR.set_label('Integer Cloud Mask')
plt.clim(-1, 3)
plt.savefig('/Users/dhueholt/Documents/Hollings_2019/Images/l2/integer_cloud_mask_multiple_test.png')
filenames = glob('/Users/dhueholt/Documents/Data/test/*20190201*.h5')
# print(filenames)
# t.sleep(2)
# print("Continuing")
scn = Scene(reader='viirs_sdr', filenames=filenames)
scn.load(['I04'])
my_area = scn['I04'].attrs['area'].compute_optimal_bb_area({
    'proj': 'lcc',
    'lon_0': -95.,
    'lat_0': 25.,
    'lat_1': 25.,
    'lat_2': 25.
})
new_scn = scn.resample(my_area)

crs = new_scn['I04'].attrs['area'].to_cartopy_crs()
lambert_proj = ccrs.LambertConformal()
ax = plt.axes(projection=lambert_proj)
# ax = plt.axes(projection=crs)
ax.coastlines()
ax.gridlines()
ax.set_global()
plt.imshow(new_scn['I04'], transform=crs, extent=crs.bounds, origin='upper')
cbar = plt.colorbar()
cbar.set_label("Kelvin")
# plt.show()
plt.savefig('/Users/dhueholt/Documents/Hollings_2019/I04_test.png')
#!/usr/bin/env python

import sys

from satpy import Scene

filename = sys.argv[1]
print(type(filename))
global_scene = Scene(reader='satpy_cf_nc', filenames=[filename])
products = ['overview']
global_scene.load(products)
local_scene = global_scene.resample('arctic_europe_9km')
local_scene.show(products[0])
# , overlay={'coast_dir': '/home/remotesensing/',
#            'level_coast': [1, 6],
#            'color': (255, 255, 255)})
def show_sat_perspective3(hrit_files, central_lat, central_lon, elevation, time,
                          save_path, fov, shape, proj, projection_parameters,
                          composite=None, fov_deg=True):
    """Shows, in a Jupyter Notebook, the pictures as seen from the satellite.

    Parameters: array of files saved on disc.
    :param save_path:
    :param composite:
    :param time:
    :param elevation:
    :param central_lon:
    :param central_lat:
    :param hrit_files:
    """
    # TODO: Add local earth radius
    from satpy.scene import Scene
    import math
    from pyresample.geometry import AreaDefinition, SwathDefinition, create_area_def
    from pyresample import create_area_def

    if composite is None:
        composite = 'realistic_colors'
    if fov_deg == True:
        fov = [fov[0] * math.pi / 180, fov[1] * math.pi / 180]
    # lla = mat.find_sourounding_list(earth_rads, latitudes, longitudes, elevations, fov)

    area_defs = []
    for i in range(0, len(central_lon)):
        altitude = elevation[i]
        rad = satellite_info(6371228, elevation[i], fov[0], fov[1])[5] / 2
        lat_0, lon_0 = central_lat[i], central_lon[i]
        lat_1, lat_2 = central_lat[i], central_lon[i]
        center = (central_lat[i], central_lon[i])
        radius = (rad, rad)
        area_id = 'wrf_circle'
        proj_dict = {'proj': proj, 'lat_0': lat_0, 'lon_0': lon_0,
                     'lat_1': lat_1, 'lat_2': lat_2,
                     'a': 6370000, 'b': 6370000, 'h': altitude,
                     'azi': projection_parameters[0], 'tilt': projection_parameters[1]}
        area_defs.append(
            AreaDefinition.from_circle(area_id, proj_dict, center,
                                       radius=radius, shape=shape))
        # area_defs.append(AreaDefinition.create_area_def(area_id, proj_dict, center, radius=radius, shape=shape))

    files = return_files(time, hrit_files)
    scn = Scene(filenames=files)
    scn.load([composite])
    new_scn = scn
    for i, area_def in enumerate(area_defs, start=0):
        local_scn = scn.resample(area_def, radius_of_influence=50000)
        local_scn.show(composite)
        path = (save_path + proj + str(projection_parameters[0]) + '_' +
                str(projection_parameters[1]) + '_' + str(shape[0]) + '_' +
                str(composite) + '_' + '_{date:%Y-%m-%d_%H_%M_%S}'.format(
                    date=scn.start_time) + '/' + str(i) + '.png')
        local_scn.save_dataset(composite, path, writer='simple_image', num_threads=8)

    # if save_path:
    #     if isinstance(load_photo[0], float):
    #         photo_type = str(load_photo[0])
    #     else:
    #         photo_type = load_photo[0]
    #     name = photo_path + photo_type + pro_name + '_{date:%Y-%m-%d_%H_%M_%S}.png'.format(date=scn.start_time)
    #     plt.savefig(name, dpi=dpi)
    # if not save_path:
    #     plt.show()
    return ()
def show_sat_perspective2(hrit_files, central_lat, central_lon, elevation, time,
                          save_path, fov, shape, composite=None, fov_deg=True):
    """Shows, in a Jupyter Notebook, the pictures as seen from the satellite.

    Parameters: array of files saved on disc.
    :param save_path:
    :param composite:
    :param time:
    :param elevation:
    :param central_lon:
    :param central_lat:
    :param hrit_files:
    """
    # TODO: Add local earth radius
    import datetime as dt
    from satpy.scene import Scene
    import math
    from satpy.resample import get_area_def
    from datetime import datetime
    import matplotlib.pyplot as plt
    import cartopy.crs as ccrs
    import cartopy
    import cartopy.feature as cfeature
    from skyfield.api import Topos, load
    import pyproj
    import numpy as np
    from astropy import units as u
    from astropy.coordinates import Angle
    # %matplotlib inline
    import matplotlib.pyplot as plt
    from pyresample.geometry import AreaDefinition, SwathDefinition, create_area_def

    if composite is None:
        composite = 'realistic_colors'
    if fov_deg == True:
        fov = [fov[0] * math.pi / 180, fov[1] * math.pi / 180]

    area_defs = []
    for i in range(0, len(central_lon)):
        area_id = 'ease_sh'
        rad = satellite_info(6371228, elevation[i], fov[0], fov[1])[5] / 2
        lat_0, lon_0 = central_lat[i], central_lon[i]
        lat_1, lat_2 = central_lat[i], central_lon[i]
        center = (central_lat[i], central_lon[i])
        radius = (rad, rad)
        area_id = 'wrf_circle'
        proj_dict = {'proj': 'lcc', 'lat_0': lat_0, 'lon_0': lon_0,
                     'lat_1': lat_1, 'lat_2': lat_2,
                     'a': 6370000, 'b': 6370000}
        area_defs.append(
            AreaDefinition.from_circle(area_id, proj_dict, center, radius, shape=shape))

    files = return_files(time, hrit_files)
    scn = Scene(filenames=files)
    scn.load([composite])
    new_scn = scn
    for i, area_def in enumerate(area_defs, start=0):
        local_scn = scn.resample(area_def, radius_of_influence=50000)
        local_scn.show(composite)
        path = (save_path + str(shape[0]) + '_' + str(composite) + '_' + str(i) +
                '_{date:%Y-%m-%d_%H_%M_%S}.png'.format(date=scn.start_time))
        local_scn.save_dataset(composite, path, writer='simple_image', num_threads=8)

    # if save_path:
    #     if isinstance(load_photo[0], float):
    #         photo_type = str(load_photo[0])
    #     else:
    #         photo_type = load_photo[0]
    #     name = photo_path + photo_type + pro_name + '_{date:%Y-%m-%d_%H_%M_%S}.png'.format(date=scn.start_time)
    #     plt.savefig(name, dpi=dpi)
    # if not save_path:
    #     plt.show()
    return ()
el = Path("/export/home/mbrewer/Documents/GMTED2010_15n030_0125deg.nc")
rad = Path("/export/home/mbrewer/Documents/radar_files/KBBX20181108_213133_V06")

radar = pyart.io.read_nexrad_archive(rad)
# gf = pyart.filters.GateFilter(radar)
# gf.exclude_transition()
# gf.exclude_above('reflectivity', 100)  # Mask out dBZ above 100
# gf.exclude_below('reflectivity', 5)    # Mask out dBZ below 5
# Despeckling routine that removes small noisy reflectivity bits not near the main plume:
# despec = pyart.correct.despeckle_field(radar, 'reflectivity', gatefilter=gf, size=20)

elev = xr.open_dataset(el)

scn = Scene(filenames=glob("npp/*"), reader='viirs_l1b')
scn.load(['true_color', 'I02'])
new_scn = scn.resample('northamerica')

var = get_enhanced_image(new_scn['true_color']).data
var = var.transpose('y', 'x', 'bands')
st = str(scn.attrs['sensor'])[2:-2]

fig = plt.figure(figsize=(20, 10), dpi=200)
crs = new_scn['true_color'].attrs['area'].to_cartopy_crs()
ax = fig.add_subplot(1, 1, 1, projection=crs)
ax.imshow(var.data, extent=(var.x[0], var.x[-1], var.y[-1], var.y[0]), origin='upper')
# ax.add_feature(cfeature.COASTLINE.with_scale('10m'), edgecolor='orange')
# ax.add_feature(cfeature.STATES.with_scale('10m'), edgecolor='orange')
# ax.add_feature(USCOUNTIES.with_scale('500k'), edgecolor='orange', alpha=.75)
minS = "%02d" % min

filenames = glob("/var/tmp/cll/data/H-*MSG4*" + yearS + monthS + dayS + hourS + minS + "*__")
global_scene = Scene(reader="hrit_msg", filenames=filenames)

# first try, it stays here only for the memory
# global_scene.load(["HRV", "IR_108"])
# local_scene = global_scene.resample("ccs4")
# lonlats = local_scene["HRV"].area.get_lonlats()
# sza = sun_zenith_angle(local_scene.start_time, lonlats[0], lonlats[1])
# ds = DataArray(sza, dims=['y', 'x'])
# local_scene['sza'] = ds
# end of the first try, stuff below here is working again

global_scene.load(["ir108", "hrv", "IR_108", "hrv_with_ir"])
local_scene = global_scene.resample("ccs4")
local_scene.load(["hrv_with_ir", "IR_108"])
swiss = load_area("/opt/users/cll/cllwork/etc_work/areas.def", "ccs4")

tmpFileA = "/tmp/welcome.png"
tmpFileB = "/tmp/welcome-ir.png"
outputFile = "/var/tmp/cll/out/PY_visir-ch_" + yearS + monthS + dayS + hourS + minS + ".png"
bgFile = "/opt/users/cll/cllwork/ccs4.png"
local_scene.save_dataset("hrv_with_ir", tmpFileA)
local_scene.save_dataset("ir108", tmpFileB)

background = Image.open(bgFile)
foreground = Image.open(tmpFileA)
background = background.convert("RGBA")
foreground.putalpha(foreground.convert('L'))
foreground = foreground.convert("RGBA")
def calculate_and_project(hrit_files, sat_positions, time, save_path, fov, shape,
                          proj, nadir_proj=True, composite=None, fov_deg=True,
                          save_data_path=None, save_photos=True):
    """Shows, in a Jupyter Notebook, the pictures as seen from the satellite.

    Parameters: array of files saved on disc.
    :param save_path:
    :param composite:
    :param time:
    :param elevation:
    :param central_lon:
    :param central_lat:
    :param hrit_files:
    """
    # TODO: Add local earth radius
    from satpy.scene import Scene
    from wutsat.fun import mat_fun
    import math
    from pyresample.geometry import AreaDefinition, SwathDefinition, create_area_def
    # from pyresample import create_area_def
    import os

    if composite is None:
        composite = 'realistic_colors'
    if fov_deg == True:
        fov = [fov[0] * math.pi / 180, fov[1] * math.pi / 180]
    if nadir_proj:
        nadir_proj = [0, 0]

    central_lat, central_lon, elevation = mat_fun.find_sourounding_list(
        earth_radius=sat_positions[3],
        lat=sat_positions[0],
        lon=sat_positions[1],
        alt=sat_positions[2],
        fov=fov)
    # print(len(central_lat))

    area_def = []
    for i in range(0, len(central_lon)):
        altitude = elevation[i]
        rad = satellite_info(6371228, elevation[i], fov[0], fov[1])[5] / 2
        lat_0, lon_0 = central_lat[i], central_lon[i]
        lat_1, lat_2 = central_lat[i], central_lon[i]
        center = (central_lat[i], central_lon[i])
        radius = (rad, rad)
        area_id = 'wrf_circle'
        proj_dict = {'proj': proj, 'lat_0': lat_0, 'lon_0': lon_0,
                     'lat_1': lat_1, 'lat_2': lat_2,
                     'a': 6370000, 'b': 6370000, 'h': altitude,
                     'azi': nadir_proj[0], 'tilt': nadir_proj[1]}
        area_def.append(
            AreaDefinition.from_circle(area_id, proj_dict, center,
                                       radius=radius, shape=shape))
        # area_def.append(AreaDefinition.create_area_def(area_id, proj_dict, center, radius=radius, shape=shape))

    files = return_files(time, hrit_files)
    if save_photos:
        scn = Scene(filenames=files)
        scn.load([composite])
        for i, area in enumerate(area_def, start=0):
            local_scn = scn.resample(area, radius_of_influence=50000)
            local_scn.show(composite)
            path = save_path + '/' + str(i) + '.png'
            local_scn.save_dataset(composite, path, writer='simple_image', num_threads=8)

    sat_data = [area_def, files, [central_lat, central_lon, elevation]]
    if save_data_path:
        mat_fun.rwdata(save_data_path, 'sat_data.pkl', 'w', sat_data)
    return ()
# Use the following settings:
# - lat and lon of origin: -3/23
# - width and height of the resulting domain: 500px
# - projection x/y coordinates of lower left: -15E5
# - projection x/y coordinates of upper right: 15E5

area_id = "Dem. Rep. Kongo"
description = "Dem. Rep. Kongo and surroundings in the Lambert Azimuthal Equal Area projection"
proj_id = "Dem. Rep. Kongo"
proj_dict = {"proj": "laea", "lat_ts": -3, "lon_0": 23}

width = 500
height = 500

llx = -15E5
lly = -15E5
urx = 15E5
ury = 15E5
area_extent = (llx, lly, urx, ury)

from pyresample.geometry import AreaDefinition

# AreaDefinition expects (area_id, description, proj_id, projection, width, height, area_extent)
area_def = AreaDefinition(area_id, description, proj_id, proj_dict,
                          width, height, area_extent)

local_scn = scn.resample(area_def)

# 4. Save both loaded composites of the resampled Scene as simple png images. [2P]
local_scn.save_datasets(writer="simple_image",
                        datasets=["natural_color", "convection"],
                        filename="{name}_{start_time:%Y%m%d_%H%M%S}.png",
                        base_dir=output_dir)
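For comparison, a hedged sketch of the same domain built with pyresample's create_area_def helper instead of instantiating AreaDefinition directly; this is an alternative formulation, not part of the original exercise:

from pyresample import create_area_def

# Same 500 x 500 px LAEA domain, expressed through create_area_def (illustrative).
area_def_alt = create_area_def(
    "Dem. Rep. Kongo",
    {"proj": "laea", "lat_ts": -3, "lon_0": 23},
    width=500, height=500,
    area_extent=(-15E5, -15E5, 15E5, 15E5),
    units="m")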
import os
from satpy import Scene
from datetime import datetime
from satpy.utils import debug_on
import pyninjotiff
from glob import glob
from pyresample.utils import load_area

debug_on()

chn = "airmass"
ninjoRegion = load_area("areas.def", "nrEURO3km")

filenames = glob("data/*__")
global_scene = Scene(reader="hrit_msg", filenames=filenames)
global_scene.load([chn])
local_scene = global_scene.resample(ninjoRegion)
local_scene.save_dataset(chn, filename="airmass.tif", writer='ninjotiff',
                         sat_id=6300014,
                         chan_id=6500015,
                         data_cat='GPRN',
                         data_source='EUMCAST',
                         nbits=8)
def main():
    args = get_args()
    product = args['product']
    if 'L2' in product:
        reader = 'abi_l2_nc'
        channel = args['channel']
    elif 'L1' in product:
        reader = 'abi_l1b'
        channel = args["channel"]

    assert args['mesoregion'] in ['', 'M1', 'M2'], \
        "Mesoregion needs to be None (default), M1 or M2"

    res_lat_str, res_lon_str = args["spatialresolution"]
    spatial_res = (float(res_lat_str.split('/')[0]) / float(res_lat_str.split('/')[1]),
                   float(res_lon_str.split('/')[0]) / float(res_lon_str.split('/')[1]))

    date_str = args["date"]
    dates = date_input2dates(date_str)
    verbose = args["verbose"]
    region = args["region"]
    token = args["googletoken"]

    setup_logging(verbose)
    logging.info("Set up logging.")

    if args["outputfile"] is not None:
        outputfile = args["outputfile"]
    else:
        config = load_configuration(args["configfile"])
        logging.info('Using outputfile and path defined in config')
        outputfile = config["DOWNLOAD_GOES16"]["OUTPUT_FILE"]
        outputfile = outputfile.replace('{N1}', region[0])
        outputfile = outputfile.replace('{N2}', region[1])
        outputfile = outputfile.replace('{E1}', region[2])
        outputfile = outputfile.replace('{E2}', region[3])
        outputfile = outputfile.replace('{channel}', channel)
        outputfile = outputfile.replace('{mesoregion}', args["mesoregion"])

    output_path = os.path.dirname(outputfile)
    if not os.path.isdir(output_path):
        logging.info(f"Directory {output_path} does not exist, needs to be created:")
        os.makedirs(output_path)
    else:
        logging.info(f"Directory {output_path} already exists!")

    try:
        git_module_version = subprocess.check_output(["git", "describe"]).strip().decode()
    except:
        logging.warning("Could not find git hash.", exc_info=True)
        git_module_version = "--"

    if args['keep_rawdata'] is None:
        tmpdir, tmpdir_obj = get_tmp_dir()
    else:
        pathlib.Path(args['keep_rawdata']).mkdir(parents=True, exist_ok=True)
        tmpdir = args['keep_rawdata']

    try:
        fs = gcsfs.GCSFileSystem(project='gcp-public-data-goes-16/' + product + '/',
                                 token=token)
    except:
        logging.error('Connection not successful', exc_info=True)

    logging.info('Start downloading raw data to temporary directory {}'.format(tmpdir))
    if isinstance(dates, datetime.date):
        files_2_download = find_remote_files(product, dates, channel, fs,
                                             args["mesoregion"])
    elif isinstance(dates, pd.DatetimeIndex):
        files_2_download = []
        for date in dates:
            files_2_download.extend(
                find_remote_files(product, date, channel, fs, args["mesoregion"]))

    if args['timesteps'] is not None:
        mod_hour, mod_minute = args['timesteps']
        files_2_download = filter_filelist(files_2_download, mod_hour, mod_minute)

    local_files = download_remote_files(tmpdir + '/', files_2_download)

    logging.info('Start regridding and cropping data')
    netcdf_attrs = dict(
        title='Geostationary satellite imagery from GOES16 on regular grid',
        description='GOES16 satellite data regridded on a regular grid',
        converted_by='Hauke Schulz ([email protected])',
        institution='Max Planck Institute for Meteorology, Hamburg, Germany',
        Conventions='CF-1.7',
        python_version="{} (with pyresample version: {}; satpy: {})".format(
            sys.version, pyresample.__version__, satpy.__version__),
        creation_date=time.asctime(),
        created_with=os.path.basename(__file__) +
        " with its last modification on " +
        time.ctime(os.path.getmtime(os.path.realpath(__file__))),
        version=git_module_version)

    lat_min, lat_max, lon_min, lon_max = np.array(args['region'], dtype='float')

    if 'L1' in product:
        channel = "C{0:0>2}".format(args['channel'])
    elif 'L2' in product:
        pass

    files_local = sorted(local_files)
    logging.debug('Local files: {}'.format(files_local))

    area_out = define_output_area(lat_min, lon_min, lat_max, lon_max, spatial_res)
    lons, lats = area_out.get_lonlats()

    for f_i, f in enumerate(tqdm(files_local)):
        logging.info('Loading scene')
        input_sat_scene = Scene(reader=reader, filenames=[f])
        input_sat_scene.load([channel])

        logging.info('Resampling scene')
        if check_numpy_compatibility():
            output_region_scene = input_sat_scene.resample(area_out, cache_dir='./')
        else:
            output_region_scene = input_sat_scene.resample(area_out)

        try:
            resampled_data = output_region_scene.datasets[channel]
        except:
            resampled_data = output_region_scene[channel]
            logging.warning(
                'The version of satpy you are using is deprecated. Please update')

        logging.info('Write output to netcdf')
        write_netcdf(resampled_data, lons, lats, files_2_download[f_i], channel,
                     outputfile, netcdf_attrs, args["compression"])

        input_sat_scene.unload()
        output_region_scene.unload()
        del input_sat_scene
        del output_region_scene
        del resampled_data
        gc.collect()

    if args['keep_rawdata'] is None:
        tmpdir_obj.cleanup()
# loop over areas, resample and create products
# create netCDF file for area cosmo1
# create png file for area cosmo1_150 (50% more pixels)
############################################################
# for area in ['SeviriDisk00Cosmo', "cosmo1x150"]:
# for area in ['cosmo1', 'cosmo1eqc3km']:
for area in ['cosmo1eqc3km']:
    # for area in ['cosmo1x150', 'cosmo1eqc3km']:

    # resample MSG L2
    ##################
    print("")
    print("=======================")
    print("resample to " + area)
    local_scene = global_scene.resample(area)

    # fake a new channel
    print("fake a new channel")
    local_scene['lscl'] = deepcopy(local_scene['IR_120'])
    # local_scene['lscl'].wavelength = ""
    # local_scene['lscl'].standard_name = "low_stratus_confidence_level"
    # local_scene['lscl'].calibration = "brightness_temperature_difference"
    # print(local_scene['IR_120'])
    # print(dir(local_scene['IR_120']))
    # print(local_scene['IR_120'].standard_name)
    # print(type(local_scene['IR_120'].standard_name))
    # local_scene['lscl'].standard_name = "toa_brightness_temperature_difference"
    # print(local_scene['lscl'])
 'save_dataset', 'save_datasets', 'show', 'slice', 'start_time', 'to_geoviews',
 'to_xarray_dataset', 'unload', 'values', 'wishlist']
"""

#!!# print(global_scene['overview'])  ### this one does only work in the develop version

print("")
print("available_composite_names")
print(global_scene.available_composite_names())
print(global_scene.all_dataset_names())
print(global_scene.available_dataset_names())
print(global_scene.datasets)

# resample to another projection
print("resample")
area = "ccs4"
area = "EuropeCanaryS95"
local_scene = global_scene.resample(area)
print("dir(local_scene)", dir(local_scene))

for p_name in nwcsaf.product[p_]:
    # local_scene.show('cloudtype')
    # local_scene.save_dataset('cloudtype', './local_cloudtype.png')
    # print "display ./local_cloudtype.png &"
    print("======================")
    print("======================")
    print(global_scene['cloud_top_temperature'])
    print(global_scene['cloud_top_temperature'].attrs['area'])
    print(global_scene['cloud_top_temperature'].attrs["start_time"])
    # long_name: NWC GEO CTTH Cloud Top Altitude
    # level: None
    # end_time: 2017-07-07 12:03:32
filenames = [data_dir + testfile]
print(filenames)

# global_scene = Scene(platform_name="Meteosat-9", sensor="seviri", reader=reader, filenames=filenames)
global_scene = Scene(sensor="seviri", reader=reader, filenames=filenames)

# global_scene.load([0.6, 0.8, 10.8])
global_scene.load(['overview'])
# global_scene.load(["VIS006", "VIS008", "IR_108"])
global_scene.save_dataset('overview', data_dir + '/overview_global.png')

area = "ccs4"
# area = "SeviriDisk00"
local_scene = global_scene.resample("ccs4")
local_scene.save_dataset('overview', data_dir + '/overview_' + area + '.png')

# print(global_scene)
# print(global_scene[0.6])
# local_scene = global_scene.project("ccs4", precompute=True)
# print(global_scene.available_datasets())  ## does not work
# global_scene.show(0.6)
# global_scene.show('VIS006')
# global_scene.show('overview')
# Traceback (most recent call last):