def readMeteosatScene(folder, correct,
                      areaBorders=(-802607, 3577980, 1498701, 5108186),
                      channels=['VIS006', 'VIS008', 'IR_016', 'IR_039', 'WV_062', 'WV_073',
                                'IR_087', 'IR_097', 'IR_108', 'IR_120', 'IR_134'],
                      simpleProfiler=None):

    if simpleProfiler is not None:
        simpleProfiler.start("readMeteosatScene")

    scene_data = None
    time_slot = None
    error_message = None

    dateiListe = glob.glob(folder + 'H-000-MSG*-__')

    # automatically determine the Meteosat satellite type
    satType = '10'  # corresponds to MSG3
    if dateiListe[0][dateiListe[0].rfind("/") + 10:dateiListe[0].rfind("/") + 11] == '2':
        satType = '09'
    if dateiListe[0][dateiListe[0].rfind("/") + 10:dateiListe[0].rfind("/") + 11] == '1':
        satType = '08'

    # automatically determine the date of the scene
    try:
        time_slot = datetime.datetime.strptime(dateiListe[0][-15:-3], "%Y%m%d%H%M")
    except ValueError:
        error_message = 'FEHLER: INKORREKTES DATUMS-FORMAT IN DATEI ' + dateiListe[0]

    # check whether prologue and epilogue files are present
    epiAndProExist = False
    for datei in dateiListe:
        if '-EPI_' in datei and not epiAndProExist:
            for dat in dateiListe:
                if '-PRO_' in dat:
                    epiAndProExist = True
                    break

    if not epiAndProExist:
        error_message = 'KEINE PRO/EPILOG-DATEI !!! - '
    else:
        if error_message is None:
            scene_data = GeostationaryFactory.create_scene("meteosat", satType, "seviri", time_slot)
            scene_data.load(channels, area_extent=areaBorders)
            if correct:
                # sun-earth distance correction & cosine of the solar zenith angle correction
                scene_data = correction_sed_coszen(scene_data, time_slot)
                # CO2 correction
                scene_data = correction_co2(scene_data)

    return scene_data, time_slot, error_message
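# Minimal usage sketch (hypothetical HRIT folder path; glob, datetime and the
# correction helpers are assumed to be imported in the surrounding module):
scene, time_slot, error_message = readMeteosatScene("/data/hrit/2015/06/07/", correct=True)
if error_message is not None:
    print(error_message)
else:
    print(scene['IR_108'].data.shape, time_slot)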
def read_HRW(sat, sat_nr, instrument, time_slot, ntimes, dt=5,
             read_basic_or_detailed='detailed', min_correlation=85,
             min_conf_nwp=80, min_conf_no_nwp=80, cloud_type=None,
             level=None, p_limits=None):

    #print time_slot
    data = GeostationaryFactory.create_scene(sat, sat_nr, instrument, time_slot)
    data.load(['HRW'], reader_level="seviri-level5",
              read_basic_or_detailed=read_basic_or_detailed)

    # read data for previous time steps if needed
    for it in range(1, ntimes):
        # step back dt minutes per additional time step
        time_slot_i = time_slot - timedelta(minutes=it * dt)
        data_i = GeostationaryFactory.create_scene("meteosat", "09", "seviri", time_slot_i)
        data_i.load(['HRW'], reader_level="seviri-level5",
                    read_basic_or_detailed=read_basic_or_detailed)

        # merge all datasets
        data['HRW'].HRW_detailed = data['HRW'].HRW_detailed + data_i['HRW'].HRW_detailed
        data['HRW'].HRW_basic = data['HRW'].HRW_basic + data_i['HRW'].HRW_basic

    # apply quality filter
    data['HRW'].HRW_detailed = data['HRW'].HRW_detailed.filter(min_correlation=min_correlation,
                                                               min_conf_nwp=min_conf_nwp,
                                                               min_conf_no_nwp=min_conf_no_nwp,
                                                               cloud_type=cloud_type,
                                                               level=level,
                                                               p_limits=p_limits)
    return data
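# Usage sketch (hypothetical date and threshold): read one hour of detailed
# high-resolution wind vectors (12 slots of 5 min) and keep well-correlated winds only.
import datetime
hrw_data = read_HRW("meteosat", "09", "seviri", datetime.datetime(2015, 7, 7, 12, 0),
                    ntimes=12, min_correlation=90)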
def get_hrit_data(batch_data,
                  llcrnrlon=x_ll, llcrnrlat=y_ll,
                  urcrnrlon=x_ur, urcrnrlat=y_ur,
                  hrit_listb=hrit_listb, hrit_list=hrit_list):

    # reproject corner lat/lon points
    llcrnrlon, llcrnrlat = HS_proj(llcrnrlon, llcrnrlat)
    urcrnrlon, urcrnrlat = HS_proj(urcrnrlon, urcrnrlat)

    # get HRIT time (common time)
    time_ = batch_data[0][12:24]
    grp_B_time = {}
    grp_B_time['time'] = time_

    year_ = batch_data[0][12:16]
    month_ = batch_data[0][16:18]
    day_ = batch_data[0][18:20]
    hh_ = batch_data[0][20:22]
    mm_ = batch_data[0][22:24]
    t = datetime.datetime(int(year_), int(month_), int(day_), int(hh_), int(mm_))

    global_data = GeostationaryFactory.create_scene("Himawari-", "8", "ahi", t)
    global_data.load(hrit_listb, area_extent=(llcrnrlon, llcrnrlat, urcrnrlon, urcrnrlat))
    #print global_data

    # get longitude, latitude
    lon_geos, lat_geos = global_data[hrit_list[0]].area.get_lonlats()
    grp_B_coors = {}
    grp_B_coors["Longitude"] = lon_geos
    grp_B_coors["Latitude"] = lat_geos

    # get data from bands 1 to 16
    grp_B_data = {}
    for k in hrit_listb:
        #print "Extracting reflectance/BT data from Band "+k
        grp_B_data[k] = global_data[k].data

    # release the read data
    del global_data

    return grp_B_data, grp_B_coors, grp_B_time
def load_rgb(satellite, satellite_nr, satellites_name, time_slot, rgb, area, in_msg, data_CTP):

    if rgb != 'CTP':
        # read the data we would like to forecast
        global_data_RGBforecast = GeostationaryFactory.create_scene(satellite, satellite_nr,
                                                                    satellites_name, time_slot)
        #global_data_RGBforecast = GeostationaryFactory.create_scene(in_msg.sat, str(10), "seviri", time_slot)

        # area we would like to read
        area_loaded = get_area_def("EuropeCanary95")  #(in_windshift.areaExtraction)

        # load product, global_data is changed in this step!
        area_loaded = load_products(global_data_RGBforecast, [rgb], in_msg, area_loaded)

        print '... project data to desired area ', area
        fns = global_data_RGBforecast.project(area)
        return fns[rgb].data
    else:
        # reuse the already loaded CTP field instead of reading it again
        return deepcopy(data_CTP["CTP"].data)
def load_channels(datetime):
    """
    Load channel data into an mpop scene object

    :param datetime: datetime.datetime object giving the desired time step
                     (note: the parameter name shadows the datetime module inside this function)
    :return global_data: an mpop scene object with data from IR channels 12.0, 10.8, 8.7
                         for the desired time
    :return time_slot: a datetime object corresponding to the desired time step
    """
    # Keep the requested time step
    time_slot = datetime

    # Create a scene object and load channels
    global_data = GeostationaryFactory.create_scene("Meteosat-9", "", "seviri", time_slot)
    global_data.load([12.0, 10.8, 8.7])

    return global_data, time_slot
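# Usage sketch (hypothetical time step): load the 12.0/10.8/8.7 micron IR channels for one slot.
import datetime
scene, slot = load_channels(datetime.datetime(2014, 8, 1, 13, 45))
print(scene[10.8].data.shape, slot)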
def plot_msg_minus_cosmo(in_msg):

    # do statistics for the last full hour (minutes=0, seconds=0)
    in_msg.datetime = datetime(in_msg.datetime.year, in_msg.datetime.month,
                               in_msg.datetime.day, in_msg.datetime.hour, 0, 0)

    area_loaded = choose_area_loaded_msg(in_msg.sat, in_msg.sat_nr, in_msg.datetime)

    # define contour writer for coasts, borders, rivers
    cw = ContourWriterAGG(in_msg.mapDir)

    # check if input data is complete
    if in_msg.verbose:
        print("*** check input data for ", in_msg.sat_str())
    RGBs = check_input(in_msg, in_msg.sat_str(layout="%(sat)s") + in_msg.sat_nr_str(), in_msg.datetime)
    # in_msg.sat_nr might be changed to backup satellite

    if in_msg.verbose:
        print('*** Create plots for ')
        print(' Satellite/Sensor: ' + in_msg.sat_str())
        print(' Satellite number: ' + in_msg.sat_nr_str() + ' // ' + str(in_msg.sat_nr))
        print(' Satellite instrument: ' + in_msg.instrument)
        print(' Date/Time: ' + str(in_msg.datetime))
        print(' RGBs: ', in_msg.RGBs)
        print(' Area: ', in_msg.areas)
        print(' reader level: ', in_msg.reader_level)

    # define satellite data object
    #global_data = GeostationaryFactory.create_scene(in_msg.sat, in_msg.sat_nr_str(), "seviri", in_msg.datetime)
    global_data = GeostationaryFactory.create_scene(in_msg.sat_str(), in_msg.sat_nr_str(),
                                                    in_msg.instrument, in_msg.datetime)
    # global_data = GeostationaryFactory.create_scene("msg-ot", "", "Overshooting_Tops", in_msg.datetime)

    if len(RGBs) == 0 and len(in_msg.postprocessing_areas) == 0:
        return RGBs

    if in_msg.verbose:
        print("*** load satellite channels for " + in_msg.sat_str() + in_msg.sat_nr_str() + " ",
              global_data.fullname)

    # initialize processed RGBs
    RGBs_done = []

    # -------------------------------------------------------------------
    # load reflectivities, brightness temperatures, NWC-SAF products ...
    # -------------------------------------------------------------------
    area_loaded = load_products(global_data, RGBs, in_msg, area_loaded)

    cosmo_input_file = "input_cosmo_cronjob.py"
    print("... read COSMO input file: ", cosmo_input_file)
    in_cosmo = parse_commandline_and_read_inputfile(input_file=cosmo_input_file)

    # add composite
    in_msg.scpOutput = True
    in_msg.resize_montage = 70
    in_msg.postprocessing_montage = [["MSG_IR-108cpc", "COSMO_SYNMSG-BT-CL-IR10.8", "MSG_IR-108-COSMO-minus-MSGpc"]]
    in_msg.scpProducts = [["MSG_IR-108cpc", "COSMO_SYNMSG-BT-CL-IR10.8", "MSG_IR-108-COSMO-minus-MSGpc"]]
    #in_msg.scpProducts = ["all"]

    # define satellite data object
    cosmo_data = GeostationaryFactory.create_scene(in_cosmo.sat_str(), in_cosmo.sat_nr_str(),
                                                   in_cosmo.instrument, in_cosmo.datetime)

    area_loaded_cosmo = load_products(cosmo_data, ['SYNMSG_BT_CL_IR10.8'], in_cosmo, area_loaded)

    # preprojecting the data to another area
    # --------------------------------------
    if len(RGBs) > 0:
        for area in in_msg.areas:
            print("")
            obj_area = get_area_def(area)

            if area != 'ccs4':
                print("*** WARNING, diff MSG-COSMO only implemented for ccs4")
                continue

            # reproject data to new area
            print(area_loaded)

            if obj_area == area_loaded:
                if in_msg.verbose:
                    print("*** Use data for the area loaded: ", area)
                #obj_area = area_loaded
                data = global_data
                resolution = 'l'
            else:
                if in_msg.verbose:
                    print("*** Reproject data to area: ", area, "(org projection: ", area_loaded.name, ")")
                obj_area = get_area_def(area)
                # PROJECT data to new area
                data = global_data.project(area, precompute=True)
                resolution = 'i'

            if in_msg.parallax_correction:
                loaded_products = [chn.name for chn in data.loaded_channels()]
                if 'CTH' not in loaded_products:
                    print("*** Error in plot_msg (" + inspect.getfile(inspect.currentframe()) + ")")
                    print(" Cloud Top Height is needed for parallax correction ")
                    print(" either load CTH or specify the estimation of the CTH in the input file (load 10.8 in this case)")
                    quit()
                if in_msg.verbose:
                    print(" perform parallax correction for loaded channels: ", loaded_products)
                data = data.parallax_corr(fill=in_msg.parallax_gapfilling,
                                          estimate_cth=in_msg.estimate_cth, replace=True)

            # save reprojected data
            if area in in_msg.save_reprojected_data:
                save_reprojected_data(data, area, in_msg)

            # apply a mask to the data (switched off at the moment)
            if False:
                mask_data(data, area)

            # save average values
            if in_msg.save_statistics:
                mean_array = zeros(len(RGBs))
                #statisticFile = '/data/COALITION2/database/meteosat/ccs4/'+yearS+'/'+monthS+'/'+dayS+'/MSG_'+area+'_'+yearS[2:]+monthS+dayS+'.txt'
                statisticFile = './' + yearS + '-' + monthS + '-' + dayS + '/MSG_' + area + '_' + yearS[2:] + monthS + dayS + '.txt'
                if in_msg.verbose:
                    print("*** write statistics (average values) to " + statisticFile)
                f1 = open(statisticFile, 'a')  # mode append
                i_rgb = 0
                for rgb in RGBs:
                    if rgb in products.MSG_color:
                        mean_array[i_rgb] = data[rgb.replace("c", "")].data.mean()
                        i_rgb = i_rgb + 1
                # create string to write
                str2write = dateS + ' ' + hourS + ' : ' + minS + ' UTC '
                for mm in mean_array:
                    str2write = str2write + ' ' + "%7.2f" % mm
                str2write = str2write + "\n"
                f1.write(str2write)
                f1.close()

            # creating plots/images
            if in_msg.make_plots:

                # choose map resolution
                in_msg.resolution = choose_map_resolution(area, in_msg.mapResolution)

                # define area
                proj4_string = obj_area.proj4_string
                # e.g. proj4_string = '+proj=geos +lon_0=0.0 +a=6378169.00 +b=6356583.80 +h=35785831.0'
                area_extent = obj_area.area_extent
                # e.g. area_extent = (-5570248.4773392612, -5567248.074173444, 5567248.074173444, 5570248.4773392612)
                area_tuple = (proj4_string, area_extent)

                RGBs = ['IR_108-COSMO-minus-MSG']

                print(data['IR_108'].data.shape)
                print(cosmo_data['SYNMSG_BT_CL_IR10.8'].data.shape)
                diff_MSG_COSMO = cosmo_data['SYNMSG_BT_CL_IR10.8'].data - data['IR_108'].data
                HRV_enhance_str = ''

                # add IR difference as "channel object" to satellite regional "data" object
                data.channels.append(Channel(name=RGBs[0],
                                             wavelength_range=[0., 0., 0.],
                                             resolution=data['IR_108'].resolution,
                                             data=diff_MSG_COSMO))

                for rgb in RGBs:

                    if not check_loaded_channels(rgb, data):
                        continue

                    PIL_image = create_PIL_image(rgb, data, in_msg, obj_area=obj_area)
                    # !!! in_msg.colorbar[rgb] is initialized inside (give attention to rgbs) !!!

                    add_borders_and_rivers(PIL_image, cw, area_tuple,
                                           add_borders=in_msg.add_borders,
                                           border_color=in_msg.border_color,
                                           add_rivers=in_msg.add_rivers,
                                           river_color=in_msg.river_color,
                                           resolution=in_msg.resolution,
                                           verbose=in_msg.verbose)

                    # indicate mask
                    if in_msg.indicate_mask:
                        PIL_image = indicate_mask(rgb, PIL_image, data, in_msg.verbose)

                    #if area.find("EuropeCanary") != -1 or area.find("ccs4") != -1:
                    dc = DecoratorAGG(PIL_image)

                    # add title to image
                    if in_msg.add_title:
                        add_title(PIL_image, in_msg.title, HRV_enhance_str + rgb, in_msg.sat_str(),
                                  data.sat_nr(), in_msg.datetime, area, dc, in_msg.font_file, in_msg.verbose,
                                  title_color=in_msg.title_color,
                                  title_y_line_nr=in_msg.title_y_line_nr)  # !!! needs change

                    # add MeteoSwiss and Pytroll logo
                    if in_msg.add_logos:
                        if in_msg.verbose:
                            print('... add logos')
                        dc.align_right()
                        if in_msg.add_colorscale:
                            dc.write_vertically()
                        if PIL_image.mode != 'L':
                            height = 60  # height=60.0 normal resolution
                            dc.add_logo(in_msg.logos_dir + "/pytroll3.jpg", height=height)  # height=60.0
                            dc.add_logo(in_msg.logos_dir + "/meteoSwiss3.jpg", height=height)
                            dc.add_logo(in_msg.logos_dir + "/EUMETSAT_logo2_tiny_white_square.png", height=height)  # height=60.0

                    # add colorscale
                    if in_msg.add_colorscale and in_msg.colormap[rgb] != None:
                        if rgb in products.MSG_color:
                            unit = data[rgb.replace("c", "")].info['units']
                        #elif rgb in products.MSG or rgb in products.NWCSAF or rgb in products.HSAF:
                        #    unit = data[rgb].info['units']
                        else:
                            unit = None
                            loaded_channels = [chn.name for chn in data.loaded_channels()]
                            if rgb in loaded_channels:
                                if hasattr(data[rgb], 'info'):
                                    print(" hasattr(data[rgb], 'info')", list(data[rgb].info.keys()))
                                    if 'units' in list(data[rgb].info.keys()):
                                        print("'units' in data[rgb].info.keys()")
                                        unit = data[rgb].info['units']
                        print("... units = ", unit)
                        add_colorscale(dc, rgb, in_msg, unit=unit)

                    if in_msg.parallax_correction:
                        parallax_correction_str = 'pc'
                    else:
                        parallax_correction_str = ''
                    rgb += parallax_correction_str

                    # create output filename
                    outputDir = format_name(in_msg.outputDir, data.time_slot, area=area, rgb=rgb,
                                            sat=data.satname, sat_nr=data.sat_nr())  # !!! needs change
                    outputFile = outputDir + "/" + format_name(in_msg.outputFile, data.time_slot, area=area,
                                                               rgb=rgb, sat=data.satname,
                                                               sat_nr=data.sat_nr())  # !!! needs change

                    # check if output directory exists, if not create it
                    path = dirname(outputFile)
                    if not exists(path):
                        if in_msg.verbose:
                            print('... create output directory: ' + path)
                        makedirs(path)

                    # save file
                    if exists(outputFile) and not in_msg.overwrite:
                        if stat(outputFile).st_size > 0:
                            print('... outputFile ' + outputFile + ' already exists (keep old file)')
                        else:
                            print('*** Warning, outputFile' + outputFile + ' already exists, but is empty (overwrite file)')
                            PIL_image.save(outputFile, optimize=True)  # optimize -> minimize file size
                            chmod(outputFile, 0o777)  ## FOR PYTHON3: 0o664  # give read/write access to group members
                    else:
                        if in_msg.verbose:
                            print('... save final file: ' + outputFile)
                        PIL_image.save(outputFile, optimize=True)  # optimize -> minimize file size
                        chmod(outputFile, 0o777)  ## FOR PYTHON3: 0o664  # give read/write access to group members

                    if in_msg.compress_to_8bit:
                        if in_msg.verbose:
                            print('... compress to 8 bit image: display ' + outputFile.replace(".png", "-fs8.png") + ' &')
                        subprocess.call("/usr/bin/pngquant -force 256 " + outputFile + " 2>&1 &",
                                        shell=True)  # 256 == "number of colors"

                    #if in_msg.verbose:
                    #    print " add coastlines to "+outputFile
                    ## alternative: reopen image and modify it (takes longer due to additional reading and saving)
                    #cw.add_rivers_to_file(img, area_tuple, level=5, outline='blue', width=0.5, outline_opacity=127)
                    #cw.add_coastlines_to_file(outputFile, obj_area, resolution=resolution, level=4)
                    #cw.add_borders_to_file(outputFile, obj_area, outline=outline, resolution=resolution)

                    # secure copy file to another place
                    if in_msg.scpOutput:
                        if (rgb in in_msg.scpProducts) or ('all' in [x.lower() for x in in_msg.scpProducts if type(x) == str]):
                            scpOutputDir = format_name(in_msg.scpOutputDir, data.time_slot, area=area, rgb=rgb,
                                                       sat=data.satname, sat_nr=data.sat_nr())
                            if in_msg.compress_to_8bit:
                                if in_msg.verbose:
                                    print("... secure copy " + outputFile.replace(".png", "-fs8.png") + " to " + scpOutputDir)
                                subprocess.call("scp " + in_msg.scpID + " " + outputFile.replace(".png", "-fs8.png") + " " + scpOutputDir + " 2>&1 &", shell=True)
                            else:
                                if in_msg.verbose:
                                    print("... secure copy " + outputFile + " to " + scpOutputDir)
                                subprocess.call("scp " + in_msg.scpID + " " + outputFile + " " + scpOutputDir + " 2>&1 &", shell=True)

                    if in_msg.scpOutput and in_msg.scpID2 != None and in_msg.scpOutputDir2 != None:
                        if (rgb in in_msg.scpProducts2) or ('all' in [x.lower() for x in in_msg.scpProducts2 if type(x) == str]):
                            scpOutputDir2 = format_name(in_msg.scpOutputDir2, data.time_slot, area=area, rgb=rgb,
                                                        sat=data.satname, sat_nr=data.sat_nr())
                            if in_msg.compress_to_8bit:
                                if in_msg.verbose:
                                    print("... secure copy " + outputFile.replace(".png", "-fs8.png") + " to " + scpOutputDir2)
                                subprocess.call("scp " + in_msg.scpID2 + " " + outputFile.replace(".png", "-fs8.png") + " " + scpOutputDir2 + " 2>&1 &", shell=True)
                            else:
                                if in_msg.verbose:
                                    print("... secure copy " + outputFile + " to " + scpOutputDir2)
                                subprocess.call("scp " + in_msg.scpID2 + " " + outputFile + " " + scpOutputDir2 + " 2>&1 &", shell=True)

                    if 'ninjotif' in in_msg.outputFormats:
                        ninjotif_file = format_name(outputDir + '/' + in_msg.ninjotifFilename, data.time_slot,
                                                    sat_nr=data.sat_nr(), RSS=in_msg.RSS, area=area, rgb=rgb)
                        from plot_coalition2 import pilimage2geoimage
                        GEO_image = pilimage2geoimage(PIL_image, obj_area, data.time_slot)
                        GEO_image.save(ninjotif_file,
                                       fformat='mpop.imageo.formats.ninjotiff',
                                       ninjo_product_name=rgb,
                                       chan_id=products.ninjo_chan_id[rgb.replace("_", "-") + "_" + area],
                                       nbits=8)
                        chmod(ninjotif_file, 0o777)
                        print(("... save ninjotif image: display ", ninjotif_file, " &"))

                    if rgb not in RGBs_done:
                        RGBs_done.append(rgb)

    ## start postprocessing
    for area in in_msg.postprocessing_areas:
        postprocessing(in_msg, global_data.time_slot, int(data.sat_nr()), area)

    if in_msg.verbose:
        print(" ")

    return RGBs_done
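# Usage sketch (hedged; "input_MSG_minus_COSMO" is a hypothetical input-file name,
# read with the same helper the script itself uses for the COSMO configuration):
#   in_msg = parse_commandline_and_read_inputfile(input_file="input_MSG_minus_COSMO")
#   RGBs_done = plot_msg_minus_cosmo(in_msg)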
def load_input(sat_nr, time_slot, par_fill, read_HSAF=True):
    #########
    # time_slot: time in UTC
    # par_fill: parallax corr gap filler: choose between 'False', 'nearest', and 'bilinear'
    #########

    # RADAR
    prop_rad = 'RATE'

    # SATELLITE
    channel_sat = ['WV_062', 'WV_073', 'IR_087', 'IR_097', 'IR_108', 'IR_120', 'IR_134']

    # Cloud Mask
    prop_cma = 'CMa'
    pge_cma = get_NWC_pge_name(prop_cma)
    # cloud type
    prop_ct = 'CT'
    pge_ct = get_NWC_pge_name(prop_ct)
    # cloud phase
    prop_ctph = 'CT_PHASE'
    pge_ctph = get_NWC_pge_name(prop_ctph)
    # cloud top temperature
    prop_ctt = 'CTT'
    pge_ctt = get_NWC_pge_name(prop_ctt)
    # cloud top pressure
    prop_ctp = 'CTP'
    pge_ctp = get_NWC_pge_name(prop_ctp)

    # put all the strings I want to load in the same obj together
    prop_nwc = [prop_cma, prop_ct, prop_ctph, prop_ctt, prop_ctp]
    pge_nwc = [pge_cma, pge_ct, pge_ctph, pge_ctt, pge_ctp]

    # Cloud height
    prop_cth = 'CTH'
    pge_cth = get_NWC_pge_name(prop_cth)  # separate so can correct all others before also correcting it

    # hsaf
    prop_hsaf = 'h03b'  # estimated rain rate in mm/h, see http://hsaf.meteoam.it/precipitation.php?tab=5

    print('=========================')
    print('start:', time_slot)

    print('read Odyssey radar composite')
    global_radar = GeostationaryFactory.create_scene("odyssey", "", "radar", time_slot)
    global_radar.load([prop_rad])
    print(global_radar)
    print('=========================')

    print('read satellite data')
    try:
        global_sat = GeostationaryFactory.create_scene("meteosat", sat_nr, "seviri", time_slot)
        global_sat.load(channel_sat)
        #global_sat.load(channel_sat, reader_level="seviri-level2")
        print(global_sat)
        print('=========================')
    except AttributeError:
        date_missed = time_slot
        #sys.exit()  # move on to the next iteration if the NWCSAF does not have a product at this time instance
        text = ['skipped because SEVIRI product missing']
        return date_missed, text

    if read_HSAF:
        print('read HSAF data')
        try:
            global_hsaf = GeostationaryFactory.create_scene("meteosat", sat_nr, "seviri", time_slot)
            global_hsaf.load([prop_hsaf], reader_level='seviri-level10')
            print('=========================')
        except ValueError:
            date_missed = time_slot
            text = ['skipped because HSAF product missing']
            #sys.exit()  # move on to the next iteration if the NWCSAF does not have a product at this time instance
            return date_missed, text
    else:
        global_hsaf = None

    print('read Nowcasting SAF data')
    global_nwc = GeostationaryFactory.create_scene("meteosat", sat_nr, "seviri", time_slot)
    nwcsaf_calibrate = True  # converts data into physical units
    global_nwc.load(pge_nwc, calibrate=nwcsaf_calibrate, reader_level="seviri-level3")
    print("=========================")

    print('read CTH data')
    global_cth = GeostationaryFactory.create_scene("meteosat", sat_nr, "seviri", time_slot)
    nwcsaf_calibrate = True  # converts data into physical units
    global_cth.load([pge_cth], calibrate=nwcsaf_calibrate, reader_level="seviri-level3")
    print('=========================')

    # convert NWCSAF input to channels to be able to carry out parallax corr
    try:
        for var in prop_nwc:
            convert_NWCSAF_to_radiance_format(global_nwc, None, var, False, True)
        convert_NWCSAF_to_radiance_format(global_cth, None, prop_cth, False, True)
    except KeyError:
        date_missed = time_slot
        text = ['skipped because NWC SAF product missing']
        #sys.exit()  # move on to the next iteration if the NWCSAF does not have a product at this time instance
        return date_missed, text

    return global_radar, global_sat, global_nwc, global_cth, global_hsaf
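# Usage sketch (hypothetical time slot): load_input() returns either the five scene
# objects or a (date_missed, text) pair when one of the input products is missing.
import datetime
result = load_input("09", datetime.datetime(2015, 7, 7, 12, 0), par_fill='nearest')
if len(result) == 2:
    date_missed, text = result
    print(text[0], date_missed)
else:
    global_radar, global_sat, global_nwc, global_cth, global_hsaf = result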
def plot_msg(in_msg): # get date of the last SEVIRI observation if in_msg.datetime == None: in_msg.get_last_SEVIRI_date() yearS = str(in_msg.datetime.year) #yearS = yearS[2:] monthS = "%02d" % in_msg.datetime.month dayS = "%02d" % in_msg.datetime.day hourS = "%02d" % in_msg.datetime.hour minS = "%02d" % in_msg.datetime.minute dateS=yearS+'-'+monthS+'-'+dayS timeS=hourS+'-'+minS if in_msg.sat_nr==None: in_msg.sat_nr=choose_msg(in_msg.datetime,in_msg.RSS) if in_msg.datetime.year > 2012: if in_msg.sat_nr == 8: area_loaded = get_area_def("EuropeCanary35") elif in_msg.sat_nr == 9: # rapid scan service satellite area_loaded = get_area_def("EuropeCanary95") elif in_msg.sat_nr == 10: # default satellite area_loaded = get_area_def("met09globeFull") # full disk service, like EUMETSATs NWC-SAF products elif in_msg.sat_nr == 0: # fake satellite for reprojected ccs4 data in netCDF area_loaded = get_area_def("ccs4") # #area_loaded = get_area_def("EuropeCanary") #area_loaded = get_area_def("alps") # new projection of SAM else: print("*** Error, unknown satellite number ", in_msg.sat_nr) area_loaded = get_area_def("hsaf") # else: if in_msg.sat_nr == 8: area_loaded = get_area_def("EuropeCanary95") elif in_msg.sat_nr == 9: # default satellite area_loaded = get_area_def("EuropeCanary") # define contour write for coasts, borders, rivers cw = ContourWriterAGG(in_msg.mapDir) if type(in_msg.sat_nr) is int: sat_nr_str = str(in_msg.sat_nr).zfill(2) elif type(in_msg.sat_nr) is str: sat_nr_str = in_msg.sat_nr else: print("*** Waring, unknown type of sat_nr", type(in_msg.sat_nr)) sat_nr_str = in_msg.sat_nr if in_msg.verbose: print('*** Create plots for ') print(' Satellite/Sensor: '+in_msg.sat + ' ' + sat_nr_str) print(' Date/Time: '+dateS +' '+hourS+':'+minS+'UTC') print(' RGBs: ', in_msg.RGBs) print(' Area: ', in_msg.areas) # check if input data is complete if in_msg.verbose: print("*** check input data") RGBs = check_input(in_msg, in_msg.sat+sat_nr_str, in_msg.datetime) if len(RGBs) != len(in_msg.RGBs): print("*** Warning, input not complete.") print("*** Warning, process only: ", RGBs) # define satellite data object global_data = GeostationaryFactory.create_scene(in_msg.sat, sat_nr_str, "seviri", in_msg.datetime) # print "type(global_data) ", type(global_data) # <class 'mpop.scene.SatelliteInstrumentScene'> # print "dir(global_data)", dir(global_data) [..., '__init__', ... 'area', 'area_def', 'area_id', 'channel_list', 'channels', # 'channels_to_load', 'check_channels', 'fullname', 'get_area', 'image', 'info', 'instrument_name', 'lat', 'load', 'loaded_channels', # 'lon', 'number', 'orbit', 'project', 'remove_attribute', 'satname', 'save', 'set_area', 'time_slot', 'unload', 'variant'] ## define satellite data object one scan before #if in_msg.RSS: # scan_time = 5 # min #else: # scan_time = 15 # min scan_time = 15 # min datetime_m1 = in_msg.datetime - timedelta(minutes=scan_time) global_data_m1 = GeostationaryFactory.create_scene(in_msg.sat, sat_nr_str, "seviri", datetime_m1) if len(RGBs) == 0: return RGBs if in_msg.verbose: print("*** load satellite channels for "+in_msg.sat+sat_nr_str+" ", global_data.fullname) # initialize processed RGBs RGBs_done=[] # load reflectivities, brightness temperatures, NWC-SAF products ... 
print("*** read ", str(in_msg.datetime)) area_loaded = load_products(global_data, RGBs, in_msg, area_loaded) #print "*** read ", str(datetime_m1) #area_loaded = load_products(global_data_m1, RGBs, in_msg, area_loaded) # check if all prerequisites are loaded #rgb_complete = [] #for rgb in RGBs: # all_loaded = True # if rgb in products.RGBs_buildin or rgb in products.RGB_user: # obj_image = get_image(global_data, rgb) # for pre in obj_image.prerequisites: # if pre not in loaded_channels: # all_loaded = False # elif rgb in products.MSG_color: # if rgb.replace("c","") not in loaded_channels: # all_loaded = False # else: # if rgb not in loaded_channels: # all_loaded = False # if all_loaded: # rgb_complete.append(rgb) #print "rgb_complete", rgb_complete # preprojecting the data to another area # -------------------------------------- for area in in_msg.areas: print("") obj_area = get_area_def(area) # reproject data to new area if obj_area == area_loaded: if in_msg.verbose: print("*** Use data for the area loaded: ", area) #obj_area = area_loaded data = global_data data_m1 = global_data_m1 resolution='l' else: if in_msg.verbose: print("*** Reproject data to area: ", area, "(org projection: ", area_loaded.name, ")") obj_area = get_area_def(area) # PROJECT data to new area data = global_data.project(area, precompute=True) data_m1 = global_data_m1.project(area, precompute=True) resolution='i' loaded_products = [chn.name for chn in data.loaded_channels()] print(loaded_products) #loaded_products_m1 = [chn.name for chn in data_m1.loaded_channels()] #print loaded_products_m1 #for prod in loaded_products: # print "xxx ", prod # print data_m1[prod] # data[prod] = data[prod] - data_m1[prod] # # save reprojected data if area in in_msg.save_reprojected_data: save_reprojected_data(data, area, in_msg) # apply a mask to the data (switched off at the moment) if False: mask_data(data, area) # save average values if in_msg.save_statistics: mean_array = zeros(len(RGBs)) #statisticFile = '/data/COALITION2/database/meteosat/ccs4/'+yearS+'/'+monthS+'/'+dayS+'/MSG_'+area+'_'+yearS[2:]+monthS+dayS+'.txt' statisticFile = './'+yearS+'-'+monthS+'-'+dayS+'/MSG_'+area+'_'+yearS[2:]+monthS+dayS+'.txt' if in_msg.verbose: print("*** write statistics (average values) to "+statisticFile) f1 = open(statisticFile,'a') # mode append i_rgb=0 for rgb in RGBs: if rgb in products.MSG_color: mean_array[i_rgb]=data[rgb.replace("c","")].data.mean() i_rgb=i_rgb+1 # create string to write str2write = dateS +' '+hourS+' : '+minS+' UTC ' for mm in mean_array: str2write = str2write+' '+ "%7.2f" % mm str2write = str2write+"\n" f1.write(str2write) f1.close() # creating plots/images if in_msg.make_plots: # choose map resolution resolution = choose_map_resolution(area, in_msg.mapResolution) # define area proj4_string = obj_area.proj4_string # e.g. proj4_string = '+proj=geos +lon_0=0.0 +a=6378169.00 +b=6356583.80 +h=35785831.0' area_extent = obj_area.area_extent # e.g. area_extent = (-5570248.4773392612, -5567248.074173444, 5567248.074173444, 5570248.4773392612) area_tuple = (proj4_string, area_extent) for rgb in RGBs: PIL_image = create_PIL_image(rgb, data, in_msg) # !!! in_msg.colorbar[rgb] is initialized inside (give attention to rgbs) !!! 
if in_msg.add_rivers: if in_msg.verbose: print(" add rivers to image (resolution="+resolution+")") cw.add_rivers(PIL_image, area_tuple, outline='blue', resolution=resolution, outline_opacity=127, width=0.5, level=5) # if in_msg.verbose: print(" add lakes to image (resolution="+resolution+")") cw.add_coastlines(PIL_image, area_tuple, outline='blue', resolution=resolution, outline_opacity=127, width=0.5, level=2) #, outline_opacity=0 if in_msg.add_borders: if in_msg.verbose: print(" add coastlines to image (resolution="+resolution+")") cw.add_coastlines(PIL_image, area_tuple, outline=(255, 0, 0), resolution=resolution, width=1) #, outline_opacity=0 if in_msg.verbose: print(" add borders to image (resolution="+resolution+")") cw.add_borders(PIL_image, area_tuple, outline=(255, 0, 0), resolution=resolution, width=1) #, outline_opacity=0 #if area.find("EuropeCanary") != -1 or area.find("ccs4") != -1: dc = DecoratorAGG(PIL_image) # add title to image if in_msg.add_title: add_title(PIL_image, rgb, int(data.number), dateS, hourS, minS, area, dc, in_msg.font_file, in_msg.verbose ) # add MeteoSwiss and Pytroll logo if in_msg.add_logos: if in_msg.verbose: print('... add logos') dc.align_right() if in_msg.add_colorscale: dc.write_vertically() dc.add_logo("../logos/meteoSwiss3.jpg",height=60.0) dc.add_logo("../logos/pytroll3.jpg",height=60.0) # add colorscale if in_msg.add_colorscale and in_msg.colormap[rgb] != None: add_colorscale(dc, rgb, in_msg) # create output filename outputDir = format_name(in_msg.outputDir, data.time_slot, area=area, rgb=rgb, sat_nr=data.number) outputFile = outputDir + format_name(in_msg.outputFile, data.time_slot, area=area, rgb=rgb, sat_nr=data.number) # check if output directory exists, if not create it path= dirname(outputFile) if not exists(path): if in_msg.verbose: print('... create output directory: ' + path) makedirs(path) # save file if in_msg.verbose: print('... save final file :' + outputFile) PIL_image.save(outputFile, optimize=True) # optimize -> minimize file size if in_msg.compress_to_8bit: if in_msg.verbose: print('... compress to 8 bit image: display '+outputFile.replace(".png","-fs8.png")+' &') subprocess.call("/usr/bin/pngquant -force 256 "+outputFile+" 2>&1 &", shell=True) # 256 == "number of colors" #if in_msg.verbose: # print " add coastlines to "+outputFile ## alternative: reopen image and modify it (takes longer due to additional reading and saving) #cw.add_rivers_to_file(img, area_tuple, level=5, outline='blue', width=0.5, outline_opacity=127) #cw.add_coastlines_to_file(outputFile, obj_area, resolution=resolution, level=4) #cw.add_borders_to_file(outputFile, obj_area, outline=outline, resolution=resolution) # copy to another place if in_msg.scpOutput: if in_msg.verbose: print("... secure copy "+outputFile+ " to "+in_msg.scpOutputDir) subprocess.call("scp "+in_msg.scpID+" "+outputFile+" "+in_msg.scpOutputDir+" 2>&1 &", shell=True) if in_msg.compress_to_8bit: if in_msg.verbose: print("... secure copy "+outputFile.replace(".png","-fs8.png")+ " to "+in_msg.scpOutputDir) subprocess.call("scp "+in_msg.scpID+" "+outputFile.replace(".png","-fs8.png")+" "+in_msg.scpOutputDir+" 2>&1 &", shell=True) if rgb not in RGBs_done: RGBs_done.append(rgb) ## start postprocessing if area in in_msg.postprocessing_areas: postprocessing(in_msg, global_data.time_slot, data.number, area) if in_msg.verbose: print(" ") return RGBs_done
#yearS = yearS[2:]
monthS = "%02d" % month
dayS = "%02d" % day
hourS = "%02d" % hour
minS = "%02d" % minute
dateS = yearS + '-' + monthS + '-' + dayS
timeS = hourS + ':' + minS + 'UTC'

#import sys, string, os
#sys.path.insert(0, "/opt/users/mbc/pytroll/install/lib/python2.6/site-packages")
debug_on()

time_slot = datetime.datetime(year, month, day, hour, minute)
print("... process date: ", str(time_slot))

global_data = GeostationaryFactory.create_scene("odyssey", "", "radar", time_slot)
global_data.load([prop_str])

color_mode = 'RainRate'
outputDir = "/data/cinesat/out/"
#outputFile = "/tmp/test1."+prop_str+".png"

#print "global_data[prop_str].product_name=",global_data[prop_str].product_name

#area='odyssey'
area = 'odysseyS25'

reproject = True
if reproject:
from copy import deepcopy

if __name__ == "__main__":

    from get_input_msg import get_input_msg
    input_file = sys.argv[1]
    if input_file[-3:] == '.py':
        input_file = input_file[:-3]
    in_msg = get_input_msg(input_file)

    rgb = ["CTP"]
    time_slot = datetime(2015, 10, 15, 5, 0)

    global_data = GeostationaryFactory.create_scene(in_msg.sat_str(), in_msg.sat_nr_str(),
                                                    "seviri", time_slot)
    area_loaded = get_area_def("EuropeCanary95")  #(in_windshift.areaExtraction)
    area_loaded = load_products(global_data, ['CTP'], in_msg, area_loaded)
    data = global_data.project("ccs4")

    data_flat = data[rgb[0]].data.flatten()
    num_bins = 100
    fig = plt.figure()
    n, bins, patches = plt.hist(data_flat[data_flat > 0], num_bins, normed=1, facecolor='blue',
# Geolocate and resample microphysic parameters
from pyresample import utils

area_id = 'CPP_cmsaf'
area_name = 'Gridded cloud physical properties from CMSAF'
proj_id = 'CPP_cmsaf'
x_size = cot.shape[0]
y_size = cot.shape[1]
cpp_area = utils.get_area_def(area_id, area_name, proj_id, proj4, x_size, y_size, extent)

cot_fd = image.ImageContainerQuick(cot, cpp_area)
reff_fd = image.ImageContainerQuick(reff, cpp_area)
cwp_fd = image.ImageContainerQuick(cwp, cpp_area)

# Fog example
germ_scene = GeostationaryFactory.create_scene(satname="meteosat", satnumber='10',
                                               instrument="seviri", time_slot=time)
germ_scene.load(germ_scene.image.fls_day.prerequisites.add('HRV'), area_extent=ger_extent)
#germ_scene.project('euro4', mode="nearest")
#germ_scene.image[0.6].show()

germ_area = germ_scene[10.8].area_def

# Resample fls input
elevation_ger = elevation.resample(germ_area)
cot_ger = cot_fd.resample(germ_area)
reff_ger = reff_fd.resample(germ_area)
cwp_ger = cwp_fd.resample(germ_area)
def scatter_rad_rcz(in_msg): # get date of the last SEVIRI observation if in_msg.datetime is None: in_msg.get_last_SEVIRI_date() yearS = str(in_msg.datetime.year) #yearS = yearS[2:] monthS = "%02d" % in_msg.datetime.month dayS = "%02d" % in_msg.datetime.day hourS = "%02d" % in_msg.datetime.hour minS = "%02d" % in_msg.datetime.minute dateS = yearS + '-' + monthS + '-' + dayS timeS = hourS + '-' + minS if in_msg.sat_nr is None: in_msg.sat_nr = choose_msg(in_msg.datetime, in_msg.RSS) # check if PyResample is loaded try: # Work around for on demand import of pyresample. pyresample depends # on scipy.spatial which memory leaks on multiple imports IS_PYRESAMPLE_LOADED = False from pyresample import geometry from mpop.projector import get_area_def IS_PYRESAMPLE_LOADED = True except ImportError: LOGGER.warning( "pyresample missing. Can only work in satellite projection") if in_msg.datetime.year > 2012: if in_msg.sat_nr == 8: area_loaded = get_area_def("EuropeCanary35") elif in_msg.sat_nr == 9: # rapid scan service satellite area_loaded = get_area_def("EuropeCanary95") elif in_msg.sat_nr == 10: # default satellite area_loaded = get_area_def( "met09globeFull" ) # full disk service, like EUMETSATs NWC-SAF products elif in_msg.sat_nr == 0: # fake satellite for reprojected ccs4 data in netCDF area_loaded = get_area_def("ccs4") # #area_loaded = get_area_def("EuropeCanary") #area_loaded = get_area_def("alps") # new projection of SAM else: print("*** Error, unknown satellite number ", in_msg.sat_nr) area_loaded = get_area_def("hsaf") # else: if in_msg.sat_nr == 8: area_loaded = get_area_def("EuropeCanary95") elif in_msg.sat_nr == 9: # default satellite area_loaded = get_area_def("EuropeCanary") # define contour write for coasts, borders, rivers cw = ContourWriterAGG(in_msg.mapDir) if type(in_msg.sat_nr) is int: sat_nr_str = str(in_msg.sat_nr).zfill(2) elif type(in_msg.sat_nr) is str: sat_nr_str = in_msg.sat_nr else: print("*** Waring, unknown type of sat_nr", type(in_msg.sat_nr)) sat_nr_str = in_msg.sat_nr if in_msg.verbose: print('*** Create plots for ') print(' Satellite/Sensor: ' + in_msg.sat + ' ' + sat_nr_str) print(' Date/Time: ' + dateS + ' ' + hourS + ':' + minS + 'UTC') print(' RGBs: ', in_msg.RGBs) print(' Area: ', in_msg.areas) # check if input data is complete if in_msg.verbose: print("*** check input data") RGBs = check_input(in_msg, in_msg.sat + sat_nr_str, in_msg.datetime) if len(RGBs) != len(in_msg.RGBs): print("*** Warning, input not complete.") print("*** Warning, process only: ", RGBs) # define time and data object global_data = GeostationaryFactory.create_scene(in_msg.sat, sat_nr_str, "seviri", in_msg.datetime) # print "type(global_data) ", type(global_data) # <class 'mpop.scene.SatelliteInstrumentScene'> # print "dir(global_data)", dir(global_data) [..., '__init__', ... 
'area', 'area_def', 'area_id', 'channel_list', 'channels', # 'channels_to_load', 'check_channels', 'fullname', 'get_area', 'image', 'info', 'instrument_name', 'lat', 'load', 'loaded_channels', # 'lon', 'number', 'orbit', 'project', 'remove_attribute', 'satname', 'save', 'set_area', 'time_slot', 'unload', 'variant'] global_data_radar = GeostationaryFactory.create_scene( "swissradar", "", "radar", in_msg.datetime) global_data_radar.load(['precip']) if len(RGBs) == 0: return RGBs if in_msg.verbose: print( "*** load satellite channels for " + in_msg.sat + sat_nr_str + " ", global_data.fullname) # initialize processed RGBs RGBs_done = [] # load all channels / information for rgb in RGBs: if in_msg.verbose: print(" load prerequisites for: ", rgb) if rgb in products.MSG or rgb in products.MSG_color: for channel in products.MSG: if rgb.find( channel ) != -1: # if a channel name (IR_108) is in the rgb name (IR_108c) if in_msg.verbose: print(" load prerequisites by name: ", channel) if in_msg.reader_level is None: global_data.load( [channel], area_extent=area_loaded.area_extent ) # try all reader levels load the corresponding data else: global_data.load([channel], area_extent=area_loaded.area_extent, reader_level=in_msg.reader_level ) # load the corresponding data if rgb in products.RGBs_buildin or rgb in products.RGBs_user: obj_image = get_image(global_data, rgb) # find corresponding RGB image object if in_msg.verbose: print(" load prerequisites by function: ", obj_image.prerequisites) global_data.load( obj_image.prerequisites, area_extent=area_loaded.area_extent) # load prerequisites if rgb in products.CMa or rgb in products.CT or rgb in products.CTTH or rgb in products.SPhR: if rgb in products.CMa: pge = "CloudMask" elif rgb in products.CT: pge = "CloudType" elif rgb in products.CTTH: pge = "CTTH" elif rgb in products.SPhR: pge = "SPhR" else: print("*** Error in scatter_rad_rcz (" + inspect.getfile(inspect.currentframe()) + ")") print(" unknown NWC-SAF PGE ", rgb) quit() if in_msg.verbose: print(" load NWC-SAF product: " + pge) global_data.load( [pge], calibrate=in_msg.nwcsaf_calibrate, reader_level="seviri-level3" ) # False, area_extent=area_loaded.area_extent (difficulties to find correct h5 input file) #print global_data.loaded_channels() #loaded_channels = [chn.name for chn in global_data.loaded_channels()] #if pge not in loaded_channels: # return [] if area_loaded != global_data[pge].area: print("*** Warning: NWC-SAF input file on a differnt grid (" + global_data[pge].area.name + ") than suggested input area (" + area_loaded.name + ")") print(" use " + global_data[pge].area.name + " as standard grid") area_loaded = global_data[pge].area convert_NWCSAF_to_radiance_format(global_data, area_loaded, rgb, IS_PYRESAMPLE_LOADED) if rgb in products.HSAF: if in_msg.verbose: print(" load hsaf product by name: ", rgb) global_data.load( [rgb] ) # , area_extent=area_loaded.area_extent load the corresponding data if in_msg.HRV_enhancement: # load also the HRV channel (there is a check inside in the load function, if the channel is already loaded) if in_msg.verbose: print( " load additionally the HRV channel for HR enhancement") global_data.load(["HRV"], area_extent=area_loaded.area_extent) # loaded_channels = [chn.name for chn in global_data.loaded_channels()] # print loaded_channels # check if all prerequisites are loaded #rgb_complete = [] #for rgb in RGBs: # all_loaded = True # if rgb in products.RGBs_buildin or rgb in products.RGB_user: # obj_image = get_image(global_data, rgb) # for pre in 
obj_image.prerequisites: # if pre not in loaded_channels: # all_loaded = False # elif rgb in products.MSG_color: # if rgb.replace("c","") not in loaded_channels: # all_loaded = False # else: # if rgb not in loaded_channels: # all_loaded = False # if all_loaded: # rgb_complete.append(rgb) #print "rgb_complete", rgb_complete # preprojecting the data to another area # -------------------------------------- for area in in_msg.areas: print("") obj_area = get_area_def(area) if obj_area == area_loaded: if in_msg.verbose: print("*** Use data for the area loaded: ", area) #obj_area = area_loaded data = global_data resolution = 'l' else: if in_msg.verbose: print("*** Reproject data to area: ", area, "(org projection: ", area_loaded.name, ")") obj_area = get_area_def(area) # PROJECT data to new area data = global_data.project(area) resolution = 'i' if in_msg.mapResolution is None: if area.find("EuropeCanary") != -1: resolution = 'l' if area.find("ccs4") != -1: resolution = 'i' if area.find("ticino") != -1: resolution = 'h' else: resolution = in_msg.mapResolution # define area proj4_string = obj_area.proj4_string # e.g. proj4_string = '+proj=geos +lon_0=0.0 +a=6378169.00 +b=6356583.80 +h=35785831.0' area_extent = obj_area.area_extent # e.g. area_extent = (-5570248.4773392612, -5567248.074173444, 5567248.074173444, 5570248.4773392612) area_tuple = (proj4_string, area_extent) # save reprojected data if area in in_msg.save_reprojected_data: # and area != area_loaded _sat_nr = int(data.number) - 7 if int(data.number) - 7 > 0 else 0 nc_dir = ( global_data.time_slot.strftime(in_msg.reprojected_data_dir) % { "area": area, "msg": "MSG" + str(_sat_nr) }) nc_file = (global_data.time_slot.strftime( in_msg.reprojected_data_filename) % { "area": area, "msg": "MSG" + str(_sat_nr) }) ncOutputFile = nc_dir + nc_file # check if output directory exists, if not create it path = dirname(ncOutputFile) if not exists(path): if in_msg.verbose: print('... create output directory: ' + path) makedirs(path) if in_msg.verbose: print("... save reprojected data: ncview " + ncOutputFile + " &") #data.save(ncOutputFile, to_format="netcdf4", compression=False) data.save(ncOutputFile, band_axis=0, concatenate_bands=False) # mask for the cloud depths tests (masked data) #if area == 'ccs4': if area == False: print('... 
apply convective mask') mask_depth = data.image.mask_clouddepth() #print type(mask_depth.max) #print dir(mask_depth.max) index = where( mask_depth < 5) # less than 5 (of 6) tests successfull -> mask out for rgb in RGBs: if rgb in products.MSG_color: rgb2 = rgb.replace("c", "") data[rgb2].data.mask[index] = True fill_value = data[rgb2].data.fill_value #data["IR_108"].data[index] = fill_value #print "data[IR_108].data.min/max ", data["IR_108"].data.min(), data["IR_108"].data.max() #if rgb == "IR_108c": # print type(data["IR_108"].data) # print dir(data["IR_108"].data) #print data["IR_108"].data.mask # save average values if in_msg.save_statistics: mean_array = zeros(len(RGBs)) #statisticFile = '/data/COALITION2/database/meteosat/ccs4/'+yearS+'/'+monthS+'/'+dayS+'/MSG_'+area+'_'+yearS[2:]+monthS+dayS+'.txt' statisticFile = './' + yearS + '-' + monthS + '-' + dayS + '/MSG_' + area + '_' + yearS[ 2:] + monthS + dayS + '.txt' if in_msg.verbose: print("*** write statistics (average values) to " + statisticFile) f1 = open(statisticFile, 'a') # mode append i_rgb = 0 for rgb in RGBs: if rgb in products.MSG_color: mean_array[i_rgb] = data[rgb.replace("c", "")].data.mean() i_rgb = i_rgb + 1 # create string to write str2write = dateS + ' ' + hourS + ' : ' + minS + ' UTC ' for mm in mean_array: str2write = str2write + ' ' + "%7.2f" % mm str2write = str2write + "\n" f1.write(str2write) f1.close() print("y.shape ", global_data_radar['precip'].data.shape) from numpy import copy y = copy(global_data_radar['precip'].data) y = y.ravel() print("y.shape ", y.shape) if 1 == 0: if 'X' in locals(): del X from numpy import column_stack, append, concatenate for rgb in RGBs: # poor mans parallax correction if rgb in products.MSG_color: rgb2 = rgb.replace("c", "") else: rgb2 = rgb x1 = data[rgb2].data.ravel() if 'X' not in locals(): X = x1 X = [X] else: concatenate((X, [x1]), axis=0) print("X.shape ", X.shape) X = append(X, [[1] * len(x1)], axis=1) print("y.shape ", y.shape) #theta = np.linalg.lstsq(X,y)[0] return ind_gt_1 = y > 1 x = x[ind_gt_1] y = y[ind_gt_1] ind_lt_200 = y < 200 x = x[ind_lt_200] y = y[ind_lt_200] #ind_gt_0 = x>0 #x = x[ind_gt_0] #y = y[ind_gt_0] #X = np.column_stack(x+[[1]*len(x[0])]) #beta_hat = np.linalg.lstsq(X,y)[0] #print beta_hat #X_hat= np.dot(X,theta) #y_hat = X_hat.reshape((640, 710)) # creating plots/images if in_msg.make_plots: ind_cloudy = data['CTH'].data > 0 ind_clear = data['CTH'].data <= 0 ind_cloudy = ind_cloudy.ravel() for rgb in RGBs: if rgb in products.MSG_color: rgb2 = rgb.replace("c", "") else: rgb2 = rgb if rgb == 'ir108': rgb2 = 'IR_108' # poor mans parallax correction if 1 == 0: print("... poor mans parallax correction") data[rgb2].data[25:640, :] = data[rgb2].data[0:615, :] #data[rgb2].data[15:640,:] = data[rgb2].data[0:625,:] data[rgb2].data[:, 0:700] = data[rgb2].data[:, 10:710] # create output filename outputDir = format_name(in_msg.outputDir, data.time_slot, area=area, rgb=rgb, sat_nr=data.number) outputFile = outputDir + format_name(in_msg.outputFile, data.time_slot, area=area, rgb=rgb, sat_nr=data.number) PIL_image = create_PIL_image( rgb, data, in_msg ) # !!! in_msg.colorbar[rgb] is initialized inside (give attention to rgbs) !!! 
if 1 == 1: y = copy(global_data_radar['precip'].data) ind_gt_300 = y > 300 # replace no rain marker with 0mm/h y[ind_gt_300] = 0 y = y.ravel() print("y.shape ", y.shape) x = copy(data[rgb2].data) x = x.ravel() ## get rid of clear sky x = x[ind_cloudy] y = y[ind_cloudy] #ind_gt_01 = x>0.1 #x = x[ind_gt_01] #y = y[ind_gt_01] # get rid of no rain limits for rainfall ind_gt_01 = y > 0.1 x = x[ind_gt_01] y = y[ind_gt_01] ind_lt_300 = y < 300 x = x[ind_lt_300] y = y[ind_lt_300] plt.figure() plt.title('Scatterplot precipitation - radiance') plt.xlabel(rgb) plt.ylabel('precipitation in mm/h') plt.scatter(x, y) #, s=area, c=colors, alpha=0.5 outputDir = format_name(in_msg.outputDir, data.time_slot, area=area, rgb=rgb, sat_nr=data.number) outputFileScatter = outputDir + format_name( 'scatterplot_%(area)s_%Y%m%d%H%M_%(rgb)s_precip_pc.png', data.time_slot, area=area, rgb=rgb, sat_nr=data.number) #plt.show() from numpy import arange x_line = arange(x.min(), x.max(), 1) print("*** display " + outputFileScatter + " &") from numpy import ones, linalg, array print(x.min(), x.max(), y.min(), y.max()) A = array([x, ones(x.size)]) w = linalg.lstsq(A.T, y)[0] # obtaining the parameters y_line = w[0] * x_line + w[1] # regression line #--- #from scipy import stats #slope, intercept, r_value, p_value, std_err = stats.linregress(x,y) #print "slope, intercept, r_value, p_value, std_err" #print slope, intercept, r_value, p_value, std_err #y_line = slope*x_line + intercept from pylab import plot plot(x_line, y_line, 'r-') plt.savefig(outputFileScatter) y_hat = w[0] * data[rgb2].data + w[1] print("y_hat.shape: ", y_hat.shape) # set clear sky to 0 y_hat[ind_clear] = 0 y_hat = ma.asarray(y_hat) y_hat.mask = (y_hat == 9999.9) | (y_hat <= 0.0001) from trollimage.colormap import RainRate colormap = rainbow min_data = 0.0 #max_data=y_hat.max() max_data = 8 colormap.set_range(min_data, max_data) #colormap = RainRate in_msg.colormap[rgb] = colormap units = 'mm/h' img = trollimage(y_hat, mode="L") img.colorize(in_msg.colormap[rgb]) in_msg.colormap[rgb] = colormap.reverse() PIL_image = img.pil_image() outputFile = outputDir + format_name( 'fit_%(area)s_%Y%m%d%H%M_%(rgb)s_precip.png', data.time_slot, area=area, rgb=rgb, sat_nr=data.number) #PIL_image.save(outputFile) ## add coasts, borders, and rivers, database is heree ## http://www.soest.hawaii.edu/pwessel/gshhs/index.html ## possible resolutions ## f full resolution: Original (full) data resolution. ## h high resolution: About 80 % reduction in size and quality. ## i intermediate resolution: Another ~80 % reduction. ## l low resolution: Another ~80 % reduction. ## c crude resolution: Another ~80 % reduction. 
if in_msg.add_rivers: if in_msg.verbose: print(" add rivers to image (resolution=" + resolution + ")") cw.add_rivers(PIL_image, area_tuple, outline='blue', resolution=resolution, outline_opacity=127, width=0.5, level=5) # if in_msg.verbose: print(" add lakes to image (resolution=" + resolution + ")") cw.add_coastlines(PIL_image, area_tuple, outline='blue', resolution=resolution, outline_opacity=127, width=0.5, level=2) #, outline_opacity=0 if in_msg.add_borders: if in_msg.verbose: print(" add coastlines to image (resolution=" + resolution + ")") cw.add_coastlines(PIL_image, area_tuple, outline=(255, 0, 0), resolution=resolution, width=1) #, outline_opacity=0 if in_msg.verbose: print(" add borders to image (resolution=" + resolution + ")") cw.add_borders(PIL_image, area_tuple, outline=(255, 0, 0), resolution=resolution, width=1) #, outline_opacity=0 #if area.find("EuropeCanary") != -1 or area.find("ccs4") != -1: dc = DecoratorAGG(PIL_image) # add title to image if in_msg.add_title: PIL_image = add_title(PIL_image, rgb, int(data.number), dateS, hourS, minS, area, dc, in_msg.font_file, in_msg.verbose) # add MeteoSwiss and Pytroll logo if in_msg.add_logos: if in_msg.verbose: print('... add logos') dc.align_right() if in_msg.add_colorscale: dc.write_vertically() dc.add_logo("../logos/meteoSwiss3.jpg", height=60.0) dc.add_logo("../logos/pytroll3.jpg", height=60.0) # add colorscale if in_msg.add_colorscale and in_msg.colormap[rgb] is not None: dc.align_right() dc.write_vertically() font_scale = aggdraw.Font( "black", "/usr/share/fonts/truetype/ttf-dejavu/DejaVuSerif-Bold.ttf", size=16) # get tick marks tick_marks = 20 # default minor_tick_marks = 5 # default if rgb in list(in_msg.tick_marks.keys()): tick_marks = in_msg.tick_marks[rgb] if rgb in list(in_msg.minor_tick_marks.keys()): minor_tick_marks = in_msg.minor_tick_marks[rgb] if rgb.find( "-" ) != -1: # for channel differences use tickmarks of 1 tick_marks = 1 minor_tick_marks = 1 tick_marks = 2 # default minor_tick_marks = 1 # default if in_msg.verbose: print('... add colorscale') dc.add_scale(in_msg.colormap[rgb], extend=True, tick_marks=tick_marks, minor_tick_marks=minor_tick_marks, font=font_scale, line_opacity=100) #, unit='T / K' ## test to plot a wind barb #import matplotlib.pyplot as plt #ax = plt.axes(PIL_image) #ax.barbs(0, 0, 20, 20, length=8, pivot='middle', barbcolor='red') #ax.barbs(8, 46, 20, 20, length=8, pivot='middle', barbcolor='red') # check if output directory exists, if not create it path = dirname(outputFile) if not exists(path): if in_msg.verbose: print('... create output directory: ' + path) makedirs(path) # save file if in_msg.verbose: print('... save final file :' + outputFile) PIL_image.save(outputFile, optimize=True) # optimize -> minimize file size if in_msg.compress_to_8bit: if in_msg.verbose: print('... compress to 8 bit image: display ' + outputFile.replace(".png", "-fs8.png") + ' &') subprocess.call("/usr/bin/pngquant -force 256 " + outputFile + " 2>&1 &", shell=True) # 256 == "number of colors" #if in_msg.verbose: # print " add coastlines to "+outputFile ## alternative: reopen image and modify it (takes longer due to additional reading and saving) #cw.add_rivers_to_file(img, area_tuple, level=5, outline='blue', width=0.5, outline_opacity=127) #cw.add_coastlines_to_file(outputFile, obj_area, resolution=resolution, level=4) #cw.add_borders_to_file(outputFile, obj_area, outline=outline, resolution=resolution) # copy to another place if in_msg.scpOutput: if in_msg.verbose: print("... 
secure copy " + outputFile + " to " + in_msg.scpOutputDir) subprocess.call("scp " + in_msg.scpID + " " + outputFile + " " + in_msg.scpOutputDir + " 2>&1 &", shell=True) if in_msg.compress_to_8bit: if in_msg.verbose: print("... secure copy " + outputFile.replace(".png", "-fs8.png") + " to " + in_msg.scpOutputDir) subprocess.call( "scp " + in_msg.scpID + " " + outputFile.replace(".png", "-fs8.png") + " " + in_msg.scpOutputDir + " 2>&1 &", shell=True) if rgb not in RGBs_done: RGBs_done.append(rgb) ## start postprocessing if area in in_msg.postprocessing_areas: postprocessing(in_msg, global_data.time_slot, data.number, area) if in_msg.verbose: print(" ") return RGBs_done
def properties_cells(t1, tStop, current_labels=None, metadata=None, labels_dir=None, outputDir_labels=None, in_msg=None, sat_data=None): rgb_load = [ 'WV_062', 'WV_073', 'IR_039', 'IR_087', 'IR_097', 'IR_108', 'IR_120', 'IR_134' ] #,'CTP','CTT'] #rgb_out = 'WV_062minusIR_108' only_obs_noForecast = False rapid_scan_mode = True #if only_obs_noForecast == True: # in_dir = '/opt/users/'+in_msg.user+'/PyTroll/scripts//Mecikalski_obs/cosmo/Channels/labels/' #elif rapid_scan_mode == True: # in_dir = '/opt/users/'+in_msg.user+'/PyTroll/scripts//Mecikalski_RapidScan/cosmo/Channels/labels//' #else: # in_dir = '/opt/users/'+in_msg.user+'/PyTroll/scripts//Mecikalski/cosmo/Channels/labels/' # load a few standard things if in_msg is None: print("*** Error, in property_cells (property_cells)") print(" no input class passed as argument") quit() from get_input_msg import get_input_msg in_msg = get_input_msg('input_template') in_msg.resolution = 'i' in_msg.sat_nr = 9 in_msg.add_title = False in_msg.outputDir = './pics/' in_msg.outputFile = 'WS_%(rgb)s-%(area)s_%y%m%d%H%M' in_msg.fill_value = [0, 0, 0] # black in_msg.reader_level = "seviri-level4" # satellite for HRW winds sat_nr = "08" #in_windshift.sat_nr area = "ccs4" #c2"#"ccs4" #in_windshift.ObjArea # define area object obj_area = get_area_def(area) #(in_windshift.ObjArea) # define area proj4_string = obj_area.proj4_string # e.g. proj4_string = '+proj=geos +lon_0=0.0 +a=6378169.00 +b=6356583.80 +h=35785831.0' area_extent = obj_area.area_extent # e.g. area_extent = (-5570248.4773392612, -5567248.074173444, 5567248.074173444, 5570248.4773392612) area_tuple = (proj4_string, area_extent) mean_108_evolution = [] area34 = [] split34 = [] merge34 = [] t_start34 = 0 t_end34 = 0 lonely_cells = 0 cell_interesting = 77 count_double = 0 #labels_dir = '/data/cinesat/out/labels/' if labels_dir is None: labels_dir = '/opt/users/' + in_msg.user + '/PyTroll/scripts/labels/' #compatible to all users print("... use default directory to save labels: " + labels_dir) # loop over time while t1 <= tStop: print(in_msg.sat, str(in_msg.sat_nr), "seviri", t1) if sat_data is None: # now read the data we would like to forecast global_data = GeostationaryFactory.create_scene( in_msg.sat, str(in_msg.sat_nr), "seviri", t1) #global_data_RGBforecast = GeostationaryFactory.create_scene(in_msg.sat, str(10), "seviri", time_slot) # area we would like to read area_loaded = get_area_def( "EuropeCanary95") #(in_windshift.areaExtraction) # load product, global_data is changed in this step! area_loaded = load_products(global_data, rgb_load, in_msg, area_loaded) print('... 
project data to desired area ', area) data = global_data.project(area, precompute=True) else: data = sat_data yearS = str(t1.year) monthS = "%02d" % t1.month dayS = "%02d" % t1.day hourS = "%02d" % t1.hour minS = "%02d" % t1.minute nx, ny = data[rgb_load[0]].data.shape # create array for all channel values values_rgb = np.zeros((len(rgb_load), nx, ny)) # copy all observations/channels into one large numpy array for rrgb in range(len(rgb_load)): values_rgb[rrgb, :, :] = deepcopy( data[rgb_load[rrgb]].data) #-data_108[rgb_load[1]].data if current_labels is None: print("--- reading labels from shelve files") filename = labels_dir + 'Labels_%s.shelve' % (yearS + monthS + dayS + hourS + minS) myShelve = shelve.open(filename) data1 = deepcopy(myShelve['labels']) metadata = deepcopy(myShelve['metadata']) myShelve.close() else: print("--- recieving labels from plot_coaltion2") data1 = deepcopy(current_labels) data_new = np.zeros(data1.shape) all_cells = {} # t0 is 5min before t1 t0 = t1 - timedelta(minutes=5) year0S = str(t0.year) month0S = "%02d" % t0.month day0S = "%02d" % t0.day hour0S = "%02d" % t0.hour min0S = "%02d" % t0.minute file_previous_labels = labels_dir + 'Labels_%s*' % ( year0S + month0S + day0S + hour0S + min0S) filename1 = glob.glob(file_previous_labels) print("the previous filename is: ", filename1) if t0.hour == 0 and t0.minute == 0: check_date = True else: check_date = False if len(filename1) > 0 or check_date: first_time_step = False else: first_time_step = True if first_time_step: # these labels are random numbers assigned in COALITION2 (different number for each cell) data0 = np.array(data1, 'uint32') labels0 = np.unique(data0[data0 > 0]) id_data = yearS + monthS + dayS + hourS + minS #list_id = [] # loop over all cell labels for i in range(1, len(labels0) + 1): #create a mask which has 1s only where the current cell is mask_current_label = np.zeros(data1.shape) mask_current_label = np.where(data1 == i, 1, 0) # calculate: coordinates center of mass center = ndimage.measurements.center_of_mass( mask_current_label) center = np.rint(center) # calculate means of the satellite channels (brightness temperatures) values1 = [] for rrgb in range(len(rgb_load)): these = values_rgb[rrgb, :, :] values_cell = these[np.where(mask_current_label == 1)] values1.append(values_cell.mean()) # take i as cell id and save cells properties all_cells["ID" + str(i)] = Cells() all_cells["ID" + str(i)].t_start = [ t1.year, t1.month, t1.day, t1.hour, t1.minute ] # True all_cells["ID" + str( i )].origin = "t0" # "start_programm", "day_before", "merge", "split", "enters_area", "appear" all_cells["ID" + str(i)].mean108 = values1 all_cells["ID" + str(i)].area_px = sum(sum(mask_current_label)) data_new = deepcopy(data0) else: # read cell labels from previous time step t0 id_data0 = year0S + month0S + day0S + hour0S + min0S file_previous_labels = labels_dir + 'Labels_%s.shelve' % ( year0S + month0S + day0S + hour0S + min0S) myShelve = shelve.open(file_previous_labels) data0 = deepcopy(myShelve['labels']) myShelve.close() # extract unique cell labels corresponding to the ID at t0 data0 = np.array(data0, 'uint32') labels0 = np.unique( data0[data0 > 0]) # this might be an empty tuple [] !HAU! 
print("this should match with output previous step \n", labels0) connections = [] for con in labels0: connections.append(["ID" + str(con)]) # total number of cell at t0 if len(labels0) == 0: new_id_num = 0 else: new_id_num = labels0.max() + 1 # this does not work for [] #these labels are random numbers assigned in COALITION2 (different number for each cell) data1 = np.array(data1, 'uint32') labels1 = np.unique(data1) # this might be an empty [] !HAU! # new id number for the new cells at t1 if labels0.size == 0: new_id_num = 1 else: try: new_id_num = labels0.max() + 1 except ValueError: print("labels0: ", labels0) print(type(labels0)) print("quitting in properties_cells line 397") quit() #list to make sure you record every split list_previous = [] # loop through cells at t1 for i in labels1: #range(1,len(labels1)+1): if i != 0: #required to correct the output "data_new" if the ID of a cell changes because a bigger cell takes it!!! correct_id_already_created = 0 #create a mask which has 1s only where the current cell is mask_current_label = np.zeros(data1.shape) mask_current_label = np.where(data1 == i, 1, 0) #store coordinates center of mass center = ndimage.measurements.center_of_mass( mask_current_label) center = np.rint(center) values1 = [] for rrgb in range(len(rgb_load)): these = values_rgb[rrgb, :, :] values_cell = these[np.where(mask_current_label == 1)] values1.append(values_cell.mean()) ## put calculation of mean value in a function (and also consider more properties later) #take the values of the 10.8 channel for the current cell #values1 = values_interest[np.where(mask_current_label == 1)] # consider the area of the current cell in the previous time step (TEST OVERLAPPING) previous_t = data0 * mask_current_label # store the ID number of all the overlapping cells at t0 !!! 
(change to minimum overlapping to consider them) labels_previous = np.unique(previous_t[previous_t > 0]) ##### new cell with no correspondence in previous time step ##### if len(labels_previous) == 0: #Store the values for the current cell, with the new ID all_cells["ID" + str(new_id_num)] = Cells() all_cells["ID" + str(new_id_num)].t_start = [ t1.year, t1.month, t1.day, t1.hour, t1.minute ] # True #check if the cell appeared in the middle of the area or came from outside the domain if check_position(mask_current_label): all_cells["ID" + str(new_id_num)].origin = "from_outside" else: all_cells["ID" + str(new_id_num)].origin = "appear" all_cells["ID" + str(new_id_num)].mean108 = values1 #.mean() all_cells["ID" + str(new_id_num)].area_px = sum( sum(mask_current_label)) #store the ID number which will be used to create the data_new (with numbers corresponding to ID cell) for next time step label_current = new_id_num new_id_num += 1 ##### cell with one correspondence in previous time step ##### elif len(labels_previous) == 1: #check if a cell exists at current time already with the same ID (derived from same cell at previous time step) if check_cell_same_ID( all_cells, "ID" + str(labels_previous[0]) ): #if "ID" + str(labels_previous[0]) in all_cells.keys(): id_current, id_samePrevious, correct_id_already_created, label_current, all_cells = define_IDs_cell_same_ID( all_cells, mask_current_label, labels_previous[0], new_id_num) #if correct_id_already_created != 0: # connections = correct_connections(connections, id_samePrevious, all_cells, id_current) new_id_num += 1 # If there is no cell with that ID yet, the current cell gets it else: id_current = "ID" + str(labels_previous[0]) all_cells[id_current] = Cells() #store the ID number which will be used to create the data_new (with numbers corresponding to ID cell) for next time step label_current = labels_previous[0] #Store the values for the current cell all_cells[id_current].origin = "from_previous" all_cells[id_current].id_prev = [ "ID" + str(labels_previous[0]) ] all_cells[id_current].area_px = sum( sum(mask_current_label)) all_cells[id_current].mean108 = values1 #.mean() """ lc=0 for con in range(len(connections)): if connections[con][0] == "ID" + str(labels_previous[0]): print "id_current",id_current lc+=1 connections[con].append(id_current) if lc == 0: lonely_cells+=1 """ #add the label of the previous cell (t0) which will be used at the end to make sure all split are recognized list_previous.append(labels_previous[0]) ##### cell with more then one correspondence in previous time step ##### else: largest_previous = labels_previous[0] max_tot_px = 0 #scan through the cells the current comes from and look for the biggest (you'll use that ID num) for h in range(len(labels_previous)): current_label = labels_previous[h] count_px = np.where(data0 == current_label, 1, 0) tot_px = sum(sum(count_px)) if tot_px > max_tot_px: largest_previous = current_label max_tot_px = tot_px #add the label of the previous cell (t0) which will be used at the end to make sure all split are recognized list_previous.append(current_label) """ lc = 0 for con in range(len(connections)): if connections[con][0] == "ID" + str(labels_previous[h]): connections[con].append("ID" + str(current_label)) lc +=1 if lc == 0: lonely_cells +=1 """ id_current = "ID" + str(largest_previous) if check_cell_same_ID( all_cells, id_current ): #if "ID" + str(labels_previous[0]) in all_cells.keys(): id_current, id_samePrevious, correct_id_already_created, label_current, all_cells = 
define_IDs_cell_same_ID( all_cells, mask_current_label, largest_previous, new_id_num) #if correct_id_already_created != 0: # connections = correct_connections(connections, id_samePrevious, all_cells, id_current) new_id_num = new_id_num + 1 else: label_current = largest_previous id_current = "ID" + str(largest_previous) all_cells[id_current] = Cells() all_cells[id_current].mean108 = values1 #.mean() all_cells[id_current].origin = "merge" all_cells[id_current].area_px = sum( sum(mask_current_label)) all_cells[id_current].id_prev = [ "ID" + str(labels_previous[lp]) for lp in range(len(labels_previous)) ] print("more correspondence ", ("ID" + str(largest_previous)), "coming from ", [ "ID" + str(labels_previous[lp]) for lp in range(len(labels_previous)) ]) if correct_id_already_created != 0: data_new[data_new == label_current] = correct_id_already_created data_new[mask_current_label == 1] = label_current all_cells["ID" + str(label_current)].center = center #identify labels the current cells are created from that are repeated (meaning the cell split) labels_repeated = np.unique([ "ID" + str(x) for x in list_previous if list_previous.count(x) > 1 ]) #make sure that the cells that come from splitting cells get a split for items in all_cells: item = all_cells[items] if item.split != 1: for n_prev in range(len(item.id_prev)): if item.id_prev[n_prev] in labels_repeated: item.split = 1 labels, numobjects = ndimage.label(data_new) print("....starting updating cells") if outputDir_labels is not None: make_figureLabels(deepcopy(data_new), all_cells, obj_area, outputDir_labels, colorbar=False, vmin=False, vmax=False, white_background=True, t=t1) data_new = data_new.astype( 'uint32' ) #unsigned char int https://docs.python.org/2/library/array.html filename = labels_dir + 'Labels_%s.shelve' % (yearS + monthS + dayS + hourS + minS) myShelve = shelve.open(filename) myShelve['labels'] = deepcopy(data_new) myShelve.close() filenames_for_permission = glob.glob( labels_dir + 'Labels_%s*' % (yearS + monthS + dayS + hourS + minS)) for file_per in filenames_for_permission: print(("modified permission: ", file_per)) os.chmod(file_per, 0o664) ## FOR PYTHON3: 0o664 print(("....updated cells labels", filename)) list_cells = list(all_cells.keys()) for cell_connection in list_cells: ancestors = all_cells[cell_connection].id_prev for ancestor in ancestors: for con in range(len(connections)): if connections[con][0] == ancestor: connections[con].append(cell_connection) filename = labels_dir + 'Labels_%s.shelve' % ( year0S + month0S + day0S + hour0S + min0S) d = shelve.open(filename) d['connections'] = deepcopy(connections) d.close() print(("....updated cells connections", labels_dir + 'Labels_%s.shelve' % (year0S + month0S + day0S + hour0S + min0S))) filenames_for_permission = glob.glob( labels_dir + 'Labels_%s*' % (year0S + month0S + day0S + hour0S + min0S)) for file_per in filenames_for_permission: os.chmod(file_per, 0o664) ## FOR PYTHON3: 0o664 print("....starting updating cells") filename = create_dir(labels_dir + 'Labels_%s.shelve' % (yearS + monthS + dayS + hourS + minS)) myShelve = shelve.open(filename) dict_cells = { 'cells': all_cells, 'labels': data_new, 'metadata': metadata } myShelve.update(dict_cells) # close the shelve myShelve.close() print("....updated all cells") filenames_for_permission = glob.glob( labels_dir + 'Labels_%s*' % (yearS + monthS + dayS + hourS + minS)) for file_per in filenames_for_permission: print(("modified permission: ", file_per)) os.chmod(file_per, 0o664) ## FOR PYTHON3: 0o664 t1 = t1 + 
timedelta(minutes=5) return data_new, first_time_step
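# The Cells() container used throughout properties_cells() is defined elsewhere in this
# code base. The minimal sketch below only reflects the attributes that properties_cells
# actually assigns or reads; the default values are assumptions, not the original class.
class Cells(object):
    """Plain container for the per-cell properties tracked by properties_cells()."""
    def __init__(self):
        self.t_start = None    # [year, month, day, hour, minute] of first detection
        self.origin = None     # e.g. "t0", "appear", "from_outside", "from_previous", "merge"
        self.id_prev = []      # IDs of the overlapping cells at the previous time step
        self.mean108 = []      # mean brightness temperature per loaded channel
        self.area_px = 0       # cell area in pixels
        self.center = None     # center of mass (row, col)
        self.split = 0         # set to 1 if the cell results from a splitting cell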
def plot_forecast_area(ttt, model, outputDir, current_labels = None, t_stop=None, BackgroundFile=None, ForeGroundRGBFile=None, labels_dir = '/opt/users/'+getpass.getuser()+'/PyTroll/scripts/labels/', in_msg = None): verbose = True if t_stop is None: t_stop = ttt ylabel = "area" while ttt <= t_stop: yearS, monthS, dayS, hourS, minS = string_date(ttt) if verbose: print("******** read cell properties from shelve") if current_labels is None: yearS, monthS, dayS, hourS, minS = string_date(ttt) filename = 'Labels_%s.shelve'%(yearS+monthS+dayS+hourS+minS) myShelve = shelve.open(filename) labels_all = deepcopy(myShelve['labels']) else: labels_all = deepcopy(current_labels) if verbose: print(labels_all) unique_labels = np.unique(labels_all[labels_all>0]) if verbose: print(("... cells with unique labels: ", unique_labels)) forecasted_labels = {} forecasted_areas = [] at_least_one_cell = False if verbose: print("*** computing history backward (", labels_dir, ")") for interesting_cell in unique_labels: forecasted_labels["ID"+str(interesting_cell)]=[] # calculate backward history for 1 hour and save it in labels_dir ind, area, displacement, time, center = history_backward(ttt, interesting_cell, True, in_msg, ttt-timedelta(hours = 1), labels_dir=labels_dir) #-timedelta(minutes = 10)) # current time, cell_id, backward? time_stop if area is None or len(area)<=1: if verbose: print("new cell or cell with COM outside domain") continue at_least_one_cell = True if len(area)<=3: # if history is too short, use linear extrapolation t, y = future_properties(time, area, ylabel, "linear") else: t, y = future_properties(time, area, ylabel, model) if False: ind1, area1, displacement1, time1, center = history_backward(ttt, interesting_cell, False, ttt+timedelta(hours=1), labels_dir=labels_dir) print("******** computed history forward") t2 = time1 #[::-1] y2 = area1 #[::-1] nx,ny = labels_all.shape #if verbose: # print(nx,ny) label_cell = np.zeros(labels_all.shape) label_cell[labels_all==interesting_cell] = 1 #pickle.dump(label_cell, open("test_label.p", "wb" ) ) #quit() dt = 0 if False: figure_labels(label_cell, outputDir, ttt, dt, area_plot="ccs4", add_name = "_ID"+str(interesting_cell), verbose=verbose) area_current = sum(sum(label_cell)) forecasted_areas.append(area_current) indx = np.where(t==ttt)[0] + 1 if verbose: print("*** compute displacement ") if displacement.shape[1]==2: if len(displacement) == 0: dx = 0 dy = 0 else: try: dx = int(round(displacement[:,0].mean())) dy = int(round(displacement[:,1].mean())) except ValueError: print("VALUE ERROR") print(displacement) quit() print(" computed displacement dx, dy = ", dx, dy) else: print("wrong displacement") quit() labels_in_time={} index_stop = 12 print(("*** calculate forecasts for cell ID"+str(interesting_cell))) if verbose: print("index time area growth") print("----------------------------") for i in range(13): dt += 5 #if verbose: # print("... 
for time ", dt ,", index ", indx + i) if indx+i >= len(y): index_stop = deepcopy(i) break else: area_new = y[indx+i] area_prev = y[indx+i-1] #if verbose: # print("area px that will be grown ", area_current) # print("area forecasted ", area_new) # print("area forecasted prev ", area_prev) ###growth = sqrt(float(area_new)/float(area_current)) if area_new < 0 or len(area_new)==0 or len(area_prev)==0: if verbose: print("the cell is predicted to disappear") index_stop = deepcopy(i) break growth = sqrt(float(area_new)/float(area_prev)) #if verbose: # print("growing by ", growth) # print("dx ", dx) # print("dy ", dy) if verbose: print((indx + i, dt, area_new, growth)) #figure_labels(label_cell, outputDir, ttt, dt, area_plot="ccs4", add_name = "before") shifted_label = resize_array(label_cell, dx, dy, nx, ny) #figure_labels(shifted_label, outputDir, ttt, dt, area_plot="ccs4", add_name = "before_shifted") #quit() if verbose: print((" after shift ", sum(sum(shifted_label)))) if sum(sum(shifted_label))==0: #the cell is outside the domain break #center of mass before resizing center_before = ndimage.measurements.center_of_mass(shifted_label) center_before = np.rint(center_before) #if verbose: # print(" after shift ", sum(sum(shifted_label))) resized_label = scipy.misc.imresize(shifted_label,float(growth),'nearest') resized_label[resized_label >0] = 1 temp_label = np.zeros((nx,ny)) #after resizing, the array is larger/smaller than nx,ny --> create new array that contains all the label region if resized_label.shape[0]<nx: temp_label[0:resized_label.shape[0],0:resized_label.shape[1]] = deepcopy(resized_label) else: x_start = max(min(np.nonzero(resized_label)[0])-1,0) y_start = max(min(np.nonzero(resized_label)[1])-1,0) temp_label[0:min(nx,resized_label.shape[0]-x_start),0:min(ny,resized_label.shape[1]-y_start)] = deepcopy(resized_label[x_start:min(x_start+nx,resized_label.shape[0]),y_start:min(y_start+ny,resized_label.shape[1])]) #if verbose: # print(np.unique(temp_label)) # print(" after resize ", sum(sum(temp_label))) #figure_labels(resized_label, outputDir, ttt, dt, area_plot="ccs4", add_name = "before_shifted_resized") #center of mass after resizing center_after = ndimage.measurements.center_of_mass(temp_label) center_after = np.rint(center_after) dx_new,dy_new = center_before - center_after shifted_label = resize_array(temp_label,dx_new,dy_new, nx, ny) #if verbose: # print(" after shift2 ", sum(sum(shifted_label))) label_cell = np.zeros((nx,ny)) label_cell[0:,0:] = shifted_label[0:nx,0:ny] if label_cell.shape[0] != nx or label_cell.shape[1] != ny: print("incorrect size") quit() forecasted_labels["ID"+str(interesting_cell)].append(deepcopy(label_cell)) #indx+=1 label_cell = shifted_label #???????????????????????????????????? 
area_current = sum(sum(label_cell)) if verbose: print(("end ", area_current)) forecasted_areas.append(area_current) #add check to make sure the area you produced is more or less correct t_temp = deepcopy(ttt) forecasted_time = [] for gg in range(len(forecasted_areas)): forecasted_time.append(t_temp) t_temp+=timedelta(minutes = 5) """ if verbose: print("******** produce images") if False: t_composite = deepcopy(ttt) for i in range(min(len(y),index_stop)): yearSf, monthSf, daySf, hourSf, minSf = string_date(t_composite) contour_file = outputDir + "Forecast"+yearS+monthS+dayS+"_Obs"+hourS+minS+"_Forc"+hourSf+minSf+"_ID"+str(interesting_cell)+".png" type_image = "_HRV" #background_file = "/data/COALITION2/PicturesSatellite//"+yearS+"-"+monthS+"-"+dayS+"/"+yearS+"-"+monthS+"-"+dayS+type_image+"_"+"ccs4"+"/MSG"+type_image+"-"+"ccs4"+"_"+yearS[2:]+monthS+dayS+hourS+minS+".png" background_file = "/data/COALITION2/PicturesSatellite/LEL_results_wind/"+yearS+"-"+monthS+"-"+dayS+"/RGB-HRV_dam/"+yearS+monthS+dayS+"_"+hourS+minS+"*.png" out_file1 = create_dir( outputDir+"/Contours/")+"Obs"+hourS+minS+"_Forc"+hourSf+minSf+"_ID"+str(interesting_cell)+".png" if verbose: print("... create composite "+contour_file+" "+background_file+" "+out_file1) #subprocess.call("/usr/bin/composite "+contour_file+" "+background_file+" "+out_file1, shell=True) if verbose: print("... saved composite: display ", out_file1, " &") t_composite+=timedelta(minutes=5) """ """ if False: fig, ax = plt.subplots() ax.xaxis.set_major_formatter(mdates.DateFormatter('%H:%M')) ax.plot_date(t, y, 'o',label="Fit and extrapolation") ax.plot_date(forecasted_time, forecasted_areas, '*',label="forecasted") ax.plot_date(t2, y2, '*', label="Observations") #ax.set_xlim([t[0]-timedelta(minutes = 5), t2[-1]+timedelta(minutes = 5)]) ax.set_ylabel("area") ax.legend(loc="best"); fig.savefig(yearS+monthS+dayS+"_"+hourS+minS+"_AreaInTime"+"ID"+str(interesting_cell)+".png") plt.close( fig) """ t_composite = deepcopy(ttt) # merge coalition2 file with if ForeGroundRGBFile is None: currentRGB_im_filename = "/opt/users/"+getpass.getuser()+"/PyTroll/scripts/Mecikalski/cosmo/Channels/indicators_in_time/RGB_dam/"+yearS+monthS+dayS+"_"+hourS+minS+"*ccs4.png" else: currentRGB_im_filename = ForeGroundRGBFile currentRGB_im = glob.glob(currentRGB_im_filename) if len(currentRGB_im)<1: print("No file found:", currentRGB_im_filename) # get background file if BackgroundFile is None: background_im_filename = '/data/COALITION2/PicturesSatellite/LEL_results_wind/'+yearS+'-'+monthS+'-'+dayS+'/RGB-HRV_dam/'+yearS+monthS+dayS+'_'+hourS+minS+'*.png' else: background_im_filename = BackgroundFile background_im = glob.glob(background_im_filename) if len(background_im)>0: im = plt.imread(background_im[0]) back_exists = True else: back_exists = False #img1 = Image.imread(currentRGB_im[0]) obj_area = get_area_def("ccs4") fig,ax = prepare_figure(obj_area) if in_msg.nrt == False: if back_exists: plt.imshow(np.flipud(im)) else: # now read the data we would like to forecast global_data = GeostationaryFactory.create_scene(in_msg.sat_str(), in_msg.sat_nr_str(), "seviri", ttt) #global_data_RGBforecast = GeostationaryFactory.create_scene(in_msg.sat, str(10), "seviri", time_slot) # area we would like to read area2load = "EuropeCanary95" #"ccs4" #c2"#"ccs4" #in_windshift.ObjArea area_loaded = get_area_def(area2load )#(in_windshift.areaExtraction) # load product, global_data is changed in this step! 
area_loaded = load_products(global_data, ['IR_108'], in_msg, area_loaded ) data = global_data.project("ccs4") plt.imshow(np.flipud(data['IR_108'].data),cmap = pylab.gray()) # background file form function argument or default if BackgroundFile is None: background_im_filename = '/data/COALITION2/PicturesSatellite/LEL_results_wind/'+yearS+'-'+monthS+'-'+dayS+'/RGB-HRV_dam/'+yearS+monthS+dayS+'_'+hourS+minS+'*.png' else: if verbose: print("... BackgroundFile ", BackgroundFile) background_im_filename = BackgroundFile # searching background file (wildcards are possible) background_im = glob.glob(background_im_filename) if len(background_im) == 0: print("*** Error in plot_forecast_area (test_forecast.py)") print(" no background file found: ", background_im_filename) quit() elif len(background_im) > 1: print("*** Warning in plot_forecast_area (test_forecast.py)") print(" several background files found: ", background_im) # read background file im = plt.imread(background_im[0]) #img1 = Image.imread(currentRGB_im[0]) obj_area = get_area_def("ccs4") fig,ax = prepare_figure(obj_area) #plt.imshow(np.flipud(im)) # plot contour lines for all cells if at_least_one_cell: time_wanted_minutes = [5,20,40,60] time_wanted = [] color_wanted = [] vmax = 70 for t_want in time_wanted_minutes: time_wanted.append((t_want-5)/5) tmp = (mpl.cm.Blues(float(t_want)/vmax)) tmp1 = [tmp] color_wanted.append(tmp1) all_labels_in_time = np.zeros((nx,ny)) for i in range(len(time_wanted)-1,-1,-1): ind_time = time_wanted [i] for key, forc_labels in forecasted_labels.items(): #forecasted_labels["ID"+str(interesting_cell)]=[] if len(forc_labels)>ind_time: #plt.contour(np.flipud(forc_labels[ind_time]),[0.5],colors = color_wanted_cont[i]) #colors='w') # all_labels_in_time[forc_labels[ind_time]>0] = time_wanted_minutes[i] forc_labels_tmp = np.ma.masked_where(all_labels_in_time==0,all_labels_in_time) plt.contourf(np.flipud(forc_labels_tmp), cmap="Blues", vmin=0, vmax=vmax) if False: for i in range(len(time_wanted)): ind_time = time_wanted [i] for key, forc_labels in forecasted_labels.items(): #forecasted_labels["ID"+str(interesting_cell)]=[] if len(forc_labels)>ind_time: plt.contour(np.flipud(forc_labels[ind_time]),[0.5],colors = color_wanted[i]) #colors='w') # else: print("*** Warning, no COALITION2 cell detected ") print(" produce empty figure ...") PIL_image = fig2img ( fig ) standardOutputName = in_msg.standardOutputName.replace('%y%m%d%H%M',strftime('%y%m%d%H%M',ttt.timetuple())) #PIL_image.paste(img1, (0, 0), img1) if in_msg is None: PIL_image.save(create_dir(outputDir)+"Forecast"+yearS+monthS+dayS+"_Obs"+hourS+minS+".png") path = (outputDir)+yearS+monthS+dayS+hourS+minS+"Forecast.png" else: # dic_figure={} # if in_msg.nrt == True: # dic_figure['rgb']= 'Forecast' #'C2rgbForecastTMP-IR-108' # else: # dic_figure['rgb']= 'Forecast-C2rgb' # dic_figure['area']='ccs4' # PIL_image.save(create_dir(outputFile)+standardOutputName%dic_figure) # path = (outputFile)+standardOutputName%dic_figure # if in_msg.nrt == False: # dic_figure={} # dic_figure['rgb']= 'C2rgb-Forecast-HRV' #'C2rgbForecastTMP-IR-108' # dic_figure['area']='ccs4' # path_output = (outputFile)+standardOutputName%dic_figure # print ("creating composite: ",currentRGB_im[0],"+",path) # subprocess.call("/usr/bin/composite "+currentRGB_im[0]+" "+path+" "+path_output, shell=True) #print ("... 
display ",path_output," &") #dic_figure={} #dic_figure['rgb']= 'Forecast' #'C2rgbForecastTMP-IR-108' #dic_figure['area']='ccs4' outputFile = format_name(create_dir(outputDir)+in_msg.outputFile, ttt, rgb='Forecast', area='ccs4', sat_nr=int(in_msg.sat_nr)) #PIL_image.save(create_dir(outputDir)+in_msg.outputFile%dic_figure) PIL_image.save(outputFile) #path = (outputDir)+in_msg.outputFile%dic_figure path = outputFile print("... display ",path," &") plt.close( fig) if True: if verbose: print("path foreground", currentRGB_im[0]) if in_msg is None: path_composite = (outputFile)+yearS+monthS+dayS+"_Obs"+hourS+minS+"Forecast_composite.png" else: # dic_figure={} # dic_figure['rgb']='C2rgb-Forecast-HRV' # dic_figure['area']='ccs4' # path_composite = (outputFile)+standardOutputName%dic_figure #dic_figure = {} #dic_figure['rgb'] = "_HRV" #'IR-108' #dic_figure['area']='ccs4' #path_IR108 = (outputFile)+standardOutputName%dic_figure #dic_figure={} #dic_figure['rgb'] = 'C2rgbForecast-IR-108' #dic_figure['area'] = 'ccs4' #path_composite = (outputDir) + in_msg.outputFile%dic_figure path_composite = format_name( outputDir+in_msg.outputFile, ttt, rgb='C2rgbForecast-IR-108', area='ccs4', sat_nr=int(in_msg.sat_nr)) #dic_figure = {} #dic_figure['rgb'] = 'IR-108' #dic_figure['area']='ccs4' #path_IR108 = (outputDir) + in_msg.outputFile%dic_figure path_IR108 = format_name( outputDir+in_msg.outputFile, ttt, rgb='IR-108', area='ccs4', sat_nr=int(in_msg.sat_nr)) if in_msg.nrt == True: if verbose: print("---starting post processing") #if area in in_msg.postprocessing_areas: in_msg.postprocessing_composite = deepcopy(in_msg.postprocessing_composite2) postprocessing(in_msg, ttt, in_msg.sat_nr, "ccs4") #print ("... display",path_composite,"&") if in_msg.scpOutput and in_msg.nrt == True and False: #not necessary because already done within postprocessing print("... secure copy "+path_composite+ " to "+in_msg.scpOutputDir) # subprocess.call("scp "+in_msg.scpID+" "+path_composite +" "+in_msg.scpOutputDir+" 2>&1 &", shell=True) #BackgroundFile # if False: for i in range(12): contour_files = glob.glob(outputDir + "Forecast"+yearS+monthS+dayS+"_Obs"+hourS+minS+"_Forc"+hourSf+minSf+"_ID*.png") if verbose: print(("Files found: ",contour_files)) if len(contour_files)>0: background_file = "/data/COALITION2/PicturesSatellite/LEL_results_wind/"+yearS+"-"+monthS+"-"+dayS+"/RGB-HRV_dam/"+yearS+monthS+dayS+"_"+hourS+minS+"*.png" out_file1 = create_dir( outputDir+"/Contours/")+"Obs"+hourS+minS+"_Forc"+hourSf+minSf+".png" t_composite+=timedelta(minutes=5) ttt += timedelta(minutes = 5)
month = int(sys.argv[2]) day = int(sys.argv[3]) hour = int(sys.argv[4]) minute = int(sys.argv[5]) tslot = datetime(year, month, day, hour, minute) else: print("\n*** Error, wrong number of input arguments") print(" usage:") print(" python "+inspect.getfile(inspect.currentframe())) print(" or") print(" python "+inspect.getfile(inspect.currentframe())+" 2017 2 17 14 35\n") quit() print ("*** plot overshooting top detection for ", str(tslot)) glbd = GeostationaryFactory.create_scene("msg-ot", "", "Overshooting_Tops", tslot) print ("... load sat data") # vars_1d = ['latitude','longitude','time'] # vars_3d = ['ir_brightness_temperature', # 'ot_rating_ir', # 'ot_id_number', # 'ot_anvilmean_brightness_temperature_difference', # 'ir_anvil_detection', # 'visible_reflectance', # 'ot_rating_visible', # 'ot_rating_shadow', # 'ot_probability', # 'surface_based_cape', # 'most_unstable_cape',
def nostradamus_rain(in_msg): if in_msg.datetime is None: in_msg.get_last_SEVIRI_date() if in_msg.end_date is None: in_msg.end_date = in_msg.datetime #in_msg.end_date = in_msg.datetime + timedelta(15) delta = timedelta(minutes=15) # automatic choise of the FULL DISK SERVICE Meteosat satellite if in_msg.datetime < datetime(2008, 5, 13, 0, 0): # before 13.05.2008 only nominal MSG1 (meteosat8), no Rapid Scan Service yet sat_nr = "08" elif in_msg.datetime < datetime(2013, 2, 27, 9, 0): # 13.05.2008 ... 27.02.2013 sat_nr = "09" # MSG-2 (meteosat9) became nominal satellite, MSG-1 (meteosat8) started RSS elif in_msg.datetime < datetime(2018, 3, 9, 0, 0): # 27.02.2013 9:00UTC ... 09.03.2013 sat_nr = "10" # MSG-3 (meteosat10) became nominal satellite, MSG-2 started RSS (MSG1 is backup for MSG2) else: sat_nr = "11" print ("... work with Meteosat"+str(sat_nr)) print ("") if in_msg.verbose: print ('*** Create plots for ') print (' Satellite/Sensor: ' + in_msg.sat_str()) print (' Satellite number: ' + in_msg.sat_nr_str() +' // ' +str(in_msg.sat_nr)) print (' Satellite instrument: ' + in_msg.instrument) print (' Start Date/Time: '+ str(in_msg.datetime)) print (' End Date/Time: '+ str(in_msg.datetime)) print (' Areas: ', in_msg.areas) for area in in_msg.plots.keys(): print (' plots['+area+']: ', in_msg.plots[area]) #print (' parallax_correction: ', in_msg.parallax_correction) #print (' reader level: ', in_msg.reader_level) ## read in all the constants files print('=================================') print('*** load the constant fields (radar mask, viewing geometry, and land/sea mask plus surface elevation)') global_radar_mask, global_vg, global_ls_ele = load_constant_fields(sat_nr) ############################################### ## load the mlp for the precip detection (pd) # ############################################### if in_msg.model == 'mlp': dir_start_pd= './models/precipitation_detection/mlp/2hl_100100hu_10-7alpha_log/' dir_start_rr= './models/precipitation_rate/mlp/2hl_5050hu_10-2alpha_log/' if not in_msg.read_from_netCDF: clf_pd = joblib.load(dir_start_pd+'clf.pkl') scaler_pd = joblib.load(dir_start_pd+'scaler.pkl') feature_list_pd = joblib.load(dir_start_pd+'featurelist.pkl') thres_pd=np.load(dir_start_pd+'opt_orig_posteriorprobab_thres.npy') ######################################### ## load the mlp for the rain rates (rr) # ######################################### reg_rr = joblib.load(dir_start_rr+'reg.pkl') scaler_rr = joblib.load(dir_start_rr+'scaler.pkl') feature_list_rr = joblib.load(dir_start_rr+'featurelist.pkl') #################################### ## load the reference sets for a climatological probab matching (pm) if requested #################################### if in_msg.probab_match: # load in the ref data sets created with the script: rr_probab_matching_create_refset.ipynb ody_rr_ref=np.load(dir_start_rr+'pm_valid_data_ody_rr_ref.npy') pred_rr_ref=np.load(dir_start_rr+'pm_valid_data_pred_rr_ref.npy') # initialize processed RGBs plots_done={} time_slot = copy.deepcopy(in_msg.datetime) while time_slot <= in_msg.end_date: print('... 
processing for time: ', time_slot) ################################################ ## CHOOSE THE SETUP (time_slot, area, model) ################################################ ########################## ## LOAD THE NEEDED INPUTS ########################## if not in_msg.read_from_netCDF: ## read observations at the specific time print('=================================') print('*** load the time slot specific fields with in_msg.parallax_gapfilling:', in_msg.parallax_gapfilling) global_radar, global_sat, global_nwc, global_cth, global_hsaf = load_input(sat_nr, time_slot, in_msg.parallax_gapfilling, read_HSAF=in_msg.read_HSAF) # def load_input(sat_nr, time_slot, par_fill, read_HSAF=True): else: print('read Odyssey radar composite') from mpop.satellites import GeostationaryFactory global_radar = GeostationaryFactory.create_scene("odyssey", "", "radar", time_slot) global_radar.load(['RATE']) print(global_radar) print('=========================') for area in in_msg.areas: print ("================================") print ("*** PROCESSING FOR AREA: "+area) # declare "precipitation detection" and "rainrate dictionary", the applied model (e.g. MLP) is used as key pd = {} rr = {} plots_done[area]=[] if in_msg.read_from_netCDF: # reproject Odyssey radar mask to area of interest #radar_mask = global_radar_mask.project(area, precompute=True) data_radar = global_radar.project(area, precompute=True) # radar mask to see where odyssey ground truth exists mask_r = data_radar['RATE-MASK'].data.data==False rr['ody'] = copy.deepcopy(data_radar['RATE'].data.data) # do not trust values below 0.3 & above 130 -> do not consider it as rain and set all values to 0 rr['ody'][np.logical_or(rr['ody'] < 0.3,rr['ody'] >= 130.0)] = 0.0 print (rr['ody'].min(), rr['ody'].max(), rr['ody'].shape, type(rr['ody'])) from netCDF4 import Dataset # read from file outdir_netCDF = time_slot.strftime('/data/COALITION2/database/meteosat/nostradamus_RR/%Y/%m/%d/') file_netCDF = time_slot.strftime('MSG_rr-'+in_msg.model+'-'+area+'_%Y%m%d%H%M.nc') print ("*** read precip prediction from", outdir_netCDF+"/"+file_netCDF) ncfile = Dataset(outdir_netCDF+"/"+file_netCDF,'r') rr_tmp = ncfile.variables['rainfall_rate'][:,:] ### now, we read radar data directly from odyssey file #rr['ody'] = ncfile.variables['rainfall_rate (odyssey)'][:,:] #print (rr['ody'].min(), rr['ody'].max(), rr['ody'].shape, type(rr['ody'])) ### now, we read radar mask directly from odyssey file #mask_r = ncfile.variables['odyssey_mask'][:,:] #print ("... convert mask_r (1, 0) from int to bolean (True, False)") #mask_r = (mask_r == 1) # create fake mask_h (where rainfall is larger than 0 mm/h) mask_h = rr_tmp>0 pd[in_msg.model] = rr_tmp>0 rr_tmp = rr_tmp.flatten() # remove 0 entries rr_tmp = rr_tmp [ rr_tmp != 0 ] if False: import matplotlib.pyplot as plt #fig = plt.figure() fig, axes = plt.subplots(nrows=1, ncols=2, figsize=(6,6)) plt.subplot(2, 1, 1) plt.imshow(mask_h) #plt.colorbar() plt.subplot(2, 1, 2) plt.imshow(mask_r) #plt.colorbar() fig.savefig("mask_h_mask_r_netCDF.png") print("... 
display mask_h_mask_r_netCDF.png &") #plt.show() #quit() else: ## project all data to the desired projection radar_mask, vg, ls_ele, data_radar, data_sat, data_nwc, data_cth, data_hsaf = \ project_data(area, global_radar_mask, global_vg, global_ls_ele, global_radar, global_sat, global_nwc, global_cth, global_hsaf, read_HSAF=in_msg.read_HSAF) ########################################################### ## SINGLE TIME SLOT TO CARRY OUT A FULL RAIN RATE RETRIEVAL ########################################################### # preprocess the data # mask_h: field indicating where NWCSAF products are available & thus where predictions are carried out: True if NWCSAF products available # mask_r: field indicating where radar products are available: True if radar product is available # mask_rnt: field indicating where radar product available but not trustworthy: i.e. in threshold_mask, 0<rr<0.3, rr>130 overlaid: True if radar product is NOT trustworthy all_data, all_data_names, mask_h, mask_r, mask_rnt, rr['ody'], rr['hsaf'], lon, lat = \ pd_rr_preprocess_data_single_scene( area, time_slot, radar_mask, vg, ls_ele, data_radar, data_sat, data_nwc, data_cth, data_hsaf, in_msg.parallax_gapfilling, 'rr', read_HSAF=in_msg.read_HSAF) #pd_rr_preprocess_data_single_scene( sat_nr, area, time_slot, 'nearest', 'rr', read_HSAF=False) if False: import matplotlib.pyplot as plt #fig = plt.figure() fig, axes = plt.subplots(nrows=1, ncols=2, figsize=(6,6)) plt.subplot(2, 1, 1) plt.imshow(mask_h) #plt.colorbar() plt.subplot(2, 1, 2) plt.imshow(mask_r) #plt.colorbar() fig.savefig("mask_h_mask_r.png") print("... display mask_h_mask_r.png &") #plt.show() #quit() del rr['hsaf'] # since not actually needed in this script # project all data to desired projection # ... print('... predictions at ' + str(mask_h.sum())+' out of ' +str(mask_h.flatten().shape[0])+ ' points') #################################### ## precip detection #################################### # create y_pd, y_hsaf_pd, X_raw_pd y_pd_vec, y_hsaf_pd_vec, X_raw, feature_list = pd_rr_create_y_yhsaf_Xraw( all_data, all_data_names, 'pd', cut_precip=False ) del y_pd_vec, y_hsaf_pd_vec # (since not actually ever needed in this script) if in_msg.remove_vg==True: print('... 
remove viewing geometry from predictors') feature_list = np.append(feature_list[:6],feature_list[8:]) X_raw = np.hstack([X_raw[:,:6],X_raw[:,8:]]) print(' new X_raw.shape:', X_raw.shape) feature_list # check features if np.array_equal(feature_list, feature_list_pd): print('OK, input features correspond to input features required by the loaded model') else: print('ATTENTION, input features do not correspond to input features required by the loaded model') quit() # create X_pd X_pd=scaler_pd.transform(X_raw) # create final precip detection fields: opera + hsaf pd['ody']=rr['ody']>=0.3 # make precip detection predictions print ("*** make precip detection predictions") pd_probab = clf_pd.predict_proba(X_pd)[:,1] # probab precip balanced classes pd_vec_h = pd_probab>=thres_pd pd[in_msg.model] = np.zeros(lon.shape,dtype=bool) pd[in_msg.model][mask_h] = pd_vec_h #################################### ## rain rate on above identified precipitating pixels #################################### # reduce X_raw to the points where rain was predicted by the mlp X_raw= X_raw[pd_vec_h,:] # check, if read features correspond to the trained model if np.array_equal(feature_list, feature_list_rr): print('OK, input features correspond to input features required by the loaded model') else: print('ATTENTION, input features do not correspond to input features required by the loaded model') quit() # create X_rr X_rr=scaler_rr.transform(X_raw) # rain rate prediction at places where precip detected by mlp rr_tmp=reg_rr.predict(X_rr) # carry out a probability machting if requested if in_msg.probab_match: print("... do probability matching for:", in_msg.model) pm_str = str(in_msg.model)+'_pm' rr_tmp_pm = probab_match_rr_refprovide(ody_rr_ref,pred_rr_ref,rr_tmp) #rr[pm_str] = np.zeros_like(lon) rr[pm_str] = np.zeros_like(rr['ody']) # also casts the type float rr[pm_str][pd[in_msg.model]]=rr_tmp_pm print("... probability matching done for:", in_msg.model) # copy rainrate data to the final place # replace all prediction lower than precipitation detection threshold with threhold rain rate rr_tmp[rr_tmp<0.3]=0.3 # correct upward all too low predictions (i.e. the ones below the precip detection threshold) rr[in_msg.model] = np.zeros_like(rr['ody']) rr[in_msg.model][pd[in_msg.model]]=rr_tmp ##################################### ## SAVE RESULT AS NETCDF ##################################### if area in in_msg.save_netCDF and (not in_msg.read_from_netCDF): outdir_netCDF = time_slot.strftime(in_msg.outdir_netCDF) file_netCDF = time_slot.strftime(in_msg.file_netCDF) file_netCDF = file_netCDF.replace("%(area)s", area) file_netCDF = file_netCDF.replace("%(model)s", in_msg.model) #save_RR_as_netCDF(outdir_netCDF, file_netCDF, rr[in_msg.model], save_rr_ody=True, rr_ody=rr['ody'], save_ody_mask=True, ody_mask=mask_r, zlib=True) save_RR_as_netCDF(outdir_netCDF, file_netCDF, rr[in_msg.model]) ##################################### ## SINGLE TIME SLOT TO DRAW THE MAPS ##################################### print ("*** start to create plots") #################################### ## plot precip detection #################################### if 'pdMlp' in in_msg.plots[area]: mask_rt = np.logical_and(mask_r, mask_rnt==False) # trusted radar i.e. 
True where I have a trustworthy radar product available mod_ss = [in_msg.model] + ['ody'] # ver for verification; ver={} for x in mod_ss: ver[x]=np.zeros_like(lon) # sat: no ver[x][pd[x]>0] = 1 # sat: yes ver[x][np.logical_and(ver[x]==0,mask_rnt)] = 2 # sat: no (rad clutter) ver[x][np.logical_and(ver[x]==1,mask_rnt)] = 3 # sat: yes (rad clutter) ver[x][np.logical_and(mask_rt,np.logical_and(pd[x]==1,pd['ody']==1))] = 4 # hit ver[x][np.logical_and(mask_rt,np.logical_and(pd[x]==1,pd['ody']==0))] = 5 # false alarm ver[x][np.logical_and(mask_rt,np.logical_and(pd[x]==0,pd['ody']==0))] = 6 # correct reject ver[x][np.logical_and(mask_rt,np.logical_and(pd[x]==0,pd['ody']==1))] = 7 # miss # define colorkey v_pd=np.array([-0.5,0.5,1.5,2.5,3.5,4.5,5.5,6.5,7.5]) cmap_pd, norm_pd = from_levels_and_colors(v_pd, colors =['darkgrey', '#984ea3','lightgrey','plum', '#377eb8', '#e41a1c','ivory','#ff7f00'], extend='neither') plot_precipitation_detection=False if plot_precipitation_detection: # single prediction plot #fig,ax= plt.subplots(figsize=(20, 10)) #plt.rcParams.update({'font.size': 16}) fig,ax= plt.subplots(figsize=(10, 5)) plt.rcParams.update({'font.size': 8}) plt.rcParams.update({'mathtext.default':'regular'}) m = map_plot(axis=ax,area=area) m.ax.set_title('precip detection based on sat vs opera') # plot sat precip detection product against opera product tick_label_pd_nr=np.array([0,1,2,3,4,5,6,7]) tick_label_pd=['sat: no','sat: yes','sat: no (rad unr)','sat: yes (rad unr)','hit','false alarm','correct reject','miss'] im=m.pcolormesh( lon, lat, ver['mlp'], cmap=cmap_pd, norm=norm_pd, latlon=True ) divider = make_axes_locatable(ax) cax = divider.append_axes("right", size="4%", pad=0.05) cbar = fig.colorbar(im, cax=cax, ticks=tick_label_pd_nr, spacing='uniform') a=cbar.ax.set_yticklabels(tick_label_pd) outfile= 'precip_detection_sat'+in_msg.model+'_vs_opera_%s' fig.savefig((in_msg.outputDir+outfile %time_slot.strftime('%Y%m%d%H%M')), dpi=300, bbox_inches='tight') print('... create figure: display ' + in_msg.outputDir+outfile %time_slot.strftime('%Y%m%d%H%M') + '.png') plots_done[area].append('pdMlp') #################################### ## plot rain rate with matplotlib #################################### if 'rrMatplotlib' in in_msg.plots[area]: # create the combi rr field rr['combi']=copy.deepcopy(rr[in_msg.model+'_pm']) rr['combi'][mask_r]=rr['ody'][mask_r] # determine where I have >0.3 mm/h precip on the permanent mask -> overlay end picture with a pink(?) 
color there pd_nt=np.logical_and(mask_rnt,pd['ody']>=0.3) #precip detected but not trusted t = time.time() #fig, axes = plt.subplots(1, 2,figsize=(19, 6)) #plt.rcParams.update({'font.size': 16}) fig, axes = plt.subplots(1, 2,figsize=(9.5, 3)) plt.rcParams.update({'font.size': 8}) plt.rcParams.update({'mathtext.default':'regular'}) ## 1st subplot m = map_plot(axis=axes[0],area=area) m.ax.set_title('Rain Rate (opera + MSG ANN), '+str(time_slot)) # plot a white colored background where I have data available v_pd_nt=np.array([0.5,1.5]) cmap_pd_nt, norm_pd_nt = from_levels_and_colors(v_pd_nt, colors=['white'], extend='neither') im4=m.pcolormesh(lon,lat,np.ones(lon.shape),cmap=cmap_pd_nt,norm=norm_pd_nt,latlon=True) # plot mask which contains no rad & not trusted rad values nr_ntr = copy.deepcopy(ver['ody']) nr_ntr=np.ma.masked_greater(nr_ntr,2) nr_ntr=np.ma.masked_equal(nr_ntr,1) im2=m.pcolormesh(lon,lat,nr_ntr,cmap=cmap_pd,norm=norm_pd,latlon=True) # plot combined precip opera + sat v_rr = [0.3,0.6,1.2,2.4,4.8,9.6] cmap_rr,norm_rr=smart_colormap(v_rr,name='coolwarm',extend='max') im=m.pcolormesh(lon,lat,rr['combi'],cmap=cmap_rr,norm=norm_rr,latlon=True) # plot pink pixels everywhere on permanently not trusted radar mask where we observe > 0.3 mm/h precip v_pd_nt=np.array([0.5,1.5]) cmap_pd_nt, norm_pd_nt = from_levels_and_colors(v_pd_nt, colors=['plum'], extend='neither') im3=m.pcolormesh(lon,lat,pd_nt,cmap=cmap_pd_nt,norm=norm_pd_nt,latlon=True) ## 2nd subplot # plot purely satellite based precip product m = map_plot(axis=axes[1],area=area) m.ax.set_title('Rain Rate (MSG ANN), '+str(time_slot)) if in_msg.IR_108 and not in_msg.read_from_netCDF: # plot the IR_108 channel clevs = np.arange(225,316,10) cmap_sat,norm_sat=smart_colormap(clevs,name='Greys',extend='both') im4 = m.pcolormesh(lon,lat,data_sat['IR_108_PC'].data,cmap=cmap_sat,norm=norm_sat,latlon=True) else: # plot a white surface to distinguish between the regions where the produ v_pd_nt=np.array([0.5,1.5]) cmap_pd_nt, norm_pd_nt = from_levels_and_colors(v_pd_nt, colors=['white'], extend='neither') im4=m.pcolormesh(lon,lat,np.ones(lon.shape),cmap=cmap_pd_nt,norm=norm_pd_nt,latlon=True) if in_msg.probab_match: im=m.pcolormesh(lon, lat,rr[in_msg.model+'_pm'], cmap=cmap_rr, norm=norm_rr, latlon=True) else: im=m.pcolormesh(lon, lat,rr[in_msg.model], cmap=cmap_rr, norm=norm_rr, latlon=True) if in_msg.IR_108 and in_msg.probab_match: outfile= 'rr_combioperasat'+in_msg.model+'pm_satIR108'+in_msg.model+'pm_%s' elif in_msg.IR_108 and (in_msg.probab_match==False): outfile= 'rr_combioperasat'+in_msg.model+'_satIR108'+in_msg.model+'_%s' elif (in_msg.IR_108==False) and in_msg.probab_match: outfile= 'rr_combioperasat'+in_msg.model+'pm_sat'+in_msg.model+'pm_%s' elif (in_msg.IR_108==False) and (in_msg.probab_match==False): outfile= 'rr_combioperasat'+in_msg.model+'_sat'+in_msg.model+'_%s' fig.subplots_adjust(bottom=0.15) cbar_ax = fig.add_axes([0.25, 0.05, 0.5, 0.05]) cbar=fig.colorbar(im, cax=cbar_ax, orientation='horizontal') cbar.set_label('$mm\,h^{-1}$') fig.savefig((in_msg.outputDir+outfile %time_slot.strftime('%Y%m%d%H%M')), dpi=300, bbox_inches='tight') print('... create figure: display ' + in_msg.outputDir+outfile %time_slot.strftime('%Y%m%d%H%M') + '.png') elapsed = time.time() - t print("... 
elapsed time for creating the rainrate image in seconds: "+str(elapsed)) plots_done[area].append('rrMatplotlib') #################################### ## plot rain rate with trollimage #################################### plot_trollimage=True if plot_trollimage: from plotting_tools import create_trollimage from plot_msg import add_title print ("*** create plot with trollimage") from copy import deepcopy from trollimage.colormap import RainRate colormap = deepcopy(RainRate) # define contour write for coasts, borders, rivers from pycoast import ContourWriterAGG cw = ContourWriterAGG(in_msg.mapDir) from plot_msg import choose_map_resolution resolution = choose_map_resolution(area, None) #resolution='l' # odyssey, europe #resolution='i' # ccs4 print (" resolution=", resolution) IR_file=time_slot.strftime(in_msg.outputDir+'MSG_IR-108-'+area+'_%Y%m%d%H%M.png') if 'IR_108' in in_msg.plots[area] and not in_msg.read_from_netCDF: # create black white background #img_IR_108 = data_sat.image.channel_image('IR_108_PC') img_IR_108 = data_sat.image.ir108() img_IR_108.save(IR_file) for rgb in in_msg.plots[area]: if rgb == 'RATE': prop = np.ma.masked_equal(rr['ody'], 0) mask2plot=deepcopy(mask_r) elif rgb =='rrMlp': prop = np.ma.masked_equal(rr[in_msg.model], 0) mask2plot=None elif rgb == 'rrMlpPm': prop = np.ma.masked_equal(rr[in_msg.model+'_pm'], 0) mask2plot=None elif rgb == 'rrOdyMlp': rr['combi']=copy.deepcopy(rr[in_msg.model]) rr['combi'][mask_r]=rr['ody'][mask_r] prop = np.ma.masked_equal(rr['combi'], 0) mask2plot=deepcopy(mask_r) elif rgb == 'rrOdyMlpPm': rr['combi']=copy.deepcopy(rr[in_msg.model+'_pm']) rr['combi'][mask_r] = rr['ody'][mask_r] prop = np.ma.masked_equal(rr['combi'], 0) mask2plot=deepcopy(mask_r) elif rgb == 'IR_108': continue else: "*** Error, unknown product requested" quit() filename = None if area in in_msg.postprocessing_composite: composite_file = in_msg.outputDir+"/"+'MSG_'+in_msg.postprocessing_composite[area][0]+"-"+area+'_%Y%m%d%H%M.png' composite_file = composite_file.replace("%(rgb)s", rgb) else: composite_file = None PIL_image = create_trollimage(rgb, prop, colormap, cw, filename, time_slot, area, composite_file=composite_file, background=IR_file, mask=mask2plot, resolution=resolution, scpOutput=in_msg.scpOutput) # add title to image dc = DecoratorAGG(PIL_image) if in_msg.add_title: add_title(PIL_image, in_msg.title, rgb, 'MSG', sat_nr, in_msg.datetime, area, dc, in_msg.font_file, True, title_color=in_msg.title_color, title_y_line_nr=in_msg.title_y_line_nr ) # !!! needs change # save image as file outfile = time_slot.strftime(in_msg.outputDir+"/"+in_msg.outputFile).replace("%(rgb)s", rgb).replace("%(area)s", area).replace("%(model)s", in_msg.model) PIL_image.save(outfile, optimize=True) if isfile(outfile): print ("... create figure: display "+outfile+" &") chmod(outfile, 0777) plots_done[area].append(rgb) else: print ("*** Error: "+outfile+" could not be generated") quit() print('=================================') ############################################## ## potential other map setups ############################################## ############################################## ############################################## ## opera composite vs the prediction... 
but I think it'd be less confusing to only show the prediction ############################################## if 'OdyVsRr' in in_msg.plots[area]: fig, axes = plt.subplots(1, 2,figsize=(23.5, 5)) # will be switched to basemap once have new training set together plt.rcParams.update({'font.size': 16}) plt.rcParams.update({'mathtext.default':'regular'}) # set up nn subplot m = map_plot(axis=axes[0],area=area) m.ax.set_title('precip detection based on mlp vs opera') v_pd=np.array([-0.5,0.5,1.5,2.5,3.5,4.5,5.5,6.5,7.5]) cmap_pd, norm_pd = from_levels_and_colors(v_pd, colors=['darkgrey', '#984ea3','lightgrey','plum', '#377eb8', '#e41a1c','ivory','#ff7f00'], extend='neither') tick_label_pd_nr=np.array([0,1,2,3,4,5,6,7]) tick_label_pd=['sat: no','sat: yes','sat: no (rad unr)','sat: yes (rad unr)','hit','false alarm','correct reject','miss'] im=m.pcolormesh(lon,lat,ver['mlp'],cmap=cmap_pd, norm=norm_pd, latlon=True) divider = make_axes_locatable(m.ax) cax = divider.append_axes("right", size="4%", pad=0.05) cbar = fig.colorbar(im,cax=cax, ticks=tick_label_pd_nr, spacing='uniform') cbar.ax.set_yticklabels(tick_label_pd, fontsize=14) m = map_plot(axis=axes[1],area=area) m.ax.set_title('precip detection based on opera vs opera') v_pd=np.array([-0.5,0.5,2.5,3.5,4.5,6.5]) cmap_pd, norm_pd = from_levels_and_colors(v_pd, colors=['darkgrey','lightgrey','plum', '#377eb8','ivory'], extend='neither') tick_label_pd_nr=np.array([0,1.5,3,4,5.5]) tick_label_pd=['no rad','rad clutter: no','rad clutter: yes','rad: yes','rad: no'] im=m.pcolormesh(lon,lat,ver['ody'],cmap=cmap_pd,norm=norm_pd,latlon=True) divider = make_axes_locatable(m.ax) cax = divider.append_axes("right", size="4%", pad=0.05) cbar = fig.colorbar(im,cax=cax,ticks=tick_label_pd_nr,spacing='uniform') a=cbar.ax.set_yticklabels(tick_label_pd,fontsize=14) outfile= 'test_%s' fig.savefig((in_msg.outputDir+ outfile %time_slot.strftime('%Y%m%d%H%M')), dpi=300, bbox_inches='tight') print('... create figure: display ' + in_msg.outputDir+outfile %time_slot.strftime('%Y%m%d%H%M') + '.png') plots_done[area].append('OdyVsRr') ############################################## ## cth visualisation without parallax corr for a test ############################################## if 'CTH' in in_msg.plots[area]: fig, axes = plt.subplots(1, 1,figsize=(5, 3)) plt.rcParams.update({'font.size': 16}) plt.rcParams.update({'mathtext.default':'regular'}) ## 1st subplot m = map_plot(axis=axes,area=area) m.ax.set_title('CTH (without parallax corr)') v_rr = np.arange(6000,12001,1000) cmap_rr,norm_rr=smart_colormap(v_rr,name='coolwarm',extend='neither') im4 = m.pcolormesh(lon,lat,data_cth['CTTH'].height,cmap=cmap_rr,norm=norm_rr,latlon=True) fig.colorbar(im4) data_cth['CTTH'].height outfile= 'CTH_without_parallax_%s' fig.savefig((in_msg.outputDir+ outfile %time_slot.strftime('%Y%m%d%H%M')), dpi=300, bbox_inches='tight') print('... create figure: display ' + in_msg.outputDir+outfile %time_slot.strftime('%Y%m%d%H%M') + '.png') plots_done[area].append('CTH') # end of area loop ## start postprocessing for area in in_msg.postprocessing_areas: postprocessing(in_msg, time_slot, int(sat_nr), area) # increase the time by a time delta time_slot += delta # end of time loop return plots_done
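# probab_match_rr_refprovide() used in nostradamus_rain() is part of this code base.
# Conceptually it performs climatological probability (quantile) matching: each predicted
# rain rate is replaced by the Odyssey radar rain rate at the same cumulative frequency in
# the reference sets loaded above. The function below is only a sketch under that
# assumption, not the original implementation.
import numpy as np

def probab_match_sketch(ody_rr_ref, pred_rr_ref, rr_pred):
    """Replace each predicted rain rate by the radar rate at the same climatological quantile."""
    # empirical quantile of every prediction within the prediction reference set
    q = np.searchsorted(np.sort(pred_rr_ref), rr_pred) / float(len(pred_rr_ref))
    # radar (Odyssey) rain rate at the same quantile of the radar reference set
    return np.percentile(ody_rr_ref, np.clip(q, 0.0, 1.0) * 100.0)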
month = int(sys.argv[2]) day = int(sys.argv[3]) hour = int(sys.argv[4]) minute = int(sys.argv[5]) time_slot = datetime.datetime(year, month, day, hour, minute) if len(sys.argv) == 7: out_path = sys.argv[6] elif len(sys.argv) >= 7: print("*** Error: Incorrect number of arguments ***") sys.exit() print(" ") print('*** load data for time:', str(time_slot)) #global_data = GeostationaryFactory.create_scene("Meteosat-10", "", "seviri", time_slot) global_data = GeostationaryFactory.create_scene("Meteosat-9", "", "seviri", time_slot) #global_data = GeostationaryFactory.create_scene("Meteosat-8", "", "seviri", time_slot) from my_composites import get_image obj_image = get_image(global_data, 'HRoverview') print(obj_image.prerequisites) parallax_correction = True if parallax_correction: global_data.load(obj_image.prerequisites, reader_level="seviri-level9") else: global_data.load(obj_image.prerequisites, reader_level="seviri-level8") print(" ") print('*** some info about the loaded data') print(global_data) # data is already in ccs4 projection, so we can skip this step
day = int(sys.argv[3]) hour = int(sys.argv[4]) minute = int(sys.argv[5]) tslot = datetime(year, month, day, hour, minute) else: print("\n*** Error, wrong number of input arguments") print(" usage:") print(" python demo_refl039.py") print(" or") print(" python demo_refl039.py 2017 2 17 14 35\n") quit() print("*** plot day microphysics RGB for ", str(tslot)) #glbd = GeostationaryFactory.create_scene("meteosat", "09", "seviri", tslot) glbd = GeostationaryFactory.create_scene("Meteosat-9", "", "seviri", tslot) print("... load sat data") glbd.load(['VIS006','VIS008','IR_016','IR_039','IR_108','IR_134'], area_extent=europe.area_extent) #area="EuropeCanaryS95" area="EuroMercator" # blitzortung projection local_data = glbd.project(area, precompute=True) print("... read responce functions") from pyspectral.near_infrared_reflectance import Calculator from pyspectral.solar import (SolarIrradianceSpectrum, TOTAL_IRRADIANCE_SPECTRUM_2000ASTM) solar_irr = SolarIrradianceSpectrum(TOTAL_IRRADIANCE_SPECTRUM_2000ASTM, dlambda=0.0005) #from pyspectral.seviri_rsr import load #seviri = load()
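# A hedged follow-up sketch showing how the loaded channels could be turned into a 3.9 um
# solar reflectance with the pyspectral Calculator imported above. The platform/band
# strings and the use of pyorbital for the sun zenith angle are assumptions and may need
# to be adapted to the installed pyspectral/pyorbital versions.
from pyorbital.astronomy import sun_zenith_angle

lons, lats = local_data['IR_039'].area.get_lonlats()
sunz = sun_zenith_angle(tslot, lons, lats)

refl39 = Calculator('Meteosat-9', 'seviri', 'IR3.9')
r39 = refl39.reflectance_from_tbs(sunz, local_data['IR_039'].data, local_data['IR_108'].data)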
# define area object obj_area = get_area_def(area) #(in_windshift.ObjArea) size_x = obj_area.pixel_size_x size_y = obj_area.pixel_size_y # define area proj4_string = obj_area.proj4_string # e.g. proj4_string = '+proj=geos +lon_0=0.0 +a=6378169.00 +b=6356583.80 +h=35785831.0' area_extent = obj_area.area_extent # e.g. area_extent = (-5570248.4773392612, -5567248.074173444, 5567248.074173444, 5570248.4773392612) area_tuple = (proj4_string, area_extent) # read CTP to distinguish high, medium and low clouds global_data_CTP = GeostationaryFactory.create_scene( in_msg.sat, str(in_msg.sat_nr).zfill(2), "seviri", time_slot) #global_data_CTP = GeostationaryFactory.create_scene(in_msg.sat, str(10), "seviri", time_slot) area_loaded = get_area_def( "EuropeCanary95") #(in_windshift.areaExtraction) area_loaded = load_products(global_data_CTP, ['CTP'], in_msg, area_loaded) data_CTP = global_data_CTP.project(area) [nx, ny] = data_CTP['CTP'].data.shape # read all rgbs global_data = GeostationaryFactory.create_scene( in_msg.sat, str(in_msg.sat_nr).zfill(2), "seviri", time_slot) #global_data_CTP = GeostationaryFactory.create_scene(in_msg.sat, str(10), "seviri", time_slot)
#time_slot = datetime.datetime(year, 12, 16, 13, 30) time_slot = datetime.datetime(year, month, day, hour, minute) load_radar = True load_sat = True #channel_list=['VIS006','VIS008','IR_016','IR_039','WV_062','WV_073','IR_087','IR_097','IR_108','IR_120','IR_134','HRV'] channel_list = [ 'VIS006', 'VIS008', 'IR_016', 'IR_039', 'WV_062', 'WV_073', 'IR_087', 'IR_097', 'IR_108', 'IR_120', 'IR_134' ] #channel_list=['IR_108'] if load_radar: global_radar = GeostationaryFactory.create_scene("odyssey", "", "radar", time_slot) global_radar.load([prop_str]) print(global_radar) print("=========================") if load_sat: global_sat = GeostationaryFactory.create_scene("meteosat", "09", "seviri", time_slot) #global_sat.load(['IR_108'], reader_level="seviri-level2") global_sat.load(channel_list, reader_level="seviri-level2") print(global_sat) print("=========================") color_mode = 'RainRate' loutputDir = "/data/cinesat/out/"
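# A minimal follow-up sketch, assuming the workflow used elsewhere in these scripts:
# reproject the radar composite and the SEVIRI channels to the Swiss "ccs4" grid before
# any plotting is done (the variable names below are illustrative only).
if load_radar:
    data_radar = global_radar.project("ccs4", precompute=True)
if load_sat:
    data_sat = global_sat.project("ccs4", precompute=True)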
count2NonZero = [] time1 = [] for i in range(5, 65, 5): leadS = "%02d" % i #diff["t"+leadS] = {} diff = [] diff1 = [] yearS, monthS, dayS, hourS, minS = string_date(time_slot0 + timedelta(minutes=i)) #print ("*** read data for ", in_msg.sat_str(),in_msg.sat_nr_str(), "seviri", time_slot0+timedelta(minutes = i)) global_data = GeostationaryFactory.create_scene( in_msg.sat_str(), in_msg.sat_nr_str(), "seviri", time_slot0 + timedelta(minutes=i)) area_loaded = get_area_def( "EuropeCanary95") #(in_windshift.areaExtraction) area_loaded = load_products(global_data, ['CTT'], in_msg, area_loaded) data = global_data.project("ccs4") img_obs = deepcopy(data['CTT'].data) img_obs.mask[:, :] = False if True: print("pickles/" + year0S + month0S + day0S + "_" + hour0S + min0S + "_CTT_t" + leadS + "_1layer.p") tmp = pickle.load( open( "pickles/" + year0S + month0S + day0S + "_" + hour0S +
from mpop.satellites import GeostationaryFactory
from mpop.projector import get_area_def
import datetime
from my_msg_module import get_last_SEVIRI_date
from pycoast import ContourWriterAGG
from mpop.utils import debug_on

debug_on()

#time_slot = get_last_SEVIRI_date(False, delay=15)
time_slot = datetime.datetime(2015, 12, 3, 3, 45)
print(str(time_slot))

global_data = GeostationaryFactory.create_scene("volc", "10", "seviri", time_slot)

#europe = get_area_def("EuropeCanaryS95")
#channels = ['ash_loading']
channels = ['ash_height']
#channels = ['ash_height_quality_flag']
#channels = ['ash_effective_radius']
chn = channels[0]

global_data.load(channels)  # , area_extent=europe.area_extent
print(global_data)

#area="SeviriDiskFull00"
#area="SeviriDiskFull00S4"
area = "EuropeCanaryS95"
#area="Etna"
data = global_data.project(area, precompute=True)
#data = global_data
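# A hedged sketch of how the projected ash product could be rendered, following the
# trollimage pattern used in the other scripts of this collection; the 0-12 km value
# range is an assumption for illustration only.
from trollimage.colormap import spectral
from trollimage.image import Image as trollimage

cmap = spectral
cmap.set_range(0, 12000)   # assumed plotting range for ash top height in metres
img = trollimage(data[chn].data, mode="L")
img.colorize(cmap)
img.save("./ash_height_" + area + ".png")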
year = 2014 # 2014 09 15 21 35 month = 7 # 2014 07 23 18 30 day = 23 hour = 18 minute = 35 time_slot = datetime(year, month, day, hour, minute) area = 'ccs4' #area='nrEURO1km' #area='nrEURO3km' #area='EuropeCanaryS95' obj_area = get_area_def(area) print("... read lightning data") global_data = GeostationaryFactory.create_scene("swisslightning", "", "thx", time_slot) global_data.load([prop_str], area=area) print("... global_data ") print(global_data) #plot.show_quicklook(ccs4, global_data['precip'].data ) #print "global_data[prop_str].data", global_data[prop_str].data print("... shape: ", global_data[prop_str].data.shape) print("... min/max: ", global_data[prop_str].data.min(), global_data[prop_str].data.max()) print("... dt: ", global_data.dt, " min") dt_str = ("%04d" % global_data.dt) + "min" yearS = str(year) #yearS = yearS[2:] monthS = "%02d" % month
############ DATA LOAD WITH PYTROLL ############
# Sub section for data load with pytroll

# Time_slot
try:
    time_slot = datetime.datetime(YYYY, MM, DD, hh, mm)
    print('\n')
    print(time_slot)
    print('\n')
except:
    print("\nTIME SLOT UNDEFINED")

# Scene Configuration
try:
    global_data = GeostationaryFactory.create_scene("meteosat", "10", "seviri", time_slot)
except:
    print("\nSATELLITE DEFINITION LOAD FAILED, CHECK THAT meteosat10.cfg EXISTS IN THE MPOP FOLDER OR CHANGE ARGUMENT IF YOU USE ANOTHER SATELLITE DEFINITION.")

try:
    globe = get_area_def("AfSubSahara")
except:
    print("\nAREA DEFINITION LOAD FAILED, CHECK THAT areas.def EXISTS IN THE MPOP FOLDER.")

# Data load
try:
    if MSG_FILE_TYPE == 'L':
        IRchannelList = ['IR_039', 'IR_108']
        global_data.load(IRchannelList, area_extent=globe.area_extent, calibrate=1)
        print(global_data[3.9].data.min())
        print(global_data[3.9].data.max())
from my_msg_module import get_last_SEVIRI_date datetime1 = get_last_SEVIRI_date(True) year = datetime1.year month = datetime1.month day = datetime1.day hour = datetime1.hour minute = datetime1.minute else: # fixed date for text reasons year = 2014 # 2014 09 15 21 35 month = 7 # 2014 07 23 18 30 day = 23 hour = 18 minute = 00 time_slot = datetime(year, month, day, hour, minute) global_data = GeostationaryFactory.create_scene("swisstrt", "04", "radar", time_slot) #cell='2014072316550030' #cell='2014072313000006' # max_rank if 'cell' in locals(): cell_ID = '_' + cell[8:] cell_dir = '/ID' + cell[8:] + '/' print("search cell id", cell_ID) global_data.load(['TRT'], cell=cell) else: cell_ID = '' cell_dir = '' global_data.load(['TRT']) # ,min_rank=8, cell="2018080710450054" #global_data.load(['TRT'],min_rank=28) # ,min_rank=8, cell="2018080710450054" #if hasattr(global_data, 'traj_IDs'):
def load_constant_fields(sat_nr):

    # radar threshold mask:
    radar_mask = GeostationaryFactory.create_scene("odyssey", "", "radar", datetime(1900, 1, 1, 0))
    # reproject this to the desired area:
    mask_rad_thres = np.load(
        '../data/odyssey_mask/threshold_exceedance_mask_avg15cut2_cut04_cutmistral_201706_201707_201708.npy')
    from mpop.projector import get_area_def
    area_radar_mask = 'EuropeOdyssey00'
    radar_mask.channels.append(
        Channel(name='mask_radar', wavelength_range=[0., 0., 0.], data=mask_rad_thres[:, :]))
    radar_mask['mask_radar'].area = area_radar_mask
    radar_mask['mask_radar'].area_def = get_area_def(area_radar_mask)

    # nominal viewing geometry
    print('*** read nominal viewing geometry', "meteosat", sat_nr, "seviri")
    # time_slot has NO influence at all, it just goes looking for the nominal position file
    # -> will use these fields for all dates
    vg = GeostationaryFactory.create_scene("meteosat", sat_nr, "seviri", datetime(1900, 1, 1, 0))
    vg.load(['vaa', 'vza', 'lon', 'lat'], reader_level="seviri-level6")
    msg_area = deepcopy(vg['vaa'].area)
    msg_area_def = deepcopy(vg['vaa'].area_def)
    msg_resolution = deepcopy(vg['vaa'].resolution)

    # read land sea mask (full SEVIRI Disk seen from 0 degree East)
    ls_file = '../data/SEVIRI_data/LandSeaMask_SeviriDiskFull00.nc'
    fh = Dataset(ls_file, mode='r')
    lsmask = fh.variables['lsmask'][:]

    # read topography (full SEVIRI Disk seen from 0 degree East)
    ls_file = '../data/SEVIRI_data/SRTM_15sec_elevation_SeviriDiskFull00.nc'
    fh = Dataset(ls_file, mode='r')
    ele = fh.variables['elevation'][:]

    # create a dummy satellite object (to reproject the land/sea mask and elevation)
    ls_ele = GeostationaryFactory.create_scene("meteosat", sat_nr, "seviri", datetime(1900, 1, 1, 0))
    #ls_ele.load(['CTTH'], calibrate=True, reader_level="seviri-level3")
    #convert_NWCSAF_to_radiance_format(ls_ele, None, 'CTH', False, True)

    # add land sea mask as a dummy channel
    ls_ele.channels.append(
        Channel(name='lsmask', wavelength_range=[0., 0., 0.], resolution=msg_resolution, data=lsmask[::-1, :]))
    #ls_ele['lsmask'].area = ls_ele['CTH'].area
    #ls_ele['lsmask'].area_def = ls_ele['CTH'].area_def
    ls_ele['lsmask'].area = msg_area
    ls_ele['lsmask'].area_def = msg_area_def

    # add elevation as a dummy channel
    ls_ele.channels.append(
        Channel(name='ele', wavelength_range=[0., 0., 0.], resolution=msg_resolution, data=ele[::-1, :]))
    #ls_ele['ele'].area = ls_ele['CTH'].area
    #ls_ele['ele'].area_def = ls_ele['CTH'].area_def
    ls_ele['ele'].area = msg_area
    ls_ele['ele'].area_def = msg_area_def

    return radar_mask, vg, ls_ele
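# --- sketch, not part of the original module -----------------------------------
# Hypothetical usage of load_constant_fields: read the constant fields once and
# reproject the land/sea mask and elevation to a working area; sat_nr "09" and
# the "ccs4" target area are assumptions.
radar_mask, vg, ls_ele = load_constant_fields("09")
ls_ele_ccs4 = ls_ele.project("ccs4", precompute=True)
print("land/sea mask:", ls_ele_ccs4['lsmask'].data.shape)
print("elevation:    ", ls_ele_ccs4['ele'].data.shape)
# --------------------------------------------------------------------------------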
debug_on()

from trollimage.colormap import rdbu, greys, rainbow, spectral
from trollimage.image import Image as trollimage
import datetime

#SAFNWC_MSG2_CT___201412021350_alps________.h5

time_slot = datetime.datetime(2015, 7, 9, 13, 0)
#area = get_area_def("alps")

global_data = GeostationaryFactory.create_scene("meteosat", "09", "seviri", time_slot)
prod = "SPhR"
global_data.load([prod], calibrate=False)
global_data = global_data.project("ccs4", precompute=True)

img = trollimage(global_data[prod].sphr_bl, mode="P", palette=global_data[prod].sphr_bl_palette)
img.save('./SPHR_BL_test.png')

# the original fragment breaks off here; the palette argument is completed by
# analogy with the sphr_bl image above
img = trollimage(global_data[prod].sphr_hl, mode="P", palette=global_data[prod].sphr_hl_palette)
    'visir_full': (0.6, 10.8,),
    'germ': (0.6, 0.8, 1.6, 3.9, 6.2, 7.3, 8.7, 9.7, 10.8, 12.0, 13.4),
    'ccs4': (0.6, 0.8, 1.6, 3.9, 6.2, 7.3, 8.7, 9.7, 10.8, 12.0, 13.4),
    'hrv_north': ('HRV',)
}

BITS_PER_SAMPLE = 8

DO_GEOIMAGE = False
DO_CONVECTION = False
DO_TROLLIMAGE = True

if DO_GEOIMAGE:
    for area_name, area_in, area_out in AREAS:
        global_data = GeostationaryFactory.create_scene("meteosat", SATNO, "seviri",
                                                        #area=area_in,
                                                        time_slot=TIMESLOT)
        # Load channel by channel (to save memory).
        for chn in CHANNEL_DICT[area_name]:
            global_data.load([chn])
            chn_name = global_data[chn].name
            # Save 'unit' ... it seems to be lost somewhere.
            global_data[chn].unit = global_data[chn].info.get('units', 'None')
            # Resample to Plate Caree.
            scene = global_data.project(area_out, mode='quick', precompute=True)
            # Kelvin -> Celsius.
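            # --- sketch, not part of the original script --------------------------
            # The fragment is cut off right after the "Kelvin -> Celsius" comment.
            # A minimal, assumed version of that conversion: subtract 273.15 from
            # the thermal channels only (the channel test below is an assumption).
            if chn not in (0.6, 0.8, 1.6, 'HRV'):
                scene[chn].data = scene[chn].data - 273.15
                scene[chn].unit = 'C'
            # -----------------------------------------------------------------------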