def load_rgb(satellite, satellite_nr, satellites_name, time_slot, rgb, area, in_msg, data_CTP):
    """Return the pixel data of one product/RGB reprojected onto *area*.

    For any product except 'CTP' a fresh satellite scene is created, the
    product is loaded (via load_products) and reprojected to *area*.
    For 'CTP' the data is taken as a deep copy from the already loaded
    *data_CTP* scene instead of re-reading it.

    Parameters
    ----------
    satellite, satellite_nr, satellites_name :
        arguments forwarded to GeostationaryFactory.create_scene
        (project-specific types -- TODO confirm exact expectations).
    time_slot : datetime
        observation time of the scene to read.
    rgb : str
        product/channel name to load (e.g. 'IR_108' or 'CTP').
    area : str
        target area name understood by scene.project().
    in_msg :
        input-configuration object forwarded to load_products.
    data_CTP :
        scene-like object that already contains the 'CTP' product.

    Returns
    -------
    The (masked) data array of the requested product on the target area.
    """
    if rgb != 'CTP':
        # read the data we would like to forecast
        global_data_RGBforecast = GeostationaryFactory.create_scene(
            satellite, satellite_nr, satellites_name, time_slot)
        #global_data_RGBforecast = GeostationaryFactory.create_scene(in_msg.sat, str(10), "seviri", time_slot)

        # area we would like to read
        area_loaded = get_area_def("EuropeCanary95")  #(in_windshift.areaExtraction)

        # load product, global_data is changed in this step!
        area_loaded = load_products(global_data_RGBforecast, [rgb], in_msg,
                                    area_loaded)
        # BUGFIX: this was a Python-2 print statement ("print '...'") in an
        # otherwise Python-3 file and was a SyntaxError under Python 3.
        print('... project data to desired area ', area)
        fns = global_data_RGBforecast.project(area)
        return fns[rgb].data
    else:
        # BUGFIX: the original did
        #     fns = deepcopy(data_CTP["CTP"].data); return fns[rgb].data
        # i.e. it indexed the already-extracted data array with the string
        # 'CTP', which raises at runtime.  Return the copied data directly.
        return deepcopy(data_CTP["CTP"].data)
def plot_msg_minus_cosmo(in_msg):
    """Plot the difference between observed MSG IR 10.8 and the COSMO
    synthetic satellite brightness temperature (SYNMSG_BT_CL_IR10.8).

    Reads the MSG channels configured in *in_msg*, reads the COSMO synthetic
    image (configuration taken from 'input_cosmo_cronjob.py'), computes
    COSMO-minus-MSG for the 'ccs4' area, renders it as a PNG (borders,
    title, logos, colorscale), optionally scp-copies / converts the result
    and finally runs the configured postprocessing.

    Returns the list of RGB names that were actually produced.

    NOTE(review): this function mutates *in_msg* (datetime, scpOutput,
    montage/product lists, resolution) as a side effect.
    """
    # do statistics for the last full hour (minutes=0, seconds=0)
    in_msg.datetime = datetime(in_msg.datetime.year, in_msg.datetime.month,
                               in_msg.datetime.day, in_msg.datetime.hour, 0, 0)

    area_loaded = choose_area_loaded_msg(in_msg.sat, in_msg.sat_nr,
                                         in_msg.datetime)

    # define contour write for coasts, borders, rivers
    cw = ContourWriterAGG(in_msg.mapDir)

    # check if input data is complete
    if in_msg.verbose:
        print("*** check input data for ", in_msg.sat_str())
    RGBs = check_input(in_msg,
                       in_msg.sat_str(layout="%(sat)s") + in_msg.sat_nr_str(),
                       in_msg.datetime)
    # in_msg.sat_nr might be changed to backup satellite

    if in_msg.verbose:
        print('*** Create plots for ')
        print(' Satellite/Sensor: ' + in_msg.sat_str())
        print(' Satellite number: ' + in_msg.sat_nr_str() + ' // ' +
              str(in_msg.sat_nr))
        print(' Satellite instrument: ' + in_msg.instrument)
        print(' Date/Time: ' + str(in_msg.datetime))
        print(' RGBs: ', in_msg.RGBs)
        print(' Area: ', in_msg.areas)
        print(' reader level: ', in_msg.reader_level)

    # define satellite data object
    #global_data = GeostationaryFactory.create_scene(in_msg.sat, in_msg.sat_nr_str(), "seviri", in_msg.datetime)
    global_data = GeostationaryFactory.create_scene(in_msg.sat_str(),
                                                    in_msg.sat_nr_str(),
                                                    in_msg.instrument,
                                                    in_msg.datetime)
    # global_data = GeostationaryFactory.create_scene("msg-ot", "", "Overshooting_Tops", in_msg.datetime)

    # nothing to do -> return early
    if len(RGBs) == 0 and len(in_msg.postprocessing_areas) == 0:
        return RGBs

    if in_msg.verbose:
        print("*** load satellite channels for " + in_msg.sat_str() +
              in_msg.sat_nr_str() + " ", global_data.fullname)

    # initialize processed RGBs
    RGBs_done = []

    # -------------------------------------------------------------------
    # load reflectivities, brightness temperatures, NWC-SAF products ...
    # -------------------------------------------------------------------
    area_loaded = load_products(global_data, RGBs, in_msg, area_loaded)

    cosmo_input_file = "input_cosmo_cronjob.py"
    print("... read COSMO input file: ", cosmo_input_file)
    in_cosmo = parse_commandline_and_read_inputfile(
        input_file=cosmo_input_file)

    # add composite
    in_msg.scpOutput = True
    in_msg.resize_montage = 70
    in_msg.postprocessing_montage = [[
        "MSG_IR-108cpc", "COSMO_SYNMSG-BT-CL-IR10.8",
        "MSG_IR-108-COSMO-minus-MSGpc"
    ]]
    in_msg.scpProducts = [[
        "MSG_IR-108cpc", "COSMO_SYNMSG-BT-CL-IR10.8",
        "MSG_IR-108-COSMO-minus-MSGpc"
    ]]
    #in_msg.scpProducts = ["all"]

    # define satellite data object for the COSMO synthetic image
    cosmo_data = GeostationaryFactory.create_scene(in_cosmo.sat_str(),
                                                   in_cosmo.sat_nr_str(),
                                                   in_cosmo.instrument,
                                                   in_cosmo.datetime)

    area_loaded_cosmo = load_products(cosmo_data, ['SYNMSG_BT_CL_IR10.8'],
                                      in_cosmo, area_loaded)

    # preprojecting the data to another area
    # --------------------------------------
    if len(RGBs) > 0:
        for area in in_msg.areas:
            print("")
            obj_area = get_area_def(area)

            # the MSG-COSMO difference is only defined on the Swiss ccs4 grid
            if area != 'ccs4':
                print("*** WARNING, diff MSG-COSMO only implemented for ccs4")
                continue

            # reproject data to new area
            print(area_loaded)

            if obj_area == area_loaded:
                if in_msg.verbose:
                    print("*** Use data for the area loaded: ", area)
                #obj_area = area_loaded
                data = global_data
                resolution = 'l'
            else:
                if in_msg.verbose:
                    print("*** Reproject data to area: ", area,
                          "(org projection: ", area_loaded.name, ")")
                obj_area = get_area_def(area)
                # PROJECT data to new area
                data = global_data.project(area, precompute=True)
                resolution = 'i'

            if in_msg.parallax_correction:
                loaded_products = [chn.name for chn in data.loaded_channels()]
                # parallax correction needs the cloud top height (CTH)
                if 'CTH' not in loaded_products:
                    print("*** Error in plot_msg (" +
                          inspect.getfile(inspect.currentframe()) + ")")
                    print(" Cloud Top Height is needed for parallax correction ")
                    print(" either load CTH or specify the estimation of the CTH in the input file (load 10.8 in this case)")
                    quit()
                if in_msg.verbose:
                    print(" perform parallax correction for loaded channels: ",
                          loaded_products)
                data = data.parallax_corr(fill=in_msg.parallax_gapfilling,
                                          estimate_cth=in_msg.estimate_cth,
                                          replace=True)

            # save reprojected data
            if area in in_msg.save_reprojected_data:
                save_reprojected_data(data, area, in_msg)

            # apply a mask to the data (switched off at the moment)
            if False:
                mask_data(data, area)

            # save average values
            if in_msg.save_statistics:
                # NOTE(review): yearS/monthS/dayS/dateS/hourS/minS are not
                # defined in this function -- this branch looks broken;
                # confirm where these date strings come from.
                mean_array = zeros(len(RGBs))
                #statisticFile = '/data/COALITION2/database/meteosat/ccs4/'+yearS+'/'+monthS+'/'+dayS+'/MSG_'+area+'_'+yearS[2:]+monthS+dayS+'.txt'
                statisticFile = './' + yearS + '-' + monthS + '-' + dayS + '/MSG_' + area + '_' + yearS[2:] + monthS + dayS + '.txt'
                if in_msg.verbose:
                    print("*** write statistics (average values) to " +
                          statisticFile)
                f1 = open(statisticFile, 'a')  # mode append
                i_rgb = 0
                for rgb in RGBs:
                    if rgb in products.MSG_color:
                        mean_array[i_rgb] = data[rgb.replace("c", "")].data.mean()
                        i_rgb = i_rgb + 1

                # create string to write
                str2write = dateS + ' ' + hourS + ' : ' + minS + ' UTC '
                for mm in mean_array:
                    str2write = str2write + ' ' + "%7.2f" % mm
                str2write = str2write + "\n"
                f1.write(str2write)
                f1.close()

            # creating plots/images
            if in_msg.make_plots:
                # choose map resolution
                in_msg.resolution = choose_map_resolution(
                    area, in_msg.mapResolution)

                # define area
                proj4_string = obj_area.proj4_string
                # e.g. proj4_string = '+proj=geos +lon_0=0.0 +a=6378169.00 +b=6356583.80 +h=35785831.0'
                area_extent = obj_area.area_extent
                # e.g. area_extent = (-5570248.4773392612, -5567248.074173444, 5567248.074173444, 5570248.4773392612)
                area_tuple = (proj4_string, area_extent)

                # only the difference product is plotted from here on
                RGBs = ['IR_108-COSMO-minus-MSG']

                print(data['IR_108'].data.shape)
                print(cosmo_data['SYNMSG_BT_CL_IR10.8'].data.shape)
                diff_MSG_COSMO = cosmo_data['SYNMSG_BT_CL_IR10.8'].data - data['IR_108'].data
                HRV_enhance_str = ''

                # add IR difference as "channel object" to satellite regional "data" object
                data.channels.append(
                    Channel(name=RGBs[0],
                            wavelength_range=[0., 0., 0.],
                            resolution=data['IR_108'].resolution,
                            data=diff_MSG_COSMO))

                for rgb in RGBs:

                    if not check_loaded_channels(rgb, data):
                        continue

                    PIL_image = create_PIL_image(rgb, data, in_msg,
                                                 obj_area=obj_area)
                    # !!! in_msg.colorbar[rgb] is initialized inside (give attention to rgbs) !!!

                    add_borders_and_rivers(PIL_image,
                                           cw,
                                           area_tuple,
                                           add_borders=in_msg.add_borders,
                                           border_color=in_msg.border_color,
                                           add_rivers=in_msg.add_rivers,
                                           river_color=in_msg.river_color,
                                           resolution=in_msg.resolution,
                                           verbose=in_msg.verbose)

                    # indicate mask
                    if in_msg.indicate_mask:
                        PIL_image = indicate_mask(rgb, PIL_image, data,
                                                  in_msg.verbose)

                    #if area.find("EuropeCanary") != -1 or area.find("ccs4") != -1:
                    dc = DecoratorAGG(PIL_image)

                    # add title to image
                    if in_msg.add_title:
                        add_title(PIL_image,
                                  in_msg.title,
                                  HRV_enhance_str + rgb,
                                  in_msg.sat_str(),
                                  data.sat_nr(),
                                  in_msg.datetime,
                                  area,
                                  dc,
                                  in_msg.font_file,
                                  in_msg.verbose,
                                  title_color=in_msg.title_color,
                                  title_y_line_nr=in_msg.title_y_line_nr
                                  )  # !!! needs change

                    # add MeteoSwiss and Pytroll logo
                    if in_msg.add_logos:
                        if in_msg.verbose:
                            print('... add logos')
                        dc.align_right()
                        if in_msg.add_colorscale:
                            dc.write_vertically()
                        # no logos on plain greyscale ('L' mode) images
                        if PIL_image.mode != 'L':
                            height = 60  # height=60.0 normal resolution
                            dc.add_logo(in_msg.logos_dir + "/pytroll3.jpg",
                                        height=height)  # height=60.0
                            dc.add_logo(in_msg.logos_dir + "/meteoSwiss3.jpg",
                                        height=height)
                            dc.add_logo(
                                in_msg.logos_dir +
                                "/EUMETSAT_logo2_tiny_white_square.png",
                                height=height)  # height=60.0

                    # add colorscale
                    if in_msg.add_colorscale and in_msg.colormap[rgb] != None:
                        # NOTE(review): nesting below reconstructed from a
                        # whitespace-mangled source -- confirm against the
                        # original file.
                        if rgb in products.MSG_color:
                            unit = data[rgb.replace("c", "")].info['units']
                        #elif rgb in products.MSG or rgb in products.NWCSAF or rgb in products.HSAF:
                        #    unit = data[rgb].info['units']
                        else:
                            unit = None
                            loaded_channels = [
                                chn.name for chn in data.loaded_channels()
                            ]
                            if rgb in loaded_channels:
                                if hasattr(data[rgb], 'info'):
                                    print(" hasattr(data[rgb], 'info')",
                                          list(data[rgb].info.keys()))
                                    if 'units' in list(data[rgb].info.keys()):
                                        print("'units' in data[rgb].info.keys()")
                                        unit = data[rgb].info['units']
                        print("... units = ", unit)
                        add_colorscale(dc, rgb, in_msg, unit=unit)

                    # append 'pc' to the product name if parallax corrected
                    if in_msg.parallax_correction:
                        parallax_correction_str = 'pc'
                    else:
                        parallax_correction_str = ''
                    rgb += parallax_correction_str

                    # create output filename
                    outputDir = format_name(
                        in_msg.outputDir,
                        data.time_slot,
                        area=area,
                        rgb=rgb,
                        sat=data.satname,
                        sat_nr=data.sat_nr())  # !!! needs change
                    outputFile = outputDir + "/" + format_name(
                        in_msg.outputFile,
                        data.time_slot,
                        area=area,
                        rgb=rgb,
                        sat=data.satname,
                        sat_nr=data.sat_nr())  # !!! needs change

                    # check if output directory exists, if not create it
                    path = dirname(outputFile)
                    if not exists(path):
                        if in_msg.verbose:
                            print('... create output directory: ' + path)
                        makedirs(path)

                    # save file
                    if exists(outputFile) and not in_msg.overwrite:
                        if stat(outputFile).st_size > 0:
                            print('... outputFile ' + outputFile +
                                  ' already exists (keep old file)')
                        else:
                            print('*** Warning, outputFile' + outputFile +
                                  ' already exists, but is empty (overwrite file)')
                            PIL_image.save(outputFile, optimize=True
                                           )  # optimize -> minimize file size
                            chmod(
                                outputFile, 0o777
                            )  ## FOR PYTHON3: 0o664  # give access read/write access to group members
                    else:
                        if in_msg.verbose:
                            print('... save final file: ' + outputFile)
                        PIL_image.save(
                            outputFile,
                            optimize=True)  # optimize -> minimize file size
                        chmod(
                            outputFile, 0o777
                        )  ## FOR PYTHON3: 0o664  # give access read/write access to group members

                    if in_msg.compress_to_8bit:
                        if in_msg.verbose:
                            print('... compress to 8 bit image: display ' +
                                  outputFile.replace(".png", "-fs8.png") + ' &')
                        subprocess.call(
                            "/usr/bin/pngquant -force 256 " + outputFile +
                            " 2>&1 &",
                            shell=True)  # 256 == "number of colors"

                    #if in_msg.verbose:
                    #   print " add coastlines to "+outputFile
                    ## alternative: reopen image and modify it (takes longer due to additional reading and saving)
                    #cw.add_rivers_to_file(img, area_tuple, level=5, outline='blue', width=0.5, outline_opacity=127)
                    #cw.add_coastlines_to_file(outputFile, obj_area, resolution=resolution, level=4)
                    #cw.add_borders_to_file(outputFile, obj_area, outline=outline, resolution=resolution)

                    # secure copy file to another place
                    if in_msg.scpOutput:
                        if (rgb in in_msg.scpProducts) or ('all' in [
                                x.lower() for x in in_msg.scpProducts
                                if type(x) == str
                        ]):
                            scpOutputDir = format_name(in_msg.scpOutputDir,
                                                       data.time_slot,
                                                       area=area,
                                                       rgb=rgb,
                                                       sat=data.satname,
                                                       sat_nr=data.sat_nr())
                            if in_msg.compress_to_8bit:
                                if in_msg.verbose:
                                    print("... secure copy " +
                                          outputFile.replace(".png", "-fs8.png")
                                          + " to " + scpOutputDir)
                                subprocess.call(
                                    "scp " + in_msg.scpID + " " +
                                    outputFile.replace(".png", "-fs8.png") +
                                    " " + scpOutputDir + " 2>&1 &",
                                    shell=True)
                            else:
                                if in_msg.verbose:
                                    print("... secure copy " + outputFile +
                                          " to " + scpOutputDir)
                                subprocess.call("scp " + in_msg.scpID + " " +
                                                outputFile + " " +
                                                scpOutputDir + " 2>&1 &",
                                                shell=True)

                    # optional second scp target
                    if in_msg.scpOutput and in_msg.scpID2 != None and in_msg.scpOutputDir2 != None:
                        if (rgb in in_msg.scpProducts2) or ('all' in [
                                x.lower() for x in in_msg.scpProducts2
                                if type(x) == str
                        ]):
                            scpOutputDir2 = format_name(in_msg.scpOutputDir2,
                                                        data.time_slot,
                                                        area=area,
                                                        rgb=rgb,
                                                        sat=data.satname,
                                                        sat_nr=data.sat_nr())
                            if in_msg.compress_to_8bit:
                                if in_msg.verbose:
                                    print("... secure copy " +
                                          outputFile.replace(".png", "-fs8.png")
                                          + " to " + scpOutputDir2)
                                subprocess.call(
                                    "scp " + in_msg.scpID2 + " " +
                                    outputFile.replace(".png", "-fs8.png") +
                                    " " + scpOutputDir2 + " 2>&1 &",
                                    shell=True)
                            else:
                                if in_msg.verbose:
                                    print("... secure copy " + outputFile +
                                          " to " + scpOutputDir2)
                                subprocess.call("scp " + in_msg.scpID2 + " " +
                                                outputFile + " " +
                                                scpOutputDir2 + " 2>&1 &",
                                                shell=True)

                    # optional NinJo-TIFF output
                    if 'ninjotif' in in_msg.outputFormats:
                        ninjotif_file = format_name(outputDir + '/' +
                                                    in_msg.ninjotifFilename,
                                                    data.time_slot,
                                                    sat_nr=data.sat_nr(),
                                                    RSS=in_msg.RSS,
                                                    area=area,
                                                    rgb=rgb)
                        from plot_coalition2 import pilimage2geoimage
                        GEO_image = pilimage2geoimage(PIL_image, obj_area,
                                                      data.time_slot)
                        GEO_image.save(ninjotif_file,
                                       fformat='mpop.imageo.formats.ninjotiff',
                                       ninjo_product_name=rgb,
                                       chan_id=products.ninjo_chan_id[
                                           rgb.replace("_", "-") + "_" + area],
                                       nbits=8)
                        chmod(ninjotif_file, 0o777)
                        print(("... save ninjotif image: display ",
                               ninjotif_file, " &"))

                    if rgb not in RGBs_done:
                        RGBs_done.append(rgb)

    ## start postprocessing
    # NOTE(review): 'data' is only bound if the plotting loop above ran --
    # confirm postprocessing is never reached with len(RGBs) == 0.
    for area in in_msg.postprocessing_areas:
        postprocessing(in_msg, global_data.time_slot, int(data.sat_nr()),
                       area)

    if in_msg.verbose:
        print(" ")

    return RGBs_done
# define area proj4_string = obj_area.proj4_string # e.g. proj4_string = '+proj=geos +lon_0=0.0 +a=6378169.00 +b=6356583.80 +h=35785831.0' area_extent = obj_area.area_extent # e.g. area_extent = (-5570248.4773392612, -5567248.074173444, 5567248.074173444, 5570248.4773392612) area_tuple = (proj4_string, area_extent) # read CTP to distinguish high, medium and low clouds global_data_CTP = GeostationaryFactory.create_scene( in_msg.sat, str(in_msg.sat_nr).zfill(2), "seviri", time_slot) #global_data_CTP = GeostationaryFactory.create_scene(in_msg.sat, str(10), "seviri", time_slot) area_loaded = get_area_def( "EuropeCanary95") #(in_windshift.areaExtraction) area_loaded = load_products(global_data_CTP, ['CTP'], in_msg, area_loaded) data_CTP = global_data_CTP.project(area) [nx, ny] = data_CTP['CTP'].data.shape # read all rgbs global_data = GeostationaryFactory.create_scene( in_msg.sat, str(in_msg.sat_nr).zfill(2), "seviri", time_slot) #global_data_CTP = GeostationaryFactory.create_scene(in_msg.sat, str(10), "seviri", time_slot) area_loaded = get_area_def( "EuropeCanary95") #(in_windshift.areaExtraction) area_loaded = load_products(global_data, rgbs, in_msg, area_loaded) data = global_data.project(area) if downscaling_data == True:
for i in range(5, 65, 5): leadS = "%02d" % i #diff["t"+leadS] = {} diff = [] diff1 = [] yearS, monthS, dayS, hourS, minS = string_date(time_slot0 + timedelta(minutes=i)) #print ("*** read data for ", in_msg.sat_str(),in_msg.sat_nr_str(), "seviri", time_slot0+timedelta(minutes = i)) global_data = GeostationaryFactory.create_scene( in_msg.sat_str(), in_msg.sat_nr_str(), "seviri", time_slot0 + timedelta(minutes=i)) area_loaded = get_area_def( "EuropeCanary95") #(in_windshift.areaExtraction) area_loaded = load_products(global_data, ['CTT'], in_msg, area_loaded) data = global_data.project("ccs4") img_obs = deepcopy(data['CTT'].data) img_obs.mask[:, :] = False if True: print("pickles/" + year0S + month0S + day0S + "_" + hour0S + min0S + "_CTT_t" + leadS + "_1layer.p") tmp = pickle.load( open( "pickles/" + year0S + month0S + day0S + "_" + hour0S + min0S + "_CTT_t" + leadS + "_1layer.p", "rb")) tmp = (tmp[0] - img_obs) diff1.append(tmp) tmp = tmp.flatten()
def properties_cells(t1,
                     tStop,
                     current_labels=None,
                     metadata=None,
                     labels_dir=None,
                     outputDir_labels=None,
                     in_msg=None,
                     sat_data=None):
    """Track labelled convective cells from t1 to tStop in 5 min steps.

    For each time step the function loads (or receives) a labelled cell
    image, matches every cell against the labels of the previous time step
    (5 min earlier) by pixel overlap, assigns persistent cell IDs, records
    per-cell properties (start time, origin, channel means, area in pixels,
    center of mass) and writes labels / connections / cell properties to
    shelve files in *labels_dir*.

    Parameters
    ----------
    t1, tStop : datetime -- first and last time step (inclusive).
    current_labels : optional label array handed over by the caller; if
        None, labels are read from the shelve file of the time step.
    metadata : metadata stored alongside the labels.
    labels_dir : directory for the Labels_*.shelve files (default below).
    outputDir_labels : if not None, label figures are written there.
    in_msg : input-configuration object; must not be None.
    sat_data : optional already-projected scene; if None, data is read.

    Returns
    -------
    (data_new, first_time_step) -- the relabelled array of the LAST
    processed time step and whether that step was treated as the first.
    """
    # channels whose per-cell mean values are recorded
    rgb_load = [
        'WV_062', 'WV_073', 'IR_039', 'IR_087', 'IR_097', 'IR_108', 'IR_120',
        'IR_134'
    ]  #,'CTP','CTT']
    #rgb_out = 'WV_062minusIR_108'

    only_obs_noForecast = False
    rapid_scan_mode = True
    #if only_obs_noForecast == True:
    #    in_dir = '/opt/users/'+in_msg.user+'/PyTroll/scripts//Mecikalski_obs/cosmo/Channels/labels/'
    #elif rapid_scan_mode == True:
    #    in_dir = '/opt/users/'+in_msg.user+'/PyTroll/scripts//Mecikalski_RapidScan/cosmo/Channels/labels//'
    #else:
    #    in_dir = '/opt/users/'+in_msg.user+'/PyTroll/scripts//Mecikalski/cosmo/Channels/labels/'

    # load a few standard things
    if in_msg is None:
        print("*** Error, in property_cells (property_cells)")
        print(" no input class passed as argument")
        quit()
        # NOTE(review): everything below quit() is unreachable dead code --
        # reconstructed nesting from whitespace-mangled source; confirm.
        from get_input_msg import get_input_msg
        in_msg = get_input_msg('input_template')
        in_msg.resolution = 'i'
        in_msg.sat_nr = 9
        in_msg.add_title = False
        in_msg.outputDir = './pics/'
        in_msg.outputFile = 'WS_%(rgb)s-%(area)s_%y%m%d%H%M'
        in_msg.fill_value = [0, 0, 0]  # black
        in_msg.reader_level = "seviri-level4"

    # satellite for HRW winds
    sat_nr = "08"  #in_windshift.sat_nr
    area = "ccs4"  #c2"#"ccs4" #in_windshift.ObjArea

    # define area object
    obj_area = get_area_def(area)  #(in_windshift.ObjArea)

    # define area
    proj4_string = obj_area.proj4_string
    # e.g. proj4_string = '+proj=geos +lon_0=0.0 +a=6378169.00 +b=6356583.80 +h=35785831.0'
    area_extent = obj_area.area_extent
    # e.g. area_extent = (-5570248.4773392612, -5567248.074173444, 5567248.074173444, 5570248.4773392612)
    area_tuple = (proj4_string, area_extent)

    # bookkeeping variables (several appear unused -- left for reference)
    mean_108_evolution = []
    area34 = []
    split34 = []
    merge34 = []
    t_start34 = 0
    t_end34 = 0
    lonely_cells = 0
    cell_interesting = 77
    count_double = 0

    #labels_dir = '/data/cinesat/out/labels/'
    if labels_dir is None:
        labels_dir = '/opt/users/' + in_msg.user + '/PyTroll/scripts/labels/'  #compatible to all users
        print("... use default directory to save labels: " + labels_dir)

    # loop over time
    while t1 <= tStop:

        print(in_msg.sat, str(in_msg.sat_nr), "seviri", t1)

        if sat_data is None:
            # now read the data we would like to forecast
            global_data = GeostationaryFactory.create_scene(
                in_msg.sat, str(in_msg.sat_nr), "seviri", t1)
            #global_data_RGBforecast = GeostationaryFactory.create_scene(in_msg.sat, str(10), "seviri", time_slot)

            # area we would like to read
            area_loaded = get_area_def("EuropeCanary95")  #(in_windshift.areaExtraction)

            # load product, global_data is changed in this step!
            area_loaded = load_products(global_data, rgb_load, in_msg,
                                        area_loaded)
            print('... project data to desired area ', area)
            data = global_data.project(area, precompute=True)
        else:
            data = sat_data

        # date strings of the current time step t1
        yearS = str(t1.year)
        monthS = "%02d" % t1.month
        dayS = "%02d" % t1.day
        hourS = "%02d" % t1.hour
        minS = "%02d" % t1.minute

        nx, ny = data[rgb_load[0]].data.shape

        # create array for all channel values
        values_rgb = np.zeros((len(rgb_load), nx, ny))
        # copy all observations/channels into one large numpy array
        for rrgb in range(len(rgb_load)):
            values_rgb[rrgb, :, :] = deepcopy(
                data[rgb_load[rrgb]].data)  #-data_108[rgb_load[1]].data

        if current_labels is None:
            print("--- reading labels from shelve files")
            filename = labels_dir + 'Labels_%s.shelve' % (yearS + monthS +
                                                          dayS + hourS + minS)
            myShelve = shelve.open(filename)
            data1 = deepcopy(myShelve['labels'])
            metadata = deepcopy(myShelve['metadata'])
            myShelve.close()
        else:
            print("--- recieving labels from plot_coaltion2")
            data1 = deepcopy(current_labels)

        data_new = np.zeros(data1.shape)
        all_cells = {}

        # t0 is 5min before t1
        t0 = t1 - timedelta(minutes=5)
        year0S = str(t0.year)
        month0S = "%02d" % t0.month
        day0S = "%02d" % t0.day
        hour0S = "%02d" % t0.hour
        min0S = "%02d" % t0.minute

        # look for label files of the previous time step
        file_previous_labels = labels_dir + 'Labels_%s*' % (
            year0S + month0S + day0S + hour0S + min0S)
        filename1 = glob.glob(file_previous_labels)
        print("the previous filename is: ", filename1)

        # special-case midnight of the previous step
        if t0.hour == 0 and t0.minute == 0:
            check_date = True
        else:
            check_date = False

        if len(filename1) > 0 or check_date:
            first_time_step = False
        else:
            first_time_step = True

        if first_time_step:
            # these labels are random numbers assigned in COALITION2 (different number for each cell)
            data0 = np.array(data1, 'uint32')
            labels0 = np.unique(data0[data0 > 0])
            id_data = yearS + monthS + dayS + hourS + minS
            #list_id = []
            # loop over all cell labels
            # NOTE(review): iterates 1..len(labels0) and masks data1 == i,
            # which assumes labels are consecutive 1..N -- confirm.
            for i in range(1, len(labels0) + 1):
                #create a mask which has 1s only where the current cell is
                mask_current_label = np.zeros(data1.shape)
                mask_current_label = np.where(data1 == i, 1, 0)
                # calculate: coordinates center of mass
                center = ndimage.measurements.center_of_mass(
                    mask_current_label)
                center = np.rint(center)
                # calculate means of the satellite channels (brightness temperatures)
                values1 = []
                for rrgb in range(len(rgb_load)):
                    these = values_rgb[rrgb, :, :]
                    values_cell = these[np.where(mask_current_label == 1)]
                    values1.append(values_cell.mean())

                # take i as cell id and save cells properties
                all_cells["ID" + str(i)] = Cells()
                all_cells["ID" + str(i)].t_start = [
                    t1.year, t1.month, t1.day, t1.hour, t1.minute
                ]  # True
                all_cells["ID" + str(
                    i
                )].origin = "t0"  # "start_programm", "day_before", "merge", "split", "enters_area", "appear"
                all_cells["ID" + str(i)].mean108 = values1
                all_cells["ID" + str(i)].area_px = sum(sum(mask_current_label))
            data_new = deepcopy(data0)
        else:
            # read cell labels from previous time step t0
            id_data0 = year0S + month0S + day0S + hour0S + min0S
            file_previous_labels = labels_dir + 'Labels_%s.shelve' % (
                year0S + month0S + day0S + hour0S + min0S)
            myShelve = shelve.open(file_previous_labels)
            data0 = deepcopy(myShelve['labels'])
            myShelve.close()

            # extract unique cell labels corresponding to the ID at t0
            data0 = np.array(data0, 'uint32')
            labels0 = np.unique(
                data0[data0 > 0])  # this might be an empty tuple [] !HAU!
            print("this should match with output previous step \n", labels0)

            # one connection list per previous cell ID
            connections = []
            for con in labels0:
                connections.append(["ID" + str(con)])

            # total number of cell at t0
            if len(labels0) == 0:
                new_id_num = 0
            else:
                new_id_num = labels0.max() + 1  # this does not work for []

            #these labels are random numbers assigned in COALITION2 (different number for each cell)
            data1 = np.array(data1, 'uint32')
            labels1 = np.unique(data1)  # this might be an empty [] !HAU!

            # new id number for the new cells at t1
            if labels0.size == 0:
                new_id_num = 1
            else:
                try:
                    new_id_num = labels0.max() + 1
                except ValueError:
                    print("labels0: ", labels0)
                    print(type(labels0))
                    print("quitting in properties_cells line 397")
                    quit()

            #list to make sure you record every split
            list_previous = []

            # loop through cells at t1
            for i in labels1:  #range(1,len(labels1)+1):
                # label 0 is background
                if i != 0:
                    #required to correct the output "data_new" if the ID of a cell changes because a bigger cell takes it!!!
                    correct_id_already_created = 0
                    #create a mask which has 1s only where the current cell is
                    mask_current_label = np.zeros(data1.shape)
                    mask_current_label = np.where(data1 == i, 1, 0)
                    #store coordinates center of mass
                    center = ndimage.measurements.center_of_mass(
                        mask_current_label)
                    center = np.rint(center)

                    # per-channel mean brightness temperature of the cell
                    values1 = []
                    for rrgb in range(len(rgb_load)):
                        these = values_rgb[rrgb, :, :]
                        values_cell = these[np.where(mask_current_label == 1)]
                        values1.append(values_cell.mean())
                    ## put calculation of mean value in a function (and also consider more properties later)
                    #take the values of the 10.8 channel for the current cell
                    #values1 = values_interest[np.where(mask_current_label == 1)]

                    # consider the area of the current cell in the previous time step (TEST OVERLAPPING)
                    previous_t = data0 * mask_current_label
                    # store the ID number of all the overlapping cells at t0 !!! (change to minimum overlapping to consider them)
                    labels_previous = np.unique(previous_t[previous_t > 0])

                    ##### new cell with no correspondence in previous time step #####
                    if len(labels_previous) == 0:
                        #Store the values for the current cell, with the new ID
                        all_cells["ID" + str(new_id_num)] = Cells()
                        all_cells["ID" + str(new_id_num)].t_start = [
                            t1.year, t1.month, t1.day, t1.hour, t1.minute
                        ]  # True
                        #check if the cell appeared in the middle of the area or came from outside the domain
                        if check_position(mask_current_label):
                            all_cells["ID" +
                                      str(new_id_num)].origin = "from_outside"
                        else:
                            all_cells["ID" + str(new_id_num)].origin = "appear"
                        all_cells["ID" +
                                  str(new_id_num)].mean108 = values1  #.mean()
                        all_cells["ID" + str(new_id_num)].area_px = sum(
                            sum(mask_current_label))
                        #store the ID number which will be used to create the data_new (with numbers corresponding to ID cell) for next time step
                        label_current = new_id_num
                        new_id_num += 1

                    ##### cell with one correspondence in previous time step #####
                    elif len(labels_previous) == 1:
                        #check if a cell exists at current time already with the same ID (derived from same cell at previous time step)
                        if check_cell_same_ID(
                                all_cells, "ID" + str(labels_previous[0])
                        ):  #if "ID" + str(labels_previous[0]) in all_cells.keys():
                            id_current, id_samePrevious, correct_id_already_created, label_current, all_cells = define_IDs_cell_same_ID(
                                all_cells, mask_current_label,
                                labels_previous[0], new_id_num)
                            #if correct_id_already_created != 0:
                            #    connections = correct_connections(connections, id_samePrevious, all_cells, id_current)
                            new_id_num += 1
                        # If there is no cell with that ID yet, the current cell gets it
                        else:
                            id_current = "ID" + str(labels_previous[0])
                            all_cells[id_current] = Cells()
                            #store the ID number which will be used to create the data_new (with numbers corresponding to ID cell) for next time step
                            label_current = labels_previous[0]
                            #Store the values for the current cell
                            all_cells[id_current].origin = "from_previous"
                            all_cells[id_current].id_prev = [
                                "ID" + str(labels_previous[0])
                            ]
                            all_cells[id_current].area_px = sum(
                                sum(mask_current_label))
                            all_cells[id_current].mean108 = values1  #.mean()
                            # dead code kept as in original (triple-quoted)
                            """ lc=0 for con in range(len(connections)): if connections[con][0] == "ID" + str(labels_previous[0]): print "id_current",id_current lc+=1 connections[con].append(id_current) if lc == 0: lonely_cells+=1 """
                        #add the label of the previous cell (t0) which will be used at the end to make sure all split are recognized
                        list_previous.append(labels_previous[0])

                    ##### cell with more then one correspondence in previous time step #####
                    else:
                        largest_previous = labels_previous[0]
                        max_tot_px = 0
                        #scan through the cells the current comes from and look for the biggest (you'll use that ID num)
                        for h in range(len(labels_previous)):
                            current_label = labels_previous[h]
                            count_px = np.where(data0 == current_label, 1, 0)
                            tot_px = sum(sum(count_px))
                            if tot_px > max_tot_px:
                                largest_previous = current_label
                                max_tot_px = tot_px
                            #add the label of the previous cell (t0) which will be used at the end to make sure all split are recognized
                            list_previous.append(current_label)
                            # dead code kept as in original (triple-quoted)
                            """ lc = 0 for con in range(len(connections)): if connections[con][0] == "ID" + str(labels_previous[h]): connections[con].append("ID" + str(current_label)) lc +=1 if lc == 0: lonely_cells +=1 """

                        id_current = "ID" + str(largest_previous)
                        if check_cell_same_ID(
                                all_cells, id_current
                        ):  #if "ID" + str(labels_previous[0]) in all_cells.keys():
                            id_current, id_samePrevious, correct_id_already_created, label_current, all_cells = define_IDs_cell_same_ID(
                                all_cells, mask_current_label,
                                largest_previous, new_id_num)
                            #if correct_id_already_created != 0:
                            #    connections = correct_connections(connections, id_samePrevious, all_cells, id_current)
                            new_id_num = new_id_num + 1
                        else:
                            label_current = largest_previous
                            id_current = "ID" + str(largest_previous)
                            all_cells[id_current] = Cells()
                            all_cells[id_current].mean108 = values1  #.mean()
                            all_cells[id_current].origin = "merge"
                            all_cells[id_current].area_px = sum(
                                sum(mask_current_label))
                            all_cells[id_current].id_prev = [
                                "ID" + str(labels_previous[lp])
                                for lp in range(len(labels_previous))
                            ]
                        print("more correspondence ",
                              ("ID" + str(largest_previous)), "coming from ", [
                                  "ID" + str(labels_previous[lp])
                                  for lp in range(len(labels_previous))
                              ])

                    # write the resolved cell ID into the output label image
                    if correct_id_already_created != 0:
                        data_new[data_new ==
                                 label_current] = correct_id_already_created
                    data_new[mask_current_label == 1] = label_current
                    all_cells["ID" + str(label_current)].center = center

            #identify labels the current cells are created from that are repeated (meaning the cell split)
            labels_repeated = np.unique([
                "ID" + str(x) for x in list_previous
                if list_previous.count(x) > 1
            ])

            #make sure that the cells that come from splitting cells get a split
            for items in all_cells:
                item = all_cells[items]
                if item.split != 1:
                    for n_prev in range(len(item.id_prev)):
                        if item.id_prev[n_prev] in labels_repeated:
                            item.split = 1

        labels, numobjects = ndimage.label(data_new)
        print("....starting updating cells")

        # optional figure output of the new label image
        if outputDir_labels is not None:
            make_figureLabels(deepcopy(data_new),
                              all_cells,
                              obj_area,
                              outputDir_labels,
                              colorbar=False,
                              vmin=False,
                              vmax=False,
                              white_background=True,
                              t=t1)

        data_new = data_new.astype(
            'uint32'
        )  #unsigned char int https://docs.python.org/2/library/array.html

        # save the new labels for the current time step
        filename = labels_dir + 'Labels_%s.shelve' % (yearS + monthS + dayS +
                                                      hourS + minS)
        myShelve = shelve.open(filename)
        myShelve['labels'] = deepcopy(data_new)
        myShelve.close()
        filenames_for_permission = glob.glob(labels_dir + 'Labels_%s*' %
                                             (yearS + monthS + dayS + hourS +
                                              minS))
        for file_per in filenames_for_permission:
            print(("modified permission: ", file_per))
            os.chmod(file_per, 0o664)  ## FOR PYTHON3: 0o664
        print(("....updated cells labels", filename))

        # NOTE(review): 'connections' is only defined in the
        # non-first-time-step branch above -- confirm indentation/guard of
        # this section against the original file.
        list_cells = list(all_cells.keys())
        for cell_connection in list_cells:
            ancestors = all_cells[cell_connection].id_prev
            for ancestor in ancestors:
                for con in range(len(connections)):
                    if connections[con][0] == ancestor:
                        connections[con].append(cell_connection)

        # save the connections into the shelve of the PREVIOUS time step
        filename = labels_dir + 'Labels_%s.shelve' % (year0S + month0S +
                                                      day0S + hour0S + min0S)
        d = shelve.open(filename)
        d['connections'] = deepcopy(connections)
        d.close()
        print(("....updated cells connections", labels_dir + 'Labels_%s.shelve'
               % (year0S + month0S + day0S + hour0S + min0S)))
        filenames_for_permission = glob.glob(labels_dir + 'Labels_%s*' %
                                             (year0S + month0S + day0S +
                                              hour0S + min0S))
        for file_per in filenames_for_permission:
            os.chmod(file_per, 0o664)  ## FOR PYTHON3: 0o664

        print("....starting updating cells")
        # save cell properties + labels + metadata for the current time step
        filename = create_dir(labels_dir + 'Labels_%s.shelve' %
                              (yearS + monthS + dayS + hourS + minS))
        myShelve = shelve.open(filename)
        dict_cells = {
            'cells': all_cells,
            'labels': data_new,
            'metadata': metadata
        }
        myShelve.update(dict_cells)
        # close the shelve
        myShelve.close()
        print("....updated all cells")
        filenames_for_permission = glob.glob(labels_dir + 'Labels_%s*' %
                                             (yearS + monthS + dayS + hourS +
                                              minS))
        for file_per in filenames_for_permission:
            print(("modified permission: ", file_per))
            os.chmod(file_per, 0o664)  ## FOR PYTHON3: 0o664

        # advance to the next 5 min time step
        t1 = t1 + timedelta(minutes=5)

    return data_new, first_time_step
def plot_forecast_area(ttt, model, outputDir, current_labels = None, t_stop=None, BackgroundFile=None, ForeGroundRGBFile=None, labels_dir = '/opt/users/'+getpass.getuser()+'/PyTroll/scripts/labels/', in_msg = None):
    """Forecast the area evolution of detected convective cells and plot them.

    For every time step from *ttt* to *t_stop* (5-minute cadence) this:
      1. reads the current cell label array (from *current_labels* or a
         ``Labels_YYYYmmddHHMM.shelve`` file),
      2. for each unique cell label, computes a 1-hour backward history via
         ``history_backward`` and extrapolates the cell area forward with
         ``future_properties`` (linear fit if the history is very short,
         otherwise the fit named by *model*),
      3. shifts (mean displacement) and rescales (sqrt of area growth) the
         cell mask for up to 13 five-minute forecast steps,
      4. renders the forecast contours on a background image (or, when
         ``in_msg.nrt == False`` and no background file exists, on freshly
         loaded IR_108 data) and saves the figure via ``format_name``.

    Parameters
    ----------
    ttt : datetime
        First (or only) analysis time.
    model : str
        Fit model name passed to ``future_properties`` for the area trend.
    outputDir : str
        Directory for the produced forecast images.
    current_labels : ndarray or None
        Label array to use directly; if None, labels are read from shelve.
    t_stop : datetime or None
        Last analysis time; defaults to *ttt* (single time step).
    BackgroundFile, ForeGroundRGBFile : str or None
        Optional explicit image paths (wildcards allowed); defaults are
        hard-coded archive paths.
    labels_dir : str
        Directory holding the label shelve files.
    in_msg : input-message object or None
        Run configuration (satellite ids, output naming, nrt flag, ...).
        NOTE(review): several attributes (``in_msg.nrt``,
        ``in_msg.standardOutputName``) are accessed before the later
        ``if in_msg is None`` checks, so in practice in_msg must not be
        None — confirm with callers.

    Returns
    -------
    None.  Side effects: saves PNG images, may call ``postprocessing``,
    and calls ``quit()`` on unrecoverable input errors.
    """
    verbose = True
    if t_stop is None:
        t_stop = ttt
    ylabel = "area"
    # loop over analysis times in 5-minute steps
    while ttt <= t_stop:
        yearS, monthS, dayS, hourS, minS = string_date(ttt)
        if verbose:
            print("******** read cell properties from shelve")
        if current_labels is None:
            yearS, monthS, dayS, hourS, minS = string_date(ttt)
            filename = 'Labels_%s.shelve'%(yearS+monthS+dayS+hourS+minS)
            myShelve = shelve.open(filename)
            labels_all = deepcopy(myShelve['labels'])
        else:
            labels_all = deepcopy(current_labels)
        if verbose:
            print(labels_all)
        # cell ids are the distinct positive values of the label array
        unique_labels = np.unique(labels_all[labels_all>0])
        if verbose:
            print(("... cells with unique labels: ", unique_labels))
        forecasted_labels = {}
        forecasted_areas = []
        at_least_one_cell = False
        if verbose:
            print("*** computing history backward (", labels_dir, ")")
        for interesting_cell in unique_labels:
            forecasted_labels["ID"+str(interesting_cell)]=[]
            # calculate backward history for 1 hour and save it in labels_dir
            ind, area, displacement, time, center = history_backward(ttt, interesting_cell, True, in_msg, ttt-timedelta(hours = 1), labels_dir=labels_dir) #-timedelta(minutes = 10)) # current time, cell_id, backward? time_stop
            # skip cells without usable history (new cell, or center of mass outside domain)
            if area is None or len(area)<=1:
                if verbose:
                    print("new cell or cell with COM outside domain")
                continue
            at_least_one_cell = True
            if len(area)<=3: # if history is too short, use linear extrapolation
                t, y = future_properties(time, area, ylabel, "linear")
            else:
                t, y = future_properties(time, area, ylabel, model)
            if False:
                # disabled: forward history for comparison plots
                ind1, area1, displacement1, time1, center = history_backward(ttt, interesting_cell, False, ttt+timedelta(hours=1), labels_dir=labels_dir)
                print("******** computed history forward")
                t2 = time1 #[::-1]
                y2 = area1 #[::-1]
            nx,ny = labels_all.shape
            #if verbose:
            #    print(nx,ny)
            # binary mask of the current cell
            label_cell = np.zeros(labels_all.shape)
            label_cell[labels_all==interesting_cell] = 1
            #pickle.dump(label_cell, open("test_label.p", "wb" ) )
            #quit()
            dt = 0
            if False:
                figure_labels(label_cell, outputDir, ttt, dt, area_plot="ccs4", add_name = "_ID"+str(interesting_cell), verbose=verbose)
            area_current = sum(sum(label_cell))
            forecasted_areas.append(area_current)
            # index of the first forecast step in the fitted series
            indx = np.where(t==ttt)[0] + 1
            if verbose:
                print("*** compute displacement ")
            if displacement.shape[1]==2:
                if len(displacement) == 0:
                    dx = 0
                    dy = 0
                else:
                    try:
                        # mean historical displacement, rounded to whole pixels
                        dx = int(round(displacement[:,0].mean()))
                        dy = int(round(displacement[:,1].mean()))
                    except ValueError:
                        print("VALUE ERROR")
                        print(displacement)
                        quit()
                print(" computed displacement dx, dy = ", dx, dy)
            else:
                print("wrong displacement")
                quit()
            labels_in_time={}
            index_stop = 12
            print(("*** calculate forecasts for cell ID"+str(interesting_cell)))
            if verbose:
                print("index time area growth")
                print("----------------------------")
            # up to 13 forecast steps of 5 minutes each
            for i in range(13):
                dt += 5
                #if verbose:
                #    print("... for time ", dt ,", index ", indx + i)
                if indx+i >= len(y):
                    # fitted series exhausted -> stop forecasting this cell
                    index_stop = deepcopy(i)
                    break
                else:
                    area_new = y[indx+i]
                    area_prev = y[indx+i-1]
                #if verbose:
                #    print("area px that will be grown ", area_current)
                #    print("area forecasted ", area_new)
                #    print("area forecasted prev ", area_prev)
                ###growth = sqrt(float(area_new)/float(area_current))
                # NOTE(review): len() on area_new/area_prev suggests these are
                # array-like even though area_new < 0 compares as a scalar —
                # confirm the return type of future_properties
                if area_new < 0 or len(area_new)==0 or len(area_prev)==0:
                    if verbose:
                        print("the cell is predicted to disappear")
                    index_stop = deepcopy(i)
                    break
                # linear scale factor so that the mask area grows by area_new/area_prev
                growth = sqrt(float(area_new)/float(area_prev))
                #if verbose:
                #    print("growing by ", growth)
                #    print("dx ", dx)
                #    print("dy ", dy)
                if verbose:
                    print((indx + i, dt, area_new, growth))
                #figure_labels(label_cell, outputDir, ttt, dt, area_plot="ccs4", add_name = "before")
                # advect the mask by the mean displacement
                shifted_label = resize_array(label_cell, dx, dy, nx, ny)
                #figure_labels(shifted_label, outputDir, ttt, dt, area_plot="ccs4", add_name = "before_shifted")
                #quit()
                if verbose:
                    print((" after shift ", sum(sum(shifted_label))))
                if sum(sum(shifted_label))==0:
                    #the cell is outside the domain
                    break
                #center of mass before resizing
                center_before = ndimage.measurements.center_of_mass(shifted_label)
                center_before = np.rint(center_before)
                #if verbose:
                #    print(" after shift ", sum(sum(shifted_label)))
                # grow/shrink the mask; imresize is deprecated in modern SciPy
                resized_label = scipy.misc.imresize(shifted_label,float(growth),'nearest')
                resized_label[resized_label >0] = 1
                temp_label = np.zeros((nx,ny))
                #after resizing, the array is larger/smaller than nx,ny --> create new array that contains all the label region
                if resized_label.shape[0]<nx:
                    temp_label[0:resized_label.shape[0],0:resized_label.shape[1]] = deepcopy(resized_label)
                else:
                    x_start = max(min(np.nonzero(resized_label)[0])-1,0)
                    y_start = max(min(np.nonzero(resized_label)[1])-1,0)
                    temp_label[0:min(nx,resized_label.shape[0]-x_start),0:min(ny,resized_label.shape[1]-y_start)] = deepcopy(resized_label[x_start:min(x_start+nx,resized_label.shape[0]),y_start:min(y_start+ny,resized_label.shape[1])])
                #if verbose:
                #    print(np.unique(temp_label))
                #    print(" after resize ", sum(sum(temp_label)))
                #figure_labels(resized_label, outputDir, ttt, dt, area_plot="ccs4", add_name = "before_shifted_resized")
                #center of mass after resizing
                center_after = ndimage.measurements.center_of_mass(temp_label)
                center_after = np.rint(center_after)
                # re-center the resized mask on the pre-resize center of mass
                dx_new,dy_new = center_before - center_after
                shifted_label = resize_array(temp_label,dx_new,dy_new, nx, ny)
                #if verbose:
                #    print(" after shift2 ", sum(sum(shifted_label)))
                label_cell = np.zeros((nx,ny))
                label_cell[0:,0:] = shifted_label[0:nx,0:ny]
                if label_cell.shape[0] != nx or label_cell.shape[1] != ny:
                    print("incorrect size")
                    quit()
                forecasted_labels["ID"+str(interesting_cell)].append(deepcopy(label_cell))
                #indx+=1
                label_cell = shifted_label #????????????????????????????????????
                area_current = sum(sum(label_cell))
                if verbose:
                    print(("end ", area_current))
                forecasted_areas.append(area_current)
            #add check to make sure the area you produced is more or less correct
            # timestamps matching each entry of forecasted_areas (5-min spacing)
            t_temp = deepcopy(ttt)
            forecasted_time = []
            for gg in range(len(forecasted_areas)):
                forecasted_time.append(t_temp)
                t_temp+=timedelta(minutes = 5)
            """
            if verbose:
                print("******** produce images")
            if False:
                t_composite = deepcopy(ttt)
                for i in range(min(len(y),index_stop)):
                    yearSf, monthSf, daySf, hourSf, minSf = string_date(t_composite)
                    contour_file = outputDir + "Forecast"+yearS+monthS+dayS+"_Obs"+hourS+minS+"_Forc"+hourSf+minSf+"_ID"+str(interesting_cell)+".png"
                    type_image = "_HRV"
                    #background_file = "/data/COALITION2/PicturesSatellite//"+yearS+"-"+monthS+"-"+dayS+"/"+yearS+"-"+monthS+"-"+dayS+type_image+"_"+"ccs4"+"/MSG"+type_image+"-"+"ccs4"+"_"+yearS[2:]+monthS+dayS+hourS+minS+".png"
                    background_file = "/data/COALITION2/PicturesSatellite/LEL_results_wind/"+yearS+"-"+monthS+"-"+dayS+"/RGB-HRV_dam/"+yearS+monthS+dayS+"_"+hourS+minS+"*.png"
                    out_file1 = create_dir( outputDir+"/Contours/")+"Obs"+hourS+minS+"_Forc"+hourSf+minSf+"_ID"+str(interesting_cell)+".png"
                    if verbose:
                        print("... create composite "+contour_file+" "+background_file+" "+out_file1)
                    #subprocess.call("/usr/bin/composite "+contour_file+" "+background_file+" "+out_file1, shell=True)
                    if verbose:
                        print("... saved composite: display ", out_file1, " &")
                    t_composite+=timedelta(minutes=5)
            """
            """
            if False:
                fig, ax = plt.subplots()
                ax.xaxis.set_major_formatter(mdates.DateFormatter('%H:%M'))
                ax.plot_date(t, y, 'o',label="Fit and extrapolation")
                ax.plot_date(forecasted_time, forecasted_areas, '*',label="forecasted")
                ax.plot_date(t2, y2, '*', label="Observations")
                #ax.set_xlim([t[0]-timedelta(minutes = 5), t2[-1]+timedelta(minutes = 5)])
                ax.set_ylabel("area")
                ax.legend(loc="best");
                fig.savefig(yearS+monthS+dayS+"_"+hourS+minS+"_AreaInTime"+"ID"+str(interesting_cell)+".png")
                plt.close( fig)
            """
        # ---- per-time-step plotting (all cells of this time step) ----
        t_composite = deepcopy(ttt)
        # merge coalition2 file with
        if ForeGroundRGBFile is None:
            currentRGB_im_filename = "/opt/users/"+getpass.getuser()+"/PyTroll/scripts/Mecikalski/cosmo/Channels/indicators_in_time/RGB_dam/"+yearS+monthS+dayS+"_"+hourS+minS+"*ccs4.png"
        else:
            currentRGB_im_filename = ForeGroundRGBFile
        currentRGB_im = glob.glob(currentRGB_im_filename)
        if len(currentRGB_im)<1:
            print("No file found:", currentRGB_im_filename)
        # get background file
        if BackgroundFile is None:
            background_im_filename = '/data/COALITION2/PicturesSatellite/LEL_results_wind/'+yearS+'-'+monthS+'-'+dayS+'/RGB-HRV_dam/'+yearS+monthS+dayS+'_'+hourS+minS+'*.png'
        else:
            background_im_filename = BackgroundFile
        background_im = glob.glob(background_im_filename)
        if len(background_im)>0:
            im = plt.imread(background_im[0])
            back_exists = True
        else:
            back_exists = False
        #img1 = Image.imread(currentRGB_im[0])
        obj_area = get_area_def("ccs4")
        fig,ax = prepare_figure(obj_area)
        if in_msg.nrt == False:
            if back_exists:
                plt.imshow(np.flipud(im))
            else:
                # now read the data we would like to forecast
                global_data = GeostationaryFactory.create_scene(in_msg.sat_str(), in_msg.sat_nr_str(), "seviri", ttt)
                #global_data_RGBforecast = GeostationaryFactory.create_scene(in_msg.sat, str(10), "seviri", time_slot)
                # area we would like to read
                area2load = "EuropeCanary95" #"ccs4" #c2"#"ccs4" #in_windshift.ObjArea
                area_loaded = get_area_def(area2load )#(in_windshift.areaExtraction)
                # load product, global_data is changed in this step!
                area_loaded = load_products(global_data, ['IR_108'], in_msg, area_loaded )
                data = global_data.project("ccs4")
                plt.imshow(np.flipud(data['IR_108'].data),cmap = pylab.gray())
        # background file form function argument or default
        # NOTE(review): this second background-file section repeats the one
        # above but aborts when no file is found — exact original nesting was
        # lost in formatting; verify against version control
        if BackgroundFile is None:
            background_im_filename = '/data/COALITION2/PicturesSatellite/LEL_results_wind/'+yearS+'-'+monthS+'-'+dayS+'/RGB-HRV_dam/'+yearS+monthS+dayS+'_'+hourS+minS+'*.png'
        else:
            if verbose:
                print("... BackgroundFile ", BackgroundFile)
            background_im_filename = BackgroundFile
        # searching background file (wildcards are possible)
        background_im = glob.glob(background_im_filename)
        if len(background_im) == 0:
            print("*** Error in plot_forecast_area (test_forecast.py)")
            print("    no background file found: ", background_im_filename)
            quit()
        elif len(background_im) > 1:
            print("*** Warning in plot_forecast_area (test_forecast.py)")
            print("    several background files found: ", background_im)
        # read background file
        im = plt.imread(background_im[0])
        #img1 = Image.imread(currentRGB_im[0])
        obj_area = get_area_def("ccs4")
        fig,ax = prepare_figure(obj_area)
        #plt.imshow(np.flipud(im))
        # plot contour lines for all cells
        if at_least_one_cell:
            # lead times (minutes) to draw, converted to forecast-step indices
            time_wanted_minutes = [5,20,40,60]
            time_wanted = []
            color_wanted = []
            vmax = 70
            for t_want in time_wanted_minutes:
                time_wanted.append((t_want-5)/5)
                tmp = (mpl.cm.Blues(float(t_want)/vmax))
                tmp1 = [tmp]
                color_wanted.append(tmp1)
            # paint later lead times first so earlier ones overwrite them
            all_labels_in_time = np.zeros((nx,ny))
            for i in range(len(time_wanted)-1,-1,-1):
                ind_time = time_wanted [i]
                for key, forc_labels in forecasted_labels.items():
                    #forecasted_labels["ID"+str(interesting_cell)]=[]
                    if len(forc_labels)>ind_time:
                        #plt.contour(np.flipud(forc_labels[ind_time]),[0.5],colors = color_wanted_cont[i]) #colors='w') #
                        all_labels_in_time[forc_labels[ind_time]>0] = time_wanted_minutes[i]
            forc_labels_tmp = np.ma.masked_where(all_labels_in_time==0,all_labels_in_time)
            plt.contourf(np.flipud(forc_labels_tmp), cmap="Blues", vmin=0, vmax=vmax)
            if False:
                for i in range(len(time_wanted)):
                    ind_time = time_wanted [i]
                    for key, forc_labels in forecasted_labels.items():
                        #forecasted_labels["ID"+str(interesting_cell)]=[]
                        if len(forc_labels)>ind_time:
                            plt.contour(np.flipud(forc_labels[ind_time]),[0.5],colors = color_wanted[i]) #colors='w') #
        else:
            print("*** Warning, no COALITION2 cell detected ")
            print("    produce empty figure ...")
        # convert the matplotlib figure to a PIL image and save it
        PIL_image = fig2img ( fig )
        standardOutputName = in_msg.standardOutputName.replace('%y%m%d%H%M',strftime('%y%m%d%H%M',ttt.timetuple()))
        #PIL_image.paste(img1, (0, 0), img1)
        if in_msg is None:
            PIL_image.save(create_dir(outputDir)+"Forecast"+yearS+monthS+dayS+"_Obs"+hourS+minS+".png")
            path = (outputDir)+yearS+monthS+dayS+hourS+minS+"Forecast.png"
        else:
            # dic_figure={}
            # if in_msg.nrt == True:
            #     dic_figure['rgb']= 'Forecast' #'C2rgbForecastTMP-IR-108'
            # else:
            #     dic_figure['rgb']= 'Forecast-C2rgb'
            # dic_figure['area']='ccs4'
            # PIL_image.save(create_dir(outputFile)+standardOutputName%dic_figure)
            # path = (outputFile)+standardOutputName%dic_figure
            # if in_msg.nrt == False:
            #     dic_figure={}
            #     dic_figure['rgb']= 'C2rgb-Forecast-HRV' #'C2rgbForecastTMP-IR-108'
            #     dic_figure['area']='ccs4'
            #     path_output = (outputFile)+standardOutputName%dic_figure
            #     print ("creating composite: ",currentRGB_im[0],"+",path)
            #     subprocess.call("/usr/bin/composite "+currentRGB_im[0]+" "+path+" "+path_output, shell=True)
            #print ("... display ",path_output," &")
            #dic_figure={}
            #dic_figure['rgb']= 'Forecast' #'C2rgbForecastTMP-IR-108'
            #dic_figure['area']='ccs4'
            outputFile = format_name(create_dir(outputDir)+in_msg.outputFile, ttt, rgb='Forecast', area='ccs4', sat_nr=int(in_msg.sat_nr))
            #PIL_image.save(create_dir(outputDir)+in_msg.outputFile%dic_figure)
            PIL_image.save(outputFile)
            #path = (outputDir)+in_msg.outputFile%dic_figure
            path = outputFile
            print("... display ",path," &")
            plt.close( fig)
            if True:
                if verbose:
                    print("path foreground", currentRGB_im[0])
                if in_msg is None:
                    path_composite = (outputFile)+yearS+monthS+dayS+"_Obs"+hourS+minS+"Forecast_composite.png"
                else:
                    # dic_figure={}
                    # dic_figure['rgb']='C2rgb-Forecast-HRV'
                    # dic_figure['area']='ccs4'
                    # path_composite = (outputFile)+standardOutputName%dic_figure
                    #dic_figure = {}
                    #dic_figure['rgb'] = "_HRV" #'IR-108'
                    #dic_figure['area']='ccs4'
                    #path_IR108 = (outputFile)+standardOutputName%dic_figure
                    #dic_figure={}
                    #dic_figure['rgb'] = 'C2rgbForecast-IR-108'
                    #dic_figure['area'] = 'ccs4'
                    #path_composite = (outputDir) + in_msg.outputFile%dic_figure
                    path_composite = format_name( outputDir+in_msg.outputFile, ttt, rgb='C2rgbForecast-IR-108', area='ccs4', sat_nr=int(in_msg.sat_nr))
                    #dic_figure = {}
                    #dic_figure['rgb'] = 'IR-108'
                    #dic_figure['area']='ccs4'
                    #path_IR108 = (outputDir) + in_msg.outputFile%dic_figure
                    path_IR108 = format_name( outputDir+in_msg.outputFile, ttt, rgb='IR-108', area='ccs4', sat_nr=int(in_msg.sat_nr))
                    if in_msg.nrt == True:
                        if verbose:
                            print("---starting post processing")
                        #if area in in_msg.postprocessing_areas:
                        in_msg.postprocessing_composite = deepcopy(in_msg.postprocessing_composite2)
                        postprocessing(in_msg, ttt, in_msg.sat_nr, "ccs4")
                        #print ("... display",path_composite,"&")
                    if in_msg.scpOutput and in_msg.nrt == True and False:
                        #not necessary because already done within postprocessing
                        print("... secure copy "+path_composite+ " to "+in_msg.scpOutputDir)
                        # subprocess.call("scp "+in_msg.scpID+" "+path_composite +" "+in_msg.scpOutputDir+" 2>&1 &", shell=True)
        #BackgroundFile
        #
        if False:
            # disabled: composite of per-forecast contour files
            # NOTE(review): hourSf/minSf are only defined inside the disabled
            # docstring block above — this dead code would raise NameError if enabled
            for i in range(12):
                contour_files = glob.glob(outputDir + "Forecast"+yearS+monthS+dayS+"_Obs"+hourS+minS+"_Forc"+hourSf+minSf+"_ID*.png")
                if verbose:
                    print(("Files found: ",contour_files))
                if len(contour_files)>0:
                    background_file = "/data/COALITION2/PicturesSatellite/LEL_results_wind/"+yearS+"-"+monthS+"-"+dayS+"/RGB-HRV_dam/"+yearS+monthS+dayS+"_"+hourS+minS+"*.png"
                    out_file1 = create_dir( outputDir+"/Contours/")+"Obs"+hourS+minS+"_Forc"+hourSf+minSf+".png"
                t_composite+=timedelta(minutes=5)
        # advance to the next 5-minute time step
        ttt += timedelta(minutes = 5)
# read HRW wind vectors hrw_data = read_HRW("meteosat", sat_nr, "seviri", time_slot, ntimes, \ min_correlation=min_correlation, min_conf_nwp=min_conf_nwp, \ min_conf_no_nwp=min_conf_no_nwp, cloud_type=cloud_type) # now read the data we would like to forecast global_data = GeostationaryFactory.create_scene( in_msg.sat, str(in_msg.sat_nr).zfill(2), "seviri", time_slot) # area we would like to read area_loaded = get_area_def("EuropeCanary95") # load product area_loaded = load_products(global_data, [rgb], in_msg, area_loaded) #pge = get_NWC_pge_name(rgb) #print '...load satellite channel ', pge #global_data.load([pge], reader_level="seviri-level3") #if rgb=='CTT': # nwcsaf_calibrate=False #else: # nwcsaf_calibrate=False #convert_NWCSAF_to_radiance_format(global_data, area, rgb, nwcsaf_calibrate, True) #from trollimage.image import Image as trollimage #min_data = 0. #max_data = float(len(global_data[rgb].palette)-1) #if in_msg.verbose:
if len(RGBs) > 0: # exit loop, if input is found break else: # else wait 20s and try again import time time.sleep(25) print("*** read CTP for ", in_msg.sat_str(), in_msg.sat_nr_str(), "seviri", str(time_slot)) global_data_CTP = GeostationaryFactory.create_scene( in_msg.sat_str(), in_msg.sat_nr_str(), "seviri", time_slot) #global_data_CTP = GeostationaryFactory.create_scene(in_msg.sat, in_msg.sat_nr_str(), "seviri", time_slot) #global_data_CTP = GeostationaryFactory.create_scene(in_msg.sat, str(10), "seviri", time_slot) #area_loaded = get_area_def("EuropeCanary95") #(in_windshift.areaExtraction) area_loaded_CTP = load_products(global_data_CTP, ['CTP'], in_msg, get_area_def("alps95")) data_CTP = global_data_CTP.project(area, precompute=True) [nx, ny] = data_CTP['CTP'].data.shape # read all rgbs print("*** read all other channels for ", in_msg.sat_str(), in_msg.sat_nr_str(), "seviri", str(time_slot)) global_data = GeostationaryFactory.create_scene( in_msg.sat_str(), in_msg.sat_nr_str(), "seviri", time_slot) #global_data_CTP = GeostationaryFactory.create_scene(in_msg.sat, str(10), "seviri", time_slot) area_loaded = get_area_def( "EuropeCanary95") #(in_windshift.areaExtraction) area_loaded = load_products(global_data, rgbs, in_msg, area_loaded) data = global_data.project(area, precompute=True)