def cloud_mask_mw(i):
    """
    Moving window cloud masking to be used by multiprocessing
    :return:
    """
    date = oneday_datetimes[i]
    window_datetime_lower = datetime.datetime(year_lower, month_lower,
                                              day_lower, hour_lower,
                                              minute_lower) \
                            - datetime.timedelta(days=7)
    window_datetime_upper = datetime.datetime(year_upper, month_upper,
                                              day_upper, hour_upper,
                                              minute_upper) \
                            + datetime.timedelta(days=7)

    # Get datetime objects between the above bounds
    time_params_7dayw = np.array([window_datetime_lower.year,
                                  window_datetime_upper.year,
                                  window_datetime_lower.month,
                                  window_datetime_upper.month,
                                  window_datetime_lower.day,
                                  window_datetime_upper.day,
                                  date.hour, date.hour,
                                  date.minute, date.minute])

    datetimes_7dayw = utilities.get_daily_datetime_objects(
        time_params_7dayw)

    bt_15day = np.zeros((datetimes_7dayw.shape[0], 3,
                         ianlats.shape[0], ianlons.shape[0]))
    cloudscreenedbt_15day = np.zeros((datetimes_7dayw.shape[0], 3,
                                      ianlats.shape[0], ianlons.shape[0]))

    f = tables.open_file(
        '/soge-home/projects/seviri_dust/sdf/intermediary_files/'
        'cloud_masked_bt_15d_' + oneday_datetimes[i].strftime(
            "%Y_%H_%M") + '.hdf', 'w')
    atom = tables.Atom.from_dtype(cloudscreenedbt_15day.dtype)
    filters = tables.Filters(complib='blosc', complevel=5)
    cs = f.create_carray(f.root, 'data', atom,
                         cloudscreenedbt_15day.shape, filters=filters)

    pickup = False
    pickup_i = 0

    if not pickup:
        g = tables.open_file(
            '/soge-home/projects/seviri_dust/sdf/intermediary_files/'
            'bt_15d_' + oneday_datetimes[i].strftime(
                "%Y_%H_%M") + '.hdf', 'w')
        atom = tables.Atom.from_dtype(bt_15day.dtype)
        filters = tables.Filters(complib='blosc', complevel=5)
        bts = g.create_carray(g.root, 'data', atom, bt_15day.shape,
                              filters=filters)
    else:
        g = tables.open_file(
            '/soge-home/projects/seviri_dust/sdf/intermediary_files/'
            'bt_15d_' + oneday_datetimes[i].strftime(
                "%Y_%H_%M") + '.hdf', 'a')
        # Reuse the existing array when picking up from a previous run
        bts = g.root.data

    # Loop through each day of the time window for this time of day
    for j in np.arange(pickup_i, len(datetimes_7dayw)):
        print str(oneday_datetimes[i].hour) + str(
            oneday_datetimes[i].minute), 'day ', j + 1
        date_w = datetimes_7dayw[j]

        # Extract BT data for this timestep
        filename = '/ouce-home/projects/seviri_dust/raw_seviri_data/' \
                   'bt_nc/' + date_w.strftime('%B') + str(date_w.year) \
                   + '/BT_' + date_w.strftime('%Y%m%d') + '.nc'
        try:
            btdata = Dataset(filename, 'r')
        except (IOError, RuntimeError):
            print 'Found no BT data for ' + filename
            continue

        # Extract cloud mask data for this timestep
        cloudfilename = \
            '/ouce-home/projects/seviri_dust/raw_seviri_data/' \
            'cloudmask_nc/' + date_w.strftime('%B') + str(date_w.year) \
            + '/cloudmask_' + date_w.strftime('%Y%m%d') + '.nc'
        try:
            clouddata = Dataset(cloudfilename, 'r')
        except (IOError, RuntimeError):
            print 'Found no cloud data for ' + cloudfilename
            continue

        # Apply cloud screening
        cloudscreenedbt_15day_array, bt087, bt108, bt120 = \
            sdf.cloud_screen_daily(btdata, clouddata, date_w)

        # Regrid the cloud-screened fields and each raw channel to the
        # target grid
        regcs_array = np.zeros((len(cloudscreenedbt_15day_array),
                                ianlats.shape[0], ianlons.shape[0]))
        for k in np.arange(0, len(cloudscreenedbt_15day_array)):
            regcs_array[k] = pinkdust.regrid_data(
                tchlons, tchlats, ianlons, ianlats,
                cloudscreenedbt_15day_array[k], mesh=False)

        regbt087 = pinkdust.regrid_data(tchlons, tchlats, ianlons,
                                        ianlats, bt087, mesh=False)
        regbt108 = pinkdust.regrid_data(tchlons, tchlats, ianlons,
                                        ianlats, bt108, mesh=False)
        regbt120 = pinkdust.regrid_data(tchlons, tchlats, ianlons,
                                        ianlats, bt120, mesh=False)

        cs[j] = regcs_array
        bts[j, 0] = regbt087
        bts[j, 1] = regbt108
        bts[j, 2] = regbt120
        btdata.close()

    # Save cloud masked and raw BT data for this time of day to file
    f.close()
    g.close()
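
# The docstring above says this worker is meant for multiprocessing. A
# minimal dispatch sketch, assuming oneday_datetimes is a module-level
# list with one datetime per time-of-day slot and is visible to forked
# workers as a global; the pool size of 8 is an arbitrary illustrative
# choice, not from the original.
def _example_dispatch_cloud_mask_mw():
    import multiprocessing
    pool = multiprocessing.Pool(processes=8)
    try:
        # One task per time-of-day index; each writes its own .hdf files
        pool.map(cloud_mask_mw, range(len(oneday_datetimes)))
    finally:
        pool.close()
        pool.join()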
def sdf_wrapper(i):
    print SDF_datetimes[i]
    # Read in the cloud mask data for that time of day from file
    window_datetime_lower = datetime.datetime(year_lower, month_lower,
                                              day_lower, hour_lower,
                                              minute_lower) \
                            - datetime.timedelta(days=7)
    window_datetime_upper = datetime.datetime(year_upper, month_upper,
                                              day_upper, hour_upper,
                                              minute_upper) \
                            + datetime.timedelta(days=7)

    BT_15_day_lower_bound = SDF_datetimes[i] - datetime.timedelta(days=7)
    BT_15_day_upper_bound = SDF_datetimes[i] + datetime.timedelta(days=7)

    # Get datetime objects between the above bounds
    time_params_7dayw = np.array([window_datetime_lower.year,
                                  window_datetime_upper.year,
                                  window_datetime_lower.month,
                                  window_datetime_upper.month,
                                  window_datetime_lower.day,
                                  window_datetime_upper.day,
                                  SDF_datetimes[i].hour,
                                  SDF_datetimes[i].hour,
                                  SDF_datetimes[i].minute,
                                  SDF_datetimes[i].minute])

    datetimes_7dayw = utilities.get_daily_datetime_objects(
        time_params_7dayw)

    f = tables.open_file(
        '/soge-home/projects/seviri_dust/sdf/intermediary_files/'
        'cloud_masked_bt_15d_' + SDF_datetimes[i].strftime(
            "%Y_%H_%M") + '.hdf')

    # Convert boolean masks to integer indices, since pytables doesn't
    # support fancy indexing with booleans
    indices = np.arange(0, len(datetimes_7dayw))
    lower_ind = indices[datetimes_7dayw == BT_15_day_lower_bound][0]
    upper_ind = indices[datetimes_7dayw == BT_15_day_upper_bound][0]
    current_ind = indices[datetimes_7dayw == SDF_datetimes[i]][0]

    BT_15_days_screened = f.root.data[lower_ind:upper_ind]
    BT_screened = f.root.data[current_ind]
    f.close()

    f = tables.open_file(
        '/soge-home/projects/seviri_dust/sdf/intermediary_files/bt_15d_'
        + SDF_datetimes[i].strftime("%Y_%H_%M") + '.hdf')
    bt_15day = f.root.data[current_ind]
    f.close()

    # Get the mean of the 15 day window of cloud screened data
    cloudscreenedbt_15daymean_108_87 = \
        sdf.extract_15day_mean(BT_15_days_screened)

    dust_mask_108, dust_mask_108_87, dust_mask_120_108, \
        dust_mask_108_87_anom_screened = sdf.generate_dust_mask(
            bt_15day, BT_screened, cloudscreenedbt_15daymean_108_87)

    ofilename = '/soge-home/projects/seviri_dust/sdf/' \
                + SDF_datetimes[i].strftime("%B%Y") + '/SDF_v2.' \
                + SDF_datetimes[i].strftime("%Y%m%d%H%M") + '.nc'

    SDF = sdf.generate_SDF(dust_mask_108, dust_mask_108_87,
                           dust_mask_120_108,
                           dust_mask_108_87_anom_screened,
                           ofilename, SDF_datetimes[i], ianlats, ianlons)
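
# The boolean-to-integer index conversion above is the workaround for
# pytables CArrays not accepting boolean fancy indexing. A minimal
# self-contained sketch of the same pattern; the dates and target here
# are illustrative only:
def _example_boolean_to_integer_index():
    import datetime
    import numpy as np
    dates = np.array([datetime.datetime(2010, 6, d, 12, 0)
                      for d in range(1, 31)])
    target = datetime.datetime(2010, 6, 15, 12, 0)
    indices = np.arange(len(dates))
    # indices[boolean_mask][0] yields a plain integer, which is safe to
    # use as f.root.data[current_ind] on a pytables CArray
    current_ind = indices[dates == target][0]
    return current_ind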
def cloud_mask_mw(i):
    """
    Moving window cloud masking to be used by multiprocessing
    :return:
    """
    date = oneday_datetimes[i]
    window_datetime_lower = datetime.datetime(year_lower, month_lower,
                                              day_lower, hour_lower,
                                              minute_lower) \
                            - datetime.timedelta(days=7)
    window_datetime_upper = datetime.datetime(year_upper, month_upper,
                                              day_upper, hour_upper,
                                              minute_upper) \
                            + datetime.timedelta(days=7)

    # Get datetime objects between the above bounds
    time_params_7dayw = np.array([window_datetime_lower.year,
                                  window_datetime_upper.year,
                                  window_datetime_lower.month,
                                  window_datetime_upper.month,
                                  window_datetime_lower.day,
                                  window_datetime_upper.day,
                                  date.hour, date.hour,
                                  date.minute, date.minute])

    datetimes_7dayw = utilities.get_daily_datetime_objects(
        time_params_7dayw)

    bt_15day = np.zeros((datetimes_7dayw.shape[0], 3,
                         ianlats.shape[0], ianlons.shape[0]))
    cloudscreenedbt_15day = np.zeros((datetimes_7dayw.shape[0], 3,
                                      ianlats.shape[0], ianlons.shape[0]))

    f = tables.open_file(
        '/soge-home/projects/seviri_dust/sdf/intermediary_files/'
        'cloud_masked_bt_15d_' + oneday_datetimes[i].strftime(
            "%Y_%H_%M") + '.hdf', 'w')
    atom = tables.Atom.from_dtype(cloudscreenedbt_15day.dtype)
    filters = tables.Filters(complib='blosc', complevel=5)
    cs = f.create_carray(f.root, 'data', atom,
                         cloudscreenedbt_15day.shape, filters=filters)

    g = tables.open_file(
        '/soge-home/projects/seviri_dust/sdf/intermediary_files/bt_15d_'
        + oneday_datetimes[i].strftime("%Y_%H_%M") + '.hdf', 'w')
    atom = tables.Atom.from_dtype(bt_15day.dtype)
    filters = tables.Filters(complib='blosc', complevel=5)
    bts = g.create_carray(g.root, 'data', atom, bt_15day.shape,
                          filters=filters)

    # Loop through each day of the time window for this time of day
    for j in np.arange(0, len(datetimes_7dayw)):
        print str(oneday_datetimes[i].hour) + str(
            oneday_datetimes[i].minute), 'day ', j + 1
        date_w = datetimes_7dayw[j]

        # Extract BT data for this timestep
        filename = '/ouce-home/data/satellite/meteosat/seviri/15-min/' \
                   'native/bt/nc/' \
                   + date_w.strftime('%B').upper() + str(date_w.year) \
                   + '/H-000-MSG2__-MSG2________-' \
                     'IR_BrightnessTemperatures___-000005___-' \
                   + date_w.strftime('%Y%m%d%H%M') + '-__.nc'
        try:
            btdata = Dataset(filename, 'r')
        except (IOError, RuntimeError):
            print 'Found no BT data for ' + filename
            continue

        # Extract cloud mask data for this timestep
        cloudfilename = '/ouce-home/data/satellite/meteosat/seviri/' \
                        '15-min/native/cloudmask/nc/' \
                        + date_w.strftime('%B').upper() \
                        + str(date_w.year) + '_CLOUDS/eumetsat.cloud.' \
                        + date_w.strftime('%Y%m%d%H%M') + '.nc'
        try:
            clouddata = Dataset(cloudfilename, 'r')
        except (IOError, RuntimeError):
            print 'Found no cloud data for ' + cloudfilename
            continue

        # Apply cloud screening
        cloudscreenedbt_15day_array, bt087, bt108, bt120 = \
            sdf.cloud_screen(btdata, clouddata)

        # Regrid the cloud-screened fields and each raw channel to the
        # target grid
        regcs_array = np.zeros((len(cloudscreenedbt_15day_array),
                                ianlats.shape[0], ianlons.shape[0]))
        for k in np.arange(0, len(cloudscreenedbt_15day_array)):
            regcs_array[k] = pinkdust.regrid_data(
                lons, lats, ianlons, ianlats,
                cloudscreenedbt_15day_array[k], mesh=False)

        regbt087 = pinkdust.regrid_data(lons, lats, ianlons, ianlats,
                                        bt087, mesh=False)
        regbt108 = pinkdust.regrid_data(lons, lats, ianlons, ianlats,
                                        bt108, mesh=False)
        regbt120 = pinkdust.regrid_data(lons, lats, ianlons, ianlats,
                                        bt120, mesh=False)

        cs[j] = regcs_array
        bts[j, 0] = regbt087
        bts[j, 1] = regbt108
        bts[j, 2] = regbt120
        btdata.close()

    # Save cloud masked and raw BT data for this time of day to file
    f.close()
    g.close()
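
# pinkdust.regrid_data is called above as regrid_data(src_lons, src_lats,
# target_lons, target_lats, data, mesh=...). As a rough illustration of
# the scattered-to-grid interpolation such a call can be built on (an
# assumption - the real pinkdust implementation may differ), scipy's
# griddata does the equivalent job:
def _example_regrid(src_lons, src_lats, target_lons, target_lats, data):
    import numpy as np
    from scipy.interpolate import griddata
    # Flatten the source grid into (N, 2) sample points
    points = np.column_stack((src_lons.ravel(), src_lats.ravel()))
    target_x, target_y = np.meshgrid(target_lons, target_lats)
    # Nearest-neighbour keeps BTs physical (no interpolation overshoot)
    return griddata(points, data.ravel(), (target_x, target_y),
                    method='nearest')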
def wrapper(yearmonth):

    year_lower = yearmonth[0]
    year_upper = yearmonth[0]
    month_lower = yearmonth[-1][0]
    month_upper = yearmonth[-1][-1]
    day_lower = 1
    day_upper = 31
    hour_lower = 0
    hour_upper = 23
    minute_lower = 0
    minute_upper = 45

    time_params = np.array([year_lower, year_upper, month_lower,
                            month_upper, day_lower, day_upper, hour_lower,
                            hour_upper, minute_lower, minute_upper])

    # Get datetime objects between the above bounds
    window_datetime_lower = datetime.datetime(year_lower, 6, 1, 0, 0) \
                            - datetime.timedelta(days=7)
    window_datetime_upper = datetime.datetime(year_upper, 8, 31, 23, 45) \
                            + datetime.timedelta(days=7)

    datetimes = utilities.get_datetime_objects(time_params)
    datestrings = [j.strftime("%Y%m%d%H%M") for j in datetimes]

    # Get lats and lons
    sdf_test = Dataset(
        '/soge-home/data_not_backed_up/satellite/meteosat/seviri'
        '/15-min/0.03x0.03/sdf/nc/JUNE2010/SDF_v2/SDF_v2.'
        '201006031500.nc')

    lons, lats = np.meshgrid(sdf_test.variables['longitude'][:],
                             sdf_test.variables['latitude'][:])
    lonmask = lons > 360
    latmask = lats > 90
    lons = np.ma.array(lons, mask=lonmask)
    lats = np.ma.array(lats, mask=latmask)

    sdf_previous = None
    raw_sdf_prev = []
    clouds_previous = None
    ids_previous = []
    cloud_ids_previous = []
    deep_conv_IDs_prev = None
    LLJ_plumes_IDs_prev = []
    k = 0
    available_colours = np.arange(0, 41)

    # To pick back up where you left off, simply add all the keys in plume
    # archive to used IDs here - perhaps include an option to do that
    used_ids = []
    used_cloud_ids = []
    used_colour_IDs = {}
    plume_objects = []

    flicker_ids = []
    reintroduced_ids = []
    last_10_ids = np.asarray([])
    prev_dust_assoc_clouds = None

    # Restrict the lons and lats to the CWS alone
    lonbool = np.asarray([j >= -20 and j <= 10 for j in lons[0]])

    if run:
        plume_objects = {}

        plume_archive = shelve.open(
            '/soge-home/projects/seviri_dust/plumetracker/'
            'plume_archive_flicker_v4_prob_v4_' + str(yearmonth[0]))

        if pickup:
            archived_ids = np.asarray([j for j in plume_archive])
            for i in archived_ids:
                used_ids.append(int(i))
            with open('date_i_' + str(yearmonth[0]) + '.txt', 'r') as f:
                pickup_date_i = f.read()
            datestrings = datestrings[int(pickup_date_i):]
            datetimes = datetimes[int(pickup_date_i):]

        for date_i in np.arange(0, len(datestrings)):
            runtime = datetimes[date_i] - datetimes[0]
            print '\n' + datestrings[date_i] + '\n'
            totaltest = datetime.datetime.now()

            if not tch_sdfs:
                sdf_root = '/soge-home/data_not_backed_up/satellite' \
                           '/meteosat/seviri/15-min/0.03x0.03/sdf/nc/' \
                           + datetimes[date_i].strftime('%B').upper() \
                           + str(datetimes[date_i].year) + '/SDF_v2/'
            else:
                sdf_root = '/soge-home/projects/seviri_dust/sdf/' \
                           + datetimes[date_i].strftime('%B') \
                           + str(datetimes[date_i].year) + '/'

            if os.path.isfile(sdf_root + 'SDF_v2.' + datestrings[date_i]
                              + '.nc'):
                sdf = Dataset(sdf_root + 'SDF_v2.' + datestrings[date_i]
                              + '.nc')
                found_file = True
            else:
                print 'No SDF file found for this date'
                found_file = False

            if daily_bt_files:
                # Pull BTs from daily nc files
                bt = Dataset(daily_bt_root
                             + datetimes[date_i].strftime("%B%Y")
                             + '/BT_'
                             + datetimes[date_i].strftime("%Y%m%d")
                             + '.nc')
                found_file = True
            else:
                # Try the MSG2 filename first, then fall back to MSG1
                bt_root = '/ouce-home/data/satellite/meteosat/seviri' \
                          '/15-min/0.03x0.03/bt/nc/' \
                          + datetimes[date_i].strftime("%B").upper() \
                          + str(datetimes[date_i].year)
                msg2_file = bt_root + '/H-000-MSG2__-MSG2________-' \
                                      'IR_BrightnessTemperatures___' \
                                      '-000005___-' \
                            + datestrings[date_i] + '-__.nc'
                msg1_file = bt_root + '/H-000-MSG1__-MSG1________-' \
                                      'IR_BrightnessTemperatures___' \
                                      '-000005___-' \
                            + datestrings[date_i] + '-__.nc'
                try:
                    bt = Dataset(msg2_file)
                    found_file = True
                except (IOError, RuntimeError):
                    if os.path.isfile(msg1_file):
                        bt = Dataset(msg1_file)
                        found_file = True
                    else:
                        found_file = False

            if found_file:

                # If the SDF file has its own longitudes and latitudes,
                # use these rather than the preset
                if 'longitude' in sdf.variables:
                    lons = sdf.variables['longitude'][:]
                    lonmask = lons > 360
                    lons = np.ma.array(lons, mask=lonmask)
                if 'latitude' in sdf.variables:
                    lats = sdf.variables['latitude'][:]
                    latmask = lats > 90
                    lats = np.ma.array(lats, mask=latmask)
                lons, lats = np.meshgrid(lons, lats)

                # Some SDF files have a time dimension. For these, index
                # it out.
                if tch_sdfs:
                    sdf_now = sdf.variables['SDF'][:]
                elif 'time' in sdf.variables:
                    sdf_now = sdf.variables['bt108'][0]
                else:
                    sdf_now = sdf.variables['bt108'][:]

                if daily_bt_files:
                    # Get the BTs from the intermediary hdf files
                    # (already regridded) rather than the daily nc file
                    time_params_7dayw = np.array([
                        window_datetime_lower.year,
                        window_datetime_upper.year,
                        window_datetime_lower.month,
                        window_datetime_upper.month,
                        window_datetime_lower.day,
                        window_datetime_upper.day,
                        datetimes[date_i].hour,
                        datetimes[date_i].hour,
                        datetimes[date_i].minute,
                        datetimes[date_i].minute])
                    datetimes_7dayw = \
                        utilities.get_daily_datetime_objects(
                            time_params_7dayw)
                    indices = np.arange(0, len(datetimes_7dayw))
                    current_ind = indices[datetimes_7dayw ==
                                          datetimes[date_i]][0]
                    f = tables.open_file(
                        '/soge-home/projects/seviri_dust/sdf'
                        '/intermediary_files/bt_15d_'
                        + datetimes[date_i].strftime("%Y_%H_%M")
                        + '.hdf')
                    bt_15day = f.root.data[current_ind]
                    f.close()
                    # The 10.8 micron channel is the second of the three
                    bt_108 = bt_15day[1]
                elif 'time' in bt.variables:
                    bt_108 = bt.variables['bt108'][:][0]
                else:
                    bt_108 = bt.variables['bt108'][:]

                clouds = bt_108 < 270

                # Add the reintroduced plumes back into the running
                for i in np.arange(0, len(reintroduced_ids)):
                    sdf_previous[sdf_previous == flicker_ids[i]] = \
                        reintroduced_ids[i]
                    ids_previous = np.append(ids_previous,
                                             reintroduced_ids[i])

                # Get plumes first by scanning for them
                sdf_plumes, new_ids, plume_ids, merge_ids = \
                    plumes.scan_for_plumes(sdf_now, sdf_previous,
                                           used_ids, clouds)

                # Clear flicker IDs and reintroduced IDs
                flicker_ids = []
                reintroduced_ids = []

                # We could here do infilling, then at least you'll have
                # it for the next iteration. But if you've labelled them
                # already you don't have a binary.

                for i in new_ids:
                    used_ids.append(i)

                old_bool = np.asarray([j in ids_previous
                                       for j in plume_ids])
                if len(old_bool) > 0:
                    old_ids = plume_ids[old_bool]
                else:
                    old_ids = []

                # Then, for each new ID, we initialise plume objects
                for i in np.arange(0, len(new_ids)):
                    if debug:
                        print 'Creating new plume', new_ids[i]
                    plume = plumes.Plume(new_ids[i], datetimes[date_i])
                    plume.update_position(lats, lons, sdf_plumes,
                                          new_ids[i])
                    plume.update_duration(datetimes[date_i])
                    plume.update_bbox()
                    plume.update_majorminor_axes(lons, lats)
                    plume.update_area()
                    plume.update_max_extent()
                    plume.update_centroid_speed()
                    plume.update_centroid_direction()
                    plume.check_conv_distance(lats, lons, clouds)
                    plume.update_most_likely_source()
                    plume_objects[str(new_ids[i])] = plume

                    if flicker:
                        missing_plume, missing_id, flickered = \
                            plume.chain_flickerchecker(raw_sdf_prev)
                    else:
                        flickered = False
                        missing_plume = []

                    if flickered:
                        raise ValueError('Found an overlapping plume in '
                                         'the previous timestep larger '
                                         'than size 250 - a new plume '
                                         'should not be initiated here')

                    steps_back = 1
                    # As long as there is an overlapping previous plume,
                    # keep updating it back in time
                    while len(missing_plume) > 0:
                        if debug:
                            print 'Rolling back plume', new_ids[i]
                        # We can only step back to the first timestep
                        # and no earlier
                        if (date_i - steps_back) < 0:
                            missing_plume = []
                            break

                        missing_date = datetimes[date_i - steps_back]
                        missing_sdf_plumes = np.zeros(
                            missing_plume.shape)
                        missing_plume = missing_plume == 1
                        missing_sdf_plumes[missing_plume] = missing_id

                        # Run all the updates that would be used for a
                        # new plume
                        plume.update_position(lats, lons,
                                              missing_sdf_plumes,
                                              missing_id)
                        plume.update_duration(missing_date)
                        plume.update_bbox()
                        plume.update_majorminor_axes(lons, lats)
                        plume.update_area()
                        plume.update_max_extent()
                        plume.update_centroid_speed()
                        plume.update_centroid_direction()
                        plume.check_conv_distance(lats, lons, clouds)
                        plume.update_most_likely_source()
                        plume.process_missing_plume()

                        steps_back += 1
                        if (date_i - steps_back) < 0:
                            missing_plume = []
                            break

                        # Pull out data from the timestep before to
                        # continue the chain
                        try:
                            if tch_sdfs:
                                raw_sdf_prev_prev_data = Dataset(
                                    sdf_root + 'SDF_v2.'
                                    + datestrings[date_i - steps_back]
                                    + '.nc')
                            else:
                                raw_sdf_prev_prev_data = Dataset(
                                    '/soge-home/data_not_backed_up'
                                    '/satellite/meteosat/seviri/15-min'
                                    '/0.03x0.03/sdf/nc/'
                                    + datetimes[date_i - steps_back]
                                    .strftime("%B").upper()
                                    + str(datetimes[
                                        date_i - steps_back].year)
                                    + '/SDF_v2/SDF_v2.'
                                    + datestrings[date_i - steps_back]
                                    + '.nc')
                        except (IOError, RuntimeError):
                            print 'Adding date to list of missing dates'
                            with open('missing_dates.txt', 'a') as \
                                    my_file:
                                my_file.write('\n' + datestrings[
                                    date_i - steps_back])
                            break

                        if tch_sdfs:
                            raw_sdf_prev_prev = \
                                raw_sdf_prev_prev_data.variables[
                                    'SDF'][:]
                        elif 'time' in \
                                raw_sdf_prev_prev_data.variables:
                            raw_sdf_prev_prev = \
                                raw_sdf_prev_prev_data.variables[
                                    'bt108'][0]
                        else:
                            raw_sdf_prev_prev = \
                                raw_sdf_prev_prev_data.variables[
                                    'bt108'][:]

                        missing_plume, missing_id, flickered = \
                            plume.chain_flickerchecker(
                                raw_sdf_prev_prev)

                        if flickered:
                            # We have a plume in a previous timestep
                            # which flickered
                            plume_archive_keys = last_10_ids.astype(int)
                            # Sort the keys in reverse order as the
                            # plume we want is most likely to have a
                            # high ID
                            plume_archive_keys[::-1].sort()

                            missing_plume = missing_plume == 1
                            missing_sdf_plumes = np.zeros(
                                missing_plume.shape)
                            missing_sdf_plumes[missing_plume] = \
                                missing_id

                            plume_bool = missing_sdf_plumes == missing_id
                            search_centroid_lon = np.nanmean(
                                lons[plume_bool])
                            search_centroid_lat = np.nanmean(
                                lats[plume_bool])
                            plume_lons = lons[plume_bool]
                            plume_lats = lats[plume_bool]
                            search_date = datetimes[date_i - steps_back]

                            found_plume = False
                            for key in plume_archive_keys:
                                if search_centroid_lon == \
                                        plume_archive[
                                            str(key)].centroid_lon \
                                        and search_centroid_lat == \
                                        plume_archive[
                                            str(key)].centroid_lat \
                                        and plume_archive[str(
                                            key)].dates_observed[-1] \
                                        == search_date:
                                    found_plume = True
                                    correct_plume = plume_archive[
                                        str(key)]
                                    plume_to_append = plume
                                    # Append the flickered plume to the
                                    # old one which was archived
                                    correct_plume.append_missing_plume(
                                        plume_to_append)
                                    # Add it to plume objects and remove
                                    # it from archives
                                    plume_objects[str(key)] = \
                                        correct_plume
                                    del plume_archive[str(key)]
                                    # Add it to old IDs, replacing the
                                    # ID of the plume which was found
                                    # to be flickered
                                    flicker_ids.append(plume.plume_id)
                                    reintroduced_ids.append(key)
                                    missing_plume = []
                                    # Reintroduced plumes also get
                                    # removed from the record of the
                                    # last 10 ids
                                    index = np.argwhere(
                                        last_10_ids == key)
                                    last_10_ids = np.delete(
                                        last_10_ids, index)
                                    break

                            # If we didn't find the plume in the plume
                            # archive, it must still be active
                            if not found_plume:
                                plume_object_keys = np.asarray(
                                    [int(i) for i in plume_objects])
                                # Sort the keys in reverse order as the
                                # plume we want is most likely to have
                                # a high ID
                                plume_object_keys[::-1].sort()
                                for key in plume_object_keys:
                                    if search_centroid_lon == \
                                            plume_objects[
                                                str(key)].centroid_lon \
                                            and search_centroid_lat == \
                                            plume_objects[
                                                str(key)].centroid_lat \
                                            and plume_objects[str(
                                                key)].dates_observed[-1] \
                                            == search_date:
                                        found_plume = True
                                        # The plume is still active, so
                                        # it lives in plume_objects
                                        correct_plume = plume_objects[
                                            str(key)]
                                        plume_to_append = plume
                                        # Append the flickered plume to
                                        # the old one
                                        correct_plume.\
                                            append_missing_plume(
                                                plume_to_append)
                                        plume_objects[str(key)] = \
                                            correct_plume
                                        # Add it to old IDs, replacing
                                        # the ID of the plume which was
                                        # found to be flickered
                                        flicker_ids.append(
                                            plume.plume_id)
                                        reintroduced_ids.append(key)
                                        missing_plume = []
                                        index = np.argwhere(
                                            last_10_ids == key)
                                        last_10_ids = np.delete(
                                            last_10_ids, index)
                                        break
                            break

                # Remove any IDs which were actually flickers
                for i in np.arange(0, len(flicker_ids)):
                    index = np.argwhere(new_ids == flicker_ids[i])
                    new_ids = np.delete(new_ids, index)
                    index = np.argwhere(ids_previous == flicker_ids[i])
                    ids_previous = np.delete(ids_previous, index)
                    index = np.argwhere(plume_ids == flicker_ids[i])
                    plume_ids = np.delete(plume_ids, index)
                    del plume_objects[str(flicker_ids[i])]

                # For merged IDs, we move the tracks to pre-merge tracks
                for i in np.arange(0, len(merge_ids)):
                    plume = plume_objects[str(merge_ids[i])]
                    plume.merge()

                # For old IDs, we just run an update.
                for i in np.arange(0, len(old_ids)):
                    if debug:
                        print 'Updating plume', old_ids[i]
                    plume = plume_objects[str(old_ids[i])]
                    plume.update_position(lats, lons, sdf_plumes,
                                          old_ids[i])
                    plume.update_duration(datetimes[date_i])
                    plume.update_bbox()
                    plume.update_majorminor_axes(lons, lats)
                    plume.update_area()
                    plume.update_centroid_speed()
                    plume.update_centroid_direction()
                    plume.update_max_extent()
                    plume_objects[str(old_ids[i])] = plume

                # Plumes which no longer exist are removed and archived
                if len(ids_previous) == 0:
                    removed_ids = []
                else:
                    removed_bool = np.asarray([j not in plume_ids
                                               for j in ids_previous])
                    removed_ids = ids_previous[removed_bool]

                for i in np.arange(0, len(removed_ids)):
                    if debug:
                        print 'Archiving plume', removed_ids[i]
                    plume = plume_objects[str(removed_ids[i])]
                    plume.update_GPE_speed()
                    plume.update_mean_axis_offset()
                    plume.update_llj_probability(trace)
                    plume_archive[str(removed_ids[i])] = plume
                    del plume_objects[str(removed_ids[i])]
                    last_10_ids = np.append(last_10_ids, removed_ids[i])

                # Keep only the most recently archived IDs
                if len(last_10_ids) > 10:
                    last_10_ids = last_10_ids[-10:]

                if len(np.unique(sdf_plumes)) < 2:
                    sdf_previous = None
                    ids_previous = []
                    raw_sdf_prev = []
                else:
                    sdf_previous = sdf_plumes
                    ids_previous = plume_ids
                    raw_sdf_prev = sdf_now

            else:
                print 'Adding date to list of missing dates'
                with open('missing_dates.txt', 'a') as my_file:
                    my_file.write('\n' + datestrings[date_i])

            # Print date_i so we know where we got up to in case we
            # have to restart
            with open('date_i_' + str(yearmonth[0]) + '.txt', 'w') as f:
                f.write('%d' % date_i)

        # After the script has finished, add remaining plumes to the
        # plume archive
        for i in plume_objects:
            plume_archive[i] = plume_objects[i]

        plume_archive.close()
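
# A hedged sketch of how wrapper might be driven over several years with
# one process per (year, months) pair. The yearmonth structure - year
# first, an iterable of months last - is inferred from the indexing
# yearmonth[0] and yearmonth[-1][0]/yearmonth[-1][-1] above; the pairs
# listed here are purely illustrative.
def _example_run_yearmonths():
    import multiprocessing
    yearmonths = [(2010, [6, 7, 8]), (2011, [6, 7, 8])]
    pool = multiprocessing.Pool(processes=2)
    try:
        # Each worker tracks plumes for one summer and writes its own
        # shelve archive, so the tasks are independent
        pool.map(wrapper, yearmonths)
    finally:
        pool.close()
        pool.join()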
window_datetime_lower = datetime.datetime(year_lower, month_lower,
                                          day_lower, hour_lower,
                                          minute_lower) \
                        - datetime.timedelta(days=7)
window_datetime_upper = datetime.datetime(year_upper, month_upper,
                                          day_upper, hour_upper,
                                          minute_upper) \
                        + datetime.timedelta(days=7)

# Get datetime objects between the above bounds
time_params = np.array([window_datetime_lower.year,
                        window_datetime_upper.year,
                        window_datetime_lower.month,
                        window_datetime_upper.month,
                        window_datetime_lower.day,
                        window_datetime_upper.day,
                        window_datetime_lower.hour,
                        window_datetime_upper.hour,
                        window_datetime_lower.minute,
                        window_datetime_upper.minute])

datetimes = utilities.get_daily_datetime_objects(time_params)
datestrings = [j.strftime("%Y%m%d%H%M") for j in datetimes]

# Get datetime objects without the 7 days on the ends
time_params = np.array([year_lower, year_upper, month_lower, month_upper,
                        day_lower, day_upper, hour_lower, hour_upper,
                        minute_lower, minute_upper])

SDF_datetimes = utilities.get_datetime_objects(time_params)
SDF_datestrings = [j.strftime("%Y%m%d%H%M") for j in SDF_datetimes]

print 'Obtaining datetimes for the chosen time window:', \
    datetime.datetime.now() - test1

test2 = datetime.datetime.now()
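
# utilities.get_daily_datetime_objects is assumed (from its use here and
# in the workers above) to expand the 10-element parameter array into one
# datetime per day at a fixed time of day. A minimal stand-in with that
# contract, for illustration only - the real utilities module may behave
# differently:
def _example_daily_datetime_objects(time_params):
    import datetime
    import numpy as np
    # Parameter order: year, month, day, hour, minute bounds interleaved
    start = datetime.datetime(time_params[0], time_params[2],
                              time_params[4], time_params[6],
                              time_params[8])
    end = datetime.datetime(time_params[1], time_params[3],
                            time_params[5], time_params[7],
                            time_params[9])
    n_days = (end - start).days + 1
    return np.array([start + datetime.timedelta(days=int(d))
                     for d in range(n_days)])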
def cloud_mask_mw(i, datetimes, oneday_datetimes, ianlons, ianlats):
    """
    Moving window cloud masking to be used by multiprocessing
    :return:
    """
    date = oneday_datetimes[i]
    window_datetime_lower = datetime.datetime(datetimes[0].year,
                                              datetimes[0].month,
                                              datetimes[0].day,
                                              datetimes[0].hour,
                                              datetimes[0].minute) \
                            - datetime.timedelta(days=7)
    window_datetime_upper = datetime.datetime(datetimes[-1].year,
                                              datetimes[-1].month,
                                              datetimes[-1].day,
                                              datetimes[-1].hour,
                                              datetimes[-1].minute) \
                            + datetime.timedelta(days=7)

    # Get datetime objects between the above bounds
    time_params_7dayw = np.array([window_datetime_lower.year,
                                  window_datetime_upper.year,
                                  window_datetime_lower.month,
                                  window_datetime_upper.month,
                                  window_datetime_lower.day,
                                  window_datetime_upper.day,
                                  date.hour, date.hour,
                                  date.minute, date.minute])

    datetimes_7dayw = utilities.get_daily_datetime_objects(
        time_params_7dayw)

    bt_15day = np.zeros((datetimes_7dayw.shape[0], 3,
                         ianlats.shape[0], ianlons.shape[0]))

    print str(oneday_datetimes[i].hour) + '_' + str(
        oneday_datetimes[i].minute)

    g = tables.open_file(
        '/soge-home/projects/seviri_dust/sdf/intermediary_files/bt_15d_'
        + str(oneday_datetimes[i].year) + '_'
        + str(oneday_datetimes[i].hour) + '_'
        + str(oneday_datetimes[i].minute) + '.hdf', 'w')
    atom = tables.Atom.from_dtype(bt_15day.dtype)
    filters = tables.Filters(complib='blosc', complevel=5)
    bts = g.create_carray(g.root, 'data', atom, bt_15day.shape,
                          filters=filters)

    # Loop through each day of the time window for this time of day
    for j in np.arange(0, len(datetimes_7dayw)):
        date_w = datetimes_7dayw[j]

        # Extract BT data for this timestep, trying the MSG2 filename
        # first, then MSG1, then the 'SEPT' directory variants of each
        root = '/ouce-home/data/satellite/meteosat/seviri/15-min/' \
               '0.03x0.03/bt/nc/'
        timestamp = date_w.strftime('%Y%m%d%H%M')
        candidates = [
            root + date_w.strftime('%B').upper() + str(date_w.year)
            + '/H-000-MSG2__-MSG2________-'
              'IR_BrightnessTemperatures___-000005___-'
            + timestamp + '-__.nc',
            root + date_w.strftime('%B').upper() + str(date_w.year)
            + '/H-000-MSG1__-MSG1________-'
              'IR_BrightnessTemperatures___-000005___-'
            + timestamp + '-__.nc',
            root + 'SEPT' + str(date_w.year)
            + '/H-000-MSG1__-MSG1________-'
              'IR_BrightnessTemperatures___-000005___-'
            + timestamp + '-__.nc',
            root + 'SEPT' + str(date_w.year)
            + '/H-000-MSG2__-MSG2________-'
              'IR_BrightnessTemperatures___-000005___-'
            + timestamp + '-__.nc']

        btdata = None
        for filename in candidates:
            if os.path.isfile(filename):
                btdata = Dataset(filename, 'r')
                break
        if btdata is None:
            print 'Found no BT data for ' + candidates[-1]
            bts[j, :] = np.nan
            continue

        bt087 = btdata.variables['bt087'][:][0]
        bt108 = btdata.variables['bt108'][:][0]
        bt120 = btdata.variables['bt120'][:][0]

        # These BTs are already on the target grid, so no regridding is
        # needed here
        bts[j, 0] = bt087
        bts[j, 1] = bt108
        bts[j, 2] = bt120
        btdata.close()

    # Save the BT data for this time of day to file
    g.close()
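
# Unlike the earlier variants, this version takes its context explicitly
# rather than via globals, so a dispatch sketch needs functools.partial
# (or an equivalent wrapper) to bind the shared arguments before handing
# indices to Pool.map. Illustrative only; the pool size is an assumption.
def _example_dispatch_explicit(datetimes, oneday_datetimes, ianlons,
                               ianlats):
    import functools
    import multiprocessing
    worker = functools.partial(cloud_mask_mw,
                               datetimes=datetimes,
                               oneday_datetimes=oneday_datetimes,
                               ianlons=ianlons,
                               ianlats=ianlats)
    pool = multiprocessing.Pool(processes=8)
    try:
        pool.map(worker, range(len(oneday_datetimes)))
    finally:
        pool.close()
        pool.join()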
def detect_cpo(btdiff_2_anom_prev, btdiff_2_anom_prev_2,
               btdiff_2_anom_prev_3, datetimes, datestrings, date_i,
               lons, lats, cloud_lons, cloud_lats, daily_clouds=False,
               double_digits=False, mesh=False, daily_bt=False):

    used_ids = []
    runtime = datetimes[date_i] - datetimes[0]
    totaltest = datetime.datetime.now()
    found_file = True

    if not daily_bt:
        # Try the MSG2 filename first, then fall back to MSG1
        bt_root = '/ouce-home/data/satellite/meteosat/seviri/15-min/' \
                  '0.03x0.03/bt/nc/' \
                  + datetimes[date_i].strftime("%B").upper() \
                  + str(datetimes[date_i].year)
        msg2_file = bt_root + '/H-000-MSG2__-MSG2________-' \
                              'IR_BrightnessTemperatures___-000005___-' \
                    + datestrings[date_i] + '-__.nc'
        msg1_file = bt_root + '/H-000-MSG1__-MSG1________-' \
                              'IR_BrightnessTemperatures___-000005___-' \
                    + datestrings[date_i] + '-__.nc'
        if os.path.isfile(msg2_file):
            bt = Dataset(msg2_file)
            found_file = True
        elif os.path.isfile(msg1_file):
            bt = Dataset(msg1_file)
            found_file = True
        else:
            found_file = False

    if daily_clouds:
        try:
            cloudmask = Dataset(
                '/soge-home/projects/seviri_dust/raw_seviri_data/'
                'cloudmask_nc/' + datetimes[date_i].strftime("%B%Y")
                + '/cloudmask_'
                + datetimes[date_i].strftime("%Y%m%d") + '.nc')
            cloudmask_times = num2date(
                cloudmask.variables['time'][:],
                cloudmask.variables['time'].units)
            cloudmask_times = np.asarray([datetime.datetime(
                j.year, j.month, j.day, j.hour, j.minute)
                for j in cloudmask_times])
            cloudmask_bool = cloudmask_times == datetimes[date_i]
            clouds_now = cloudmask.variables['cloud_mask'][
                cloudmask_bool][0]
            found_file = True
        except (IOError, RuntimeError):
            print 'Found no cloud mask file!'
            clouds_now = np.zeros(cloud_lons.shape)
            found_file = False
    else:
        try:
            cloudmask = Dataset(
                '/soge-home/data/satellite/meteosat/seviri/15-min/'
                '0.03x0.03/cloudmask/nc/'
                + datetimes[date_i].strftime("%B").upper()
                + str(datetimes[date_i].year) + '_CLOUDS/'
                'eumetsat.cloud.' + datestrings[date_i] + '.nc')
            clouds_now = cloudmask.variables['cmask'][:][0]
            cloud_lons = cloudmask.variables['lon'][:]
            cloud_lats = cloudmask.variables['lat'][:]
        except (IOError, RuntimeError):
            clouds_now = np.zeros(cloud_lons.shape)
            found_file = False
            print 'Found no cloud mask file!'

    if found_file:
        if not daily_bt:
            bt087 = bt.variables['bt087'][:][0]
            bt12 = bt.variables['bt120'][:][0]
            orig_lons = bt.variables['longitude'][:]
            orig_lats = bt.variables['latitude'][:]
            orig_lons, orig_lats = np.meshgrid(orig_lons, orig_lats)
        else:
            orig_lons = lons
            orig_lats = lats

        window_datetime_lower = datetime.datetime(datetimes[0].year,
                                                  datetimes[0].month,
                                                  datetimes[0].day,
                                                  datetimes[0].hour,
                                                  datetimes[0].minute) \
                                - datetime.timedelta(days=7)
        window_datetime_upper = datetime.datetime(datetimes[-1].year,
                                                  datetimes[-1].month,
                                                  datetimes[-1].day,
                                                  datetimes[-1].hour,
                                                  datetimes[-1].minute) \
                                + datetime.timedelta(days=7)

        BT_15_day_lower_bound = datetimes[date_i] \
                                - datetime.timedelta(days=7)
        BT_15_day_upper_bound = datetimes[date_i] \
                                + datetime.timedelta(days=7)

        # Get datetime objects between the above bounds
        time_params_7dayw = np.array([window_datetime_lower.year,
                                      window_datetime_upper.year,
                                      window_datetime_lower.month,
                                      window_datetime_upper.month,
                                      window_datetime_lower.day,
                                      window_datetime_upper.day,
                                      datetimes[date_i].hour,
                                      datetimes[date_i].hour,
                                      datetimes[date_i].minute,
                                      datetimes[date_i].minute])
        datetimes_7dayw = utilities.get_daily_datetime_objects(
            time_params_7dayw)

        indices = np.arange(0, len(datetimes_7dayw))
        lower_ind = indices[datetimes_7dayw == BT_15_day_lower_bound][0]
        upper_ind = indices[datetimes_7dayw == BT_15_day_upper_bound][0]
        current_ind = indices[datetimes_7dayw == datetimes[date_i]][0]

        if double_digits:
            f = tables.open_file(
                '/soge-home/projects/seviri_dust/sdf/intermediary_files'
                '/bt_15d_' + datetimes[date_i].strftime('%Y_%H_%M')
                + '.hdf')
        else:
            f = tables.open_file(
                '/soge-home/projects/seviri_dust/sdf/intermediary_files'
                '/bt_15d_' + str(datetimes[date_i].year) + '_'
                + str(datetimes[date_i].hour) + '_'
                + str(datetimes[date_i].minute) + '.hdf')
        BT_15_days = f.root.data[lower_ind:upper_ind]
        bt_data = f.root.data[current_ind]
        f.close()

        if daily_bt:
            bt087 = bt_data[0]
            bt12 = bt_data[2]

        bt_15day_087 = BT_15_days[:, 0]
        bt_15day_120 = BT_15_days[:, 2]

        bt_15day_087_mean = np.nanmean(bt_15day_087, axis=0)
        bt_15day_120_mean = np.nanmean(bt_15day_120, axis=0)
        btdiff_2_15daymean = bt_15day_120_mean - bt_15day_087_mean

        if mesh:
            cloud_lons, cloud_lats = np.meshgrid(cloud_lons, cloud_lats)
        clouds_now_regridded = pinkdust.regrid_data(cloud_lons,
                                                    cloud_lats,
                                                    orig_lons, orig_lats,
                                                    clouds_now,
                                                    mesh=True)

        btdiff_2 = bt12 - bt087
        btdiff_2_anom = btdiff_2 - btdiff_2_15daymean

        # Sum the anomaly differences against the previous three
        # timesteps; before three timesteps exist, use zeros
        if btdiff_2_anom_prev_3 is not None:
            btdiff_2_anom_diff = btdiff_2_anom - btdiff_2_anom_prev_3
            btdiff_2_anom_diff += (btdiff_2_anom - btdiff_2_anom_prev_2)
            btdiff_2_anom_diff += (btdiff_2_anom - btdiff_2_anom_prev)
        else:
            btdiff_2_anom_diff = np.zeros(btdiff_2_anom.shape)

        # Roll the stored anomaly fields forward one timestep
        if date_i == 0:
            btdiff_2_anom_prev = deepcopy(btdiff_2_anom)
        elif date_i == 1:
            btdiff_2_anom_prev_2 = deepcopy(btdiff_2_anom_prev)
            btdiff_2_anom_prev = deepcopy(btdiff_2_anom)
        else:
            btdiff_2_anom_prev_3 = deepcopy(btdiff_2_anom_prev_2)
            btdiff_2_anom_prev_2 = deepcopy(btdiff_2_anom_prev)
            btdiff_2_anom_prev = deepcopy(btdiff_2_anom)

        if daily_clouds:
            clouds_now_regridded = clouds_now_regridded > 1

        lat_grad, lon_grad = np.gradient(btdiff_2_anom)
        total_grad = np.sqrt(lat_grad ** 2 + lon_grad ** 2)

        # Dilate the cloud mask with a 5x5 convolution and mask out the
        # gradient field under cloud
        convolution = scipy.signal.convolve2d(clouds_now_regridded,
                                              np.ones((5, 5)),
                                              mode='same')
        clouds_now = convolution > 0
        total_grad[clouds_now == 1] = np.nan

        ### PASS I ###
        # In the FIRST PASS the LORD sayeth unto the image, 'Let all
        # whose BTD is below -7K be classified as CPOs, and remove the
        # tiny ones'
        # And those who fulfilled this condition were classified,
        # and it was good
        convolution = scipy.signal.convolve2d(clouds_now,
                                              np.ones((5, 5)),
                                              mode='same')
        clouds_now = convolution > 0
        btdiff_2_anom_diff_um = deepcopy(btdiff_2_anom_diff)
        btdiff_2_anom_diff[clouds_now > 0] = np.nan

        cpo_mask_pass_1 = btdiff_2_anom_diff < -7
        label_objects, nb_labels = ndi.label(cpo_mask_pass_1)
        sizes = np.bincount(label_objects.ravel())
        # Set clusters smaller than size 20 to zero
        mask_sizes = sizes > 20
        mask_sizes[0] = 0
        cpo_mask_pass_1 = mask_sizes[label_objects]

        ### PASS II ###
        # In the SECOND PASS the LORD sayeth unto the image, 'Let all
        # those included in the first pass which contain pixels which
        # are below -15K be classified'
        # And those who fulfilled this condition were classified,
        # and it was better
        cpo_mask_pass_2 = deepcopy(cpo_mask_pass_1)

        # Label the image and get all connected elements
        cpo_mask_pass_1, num = measurements.label(cpo_mask_pass_1)

        # Loop through each labelled blob: if none of its pixels falls
        # below -15K, remove the blob from the mask
        blob_ids = np.unique(cpo_mask_pass_1)
        blob_ids = blob_ids[blob_ids != 0]
        for i in np.arange(0, len(blob_ids)):
            target_region = cpo_mask_pass_1 == blob_ids[i]
            if not np.any(btdiff_2_anom_diff[target_region == 1] < -15):
                cpo_mask_pass_2[target_region == 1] = 0

        # For identified CPO regions, undo the convolution on the cloud
        # mask
        cpo_mask_um = btdiff_2_anom_diff_um < -7
        # Label the image and get all connected elements
        cpo_mask_um, num = measurements.label(cpo_mask_um)
        blob_ids = np.unique(cpo_mask_um)
        blob_ids = blob_ids[blob_ids != 0]
        if 1 in cpo_mask_pass_2:
            for i in np.arange(0, len(blob_ids)):
                target_region = cpo_mask_um == blob_ids[i]
                if 1 in cpo_mask_pass_2[target_region]:
                    cpo_mask_pass_2[target_region] = 1

        # Plot the anomaly difference field for this timestep
        extent = (np.min(orig_lons), np.max(orig_lons),
                  np.min(orig_lats), np.max(orig_lats))
        m = Basemap(projection='cyl', llcrnrlon=extent[0],
                    urcrnrlon=extent[1], llcrnrlat=extent[2],
                    urcrnrlat=extent[3], resolution='i')
        m.drawcoastlines(linewidth=0.5)
        m.drawcountries(linewidth=0.5)
        parallels = np.arange(10., 40, 2.)
        # labels = [left, right, top, bottom]
        m.drawparallels(parallels, labels=[False, True, True, False],
                        linewidth=0.5)
        meridians = np.arange(-20., 17., 4.)
        m.drawmeridians(meridians, labels=[True, False, False, True],
                        linewidth=0.5)
        vmin = -15
        vmax = 5
        levels = MaxNLocator(nbins=15).tick_values(vmin, vmax)
        cmap = cm.get_cmap('Blues_r')
        norm = BoundaryNorm(levels, ncolors=cmap.N, clip=True)
        m.pcolormesh(orig_lons, orig_lats, btdiff_2_anom_diff, cmap=cmap,
                     vmin=vmin, vmax=vmax, norm=norm)
        cbar = plt.colorbar(orientation='horizontal', fraction=0.056,
                            pad=0.06)
        cbar.ax.set_xlabel('BTdiff 2 anom diff')
        plt.tight_layout()
        plt.savefig('btdiff_' + datestrings[date_i] + '.png',
                    bbox_inches='tight')
        plt.close()

        return cpo_mask_pass_2, btdiff_2_anom_prev, \
            btdiff_2_anom_prev_2, btdiff_2_anom_prev_3

    else:
        if mesh:
            empty_arr = np.zeros((lats.shape[0], lons.shape[1]))
        else:
            empty_arr = np.zeros((lats.shape[0], lons.shape[0]))
        empty_arr[:] = np.nan
        return empty_arr, None, None, None
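
# detect_cpo returns the rolling anomaly state so the caller can thread
# it through consecutive timesteps. A minimal driver sketch under that
# contract; the argument values are placeholders, not from the original:
def _example_detect_cpo_loop(datetimes, datestrings, lons, lats,
                             cloud_lons, cloud_lats):
    prev_1, prev_2, prev_3 = None, None, None
    masks = []
    for date_i in range(len(datetimes)):
        # Feed last call's state back in; the first calls see None and
        # produce a zero anomaly-difference field
        mask, prev_1, prev_2, prev_3 = detect_cpo(
            prev_1, prev_2, prev_3, datetimes, datestrings, date_i,
            lons, lats, cloud_lons, cloud_lats)
        masks.append(mask)
    return masks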
def wrapper(bt_120_108_anom_m_prev_1, bt_120_108_anom_m_prev_2, bt_120_108_anom_m_prev_3, bt_108_087_anom_m_prev_1, bt_108_087_anom_m_prev_2, bt_108_087_anom_m_prev_3, bt_120_087_anom_m_prev_1, bt_120_087_anom_m_prev_2, bt_120_087_anom_m_prev_3, datetimes, datestrings, date_i, lons, lats, cloud_lons, cloud_lats, daily_clouds=False, double_digits=False, mesh=False, daily_bt=False): used_ids = [] runtime = datetimes[date_i] - datetimes[0] # print '\n' + datestrings[date_i] + '\n' totaltest = datetime.datetime.now() found_file = True if daily_bt == False: if os.path.isfile('/ouce-home/data/satellite/meteosat/seviri/15-min/' '0.03x0.03/bt' '/nc/' + datetimes[date_i].strftime("%B").upper() + str(datetimes[date_i].year) + '/H-000-MSG2__' '-MSG2________-' 'IR_BrightnessTemperatures___' '-000005___-' + datestrings[date_i] + '-__.nc'): bt = Dataset('/ouce-home/data/satellite/meteosat/seviri/15-min/' '0.03x0.03/bt' '/nc/' + datetimes[date_i].strftime("%B").upper() + str(datetimes[date_i].year) + '/H-000-MSG2__' '-MSG2________-' 'IR_BrightnessTemperatures___' '-000005___-' + datestrings[date_i] + '-__.nc') found_file = True elif os.path.isfile('/ouce-home/data/satellite/meteosat/seviri/15-min/' '0.03x0.03/bt' '/nc/' + datetimes[date_i].strftime("%B").upper() + str(datetimes[date_i].year) + '/H-000-MSG1__' '-MSG1________-' 'IR_BrightnessTemperatures___' '-000005___-' + datestrings[date_i] + '-__.nc'): bt = Dataset('/ouce-home/data/satellite/meteosat/seviri/15-min/' '0.03x0.03/bt' '/nc/' + datetimes[date_i].strftime("%B").upper() + str(datetimes[date_i].year) + '/H-000-MSG1__' '-MSG1________-' 'IR_BrightnessTemperatures___' '-000005___-' + datestrings[date_i] + '-__.nc') found_file = True else: found_file = False if daily_clouds: try: cloudmask = Dataset('/soge-home/projects/seviri_dust/raw_seviri_' 'data/cloudmask_nc/' + datetimes[date_i].strftime("%B%Y") + '/cloudmask_' + datetimes[date_i].strftime("%Y%m%d") + '.nc') cloudmask_times = num2date(cloudmask.variables['time'][:], cloudmask.variables['time'].units) cloudmask_times = np.asarray([ datetime.datetime(j.year, j.month, j.day, j.hour, j.minute) for j in cloudmask_times ]) cloudmask_bool = cloudmask_times == datetimes[date_i] clouds_now = cloudmask.variables['cloud_mask'][cloudmask_bool][0] found_file = True except: print 'Found no cloud mask file!' clouds_now = np.zeros(cloud_lons.shape) found_file = False sdf_root = '/soge-home/projects/seviri_dust/sdf/' \ + datetimes[date_i].strftime('%B') \ + str(datetimes[date_i].year) + '/' if os.path.isfile(sdf_root + 'SDF_v2.' + \ datestrings[date_i] + '.nc'): sdf = Dataset( sdf_root + 'SDF_v2.' + \ datestrings[date_i] + '.nc') found_file = True # print sdf sdf_now = sdf.variables['SDF'][:] else: print 'No SDF file found for this date' found_file = False else: try: cloudmask = Dataset( '/soge-home/data/satellite/meteosat/seviri/15-min/' '0.03x0.03/cloudmask' '/nc/' + datetimes[date_i].strftime("%B").upper() + str(datetimes[date_i].year) + '_CLOUDS/eumetsat.cloud.' + datestrings[date_i] + '.nc') clouds_now = cloudmask.variables['cmask'][:][0] cloud_lons = cloudmask.variables['lon'][:] cloud_lats = cloudmask.variables['lat'][:] except: clouds_now = np.zeros(cloud_lons.shape) found_file = False print 'Found no cloud mask file!' sdf_root = '/soge-home/data_not_backed_up/satellite/meteosat' \ '/seviri/15' \ '-min/0.03x0.03/sdf/nc/' + datetimes[date_i].strftime( '%B').upper() + str(datetimes[date_i].year) + '/SDF_v2/' if os.path.isfile(sdf_root + 'SDF_v2.' 
+ \ datestrings[date_i] + '.nc'): sdf = Dataset( sdf_root + 'SDF_v2.' + \ datestrings[date_i] + '.nc') found_file = True # print sdf if 'time' in sdf.variables: sdf_now = sdf.variables['bt108'][0] else: sdf_now = sdf.variables['bt108'][:] else: print 'No SDF file found for this date' found_file = False if found_file: if daily_bt == False: bt087 = bt.variables['bt087'][:][0] bt108 = bt.variables['bt108'][:][0] bt12 = bt.variables['bt120'][:][0] orig_lons = bt.variables['longitude'][:] orig_lats = bt.variables['latitude'][:] orig_lons, orig_lats = np.meshgrid(orig_lons, orig_lats) else: orig_lons = lons orig_lats = lats # print bt12.shape # print clouds_now.shape window_datetime_lower = datetime.datetime(datetimes[0].year, 6, 1, 0, 45) \ - datetime.timedelta(days=7) window_datetime_upper = datetime.datetime(datetimes[-1].year, 8, 31, 23, 45) \ + datetime.timedelta(days=7) BT_15_day_lower_bound = datetimes[date_i] - datetime.timedelta(days=7) BT_15_day_upper_bound = datetimes[date_i] + datetime.timedelta(days=7) # Get datetime objects between the above bounds time_params_7dayw = np.array([ window_datetime_lower.year, window_datetime_upper.year, window_datetime_lower.month, window_datetime_upper.month, window_datetime_lower.day, window_datetime_upper.day, datetimes[date_i].hour, datetimes[date_i].hour, datetimes[date_i].minute, datetimes[date_i].minute ]) datetimes_7dayw = utilities.get_daily_datetime_objects( time_params_7dayw) indices = np.arange(0, len(datetimes_7dayw)) lower_ind = datetimes_7dayw == BT_15_day_lower_bound lower_ind = indices[lower_ind][0] upper_ind = datetimes_7dayw == BT_15_day_upper_bound upper_ind = indices[upper_ind][0] current_ind = datetimes_7dayw == datetimes[date_i] current_ind = indices[current_ind][0] if double_digits: f = tables.open_file( '/soge-home/projects/seviri_dust/sdf/intermediary_files' '/bt_15d_' + datetimes[date_i].strftime('%Y_%H_%M') + '.hdf') BT_15_days = f.root.data[lower_ind:upper_ind] bt_data = f.root.data[current_ind] f.close() else: f = tables.open_file( '/soge-home/projects/seviri_dust/sdf/intermediary_files' '/bt_15d_' + str(datetimes[date_i].year) + '_' + str(datetimes[date_i].hour) + '_' + str(datetimes[date_i].minute) + '.hdf') BT_15_days = f.root.data[lower_ind:upper_ind] bt_data = f.root.data[current_ind] f.close() if daily_bt: bt087 = bt_data[0] bt108 = bt_data[1] bt12 = bt_data[2] bt_15day_087 = BT_15_days[:, 0] bt_15day_108 = BT_15_days[:, 1] bt_15day_120 = BT_15_days[:, 2] bt_15day_087_mean = np.nanmean(bt_15day_087, axis=0) bt_15day_108_mean = np.nanmean(bt_15day_108, axis=0) bt_15day_120_mean = np.nanmean(bt_15day_120, axis=0) btdiff_2_15daymean = bt_15day_120_mean - bt_15day_087_mean bt_108_087 = bt108 - bt087 bt_120_108 = bt12 - bt108 bt_120_087 = bt12 - bt087 bt_108_087_mean = bt_15day_108_mean - bt_15day_087_mean bt_120_108_mean = bt_15day_120_mean - bt_15day_108_mean bt_120_087_mean = bt_15day_120_mean - bt_15day_087_mean bt_108_087_anom = bt_108_087 - bt_108_087_mean bt_120_108_anom = bt_120_108 - bt_120_108_mean bt_120_087_anom = bt_120_087 - bt_120_087_mean if mesh: cloud_lons, cloud_lats = np.meshgrid(cloud_lons, cloud_lats) clouds_now_regridded = pinkdust.regrid_data(cloud_lons, cloud_lats, orig_lons, orig_lats, clouds_now, mesh=True) bt_108_087_anom_m = deepcopy(bt_108_087_anom) bt_120_087_anom_m = deepcopy(bt_120_087_anom) bt_120_108_anom_m = deepcopy(bt_120_108_anom) clouds_now_regridded[sdf_now == 1] = 0 bt_108_087_anom_m[clouds_now_regridded == 1] = np.nan bt_120_087_anom_m[clouds_now_regridded == 1] = np.nan 
        bt_120_108_anom_m[clouds_now_regridded == 1] = np.nan
        # btdiff_2 = bt12 - bt087
        # btdiff_2_anom = btdiff_2 - btdiff_2_15daymean

        # Build the three-timestep backward difference of each anomaly
        # field. Note 'is not None' here: '!= None' on a numpy array is
        # an elementwise comparison whose truth value is ambiguous.
        if bt_108_087_anom_m_prev_1 is not None:
            arra = (bt_120_087_anom_m - bt_120_087_anom_m_prev_3) + \
                   (bt_120_087_anom_m - bt_120_087_anom_m_prev_2) + \
                   (bt_120_087_anom_m - bt_120_087_anom_m_prev_1)
            arrb = (bt_120_108_anom_m - bt_120_108_anom_m_prev_3) + \
                   (bt_120_108_anom_m - bt_120_108_anom_m_prev_2) + \
                   (bt_120_108_anom_m - bt_120_108_anom_m_prev_1)
            arrc = (bt_108_087_anom_m - bt_108_087_anom_m_prev_3) + \
                   (bt_108_087_anom_m - bt_108_087_anom_m_prev_2) + \
                   (bt_108_087_anom_m - bt_108_087_anom_m_prev_1)
            detected_bt = (arrc - arrb) + (arra - arrb)
        else:
            detected_bt = np.zeros(bt_108_087_anom.shape)

        # Shift the stored anomaly fields back one timestep; until three
        # timesteps have been seen, only the slots that exist are filled
        if date_i == 0:
            bt_120_087_anom_m_prev_1 = deepcopy(bt_120_087_anom_m)
            bt_120_108_anom_m_prev_1 = deepcopy(bt_120_108_anom_m)
            bt_108_087_anom_m_prev_1 = deepcopy(bt_108_087_anom_m)
        elif date_i == 1:
            bt_120_087_anom_m_prev_2 = deepcopy(bt_120_087_anom_m_prev_1)
            bt_120_108_anom_m_prev_2 = deepcopy(bt_120_108_anom_m_prev_1)
            bt_108_087_anom_m_prev_2 = deepcopy(bt_108_087_anom_m_prev_1)
            bt_120_087_anom_m_prev_1 = deepcopy(bt_120_087_anom_m)
            bt_120_108_anom_m_prev_1 = deepcopy(bt_120_108_anom_m)
            bt_108_087_anom_m_prev_1 = deepcopy(bt_108_087_anom_m)
        else:
            # date_i == 2 and date_i > 2 were duplicate branches in the
            # original; they are merged here
            bt_120_087_anom_m_prev_3 = deepcopy(bt_120_087_anom_m_prev_2)
            bt_120_108_anom_m_prev_3 = deepcopy(bt_120_108_anom_m_prev_2)
            bt_108_087_anom_m_prev_3 = deepcopy(bt_108_087_anom_m_prev_2)
            bt_120_087_anom_m_prev_2 = deepcopy(bt_120_087_anom_m_prev_1)
            bt_120_108_anom_m_prev_2 = deepcopy(bt_120_108_anom_m_prev_1)
            bt_108_087_anom_m_prev_2 = deepcopy(bt_108_087_anom_m_prev_1)
            bt_120_087_anom_m_prev_1 = deepcopy(bt_120_087_anom_m)
            bt_120_108_anom_m_prev_1 = deepcopy(bt_120_108_anom_m)
            bt_108_087_anom_m_prev_1 = deepcopy(bt_108_087_anom_m)

        if daily_clouds:
            clouds_now_regridded = clouds_now_regridded > 1

        ### PASS I ###
        # In the FIRST PASS the LORD sayeth unto the image, 'Let all
        # whose BTD time gradient is below the generous threshold
        # (-6 K here) be classified as CPOs, and remove the tiny ones'
        # And those who fulfilled this condition were classified,
        # and it was good
        # NOTE: why cloud mask here? A strong cloud gradient in the
        # previous three timesteps which disappeared in this one could
        # still push us over the threshold, yet would not be cloud
        # masked.
        detected_bt_um = deepcopy(detected_bt)
        # if cloud_mask:
        #     detected_bt[clouds_now > 0] = np.nan
        cpo_mask_pass_1 = detected_bt < -6
        # Remove connected clusters smaller than 20 pixels
        label_objects, nb_labels = ndi.label(cpo_mask_pass_1)
        sizes = np.bincount(label_objects.ravel())
        mask_sizes = sizes > 20
        mask_sizes[0] = 0
        cpo_mask_pass_1 = mask_sizes[label_objects]

        ### PASS II ###
        # In the SECOND PASS the LORD sayeth unto the image, 'Let all
        # those included in the first pass which contain pixels below
        # -20K be classified'
        # And those who fulfilled this condition were classified,
        # and it was better
        cpo_mask_pass_2 = deepcopy(cpo_mask_pass_1)
        # Label the image and get all connected elements
        cpo_mask_pass_1, num = measurements.label(cpo_mask_pass_1)
        # Loop through each labelled blob: if any of its pixels is below
        # -20K, the whole blob is a freaking CPO
        blob_ids = np.unique(cpo_mask_pass_1)
        blob_ids = blob_ids[blob_ids != 0]
        for i in np.arange(0, len(blob_ids)):
            target_region = cpo_mask_pass_1 == blob_ids[i]
            # Keep only regions containing a strong time-gradient signal
            # (i.e. not ones introduced by the generous first-pass
            # threshold alone)
            if not np.any(detected_bt[target_region == 1] < -20):
                cpo_mask_pass_2[target_region == 1] = 0

        # For identified CPO regions, undo the convolution on the cloud
        # mask
        cpo_mask_um = detected_bt_um < -6
        # Label the image and get all connected elements
        cpo_mask_um, num = measurements.label(cpo_mask_um)
        blob_ids = np.unique(cpo_mask_um)
        blob_ids = blob_ids[blob_ids != 0]
        if np.any(cpo_mask_pass_2):
            for i in np.arange(0, len(blob_ids)):
                target_region = cpo_mask_um == blob_ids[i]
                if np.any(cpo_mask_pass_2[target_region]):
                    cpo_mask_pass_2[target_region] = 1

        if debug:
            extent = (np.min(orig_lons), np.max(orig_lons),
                      np.min(orig_lats), np.max(orig_lats))
            m = Basemap(projection='cyl', llcrnrlon=extent[0],
                        urcrnrlon=extent[1], llcrnrlat=extent[2],
                        urcrnrlat=extent[3], resolution='i')
            m.drawcoastlines(linewidth=0.5)
            m.drawcountries(linewidth=0.5)
            parallels = np.arange(10., 40, 2.)
            # labels = [left, right, top, bottom]
            m.drawparallels(parallels, labels=[False, True, True, False],
                            linewidth=0.5)
            meridians = np.arange(-20., 17., 4.)
            m.drawmeridians(meridians, labels=[True, False, False, True],
                            linewidth=0.5)
            # vmin/vmax rather than min/max, to avoid shadowing builtins
            vmin, vmax = -20, 5
            levels = MaxNLocator(nbins=15).tick_values(vmin, vmax)
            cmap = cm.get_cmap('Blues_r')
            norm = BoundaryNorm(levels, ncolors=cmap.N, clip=True)
            m.pcolormesh(orig_lons, orig_lats, detected_bt, cmap=cmap,
                         vmin=vmin, vmax=vmax, norm=norm)
            cbar = plt.colorbar(orientation='horizontal', fraction=0.056,
                                pad=0.06)
            cbar.ax.set_xlabel('BTdiff 2 anom diff')
            plt.tight_layout()
            plt.savefig('BTdiff_2_anom_diff' + datestrings[date_i] + '.png',
                        bbox_inches='tight')
            plt.close()

            m = Basemap(projection='cyl', llcrnrlon=extent[0],
                        urcrnrlon=extent[1], llcrnrlat=extent[2],
                        urcrnrlat=extent[3], resolution='i')
            m.drawcoastlines(linewidth=0.5)
            m.drawcountries(linewidth=0.5)
            parallels = np.arange(10., 40, 2.)
            # labels = [left, right, top, bottom]
            m.drawparallels(parallels, labels=[False, True, True, False],
                            linewidth=0.5)
            meridians = np.arange(-20., 17., 4.)
            m.drawmeridians(meridians, labels=[True, False, False, True],
                            linewidth=0.5)
            vmin, vmax = 180, 320
            levels = MaxNLocator(nbins=15).tick_values(vmin, vmax)
            cmap = cm.get_cmap('Blues_r')
            norm = BoundaryNorm(levels, ncolors=cmap.N, clip=True)
            m.pcolormesh(orig_lons, orig_lats, bt108, cmap=cmap,
                         vmin=vmin, vmax=vmax, norm=norm)
            cbar = plt.colorbar(orientation='horizontal', fraction=0.056,
                                pad=0.06)
            cbar.ax.set_xlabel('BT 10.8')
            plt.tight_layout()
            plt.savefig('BT108_' + datestrings[date_i] + '.png',
                        bbox_inches='tight')
            plt.close()

        if show_all_indicators:
            bt_108_087 = bt108 - bt087
            bt_120_108 = bt12 - bt108
            bt_120_087 = bt12 - bt087
            bt_108_087_mean = bt_15day_108_mean - bt_15day_087_mean
            bt_120_108_mean = bt_15day_120_mean - bt_15day_108_mean
            bt_120_087_mean = bt_15day_120_mean - bt_15day_087_mean
            bt_108_087_anom = bt_108_087 - bt_108_087_mean
            bt_120_108_anom = bt_120_108 - bt_120_108_mean
            bt_120_087_anom = bt_120_087 - bt_120_087_mean
            bt_087_anom = bt087 - bt_15day_087_mean
            bt_108_anom = bt108 - bt_15day_108_mean
            bt_120_anom = bt12 - bt_15day_120_mean

            def gradient_magnitude(field):
                # Magnitude of the horizontal gradient of a 2D field
                lat_grad, lon_grad = np.gradient(field)
                return np.sqrt(lat_grad ** 2 + lon_grad ** 2)

            grad_108_087_anom = gradient_magnitude(bt_108_087_anom)
            grad_120_108_anom = gradient_magnitude(bt_120_108_anom)
            grad_120_087_anom = gradient_magnitude(bt_120_087_anom)
            grad_087_anom = gradient_magnitude(bt_087_anom)
            grad_108_anom = gradient_magnitude(bt_108_anom)
            grad_120_anom = gradient_magnitude(bt_120_anom)

            indicators = [bt087, bt108, bt12,
                          bt_108_087_anom, bt_120_108_anom, bt_120_087_anom,
                          grad_108_087_anom, grad_120_108_anom,
                          grad_120_087_anom,
                          grad_087_anom, grad_108_anom, grad_120_anom]

            extent = (np.min(orig_lons), np.max(orig_lons),
                      np.min(orig_lats), np.max(orig_lats))
            m = Basemap(projection='cyl', llcrnrlon=extent[0],
                        urcrnrlon=extent[1], llcrnrlat=extent[2],
                        urcrnrlat=extent[3], resolution='i')
            m.drawcoastlines(linewidth=0.5)
            m.drawcountries(linewidth=0.5)
            parallels = np.arange(10., 40, 2.)
            m.drawparallels(parallels, labels=[False, True, True, False],
                            linewidth=0.5)
            meridians = np.arange(-20., 17., 4.)
            m.drawmeridians(meridians, labels=[True, False, False, True],
                            linewidth=0.5)

            mins = [180, 180, 180, -13, -13, -13, 0, 0, 0, 0, 0, 0]
            maxs = [320, 320, 320, 6, 6, 6, 9, 9, 9, 50, 50, 50]
            labels = ['bt087', 'bt108', 'bt120',
                      'bt_108_087_anom', 'bt_120_108_anom',
                      'bt_120_087_anom',
                      'grad_108_087_anom', 'grad_120_108_anom',
                      'grad_120_087_anom',
                      'grad_087_anom', 'grad_108_anom', 'grad_120_anom']

            for i in np.arange(0, len(indicators)):
                vmin, vmax = mins[i], maxs[i]
                levels = MaxNLocator(nbins=15).tick_values(vmin, vmax)
                cmap = cm.get_cmap('Blues_r')
                norm = BoundaryNorm(levels, ncolors=cmap.N, clip=True)
                plot = m.pcolormesh(orig_lons, orig_lats, indicators[i],
                                    cmap=cmap, vmin=vmin, vmax=vmax,
                                    norm=norm)
                cbar = plt.colorbar(orientation='horizontal',
                                    fraction=0.056, pad=0.06)
                cbar.ax.set_xlabel(labels[i])
                plt.tight_layout()
                plt.savefig(labels[i] + '_' + datestrings[date_i] + '.png',
                            bbox_inches='tight')
                cbar.remove()
                plot.remove()

        return cpo_mask_pass_2, bt_120_108_anom_m, \
            bt_120_108_anom_m_prev_1, bt_120_108_anom_m_prev_2, \
            bt_120_108_anom_m_prev_3, bt_108_087_anom_m, \
            bt_108_087_anom_m_prev_1, bt_108_087_anom_m_prev_2, \
            bt_108_087_anom_m_prev_3, bt_120_087_anom_m, \
            bt_120_087_anom_m_prev_1, bt_120_087_anom_m_prev_2, \
            bt_120_087_anom_m_prev_3
    else:
        if mesh:
            empty_arr = np.zeros((lats.shape[0], lons.shape[1]))
        else:
            empty_arr = np.zeros((lats.shape[0], lons.shape[0]))
        empty_arr[:] = np.nan
        # NOTE: this branch returns fewer values than the branch above;
        # callers unpacking thirteen values must handle the no-file case
        return empty_arr, None, None, None
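
# ---------------------------------------------------------------------------
# Illustrative sketch (added for reference, not called by the pipeline):
# the two-pass hysteresis filtering used in PASS I / PASS II above, run on
# a synthetic field. It assumes only the module-level imports this file
# already uses (np, ndi, measurements, deepcopy); the thresholds mirror
# the -6 K / -20 K pair above, but the data are made up.
def _two_pass_filter_demo():
    field = np.zeros((50, 50))
    field[10:20, 10:20] = -8.   # large but weak region: dropped in pass II
    field[30:40, 30:40] = -8.
    field[35, 35] = -25.        # one strong seed pixel: region retained
    # Pass I: generous threshold, then discard clusters of <= 20 pixels
    mask = field < -6
    labels, _ = ndi.label(mask)
    sizes = np.bincount(labels.ravel())
    keep = sizes > 20
    keep[0] = False
    mask = keep[labels]
    # Pass II: keep only blobs containing at least one pixel below -20
    labels, _ = measurements.label(mask)
    result = deepcopy(mask)
    for blob_id in np.unique(labels)[1:]:
        region = labels == blob_id
        if not np.any(field[region] < -20):
            result[region] = 0
    return result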
def wrapper(datetimes, datestrings, date_i, lons, lats, cloud_lons,
            cloud_lats, daily_clouds=False, double_digits=False,
            mesh=False, daily_bt=False):
    used_ids = []
    runtime = datetimes[date_i] - datetimes[0]
    # print '\n' + datestrings[date_i] + '\n'
    totaltest = datetime.datetime.now()
    found_file = True

    if not daily_bt:
        # Try the MSG2 file first, then fall back to MSG1
        bt_root = ('/ouce-home/data/satellite/meteosat/seviri/15-min/'
                   '0.03x0.03/bt/nc/'
                   + datetimes[date_i].strftime("%B").upper()
                   + str(datetimes[date_i].year))
        msg2_file = (bt_root + '/H-000-MSG2__-MSG2________-'
                     'IR_BrightnessTemperatures___-000005___-'
                     + datestrings[date_i] + '-__.nc')
        msg1_file = (bt_root + '/H-000-MSG1__-MSG1________-'
                     'IR_BrightnessTemperatures___-000005___-'
                     + datestrings[date_i] + '-__.nc')
        if os.path.isfile(msg2_file):
            bt = Dataset(msg2_file)
            found_file = True
        elif os.path.isfile(msg1_file):
            bt = Dataset(msg1_file)
            found_file = True
        else:
            found_file = False

    if daily_clouds:
        try:
            cloudmask = Dataset(
                '/soge-home/projects/seviri_dust/raw_seviri_data/'
                'cloudmask_nc/' + datetimes[date_i].strftime("%B%Y")
                + '/cloudmask_' + datetimes[date_i].strftime("%Y%m%d")
                + '.nc')
            cloudmask_times = num2date(cloudmask.variables['time'][:],
                                       cloudmask.variables['time'].units)
            cloudmask_times = np.asarray([
                datetime.datetime(j.year, j.month, j.day, j.hour, j.minute)
                for j in cloudmask_times])
            cloudmask_bool = cloudmask_times == datetimes[date_i]
            clouds_now = cloudmask.variables['cloud_mask'][cloudmask_bool][0]
            found_file = True
        except:
            print 'Found no cloud mask file!'
            clouds_now = np.zeros(cloud_lons.shape)
            found_file = False

        sdf_root = ('/soge-home/projects/seviri_dust/sdf/'
                    + datetimes[date_i].strftime('%B')
                    + str(datetimes[date_i].year) + '/')
        if os.path.isfile(sdf_root + 'SDF_v2.' + datestrings[date_i]
                          + '.nc'):
            sdf = Dataset(sdf_root + 'SDF_v2.' + datestrings[date_i]
                          + '.nc')
            # NOTE: this resets found_file even if the cloud mask was
            # missing (clouds_now then remains all zero)
            found_file = True
            sdf_now = sdf.variables['SDF'][:]
        else:
            print 'No SDF file found for this date'
            found_file = False
    else:
        try:
            cloudmask = Dataset(
                '/soge-home/data/satellite/meteosat/seviri/15-min/'
                '0.03x0.03/cloudmask/nc/'
                + datetimes[date_i].strftime("%B").upper()
                + str(datetimes[date_i].year)
                + '_CLOUDS/eumetsat.cloud.' + datestrings[date_i] + '.nc')
            clouds_now = cloudmask.variables['cmask'][:][0]
            cloud_lons = cloudmask.variables['lon'][:]
            cloud_lats = cloudmask.variables['lat'][:]
        except:
            clouds_now = np.zeros(cloud_lons.shape)
            found_file = False
            print 'Found no cloud mask file!'

        sdf_root = ('/soge-home/data_not_backed_up/satellite/meteosat/'
                    'seviri/15-min/0.03x0.03/sdf/nc/'
                    + datetimes[date_i].strftime('%B').upper()
                    + str(datetimes[date_i].year) + '/SDF_v2/')
        if os.path.isfile(sdf_root + 'SDF_v2.' + datestrings[date_i]
                          + '.nc'):
            sdf = Dataset(sdf_root + 'SDF_v2.' + datestrings[date_i]
                          + '.nc')
            found_file = True
            # Daily SDF files carry a time dimension; single-timestep
            # files do not
            if 'time' in sdf.variables:
                sdf_now = sdf.variables['bt108'][0]
            else:
                sdf_now = sdf.variables['bt108'][:]
        else:
            print 'No SDF file found for this date'
            found_file = False

    if found_file:
        if not daily_bt:
            bt087 = bt.variables['bt087'][:][0]
            bt108 = bt.variables['bt108'][:][0]
            bt12 = bt.variables['bt120'][:][0]
            orig_lons = bt.variables['longitude'][:]
            orig_lats = bt.variables['latitude'][:]
            orig_lons, orig_lats = np.meshgrid(orig_lons, orig_lats)
        else:
            orig_lons = lons
            orig_lats = lats

        # The moving window spans a fixed JJA season padded by seven days
        # on either side
        window_datetime_lower = datetime.datetime(
            datetimes[0].year, 6, 1, 0, 45) - datetime.timedelta(days=7)
        window_datetime_upper = datetime.datetime(
            datetimes[-1].year, 8, 31, 23, 45) + datetime.timedelta(days=7)
        BT_15_day_lower_bound = datetimes[date_i] - datetime.timedelta(
            days=7)
        BT_15_day_upper_bound = datetimes[date_i] + datetime.timedelta(
            days=7)

        # Get datetime objects between the above bounds
        time_params_7dayw = np.array([
            window_datetime_lower.year, window_datetime_upper.year,
            window_datetime_lower.month, window_datetime_upper.month,
            window_datetime_lower.day, window_datetime_upper.day,
            datetimes[date_i].hour, datetimes[date_i].hour,
            datetimes[date_i].minute, datetimes[date_i].minute])
        datetimes_7dayw = utilities.get_daily_datetime_objects(
            time_params_7dayw)

        indices = np.arange(0, len(datetimes_7dayw))
        lower_ind = indices[datetimes_7dayw == BT_15_day_lower_bound][0]
        upper_ind = indices[datetimes_7dayw == BT_15_day_upper_bound][0]
        current_ind = indices[datetimes_7dayw == datetimes[date_i]][0]

        # The two filename conventions differ only in zero padding
        if double_digits:
            bt_15d_file = (
                '/soge-home/projects/seviri_dust/sdf/intermediary_files'
                '/bt_15d_' + datetimes[date_i].strftime('%Y_%H_%M')
                + '.hdf')
        else:
            bt_15d_file = (
                '/soge-home/projects/seviri_dust/sdf/intermediary_files'
                '/bt_15d_' + str(datetimes[date_i].year) + '_'
                + str(datetimes[date_i].hour) + '_'
                + str(datetimes[date_i].minute) + '.hdf')
        f = tables.open_file(bt_15d_file)
        BT_15_days = f.root.data[lower_ind:upper_ind]
        bt_data = f.root.data[current_ind]
        f.close()

        if daily_bt:
            bt087 = bt_data[0]
            bt108 = bt_data[1]
            bt12 = bt_data[2]

        bt_15day_087 = BT_15_days[:, 0]
        bt_15day_108 = BT_15_days[:, 1]
        bt_15day_120 = BT_15_days[:, 2]
        bt_15day_087_mean = np.nanmean(bt_15day_087, axis=0)
        bt_15day_108_mean = np.nanmean(bt_15day_108, axis=0)
        bt_15day_120_mean = np.nanmean(bt_15day_120, axis=0)

        if mesh:
            cloud_lons, cloud_lats = np.meshgrid(cloud_lons, cloud_lats)
        clouds_now_regridded = pinkdust.regrid_data(
            cloud_lons, cloud_lats, orig_lons, orig_lats, clouds_now,
            mesh=True)
        if daily_clouds:
            clouds_now_regridded = clouds_now_regridded > 1

        # show_all_indicators is presumably a module-level flag
        if show_all_indicators:
            bt_108_087 = bt108 - bt087
            bt_120_108 = bt12 - bt108
            bt_120_087 = bt12 - bt087
            bt_108_087_mean = bt_15day_108_mean - bt_15day_087_mean
            bt_120_108_mean = bt_15day_120_mean - bt_15day_108_mean
            bt_120_087_mean = bt_15day_120_mean - bt_15day_087_mean
            bt_108_087_anom = bt_108_087 - bt_108_087_mean
            bt_120_108_anom = bt_120_108 - bt_120_108_mean
            bt_120_087_anom = bt_120_087 - bt_120_087_mean
            bt_087_anom = bt087 - bt_15day_087_mean
            bt_108_anom = bt108 - bt_15day_108_mean
            bt_120_anom = bt12 - bt_15day_120_mean

            def gradient_magnitude(field):
                # Magnitude of the horizontal gradient of a 2D field
                lat_grad, lon_grad = np.gradient(field)
                return np.sqrt(lat_grad ** 2 + lon_grad ** 2)

            grad_108_087_anom = gradient_magnitude(bt_108_087_anom)
            grad_120_108_anom = gradient_magnitude(bt_120_108_anom)
            grad_120_087_anom = gradient_magnitude(bt_120_087_anom)
            grad_087_anom = gradient_magnitude(bt_087_anom)
            grad_108_anom = gradient_magnitude(bt_108_anom)
            grad_120_anom = gradient_magnitude(bt_120_anom)

            bt_108_087_anom_m = deepcopy(bt_108_087_anom)
            bt_120_087_anom_m = deepcopy(bt_120_087_anom)
            bt_120_108_anom_m = deepcopy(bt_120_108_anom)

            # Dust (SDF) pixels are exempt from the cloud mask
            clouds_now_regridded[sdf_now == 1] = 0
            bt_108_087_anom_m[clouds_now_regridded == 1] = np.nan
            bt_120_087_anom_m[clouds_now_regridded == 1] = np.nan
            bt_120_108_anom_m[clouds_now_regridded == 1] = np.nan

            return bt087, bt108, bt12, bt_108_087_anom, bt_120_087_anom, \
                bt_120_108_anom, grad_087_anom, grad_108_anom, \
                grad_120_anom, grad_108_087_anom, grad_120_108_anom, \
                grad_120_087_anom, bt_108_087_anom_m, bt_120_087_anom_m, \
                bt_120_108_anom_m, orig_lons, orig_lats, \
                clouds_now_regridded, sdf_now
    # NOTE: if no file was found, or show_all_indicators is not set, the
    # function falls through and implicitly returns None
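
# ---------------------------------------------------------------------------
# Hedged usage sketch for wrapper() (kept commented out; the datetime
# construction below is hypothetical, and show_all_indicators must be set
# for the full tuple to be returned):
#
# datetimes = [datetime.datetime(2010, 6, 3, 15, 0)]
# datestrings = [d.strftime('%Y%m%d%H%M') for d in datetimes]
# out = wrapper(datetimes, datestrings, 0, lons, lats,
#               cloud_lons, cloud_lats, daily_clouds=True, mesh=True)
# (bt087, bt108, bt12, bt_108_087_anom, bt_120_087_anom, bt_120_108_anom,
#  grad_087_anom, grad_108_anom, grad_120_anom, grad_108_087_anom,
#  grad_120_108_anom, grad_120_087_anom, bt_108_087_anom_m,
#  bt_120_087_anom_m, bt_120_108_anom_m, orig_lons, orig_lats,
#  clouds_now_regridded, sdf_now) = out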
def detect_cpo(btdiff_2_anom_prev, btdiff_2_anom_prev_2,
               btdiff_2_anom_prev_3, datetimes, datestrings, date_i):
    # Get lats and lons from a sample SDF file
    sdf_test = Dataset(
        '/soge-home/data_not_backed_up/satellite/meteosat/seviri'
        '/15-min/0.03x0.03/sdf/nc/JUNE2010/SDF_v2/SDF_v2.'
        '201006031500.nc')
    lons, lats = np.meshgrid(sdf_test.variables['longitude'][:],
                             sdf_test.variables['latitude'][:])

    # Get cloud lats and lons from a sample cloud mask file
    cloud_test = Dataset(
        '/soge-home/data/satellite/meteosat/seviri/15-min/'
        '0.03x0.03/cloudmask/nc/JUNE2010_CLOUDS/eumetsat.cloud.'
        '201006031500.nc')
    cloud_lons = cloud_test.variables['lon'][:]
    cloud_lats = cloud_test.variables['lat'][:]
    cloud_lons, cloud_lats = np.meshgrid(cloud_lons, cloud_lats)

    # Mask out fill values lying off the disc
    lonmask = lons > 360
    latmask = lats > 90
    lons = np.ma.array(lons, mask=lonmask)
    lats = np.ma.array(lats, mask=latmask)

    used_ids = []
    runtime = datetimes[date_i] - datetimes[0]
    # print '\n' + datestrings[date_i] + '\n'
    totaltest = datetime.datetime.now()
    found_file = True

    # Try the MSG2 file first, then fall back to MSG1
    bt_root = ('/ouce-home/data/satellite/meteosat/seviri/15-min/'
               '0.03x0.03/bt/nc/'
               + datetimes[date_i].strftime("%B").upper()
               + str(datetimes[date_i].year))
    msg2_file = (bt_root + '/H-000-MSG2__-MSG2________-'
                 'IR_BrightnessTemperatures___-000005___-'
                 + datestrings[date_i] + '-__.nc')
    msg1_file = (bt_root + '/H-000-MSG1__-MSG1________-'
                 'IR_BrightnessTemperatures___-000005___-'
                 + datestrings[date_i] + '-__.nc')
    if os.path.isfile(msg2_file):
        bt = Dataset(msg2_file)
    elif os.path.isfile(msg1_file):
        bt = Dataset(msg1_file)
    else:
        # Originally unhandled: without this, 'bt' would be unbound below
        found_file = False

    try:
        cloudmask = Dataset(
            '/soge-home/data/satellite/meteosat/seviri/15-min/'
            '0.03x0.03/cloudmask/nc/'
            + datetimes[date_i].strftime("%B").upper()
            + str(datetimes[date_i].year)
            + '_CLOUDS/eumetsat.cloud.' + datestrings[date_i] + '.nc')
        clouds_now = cloudmask.variables['cmask'][:][0]
        cloud_lons = cloudmask.variables['lon'][:]
        cloud_lats = cloudmask.variables['lat'][:]
    except:
        clouds_now = np.zeros(cloud_lons.shape)
        found_file = False

    if found_file:
        # Produce the 12.0-8.7 micron BT difference imagery
        bt087 = bt.variables['bt087'][:][0]
        bt12 = bt.variables['bt120'][:][0]
        bt108 = bt.variables['bt108'][:][0]
        orig_lons = bt.variables['longitude'][:]
        orig_lats = bt.variables['latitude'][:]

        f = tables.open_file(
            '/soge-home/projects/seviri_dust/sdf/intermediary_files/'
            'bt_15d_' + str(datetimes[date_i].year) + '_'
            + str(datetimes[date_i].month) + '_'
            + str(datetimes[date_i].hour) + '_'
            + str(datetimes[date_i].minute) + '.hdf')
        arrobj = f.get_node('/data')
        bt_15day = arrobj.read()
        f.close()

        window_datetime_lower = datetime.datetime(
            datetimes[0].year, datetimes[0].month, datetimes[0].day,
            datetimes[0].hour, datetimes[0].minute) \
            - datetime.timedelta(days=7)
        window_datetime_upper = datetime.datetime(
            datetimes[-1].year, datetimes[-1].month, datetimes[-1].day,
            datetimes[-1].hour, datetimes[-1].minute) \
            + datetime.timedelta(days=7)
        BT_15_day_lower_bound = datetimes[date_i] - datetime.timedelta(
            days=7)
        BT_15_day_upper_bound = datetimes[date_i] + datetime.timedelta(
            days=7)

        # Get datetime objects between the above bounds
        time_params_7dayw = np.array([
            window_datetime_lower.year, window_datetime_upper.year,
            window_datetime_lower.month, window_datetime_upper.month,
            window_datetime_lower.day, window_datetime_upper.day,
            datetimes[date_i].hour, datetimes[date_i].hour,
            datetimes[date_i].minute, datetimes[date_i].minute])
        datetimes_7dayw = utilities.get_daily_datetime_objects(
            time_params_7dayw)

        # Restrict to the 15-day window centred on the current timestep
        in_window = np.asarray(
            [BT_15_day_lower_bound <= j <= BT_15_day_upper_bound
             for j in datetimes_7dayw])
        BT_15_days = bt_15day[in_window]

        bt_15day_087 = BT_15_days[:, 0]
        # bt_15day_108 = BT_15_days[:, 1]
        bt_15day_120 = BT_15_days[:, 2]
        bt_15day_087_mean = np.nanmean(bt_15day_087, axis=0)
        # bt_15day_108_mean = np.nanmean(bt_15day_108, axis=0)
        bt_15day_120_mean = np.nanmean(bt_15day_120, axis=0)
        btdiff_2_15daymean = bt_15day_120_mean - bt_15day_087_mean

        orig_lons, orig_lats = np.meshgrid(orig_lons, orig_lats)
        bt087_regridded = pinkdust.regrid_data(orig_lons, orig_lats,
                                               cloud_lons, cloud_lats,
                                               bt087)
        bt12_regridded = pinkdust.regrid_data(orig_lons, orig_lats,
                                              cloud_lons, cloud_lats,
                                              bt12)
        btdiff_2_15daymean_regridded = pinkdust.regrid_data(
            orig_lons, orig_lats, cloud_lons, cloud_lats,
            btdiff_2_15daymean)

        btdiff_2 = bt12_regridded - bt087_regridded
        btdiff_2_anom = btdiff_2 - btdiff_2_15daymean_regridded
        btdiff_2_anom[clouds_now > 0] = np.nan

        # 'is not None' rather than '!= None': the latter is an
        # elementwise comparison on numpy arrays
        if btdiff_2_anom_prev_3 is not None:
            # Difference between this timestep and the one three
            # timesteps before, plus its offsets from the two more
            # recent stored fields
            btdiff_2_anom_diff = btdiff_2_anom - btdiff_2_anom_prev_3
            orig_btdiff_2_anom_diff = deepcopy(btdiff_2_anom_diff)
            btdiff_2_anom_diff += \
                orig_btdiff_2_anom_diff - btdiff_2_anom_prev_2
            btdiff_2_anom_diff += \
                orig_btdiff_2_anom_diff - btdiff_2_anom_prev
        else:
            btdiff_2_anom_diff = np.zeros(btdiff_2_anom.shape)

        # Shift the stored anomaly fields back one timestep
        if date_i == 0:
            btdiff_2_anom_prev = btdiff_2_anom
        elif date_i == 1:
            btdiff_2_anom_prev_2 = deepcopy(btdiff_2_anom_prev)
            btdiff_2_anom_prev = btdiff_2_anom
        else:
            # date_i == 2 and date_i > 2 were duplicate branches in the
            # original; they are merged here
            btdiff_2_anom_prev_3 = deepcopy(btdiff_2_anom_prev_2)
            btdiff_2_anom_prev_2 = deepcopy(btdiff_2_anom_prev)
            btdiff_2_anom_prev = deepcopy(btdiff_2_anom)

        lat_grad, lon_grad = np.gradient(btdiff_2_anom)
        total_grad = np.sqrt(lat_grad ** 2 + lon_grad ** 2)

        # Dilate the cloud mask with a 5x5 boxcar convolution
        convolution = scipy.signal.convolve2d(clouds_now, np.ones((5, 5)),
                                              mode='same')
        clouds_now = convolution > 0
        total_grad[clouds_now == 1] = np.nan

        ### PASS I ###
        # In the FIRST PASS the LORD sayeth unto the image, 'Let all
        # whose BTD time gradient is below the generous threshold
        # (-7 K here) be classified as CPOs, and remove the tiny ones'
        # And those who fulfilled this condition were classified,
        # and it was good
        # Note that this dilates the (already dilated) cloud mask a
        # second time
        convolution = scipy.signal.convolve2d(clouds_now, np.ones((5, 5)),
                                              mode='same')
        clouds_now = convolution > 0
        btdiff_2_anom_diff_um = deepcopy(btdiff_2_anom_diff)
        btdiff_2_anom_diff[clouds_now > 0] = np.nan
        cpo_mask_pass_1 = btdiff_2_anom_diff < -7
        # Remove connected clusters smaller than 20 pixels
        label_objects, nb_labels = ndi.label(cpo_mask_pass_1)
        sizes = np.bincount(label_objects.ravel())
        mask_sizes = sizes > 20
        mask_sizes[0] = 0
        cpo_mask_pass_1 = mask_sizes[label_objects]

        ### PASS II ###
        # In the SECOND PASS the LORD sayeth unto the image, 'Let all
        # those included in the first pass which contain pixels below
        # -15K be classified'
        # And those who fulfilled this condition were classified,
        # and it was better
        cpo_mask_pass_2 = deepcopy(cpo_mask_pass_1)
        # Label the image and get all connected elements
        cpo_mask_pass_1, num = measurements.label(cpo_mask_pass_1)
        # Loop through each labelled blob: if any of its pixels is below
        # -15K, the whole blob is a freaking CPO
        blob_ids = np.unique(cpo_mask_pass_1)
        blob_ids = blob_ids[blob_ids != 0]
        for i in np.arange(0, len(blob_ids)):
            target_region = cpo_mask_pass_1 == blob_ids[i]
            # Keep only regions containing a strong time-gradient signal
            # (i.e. not ones introduced by the generous first-pass
            # threshold alone)
            if not np.any(btdiff_2_anom_diff[target_region == 1] < -15):
                cpo_mask_pass_2[target_region == 1] = 0

        # For identified CPO regions, undo the convolution on the cloud
        # mask
        cpo_mask_um = btdiff_2_anom_diff_um < -7
        # Label the image and get all connected elements
        cpo_mask_um, num = measurements.label(cpo_mask_um)
        blob_ids = np.unique(cpo_mask_um)
        blob_ids = blob_ids[blob_ids != 0]
        for i in np.arange(0, len(blob_ids)):
            target_region = cpo_mask_um == blob_ids[i]
            if np.any(cpo_mask_pass_2[target_region == 1] == 1):
                cpo_mask_pass_2[target_region == 1] = 1

        return cpo_mask_pass_2, btdiff_2_anom_prev, btdiff_2_anom_prev_2, \
            btdiff_2_anom_prev_3
    # NOTE: if no BT file was found, the function falls through and
    # implicitly returns None
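
# ---------------------------------------------------------------------------
# Hedged driver sketch for detect_cpo() (kept commented out): the three
# previous-anomaly fields start as None and must be threaded from each
# call into the next; a None return (no BT file for a timestep) would
# need handling before unpacking. The datetimes/datestrings construction
# is hypothetical.
#
# btdiff_2_anom_prev = btdiff_2_anom_prev_2 = btdiff_2_anom_prev_3 = None
# for date_i in range(len(datetimes)):
#     cpo_mask, btdiff_2_anom_prev, btdiff_2_anom_prev_2, \
#         btdiff_2_anom_prev_3 = detect_cpo(
#             btdiff_2_anom_prev, btdiff_2_anom_prev_2,
#             btdiff_2_anom_prev_3, datetimes, datestrings, date_i)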