def test_issue_189(self):
    fn = '/mnt/10.11.12.232/sat_downloads_asar/level-0/2010-01/descending/VV/gsar_rvl/RVL_ASA_WS_20100110211812087.gsar'
    if doppler_installed:
        n = Doppler(fn)
        xlon, xlat = n.get_corners()
        d = Domain(NSR(3857),
                   '-lle %f %f %f %f -tr 1000 1000' % (
                       xlon.min(), xlat.min(), xlon.max(), xlat.max()))
        n.reproject(d, eResampleAlg=1, tps=True)
        inci = n['incidence_angle']
def process(self, uri, *args, **kwargs):
    """ Create data products """
    ds, created = self.get_or_create(uri, *args, **kwargs)
    fn = nansat_filename(uri)

    swath_data = {}
    # Read subswaths
    for i in range(self.N_SUBSWATHS):
        swath_data[i] = Doppler(fn, subswath=i)

    # Get module name
    mm = self.__module__.split('.')
    module = '%s.%s' % (mm[0], mm[1])
    # Set media path (where images will be stored)
    mp = media_path(module, swath_data[i].filename)
    # Set product path (where netcdf products will be stored)
    ppath = product_path(module, swath_data[i].filename)

    # Loop subswaths, process each of them and create figures for
    # display with leaflet
    for i in range(self.N_SUBSWATHS):
        is_corrupted = False
        # Check if the file is corrupted
        try:
            inci = swath_data[i]['incidence_angle']
        except Exception:
            # TODO: What kind of exception?
            is_corrupted = True
            continue

        # Add Doppler anomaly
        swath_data[i].add_band(
            array=swath_data[i].anomaly(),
            parameters={
                'wkv': 'anomaly_of_surface_backwards_doppler_centroid_frequency_shift_of_radar_wave'
            })

        # Get band number of DC freq, then DC polarisation
        band_number = swath_data[i]._get_band_number({
            'standard_name': 'surface_backwards_doppler_centroid_frequency_shift_of_radar_wave',
        })
        pol = swath_data[i].get_metadata(bandID=band_number, key='polarization')

        # Calculate total geophysical Doppler shift
        fdg = swath_data[i].geophysical_doppler_shift()
        swath_data[i].add_band(
            array=fdg,
            parameters={
                'wkv': 'surface_backwards_doppler_frequency_shift_of_radar_wave_due_to_surface_velocity'
            })

        # Set filename of exported netcdf
        fn = os.path.join(
            ppath,
            os.path.basename(swath_data[i].filename).split('.')[0]
            + 'subswath%d.nc' % i)
        # Set filename of original gsar file in metadata
        swath_data[i].set_metadata(key='Originating file',
                                   value=swath_data[i].filename)
        # Export data to netcdf
        print('Exporting %s (subswath %d)' % (swath_data[i].filename, i))
        swath_data[i].export(filename=fn)

        # Add netcdf uri to DatasetURIs
        ncuri = os.path.join('file://localhost', fn)
        new_uri, created = DatasetURI.objects.get_or_create(uri=ncuri,
                                                            dataset=ds)

        # Reproject to leaflet projection
        xlon, xlat = swath_data[i].get_corners()
        d = Domain(NSR(3857),
                   '-lle %f %f %f %f -tr 1000 1000' % (
                       xlon.min(), xlat.min(), xlon.max(), xlat.max()))
        swath_data[i].reproject(d, eResampleAlg=1, tps=True)

        # Check if the reprojection failed
        try:
            inci = swath_data[i]['incidence_angle']
        except Exception:
            is_corrupted = True
            warnings.warn('Could not read incidence angles - reprojection failed')
            continue

        # Create visualizations of the following bands (short_names)
        ingest_creates = ['valid_doppler', 'valid_land_doppler',
                          'valid_sea_doppler', 'dca', 'fdg']
        for band in ingest_creates:
            filename = '%s_subswath_%d.png' % (band, i)
            # check uniqueness of parameter
            param = Parameter.objects.get(short_name=band)
            fig = swath_data[i].write_figure(
                os.path.join(mp, filename),
                bands=band,
                mask_array=swath_data[i]['swathmask'],
                mask_lut={0: [128, 128, 128]},
                transparency=[128, 128, 128])
            if isinstance(fig, Figure):
                print('Created figure of subswath %d, band %s' % (i, band))
            else:
                warnings.warn('Figure NOT CREATED')

            # Get or create DatasetParameter
            dsp, created = DatasetParameter.objects.get_or_create(
                dataset=ds, parameter=param)

            # Create GeographicLocation for the visualization object
            geom, created = GeographicLocation.objects.get_or_create(
                geometry=WKTReader().read(swath_data[i].get_border_wkt()))

            # Create Visualization
            vv, created = Visualization.objects.get_or_create(
                uri='file://localhost%s/%s' % (mp, filename),
                title='%s (swath %d)' % (param.standard_name, i + 1),
                geographic_location=geom)

            # Create VisualizationParameter
            vp, created = VisualizationParameter.objects.get_or_create(
                visualization=vv, ds_parameter=dsp)

    # TODO: consider merged figures like Jeong-Won has added in the
    # development branch

    return ds, not is_corrupted
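# The EPSG:3857 (leaflet) reprojection above is repeated in several places in
# this module. A hypothetical helper like the sketch below could factor it
# out; the name and placement are illustrative and not part of the original
# code.
def _leaflet_domain(n):
    """Return a 1x1 km EPSG:3857 Domain covering the Nansat object n."""
    xlon, xlat = n.get_corners()
    return Domain(NSR(3857),
                  '-lle %f %f %f %f -tr 1000 1000' % (
                      xlon.min(), xlat.min(), xlon.max(), xlat.max()))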
def create_merged_swaths(self, ds, EPSG=4326, **kwargs):
    """Merge swaths, add dataseturi, and return Nansat object.

    EPSG options:
        - 4326: WGS 84 / longlat
        - 3995: WGS 84 / Arctic Polar Stereographic
    """
    # Read the five exported subswath products and their geolocation grids
    nn = {}
    lons = {}
    lats = {}
    for i in range(self.N_SUBSWATHS):
        nn[i] = Doppler(
            nansat_filename(
                ds.dataseturi_set.get(uri__endswith='%d.nc' % i).uri))
        lons[i], lats[i] = nn[i].get_geolocation_grids()
    connection.close()

    # Mean pixel spacing and grid size in longitude over all subswaths
    dlon = np.mean([np.abs(np.mean(np.gradient(lons[i], axis=1)))
                    for i in range(self.N_SUBSWATHS)])
    nx = len(np.arange(
        min(lons[i].min() for i in range(self.N_SUBSWATHS)),
        max(lons[i].max() for i in range(self.N_SUBSWATHS)),
        dlon))

    # Mean pixel spacing and grid size in latitude over all subswaths
    dlat = np.mean([np.abs(np.mean(np.gradient(lats[i], axis=0)))
                    for i in range(self.N_SUBSWATHS)])
    ny = len(np.arange(
        min(lats[i].min() for i in range(self.N_SUBSWATHS)),
        max(lats[i].max() for i in range(self.N_SUBSWATHS)),
        dlat))
    if ny is None:
        # Fallback grid size (NB: len() never returns None, so this branch
        # is effectively dead code)
        ny = np.array([nn[i].shape()[0]
                       for i in range(self.N_SUBSWATHS)]).max()

    ## THIS DOES NOT WORK..
    #sensor_view = np.sort(
    #    np.append(np.append(np.append(np.append(
    #        nn[0]['sensor_view'][0,:],
    #        nn[1]['sensor_view'][0,:]),
    #        nn[2]['sensor_view'][0,:]),
    #        nn[3]['sensor_view'][0,:]),
    #        nn[4]['sensor_view'][0,:]))
    #nx = sensor_view.size
    #x = np.arange(nx)
    #def func(x, a, b, c, d):
    #    return a*x**3 + b*x**2 + c*x + d
    #def linear_func(x, a, b):
    #    return a*x + b
    #azimuth_time = np.sort(
    #    np.append(np.append(np.append(np.append(
    #        nn[0].get_azimuth_time(),
    #        nn[1].get_azimuth_time()),
    #        nn[2].get_azimuth_time()),
    #        nn[3].get_azimuth_time()),
    #        nn[4].get_azimuth_time()))
    #dt = azimuth_time.max() - azimuth_time[0]
    #tt = np.arange(0, dt, dt/ny)
    #tt = np.append(np.array([-dt/ny], dtype='<m8[us]'), tt)
    #tt = np.append(tt, tt[-1] + np.array([dt/ny, 2*dt/ny], dtype='<m8[us]'))
    #ny = len(tt)
    ## AZIMUTH_TIME
    #azimuth_time = (np.datetime64(azimuth_time[0]) + tt).astype(datetime)
    #popt, pcov = curve_fit(func, x, sensor_view)
    ## SENSOR VIEW ANGLE
    #alpha = np.ones((ny, sensor_view.size))*np.deg2rad(func(x, *popt))
    #range_time = np.sort(
    #    np.append(np.append(np.append(np.append(
    #        nn[0].get_range_time(),
    #        nn[1].get_range_time()),
    #        nn[2].get_range_time()),
    #        nn[3].get_range_time()),
    #        nn[4].get_range_time()))
    #popt, pcov = curve_fit(linear_func, x, range_time)
    ## RANGE_TIME
    #range_time = linear_func(x, *popt)
    #ecefPos, ecefVel = Doppler.orbital_state_vectors(azimuth_time)
    #eciPos, eciVel = ecef2eci(ecefPos, ecefVel, azimuth_time)
    ## Get satellite hour angle
    #satHourAng = np.deg2rad(Doppler.satellite_hour_angle(azimuth_time, ecefPos, ecefVel))
    ## Get attitude from the Envisat yaw steering law
    #psi, gamma, phi = np.deg2rad(Doppler.orbital_attitude_vectors(azimuth_time, satHourAng))
    #U1, AX1, S1 = Doppler.step_one_calculations(alpha, psi, gamma, phi, eciPos)
    #S2, U2, AX2 = Doppler.step_two_calculations(satHourAng, S1, U1, AX1)
    #S3, U3, AX3 = Doppler.step_three_a_calculations(eciPos, eciVel, S2, U2, AX2)
    #U3g = Doppler.step_three_b_calculations(S3, U3, AX3)
    #P3, U3g, lookAng = Doppler.step_four_calculations(S3, U3g, AX3, range_time)
    #dcm = dcmeci2ecef(azimuth_time, 'IAU-2000/2006')
    #lat = np.zeros((ny, nx))
    #lon = np.zeros((ny, nx))
    #alt = np.zeros((ny, nx))
    #for i in range(P3.shape[1]):
    #    ecefPos = np.matmul(dcm[0], P3[:,i,:,0, np.newaxis])
    #    lla = ecef2lla(ecefPos)
    #    lat[:,i] = lla[:,0]
    #    lon[:,i] = lla[:,1]
    #    alt[:,i] = lla[:,2]
    #lon = lon.round(decimals=5)
    #lat = lat.round(decimals=5)

    # THIS WORKS:
    lonmin = min(lons[i].min() for i in range(self.N_SUBSWATHS))
    lonmax = max(lons[i].max() for i in range(self.N_SUBSWATHS))
    latmin = min(lats[i].min() for i in range(self.N_SUBSWATHS))
    latmax = max(lats[i].max() for i in range(self.N_SUBSWATHS))
    if nx is None:
        # Fallback grid size (NB: len() never returns None, so this branch
        # is effectively dead code)
        nx = sum(nn[i].shape()[1] for i in range(self.N_SUBSWATHS))

    # Prepare geospatial grid
    merged = Nansat.from_domain(
        Domain(NSR(EPSG),
               '-lle %f %f %f %f -ts %d %d'
               % (lonmin, latmin, lonmax, latmax, nx, ny)))

    ## THIS DOES NOT WORK..
    #merged = Nansat.from_domain(Domain.from_lonlat(lon, lat, add_gcps=False))
    #merged.add_band(array=np.rad2deg(alpha), parameters={'wkv': 'sensor_view'})

    dfdg = np.ones(self.N_SUBSWATHS) * 5  # Hz (5 Hz a priori)
    for i in range(self.N_SUBSWATHS):
        dfdg[i] = nn[i].get_uncertainty_of_fdg()
        nn[i].reproject(merged, tps=True, resample_alg=1, block_size=2)

    # Initialize band arrays
    shape = (self.N_SUBSWATHS, merged.shape()[0], merged.shape()[1])
    inc = np.ones(shape) * np.nan
    fdg = np.ones(shape) * np.nan
    ur = np.ones(shape) * np.nan
    valid_sea_dop = np.ones(shape) * np.nan
    std_fdg = np.ones(shape) * np.nan
    std_ur = np.ones(shape) * np.nan

    for ii in range(self.N_SUBSWATHS):
        inc[ii] = nn[ii]['incidence_angle']
        fdg[ii] = nn[ii]['fdg']
        ur[ii] = nn[ii]['Ur']
        valid_sea_dop[ii] = nn[ii]['valid_sea_doppler']
        # uncertainty of fdg is a scalar
        std_fdg[ii][valid_sea_dop[ii] == 1] = dfdg[ii]
        # uncertainty of ur
        std_ur[ii] = nn[ii].get_uncertainty_of_radial_current(dfdg[ii])

    # Calculate incidence angle as a simple average
    mean_inc = np.nanmean(inc, axis=0)
    merged.add_band(array=mean_inc,
                    parameters={'name': 'incidence_angle',
                                'wkv': 'angle_of_incidence'})

    # Calculate fdg as inverse-variance weighted average
    mean_fdg = nansumwrapper((fdg/np.square(std_fdg)).data, axis=0) / \
        nansumwrapper((1./np.square(std_fdg)).data, axis=0)
    merged.add_band(
        array=mean_fdg,
        parameters={
            'name': 'fdg',
            'wkv': 'surface_backwards_doppler_frequency_shift_of_radar_wave_due_to_surface_velocity'
        })
    # Standard deviation of fdg
    std_mean_fdg = np.sqrt(
        1. / nansumwrapper((1. / np.square(std_fdg)).data, axis=0))
    merged.add_band(array=std_mean_fdg, parameters={'name': 'std_fdg'})

    # Calculate ur as inverse-variance weighted average
    mean_ur = nansumwrapper((ur/np.square(std_ur)).data, axis=0) / \
        nansumwrapper((1./np.square(std_ur)).data, axis=0)
    merged.add_band(array=mean_ur, parameters={'name': 'Ur'})
    # Standard deviation of Ur
    std_mean_ur = np.sqrt(
        1. / nansumwrapper((1. / np.square(std_ur)).data, axis=0))
    merged.add_band(array=std_mean_ur, parameters={'name': 'std_ur'})

    # Band of valid pixels
    vsd = np.nanmin(valid_sea_dop, axis=0)
    merged.add_band(array=vsd, parameters={'name': 'valid_sea_doppler'})

    # Add file to db
    fn = os.path.join(
        product_path(
            self.module_name(),
            nansat_filename(
                ds.dataseturi_set.get(uri__endswith='.gsar').uri)),
        os.path.basename(
            nansat_filename(
                ds.dataseturi_set.get(uri__endswith='.gsar').uri)
        ).split('.')[0] + '_merged.nc')
    merged.export(filename=fn)
    ncuri = 'file://localhost' + fn
    new_uri, created = DatasetURI.objects.get_or_create(uri=ncuri,
                                                        dataset=ds)
    connection.close()

    return merged
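# A minimal, self-contained sketch of the inverse-variance weighting used in
# create_merged_swaths() above (illustration only; arrays and sigma values
# are made up). Each subswath estimate x_i with uncertainty s_i contributes
# with weight 1/s_i**2, and the merged uncertainty is sqrt(1/sum(1/s_i**2)).
def _example_inverse_variance_merge():
    import numpy as np
    x = np.array([[10., np.nan], [12., 11.], [np.nan, 9.]])  # subswath estimates
    s = np.array([[5., np.nan], [2., 5.], [np.nan, 2.]])     # per-subswath std
    w = 1. / np.square(s)                                    # weights
    mean = np.nansum(x * w, axis=0) / np.nansum(w, axis=0)   # weighted mean
    std = np.sqrt(1. / np.nansum(w, axis=0))                 # merged uncertainty
    return mean, std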
def process(self, ds, force=False, *args, **kwargs):
    """Create data products.

    Returns
    =======
    ds : geospaas.catalog.models.Dataset
    processed : bool
        Flag to indicate if the dataset was processed or not
    """
    swath_data = {}

    # Set media path (where images will be stored)
    mp = media_path(
        self.module_name(),
        nansat_filename(ds.dataseturi_set.get(uri__endswith='.gsar').uri))
    # Set product path (where netcdf products will be stored)
    ppath = product_path(
        self.module_name(),
        nansat_filename(ds.dataseturi_set.get(uri__endswith='.gsar').uri))

    # Read subswaths
    dss = {1: None, 2: None, 3: None, 4: None, 5: None}
    processed = [True, True, True, True, True]
    failing = [False, False, False, False, False]
    for i in range(self.N_SUBSWATHS):
        # Check if the data has already been processed
        try:
            fn = nansat_filename(
                ds.dataseturi_set.get(uri__endswith='%d.nc' % i).uri)
        except DatasetURI.DoesNotExist:
            processed[i] = False
        else:
            dd = Nansat(fn)
            try:
                std_Ur = dd['std_Ur']
            except ValueError:
                processed[i] = False
        if processed[i] and not force:
            continue
        # Process from scratch to avoid duplication of bands
        fn = nansat_filename(
            ds.dataseturi_set.get(uri__endswith='.gsar').uri)
        try:
            dd = Doppler(fn, subswath=i)
        except Exception as e:
            logging.error('%s (Filename, subswath [1-5]): (%s, %d)'
                          % (str(e), fn, i + 1))
            failing[i] = True
            continue

        # Check if the file is corrupted
        try:
            inc = dd['incidence_angle']
        except Exception as e:
            logging.error('%s (Filename, subswath [1-5]): (%s, %d)'
                          % (str(e), fn, i + 1))
            failing[i] = True
            continue

        dss[i + 1] = dd

    if all(processed) and not force:
        logging.info("%s: The dataset has already been processed."
                     % nansat_filename(
                         ds.dataseturi_set.get(uri__endswith='.gsar').uri))
        return ds, False

    if all(failing):
        logging.error(
            "Processing of all subswaths is failing: %s"
            % nansat_filename(
                ds.dataseturi_set.get(uri__endswith='.gsar').uri))
        return ds, False

    if any(failing):
        logging.error(
            "Some but not all subswaths processed: %s"
            % nansat_filename(
                ds.dataseturi_set.get(uri__endswith='.gsar').uri))
        return ds, False

    logging.info(
        "Processing %s"
        % nansat_filename(ds.dataseturi_set.get(uri__endswith='.gsar').uri))

    # Loop subswaths, process each of them
    processed = False

    def get_overlap(d1, d2):
        """Return a mask of pixels in d1 lying within the border of d2."""
        b1 = d1.get_border_geometry()
        b2 = d2.get_border_geometry()
        intersection = b1.Intersection(b2)
        lo1, la1 = d1.get_geolocation_grids()
        overlap = np.zeros(lo1.shape)
        for i in range(lo1.shape[0]):
            for j in range(lo1.shape[1]):
                wkt_point = 'POINT(%.5f %.5f)' % (lo1[i, j], la1[i, j])
                overlap[i, j] = intersection.Contains(
                    ogr.CreateGeometryFromWkt(wkt_point))
        return overlap

    for uri in ds.dataseturi_set.filter(uri__endswith='.nc'):
        logging.debug("%s" % nansat_filename(uri.uri))

    # Find pixels in dss[1] which overlap with pixels in dss[2]
    overlap12 = get_overlap(dss[1], dss[2])
    # Find pixels in dss[2] which overlap with pixels in dss[1]
    overlap21 = get_overlap(dss[2], dss[1])
    # and so on..
    overlap23 = get_overlap(dss[2], dss[3])
    overlap32 = get_overlap(dss[3], dss[2])
    overlap34 = get_overlap(dss[3], dss[4])
    overlap43 = get_overlap(dss[4], dss[3])
    overlap45 = get_overlap(dss[4], dss[5])
    overlap54 = get_overlap(dss[5], dss[4])

    # Get range bias corrected Doppler
    fdg = {}
    for key in dss.keys():
        fdg[key] = dss[key].anomaly() - dss[key].range_bias()

    # Get median values at overlapping borders
    median12 = np.nanmedian(fdg[1][np.where(overlap12)])
    median21 = np.nanmedian(fdg[2][np.where(overlap21)])
    median23 = np.nanmedian(fdg[2][np.where(overlap23)])
    median32 = np.nanmedian(fdg[3][np.where(overlap32)])
    median34 = np.nanmedian(fdg[3][np.where(overlap34)])
    median43 = np.nanmedian(fdg[4][np.where(overlap43)])
    median45 = np.nanmedian(fdg[4][np.where(overlap45)])
    median54 = np.nanmedian(fdg[5][np.where(overlap54)])

    # Adjust levels to align at subswath borders
    fdg[1] -= median12 - np.nanmedian(np.array([median12, median21]))
    fdg[2] -= median21 - np.nanmedian(np.array([median12, median21]))

    fdg[1] -= median23 - np.nanmedian(np.array([median23, median32]))
    fdg[2] -= median23 - np.nanmedian(np.array([median23, median32]))
    fdg[3] -= median32 - np.nanmedian(np.array([median23, median32]))

    fdg[1] -= median34 - np.nanmedian(np.array([median34, median43]))
    fdg[2] -= median34 - np.nanmedian(np.array([median34, median43]))
    fdg[3] -= median34 - np.nanmedian(np.array([median34, median43]))
    fdg[4] -= median43 - np.nanmedian(np.array([median34, median43]))

    fdg[1] -= median45 - np.nanmedian(np.array([median45, median54]))
    fdg[2] -= median45 - np.nanmedian(np.array([median45, median54]))
    fdg[3] -= median45 - np.nanmedian(np.array([median45, median54]))
    fdg[4] -= median45 - np.nanmedian(np.array([median45, median54]))
    fdg[5] -= median54 - np.nanmedian(np.array([median45, median54]))

    # Correct by land or mean fww
    try:
        wind_fn = nansat_filename(
            Dataset.objects.get(
                source__platform__short_name='ERA15DAS',
                time_coverage_start__lte=ds.time_coverage_end,
                time_coverage_end__gte=ds.time_coverage_start
            ).dataseturi_set.get().uri)
    except Exception as e:
        logging.error(
            "%s - in search for ERA15DAS data (%s, %s, %s) "
            % (str(e),
               nansat_filename(
                   ds.dataseturi_set.get(uri__endswith=".gsar").uri),
               ds.time_coverage_start,
               ds.time_coverage_end))
        return ds, False
    connection.close()

    land = np.array([])
    fww = np.array([])
    offset_corrected = 0
    for key in dss.keys():
        land = np.append(
            land,
            fdg[key][dss[key]['valid_land_doppler'] == 1].flatten())

    if land.any():
        logging.info('Using land for bias corrections')
        land_bias = np.nanmedian(land)
        offset_corrected = 1
    else:
        logging.info('Using CDOP wind-waves Doppler for bias corrections')
        # Correct by mean wind doppler
        for key in dss.keys():
            ff = fdg[key].copy()
            # Do CDOP correction
            ff[dss[key]['valid_sea_doppler'] == 1] = \
                ff[dss[key]['valid_sea_doppler'] == 1] \
                - dss[key].wind_waves_doppler(wind_fn)[0][
                    dss[key]['valid_sea_doppler'] == 1]
            ff[dss[key]['valid_doppler'] == 0] = np.nan
            fww = np.append(fww, ff.flatten())
        land_bias = np.nanmedian(fww)
        if np.isnan(land_bias):
            offset_corrected = 0
            raise Exception('land bias is NaN...')
        else:
            offset_corrected = 1

    for key in dss.keys():
        fdg[key] -= land_bias
        # Set unrealistically high/low values to NaN (ref issue #4 and #5)
        fdg[key][fdg[key] < -100] = np.nan
        fdg[key][fdg[key] > 100] = np.nan

        # Add fdg[key] as band
        dss[key].add_band(
            array=fdg[key],
            parameters={
                'wkv': 'surface_backwards_doppler_frequency_shift_of_radar_wave_due_to_surface_velocity',
                'offset_corrected': str(offset_corrected)
            })

        # Add Doppler anomaly
        dss[key].add_band(
            array=dss[key].anomaly(),
            parameters={
                'wkv': 'anomaly_of_surface_backwards_doppler_centroid_frequency_shift_of_radar_wave'
            })

        # Add wind doppler and its uncertainty as bands
        fww, dfww = dss[key].wind_waves_doppler(wind_fn)
        dss[key].add_band(
            array=fww,
            parameters={
                'wkv': 'surface_backwards_doppler_frequency_shift_of_radar_wave_due_to_wind_waves'
            })
        dss[key].add_band(array=dfww, parameters={'name': 'std_fww'})

        # Calculate range current velocity component
        v_current, std_v, offset_corrected = \
            dss[key].surface_radial_doppler_sea_water_velocity(
                wind_fn, fdg=fdg[key])
        dss[key].add_band(
            array=v_current,
            parameters={
                'wkv': 'surface_radial_doppler_sea_water_velocity',
                'offset_corrected': str(offset_corrected)
            })
        dss[key].add_band(array=std_v, parameters={'name': 'std_Ur'})

        # Set satellite pass
        lon, lat = dss[key].get_geolocation_grids()
        gg = np.gradient(lat, axis=0)
        dss[key].add_band(
            array=gg,
            parameters={
                'name': 'sat_pass',
                'comment': 'ascending pass is >0, descending pass is <0'
            })

        history_message = (
            'sar_doppler.models.Dataset.objects.process("%s") '
            '[geospaas sar_doppler version %s]'
            % (ds, os.getenv('GEOSPAAS_SAR_DOPPLER_VERSION', 'dev')))
        new_uri, created = self.export2netcdf(
            dss[key], ds, history_message=history_message)

    processed = True
    m = self.create_merged_swaths(ds)

    return ds, processed
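# A toy sketch of the border-leveling idea used in process() above: two
# adjacent subswaths are shifted so that their median values in the overlap
# region meet at the midpoint. The arrays and overlap regions below are made
# up for illustration.
def _example_border_leveling():
    import numpy as np
    fdg1 = np.array([1., 2., 3., 4.])  # subswath 1; last two pixels overlap
    fdg2 = np.array([6., 7., 8., 9.])  # subswath 2; first two pixels overlap
    median12 = np.nanmedian(fdg1[2:])  # 3.5, subswath 1 level at the border
    median21 = np.nanmedian(fdg2[:2])  # 6.5, subswath 2 level at the border
    target = np.nanmedian(np.array([median12, median21]))  # 5.0, midpoint
    fdg1 -= median12 - target          # shift subswath 1 up by 1.5
    fdg2 -= median21 - target          # shift subswath 2 down by 1.5
    return fdg1, fdg2                  # both now meet at 5.0 at the border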
def calc_mean_doppler(datetime_start=timezone.datetime(2010, 1, 1, tzinfo=timezone.utc),
                      datetime_end=timezone.datetime(2010, 2, 1, tzinfo=timezone.utc),
                      domain=Domain(NSR().wkt, '-te 10 -44 40 -30 -tr 0.05 0.05')):
    geometry = WKTReader().read(domain.get_border_wkt(nPoints=1000))
    ds = Dataset.objects.filter(
        entry_title__contains='Doppler',
        time_coverage_start__range=[datetime_start, datetime_end],
        geographic_location__geometry__intersects=geometry)
    Va = np.zeros(domain.shape())
    Vd = np.zeros(domain.shape())
    ca = np.zeros(domain.shape())
    cd = np.zeros(domain.shape())
    sa = np.zeros(domain.shape())
    sd = np.zeros(domain.shape())
    sum_var_inv_a = np.zeros(domain.shape())
    sum_var_inv_d = np.zeros(domain.shape())
    for dd in ds:
        uris = dd.dataseturi_set.filter(uri__endswith='nc')
        for uri in uris:
            dop = Doppler(uri.uri)
            # Consider skipping swath 1 and possibly 2...
            dop.reproject(domain)
            # TODO: HARDCODING - MUST BE IMPROVED
            satpass = dop.get_metadata(
                key='Originating file').split('/')[6]
            if satpass == 'ascending':
                try:
                    v_ai = dop['Ur']
                    v_ai[np.abs(v_ai) > 3] = np.nan
                except Exception:
                    # subswath doesn't cover the given domain
                    continue
                # uncertainty: 5 Hz - TODO: estimate this correctly...
                sigma_ai = -np.pi * np.ones(dop.shape()) * 5. \
                    / (112 * np.sin(dop['incidence_angle'] * np.pi / 180.))
                alpha_i = -dop['sensor_azimuth'] * np.pi / 180.
                Va = np.nansum(np.append(
                    np.expand_dims(Va, 2),
                    np.expand_dims(v_ai / np.square(sigma_ai), 2),
                    axis=2), axis=2)
                ca = np.nansum(np.append(
                    np.expand_dims(ca, 2),
                    np.expand_dims(np.cos(alpha_i) / np.square(sigma_ai), 2),
                    axis=2), axis=2)
                sa = np.nansum(np.append(
                    np.expand_dims(sa, 2),
                    np.expand_dims(np.sin(alpha_i) / np.square(sigma_ai), 2),
                    axis=2), axis=2)
                sum_var_inv_a = np.nansum(np.append(
                    np.expand_dims(sum_var_inv_a, 2),
                    np.expand_dims(1. / np.square(sigma_ai), 2),
                    axis=2), axis=2)
            else:
                try:
                    v_dj = -dop['Ur']
                    v_dj[np.abs(v_dj) > 3] = np.nan
                except Exception:
                    # subswath doesn't cover the given domain
                    continue
                # uncertainty: 5 Hz - TODO: estimate this correctly...
                sigma_dj = -np.pi * np.ones(dop.shape()) * 5. \
                    / (112 * np.sin(dop['incidence_angle'] * np.pi / 180.))
                delta_j = (dop['sensor_azimuth'] - 180.) * np.pi / 180.
                Vd = np.nansum(np.append(
                    np.expand_dims(Vd, 2),
                    np.expand_dims(v_dj / np.square(sigma_dj), 2),
                    axis=2), axis=2)
                cd = np.nansum(np.append(
                    np.expand_dims(cd, 2),
                    np.expand_dims(np.cos(delta_j) / np.square(sigma_dj), 2),
                    axis=2), axis=2)
                sd = np.nansum(np.append(
                    np.expand_dims(sd, 2),
                    np.expand_dims(np.sin(delta_j) / np.square(sigma_dj), 2),
                    axis=2), axis=2)
                sum_var_inv_d = np.nansum(np.append(
                    np.expand_dims(sum_var_inv_d, 2),
                    np.expand_dims(1. / np.square(sigma_dj), 2),
                    axis=2), axis=2)
    u = (Va*sd + Vd*sa) / (sa*cd + sd*ca)
    v = (Va*cd - Vd*ca) / (sa*cd + sd*ca)
    sigma_u = np.sqrt(np.square(sd)*sum_var_inv_a
                      + np.square(sa)*sum_var_inv_d) / (sa*cd + sd*ca)
    sigma_v = np.sqrt(np.square(cd)*sum_var_inv_a
                      + np.square(ca)*sum_var_inv_d) / (sa*cd + sd*ca)
    nu = Nansat(array=u, domain=domain)
    nmap = Nansatmap(nu, resolution='h')
    nmap.pcolormesh(nu[1], vmin=-1.5, vmax=1.5, cmap='bwr')
    nmap.add_colorbar()
    nmap.draw_continents()
    nmap.fig.savefig('/vagrant/shared/unwasc.png', bbox_inches='tight')
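# A minimal sketch of the idea behind the u/v reconstruction in
# calc_mean_doppler() above: radial velocities observed along two different
# look directions define a 2x2 linear system for the velocity vector (u, v).
# The angles, values, and the simple sin/cos measurement model below are
# illustrative assumptions; the actual function accumulates variance-weighted
# sums per ascending/descending pass before inverting.
def _example_two_look_inversion():
    import numpy as np
    alpha = np.deg2rad(80.)   # look direction, ascending pass (made up)
    delta = np.deg2rad(100.)  # look direction, descending pass (made up)
    u_true, v_true = 0.4, -0.2
    # Radial components measured along each look direction
    va = u_true * np.sin(alpha) + v_true * np.cos(alpha)
    vd = u_true * np.sin(delta) + v_true * np.cos(delta)
    A = np.array([[np.sin(alpha), np.cos(alpha)],
                  [np.sin(delta), np.cos(delta)]])
    u, v = np.linalg.solve(A, np.array([va, vd]))
    return u, v  # recovers (0.4, -0.2)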
def update_geophysical_doppler(dopplerFile, t0, t1, swath,
                               sensor='ASAR', platform='ENVISAT'):
    dop2correct = Doppler(dopplerFile)
    bandnum = dop2correct._get_band_number({
        'standard_name':
            'surface_backwards_doppler_centroid_frequency_shift_of_radar_wave'
    })
    polarization = dop2correct.get_metadata(bandID=bandnum,
                                            key='polarization')
    lon, lat = dop2correct.get_geolocation_grids()
    indmidaz = lat.shape[0] // 2
    indmidra = lat.shape[1] // 2
    # Latitude increasing along azimuth means an ascending pass
    if lat[indmidaz, indmidra] > lat[0, indmidra]:
        use_pass = 'ascending'
    else:
        use_pass = 'descending'

    # Get datasets
    DS = Dataset.objects.filter(source__platform__short_name=platform,
                                source__instrument__short_name=sensor)
    dopDS = DS.filter(
        parameters__short_name='dca',
        time_coverage_start__gte=t0,
        time_coverage_start__lt=t1)

    swath_files = []
    for dd in dopDS:
        try:
            fn = dd.dataseturi_set.get(
                uri__endswith='subswath%s.nc' % swath).uri
        except DatasetURI.DoesNotExist:
            continue
        n = Doppler(fn)
        try:
            dca = n.anomaly(pol=polarization)
        except OptionError:
            # wrong polarization..
            continue
        lon, lat = n.get_geolocation_grids()
        indmidaz = lat.shape[0] // 2
        indmidra = lat.shape[1] // 2
        if lat[indmidaz, indmidra] > lat[0, indmidra]:
            orbit_pass = 'ascending'
        else:
            orbit_pass = 'descending'
        if use_pass == orbit_pass:
            swath_files.append(fn)

    valid_land = np.array([])
    valid = np.array([])
    for ff in swath_files:
        n = Nansat(ff)
        view_bandnum = n._get_band_number({
            'standard_name': 'sensor_view_angle'
        })
        std_bandnum = n._get_band_number({
            'standard_name':
                'standard_deviation_of_surface_backwards_doppler_centroid_frequency_shift_of_radar_wave',
        })
        pol = n.get_metadata(bandID=std_bandnum, key='polarization')

        # For checking when antenna pattern changes
        if valid.shape == (0,):
            valid = n['valid_doppler']
            dca0 = n['dca']
            dca0[n['valid_doppler'] == 0] = np.nan
            dca0[n['valid_sea_doppler'] == 1] = \
                dca0[n['valid_sea_doppler'] == 1] \
                - n['fww'][n['valid_sea_doppler'] == 1]
            view_angle0 = n[view_bandnum]
        else:
            validn = n['valid_doppler']
            dca0n = n['dca']
            dca0n[n['valid_doppler'] == 0] = np.nan
            dca0n[n['valid_sea_doppler'] == 1] = \
                dca0n[n['valid_sea_doppler'] == 1] \
                - n['fww'][n['valid_sea_doppler'] == 1]
            view_angle0n = n[view_bandnum]
            if not validn.shape == valid.shape:
                if validn.shape[1] > valid.shape[1]:
                    valid = np.resize(valid, (valid.shape[0], validn.shape[1]))
                    dca0 = np.resize(dca0, (dca0.shape[0], dca0n.shape[1]))
                    view_angle0 = np.resize(
                        view_angle0,
                        (view_angle0.shape[0], view_angle0n.shape[1]))
                else:
                    validn = np.resize(validn, (validn.shape[0], valid.shape[1]))
                    dca0n = np.resize(dca0n, (dca0n.shape[0], dca0.shape[1]))
                    view_angle0n = np.resize(
                        view_angle0n,
                        (view_angle0n.shape[0], view_angle0.shape[1]))
            valid = np.concatenate((valid, validn))
            dca0 = np.concatenate((dca0, dca0n))
            view_angle0 = np.concatenate((view_angle0, view_angle0n))

        if valid_land.shape == (0,):
            valid_land = n['valid_land_doppler'][
                n['valid_land_doppler'].any(axis=1)]
            dca = n['dca'][n['valid_land_doppler'].any(axis=1)]
            view_angle = n[view_bandnum][n['valid_land_doppler'].any(axis=1)]
            std_dca = n[std_bandnum][n['valid_land_doppler'].any(axis=1)]
        else:
            vn = n['valid_land_doppler'][n['valid_land_doppler'].any(axis=1)]
            dcan = n['dca'][n['valid_land_doppler'].any(axis=1)]
            view_angle_n = n[view_bandnum][n['valid_land_doppler'].any(axis=1)]
            std_dca_n = n[std_bandnum][n['valid_land_doppler'].any(axis=1)]
            if not vn.shape == valid_land.shape:
                # Resize arrays - just for visual inspection. Actual
                # interpolation is view angle vs doppler anomaly
                if vn.shape[1] > valid_land.shape[1]:
                    valid_land = np.resize(
                        valid_land, (valid_land.shape[0], vn.shape[1]))
                    dca = np.resize(dca, (dca.shape[0], vn.shape[1]))
                    view_angle = np.resize(
                        view_angle, (view_angle.shape[0], vn.shape[1]))
                    std_dca = np.resize(
                        std_dca, (std_dca.shape[0], vn.shape[1]))
                if vn.shape[1] < valid_land.shape[1]:
                    vn = np.resize(vn, (vn.shape[0], valid_land.shape[1]))
                    dcan = np.resize(dcan, (dcan.shape[0], valid_land.shape[1]))
                    view_angle_n = np.resize(
                        view_angle_n,
                        (view_angle_n.shape[0], valid_land.shape[1]))
                    std_dca_n = np.resize(
                        std_dca_n, (std_dca_n.shape[0], valid_land.shape[1]))
            valid_land = np.concatenate((valid_land, vn))
            dca = np.concatenate((dca, dcan))
            view_angle = np.concatenate((view_angle, view_angle_n))
            std_dca = np.concatenate((std_dca, std_dca_n))

    view_angle0 = view_angle0.flatten()
    dca0 = dca0.flatten()
    view_angle0 = np.delete(view_angle0, np.where(np.isnan(dca0)))
    dca0 = np.delete(dca0, np.where(np.isnan(dca0)))
    ind = np.argsort(view_angle0)
    view_angle0 = view_angle0[ind]
    dca0 = dca0[ind]

    # Set dca, view_angle and std_dca to nan where not land
    dca[valid_land == 0] = np.nan
    std_dca[valid_land == 0] = np.nan
    view_angle[valid_land == 0] = np.nan

    dca = dca.flatten()
    std_dca = std_dca.flatten()
    view_angle = view_angle.flatten()

    dca = np.delete(dca, np.where(np.isnan(dca)))
    std_dca = np.delete(std_dca, np.where(np.isnan(std_dca)))
    view_angle = np.delete(view_angle, np.where(np.isnan(view_angle)))

    ind = np.argsort(view_angle)
    view_angle = view_angle[ind]
    dca = dca[ind]
    std_dca = std_dca[ind]

    freqLims = [-200, 200]

    # Show this in presentation:
    plt.subplot(2, 1, 1)
    count, anglebins, dcabins, im = plt.hist2d(
        view_angle0, dca0, 100, cmin=1,
        range=[[np.min(view_angle), np.max(view_angle)], freqLims])
    plt.colorbar()
    plt.title('Wind Doppler subtracted')

    plt.subplot(2, 1, 2)
    count, anglebins, dcabins, im = plt.hist2d(
        view_angle, dca, 100, cmin=1,
        range=[[np.min(view_angle), np.max(view_angle)], freqLims])
    plt.colorbar()
    plt.title('Doppler over land')
    #plt.show()
    plt.close()

    countLims = 200
    #{
    #    0: 600,
    #    1: 250,
    #    2: 500,
    #    3: 140,
    #    4: 130,
    #}

    dcabins_grid, anglebins_grid = np.meshgrid(dcabins[:-1], anglebins[:-1])
    anglebins_vec = anglebins_grid[count > countLims]
    dcabins_vec = dcabins_grid[count > countLims]
    #anglebins_vec = anglebins_grid[count>countLims[swath]]
    #dcabins_vec = dcabins_grid[count>countLims[swath]]

    va4interp = []
    rb4interp = []
    std_rb4interp = []
    for i in range(len(anglebins) - 1):
        if i == 0:
            ind0 = 0
        else:
            ind0 = np.where(view_angle > anglebins[i])[0][0]
        ind1 = np.where(view_angle <= anglebins[i + 1])[0][-1]
        va4interp.append(np.mean(view_angle[ind0:ind1]))
        rb4interp.append(np.median(dca[ind0:ind1]))
        std_rb4interp.append(np.std(dca[ind0:ind1]))
    va4interp = np.array(va4interp)
    rb4interp = np.array(rb4interp)
    std_rb4interp = np.array(std_rb4interp)

    van = dop2correct['sensor_view']
    rbfull = van.copy()
    rbfull[:, :] = np.nan
    # Is there a more efficient method than looping?
    import time
    start_time = time.time()
    for ii in range(len(anglebins) - 1):
        vaii0 = anglebins[ii]
        vaii1 = anglebins[ii + 1]
        rbfull[(van >= vaii0) & (van <= vaii1)] = \
            np.median(dca[(view_angle >= vaii0) & (view_angle <= vaii1)])
    #print("--- %s seconds ---" % (time.time() - start_time))

    plt.plot(np.mean(van, axis=0), np.mean(rbfull, axis=0), '.')
    #plt.plot(anglebins_vec, dcabins_vec, '.')
    #plt.show()
    plt.close()

    #guess = [.1,.1,.1,.1,.1,.1]
    #[a,b,c,d,e,f], params_cov = optimize.curve_fit(rb_model_func,
    #        va4interp, rb4interp, guess)
    #        #anglebins_vec, dcabins_vec, guess)
    #n = Doppler(swath_files[0])
    #van = np.mean(dop2correct['sensor_view'], axis=0)
    #plt.plot(van, rb_model_func(van,a,b,c,d,e,f), 'r--')
    #plt.plot(anglebins_vec, dcabins_vec, '.')
    #plt.show()

    #ww = 1./std_rb4interp
    #ww[np.isinf(ww)] = 0
    #rbinterp = UnivariateSpline(
    #    va4interp,
    #    rb4interp,
    #    w=ww,
    #    k=5)
    #van = dop2correct['sensor_view']
    #y = rbinterp(van.flatten())
    #rbfull = y.reshape(van.shape)
    #plt.plot(np.mean(van, axis=0), np.mean(rbfull, axis=0), 'r--')
    #plt.plot(anglebins_vec, dcabins_vec, '.')
    #plt.show()

    band_name = 'fdg_corrected'
    fdg = dop2correct.anomaly() - rbfull
    #plt.imshow(fdg, vmin=-60, vmax=60)
    #plt.colorbar()
    #plt.show()

    dop2correct.add_band(
        array=fdg,
        parameters={
            'wkv': 'surface_backwards_doppler_frequency_shift_of_radar_wave_due_to_surface_velocity',
            'name': band_name
        })

    current = -(np.pi * (fdg - dop2correct['fww'])
                / 112 / np.sin(dop2correct['incidence_angle'] * np.pi / 180))
    dop2correct.add_band(
        array=current,
        parameters={'name': 'current', 'units': 'm/s', 'minmax': '-2 2'})

    land = np.array([])
    # Add land data for accuracy calculation
    if land.shape == (0,):
        land = dop2correct['valid_land_doppler'][
            dop2correct['valid_land_doppler'].any(axis=1)]
        land_fdg = fdg[dop2correct['valid_land_doppler'].any(axis=1)]
    else:
        landn = dop2correct['valid_land_doppler'][
            dop2correct['valid_land_doppler'].any(axis=1)]
        land_fdgn = fdg[dop2correct['valid_land_doppler'].any(axis=1)]
        if not landn.shape == land.shape:
            if landn.shape[1] > land.shape[1]:
                land = np.resize(land, (land.shape[0], landn.shape[1]))
                land_fdg = np.resize(
                    land_fdg, (land_fdg.shape[0], land_fdgn.shape[1]))
            if landn.shape[1] < land.shape[1]:
                landn = np.resize(landn, (landn.shape[0], land.shape[1]))
                land_fdgn = np.resize(
                    land_fdgn, (land_fdgn.shape[0], land.shape[1]))
        land = np.concatenate((land, landn))
        land_fdg = np.concatenate((land_fdg, land_fdgn))

    module = 'sar_doppler'
    DS = Dataset.objects.get(dataseturi__uri__contains=dop2correct.fileName)
    #fn = '/mnt/10.11.12.232/sat_downloads_asar/level-0/2010-01/gsar_rvl/' \
    #        + dop2correct.fileName.split('/')[-2]+'.gsar'
    mp = media_path(
        module,
        nansat_filename(DS.dataseturi_set.get(uri__endswith='gsar').uri))
    ppath = product_path(
        module,
        nansat_filename(DS.dataseturi_set.get(uri__endswith='gsar').uri))
    # See managers.py -- this must be generalized!
    pngfilename = '%s_subswath_%d.png' % (band_name, swath)
    ncfilename = '%s_subswath_%d.nc' % (band_name, swath)

    # Export to new netcdf with fdg as the only band
    expFile = os.path.join(ppath, ncfilename)
    print('Exporting file: %s\n\n' % expFile)
    dop2correct.export(expFile,
                       bands=[dop2correct._get_band_number(band_name)])
    ncuri = os.path.join('file://localhost', expFile)
    new_uri, created = DatasetURI.objects.get_or_create(uri=ncuri,
                                                        dataset=DS)

    # Reproject to leaflet projection
    xlon, xlat = dop2correct.get_corners()
    dom = Domain(NSR(3857),
                 '-lle %f %f %f %f -tr 1000 1000' % (
                     xlon.min(), xlat.min(), xlon.max(), xlat.max()))
    dop2correct.reproject(dom, eResampleAlg=1, tps=True)

    # Update figure
    dop2correct.write_figure(
        os.path.join(mp, pngfilename),
        clim=[-60, 60],
        bands=band_name,
        mask_array=dop2correct['swathmask'],
        mask_lut={0: [128, 128, 128]},
        transparency=[128, 128, 128])
    print("--- %s seconds ---" % (time.time() - start_time))

    land_fdg[land == 0] = np.nan
    print('Standard deviation over land: %.2f' % np.nanstd(land_fdg))
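# A small sketch of the per-view-angle-bin median used in
# update_geophysical_doppler() above to build the range-bias profile
# (rbfull). The data and bin edges below are made up for illustration;
# `bins` plays the role of `anglebins`.
def _example_binned_median_profile():
    import numpy as np
    view_angle = np.array([18.1, 18.4, 19.2, 19.8, 20.5, 20.9])
    dca = np.array([3., 5., -1., 1., 7., 9.])
    bins = np.array([18., 19., 20., 21.])
    # One median Doppler bias per view-angle bin: [4., 0., 8.]
    profile = np.array([
        np.median(dca[(view_angle >= bins[i]) & (view_angle <= bins[i + 1])])
        for i in range(len(bins) - 1)])
    return profile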