def test_open_ecmwf_y_wind_and_x_wind_at_given_time(self):
     n = Nansat(self.test_file_ecmwf, bands=['y_wind', 'x_wind'],
             netcdf_dim={'time': '1488409200'}, mapperName='netcdf_cf')
     self.assertIsInstance(n, Nansat)
     self.assertEqual(2, len(n.bands()))
     self.assertTrue(n['x_wind_10m'].any())
     self.assertTrue(n['y_wind_10m'].any())
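A hedged side note (an inference from the value, not stated in the source): the netcdf_dim time here is a POSIX-epoch string, while the AROME examples further down pass np.datetime64 objects instead. The epoch value can be produced from a UTC datetime:

import calendar, datetime
# 1488409200 corresponds to 2017-03-01 23:00 UTC
calendar.timegm(datetime.datetime(2017, 3, 1, 23, 0, 0).timetuple())  # -> 1488409200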
Example #2
def distance2coast(dst_domain, distance_src=None):
    """ Estimate distance to the nearest coast (in km) for each pixcel in the
    domain of interest. The method utilizes NASA's OBPG group Distance to the Nearest Coast
    product: https://oceancolor.gsfc.nasa.gov/docs/distfromcoast/. The product is stored in GeoTiff
    format with pixcelsize of 0.01x0.01 degree.

    Parameters
    -----------
    dst_domain : Domain
        destination domain
    distance_src : str
        path to the NASA Distance to the Nearest coast GeoTIFF product

    Returns
    --------
    distance : Nansat
        object with the distance-to-coast mask (km) in the destination projection

    See Also
    ---------
    `<https://oceancolor.gsfc.nasa.gov/docs/distfromcoast/>`_
    `<http://nansat.readthedocs.io/en/latest/source/features.html#differentiating-between-land-and-water>`_

    """
    # Get the path to the auxiliary dataset predefined in an environmental variable
    if distance_src is None:
        distance_src = os.getenv('DIST2COAST')
    # If the path was neither provided nor resolved, or the file does not exist, raise an error
    if distance_src is None or not os.path.exists(distance_src):
        raise IOError('Distance to the nearest coast product does not exist - see Nansat '
                      'documentation to get it (the path is %s)' % distance_src)
    distance = Nansat(distance_src)
    # Reproject the source file on the domain of interest
    distance.reproject(dst_domain, addmask=False)
    return distance
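A minimal usage sketch (not from the source; the Domain spec is illustrative and DIST2COAST is assumed to point at the downloaded OBPG GeoTIFF):

from nansat import Domain

d = Domain(4326, '-lle 25 70 35 72 -ts 200 200')  # lon/lat box, 200x200 pixels
dist = distance2coast(d)  # falls back to the DIST2COAST environment variable
print(dist[1])  # distance-to-coast array (km) on the destination grid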
Example #3
 def incidence_angle(self, rsfile):
     n = Nansat(rsfile)
     inc_min = float(n.get_metadata()['NEAR_RANGE_INCIDENCE_ANGLE'])-0.5
     inc_max = float(n.get_metadata()['FAR_RANGE_INCIDENCE_ANGLE'])+0.5
     inc = n['incidence_angle']
     assert np.all(np.greater_equal(inc[~np.isnan(inc)], inc_min))
     assert np.all(np.less_equal(inc[~np.isnan(inc)], inc_max))
Example #4
    def _get_masked_windspeed(self, landmask=True, icemask=True, windspeedBand="windspeed"):
        try:
            sar_windspeed = self[windspeedBand]
        except:
            raise ValueError("SAR wind has not been calculated, " "execute calculate_wind(wind_direction) first.")

        sar_windspeed[sar_windspeed < 0] = 0
        palette = jet

        if landmask:
            try:  # Land mask
                sar_windspeed = np.ma.masked_where(self.watermask()[1] == 2, sar_windspeed)
                palette.set_bad([0.3, 0.3, 0.3], 1.0)  # Land is masked (bad)
            except:
                print "Land mask not available"

        if icemask:
            try:  # Ice mask
                try:  # first try local file
                    ice = Nansat(
                        "metno_local_hires_seaice_" + self.SAR_image_time.strftime("%Y%m%d"),
                        mapperName="metno_local_hires_seaice",
                    )
                except:  # otherwise Thredds
                    ice = Nansat("metno_hires_seaice:" + self.SAR_image_time.strftime("%Y%m%d"))
                ice.reproject(self)
                iceBandNo = ice._get_band_number({"standard_name": "sea_ice_area_fraction"})
                sar_windspeed[ice[iceBandNo] > 0] = -1
                palette.set_under("w", 1.0)  # Ice is 'under' (-1)
            except:
                print "Ice mask not available"

        return sar_windspeed, palette
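A hedged rendering sketch (assuming w is a SARWind instance on which calculate_wind() has already run): masked land pixels take the palette's 'bad' colour, while ice (-1) falls below vmin and takes the 'under' colour.

import matplotlib.pyplot as plt

speed, palette = w._get_masked_windspeed()
plt.imshow(speed, cmap=palette, vmin=0, vmax=20)
plt.colorbar(extend='min')
plt.savefig('masked_windspeed.png')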
Example #5
 def test_arome_mapper_is_used(self):
     n = Nansat(self.test_file_arome_arctic)
     self.assertEqual(n.mapper, 'arome')
     n = Nansat(self.test_file_arome_metcoop)
     self.assertEqual(n.mapper, 'arome')
     self.assertTrue(n['x_wind_10m'].any())
     self.assertTrue(n['y_wind_10m'].any())
Example #6
 def incidence_angle(self, rsfile):
     sys.stderr.write("\nincidence_angle:" + rsfile + "\n")
     n = Nansat(rsfile)
     inc_min = float(n.get_metadata()["NEAR_RANGE_INCIDENCE_ANGLE"]) - 0.5
     inc_max = float(n.get_metadata()["FAR_RANGE_INCIDENCE_ANGLE"]) + 0.5
     inc = n["incidence_angle"]
     assert np.all(np.greater_equal(inc[~np.isnan(inc)], inc_min))
     assert np.all(np.less_equal(inc[~np.isnan(inc)], inc_max))
Example #7
 def test_open_arome_arctic_y_wind_and_x_wind_at_given_datetime(self):
     n = Nansat(self.test_file_arome_arctic, bands=['y_wind', 'x_wind'],
         netcdf_dim={'time':
             np.datetime64(datetime.datetime(2017,2,28,15,30,0))},
             mapperName='netcdf_cf')
     self.assertIsInstance(n, Nansat)
     self.assertEqual(2, len(n.bands()))
     self.assertTrue(n['x_wind_10m'].any())
     self.assertTrue(n['y_wind_10m'].any())
Example #8
    def write_geotiff(self, filename, landmask=True, icemask=True):

        sar_windspeed, palette = self._get_masked_windspeed(landmask, icemask)

        nansat_geotiff = Nansat(
            array=sar_windspeed, domain=self, parameters={"name": "masked_windspeed", "minmax": "0 20"}
        )

        nansat_geotiff.write_geotiffimage(filename)
Example #9
    def write_geotiff(self, filename, landmask=True, icemask=True):

        sar_windspeed, palette = self._get_masked_windspeed(landmask, icemask)

        nansat_geotiff = Nansat(array=sar_windspeed, domain=self,
                                parameters = {'name': 'masked_windspeed',
                                              'minmax': '0 20'})

        nansat_geotiff.write_geotiffimage(filename)
Example #10
 def export2thredds(self, rsfile):
     ncfile = 'test.nc'
     orig = Nansat(rsfile)
     orig.export2thredds(ncfile, bands = {'incidence_angle': {}})
     copy = Nansat(ncfile)
     inc0 = orig['incidence_angle']
     inc1 = copy['incidence_angle']
     np.testing.assert_allclose(inc0, inc1)
     os.unlink(ncfile)
Example #11
 def test_open_ecmwf_y_wind_and_x_wind_at_given_time(self):
     n = Nansat(self.test_file_ecmwf,
                bands=['y_wind', 'x_wind'],
                netcdf_dim={'time': '1488409200'},
                mapperName='netcdf_cf')
     self.assertIsInstance(n, Nansat)
     self.assertEqual(2, len(n.bands()))
     self.assertTrue(n['x_wind_10m'].any())
     self.assertTrue(n['y_wind_10m'].any())
Example #12
    def write_geotiff(self, filename, landmask=True, icemask=True):

        sar_windspeed, palette = self._get_masked_windspeed(landmask, icemask)

        nansat_geotiff = Nansat(array=sar_windspeed, domain=self,
                                parameters = {'name': 'masked_windspeed',
                                              'minmax': '0 20'})

        nansat_geotiff.write_geotiffimage(filename)
Example #13
 def test_reproject_ecmwf_to_SAR(self):
     sar = Nansat(self.s1aEW)
     wind = Nansat(self.test_file_ecmwf, netcdf_dim={'time':
         np.datetime64(sar.time_coverage_start)},
         bands=['y_wind','x_wind'])
     self.assertTrue(wind['x_wind_10m'].any())
     self.assertTrue(wind['y_wind_10m'].any())
     wind.reproject(sar, addmask=False)
     self.assertTrue(wind['x_wind_10m'].any())
     self.assertTrue(wind['y_wind_10m'].any())
Example #14
 def export_band(self, rsfile):
     sys.stderr.write('\nexport_band:'+rsfile+'\n')
     orig = Nansat(rsfile)
     ncfile = 'test.nc'
     orig.export(ncfile, bands=[orig.get_band_number('incidence_angle')])
     copy = Nansat(ncfile)
     inc0 = orig['incidence_angle']
     inc1 = copy['incidence_angle']
     np.testing.assert_allclose(inc0, inc1)
     os.unlink(ncfile)
Example #15
 def export_band(self, rsfile):
     sys.stderr.write("\nexport_band:" + rsfile + "\n")
     orig = Nansat(rsfile)
     ncfile = "test.nc"
     orig.export(ncfile, bands=[orig._get_band_number("incidence_angle")])
     copy = Nansat(ncfile)
     inc0 = orig["incidence_angle"]
     inc1 = copy["incidence_angle"]
     np.testing.assert_allclose(inc0, inc1)
     os.unlink(ncfile)
Example #16
 def export2thredds(self, rsfile):
     sys.stderr.write("\nexport2thredds:" + rsfile)
     ncfile = "test.nc"
     orig = Nansat(rsfile)
     orig.export2thredds(ncfile, bands={"incidence_angle": {}})
     copy = Nansat(ncfile)
     inc0 = orig["incidence_angle"]
     inc1 = copy["incidence_angle"]
     np.testing.assert_allclose(inc0, inc1)
     os.unlink(ncfile)
Example #17
 def test_reproject_ecmwf_to_SAR(self):
     sar = Nansat(self.s1aEW)
     wind = Nansat(
         self.test_file_ecmwf,
         netcdf_dim={'time': np.datetime64(sar.time_coverage_start)},
         bands=['y_wind', 'x_wind'])
     self.assertTrue(wind['x_wind_10m'].any())
     self.assertTrue(wind['y_wind_10m'].any())
     wind.reproject(sar, addmask=False)
     self.assertTrue(wind['x_wind_10m'].any())
     self.assertTrue(wind['y_wind_10m'].any())
Example #18
 def test_sarwind_using_asar_nansat_ncep_nansat(self):
     if len(self.test_data.asar) == 0:
         raise IOError('No ASAR data - try adding some as ' \
                 'described in templates/openwind_local_archive.py' )
     for i in self.test_data.asar:
         asar = Nansat(self.test_data.asar[i])
         mw = Nansat(self.test_data.ncep4asar[i])
         w = SARWind(asar, wind_direction=mw)
      if sys.version_info < (2, 7):
          assert type(w) == SARWind
      else:
          self.assertIsInstance(w, SARWind)
Example #19
 def test_open_arome_arctic_y_wind_and_x_wind_at_given_datetime(self):
     n = Nansat(self.test_file_arome_arctic,
                bands=['y_wind', 'x_wind'],
                netcdf_dim={
                    'time':
                    np.datetime64(datetime.datetime(2017, 2, 28, 15, 30, 0))
                },
                mapperName='netcdf_cf')
     self.assertIsInstance(n, Nansat)
     self.assertEqual(2, len(n.bands()))
     self.assertTrue(n['x_wind_10m'].any())
     self.assertTrue(n['y_wind_10m'].any())
Example #20
 def test_issue_193(self):
     fn = [
         '/vagrant/shared/test_data/cmems/GLOBAL_ANALYSIS_FORECAST_PHY_001_024-TDS-x10-X30-y55-Y73-201705181200-201705271200.nc',
         '/vagrant/shared/test_data/cmems/ARC-METNO-ARC-TOPAZ4_2_PHYS-FOR-TDS-x10-X30-y55-Y73-20170518-20170526.nc',
         '/vagrant/shared/test_data/cmems/GLOBAL_ANALYSIS_FORECAST_BIO_001_014-TDS-x-180-X179.5-y-89-Y90-20170520-20170527.nc',
     ]
     for f in fn:
         n = Nansat(f)
          self.assertTrue('time_coverage_start' in n.get_metadata())
          self.assertTrue('time_coverage_end' in n.get_metadata())
          self.assertTrue('instrument' in n.get_metadata())
          self.assertTrue('platform' in n.get_metadata())
         self.assertEqual(n.mapper, 'cmems')
Example #21
    def _get_aux_wind_from_str(self, aux_wind_source):

        try:
            # If a complete filename of wind direction source is given
            aux_wind = Nansat(aux_wind_source)
        except:
            # If only mapper name is given, we add the SAR image
            # timestamp as string. Mappers with this functionality
            # implemented will then find a matching file
            aux_wind = Nansat(aux_wind_source +
                                datetime.strftime(
                                self.SAR_image_time, ':%Y%m%d%H%M'))
        return aux_wind
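An illustrative call (the timestamp is an assumption): passing a bare mapper name makes Nansat(aux_wind_source) fail, so the except branch appends the SAR acquisition time.

# With self.SAR_image_time == datetime(2014, 2, 25, 15, 30), the fallback opens
# 'ncep_wind_online:201402251530', letting the matching mapper fetch model data.
aux = self._get_aux_wind_from_str('ncep_wind_online')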
Example #22
 def export(self, rsfile):
     sys.stderr.write('\nexport:'+rsfile+'\n')
     ncfile = 'test.nc'
     orig = Nansat(rsfile)
     sys.stderr.write('\nExporting\n')
     orig.export(ncfile)
     sys.stderr.write('\nOpening Copy\n')
     copy = Nansat(ncfile)
     inc0 = orig['incidence_angle']
     inc1 = copy['incidence_angle']
     sys.stderr.write('\nGet orig grids\n')
     lon0, lat0 = orig.get_geolocation_grids()
     sys.stderr.write('\nGet copy grids\n')
     lon1, lat1 = copy.get_geolocation_grids()
     sys.stderr.write('\nGet orig sigma0_HH\n')
     sigma0_0 = orig['sigma0_HH']
     sys.stderr.write('\nGet copy sigma0_HH\n')
     sigma0_1 = copy['sigma0_HH']
     sys.stderr.write('\nAsserting\n')
     np.testing.assert_allclose(lon0, lon1)
     np.testing.assert_allclose(lat0, lat1)
     # If the next tests fail, it could indicate that the data is flipped
     # check by pyplot.imshow orig vs copy...
     np.testing.assert_allclose(inc0, inc1)
     np.testing.assert_allclose(sigma0_0, sigma0_1)
     os.unlink(ncfile)
Example #23
 def test_open_arome_metcoop_at_given_time(self):
     n = Nansat(self.test_file_arome_metcoop,
                netcdf_dim={'time': '1488153600'},
                mapperName='netcdf_cf')
     self.assertIsInstance(n, Nansat)
     self.assertTrue(n['x_wind_10m'].any())
     self.assertTrue(n['y_wind_10m'].any())
Example #24
    def _get_aux_wind_from_str(self, aux_wind_source, *args, **kwargs):
        try:
            wdir = int(aux_wind_source)
            wdir, wdir_time, wspeed = self._get_aux_wind_from_int(wdir)
        except ValueError:
            import nansat.nansat
            mnames = [
                key.replace('mapper_', '')
                for key in nansat.nansat.nansatMappers
            ]
            # check if aux_wind_source is like 'ncep_wind_online', i.e. only
            # mapper name is given. By adding the SAR image time stamp, we
            # can then get the data online
            if aux_wind_source in mnames:
                aux_wind_source = aux_wind_source + \
                        datetime.strftime(self.SAR_image_time, ':%Y%m%d%H%M')
            aux = Nansat(
                aux_wind_source,
                netcdf_dim={
                    'time': np.datetime64(self.SAR_image_time),
                    'height2': '10',  # height dimension used in AROME arctic
                    # datasets
                    'height3': '10',
                },
                bands=[  # CF standard names of desired bands
                    'x_wind',
                    'y_wind',  # or..:
                    'eastward_wind',
                    'northward_wind',
                ])
            # Set filename of source wind in metadata
            try:
                wind_u_bandNo = aux._get_band_number({
                    'standard_name':
                    'eastward_wind',
                })
            except OptionError:
                wind_u_bandNo = aux._get_band_number({
                    'standard_name': 'x_wind',
                })
            self.set_metadata(
                'WIND_DIRECTION_SOURCE',
                aux.get_metadata(bandID=wind_u_bandNo)['SourceFilename'])
            wdir, wdir_time, wspeed = self._get_wind_direction_array(
                aux, *args, **kwargs)

        return wdir, wdir_time, wspeed
Example #25
 def test_sarwind_using_asar_filename_ncep_nansat(self):
     if len(self.test_data.asar) == 0:
         raise IOError('No ASAR data - try adding some as ' \
                 'described in templates/openwind_local_archive.py' )
     for key in self.test_data.asar:
         mw = Nansat(self.test_data.ncep4asar[key])
         w = SARWind(self.test_data.asar[key], wind_direction=mw)
         self.assertIsInstance(w, SARWind)
Example #26
 def test_nansat_reproject(self):
     if len(self.test_data.asar) == 0:
         raise IOError('No ASAR data - try adding some as ' \
                 'described in templates/openwind_local_archive.py' )
     asar = Nansat(self.test_data.asar['agulhas'])
     asar.resize(pixelsize=500, eResampleAlg=1)
     mw = Nansat(self.test_data.ncep4asar['agulhas'])
     mw.reproject(asar, eResampleAlg=1)
      if sys.version_info < (2, 7):
          assert type(mw[1]) == np.ndarray
      else:
          self.assertIsInstance(mw[1], np.ndarray)
Example #27
    def get_merged_swaths(self, ds, **kwargs):
        """Get merged swaths
        """
        try:
            uri = ds.dataseturi_set.get(uri__contains='merged')
        except DatasetURI.DoesNotExist:
            n = Nansat(
                nansat_filename(
                    ds.dataseturi_set.get(uri__contains='subswath1').uri))
            if not n.has_band('Ur'):
                # Process dataset
                ds, processed = self.process(ds, **kwargs)
            else:
                m = self.create_merged_swaths(ds)
            uri = ds.dataseturi_set.get(uri__contains='merged')
        connection.close()
        m = Nansat(nansat_filename(uri.uri))

        return m
Example #28
def add_polarization(apps, schema_editor):
    ds_model = apps.get_model('sar_doppler', 'dataset')
    extra_model = apps.get_model('sar_doppler', 'sardopplerextrametadata')
    for ds in ds_model.objects.filter(dataseturi__uri__endswith='.gsar'):
        if ds.sardopplerextrametadata_set.all():
            # This should only happen if the migration is interrupted
            # No point in adding polarization if it was already added...
            continue
        fn = nansat_filename(ds.dataseturi_set.get(uri__endswith='.gsar').uri)
        if not os.path.isfile(fn):
            # a missing file will break the migration
            # remove the dataset in case the file doesn't exist
            ds.delete()
            continue
        n = Nansat(fn)
        # Store the polarization and associate the dataset
        extra = extra_model(dataset=ds,
                            polarization=n.get_metadata('polarization'))
        extra.save()
        ds.sardopplerextrametadata_set.add(extra)
Example #29
    def _get_aux_wind_from_str(self, aux_wind_source, *args, **kwargs):
        try:
            wdir = int(aux_wind_source)
            wdir, wdir_time, wspeed = self._get_aux_wind_from_int(wdir)
        except ValueError:
            import nansat.nansat
            mnames = [key.replace('mapper_','') for key in
                    nansat.nansat.nansatMappers]
            # check if aux_wind_source is like 'ncep_wind_online', i.e. only
            # mapper name is given. By adding the SAR image time stamp, we
            # can then get the data online
            if aux_wind_source in mnames:
                aux_wind_source = aux_wind_source + \
                        datetime.strftime(self.SAR_image_time, ':%Y%m%d%H%M')
            aux = Nansat(aux_wind_source, netcdf_dim={
                    'time': np.datetime64(self.SAR_image_time),
                    'height2': '10', # height dimension used in AROME arctic
                                     # datasets
                    'height3': '10',
                },
                bands = [ # CF standard names of desired bands
                    'x_wind',
                    'y_wind', # or..:
                    'eastward_wind',
                    'northward_wind',
                ])
            # Set filename of source wind in metadata
            try:
                wind_u_bandNo = aux._get_band_number({
                            'standard_name': 'eastward_wind',
                        })
            except OptionError:
                wind_u_bandNo = aux._get_band_number({
                            'standard_name': 'x_wind',
                        })
            self.set_metadata('WIND_DIRECTION_SOURCE',
                   aux.get_metadata(bandID=wind_u_bandNo)['SourceFilename'])
            wdir, wdir_time, wspeed = self._get_wind_direction_array(aux,
                                        *args, **kwargs)

        return wdir, wdir_time, wspeed
Example #30
 def export(self, rsfile):
     sys.stderr.write('\nexport:'+rsfile+'\n')
     ncfile = 'test.nc'
     orig = Nansat(rsfile)
     sys.stderr.write('\nExporting\n')
     orig.export(ncfile)
     sys.stderr.write('\nOpening Copy\n')
     copy = Nansat(ncfile)
     inc0 = orig['incidence_angle']
     inc1 = copy['incidence_angle']
     sys.stderr.write('\nGet orig grids\n')
     lon0, lat0 = orig.get_geolocation_grids()
     sys.stderr.write('\nGet copy grids\n')
     lon1, lat1 = copy.get_geolocation_grids()
     sys.stderr.write('\nGet orig sigma0_HH\n')
     sigma0_0 = orig['sigma0_HH']
     sys.stderr.write('\nGet copy sigma0_HH\n')
     sigma0_1 = copy['sigma0_HH']
     sys.stderr.write('\nAsserting\n')
     np.testing.assert_allclose(lon0, lon1)
     np.testing.assert_allclose(lat0, lat1)
     # If the next tests fail, it could indicate that the data is flipped
     # check by pyplot.imshow orig vs copy...
     np.testing.assert_allclose(inc0, inc1)
     np.testing.assert_allclose(sigma0_0, sigma0_1)
     os.unlink(ncfile)
Example #31
    def _get_masked_windspeed(self, landmask=True, icemask=True,
            windspeedBand='windspeed'):
        try:
            sar_windspeed = self[windspeedBand]
        except:
            raise ValueError('SAR wind has not been calculated, ' \
                'execute calculate_wind(wind_direction) first.')

        sar_windspeed[sar_windspeed<0] = 0
        palette = jet

        if landmask:
            try: # Land mask
                sar_windspeed = np.ma.masked_where(
                                    self.watermask()[1]==2, sar_windspeed)
                palette.set_bad([.3, .3, .3], 1.0) # Land is masked (bad)
            except:
                print('Land mask not available')

        if icemask:
            try: # Ice mask
                try: # first try local file
                    ice = Nansat('metno_local_hires_seaice_' +
                            self.SAR_image_time.strftime('%Y%m%d'),
                            mapperName='metno_local_hires_seaice')
                except: # otherwise Thredds
                    ice = Nansat('metno_hires_seaice:' +
                            self.SAR_image_time.strftime('%Y%m%d'))
                ice.reproject(self)
                iceBandNo = ice._get_band_number(
                    {'standard_name': 'sea_ice_area_fraction'})
                sar_windspeed[ice[iceBandNo]>0] = -1
                palette.set_under('w', 1.0) # Ice is 'under' (-1)
            except:
                print('Ice mask not available')

        return sar_windspeed, palette
Example #32
def mean_gc_geostrophic(datetime_start=timezone.datetime(2010,1,1,
    tzinfo=timezone.utc), datetime_end=timezone.datetime(2010,2,1,
    tzinfo=timezone.utc), domain=Domain(NSR().wkt, 
        '-te 10 -44 40 -30 -tr 0.05 0.05')):
    #gc_datasets = Dataset.objects.filter(entry_title__contains='globcurrent',
    #                time_coverage_start__range=[datetime_start,
    #                datetime_end])
    shapeD = domain.shape()
    U = np.zeros((shapeD[0], shapeD[1], 1))
    fn = 'http://tds0.ifremer.fr/thredds/dodsC/CLS-L4-CURGEO_0M-ALT_OI_025-V02.0_FULL_TIME_SERIE'
    dt = datetime_start
    while dt <= datetime_end:
        expFn = '/vagrant/shared/test_data/globcurrent/eastward_geostrophic_current_velocity_%d-%02d-%02d.nc'%(dt.year, dt.month, dt.day)
        #n = Nansat(
        #    fn, date='%d-%02d-%02d'%(dt.year, dt.month, dt.day),
        #    bands=['eastward_geostrophic_current_velocity'])
        #n.export(expFn)
        n = Nansat(expFn, mapper='generic')
        n.reproject(domain, addmask=False)
        u = n['eastward_geostrophic_current_velocity']
        # OK:
        #plt.imshow(u)
        #plt.colorbar()
        #plt.show()
        dt = dt + timezone.timedelta(days=1)
        # skip days where the whole field is NaN; dt is advanced first so the
        # loop cannot get stuck on a missing day
        if np.sum(np.isnan(u)) == u.size:
            continue
        U = np.append(U, np.expand_dims(u, axis=2), axis=2)
    meanU = np.nanmean(U, axis=2)
    nu = Nansat(array=meanU, domain=domain)
    nmap = Nansatmap(nu, resolution='h')
    nmap.pcolormesh(nu[1], vmin=-1.5, vmax=1.5, cmap='jet') #bwr
    nmap.add_colorbar()
    nmap.draw_continents()
    nmap.fig.savefig('/vagrant/shared/u_gc.png', bbox_inches='tight')
Example #33
 def export_band(self, rsfile):
     sys.stderr.write('\nexport_band:'+rsfile+'\n')
     orig = Nansat(rsfile)
     ncfile = 'test.nc'
     orig.export(ncfile, bands=[orig._get_band_number('incidence_angle')])
     copy = Nansat(ncfile)
     inc0 = orig['incidence_angle']
     inc1 = copy['incidence_angle']
     np.testing.assert_allclose(inc0, inc1)
     os.unlink(ncfile)
Example #34
 def export(self, rsfile):
     ncfile = 'test.nc'
     orig = Nansat(rsfile)
     orig.export(ncfile)
     copy = Nansat(ncfile)
     inc0 = orig['incidence_angle']
     inc1 = copy['incidence_angle']
     lon0, lat0 = orig.get_geolocation_grids()
     lon1, lat1 = copy.get_geolocation_grids()
     sigma0_0 = orig['sigma0_HH']
     sigma0_1 = copy['sigma0_HH']
     np.testing.assert_allclose(lon0, lon1)
     np.testing.assert_allclose(lat0, lat1)
     # If the next tests fail, it could indicate that the data is flipped
     # check by pyplot.imshow orig vs copy...
     np.testing.assert_allclose(inc0, inc1)
     np.testing.assert_allclose(sigma0_0, sigma0_1)
     os.unlink(ncfile)
Example #35
 def test_nansat_reproject(self):
     if len(self.test_data.asar)==0:
         raise IOError('No ASAR data - try adding some as ' \
                 'described in templates/openwind_local_archive.py' )
     asar = Nansat(self.test_data.asar[0])
     asar.resize(pixelsize=500, eResampleAlg=1)
     mw = Nansat(self.test_data.ncep4asar[0])
     mw.reproject(asar, eResampleAlg=1)
      if sys.version_info < (2, 7):
          assert type(mw[1]) == np.ndarray
      else:
          self.assertIsInstance(mw[1], np.ndarray)
Example #36
 def test_issue_193(self):
     fn = [
         '/vagrant/shared/test_data/cmems/GLOBAL_ANALYSIS_FORECAST_PHY_001_024-TDS-x10-X30-y55-Y73-201705181200-201705271200.nc',
         '/vagrant/shared/test_data/cmems/ARC-METNO-ARC-TOPAZ4_2_PHYS-FOR-TDS-x10-X30-y55-Y73-20170518-20170526.nc',
         '/vagrant/shared/test_data/cmems/GLOBAL_ANALYSIS_FORECAST_BIO_001_014-TDS-x-180-X179.5-y-89-Y90-20170520-20170527.nc',
     ]
     for f in fn:
         n = Nansat(f)
          self.assertTrue('time_coverage_start' in n.get_metadata())
          self.assertTrue('time_coverage_end' in n.get_metadata())
          self.assertTrue('instrument' in n.get_metadata())
          self.assertTrue('platform' in n.get_metadata())
         self.assertEqual(n.mapper, 'cmems')
Example #37
    def _get_masked_windspeed(self,
                              landmask=True,
                              icemask=True,
                              windspeedBand='windspeed'):
        try:
            sar_windspeed = self[windspeedBand]
        except:
            raise ValueError('SAR wind has not been calculated, ' \
                'execute calculate_wind(wind_direction) first.')

        sar_windspeed[sar_windspeed < 0] = 0
        palette = jet

        if landmask:
            try:  # Land mask
                sar_windspeed = np.ma.masked_where(
                    self.watermask(tps=True)[1] == 2, sar_windspeed)
                palette.set_bad([.3, .3, .3], 1.0)  # Land is masked (bad)
            except:
                print('Land mask not available')

        if icemask:
            try:  # Ice mask
                try:  # first try local file
                    ice = Nansat('metno_local_hires_seaice_' +
                                 self.SAR_image_time.strftime('%Y%m%d'),
                                 mapperName='metno_local_hires_seaice')
                except:  # otherwise Thredds
                    ice = Nansat('metno_hires_seaice:' +
                                 self.SAR_image_time.strftime('%Y%m%d'))
                ice.reproject(self, tps=True)
                iceBandNo = ice._get_band_number(
                    {'standard_name': 'sea_ice_area_fraction'})
                sar_windspeed[ice[iceBandNo] > 0] = -1
                palette.set_under('w', 1.0)  # Ice is 'under' (-1)
            except:
                print('Ice mask not available')

        return sar_windspeed, palette
Example #38
 def resize(self, rsfile):
     sys.stderr.write("\nresize:" + rsfile + "\n")
     n = Nansat(rsfile)
     inc_max = float(n.get_metadata()["FAR_RANGE_INCIDENCE_ANGLE"]) + 0.5
     n.resize(0.5, eResampleAlg=0)
     assert np.nanmax(n["incidence_angle"]) <= inc_max
     n.undo()
     n.resize(0.5, eResampleAlg=1)
     assert np.nanmax(n["incidence_angle"]) <= inc_max
     n.undo()
     n.resize(0.5, eResampleAlg=2)
     assert np.nanmax(n["incidence_angle"]) <= inc_max
     n.undo()
     n.resize(0.5, eResampleAlg=3)
     assert np.nanmax(n["incidence_angle"]) <= inc_max
     n.undo()
     n.resize(0.5, eResampleAlg=4)
     assert np.nanmax(n["incidence_angle"]) <= inc_max
     n.undo()
Example #39
# License:
#-------------------------------------------------------------------------------
import sys, os
home = os.path.expanduser("~")

import numpy as np
import matplotlib.pyplot as plt

from nansat.nansatmap import Nansatmap
from nansat.nansat import Nansat, Domain

iFileName = os.path.join(home,
                'python/nansat/nansat/tests/data/gcps.tif')

# Open an input satellite image with Nansat
n = Nansat(iFileName)

# List bands and georeference of the object
print(n)

# Write picture with map of the file location
n.write_map('map.png')

# Write indexed picture with data from the first band
n.write_figure('rgb.png', clim='hist')

# Reproject input image onto map of Norwegian Coast
# 1. Create domain describing the desired map
# 2. Transform the original satellite image
# 3. Write the transformed image into an RGB picture
dLatlong = Domain("+proj=latlong +datum=WGS84 +ellps=WGS84 +no_defs",
Example #40
# Created:	06.11.2014
# Last modified:11.11.2014 11:23
# Copyright:    (c) NERSC
# License:      
#-------------------------------------------------------------------------------
import sys, os
home = os.path.expanduser("~")

import numpy as np
import matplotlib.pyplot as plt

from nansat.nansatmap import Nansatmap
from nansat.nansat import Nansat, Domain


n = Nansat(os.path.join(home,
        'conferences/ESABigData2014/demo_data/MER_RR__1PRACR20110222_020119_000025973099_00391_46957_0000.N1'))

d = Domain(4326, '-lle 118 28 132 40 -ts 1000 800')

n.reproject(d)

w = Nansat(os.path.join(home,
        'conferences/ESABigData2014/demo_data/gfs20110222/gfs.t00z.master.grbf03'))

w.reproject(d)

L_560 = n['L_560']

L_560[L_560>90] = np.nan

u = w['U']
Example #41
    def __init__(self, fileName, gdalDataset, gdalMetadata,
                 outFolder=downloads, **kwargs):
        ''' Create NCEP VRT '''

        if not os.path.exists(outFolder):
            os.mkdir(outFolder)

        ##############
        # Get time
        ##############
        keywordBase = 'ncep_wind_online'
        if fileName[0:len(keywordBase)] != keywordBase:
            raise WrongMapperError

        timestr = fileName[len(keywordBase)+1::]
        time = datetime.strptime(timestr, '%Y%m%d%H%M')
        print(time)

        ########################################
        # Find and download online grib file
        ########################################
        # Find closest 6 hourly modelrun and forecast hour
        modelRunHour = round((time.hour + time.minute/60.)/6)*6
        nearestModelRun = (datetime(time.year, time.month, time.day)
                           + timedelta(hours=modelRunHour))
        if sys.version_info < (2, 7):
            td = (time - nearestModelRun)
            forecastHour = (td.microseconds +
                            (td.seconds + td.days * 24 * 3600)
                            * 10**6) / 10**6 / 3600.
        else:
            forecastHour = (time - nearestModelRun).total_seconds()/3600.
        if modelRunHour == 24:
            modelRunHour = 0
        if forecastHour < 1.5:
            forecastHour = 0
        else:
            forecastHour = 3
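        # Worked example (illustrative): for an image time of 07:40,
        # modelRunHour = round((7 + 40/60.)/6)*6 = 6, so nearestModelRun is
        # 06:00 the same day and forecastHour = 1.67, i.e. the 3 h forecast.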

        #########################################################
        # Try first to get NRT data from
        # ftp://ftp.ncep.noaa.gov/pub/data/nccf/com/gfs/prod/
        # - available for approximately the latest month
        #########################################################
        url = ('ftp://ftp.ncep.noaa.gov/pub/data/nccf/com/gfs/prod/' +
               'gfs.' + nearestModelRun.strftime('%Y%m%d') +
               '%.2d' % modelRunHour +
               '/gfs.t' + '%.2d' % modelRunHour + 'z.master.grbf' +
               '%.3d' % forecastHour + '.10m.uv.grib2')
        outFileName = os.path.join(outFolder,
                                   ('ncep_gfs_' +
                                    nearestModelRun.strftime('%Y%m%d_%HH_') +
                                    '%.2d' % forecastHour +
                                    '.10m.uv.grib2'))
        if os.path.exists(outFileName):
            print('NCEP wind is already downloaded: ' + outFileName)
        else:
            os.system('curl -so ' + outFileName + ' ' + url)
            if os.path.exists(outFileName):
                print('Downloaded ' + outFileName)
            else:
                print('NRT GRIB file not available: ' + url)
                #########################################################
                # If NRT file not available, search in long term archive
                #########################################################
                url = ('http://nomads.ncdc.noaa.gov/data/gfs4/' +
                       nearestModelRun.strftime('%Y%m/%Y%m%d/'))
                baseName = ('gfs_4_' + nearestModelRun.strftime('%Y%m%d_') +
                            nearestModelRun.strftime('%H%M_') +
                            '%.3d' % forecastHour)
                fileName = baseName + '.grb2'
                outFileName = os.path.join(outFolder, fileName)
                print('Downloading ' + url + fileName)

                # Download subset of grib file
                mapperDir = os.path.dirname(os.path.abspath(__file__))
                get_inv = os.path.join(mapperDir, 'get_inv.pl')
                if not os.path.isfile(get_inv):
                    raise IOError('%s: File not found' % get_inv)
                get_grib = os.path.join(mapperDir, 'get_grib.pl')
                if not os.path.isfile(get_grib):
                    raise IOError('%s: File not found' % get_grib)
                if not os.path.isfile(outFileName):
                    command = (get_inv + ' ' + url + baseName +
                               '.inv | egrep "(:UGRD:10 m |:VGRD:10 m )" | ' +
                               get_grib + ' ' + url + fileName +
                               ' ' + outFileName)
                    os.system(command)
                    if os.path.isfile(outFileName):
                        print('Downloaded ' + fileName + ' to ' + outFolder)
                else:
                    print('Already downloaded %s' % outFileName)
                if not os.path.isfile(outFileName):
                    sys.exit('No NCEP wind files found for requested time')

        ######################################################
        # Open downloaded grib file with a(ny) Nansat mapper
        ######################################################
        w = Nansat(outFileName)
        VRT.__init__(self, vrtDataset=w.vrt.dataset)

        return
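A hedged usage sketch: the mapper is triggered by a pseudo-filename of the form 'ncep_wind_online:YYYYmmddHHMM'; the timestamp below is illustrative.

from nansat import Nansat

# Downloads (or reuses) the matching GFS 10 m wind GRIB file and wraps it
w = Nansat('ncep_wind_online:201411060740', mapperName='ncep_wind_online')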
Example #42
 def setUp(self):
     self.d = Domain(4326, "-te 25 70 35 72 -ts 100 100")
     # define a test Nansat object
     test_domain = Domain(4326, "-lle -180 -90 180 90 -ts 500 500")
     self.n = NANSAT.from_domain(test_domain, array=np.ones([500, 500]))
Example #43
    def average(self,
                files=[],
                bands=[1],
                doReproject=True,
                maskName='mask',
                opener=Nansat,
                threads=1,
                eResampleAlg=0,
                period=(None, None)):
        '''Memory-friendly, multithreaded mosaicking (averaging) of input files

        Convert all input files into Nansat objects, reproject onto the
        Domain of the current object, get bands, from each object,
        calculate average and STD, add averaged bands (and STD) to the current
        object.

        average() tries to get band 'mask' from the input files. The mask
        should have the following coding:
            0 : nodata
            1 : clouds
            2 : land
            64 : valid pixel
        If it gets that band (which can be provided by some mappers or Nansat
        children, e.g. ModisL2Image) it uses it to select averageable pixels
        (i.e. where mask == 64).
        If it cannot locate the band 'mask' it assumes that all pixels are
        averageable except for those out of swath after reprojection.

        average() adds bands to the object, so it works only with empty or
        non-projected objects.

        Parameters
        -----------
        files : list
            list of input files
        bands : list
            list of names/band_numbers to be processed
        doReproject : boolean, [True]
            reproject input files?
        maskName : str, ['mask']
            name of the mask in input files
        opener : child of Nansat, [Nansat]
            This class is used to read input files
        threads : int
            number of parallel processes to use
        eResampleAlg : int, [0]
            algorithm for reprojection, see Nansat.reproject()
        period : [datetime0, datetime1]
            Start and stop datetime objects from python datetime.

        '''
        # shared array for multiple threads
        global sharedArray
        global domain

        # check inputs
        if len(files) == 0:
            self.logger.error('No input files given!')
            return

        # get desired shape
        dstShape = self.shape()
        # preallocate shared mem array
        sharedArray = mp.Array(ctypes.c_float,
                               [0] * (2 + len(bands) + len(bands)) *
                               dstShape[0] * dstShape[1])

        # create list of layers
        domain = Nansat(domain=self)
        layers = [
            Layer(ifile, bands, opener, maskName, doReproject, eResampleAlg,
                  period, self.logger.level) for ifile in files
        ]

        # test in debug
        # sumup(layers[0])

        # prepare pool of processors
        pool = mp.Pool(threads)

        # run reprojection and summing up
        metadata = pool.map(sumup, layers)

        # get band metadata from the first valid file
        for bandsMeta in metadata:
            if type(bandsMeta) is list:
                break

        # average products
        sharedNDArray = mparray2ndarray(
            sharedArray, (2 + len(bands) * 2, dstShape[0], dstShape[1]),
            'float32')

        # cleanup
        pool.terminate()
        pool = None
        layers = None
        metadata = None
        sharedArray = None

        cntMat = sharedNDArray[0]
        maskMat = sharedNDArray[1]
        avgMat = sharedNDArray[2:2 + len(bands)]
        stdMat = sharedNDArray[2 + len(bands):]

        cntMat[cntMat == 0] = np.nan
        for bi, b in enumerate(bands):
            self.logger.debug('    Averaging %s' % b)
            # get average
            avg = avgMat[bi] / cntMat
            # calculate STD
            # STD = sqrt(sum((x-M)^2)/n) = (sqrt((sum(x^2) -
            #                                2*mean(x)*sum(x) +
            #                                sum(mean(x)^2))/n))
            stdMat[bi] = np.sqrt(
                (stdMat[bi] - 2.0 * avg * avgMat[bi] + np.square(avg) * cntMat)
                / cntMat)
            # set mean
            avgMat[bi] = avg

        self.logger.debug('Adding bands')
        # add mask band
        self.logger.debug('    mask')
        self.add_band(array=maskMat,
                      parameters={
                          'name': maskName,
                          'long_name': 'L2-mask',
                          'standard_name': 'status_flag'
                      })

        # add averaged bands with metadata
        for bi, b in enumerate(bands):
            self.logger.debug('    %s' % b)
            # add band and std with metadata
            self.add_band(array=avgMat[bi], parameters=bandsMeta[bi])
            bandsMeta[bi]['name'] = bandsMeta[bi]['name'] + '_std'
            self.add_band(array=stdMat[bi], parameters=bandsMeta[bi])
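A short usage sketch under stated assumptions: average() is defined on nansat's Mosaic class (a Nansat child), the constructor mirrors the Nansat(domain=...) form seen elsewhere in these examples, and the file names are illustrative.

from nansat import Domain
from nansat.mosaic import Mosaic

dom = Domain(4326, '-lle 10 -44 40 -30 -tr 0.05 0.05')
mos = Mosaic(domain=dom)
mos.average(files=['scene1.nc', 'scene2.nc'], bands=['sigma0_HH'], threads=2)
# average() adds 'sigma0_HH' (mean), 'sigma0_HH_std' and 'mask' bands
mos.export('mosaic.nc')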
Example #44
    def get_or_create(self, uri, reprocess=False, *args, **kwargs):
        # ingest file to db
        ds, created = super(DatasetManager,
                            self).get_or_create(uri, *args, **kwargs)

        # set Dataset entry_title
        ds.entry_title = 'SAR NRCS'
        ds.save()

        # Unless reprocess==True, we may not need to do the following... (see
        # managers.py in sar doppler processor)
        #visExists = ... # check if visualization(s) already created
        #if visExists and not reprocess:
        #    warnings.warn('NO VISUALISATIONS CREATED - update managers.py')
        #    return ds, created

        n = Nansat(nansat_filename(uri))
        n.reproject_GCPs()
        n.resize(pixelsize=500)
        lon, lat = n.get_corners()
        lat_max = min(lat.max(), 85)
        d = Domain(
            NSR(3857), '-lle %f %f %f %f -ts %d %d' %
            (lon.min(), lat.min(), lon.max(), lat_max, n.shape()[1],
             n.shape()[0]))
        # Get all NRCS bands
        s0bands = []
        pp = []
        for key, value in n.bands().items():
            try:
                if value['standard_name'] == standard_name:
                    s0bands.append(key)
                    pp.append(value['polarization'])
            except KeyError:
                continue
        # Create data products
        mm = self.__module__.split('.')
        module = '%s.%s' % (mm[0], mm[1])
        mp = media_path(module, n.fileName)
        # ppath = product_path(module, n.fileName)

        # Create png's for each band
        num_products = 0
        for band in s0bands:
            print('Visualize', band)
            s0_tmp = n[band]
            n_tmp = Nansat(domain=n, array=s0_tmp)
            n_tmp.reproject_GCPs()
            n_tmp.reproject(d)

            s0 = n_tmp[1]
            n_tmp = None
            mask = np.ones(s0.shape, np.uint8)
            mask[np.isnan(s0) + (s0 <= 0)] = 0
            s0 = np.log10(s0) * 10.

            meta = n.bands()[band]
            product_filename = '%s_%s.png' % (meta['short_name'],
                                              meta['polarization'])

            nansatFigure(s0, mask, polarization_clims[meta['polarization']][0],
                         polarization_clims[meta['polarization']][1], mp,
                         product_filename)

            # Get DatasetParameter
            param = Parameter.objects.get(short_name=meta['short_name'])
            dsp, created = DatasetParameter.objects.get_or_create(
                dataset=ds, parameter=param)

            # Create Visualization
            geom, created = GeographicLocation.objects.get_or_create(
                geometry=WKTReader().read(n.get_border_wkt()))
            vv, created = Visualization.objects.get_or_create(
                uri='file://localhost%s/%s' % (mp, product_filename),
                title='%s %s polarization' %
                (param.standard_name, meta['polarization']),
                geographic_location=geom)

            # Create VisualizationParameter
            vp, created = VisualizationParameter.objects.get_or_create(
                visualization=vv, ds_parameter=dsp)

        return ds, True
Example #45
    def __init__(self, filename, doppler_file='', *args, **kwargs):
        # TODO: What is the reason?
        super(BayesianWind, self).__init__(filename, *args, **kwargs)
        # TODO: separate calculations between U and V

        # Set apriori (0 step) distribution of the wind field
        u_apriori, v_apriori = np.meshgrid(self.wind_speed_range,
                                           self.wind_speed_range)
        direction_apriori = 180. / np.pi * np.arctan2(
            u_apriori, v_apriori)  # 0 is wind towards North
        speed_apriori = np.sqrt(np.square(u_apriori) + np.square(v_apriori))

        # Get Nansat object of the model wind field
        # model_wind = self.get_source_wind(reprojected=False) # where did this
        # function go? anyway, the below should be equally fine..
        model_wind = Nansat(self.get_metadata('WIND_DIRECTION_SOURCE'))

        # TODO: Move to separate function
        if doppler_file:
            # Get Nansat object of the range Doppler shift
            dop = Nansat(doppler_file)
            # Estimate Doppler uncertainty
            fdg = dop['dop_coef_observed'] - dop['dop_coef_predicted'] - \
                  dop['range_bias_scene'] - dop['azibias']
            fdg[fdg > 100] = np.nan
            fdg[fdg < -100] = np.nan
            mask = np.isnan(fdg)
            fdg[mask] = np.interp(np.flatnonzero(mask), np.flatnonzero(~mask),
                                  fdg[~mask])
            err_fdg = grid_based_uncertainty(fdg, 2)
            err_fdg[err_fdg < self.doppler_err] = self.doppler_err
            dop.add_band(array=err_fdg, parameters={'name': 'err_fdg'})
            dop.reproject(self, eResampleAlg=self.resample_alg, tps=True)
            fdg = dop['dop_coef_observed'] - dop['dop_coef_predicted'] - \
                  dop['range_bias_scene'] - dop['azibias']

            err_fdg = dop['err_fdg']
            # fdg_err = dop['range_bias_std_scene'] - this is not the uncertainty...

        # Estimate sigma0 uncertainty
        s0 = self['sigma0_VV']
        err_s0 = self.s0_err_fac * s0
        #    #mask = np.isnan(fdg)
        #    #fdg[mask] = np.interp(np.flatnonzero(mask), np.flatnonzero(~mask),
        #    #        fdg[~mask])
        #err_s0 = grid_based_uncertainty(s0,2)
        #import ipdb
        #ipdb.set_trace()
        #err_s0[err_s0<self.s0_err_fac*s0] = self.s0_err_fac*s0[err_s0<self.s0_err_fac*s0]

        # Estimate model wind uncertainty (leads to adjustment near fronts)
        #model_px_resolution = int(np.round( 2 * model_wind.get_pixelsize_meters()[0] /
        #        self.get_pixelsize_meters()[0] ))
        uu = model_wind['U']
        vv = model_wind['V']
        err_u = grid_based_uncertainty(uu, 2)
        err_v = grid_based_uncertainty(vv, 2)

        model_wind.add_band(array=err_u, parameters={'name': 'err_u'})
        model_wind.add_band(array=err_v, parameters={'name': 'err_v'})

        # Reproject to SAR image
        model_wind.reproject(self, eResampleAlg=self.resample_alg, tps=True)

        # Get uncertainties in model wind
        err_u = model_wind['err_u']
        # Without the below, uncertainties are lower in uniform areas - this
        # should be quite reasonable...
        #err_u[err_u<self.model_err] = self.model_err
        err_v = model_wind['err_v']
        #err_v[err_v<self.model_err] = self.model_err

        # Assign shape of SAR image to variable imshape
        imshape = self.shape()

        # Initialize result arrays
        ub_modcmod = np.ones(imshape)
        vb_modcmod = np.ones(imshape)
        ub_all = np.ones(imshape)
        vb_all = np.ones(imshape)

        self.has_doppler = np.zeros(imshape)

        model_u = model_wind['U']
        model_v = model_wind['V']
        sar_look = self[self._get_band_number(
            {'standard_name': 'sensor_azimuth_angle'})]
        inci = self['incidence_angle']

        # TODO: speed up processing
        print('Applying Bayesian on one-by-one pixel')
        for i in range(imshape[0]):
            print('Row %d of %d' % (i + 1, imshape[0]))
            for j in range(imshape[1]):
                # There seems to be a problem with Radarsat-2 incidence angles
                # after resize (nan-values and erroneous resampling)
                # THIS IS NOT YET IN ANY GITHUB ISSUES...
                if np.isnan(inci[i, j]) or inci[i, j] < 0 or s0[i, j] == 0.0:
                    ub_modcmod[i, j] = np.nan
                    vb_modcmod[i, j] = np.nan
                    ub_all[i, j] = np.nan
                    vb_all[i, j] = np.nan
                    continue

                # Calculate model cost functions
                cost_model_u = cost_function(u_apriori, model_u[i, j],
                                             err_u[i, j])
                cost_model_v = cost_function(v_apriori, model_v[i, j],
                                             err_v[i, j])

                # Calculate sigma0 cost function
                cmod_s0 = cmod5n_forward(
                    speed_apriori, direction_apriori - sar_look[i, j],
                    np.ones(np.shape(speed_apriori)) * inci[i, j])
                cost_sigma0 = cost_function(cmod_s0, s0[i, j], err_s0[i, j])

                cost = cost_model_v + cost_model_u + cost_sigma0
                ind_min = np.where(cost == np.min(cost, axis=None))
                ub_modcmod[i, j] = u_apriori[ind_min]
                vb_modcmod[i, j] = v_apriori[ind_min]
                # TODO: Simplify comparison
                if (doppler_file and fdg[i, j] > -100 and fdg[i, j] < 100
                        and err_fdg[i, j] != 0
                        and not np.isnan(err_fdg[i, j])):
                    # Calculate Doppler cost function
                    self.has_doppler[i, j] = 1
                    cdop_fdg = cdop(
                        speed_apriori, sar_look[i, j] - direction_apriori,
                        np.ones(np.shape(speed_apriori)) * inci[i, j], 'VV')
                    cost_doppler = cost_function(cdop_fdg, fdg[i, j],
                                                 err_fdg[i, j])
                    cost += cost_doppler

                    ind_min = np.where(cost == np.min(cost, axis=None))
                    ub_all[i, j] = u_apriori[ind_min]
                    vb_all[i, j] = v_apriori[ind_min]

                # Should give uncertainties as well
                # self.rms_u[i,j] = err_u[i,j] + err_v[i,j] + ...

        self.add_band(
            array=np.sqrt(np.square(ub_modcmod) + np.square(vb_modcmod)),
            parameters={
                'wkv': 'wind_speed',
                'name': 'bspeed_modcmod',
                'long_name': 'Bayesian wind speed using model and cmod data'
            })

        self.add_band(array=np.mod(
            180. + 180. / np.pi * np.arctan2(ub_modcmod, vb_modcmod), 360),
                      parameters={
                          'wkv':
                          'wind_from_direction',
                          'name':
                          'bdir_modcmod',
                          'long_name':
                          'Bayesian wind direction using model and cmod data'
                      })

        if doppler_file:
            self.add_band(array=np.sqrt(np.square(ub_all) + np.square(vb_all)),
                          parameters={
                              'wkv': 'wind_speed',
                              'name': 'bspeed_all',
                              'long_name': 'Bayesian wind speed using all data'
                          })

            self.add_band(array=np.mod(
                180. + 180. / np.pi * np.arctan2(ub_all, vb_all), 360),
                          parameters={
                              'wkv': 'wind_from_direction',
                              'name': 'bdir_all',
                              'long_name':
                              'Bayesian wind direction using all data'
                          })
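cost_function and grid_based_uncertainty are used above but not shown; a minimal sketch of the cost term, consistent with how it is called here (an assumed squared, uncertainty-normalized residual, not the authoritative openwind implementation):

def cost_function(theoretical, observed, err):
    # Squared residual normalized by the uncertainty estimate; broadcasting
    # over the apriori wind grid yields a cost surface per pixel.
    return ((observed - theoretical) / err) ** 2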
Example #46
def contrast_transect(n, method='contrast', dir='.', title='', **kwargs):
    if 'ylim' in kwargs:
        ylim = kwargs.pop('ylim')
    if 'semilogy' in kwargs:
        semilogy = kwargs.pop('semilogy')
    if 'points' in kwargs:
        pp0 = kwargs.pop('points')
    else:
        print('Mark 4 points. First and last sections are background. Middle is slick.')
        t, [lon, lat], pp0 = n.get_transect(transect=False, **kwargs)
    t1, [lon1, lat1], p1 = n.get_transect(latlon=False,
                                          points=((pp0[0][0], pp0[1][0]),
                                                  (pp0[0][1], pp0[1][1])),
                                          smoothRadius=kwargs['smoothRadius'],
                                          bandList=kwargs['bandList'])
    t2, [lon2, lat2], p2 = n.get_transect(latlon=False,
                                          points=((pp0[0][1], pp0[1][1]),
                                                  (pp0[0][2], pp0[1][2])),
                                          smoothRadius=kwargs['smoothRadius'],
                                          bandList=kwargs['bandList'])
    t3, [lon3, lat3], p3 = n.get_transect(latlon=False,
                                          points=((pp0[0][2], pp0[1][2]),
                                                  (pp0[0][3], pp0[1][3])),
                                          smoothRadius=kwargs['smoothRadius'],
                                          bandList=kwargs['bandList'])

    orig = Nansat(os.path.abspath(n.fileName).split('.')[0] + '.zip')
    inci1 = orig['incidence_angle'][p1[1], p1[0]]
    inci2 = orig['incidence_angle'][p2[1], p2[0]]
    inci3 = orig['incidence_angle'][p3[1], p3[0]]

    inci = np.array([])
    inci = np.append(inci, inci1)
    inci = np.append(inci, inci2)
    inci = np.append(inci, inci3)

    ii = np.array([])
    ii = np.append(ii, inci1)
    ii = np.append(ii, inci3)
    for i in range(len(t1)):
        bg = np.array([])
        bg = np.append(bg, t1[i])
        bg = np.append(bg, t3[i])

        A = np.array([ii, np.ones(len(ii))])
        w = np.linalg.lstsq(A.T, bg)[0]  # least squares fit
        line = w[0] * inci + w[1]  # regression line

        tr = np.array([])
        tr = np.append(tr, t1[i])
        tr = np.append(tr, t2[i])
        tr = np.append(tr, t3[i])

        fig = plt.figure(num=1)
        plt.plot(inci, tr, label=n.get_metadata('name', kwargs['bandList'][i]))
        plt.plot(inci, line, label='Background')
        plt.legend()
        if title:
            plt.title(title)
        fig.savefig(os.path.join(
            dir, 'transect_' + n.get_metadata('name', kwargs['bandList'][i]) +
            '_' + title.replace(' ', '_').lower() + '.png'),
                    pad_inches=1,
                    bbox_inches='tight')
        fig.clear()
        plt.close()

        if i == 0:
            if method == 'contrast':
                transects = np.array([np.divide(np.subtract(tr, line), line)])
            if method == 'ratio':
                transects = np.array([np.divide(line, tr)])
        else:
            if method == 'ratio':
                # check for negative values and band vs index...
                transects = np.append(transects, [np.divide(line, tr)], axis=0)
            if method == 'contrast':
                transects = np.append(transects,
                                      [np.divide(np.subtract(tr, line), line)],
                                      axis=0)

    pp = np.array([[], []])
    pp = np.append(pp, [p1[0], p1[1]], axis=1)
    pp = np.append(pp, [p2[0], p2[1]], axis=1)
    pp = np.append(pp, [p3[0], p3[1]], axis=1)

    if 'ylim' in locals():
        kwargs['ylim'] = ylim
    if 'semilogy' in locals():
        kwargs['semilogy'] = semilogy

    _show_transect(n,
                   transects,
                   pp,
                   dir=dir,
                   label_prefix=method,
                   title=title,
                   **kwargs)

    lon = np.array([])
    lon = np.append(lon, lon1)
    lon = np.append(lon, lon2)
    lon = np.append(lon, lon3)
    lat = np.array([])
    lat = np.append(lat, lat1)
    lat = np.append(lat, lat2)
    lat = np.append(lat, lat3)

    return transects, [lon, lat], pp0
Example #47
    def get_or_create(self,
                      uri,
                      n_points=10,
                      uri_filter_args=None,
                      uri_service_name=FILE_SERVICE_NAME,
                      uri_service_type=LOCAL_FILE_SERVICE,
                      *args,
                      **kwargs):
        """ Create dataset and corresponding metadata

        Parameters:
        ----------
            uri : str
                  URI to file or stream openable by Nansat
            n_points : int
                  Number of border points (default is 10)
            uri_filter_args : dict
                Extra DatasetURI filter arguments if several datasets can refer to the same URI
            uri_service_name : str
                name of the service which is used  ('dapService', 'fileService', 'http' or 'wms')
            uri_service_type : str
                type of the service which is used  ('OPENDAP', 'local', 'HTTPServer' or 'WMS')

        Returns:
        -------
            dataset and flag
        """
        if not uri_filter_args:
            uri_filter_args = {}

        # Validate uri - this should raise an exception if the uri doesn't point to a valid
        # file or stream
        validate_uri(uri)

        # Several datasets can refer to the same uri (e.g., scatterometers and svp drifters), so we
        # need to pass uri_filter_args
        uris = DatasetURI.objects.filter(uri=uri, **uri_filter_args)
        if len(uris) > 0:
            return uris[0].dataset, False

        # Open file with Nansat
        n = Nansat(nansat_filename(uri), **kwargs)

        # get metadata from Nansat and get objects from vocabularies
        n_metadata = n.get_metadata()

        entry_id = n_metadata.get('entry_id', None)
        # set compulsory metadata (source)
        platform, _ = Platform.objects.get_or_create(
            json.loads(n_metadata['platform']))
        instrument, _ = Instrument.objects.get_or_create(
            json.loads(n_metadata['instrument']))
        specs = n_metadata.get('specs', '')
        source, _ = Source.objects.get_or_create(platform=platform,
                                                 instrument=instrument,
                                                 specs=specs)

        default_char_fields = {
            # Adding NERSC_ in front of the id violates the string representation of the uuid
            #'entry_id': lambda: 'NERSC_' + str(uuid.uuid4()),
            'entry_id': lambda: str(uuid.uuid4()),
            'entry_title': lambda: 'NONE',
            'summary': lambda: 'NONE',
        }

        # set optional CharField metadata from Nansat or from default_char_fields
        options = {}
        try:
            existing_ds = Dataset.objects.get(entry_id=entry_id)
        except Dataset.DoesNotExist:
            existing_ds = None
        for name in default_char_fields:
            if name not in n_metadata:
                warnings.warn('%s is not provided in Nansat metadata!' % name)
                # prevent overwriting of existing values by defaults
                if existing_ds:
                    options[name] = existing_ds.__getattribute__(name)
                else:
                    options[name] = default_char_fields[name]()
            else:
                options[name] = n_metadata[name]

        default_foreign_keys = {
            'gcmd_location': {
                'model': Location,
                'value': pti.get_gcmd_location('SEA SURFACE')
            },
            'data_center': {
                'model': DataCenter,
                'value': pti.get_gcmd_provider('NERSC')
            },
            'ISO_topic_category': {
                'model': ISOTopicCategory,
                'value': pti.get_iso19115_topic_category('Oceans')
            },
        }

        # set optional ForeignKey metadata from Nansat or from default_foreign_keys
        for name in default_foreign_keys:
            value = default_foreign_keys[name]['value']
            model = default_foreign_keys[name]['model']
            if name not in n_metadata:
                warnings.warn('%s is not provided in Nansat metadata!' % name)
            else:
                try:
                    value = json.loads(n_metadata[name])
                except ValueError:
                    warnings.warn(
                        'Invalid %s value in Nansat metadata: %s'
                        % (name, n_metadata[name]))
            if existing_ds:
                options[name] = existing_ds.__getattribute__(name)
            else:
                options[name], _ = model.objects.get_or_create(value)

        # Find coverage to set number of points in the geolocation
        if len(n.vrt.dataset.GetGCPs()) > 0:
            n.reproject_gcps()
        geolocation = GeographicLocation.objects.get_or_create(
            geometry=WKTReader().read(n.get_border_wkt(nPoints=n_points)))[0]

        # create dataset
        # - note: this manager method is named get_or_create but calls
        #   update_or_create here; either the call or the method name
        #   should change - see issue #127
        ds, created = Dataset.objects.update_or_create(
            entry_id=options['entry_id'],
            defaults={
                'time_coverage_start': n.get_metadata('time_coverage_start'),
                'time_coverage_end': n.get_metadata('time_coverage_end'),
                'source': source,
                'geographic_location': geolocation,
                'gcmd_location': options["gcmd_location"],
                'ISO_topic_category': options["ISO_topic_category"],
                "data_center": options["data_center"],
                'entry_title': options["entry_title"],
                'summary': options["summary"]
            })

        # create parameter
        all_band_meta = n.bands()
        for band_id in range(1, len(all_band_meta) + 1):
            band_meta = all_band_meta[band_id]
            standard_name = band_meta.get('standard_name', None)
            short_name = band_meta.get('short_name', None)
            units = band_meta.get('units', None)
            if standard_name in ['latitude', 'longitude', None]:
                continue
            params = Parameter.objects.filter(standard_name=standard_name)
            if params.count() > 1 and short_name is not None:
                params = params.filter(short_name=short_name)
            if params.count() > 1 and units is not None:
                params = params.filter(units=units)
            if params.count() >= 1:
                ds.parameters.add(params[0])

        # create dataset URI
        DatasetURI.objects.get_or_create(name=uri_service_name,
                                         service=uri_service_type,
                                         uri=uri,
                                         dataset=ds)

        return ds, created
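A hypothetical usage sketch of the manager method above; the URI and printed field are made up, and `Dataset.objects` is assumed to be backed by this manager:

uri = 'file://localhost/data/sar_product.nc'  # hypothetical local file URI
ds, created = Dataset.objects.get_or_create(uri, n_points=10)
if created:
    print('Registered new dataset: %s' % ds.entry_id)
else:
    print('Dataset already registered: %s' % ds.entry_id)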
Example #48
 def resize(self, rsfile):
     sys.stderr.write('\nresize:' + rsfile + '\n')
     n = Nansat(rsfile)
     inc_max = float(n.get_metadata()['FAR_RANGE_INCIDENCE_ANGLE']) + 0.5
     # The incidence angle must stay within range for every resampling algorithm
     for resample_alg in range(5):
         n.resize(0.5, eResampleAlg=resample_alg)
         assert np.nanmax(n['incidence_angle']) <= inc_max
         n.undo()
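For readability, the eResampleAlg codes 0-4 looped over above appear to follow GDAL's GDALResampleAlg enumeration:

# GDAL resampling algorithm codes used by Nansat's resize/reproject
RESAMPLE_ALGS = {
    0: 'NearestNeighbour',
    1: 'Bilinear',
    2: 'Cubic',
    3: 'CubicSpline',
    4: 'Lanczos',
}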
Example #49
    def get_or_create(self,
                      uri,
                      n_points=10,
                      uri_filter_args=None,
                      *args,
                      **kwargs):
        ''' Create dataset and corresponding metadata

        Parameters
        ----------
        uri : str
            URI to file or stream openable by Nansat
        n_points : int
            Number of border points (default is 10)
        uri_filter_args : dict
            Extra DatasetURI filter arguments if several datasets can refer to the same URI

        Returns
        -------
        dataset : Dataset
        created : bool
            Flag indicating whether the dataset was created (True) or already existed (False)
        '''
        if not uri_filter_args:
            uri_filter_args = {}

        # Validate uri - this should raise an exception if the uri doesn't point to a valid
        # file or stream
        validate_uri(uri)

        # Several datasets can refer to the same uri (e.g., scatterometers and svp drifters), so we
        # need to pass uri_filter_args
        uris = DatasetURI.objects.filter(uri=uri, **uri_filter_args)
        if len(uris) > 0:
            return uris[0].dataset, False

        # Open file with Nansat
        n = Nansat(nansat_filename(uri), **kwargs)

        # get metadata from Nansat and get objects from vocabularies
        n_metadata = n.get_metadata()

        # set compulsory metadata (source)
        platform, _ = Platform.objects.get_or_create(
            json.loads(n_metadata['platform']))
        instrument, _ = Instrument.objects.get_or_create(
            json.loads(n_metadata['instrument']))
        specs = n_metadata.get('specs', '')
        source, _ = Source.objects.get_or_create(platform=platform,
                                                 instrument=instrument,
                                                 specs=specs)

        default_char_fields = {
            'entry_id': lambda: 'NERSC_' + str(uuid.uuid4()),
            'entry_title': lambda: 'NONE',
            'summary': lambda: 'NONE',
        }

        # set optional CharField metadata from Nansat or from default_char_fields
        options = {}
        for name in default_char_fields:
            if name not in n_metadata:
                warnings.warn('%s is not provided in Nansat metadata!' % name)
                options[name] = default_char_fields[name]()
            else:
                options[name] = n_metadata[name]

        default_foreign_keys = {
            'gcmd_location': {
                'model': Location,
                'value': pti.get_gcmd_location('SEA SURFACE')
            },
            'data_center': {
                'model': DataCenter,
                'value': pti.get_gcmd_provider('NERSC')
            },
            'ISO_topic_category': {
                'model': ISOTopicCategory,
                'value': pti.get_iso19115_topic_category('Oceans')
            },
        }

        # set optional ForeignKey metadata from Nansat or from default_foreign_keys
        for name in default_foreign_keys:
            value = default_foreign_keys[name]['value']
            model = default_foreign_keys[name]['model']
            if name not in n_metadata:
                warnings.warn('%s is not provided in Nansat metadata!' % name)
            else:
                try:
                    value = json.loads(n_metadata[name])
                except ValueError:
                    warnings.warn(
                        'Invalid %s value in Nansat metadata: %s'
                        % (name, n_metadata[name]))
            options[name], _ = model.objects.get_or_create(value)

        # Find coverage to set number of points in the geolocation
        if len(n.vrt.dataset.GetGCPs()) > 0:
            n.reproject_gcps()
        geolocation = GeographicLocation.objects.get_or_create(
            geometry=WKTReader().read(n.get_border_wkt(nPoints=n_points)))[0]

        # create dataset
        ds, created = Dataset.objects.get_or_create(
            time_coverage_start=n.get_metadata('time_coverage_start'),
            time_coverage_end=n.get_metadata('time_coverage_end'),
            source=source,
            geographic_location=geolocation,
            **options)
        # create dataset URI
        ds_uri, _ = DatasetURI.objects.get_or_create(uri=uri, dataset=ds)

        return ds, created
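For reference, a minimal sketch of how the vocabulary defaults above are resolved, assuming `pti` is the pythesint package:

import pythesint as pti

# Controlled-vocabulary entries used as fall-back foreign-key values above
location = pti.get_gcmd_location('SEA SURFACE')
provider = pti.get_gcmd_provider('NERSC')
category = pti.get_iso19115_topic_category('Oceans')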
Example #50
    def __init__(self, filename, doppler_file='', *args, **kwargs):

        super(BayesianWind, self).__init__(filename, *args, **kwargs)

        [u_apriori, v_apriori] = np.meshgrid(self.wind_speed_range,
                self.wind_speed_range)
        direction_apriori = 180./np.pi*np.arctan2(u_apriori, v_apriori) # 0 is wind towards North
        speed_apriori = np.sqrt(np.square(u_apriori) + np.square(v_apriori))

        # Get Nansat object of the model wind field; get_source_wind(reprojected=False)
        # is no longer available, but opening the source directly is equivalent
        model_wind = Nansat(self.get_metadata('WIND_DIRECTION_SOURCE'))

        if doppler_file:
            # Get Nansat object of the range Doppler shift
            dop = Nansat(doppler_file)
            # Estimate Doppler uncertainty
            fdg = dop['dop_coef_observed'] - dop['dop_coef_predicted'] - \
                    dop['range_bias_scene'] - dop['azibias']
            fdg[fdg>100] = np.nan
            fdg[fdg<-100] = np.nan
            mask = np.isnan(fdg)
            fdg[mask] = np.interp(np.flatnonzero(mask), np.flatnonzero(~mask),
                    fdg[~mask])
            err_fdg = grid_based_uncertainty(fdg,2)
            err_fdg[err_fdg<self.doppler_err]=self.doppler_err
            dop.add_band(array=err_fdg, parameters={'name':'err_fdg'})
            dop.reproject(self, eResampleAlg=self.resample_alg, tps=True)
            fdg = dop['dop_coef_observed'] - dop['dop_coef_predicted'] - \
                    dop['range_bias_scene'] - dop['azibias']
            err_fdg = dop['err_fdg']
            #fdg_err = dop['range_bias_std_scene'] - this is not the uncertainty...
        
        # Estimate sigma0 uncertainty as a fixed fraction of sigma0
        # (a grid-based estimate via grid_based_uncertainty was tried and abandoned)
        s0 = self['sigma0_VV']
        err_s0 = self.s0_err_fac * s0

        # Estimate model wind uncertainty (leads to adjustment near fronts)
        uu = model_wind['U']
        vv = model_wind['V']
        err_u = grid_based_uncertainty(uu, 2)
        err_v = grid_based_uncertainty(vv, 2)

        model_wind.add_band(array=err_u, parameters={'name':'err_u'})
        model_wind.add_band(array=err_v, parameters={'name':'err_v'})

        # Reproject to SAR image
        model_wind.reproject(self, eResampleAlg=self.resample_alg, tps=True)

        # Get uncertainties in model wind; they are deliberately not clamped to
        # self.model_err, so uncertainties stay lower in uniform areas, which
        # should be quite reasonable
        err_u = model_wind['err_u']
        err_v = model_wind['err_v']

        # Assign shape of SAR image to variable imshape
        imshape = self.shape()

        # Initialize result arrays
        ub_modcmod = np.ones(imshape)
        vb_modcmod = np.ones(imshape)
        ub_all = np.ones(imshape)
        vb_all = np.ones(imshape)

        self.has_doppler = np.zeros(imshape)

        model_u = model_wind['U']
        model_v = model_wind['V']
        sar_look = self[self._get_band_number({'standard_name':
                'sensor_azimuth_angle'})]
        inci = self['incidence_angle']
        print 'Applying Bayesian on one-by-one pixel'
        for i in range(imshape[0]):
            print 'Row %d of %d'%(i+1,imshape[0])
            for j in range(imshape[1]):
                # There seems to be a problem with Radarsat-2 incidence angles
                # after resize (NaN values and erroneous resampling); this is
                # not yet tracked in any GitHub issue
                if np.isnan(inci[i,j]) or inci[i,j]<0 or s0[i,j]==0.0:
                    ub_modcmod[i,j] = np.nan
                    vb_modcmod[i,j] = np.nan
                    ub_all[i,j] = np.nan
                    vb_all[i,j] = np.nan
                    continue

                # Calculate model cost functions
                cost_model_u = cost_function(u_apriori, model_u[i,j], err_u[i,j])
                cost_model_v = cost_function(v_apriori, model_v[i,j], err_v[i,j])

                # Calculate sigma0 cost function
                cmod_s0 = cmod5n_forward(speed_apriori,
                        direction_apriori-sar_look[i,j],
                        np.ones(np.shape(speed_apriori))*inci[i,j])
                cost_sigma0 = cost_function( cmod_s0, s0[i,j], err_s0[i,j] )

                cost = cost_model_v + cost_model_u + cost_sigma0
                ind_min = np.where(cost==np.min(cost,axis=None))
                ub_modcmod[i,j] = u_apriori[ind_min]
                vb_modcmod[i,j] = v_apriori[ind_min]

                if (doppler_file and 
                        fdg[i,j]>-100 and 
                        fdg[i,j]<100 and 
                        err_fdg[i,j]!=0 and
                        not np.isnan(err_fdg[i,j])):
                    # Calculate Doppler cost function
                    self.has_doppler[i,j] = 1
                    cdop_fdg = cdop(speed_apriori, 
                        sar_look[i,j]-direction_apriori,
                        np.ones(np.shape(speed_apriori))*inci[i,j], 'VV')
                    cost_doppler = cost_function(cdop_fdg, fdg[i,j], err_fdg[i,j])
                    cost += cost_doppler

                    ind_min = np.where(cost==np.min(cost,axis=None))
                    ub_all[i,j] = u_apriori[ind_min]
                    vb_all[i,j] = v_apriori[ind_min]


                # TODO: uncertainties should also be reported, e.g.
                # self.rms_u[i,j] = err_u[i,j] + err_v[i,j] + ...

        self.add_band(
            array = np.sqrt(np.square(ub_modcmod) + np.square(vb_modcmod)),
            parameters={
                'wkv': 'wind_speed',
                'name':'bspeed_modcmod',
                'long_name': 'Bayesian wind speed using model and ' \
                        'cmod data'}
            )
        self.add_band(
            array = np.mod(180. + 180./np.pi*np.arctan2(ub_modcmod,
                vb_modcmod), 360),
            parameters = {
                'wkv': 'wind_from_direction',
                'name': 'bdir_modcmod',
                'long_name': 'Bayesian wind direction using model and ' \
                        'cmod data'}
            )
        if doppler_file:
            self.add_band(
                array = np.sqrt(np.square(ub_all) + np.square(vb_all)),
                parameters={
                    'wkv': 'wind_speed',
                    'name':'bspeed_all',
                    'long_name': 'Bayesian wind speed using all data'}
                )
            self.add_band(
                array = np.mod(180. + 180./np.pi*np.arctan2(ub_all,
                    vb_all), 360),
                parameters = {
                    'wkv': 'wind_from_direction',
                    'name': 'bdir_all',
                    'long_name': 'Bayesian wind direction using all data'}
                )
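`cost_function` and `grid_based_uncertainty` are used above but not defined here; a plausible minimal sketch, assuming the cost is a squared uncertainty-normalized misfit and the uncertainty is a local standard deviation:

import numpy as np
from scipy.ndimage import generic_filter

def cost_function(theoretical, observed, err):
    # Squared misfit normalized by the observation uncertainty (assumption)
    return ((theoretical - observed) / err) ** 2

def grid_based_uncertainty(arr, width):
    # Local standard deviation in a (2*width+1)-pixel neighbourhood (assumption)
    return generic_filter(np.nan_to_num(arr), np.std, size=2 * width + 1)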