    def test_ecmwf_mapper_is_used(self):
        n = Nansat(self.test_file_ecmwf)
        self.assertEqual(n.mapper, 'ecmwf_metno')
        self.assertTrue(n['x_wind_10m'].any())
        self.assertTrue(n['y_wind_10m'].any())

    def test_mapper_opendap_arome(self):
        n = Nansat(self.test_file_arome_opendap, mapperName='opendap_arome')
        self.assertEqual(n.mapper, 'opendap_arome')
        self.assertTrue(n['x_wind_10m'].any())
        self.assertTrue(n['y_wind_10m'].any())
Example #3
    def get_or_create(self,
                      uri,
                      n_points=10,
                      uri_filter_args=None,
                      uri_service_name=FILE_SERVICE_NAME,
                      uri_service_type=LOCAL_FILE_SERVICE,
                      *args,
                      **kwargs):
        """ Create dataset and corresponding metadata

        Parameters
        ----------
            uri : str
                  URI to file or stream openable by Nansat
            n_points : int
                  Number of border points (default is 10)
            uri_filter_args : dict
                Extra DatasetURI filter arguments if several datasets can refer to the same URI
            uri_service_name : str
                name of the service which is used ('dapService', 'fileService', 'http' or 'wms')
            uri_service_type : str
                type of the service which is used ('OPENDAP', 'local', 'HTTPServer' or 'WMS')

        Returns
        -------
            dataset and created flag
        """
        if not uri_filter_args:
            uri_filter_args = {}

        # Validate uri - this should raise an exception if the uri doesn't point to a valid
        # file or stream
        validate_uri(uri)

        # Several datasets can refer to the same uri (e.g., scatterometers and svp drifters), so we
        # need to pass uri_filter_args
        uris = DatasetURI.objects.filter(uri=uri, **uri_filter_args)
        if len(uris) > 0:
            return uris[0].dataset, False

        # Open file with Nansat
        n = Nansat(nansat_filename(uri), **kwargs)

        # get metadata from Nansat and get objects from vocabularies
        n_metadata = n.get_metadata()

        entry_id = n_metadata.get('entry_id', n_metadata.get('id', None))
        # set compulsory metadata (source)
        pp = n_metadata['platform']
        try:
            pp_dict = json.loads(pp)
        except json.JSONDecodeError:
            pp_entry = [elem.strip() for elem in pp.split('>')]
            pp_dict = pti.get_gcmd_platform(pp_entry[-1])
        platform, _ = Platform.objects.get_or_create(pp_dict)
        ii = n_metadata['instrument']
        try:
            ii_dict = json.loads(ii)
        except json.JSONDecodeError:
            ii_entry = [elem.strip() for elem in ii.split('>')]
            ii_dict = pti.get_gcmd_instrument(ii_entry[-1])
        instrument, _ = Instrument.objects.get_or_create(ii_dict)
        specs = n_metadata.get('specs', '')
        source, _ = Source.objects.get_or_create(platform=platform,
                                                 instrument=instrument,
                                                 specs=specs)

        default_char_fields = {
            # Adding NERSC_ in front of the id violates the string representation of the uuid
            #'entry_id': lambda: 'NERSC_' + str(uuid.uuid4()),
            'entry_id': lambda: str(uuid.uuid4()),
            'entry_title': lambda: 'NONE',
            'summary': lambda: 'NONE',
        }

        # set optional CharField metadata from Nansat or from default_char_fields
        options = {}
        try:
            existing_ds = Dataset.objects.get(entry_id=entry_id)
        except Dataset.DoesNotExist:
            existing_ds = None
        for name in default_char_fields:
            if name not in n_metadata:
                warnings.warn('%s is not provided in Nansat metadata!' % name)
                # prevent overwriting of existing values by defaults
                if existing_ds:
                    options[name] = getattr(existing_ds, name)
                else:
                    options[name] = default_char_fields[name]()
            else:
                options[name] = n_metadata[name]

        default_foreign_keys = {
            'gcmd_location': {
                'model': Location,
                'value': pti.get_gcmd_location('SEA SURFACE')
            },
            'data_center': {
                'model': DataCenter,
                'value': pti.get_gcmd_provider('NERSC')
            },
            'ISO_topic_category': {
                'model': ISOTopicCategory,
                'value': pti.get_iso19115_topic_category('Oceans')
            },
        }

        # set optional ForeignKey metadata from Nansat or from default_foreign_keys
        for name in default_foreign_keys:
            value = default_foreign_keys[name]['value']
            model = default_foreign_keys[name]['model']
            if name not in n_metadata:
                warnings.warn('%s is not provided in Nansat metadata!' % name)
            else:
                try:
                    value = json.loads(n_metadata[name])
                except json.JSONDecodeError:
                    warnings.warn(
                        'Invalid %s value in Nansat metadata: %s'
                        % (name, n_metadata[name]))
            if existing_ds:
                options[name] = getattr(existing_ds, name)
            else:
                options[name], _ = model.objects.get_or_create(value)

        # Find coverage to set number of points in the geolocation
        if len(n.vrt.dataset.GetGCPs()) > 0:
            n.reproject_gcps()
        geolocation = GeographicLocation.objects.get_or_create(
            geometry=WKTReader().read(n.get_border_wkt(nPoints=n_points)))[0]

        # create dataset
        # - the get_or_create method should use get_or_create here as well,
        #   or its name should be changed - see issue #127
        ds, created = Dataset.objects.update_or_create(
            entry_id=options['entry_id'],
            defaults={
                'time_coverage_start': n.get_metadata('time_coverage_start'),
                'time_coverage_end': n.get_metadata('time_coverage_end'),
                'source': source,
                'geographic_location': geolocation,
                'gcmd_location': options["gcmd_location"],
                'ISO_topic_category': options["ISO_topic_category"],
                "data_center": options["data_center"],
                'entry_title': options["entry_title"],
                'summary': options["summary"]
            })

        # create parameter
        all_band_meta = n.bands()
        for band_id in range(1, len(all_band_meta) + 1):
            band_meta = all_band_meta[band_id]
            standard_name = band_meta.get('standard_name', None)
            short_name = band_meta.get('short_name', None)
            units = band_meta.get('units', None)
            if standard_name in ['latitude', 'longitude', None]:
                continue
            params = Parameter.objects.filter(standard_name=standard_name)
            if params.count() > 1 and short_name is not None:
                params = params.filter(short_name=short_name)
            if params.count() > 1 and units is not None:
                params = params.filter(units=units)
            if params.count() >= 1:
                ds.parameters.add(params[0])

        # create dataset URI
        DatasetURI.objects.get_or_create(name=uri_service_name,
                                         service=uri_service_type,
                                         uri=uri,
                                         dataset=ds)

        return ds, created
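
A minimal usage sketch of the manager method above (the URI and n_points are
illustrative; assumes a configured geospaas Django project where this manager
is attached to the catalog Dataset model):

uri = 'file://localhost/data/some_scene.nc'  # illustrative path
ds, created = Dataset.objects.get_or_create(uri, n_points=20)
print(ds.entry_id, created)
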
    def get_or_create(self, uri, *args, **kwargs):
        """ Ingest gsar file to geo-spaas db
        """

        ds, created = super(DatasetManager, self).get_or_create(uri, *args, **kwargs)

        # TODO: Check if the following is necessary
        if not isinstance(ds, Dataset):
            return ds, False

        # set Dataset entry_title
        ds.entry_title = 'SAR Doppler'
        ds.save()

        fn = nansat_filename(uri)
        n = Nansat(fn, subswath=0)
        gg = WKTReader().read(n.get_border_wkt())

        if ds.geographic_location.geometry.area > gg.area:
            return ds, False

        # Update dataset border geometry
        # This must be done every time a Doppler file is processed. It is time
        # consuming but apparently the only way to do it. Could be checked
        # though...

        swath_data = {}
        lon = {}
        lat = {}
        astep = {}
        rstep = {}
        az_left_lon = {}
        ra_upper_lon = {}
        az_right_lon = {}
        ra_lower_lon = {}
        az_left_lat = {}
        ra_upper_lat = {}
        az_right_lat = {}
        ra_lower_lat = {}
        num_border_points = 10
        border = 'POLYGON(('

        for i in range(self.N_SUBSWATHS):
            # Read subswaths 
            swath_data[i] = Nansat(fn, subswath=i)

            # Should use nansat.domain.get_border - see nansat issue #166
            # (https://github.com/nansencenter/nansat/issues/166)
            lon[i], lat[i] = swath_data[i].get_geolocation_grids()

            # Cast to int: the values are used as slice steps below (Python 3)
            astep[i] = int(max(1, (lon[i].shape[0] / 2 * 2 - 1) / num_border_points))
            rstep[i] = int(max(1, (lon[i].shape[1] / 2 * 2 - 1) / num_border_points))

            az_left_lon[i] = lon[i][0:-1:astep[i], 0]
            az_left_lat[i] = lat[i][0:-1:astep[i], 0]

            az_right_lon[i] = lon[i][0:-1:astep[i], -1]
            az_right_lat[i] = lat[i][0:-1:astep[i], -1]

            ra_upper_lon[i] = lon[i][-1, 0:-1:rstep[i]]
            ra_upper_lat[i] = lat[i][-1, 0:-1:rstep[i]]

            ra_lower_lon[i] = lon[i][0, 0:-1:rstep[i]]
            ra_lower_lat[i] = lat[i][0, 0:-1:rstep[i]]

        lons = np.concatenate((az_left_lon[0],  ra_upper_lon[0],
                               ra_upper_lon[1], ra_upper_lon[2],
                               ra_upper_lon[3], ra_upper_lon[4],
                               np.flipud(az_right_lon[4]), np.flipud(ra_lower_lon[4]),
                               np.flipud(ra_lower_lon[3]), np.flipud(ra_lower_lon[2]),
                               np.flipud(ra_lower_lon[1]), np.flipud(ra_lower_lon[0])))

        # apply 180 degree correction to longitude - code copied from
        # get_border_wkt...
        # TODO: simplify using np.mod?
        for ilon, llo in enumerate(lons):
            lons[ilon] = copysign(acos(cos(llo * pi / 180.)) / pi * 180,
                                  sin(llo * pi / 180.))
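
        # The loop above is equivalent (up to the +/-180 edge case) to the
        # vectorized form suggested by the TODO:
        #   lons = np.mod(lons + 180., 360.) - 180.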

        lats = np.concatenate((az_left_lat[0], ra_upper_lat[0],
                               ra_upper_lat[1], ra_upper_lat[2],
                               ra_upper_lat[3], ra_upper_lat[4],
                               np.flipud(az_right_lat[4]), np.flipud(ra_lower_lat[4]),
                               np.flipud(ra_lower_lat[3]), np.flipud(ra_lower_lat[2]),
                               np.flipud(ra_lower_lat[1]), np.flipud(ra_lower_lat[0])))

        poly_border = ','.join(str(llo) + ' ' + str(lla) for llo, lla in zip(lons, lats))
        wkt = 'POLYGON((%s))' % poly_border
        new_geometry = WKTReader().read(wkt)

        # Get geolocation of dataset - this must be updated
        geoloc = ds.geographic_location
        # Check geometry, return if it is the same as the stored one
        created = False
        if geoloc.geometry != new_geometry:
            # Change the dataset geolocation to cover all subswaths
            geoloc.geometry = new_geometry
            geoloc.save()
            created = True
        
        return ds, created
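
The astep/rstep arithmetic above subsamples each edge of the geolocation grid
down to roughly num_border_points points. A minimal, self-contained sketch of
that logic on a synthetic grid (the grid shape is illustrative):

import numpy as np

lon = np.linspace(0., 5., 101 * 201).reshape(101, 201)
num_border_points = 10
astep = int(max(1, (lon.shape[0] / 2 * 2 - 1) / num_border_points))  # 10
rstep = int(max(1, (lon.shape[1] / 2 * 2 - 1) / num_border_points))  # 20
az_left = lon[0:-1:astep, 0]    # ~10 points along the left (azimuth) edge
ra_upper = lon[-1, 0:-1:rstep]  # ~10 points along the upper (range) edge
print(az_left.size, ra_upper.size)  # 10 10
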
Example #5
    def __init__(self,
                 filename,
                 gdalDataset,
                 gdalMetadata,
                 outFolder=downloads,
                 **kwargs):
        """Create NCEP VRT"""

        if not os.path.exists(outFolder):
            os.mkdir(outFolder)

        ##############
        # Get time
        ##############
        keyword_base = 'ncep_wind_online'
        if not filename.startswith(keyword_base):
            raise WrongMapperError

        time_str = filename[len(keyword_base) + 1::]
        time = datetime.strptime(time_str, '%Y%m%d%H%M')
        print(time)

        ########################################
        # Find and download online grib file
        ########################################
        # Find closest 6 hourly modelrun and forecast hour
        model_run_hour = round((time.hour + time.minute / 60.) / 6) * 6
        nearest_model_run = (datetime(time.year, time.month, time.day) +
                             timedelta(hours=model_run_hour))
        if sys.version_info < (2, 7):
            td = (time - nearest_model_run)
            forecast_hour = (
                td.microseconds +
                (td.seconds + td.days * 24 * 3600) * 10**6) / 10**6 / 3600.
        else:
            forecast_hour = (time - nearest_model_run).total_seconds() / 3600.
        if model_run_hour == 24:
            model_run_hour = 0
        if forecast_hour < 1.5:
            forecast_hour = 0
        else:
            forecast_hour = 3
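
        # Worked example (illustrative): for time = 2015-03-02 10:45,
        # model_run_hour = round(10.75 / 6) * 6 = 12, the nearest model run
        # is 12:00 the same day, and forecast_hour = (time minus model run)
        # = -1.25 h, which is below 1.5 and therefore snapped to 0.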

        #########################################################
        # Try first to get NRT data from
        # ftp://ftp.ncep.noaa.gov/pub/data/nccf/com/gfs/prod/
        # - available for approximately the latest month
        #########################################################
        url = ('ftp://ftp.ncep.noaa.gov/pub/data/nccf/com/gfs/prod/' + 'gfs.' +
               nearest_model_run.strftime('%Y%m%d') + '%.2d' % model_run_hour +
               '/gfs.t' + '%.2d' % model_run_hour + 'z.master.grbf' +
               '%.3d' % forecast_hour + '.10m.uv.grib2')
        out_filename = os.path.join(
            outFolder,
            ('ncep_gfs_' + nearest_model_run.strftime('%Y%m%d_%HH_') +
             '%.2d' % forecast_hour + '.10m.uv.grib2'))
        if os.path.exists(out_filename):
            print('NCEP wind is already downloaded: ' + out_filename)
        else:
            os.system('curl -so ' + out_filename + ' ' + url)
            if os.path.exists(out_filename):
                print('Downloaded ' + out_filename)
            else:
                print('NRT GRIB file not available: ' + url)
                #########################################################
                # If NRT file not available, search in long term archive
                #########################################################
                url = ('http://nomads.ncdc.noaa.gov/data/gfs4/' +
                       nearest_model_run.strftime('%Y%m/%Y%m%d/'))
                basename = ('gfs_4_' + nearest_model_run.strftime('%Y%m%d_') +
                            nearest_model_run.strftime('%H%M_') +
                            '%.3d' % forecast_hour)
                filename = basename + '.grb2'
                out_filename = os.path.join(outFolder, filename)
                print('Downloading ' + url + filename)

                # Download subset of grib file
                mapper_dir = os.path.dirname(os.path.abspath(__file__))
                get_inv = os.path.join(mapper_dir, 'get_inv.pl')
                if not os.path.isfile(get_inv):
                    raise IOError('%s: File not found' % get_inv)
                get_grib = os.path.join(mapper_dir, 'get_grib.pl')

                if not os.path.isfile(get_grib):
                    raise IOError('%s: File not found' % get_grib)

                if not os.path.isfile(out_filename):
                    command = (get_inv + ' ' + url + basename +
                               '.inv | egrep "(:UGRD:10 m |:VGRD:10 m )" | ' +
                               get_grib + ' ' + url + filename + ' ' +
                               out_filename)
                    os.system(command)
                    if os.path.isfile(out_filename):
                        print('Downloaded ' + filename + ' to ' + outFolder)
                else:
                    print('Already downloaded %s' % out_filename)

                if not os.path.isfile(out_filename):
                    sys.exit('No NCEP wind files found for requested time')

        ######################################################
        # Open downloaded grib file with a(ny) Nansat mapper
        ######################################################
        w = Nansat(out_filename)
        self._copy_from_dataset(w.vrt.dataset)

        return
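
A minimal usage sketch of this mapper's filename convention (the time stamp
is illustrative; downloading requires access to the NCEP servers):

w = Nansat('ncep_wind_online_201503021045')  # keyword + '_' + %Y%m%d%H%M
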
Example #6
# License:
#-------------------------------------------------------------------------------
import sys, os
home = os.path.expanduser("~")

import numpy as np
import matplotlib.pyplot as plt

from nansat.nansatmap import Nansatmap
from nansat.nansat import Nansat, Domain

iFileName = os.path.join(home,
                'python/nansat/nansat/tests/data/gcps.tif')

# Open an input satellite image with Nansat
n = Nansat(iFileName)

# List bands and georeference of the object
print(n)

# Write picture with map of the file location
n.write_map('map.png')

# Write indexed picture with data from the first band
n.write_figure('rgb.png', clim='hist')

# Reproject input image onto map of Norwegian Coast
# 1. Create domain describing the desired map
# 2. Transform the original satellite image
# 3. Write the transformed image into RGB picture
dLatlong = Domain("+proj=latlong +datum=WGS84 +ellps=WGS84 +no_defs",
                  "-te 3 55 30 72 -ts 500 500")  # the extent string was
                                                 # truncated in the original;
                                                 # this one is illustrative
n.reproject(dLatlong)
n.write_figure('rgb_reprojected.png', clim='hist')
    def test_open_arome_metcoop(self):
        n = Nansat(self.test_file_arome_metcoop, mapperName='netcdf_cf')
        self.assertIsInstance(n, Nansat)
        self.assertTrue(n['x_wind_10m'].any())
        self.assertTrue(n['y_wind_10m'].any())

    def test_open_arome_metcoop_at_given_height(self):
        n = Nansat(self.test_file_arome_metcoop,
                   netcdf_dim={'height0': '0'},
                   mapperName='netcdf_cf')
        self.assertIsInstance(n, Nansat)
        self.assertTrue(n['surface_air_pressure'].any())

    def test_netcdf_cf_mapper_is_used(self):
        n = Nansat(self.test_file_arctic)
        self.assertEqual(n.mapper, 'netcdf_cf')

    def test_open_netcdf_cf(self):
        n = Nansat(self.test_file_arctic, mapperName='netcdf_cf')
        self.assertIsInstance(n, Nansat)
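
The tests above show the two ways of steering Nansat used throughout these
examples: forcing a specific mapper and fixing a netCDF dimension. A minimal
sketch (the file path is illustrative):

n = Nansat('/data/arome_arctic_sample.nc',
           mapperName='netcdf_cf',
           netcdf_dim={'height0': '0'})
print(n.mapper)         # 'netcdf_cf'
print(n['x_wind_10m'])  # band for the selected height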
Example #11
    def get_or_create(self, uri, *args, **kwargs):
        """ Ingest gsar file to geo-spaas db
        """

        ds, created = super(DatasetManager,
                            self).get_or_create(uri, *args, **kwargs)
        connection.close()

        # TODO: Check if the following is necessary
        if not isinstance(ds, Dataset):
            return ds, False

        fn = nansat_filename(uri)
        n = Nansat(fn, subswath=0)

        # set Dataset entry_title
        ds.entry_title = n.get_metadata('title')
        ds.save()

        if created:
            from sar_doppler.models import SARDopplerExtraMetadata
            # Store the polarization and associate the dataset
            extra, created_extra = SARDopplerExtraMetadata.objects.get_or_create(
                dataset=ds, polarization=n.get_metadata('polarization'))
            if not created_extra:
                raise ValueError(
                    'Created new dataset but could not create instance of ExtraMetadata'
                )
            ds.sardopplerextrametadata_set.add(extra)
            connection.close()

        gg = WKTReader().read(n.get_border_wkt())

        #lon, lat = n.get_border()
        #ind_near_range = 0
        #ind_far_range = int(lon.size/4)
        #import pyproj
        #geod = pyproj.Geod(ellps='WGS84')
        #angle1,angle2,img_width = geod.inv(lon[ind_near_range], lat[ind_near_range],
        #                                    lon[ind_far_range], lat[ind_far_range])

        # If the area of the dataset geometry is larger than the area of the
        # subswath border, the dataset has already been created (the area
        # should be the total area of all subswaths)
        if np.floor(ds.geographic_location.geometry.area) > np.round(gg.area):
            return ds, False

        swath_data = {}
        lon = {}
        lat = {}
        astep = {}
        rstep = {}
        az_left_lon = {}
        ra_upper_lon = {}
        az_right_lon = {}
        ra_lower_lon = {}
        az_left_lat = {}
        ra_upper_lat = {}
        az_right_lat = {}
        ra_lower_lat = {}
        num_border_points = 10
        border = 'POLYGON(('

        for i in range(self.N_SUBSWATHS):
            # Read subswaths
            swath_data[i] = Nansat(fn, subswath=i)

            lon[i], lat[i] = swath_data[i].get_geolocation_grids()

            astep[i] = int(
                max(1, (lon[i].shape[0] / 2 * 2 - 1) / num_border_points))
            rstep[i] = int(
                max(1, (lon[i].shape[1] / 2 * 2 - 1) / num_border_points))

            az_left_lon[i] = lon[i][0:-1:astep[i], 0]
            az_left_lat[i] = lat[i][0:-1:astep[i], 0]

            az_right_lon[i] = lon[i][0:-1:astep[i], -1]
            az_right_lat[i] = lat[i][0:-1:astep[i], -1]

            ra_upper_lon[i] = lon[i][-1, 0:-1:rstep[i]]
            ra_upper_lat[i] = lat[i][-1, 0:-1:rstep[i]]

            ra_lower_lon[i] = lon[i][0, 0:-1:rstep[i]]
            ra_lower_lat[i] = lat[i][0, 0:-1:rstep[i]]

        lons = np.concatenate(
            (az_left_lon[0], ra_upper_lon[0], ra_upper_lon[1], ra_upper_lon[2],
             ra_upper_lon[3], ra_upper_lon[4], np.flipud(az_right_lon[4]),
             np.flipud(ra_lower_lon[4]), np.flipud(ra_lower_lon[3]),
             np.flipud(ra_lower_lon[2]), np.flipud(ra_lower_lon[1]),
             np.flipud(ra_lower_lon[0]))).round(decimals=3)

        # apply 180 degree correction to longitude - code copied from
        # get_border_wkt...
        # TODO: simplify using np.mod?
        for ilon, llo in enumerate(lons):
            lons[ilon] = copysign(
                acos(cos(llo * pi / 180.)) / pi * 180, sin(llo * pi / 180.))

        lats = np.concatenate(
            (az_left_lat[0], ra_upper_lat[0], ra_upper_lat[1], ra_upper_lat[2],
             ra_upper_lat[3], ra_upper_lat[4], np.flipud(az_right_lat[4]),
             np.flipud(ra_lower_lat[4]), np.flipud(ra_lower_lat[3]),
             np.flipud(ra_lower_lat[2]), np.flipud(ra_lower_lat[1]),
             np.flipud(ra_lower_lat[0]))).round(decimals=3)

        poly_border = ','.join(
            str(llo) + ' ' + str(lla) for llo, lla in zip(lons, lats))
        wkt = 'POLYGON((%s))' % poly_border
        new_geometry = WKTReader().read(wkt)

        # Get or create the new geolocation of the dataset. get_or_create
        # returns created=False if an identical geometry already exists
        # (this may happen when a lot of data is processed)
        ds.geographic_location, cr = GeographicLocation.objects.get_or_create(
            geometry=new_geometry)
        ds.save()  # persist the updated geographic_location
        connection.close()

        return ds, True
Example #12
    def process(self, ds, force=False, *args, **kwargs):
        """ Create data products

        Returns
        -------
        ds : geospaas.catalog.models.Dataset
        processed : bool
            Flag to indicate if the dataset was processed or not
        """
        swath_data = {}

        # Set media path (where images will be stored)
        mp = media_path(
            self.module_name(),
            nansat_filename(ds.dataseturi_set.get(uri__endswith='.gsar').uri))
        # Set product path (where netcdf products will be stored)
        ppath = product_path(
            self.module_name(),
            nansat_filename(ds.dataseturi_set.get(uri__endswith='.gsar').uri))

        # Read subswaths
        dss = {1: None, 2: None, 3: None, 4: None, 5: None}
        processed = [True, True, True, True, True]
        failing = [False, False, False, False, False]
        for i in range(self.N_SUBSWATHS):
            # Check if the data has already been processed
            try:
                fn = nansat_filename(
                    ds.dataseturi_set.get(uri__endswith='%d.nc' % i).uri)
            except DatasetURI.DoesNotExist:
                processed[i] = False
            else:
                dd = Nansat(fn)
                try:
                    std_Ur = dd['std_Ur']
                except ValueError:
                    processed[i] = False
            if processed[i] and not force:
                continue
            # Process from scratch to avoid duplication of bands
            fn = nansat_filename(
                ds.dataseturi_set.get(uri__endswith='.gsar').uri)
            try:
                dd = Doppler(fn, subswath=i)
            except Exception as e:
                logging.error('%s (Filename, subswath [1-5]): (%s, %d)' %
                              (str(e), fn, i + 1))
                failing[i] = True
                continue

            # Check if the file is corrupted
            try:
                inc = dd['incidence_angle']
            except Exception as e:
                logging.error('%s (Filename, subswath [1-5]): (%s, %d)' %
                              (str(e), fn, i + 1))
                failing[i] = True
                continue

            dss[i + 1] = dd

        if all(processed) and not force:
            logging.info("%s: The dataset has already been processed." %
                         nansat_filename(
                             ds.dataseturi_set.get(uri__endswith='.gsar').uri))
            return ds, False

        if all(failing):
            logging.error(
                "Processing of all subswaths is failing: %s" % nansat_filename(
                    ds.dataseturi_set.get(uri__endswith='.gsar').uri))
            return ds, False

        if any(failing):
            logging.error(
                "Some but not all subswaths processed: %s" % nansat_filename(
                    ds.dataseturi_set.get(uri__endswith='.gsar').uri))
            return ds, False

        logging.info(
            "Processing %s" %
            nansat_filename(ds.dataseturi_set.get(uri__endswith='.gsar').uri))

        # Loop subswaths, process each of them
        processed = False

        def get_overlap(d1, d2):
            b1 = d1.get_border_geometry()
            b2 = d2.get_border_geometry()
            intersection = b1.Intersection(b2)
            lo1, la1 = d1.get_geolocation_grids()
            overlap = np.zeros(lo1.shape)
            for i in range(lo1.shape[0]):
                for j in range(lo1.shape[1]):
                    wkt_point = 'POINT(%.5f %.5f)' % (lo1[i, j], la1[i, j])
                    overlap[i, j] = intersection.Contains(
                        ogr.CreateGeometryFromWkt(wkt_point))
            return overlap

        for uri in ds.dataseturi_set.filter(uri__endswith='.nc'):
            logging.debug("%s" % nansat_filename(uri.uri))
        # Find pixels in dss[1] which overlap with pixels in dss[2]
        overlap12 = get_overlap(dss[1], dss[2])
        # Find pixels in dss[2] which overlap with pixels in dss[1]
        overlap21 = get_overlap(dss[2], dss[1])
        # and so on..
        overlap23 = get_overlap(dss[2], dss[3])
        overlap32 = get_overlap(dss[3], dss[2])
        overlap34 = get_overlap(dss[3], dss[4])
        overlap43 = get_overlap(dss[4], dss[3])
        overlap45 = get_overlap(dss[4], dss[5])
        overlap54 = get_overlap(dss[5], dss[4])

        # Get range bias corrected Doppler
        fdg = {}
        fdg[1] = dss[1].anomaly() - dss[1].range_bias()
        fdg[2] = dss[2].anomaly() - dss[2].range_bias()
        fdg[3] = dss[3].anomaly() - dss[3].range_bias()
        fdg[4] = dss[4].anomaly() - dss[4].range_bias()
        fdg[5] = dss[5].anomaly() - dss[5].range_bias()

        # Get median values at overlapping borders
        median12 = np.nanmedian(fdg[1][np.where(overlap12)])
        median21 = np.nanmedian(fdg[2][np.where(overlap21)])
        median23 = np.nanmedian(fdg[2][np.where(overlap23)])
        median32 = np.nanmedian(fdg[3][np.where(overlap32)])
        median34 = np.nanmedian(fdg[3][np.where(overlap34)])
        median43 = np.nanmedian(fdg[4][np.where(overlap43)])
        median45 = np.nanmedian(fdg[4][np.where(overlap45)])
        median54 = np.nanmedian(fdg[5][np.where(overlap54)])

        # Adjust levels to align at subswath borders
        fdg[1] -= median12 - np.nanmedian(np.array([median12, median21]))
        fdg[2] -= median21 - np.nanmedian(np.array([median12, median21]))

        fdg[1] -= median23 - np.nanmedian(np.array([median23, median32]))
        fdg[2] -= median23 - np.nanmedian(np.array([median23, median32]))
        fdg[3] -= median32 - np.nanmedian(np.array([median23, median32]))

        fdg[1] -= median34 - np.nanmedian(np.array([median34, median43]))
        fdg[2] -= median34 - np.nanmedian(np.array([median34, median43]))
        fdg[3] -= median34 - np.nanmedian(np.array([median34, median43]))
        fdg[4] -= median43 - np.nanmedian(np.array([median34, median43]))

        fdg[1] -= median45 - np.nanmedian(np.array([median45, median54]))
        fdg[2] -= median45 - np.nanmedian(np.array([median45, median54]))
        fdg[3] -= median45 - np.nanmedian(np.array([median45, median54]))
        fdg[4] -= median45 - np.nanmedian(np.array([median45, median54]))
        fdg[5] -= median54 - np.nanmedian(np.array([median45, median54]))

        # Correct by land or mean fww
        try:
            wind_fn = nansat_filename(
                Dataset.objects.get(
                    source__platform__short_name='ERA15DAS',
                    time_coverage_start__lte=ds.time_coverage_end,
                    time_coverage_end__gte=ds.time_coverage_start).
                dataseturi_set.get().uri)
        except Exception as e:
            logging.error(
                "%s - in search for ERA15DAS data (%s, %s, %s) " %
                (str(e),
                 nansat_filename(
                     ds.dataseturi_set.get(uri__endswith=".gsar").uri),
                 ds.time_coverage_start, ds.time_coverage_end))
            return ds, False
        connection.close()
        land = np.array([])
        fww = np.array([])
        offset_corrected = 0
        for key in dss.keys():
            land = np.append(
                land, fdg[key][dss[key]['valid_land_doppler'] == 1].flatten())
        if land.any():
            logging.info('Using land for bias corrections')
            land_bias = np.nanmedian(land)
            offset_corrected = 1
        else:
            logging.info('Using CDOP wind-waves Doppler for bias corrections')
            # correct by mean wind doppler
            for key in dss.keys():
                ff = fdg[key].copy()
                # do CDOP correction
                ff[dss[key]['valid_sea_doppler'] == 1] = \
                    ff[dss[key]['valid_sea_doppler'] == 1] \
                    - dss[key].wind_waves_doppler(wind_fn)[0][dss[key]['valid_sea_doppler'] == 1]
                ff[dss[key]['valid_doppler'] == 0] = np.nan
                fww = np.append(fww, ff.flatten())
            land_bias = np.nanmedian(fww)
            if np.isnan(land_bias):
                offset_corrected = 0
                raise Exception('land bias is NaN...')
            else:
                offset_corrected = 1

        for key in dss.keys():
            fdg[key] -= land_bias
            # Set unrealistically high/low values to NaN (ref issue #4 and #5)
            fdg[key][fdg[key] < -100] = np.nan
            fdg[key][fdg[key] > 100] = np.nan
            # Add fdg[key] as band
            dss[key].add_band(
                array=fdg[key],
                parameters={
                    'wkv':
                    'surface_backwards_doppler_frequency_shift_of_radar_wave_due_to_surface_velocity',
                    'offset_corrected': str(offset_corrected)
                })

            # Add Doppler anomaly
            dss[key].add_band(
                array=dss[key].anomaly(),
                parameters={
                    'wkv':
                    'anomaly_of_surface_backwards_doppler_centroid_frequency_shift_of_radar_wave'
                })

            # Add wind doppler and its uncertainty as bands
            fww, dfww = dss[key].wind_waves_doppler(wind_fn)
            dss[key].add_band(
                array=fww,
                parameters={
                    'wkv':
                    'surface_backwards_doppler_frequency_shift_of_radar_wave_due_to_wind_waves'
                })
            dss[key].add_band(array=dfww, parameters={'name': 'std_fww'})

            # Calculate range current velocity component
            v_current, std_v, offset_corrected = \
                dss[key].surface_radial_doppler_sea_water_velocity(wind_fn, fdg=fdg[key])
            dss[key].add_band(array=v_current,
                              parameters={
                                  'wkv':
                                  'surface_radial_doppler_sea_water_velocity',
                                  'offset_corrected': str(offset_corrected)
                              })
            dss[key].add_band(array=std_v, parameters={'name': 'std_Ur'})

            # Set satellite pass
            lon, lat = dss[key].get_geolocation_grids()
            gg = np.gradient(lat, axis=0)
            dss[key].add_band(array=gg,
                              parameters={
                                  'name':
                                  'sat_pass',
                                  'comment':
                                  'ascending pass is >0, descending pass is <0'
                              })

            history_message = (
                'sar_doppler.models.Dataset.objects.process("%s") '
                '[geospaas sar_doppler version %s]' %
                (ds, os.getenv('GEOSPAAS_SAR_DOPPLER_VERSION', 'dev')))
            new_uri, created = self.export2netcdf(
                dss[key], ds, history_message=history_message)
            processed = True

        m = self.create_merged_swaths(ds)

        return ds, processed
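
The level-adjustment block above shifts each subswath toward the median of
the two overlap medians, so the Doppler levels agree at subswath borders. A
toy sketch of the same idea with two one-sample overlaps (values invented):

import numpy as np

fdg1 = np.array([10., 11., 12.])  # swath 1; last sample overlaps swath 2
fdg2 = np.array([15., 16., 17.])  # swath 2; first sample overlaps swath 1
median12 = np.nanmedian(fdg1[-1:])                     # 12.0
median21 = np.nanmedian(fdg2[:1])                      # 15.0
target = np.nanmedian(np.array([median12, median21]))  # 13.5
fdg1 -= median12 - target  # shift swath 1 up by 1.5
fdg2 -= median21 - target  # shift swath 2 down by 1.5
print(fdg1[-1], fdg2[0])   # 13.5 13.5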
Example #13
    def export2netcdf(self, n, ds, history_message=''):

        if not history_message:
            history_message = 'Export to netCDF [geospaas sar_doppler version %s]' % os.getenv(
                'GEOSPAAS_SAR_DOPPLER_VERSION', 'dev')

        ii = int(n.get_metadata('subswath'))

        date_created = datetime.now(timezone.utc)

        fn = self.nc_name(ds, ii)

        original = Nansat(n.get_metadata('Originating file'), subswath=ii)
        metadata = original.get_metadata()

        def pretty_print_gcmd_keywords(kw):
            retval = ''
            value_prev = ''
            for key, value in kw.items():
                if value:
                    if value_prev:
                        retval += ' > '
                    retval += value
                    value_prev = value
            return retval
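
        # For example (illustrative dict): a GCMD provider entry like
        #   {'Short_Name': 'NO/MET', 'Long_Name': 'Norwegian Meteorological Institute'}
        # is rendered as 'NO/MET > Norwegian Meteorological Institute'.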

        # Set global metadata
        metadata['Conventions'] = metadata['Conventions'] + ', ACDD-1.3'
        # id - the ID from the database should be registered in the file if it is not already there
        try:
            n.get_metadata('entry_id')
        except ValueError:
            n.set_metadata(key='entry_id', value=ds.entry_id)
        try:
            n.get_metadata('id')
        except ValueError:
            n.set_metadata(key='id', value=ds.entry_id)
        metadata['date_created'] = date_created.strftime('%Y-%m-%d')
        metadata['date_created_type'] = 'Created'
        metadata['date_metadata_modified'] = date_created.strftime('%Y-%m-%d')
        metadata['processing_level'] = 'Scientific'
        metadata['creator_role'] = 'Investigator'
        metadata['creator_name'] = 'Morten Wergeland Hansen'
        metadata['creator_email'] = '*****@*****.**'
        metadata['creator_institution'] = pretty_print_gcmd_keywords(
            pti.get_gcmd_provider('NO/MET'))

        metadata['project'] = (
            'Norwegian Space Agency project JOP.06.20.2: Reprocessing and '
            'analysis of historical data for future operationalization of '
            'Doppler shifts from SAR')
        metadata['publisher_name'] = 'Morten Wergeland Hansen'
        metadata['publisher_url'] = 'https://www.met.no/'
        metadata['publisher_email'] = '*****@*****.**'

        metadata['references'] = 'https://github.com/mortenwh/openwind'

        metadata['dataset_production_status'] = 'Complete'

        # Get image boundary
        lon, lat = n.get_border()
        boundary = 'POLYGON (('
        for la, lo in list(zip(lat, lon)):
            boundary += '%.2f %.2f, ' % (la, lo)
        boundary = boundary[:-2] + '))'
        # Set bounds as (lat,lon) following ACDD convention and EPSG:4326
        metadata['geospatial_bounds'] = boundary
        metadata['geospatial_bounds_crs'] = 'EPSG:4326'

        # history
        try:
            history = n.get_metadata('history')
        except ValueError:
            metadata['history'] = date_created.isoformat(
            ) + ': ' + history_message
        else:
            metadata['history'] = history + '\n' + date_created.isoformat(
            ) + ': ' + history_message

        # Set metadata from dict (export2thredds could take it as input..)
        for key, val in metadata.items():
            n.set_metadata(key=key, value=val)

        # Export data to netcdf
        logging.info('Exporting %s to %s (subswath %d)' %
                     (n.filename, fn, ii + 1))
        n.export(filename=fn)
        #ww.export2thredds(thredds_fn, mask_name='swathmask', metadata=metadata, no_mask_value=1)

        # Clean netcdf attributes
        history = n.get_metadata('history')
        self.clean_nc_attrs(fn, history)

        # Add netcdf uri to DatasetURIs
        ncuri = 'file://localhost' + fn
        new_uri, created = DatasetURI.objects.get_or_create(uri=ncuri,
                                                            dataset=ds)
        connection.close()

        return new_uri, created
def calc_mean_doppler(datetime_start=timezone.datetime(2010, 1, 1, tzinfo=timezone.utc),
                      datetime_end=timezone.datetime(2010, 2, 1, tzinfo=timezone.utc),
                      domain=Domain(NSR().wkt, '-te 10 -44 40 -30 -tr 0.05 0.05')):
    geometry = WKTReader().read(domain.get_border_wkt(nPoints=1000))
    ds = Dataset.objects.filter(entry_title__contains='Doppler',
            time_coverage_start__range=[datetime_start, datetime_end],
            geographic_location__geometry__intersects=geometry)
    Va = np.zeros(domain.shape())
    Vd = np.zeros(domain.shape())
    ca = np.zeros(domain.shape())
    cd = np.zeros(domain.shape())
    sa = np.zeros(domain.shape())
    sd = np.zeros(domain.shape())
    sum_var_inv_a = np.zeros(domain.shape())
    sum_var_inv_d = np.zeros(domain.shape())
    for dd in ds:
        uris = dd.dataseturi_set.filter(uri__endswith='nc')
        for uri in uris:
            dop = Doppler(uri.uri)
            # Consider skipping swath 1 and possibly 2...
            dop.reproject(domain)
            # TODO: HARDCODING - MUST BE IMPROVED
            satpass = dop.get_metadata(key='Originating file').split('/')[6]
            if satpass == 'ascending':
                try:
                    v_ai = dop['Ur']
                    v_ai[np.abs(v_ai)>3] = np.nan
                except Exception:
                    # subswath doesn't cover the given domain
                    continue
                # uncertainty:
                # 5 Hz - TODO: estimate this correctly...
                sigma_ai = -np.pi*np.ones(dop.shape())*5./(112*np.sin(dop['incidence_angle']*np.pi/180.)) 
                alpha_i = -dop['sensor_azimuth']*np.pi/180.
                Va = np.nansum(np.append(np.expand_dims(Va, 2),
                    np.expand_dims(v_ai/np.square(sigma_ai), 2), axis=2),
                    axis=2)
                ca = np.nansum(np.append(np.expand_dims(ca, 2),
                    np.expand_dims(np.cos(alpha_i)/np.square(sigma_ai), 2),
                    axis=2), axis=2)
                sa = np.nansum(np.append(np.expand_dims(sa, 2),
                    np.expand_dims(np.sin(alpha_i)/np.square(sigma_ai), 2),
                    axis=2), axis=2)
                sum_var_inv_a =np.nansum(np.append(np.expand_dims(sum_var_inv_a, 2),
                    np.expand_dims(1./np.square(sigma_ai), 2), axis=2),
                    axis=2)
            else:
                try:
                    v_dj = -dop['Ur']
                    v_dj[np.abs(v_dj)>3] = np.nan
                except Exception:
                    # subswath doesn't cover the given domain
                    continue
                # 5 Hz - TODO: estimate this correctly...
                sigma_dj = -np.pi*np.ones(dop.shape())*5./(112*np.sin(dop['incidence_angle']*np.pi/180.)) 
                delta_j = (dop['sensor_azimuth']-180.)*np.pi/180.
                Vd = np.nansum(np.append(np.expand_dims(Vd, 2),
                    np.expand_dims(v_dj/np.square(sigma_dj), 2), axis=2),
                    axis=2)
                cd = np.nansum(np.append(np.expand_dims(cd, 2),
                    np.expand_dims(np.cos(delta_j)/np.square(sigma_dj), 2),
                    axis=2), axis=2)
                sd = np.nansum(np.append(np.expand_dims(sd, 2),
                    np.expand_dims(np.sin(delta_j)/np.square(sigma_dj), 2),
                    axis=2), axis=2)
                sum_var_inv_d = np.nansum(np.append(
                    np.expand_dims(sum_var_inv_d, 2), np.expand_dims(
                        1./np.square(sigma_dj), 2), axis=2), axis=2)

    u = (Va*sd + Vd*sa)/(sa*cd + sd*ca)
    v = (Va*cd - Vd*ca)/(sa*cd + sd*ca)
    sigma_u = np.sqrt(np.square(sd)*sum_var_inv_a +
            np.square(sa)*sum_var_inv_d) / (sa*cd + sd*ca)
    sigma_v = np.sqrt(np.square(cd)*sum_var_inv_a +
            np.square(ca)*sum_var_inv_d) / (sa*cd + sd*ca)
    nu = Nansat(array=u, domain=domain)
    nmap = Nansatmap(nu, resolution='h')
    nmap.pcolormesh(nu[1], vmin=-1.5, vmax=1.5, cmap='bwr')
    nmap.add_colorbar()
    nmap.draw_continents()
    nmap.fig.savefig('/vagrant/shared/unwasc.png', bbox_inches='tight')
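
The closing lines of calc_mean_doppler solve for u and v from the weighted
ascending/descending sums. A numeric round-trip check of the inversion (the
forward model used here, Va = u*cos(alpha) + v*sin(alpha) and
Vd = u*cos(delta) - v*sin(delta), is inferred from the inversion formulas
rather than taken from the source):

import numpy as np

u_true, v_true = 0.7, -0.3
alpha, delta = np.deg2rad(25.), np.deg2rad(40.)
ca, sa = np.cos(alpha), np.sin(alpha)
cd, sd = np.cos(delta), np.sin(delta)
Va = u_true * ca + v_true * sa  # ascending radial component
Vd = u_true * cd - v_true * sd  # descending radial component
u = (Va * sd + Vd * sa) / (sa * cd + sd * ca)
v = (Va * cd - Vd * ca) / (sa * cd + sd * ca)
print(np.allclose([u, v], [u_true, v_true]))  # True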
def update_geophysical_doppler(dopplerFile, t0, t1, swath, sensor='ASAR',
        platform='ENVISAT'):

    dop2correct = Doppler(dopplerFile)
    bandnum = dop2correct._get_band_number({
        'standard_name':
            'surface_backwards_doppler_centroid_frequency_shift_of_radar_wave'
    })
    polarization = dop2correct.get_metadata(bandID=bandnum, key='polarization')
    lon, lat = dop2correct.get_geolocation_grids()
    indmidaz = lat.shape[0] // 2
    indmidra = lat.shape[1] // 2
    if lat[indmidaz, indmidra] > lat[0, indmidra]:
        use_pass = 'ascending'   # latitude increases along azimuth
    else:
        use_pass = 'descending'

    # Get datasets
    DS = Dataset.objects.filter(source__platform__short_name=platform,
        source__instrument__short_name=sensor)
    dopDS = DS.filter(
            parameters__short_name='dca',
            time_coverage_start__gte=t0,
            time_coverage_start__lt=t1
        )

    swath_files = []
    for dd in dopDS:
        try:
            fn = dd.dataseturi_set.get(
                    uri__endswith='subswath%s.nc' %swath).uri
        except DatasetURI.DoesNotExist:
            continue
        n = Doppler(fn)
        try:
            dca = n.anomaly(pol=polarization)
        except OptionError: # wrong polarization..
            continue
        lon, lat = n.get_geolocation_grids()
        indmidaz = lat.shape[0] // 2
        indmidra = lat.shape[1] // 2
        if lat[indmidaz, indmidra] > lat[0, indmidra]:
            orbit_pass = 'ascending'
        else:
            orbit_pass = 'descending'
        if use_pass == orbit_pass:
            swath_files.append(fn)

    valid_land = np.array([])
    valid = np.array([])
    for ff in swath_files:
        n = Nansat(ff)
        view_bandnum = n._get_band_number({
            'standard_name': 'sensor_view_angle'
        })
        std_bandnum = n._get_band_number({
            'standard_name': \
                'standard_deviation_of_surface_backwards_doppler_centroid_frequency_shift_of_radar_wave',
        })
        pol = n.get_metadata(bandID=std_bandnum, key='polarization')

        # For checking when antenna pattern changes
        if valid.shape==(0,):
            valid = n['valid_doppler']
            dca0 = n['dca']
            dca0[n['valid_doppler']==0] = np.nan
            dca0[n['valid_sea_doppler']==1] = dca0[n['valid_sea_doppler']==1] - \
                    n['fww'][n['valid_sea_doppler']==1]
            view_angle0 = n[view_bandnum]
        else:
            validn = n['valid_doppler']
            dca0n = n['dca']
            dca0n[n['valid_doppler']==0] = np.nan
            dca0n[n['valid_sea_doppler']==1] = dca0n[n['valid_sea_doppler']==1] - \
                    n['fww'][n['valid_sea_doppler']==1]
            view_angle0n = n[view_bandnum]
            if not validn.shape==valid.shape:
                if validn.shape[1] > valid.shape[1]:
                    valid = np.resize(valid, (valid.shape[0], validn.shape[1]))
                    dca0 = np.resize(dca0, (dca0.shape[0], dca0n.shape[1]))
                    view_angle0 = np.resize(view_angle0,
                        (view_angle0.shape[0], view_angle0n.shape[1]))
                else:
                    validn = np.resize(validn, (validn.shape[0],
                        valid.shape[1]))
                    dca0n = np.resize(dca0n, (dca0n.shape[0], dca0.shape[1]))
                    view_angle0n = np.resize(view_angle0n,
                        (view_angle0n.shape[0], view_angle0.shape[1]))
            valid = np.concatenate((valid, validn))
            dca0 = np.concatenate((dca0, dca0n))
            view_angle0 = np.concatenate((view_angle0, view_angle0n))


        if valid_land.shape==(0,):
            valid_land = n['valid_land_doppler'][n['valid_land_doppler'].any(axis=1)]
            dca = n['dca'][n['valid_land_doppler'].any(axis=1)]
            view_angle = n[view_bandnum][n['valid_land_doppler'].any(axis=1)]
            std_dca = n[std_bandnum][n['valid_land_doppler'].any(axis=1)]
        else:
            vn = n['valid_land_doppler'][n['valid_land_doppler'].any(axis=1)]
            dcan = n['dca'][n['valid_land_doppler'].any(axis=1)]
            view_angle_n = n[view_bandnum][n['valid_land_doppler'].any(axis=1)]
            std_dca_n = n[std_bandnum][n['valid_land_doppler'].any(axis=1)]
            if not vn.shape==valid_land.shape:
                # Resize arrays - just for visual inspection. Actual interpolation
                # is view angle vs doppler anomaly
                if vn.shape[1] > valid_land.shape[1]:
                    valid_land = np.resize(valid_land, (valid_land.shape[0],
                        vn.shape[1]))
                    dca = np.resize(dca, (dca.shape[0],
                        vn.shape[1]))
                    view_angle = np.resize(view_angle, (view_angle.shape[0],
                        vn.shape[1]))
                    std_dca = np.resize(std_dca, (std_dca.shape[0],
                        vn.shape[1]))
                if vn.shape[1] < valid_land.shape[1]:
                    vn = np.resize(vn, (vn.shape[0], valid_land.shape[1]))
                    dcan = np.resize(dcan, (dcan.shape[0], valid_land.shape[1]))
                    view_angle_n = np.resize(view_angle_n, (view_angle_n.shape[0], valid_land.shape[1]))
                    std_dca_n = np.resize(std_dca_n, (std_dca_n.shape[0], valid_land.shape[1]))
            valid_land = np.concatenate((valid_land, vn))
            dca = np.concatenate((dca, dcan))
            view_angle = np.concatenate((view_angle, view_angle_n))
            std_dca = np.concatenate((std_dca, std_dca_n))

    view_angle0 = view_angle0.flatten()
    dca0 = dca0.flatten()
    view_angle0 = np.delete(view_angle0, np.where(np.isnan(dca0)))
    dca0 = np.delete(dca0, np.where(np.isnan(dca0)))
    ind = np.argsort(view_angle0)
    view_angle0 = view_angle0[ind]
    dca0 = dca0[ind]

    # Set dca, view_angle and std_dca to nan where not land
    dca[valid_land==0] = np.nan
    std_dca[valid_land==0] = np.nan
    view_angle[valid_land==0] = np.nan

    dca = dca.flatten()
    std_dca = std_dca.flatten()
    view_angle = view_angle.flatten()

    dca = np.delete(dca, np.where(np.isnan(dca)))
    std_dca = np.delete(std_dca, np.where(np.isnan(std_dca)))
    view_angle = np.delete(view_angle, np.where(np.isnan(view_angle)))

    ind = np.argsort(view_angle)
    view_angle = view_angle[ind]
    dca = dca[ind]
    std_dca = std_dca[ind]

    freqLims = [-200,200]

    # Show this in presentation:
    plt.subplot(2,1,1)
    count, anglebins, dcabins, im = plt.hist2d(view_angle0, dca0, 100, cmin=1,
            range=[[np.min(view_angle), np.max(view_angle)], freqLims])
    plt.colorbar()
    plt.title('Wind Doppler subtracted')

    plt.subplot(2,1,2)
    count, anglebins, dcabins, im = plt.hist2d(view_angle, dca, 100, cmin=1,
            range=[[np.min(view_angle), np.max(view_angle)], freqLims])
    plt.colorbar()
    plt.title('Doppler over land')
    #plt.show()
    plt.close()
    countLims = 200
        #{
        #    0: 600,
        #    1: 250,
        #    2: 500,
        #    3: 140,
        #    4: 130,
        #}

    dcabins_grid, anglebins_grid = np.meshgrid(dcabins[:-1], anglebins[:-1])
    anglebins_vec = anglebins_grid[count>countLims]
    dcabins_vec = dcabins_grid[count>countLims]
    #anglebins_vec = anglebins_grid[count>countLims[swath]]
    #dcabins_vec = dcabins_grid[count>countLims[swath]]


    va4interp = []
    rb4interp = []
    std_rb4interp = []
    for i in range(len(anglebins)-1):
        if i==0:
            ind0 = 0
        else:
            ind0 = np.where(view_angle>anglebins[i])[0][0]
        ind1 = np.where(view_angle<=anglebins[i+1])[0][-1]
        va4interp.append(np.mean(view_angle[ind0:ind1]))
        rb4interp.append(np.median(dca[ind0:ind1]))
        std_rb4interp.append(np.std(dca[ind0:ind1]))
    va4interp = np.array(va4interp)
    rb4interp = np.array(rb4interp)
    std_rb4interp = np.array(std_rb4interp)

    van = dop2correct['sensor_view']
    rbfull = van.copy()
    rbfull[:,:] = np.nan
    # Is there a more efficient method than looping?
    import time
    start_time = time.time()
    for ii in range(len(anglebins)-1):
        vaii0 = anglebins[ii]
        vaii1 = anglebins[ii+1]
        rbfull[(van>=vaii0) & (van<=vaii1)] = \
                np.median(dca[(view_angle>=vaii0) & (view_angle<=vaii1)])
    #print("--- %s seconds ---" % (time.time() - start_time))
    plt.plot(np.mean(van, axis=0), np.mean(rbfull, axis=0), '.')
    #plt.plot(anglebins_vec, dcabins_vec, '.')
    #plt.show()
    plt.close()


    #guess = [.1,.1,.1,.1,.1,.1]
    #[a,b,c,d,e,f], params_cov = optimize.curve_fit(rb_model_func,
    #        va4interp, rb4interp, guess)
    #        #anglebins_vec, dcabins_vec, guess)

    #n = Doppler(swath_files[0])
    #van = np.mean(dop2correct['sensor_view'], axis=0)
    #plt.plot(van, rb_model_func(van,a,b,c,d,e,f), 'r--')
    #plt.plot(anglebins_vec, dcabins_vec, '.')
    #plt.show()

    #ww = 1./std_rb4interp
    #ww[np.isinf(ww)] = 0
    #rbinterp = UnivariateSpline(
    #        va4interp,
    #        rb4interp,
    #        w = ww, 
    #        k = 5
    #    )

    #van = dop2correct['sensor_view']
    #y = rbinterp(van.flatten())
    #rbfull = y.reshape(van.shape)
    #plt.plot(np.mean(van, axis=0), np.mean(rbfull, axis=0), 'r--')
    #plt.plot(anglebins_vec, dcabins_vec, '.')
    #plt.show()

    band_name = 'fdg_corrected'
    fdg = dop2correct.anomaly() - rbfull
    #plt.imshow(fdg, vmin=-60, vmax=60)
    #plt.colorbar()
    #plt.show()
    dop2correct.add_band(array=fdg,
        parameters={
            'wkv':'surface_backwards_doppler_frequency_shift_of_radar_wave_due_to_surface_velocity',
            'name': band_name
        }
    )

    current = -(np.pi*(fdg - dop2correct['fww']) / 112 /
                np.sin(dop2correct['incidence_angle']*np.pi/180))
    dop2correct.add_band(array=current,
            parameters={'name': 'current', 'units': 'm/s', 'minmax': '-2 2'}
        )

    land = np.array([])
    # add land data for accuracy calculation
    if land.shape==(0,):
        land = dop2correct['valid_land_doppler'][dop2correct['valid_land_doppler'].any(axis=1)]
        land_fdg = fdg[dop2correct['valid_land_doppler'].any(axis=1)]
    else:
        landn = dop2correct['valid_land_doppler'][dop2correct['valid_land_doppler'].any(axis=1)]
        land_fdgn = fdg[dop2correct['valid_land_doppler'].any(axis=1)]
        if not landn.shape==land.shape:
            if landn.shape[1] > land.shape[1]:
                land = np.resize(land, (land.shape[0], landn.shape[1]))
                land_fdg = np.resize(land_fdg, (land_fdg.shape[0],
                    land_fdgn.shape[1]))
            if landn.shape[1] < land.shape[1]:
                landn = np.resize(landn, (landn.shape[0], land.shape[1]))
                land_fdgn = np.resize(land_fdgn, (land_fdgn.shape[0],
                    land.shape[1]))
        land = np.concatenate((land, landn))
        land_fdg = np.concatenate((land_fdg, land_fdgn))

    module = 'sar_doppler'
    DS = Dataset.objects.get(dataseturi__uri__contains=dop2correct.fileName)
    #fn = '/mnt/10.11.12.232/sat_downloads_asar/level-0/2010-01/gsar_rvl/' \
    #        + dop2correct.fileName.split('/')[-2]+'.gsar'
    mp = media_path(module, nansat_filename( DS.dataseturi_set.get(
            uri__endswith='gsar').uri))
    ppath = product_path(module, nansat_filename( DS.dataseturi_set.get(
            uri__endswith='gsar').uri))
    # See managers.py -- this must be generalized!
    pngfilename = '%s_subswath_%d.png'%(band_name, swath)
    ncfilename = '%s_subswath_%d.nc'%(band_name, swath)

    # Export to new netcdf with fdg as the only band
    expFile = os.path.join(ppath, ncfilename)
    print('Exporting file: %s\n\n' % expFile)
    dop2correct.export(expFile, bands=[dop2correct._get_band_number(band_name)])
    ncuri = os.path.join('file://localhost', expFile)
    new_uri, created = DatasetURI.objects.get_or_create(uri=ncuri,
            dataset=DS)

    # Reproject to leaflet projection
    xlon, xlat = dop2correct.get_corners()
    dom = Domain(NSR(3857),
            '-lle %f %f %f %f -tr 1000 1000' % (
                xlon.min(), xlat.min(), xlon.max(), xlat.max()))
    dop2correct.reproject(dom, eResampleAlg=1, tps=True)

    # Update figure
    dop2correct.write_figure(os.path.join(mp, pngfilename),
            clim = [-60,60],
            bands=band_name,
            mask_array=dop2correct['swathmask'],
            mask_lut={0:[128,128,128]}, transparency=[128,128,128])
    print("--- %s seconds ---" % (time.time() - start_time))

    land_fdg[land==0] = np.nan
    print('Standard deviation over land: %.2f' % np.nanstd(land_fdg))
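
As one answer to the "more efficient method than looping?" question in
update_geophysical_doppler, the per-bin median lookup can be vectorized with
np.digitize. A sketch under the assumption that anglebins is sorted (as
returned by plt.hist2d); bin-edge handling may differ slightly from the loop:

import numpy as np

def binned_median_lookup(van, view_angle, dca, anglebins):
    """Median of dca per view-angle bin, broadcast onto the van grid."""
    nbins = len(anglebins) - 1
    # Bin index of every (view_angle, dca) sample
    idx = np.clip(np.digitize(view_angle, anglebins) - 1, 0, nbins - 1)
    medians = np.array([np.median(dca[idx == i]) if np.any(idx == i)
                        else np.nan for i in range(nbins)])
    # Bin index of every pixel in the full sensor-view-angle grid
    full_idx = np.clip(np.digitize(van, anglebins) - 1, 0, nbins - 1)
    return medians[full_idx]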