Example #1
    def get_trajectory(self, start_time, end_time):
        if not isinstance(start_time, datetime.datetime) or \
                not isinstance(end_time, datetime.datetime):
            raise ValueError('Given times must be of type datetime.datetime')

        # Could also take the trajectory directly from the geometry given 0.25
        # day sampling frequency...

        m = re.search(r'^.*drifter\s{1}no\.\s{1}(\d+)$', self.entry_title)
        drifter_id = int(m.group(1))
        uu = self.dataseturi_set.get(uri__contains='buoydata')
        fn = nansat_filename(uu.uri)

        # Get all drifter IDs
        ids = np.loadtxt(fn, usecols=(0, ))
        # Get indices of the current drifter
        ind = np.where(ids == drifter_id)
        # Get year, month, day and hour of each sample
        year = np.loadtxt(fn, usecols=(3, ))[ind]
        month = np.loadtxt(fn, usecols=(1, ))[ind]
        day = np.loadtxt(fn, usecols=(2, ))[ind]
        hour = np.remainder(day, np.floor(day)) * 24  # fractional day -> hours
        # Get longitudes and latitudes
        lat = np.loadtxt(fn, usecols=(4, ))[ind]
        lon = np.loadtxt(fn, usecols=(5, ))[ind]
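
        # A sketch of an alternative (assumption: same column layout): a
        # single np.loadtxt call avoids re-reading the file once per column.
        #
        #   cols = np.loadtxt(fn, usecols=(0, 1, 2, 3, 4, 5))
        #   ids, month, day, year, lat, lon = cols.T
        #   # ...then index each array with [ind] as above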

        # Pandas DataFrame
        df = pd.DataFrame({
            'year': year,
            'month': month,
            'day': np.floor(day),
            'hour': hour
        })
        # Create datetime64 array
        datetimes = pd.to_datetime(df)

        # Pick indices of the required trajectory
        indt0 = np.argmin(np.abs(datetimes - start_time.replace(tzinfo=None)))
        indt1 = np.argmin(np.abs(datetimes - end_time.replace(tzinfo=None)))

        # Return geometry of required trajectory
        return LineString(list(zip(lon[indt0:indt1], lat[indt0:indt1])))
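
A minimal usage sketch for get_trajectory (hypothetical: ds stands for a model instance whose class provides the method above):

import datetime

start = datetime.datetime(2010, 1, 1)
end = datetime.datetime(2010, 1, 10)
trajectory = ds.get_trajectory(start, end)  # shapely LineString
print(trajectory.wkt)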
    def get_or_create(self, uri, reprocess=False, *args, **kwargs):
        # ingest file to db
        ds, created = super(DatasetManager,
                            self).get_or_create(uri, *args, **kwargs)

        # set Dataset entry_title
        ds.entry_title = 'SAR NRCS'
        ds.save()

        # Unless reprocess==True, we may not need to do the following... (see
        # managers.py in sar doppler processor)
        #visExists = ... # check if visualization(s) already created
        #if visExists and not reprocess:
        #    warnings.warn('NO VISUALISATIONS CREATED - update managers.py')
        #    return ds, created

        n = Nansat(nansat_filename(uri))
        n.reproject_GCPs()
        n.resize(pixelsize=500)
        lon, lat = n.get_corners()
        lat_max = min(lat.max(), 85)
        d = Domain(
            NSR(3857), '-lle %f %f %f %f -ts %d %d' %
            (lon.min(), lat.min(), lon.max(), lat_max, n.shape()[1],
             n.shape()[0]))
        # Get all NRCS bands
        s0bands = []
        pp = []
        for key, value in n.bands().items():
            try:
                # standard_name is defined outside this snippet
                if value['standard_name'] == standard_name:
                    s0bands.append(key)
                    pp.append(value['polarization'])
            except KeyError:
                continue
        # Create data products
        mm = self.__module__.split('.')
        module = '%s.%s' % (mm[0], mm[1])
        mp = media_path(module, n.fileName)
        # ppath = product_path(module, n.fileName)

        # Create PNGs for each band
        for band in s0bands:
            print('Visualize', band)
            s0_tmp = n[band]
            n_tmp = Nansat(domain=n, array=s0_tmp)
            n_tmp.reproject_GCPs()
            n_tmp.reproject(d)

            s0 = n_tmp[1]
            n_tmp = None
            mask = np.ones(s0.shape, np.uint8)
            mask[np.isnan(s0) | (s0 <= 0)] = 0
            s0 = np.log10(s0) * 10.

            meta = n.bands()[band]
            product_filename = '%s_%s.png' % (meta['short_name'],
                                              meta['polarization'])

            # polarization_clims is assumed to be defined at module level
            nansatFigure(s0, mask, polarization_clims[meta['polarization']][0],
                         polarization_clims[meta['polarization']][1], mp,
                         product_filename)

            # Get DatasetParameter
            param = Parameter.objects.get(short_name=meta['short_name'])
            dsp, created = DatasetParameter.objects.get_or_create(
                dataset=ds, parameter=param)

            # Create Visualization
            geom, created = GeographicLocation.objects.get_or_create(
                geometry=WKTReader().read(n.get_border_wkt()))
            vv, created = Visualization.objects.get_or_create(
                uri='file://localhost%s/%s' % (mp, product_filename),
                title='%s %s polarization' %
                (param.standard_name, meta['polarization']),
                geographic_location=geom)

            # Create VisualizationParameter
            vp, created = VisualizationParameter.objects.get_or_create(
                visualization=vv, ds_parameter=dsp)

        return ds, True
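
The masking and dB conversion inside the loop above can be exercised in isolation. A self-contained sketch with plain numpy (not the original nansatFigure helper):

import numpy as np

def sigma0_to_db(s0):
    """Mask invalid backscatter and convert linear NRCS to dB."""
    s0 = np.asarray(s0, dtype=float)
    mask = np.ones(s0.shape, np.uint8)
    mask[np.isnan(s0) | (s0 <= 0)] = 0        # flag NaNs and non-positive values
    s0_db = np.full(s0.shape, np.nan)
    valid = mask == 1
    s0_db[valid] = 10. * np.log10(s0[valid])  # dB only where data is valid
    return s0_db, mask

s0_db, mask = sigma0_to_db([[0.01, -1.0], [np.nan, 0.1]])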
    def get_or_create(self, uri, reprocess=False, *args, **kwargs):
        # ingest file to db
        ds, created = super(DatasetManager,
                            self).get_or_create(uri, *args, **kwargs)

        fn = nansat_filename(uri)

        n = Nansat(fn)

        # Reproject to leaflet projection
        xlon, xlat = n.get_corners()
        d = Domain(
            NSR(3857), '-lle %f %f %f %f -tr 1000 1000' %
            (xlon.min(), xlat.min(), xlon.max(), xlat.max()))
        n.reproject(d)

        # Get band numbers of required bands according to standard names
        speedBandNum = n._get_band_number({'standard_name': 'wind_speed'})
        dirBandNum = n._get_band_number(
            {'standard_name': 'wind_from_direction'})

        # Get numpy arrays of the bands
        speed = n[speedBandNum]
        direction = n[dirBandNum]

        ## It probably won't work with nansatmap...
        #nmap = Nansatmap(n, resolution='l')
        #nmap.pcolormesh(speed, vmax=18)
        #nmap.quiver(-speed*np.sin(direction), speed*np.cos(direction), step=10,
        #        scale=300, width=0.002)

        # Set paths - this code should be inherited but I think there is an
        # issue in generalising the first line that defines the current module
        mm = self.__module__.split('.')
        module = '%s.%s' % (mm[0], mm[1])
        mp = media_path(module, n.fileName)
        ppath = product_path(module, n.fileName)

        filename = '.'.join(os.path.basename(n.fileName).split('.')[:2]) + '.png'

        # check uniqueness of parameter
        param1 = Parameter.objects.get(standard_name=n.get_metadata(
            bandID=speedBandNum, key='standard_name'))
        param2 = Parameter.objects.get(standard_name=n.get_metadata(
            bandID=dirBandNum, key='standard_name'))

        n.write_figure(os.path.join(mp, filename),
                       bands=speedBandNum,
                       mask_array=n['swathmask'],
                       mask_lut={0: [128, 128, 128]},
                       transparency=[128, 128, 128])

        # Get DatasetParameter
        dsp1, created = DatasetParameter.objects.get_or_create(
            dataset=ds, parameter=param1)

        # Create Visualization
        geom, created = GeographicLocation.objects.get_or_create(
            geometry=WKTReader().read(n.get_border_wkt()))
        vv, created = Visualization.objects.get_or_create(
            uri='file://localhost%s/%s' % (mp, filename),
            title='%s' % (param1.standard_name),
            geographic_location=geom)

        # Create VisualizationParameter
        vp, created = VisualizationParameter.objects.get_or_create(
            visualization=vv, ds_parameter=dsp1)

        return ds, True
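
The commented-out quiver call above needs the wind decomposed into components. A standalone sketch of that decomposition, using the standard meteorological convention for wind_from_direction (the meridional sign differs from the commented block, which targeted plot coordinates):

import numpy as np

def wind_uv(speed, direction_from_deg):
    """u/v components of wind given speed and the direction the wind
    blows FROM, in degrees (meteorological convention)."""
    d = np.radians(direction_from_deg)
    u = -speed * np.sin(d)  # eastward component
    v = -speed * np.cos(d)  # northward component
    return u, v

u, v = wind_uv(np.array([10.0]), np.array([270.0]))  # westerly wind: u = +10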
    def get_or_create(self,
                      metadata_uri,
                      data_uri,
                      time_coverage_start=None,
                      time_coverage_end=None,
                      maxnum=None,
                      minlat=-90,
                      maxlat=90,
                      minlon=-180,
                      maxlon=180):
        """ Create al''l datasets from given file and add corresponding metadata

        Parameters:
        ----------
            uri_data : str
                URI to file
            uri_metadata : str
                URI to metadata file
            time_coverage_start : timezone.datetime object
                Optional start time for ingestion
            time_coverage_end : timezone.datetime object
                Optional end time for ingestion
        Returns:
        -------
            count : Number of ingested buoy datasets
        """

        source, data_center, iso = self.set_metadata()
        metadata_path = nansat_filename(metadata_uri)
        data_file = nansat_filename(data_uri)

        data = []

        # Metadata info: http://www.aoml.noaa.gov/envids/gld/general_info/dir_table.php
        # Data: http://www.aoml.noaa.gov/envids/gld/FtpMetadataInstructions.php
        metadata = self.read_metadata(metadata_path)

        # Read file with buoy data
        # Metadata info: http://www.aoml.noaa.gov/envids/gld/general_info/dir_table.php
        # Data: http://www.aoml.noaa.gov/envids/gld/general_info/krig_table.php
        # Attention! The columns in the description are not exactly correct
        cnt = 0
        with open(data_file, 'r') as data_f:
            print('Open file: %s' % data_uri)
            for line in data_f:
                line = line.strip().split()
                # If the buoy id from the line equals the buoy id from the first row in metadata
                if line[0] == metadata[0][0]:
                    # Then add this line to arr
                    data.append(line)
                # Otherwise we have accumulated all information about one buoy
                # and can process it
                else:
                    cnt += 1
                    # Extract metadata about the buoy from the metadata file
                    self.process_data(metadata.pop(0), data, iso, data_center,
                                      source, metadata_path)
                    # Start the next buoy's accumulation with the current line
                    data = [line]

            # Add the last buoy from the input file
            cnt += 1
            self.process_data(metadata.pop(0), data, iso, data_center, source,
                              metadata_path)

            if len(metadata) == 0:
                print('All buoys were added to the database')
            else:
                warnings.warn(
                    'Not all buoys were added to the database! Missed %s buoys'
                    % (len(metadata)))

        return cnt
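
The accumulation loop above groups consecutive data lines by the buoy id in column 0. The same grouping expressed with itertools.groupby, as a sketch (assumes the file is sorted by buoy id, as the loop above also does):

from itertools import groupby

def iter_buoys(data_file):
    """Yield (buoy_id, rows) for each buoy in a whitespace-delimited
    drifter file whose first column is the buoy id."""
    with open(data_file) as f:
        rows = (line.strip().split() for line in f)
        for buoy_id, group in groupby(rows, key=lambda row: row[0]):
            yield buoy_id, list(group)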
    def get_or_create(self, uri, *args, **kwargs):
        """ Ingest gsar file to geo-spaas db
        """

        ds, created = super(DatasetManager, self).get_or_create(uri, *args, **kwargs)

        # TODO: Check if the following is necessary
        if not isinstance(ds, Dataset):
            return ds, False

        # set Dataset entry_title
        ds.entry_title = 'SAR Doppler'
        ds.save()

        fn = nansat_filename(uri)
        n = Nansat(fn, subswath=0)
        gg = WKTReader().read(n.get_border_wkt())

        if ds.geographic_location.geometry.area > gg.area:
            return ds, False

        # Update dataset border geometry
        # This must be done every time a Doppler file is processed. It is time
        # consuming but apparently the only way to do it. Could be checked
        # though...

        swath_data = {}
        lon = {}
        lat = {}
        astep = {}
        rstep = {}
        az_left_lon = {}
        ra_upper_lon = {}
        az_right_lon = {}
        ra_lower_lon = {}
        az_left_lat = {}
        ra_upper_lat = {}
        az_right_lat = {}
        ra_lower_lat = {}
        num_border_points = 10

        for i in range(self.N_SUBSWATHS):
            # Read subswaths 
            swath_data[i] = Nansat(fn, subswath=i)

            # Should use nansat.domain.get_border - see nansat issue #166
            # (https://github.com/nansencenter/nansat/issues/166)
            lon[i], lat[i] = swath_data[i].get_geolocation_grids()

            astep[i] = max(1, (lon[i].shape[0] // 2 * 2 - 1) // num_border_points)
            rstep[i] = max(1, (lon[i].shape[1] // 2 * 2 - 1) // num_border_points)

            az_left_lon[i] = lon[i][0:-1:astep[i], 0]
            az_left_lat[i] = lat[i][0:-1:astep[i], 0]

            az_right_lon[i] = lon[i][0:-1:astep[i], -1]
            az_right_lat[i] = lat[i][0:-1:astep[i], -1]

            ra_upper_lon[i] = lon[i][-1, 0:-1:rstep[i]]
            ra_upper_lat[i] = lat[i][-1, 0:-1:rstep[i]]

            ra_lower_lon[i] = lon[i][0, 0:-1:rstep[i]]
            ra_lower_lat[i] = lat[i][0, 0:-1:rstep[i]]

        # Concatenate the border points of all five subswaths (assumes
        # N_SUBSWATHS == 5), walking around the full swath
        lons = np.concatenate((az_left_lon[0],  ra_upper_lon[0],
                               ra_upper_lon[1], ra_upper_lon[2],
                               ra_upper_lon[3], ra_upper_lon[4],
                               np.flipud(az_right_lon[4]), np.flipud(ra_lower_lon[4]),
                               np.flipud(ra_lower_lon[3]), np.flipud(ra_lower_lon[2]),
                               np.flipud(ra_lower_lon[1]), np.flipud(ra_lower_lon[0])))

        # apply 180 degree correction to longitude - code copied from
        # get_border_wkt...
        # TODO: simplify using np.mod?
        for ilon, llo in enumerate(lons):
            lons[ilon] = copysign(acos(cos(llo * pi / 180.)) / pi * 180,
                                  sin(llo * pi / 180.))
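
        # The TODO above: the loop is equivalent (up to the exact +/-180
        # edge) to a single vectorized expression:
        #
        #   lons = np.mod(lons + 180., 360.) - 180.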

        lats = np.concatenate((az_left_lat[0], ra_upper_lat[0],
                               ra_upper_lat[1], ra_upper_lat[2],
                               ra_upper_lat[3], ra_upper_lat[4],
                               np.flipud(az_right_lat[4]), np.flipud(ra_lower_lat[4]),
                               np.flipud(ra_lower_lat[3]), np.flipud(ra_lower_lat[2]),
                               np.flipud(ra_lower_lat[1]), np.flipud(ra_lower_lat[0])))

        poly_border = ','.join(str(llo) + ' ' + str(lla) for llo, lla in zip(lons, lats))
        wkt = 'POLYGON((%s))' % poly_border
        new_geometry = WKTReader().read(wkt)

        # Get geolocation of dataset - this must be updated
        geoloc = ds.geographic_location
        # Check geometry, return if it is the same as the stored one
        created = False
        if geoloc.geometry != new_geometry:
            # Change the dataset geolocation to cover all subswaths
            geoloc.geometry = new_geometry
            geoloc.save()
            created = True
        
        return ds, created
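
The border construction above subsamples the geolocation grids of each subswath. The core idea for a single lon/lat grid, as a self-contained sketch (hypothetical grid_border helper, not the original code):

import numpy as np

def grid_border(lon, lat, n_points=10):
    """Subsample the border of 2D geolocation grids: up the left edge,
    across the top, down the right edge and back along the bottom."""
    astep = max(1, lon.shape[0] // n_points)
    rstep = max(1, lon.shape[1] // n_points)
    b_lon = np.concatenate((lon[::astep, 0], lon[-1, ::rstep],
                            lon[::-astep, -1], lon[0, ::-rstep]))
    b_lat = np.concatenate((lat[::astep, 0], lat[-1, ::rstep],
                            lat[::-astep, -1], lat[0, ::-rstep]))
    # close the ring explicitly so the WKT polygon is valid
    return np.append(b_lon, b_lon[0]), np.append(b_lat, b_lat[0])

lon, lat = np.meshgrid(np.linspace(0., 5., 50), np.linspace(60., 62., 40))
b_lon, b_lat = grid_border(lon, lat)
wkt = 'POLYGON((%s))' % ','.join('%f %f' % p for p in zip(b_lon, b_lat))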
    def process(self, uri, *args, **kwargs):
        """ Create data products
        """
        ds, created = self.get_or_create(uri, *args, **kwargs)
        fn = nansat_filename(uri)
        swath_data = {}
        # Read subswaths 
        for i in range(self.N_SUBSWATHS):
            swath_data[i] = Doppler(fn, subswath=i)

        # Get module name
        mm = self.__module__.split('.')
        module = '%s.%s' % (mm[0], mm[1])
        # Set media path (where images will be stored)
        mp = media_path(module, swath_data[0].filename)
        # Set product path (where netcdf products will be stored)
        ppath = product_path(module, swath_data[0].filename)

        # Loop subswaths, process each of them and create figures for display with leaflet
        is_corrupted = False
        for i in range(self.N_SUBSWATHS):
            # Check if the file is corrupted
            try:
                inci = swath_data[i]['incidence_angle']
            # TODO: What kind of exception?
            except Exception:
                is_corrupted = True
                continue

            # Add Doppler anomaly
            swath_data[i].add_band(array=swath_data[i].anomaly(), parameters={
                'wkv':
                'anomaly_of_surface_backwards_doppler_centroid_frequency_shift_of_radar_wave'
            })

            # Get band number of DC freq, then DC polarisation
            band_number = swath_data[i]._get_band_number({
                'standard_name': 'surface_backwards_doppler_centroid_frequency_shift_of_radar_wave',
                })
            pol = swath_data[i].get_metadata(bandID=band_number, key='polarization')

            # Calculate total geophysical Doppler shift
            fdg = swath_data[i].geophysical_doppler_shift()
            swath_data[i].add_band(
                array=fdg,
                parameters={
                    'wkv': 'surface_backwards_doppler_frequency_shift_of_radar_wave_due_to_surface_velocity'
                })

            # Set filename of exported netcdf
            fn = os.path.join(ppath,
                              os.path.basename(swath_data[i].filename).split('.')[0]
                              + 'subswath%d.nc' % i)
            # Set filename of original gsar file in metadata
            swath_data[i].set_metadata(key='Originating file',
                                       value=swath_data[i].filename)
            # Export data to netcdf
            print('Exporting %s (subswath %d)' % (swath_data[i].filename, i))
            swath_data[i].export(filename=fn)

            # Add netcdf uri to DatasetURIs
            ncuri = 'file://localhost' + fn
            new_uri, created = DatasetURI.objects.get_or_create(uri=ncuri,
                                                                dataset=ds)

            # Reproject to leaflet projection
            xlon, xlat = swath_data[i].get_corners()
            d = Domain(NSR(3857),
                       '-lle %f %f %f %f -tr 1000 1000'
                       % (xlon.min(), xlat.min(), xlon.max(), xlat.max()))
            swath_data[i].reproject(d, eResampleAlg=1, tps=True)

            # Check if the reprojection failed
            try:
                inci = swath_data[i]['incidence_angle']
            except Exception:
                is_corrupted = True
                warnings.warn('Could not read incidence angles - reprojection failed')
                continue

            # Create visualizations of the following bands (short_names)
            ingest_creates = ['valid_doppler',
                              'valid_land_doppler',
                              'valid_sea_doppler',
                              'dca',
                              'fdg']
            for band in ingest_creates:
                filename = '%s_subswath_%d.png' % (band, i)
                # check uniqueness of parameter
                param = Parameter.objects.get(short_name=band)
                fig = swath_data[i].write_figure(
                    os.path.join(mp, filename),
                    bands=band,
                    mask_array=swath_data[i]['swathmask'],
                    mask_lut={0: [128, 128, 128]},
                    transparency=[128, 128, 128])

                if isinstance(fig, Figure):
                    print('Created figure of subswath %d, band %s' % (i, band))
                else:
                    warnings.warn('Figure NOT CREATED')

                # Get or create DatasetParameter
                dsp, created = DatasetParameter.objects.get_or_create(dataset=ds,
                                                                      parameter=param)

                # Create GeographicLocation for the visualization object
                geom, created = GeographicLocation.objects.get_or_create(
                        geometry=WKTReader().read(swath_data[i].get_border_wkt()))

                # Create Visualization
                vv, created = Visualization.objects.get_or_create(
                    uri='file://localhost%s/%s' % (mp, filename),
                    title='%s (swath %d)' % (param.standard_name, i + 1),
                    geographic_location=geom
                )

                # Create VisualizationParameter
                vp, created = VisualizationParameter.objects.get_or_create(
                    visualization=vv,
                    ds_parameter=dsp
                )

        # TODO: consider merged figures like Jeong-Won has added in the development branch

        return ds, not is_corrupted
def update_geophysical_doppler(dopplerFile, t0, t1, swath, sensor='ASAR',
        platform='ENVISAT'):

    dop2correct = Doppler(dopplerFile)
    bandnum = dop2correct._get_band_number({
        'standard_name':
            'surface_backwards_doppler_centroid_frequency_shift_of_radar_wave'
    })
    polarization = dop2correct.get_metadata(bandID=bandnum, key='polarization')
    lon, lat = dop2correct.get_geolocation_grids()
    indmidaz = lat.shape[0] // 2
    indmidra = lat.shape[1] // 2
    if lat[indmidaz, indmidra] > lat[0, indmidra]:
        use_pass = '******'
    else:
        use_pass = '******'

    # Get datasets
    DS = Dataset.objects.filter(source__platform__short_name=platform,
        source__instrument__short_name=sensor)
    dopDS = DS.filter(
        parameters__short_name='dca',
        time_coverage_start__gte=t0,
        time_coverage_start__lt=t1
    )

    swath_files = []
    for dd in dopDS:
        try:
            fn = dd.dataseturi_set.get(
                    uri__endswith='subswath%s.nc' %swath).uri
        except DatasetURI.DoesNotExist:
            continue
        n = Doppler(fn)
        try:
            dca = n.anomaly(pol=polarization)
        except OptionError: # wrong polarization..
            continue
        lon, lat = n.get_geolocation_grids()
        indmidaz = lat.shape[0] // 2
        indmidra = lat.shape[1] // 2
        if lat[indmidaz, indmidra] > lat[0, indmidra]:
            orbit_pass = '******'
        else:
            orbit_pass = '******'
        if use_pass==orbit_pass:
            swath_files.append(fn)

    valid_land = np.array([])
    valid = np.array([])
    for ff in swath_files:
        n = Nansat(ff)
        view_bandnum = n._get_band_number({
            'standard_name': 'sensor_view_angle'
        })
        std_bandnum = n._get_band_number({
            'standard_name': \
                'standard_deviation_of_surface_backwards_doppler_centroid_frequency_shift_of_radar_wave',
        })
        pol = n.get_metadata(bandID=std_bandnum, key='polarization')

        # For checking when antenna pattern changes
        if valid.shape==(0,):
            valid = n['valid_doppler']
            dca0 = n['dca']
            dca0[n['valid_doppler']==0] = np.nan
            dca0[n['valid_sea_doppler']==1] = dca0[n['valid_sea_doppler']==1] - \
                    n['fww'][n['valid_sea_doppler']==1]
            view_angle0 = n[view_bandnum]
        else:
            validn = n['valid_doppler']
            dca0n = n['dca']
            dca0n[n['valid_doppler']==0] = np.nan
            dca0n[n['valid_sea_doppler']==1] = dca0n[n['valid_sea_doppler']==1] - \
                    n['fww'][n['valid_sea_doppler']==1]
            view_angle0n = n[view_bandnum]
            if not validn.shape==valid.shape:
                if validn.shape[1] > valid.shape[1]:
                    valid = np.resize(valid, (valid.shape[0], validn.shape[1]))
                    dca0 = np.resize(dca0, (dca0.shape[0], dca0n.shape[1]))
                    view_angle0 = np.resize(view_angle0,
                        (view_angle0.shape[0], view_angle0n.shape[1]))
                else:
                    validn = np.resize(validn, (validn.shape[0],
                        valid.shape[1]))
                    dca0n = np.resize(dca0n, (dca0n.shape[0], dca0.shape[1]))
                    view_angle0n = np.resize(view_angle0n,
                        (view_angle0n.shape[0], view_angle0.shape[1]))
            valid = np.concatenate((valid, validn))
            dca0 = np.concatenate((dca0, dca0n))
            view_angle0 = np.concatenate((view_angle0, view_angle0n))


        if valid_land.shape==(0,):
            valid_land = n['valid_land_doppler'][n['valid_land_doppler'].any(axis=1)]
            dca = n['dca'][n['valid_land_doppler'].any(axis=1)]
            view_angle = n[view_bandnum][n['valid_land_doppler'].any(axis=1)]
            std_dca = n[std_bandnum][n['valid_land_doppler'].any(axis=1)]
        else:
            vn = n['valid_land_doppler'][n['valid_land_doppler'].any(axis=1)]
            dcan = n['dca'][n['valid_land_doppler'].any(axis=1)]
            view_angle_n = n[view_bandnum][n['valid_land_doppler'].any(axis=1)]
            std_dca_n = n[std_bandnum][n['valid_land_doppler'].any(axis=1)]
            if not vn.shape==valid_land.shape:
                # Resize arrays - just for visual inspection. Actual interpolation
                # is view angle vs doppler anomaly
                if vn.shape[1] > valid_land.shape[1]:
                    valid_land = np.resize(valid_land, (valid_land.shape[0],
                        vn.shape[1]))
                    dca = np.resize(dca, (dca.shape[0],
                        vn.shape[1]))
                    view_angle = np.resize(view_angle, (view_angle.shape[0],
                        vn.shape[1]))
                    std_dca = np.resize(std_dca, (std_dca.shape[0],
                        vn.shape[1]))
                if vn.shape[1] < valid_land.shape[1]:
                    vn = np.resize(vn, (vn.shape[0], valid_land.shape[1]))
                    dcan = np.resize(dcan, (dcan.shape[0], valid_land.shape[1]))
                    view_angle_n = np.resize(view_angle_n, (view_angle_n.shape[0], valid_land.shape[1]))
                    std_dca_n = np.resize(std_dca_n, (std_dca_n.shape[0], valid_land.shape[1]))
            valid_land = np.concatenate((valid_land, vn))
            dca = np.concatenate((dca, dcan))
            view_angle = np.concatenate((view_angle, view_angle_n))
            std_dca = np.concatenate((std_dca, std_dca_n))

    view_angle0 = view_angle0.flatten()
    dca0 = dca0.flatten()
    view_angle0 = np.delete(view_angle0, np.where(np.isnan(dca0)))
    dca0 = np.delete(dca0, np.where(np.isnan(dca0)))
    ind = np.argsort(view_angle0)
    view_angle0 = view_angle0[ind]
    dca0 = dca0[ind]

    # Set dca, view_angle and std_dca to nan where not land
    dca[valid_land==0] = np.nan
    std_dca[valid_land==0] = np.nan
    view_angle[valid_land==0] = np.nan

    dca = dca.flatten()
    std_dca = std_dca.flatten()
    view_angle = view_angle.flatten()

    dca = np.delete(dca, np.where(np.isnan(dca)))
    std_dca = np.delete(std_dca, np.where(np.isnan(std_dca)))
    view_angle = np.delete(view_angle, np.where(np.isnan(view_angle)))

    ind = np.argsort(view_angle)
    view_angle = view_angle[ind]
    dca = dca[ind]
    std_dca = std_dca[ind]

    freqLims = [-200,200]

    # Show this in presentation:
    plt.subplot(2,1,1)
    count, anglebins, dcabins, im = plt.hist2d(view_angle0, dca0, 100, cmin=1,
            range=[[np.min(view_angle), np.max(view_angle)], freqLims])
    plt.colorbar()
    plt.title('Wind Doppler subtracted')

    plt.subplot(2,1,2)
    count, anglebins, dcabins, im = plt.hist2d(view_angle, dca, 100, cmin=1,
            range=[[np.min(view_angle), np.max(view_angle)], freqLims])
    plt.colorbar()
    plt.title('Doppler over land')
    #plt.show()
    plt.close()
    countLims = 200
    # Per-swath limits could be used instead:
    #countLims = {
    #    0: 600,
    #    1: 250,
    #    2: 500,
    #    3: 140,
    #    4: 130,
    #}

    dcabins_grid, anglebins_grid = np.meshgrid(dcabins[:-1], anglebins[:-1])
    anglebins_vec = anglebins_grid[count>countLims]
    dcabins_vec = dcabins_grid[count>countLims]
    #anglebins_vec = anglebins_grid[count>countLims[swath]]
    #dcabins_vec = dcabins_grid[count>countLims[swath]]


    va4interp = []
    rb4interp = []
    std_rb4interp = []
    for i in range(len(anglebins)-1):
        if i==0:
            ind0 = 0
        else:
            ind0 = np.where(view_angle>anglebins[i])[0][0]
        ind1 = np.where(view_angle<=anglebins[i+1])[0][-1]
        va4interp.append(np.mean(view_angle[ind0:ind1]))
        rb4interp.append(np.median(dca[ind0:ind1]))
        std_rb4interp.append(np.std(dca[ind0:ind1]))
    va4interp = np.array(va4interp)
    rb4interp = np.array(rb4interp)
    std_rb4interp = np.array(std_rb4interp)
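
    # The per-bin mean/median/std loop above can also be done with scipy
    # (a sketch, assuming scipy is available):
    #
    #   from scipy.stats import binned_statistic
    #   rb4interp, _, _ = binned_statistic(view_angle, dca,
    #                                      statistic='median', bins=anglebins)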

    van = dop2correct['sensor_view']
    rbfull = van.copy()
    rbfull[:,:] = np.nan
    # Is there a more efficient method than looping? (see the np.digitize
    # sketch below)
    import time
    start_time = time.time()
    for ii in range(len(anglebins)-1):
        vaii0 = anglebins[ii]
        vaii1 = anglebins[ii+1]
        rbfull[(van>=vaii0) & (van<=vaii1)] = \
                np.median(dca[(view_angle>=vaii0) & (view_angle<=vaii1)])
    #print("--- %s seconds ---" % (time.time() - start_time))
    plt.plot(np.mean(van, axis=0), np.mean(rbfull, axis=0), '.')
    #plt.plot(anglebins_vec, dcabins_vec, '.')
    #plt.show()
    plt.close()
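
    # A vectorized answer to the question above (a sketch): compute the
    # per-bin medians once with np.digitize instead of masking the full
    # arrays for every bin.
    #
    #   bin_idx = np.digitize(view_angle, anglebins) - 1
    #   medians = np.array([np.median(dca[bin_idx == k])
    #                       if (bin_idx == k).any() else np.nan
    #                       for k in range(len(anglebins) - 1)])
    #   van_idx = np.clip(np.digitize(van, anglebins) - 1, 0,
    #                     len(anglebins) - 2)
    #   rbfull = medians[van_idx]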


    #guess = [.1,.1,.1,.1,.1,.1]
    #[a,b,c,d,e,f], params_cov = optimize.curve_fit(rb_model_func,
    #        va4interp, rb4interp, guess)
    #        #anglebins_vec, dcabins_vec, guess)

    #n = Doppler(swath_files[0])
    #van = np.mean(dop2correct['sensor_view'], axis=0)
    #plt.plot(van, rb_model_func(van,a,b,c,d,e,f), 'r--')
    #plt.plot(anglebins_vec, dcabins_vec, '.')
    #plt.show()

    #ww = 1./std_rb4interp
    #ww[np.isinf(ww)] = 0
    #rbinterp = UnivariateSpline(
    #        va4interp,
    #        rb4interp,
    #        w = ww, 
    #        k = 5
    #    )

    #van = dop2correct['sensor_view']
    #y = rbinterp(van.flatten())
    #rbfull = y.reshape(van.shape)
    #plt.plot(np.mean(van, axis=0), np.mean(rbfull, axis=0), 'r--')
    #plt.plot(anglebins_vec, dcabins_vec, '.')
    #plt.show()
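
    # rb_model_func is not shown in this snippet; the six parameters in the
    # commented curve_fit call suggest a fifth-order polynomial in view
    # angle (an assumption, sketched here):
    #
    #   def rb_model_func(va, a, b, c, d, e, f):
    #       return a + b*va + c*va**2 + d*va**3 + e*va**4 + f*va**5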

    band_name = 'fdg_corrected'
    fdg = dop2correct.anomaly() - rbfull
    #plt.imshow(fdg, vmin=-60, vmax=60)
    #plt.colorbar()
    #plt.show()
    dop2correct.add_band(array=fdg,
        parameters={
            'wkv':'surface_backwards_doppler_frequency_shift_of_radar_wave_due_to_surface_velocity',
            'name': band_name
        }
    )

    current = -(np.pi*(fdg - dop2correct['fww']) / 112 /
                np.sin(dop2correct['incidence_angle']*np.pi/180))
    dop2correct.add_band(array=current,
            parameters={'name': 'current', 'units': 'm/s', 'minmax': '-2 2'}
        )

    # Extract land data for accuracy calculation
    land = dop2correct['valid_land_doppler'][dop2correct['valid_land_doppler'].any(axis=1)]
    land_fdg = fdg[dop2correct['valid_land_doppler'].any(axis=1)]

    module = 'sar_doppler'
    DS = Dataset.objects.get(dataseturi__uri__contains=dop2correct.fileName)
    #fn = '/mnt/10.11.12.232/sat_downloads_asar/level-0/2010-01/gsar_rvl/' \
    #        + dop2correct.fileName.split('/')[-2]+'.gsar'
    mp = media_path(module, nansat_filename(DS.dataseturi_set.get(
        uri__endswith='gsar').uri))
    ppath = product_path(module, nansat_filename(DS.dataseturi_set.get(
        uri__endswith='gsar').uri))
    # See managers.py -- this must be generalized!
    pngfilename = '%s_subswath_%d.png'%(band_name, swath)
    ncfilename = '%s_subswath_%d.nc'%(band_name, swath)

    # Export to new netcdf with fdg as the only band
    expFile = os.path.join(ppath, ncfilename)
    print('Exporting file: %s\n\n' % expFile)
    dop2correct.export(expFile, bands=[dop2correct._get_band_number(band_name)])
    ncuri = 'file://localhost' + expFile
    new_uri, created = DatasetURI.objects.get_or_create(uri=ncuri,
                                                        dataset=DS)

    # Reproject to leaflet projection
    xlon, xlat = dop2correct.get_corners()
    dom = Domain(NSR(3857),
            '-lle %f %f %f %f -tr 1000 1000' % (
                xlon.min(), xlat.min(), xlon.max(), xlat.max()))
    dop2correct.reproject(dom, eResampleAlg=1, tps=True)

    # Update figure
    dop2correct.write_figure(os.path.join(mp, pngfilename),
            clim = [-60,60],
            bands=band_name,
            mask_array=dop2correct['swathmask'],
            mask_lut={0:[128,128,128]}, transparency=[128,128,128])
    print("--- %s seconds ---" % (time.time() - start_time))

    land_fdg[land==0] = np.nan
    print('Standard deviation over land: %.2f' %np.nanstd(land_fdg))