def smos_l4_ecmwf_sst(infile, outdir,
                      vmin=271.05, vmax=309.15, vmin_pal=273., vmax_pal=305.):
    """
    """
    # Read/Process data
    print('Read/Process data')
    smos = SMOSNCFile(infile)
    time_start_units = smos.read_field('date_start').units
    time_start = num2date(smos.read_values('date_start')[0], time_start_units)
    time_stop_units = smos.read_field('date_stop').units
    time_stop = num2date(smos.read_values('date_stop')[0], time_stop_units)
    if (time_stop - time_start) == timedelta(days=6):
        time_stop += timedelta(days=1)
    lat = smos.read_values('lat')[::-1]
    lon = smos.read_values('lon')
    sst = smos.read_values('sea_surface_temperature')[::-1, :]

    # Construct metadata/geolocation/band(s)
    print('Construct metadata/geolocation/band(s)')
    dtime, time_range = stfmt.format_time_and_range(time_start, time_stop,
                                                    units='h')
    lat0, dlat = lat[0], lat[1] - lat[0]
    lon0, dlon = lon[0], lon[1] - lon[0]
    metadata = {}
    metadata['product_name'] = 'SMOS_L4_ECMWF_SST'
    metadata['name'] = os.path.splitext(os.path.basename(infile))[0]
    metadata['datetime'] = dtime
    metadata['time_range'] = time_range
    metadata['source_URI'] = infile
    metadata['source_provider'] = 'Ifremer/CNES'
    metadata['processing_center'] = ''
    metadata['conversion_software'] = 'Syntool'
    metadata['conversion_version'] = '0.0.0'
    metadata['conversion_datetime'] = stfmt.format_time(datetime.utcnow())
    metadata['parameter'] = 'sea surface temperature'
    geolocation = {}
    geolocation['projection'] = stfmt.format_gdalprojection()
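    # The GDAL geotransform is [x_origin, dx, 0, y_origin, 0, dy] with the
    # origin at the outer corner of the first cell, hence the half-pixel
    # shift from the cell-centre coordinates read above.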
    geolocation['geotransform'] = [lon0-dlon/2., dlon, 0,
                                   lat0-dlat/2., 0, dlat]
    band = []
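    # Pack values on 8 bits: 0..254 spans [vmin, vmax] and 255 is reserved as
    # the nodata value for masked pixels (the same convention is used by the
    # other converters below).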
    offset, scale = vmin, (vmax-vmin)/254.
    np.clip(sst.data, vmin, vmax, out=sst.data)
    array = np.round((sst.data - offset) / scale).astype('uint8')
    array[sst.mask] = 255
    colortable = stfmt.format_colortable('cerbere_medspiration',
                                         vmax=vmax, vmax_pal=vmax_pal,
                                         vmin=vmin, vmin_pal=vmin_pal)
    band.append({'array':array, 'scale':scale, 'offset':offset,
                 'description':'sea surface temperature', 'unittype':'K',
                 'nodatavalue':255, 'parameter_range':[vmin, vmax],
                 'colortable':colortable})

    # Write geotiff
    print('Write geotiff')
    tifffile = stfmt.format_tifffilename(outdir, metadata, create_dir=True)
    stfmt.write_geotiff(tifffile, metadata, geolocation, band)
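
# Hypothetical usage of the converter above (paths are illustrative only;
# assumes the cerbere reader and the "stfmt" Syntool formatting helpers
# used by this module are available):
#     smos_l4_ecmwf_sst('/path/to/SMOS_L4_SST.nc', '/path/to/output')
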
def smos_l3_bec_sss(infile, outdir,
                vmin=31.825, vmax=38.175, vmin_pal=32, vmax_pal=38):
    """
    """
    # Read/Process data
    logger.info('Read/Process data')
    smos = netCDF4.Dataset(infile, 'r')
    time_start = netCDF4.num2date(smos['time'][0], smos['time'].units)
    time_stop = time_start + datetime.timedelta(days=1)
    lat = smos['lat'][::-1]
    lon = smos['lon'][:]
    sss = smos['oa_sss'][0, ::-1, :]

    # Construct metadata/geolocation/band(s)
    logger.info('Construct metadata/geolocation/band(s)')
    dtime, time_range = stfmt.format_time_and_range(time_start, time_stop,
                                                    units='h')
    lat0, dlat = lat[0], lat[1] - lat[0]
    lon0, dlon = lon[0], lon[1] - lon[0]
    now = datetime.datetime.utcnow()
    metadata = {}
    metadata['product_name'] = 'SMOS_L3_BEC_SSS'
    metadata['name'] = os.path.splitext(os.path.basename(infile))[0]
    metadata['datetime'] = dtime
    metadata['time_range'] = time_range
    metadata['source_URI'] = infile
    src = 'SMOS Barcelona Expert Centre, ICM-CSIC / UPC, Barcelona, Spain'
    metadata['source_provider'] = src
    metadata['processing_center'] = ''
    metadata['conversion_software'] = 'Syntool'
    metadata['conversion_version'] = '0.0.0'
    metadata['conversion_datetime'] = stfmt.format_time(now)
    metadata['parameter'] = 'sea surface salinity'
    geolocation = {}
    geolocation['projection'] = stfmt.format_gdalprojection()
    geolocation['geotransform'] = [lon0-dlon/2., dlon, 0,
                                   lat0-dlat/2., 0, dlat]
    band = []
    offset, scale = vmin, (vmax-vmin)/254.
    numpy.clip(sss.data, vmin, vmax, out=sss.data)
    array = numpy.round((sss.data - offset) / scale).astype('uint8')
    array[sss.mask] = 255
    colortable = stfmt.format_colortable('matplotlib_jet',
                                         vmin=vmin, vmax=vmax,
                                         vmin_pal=vmin_pal, vmax_pal=vmax_pal)
    band.append({'array':array, 'scale':scale, 'offset':offset,
                 'description':'sea surface salinity', 'unittype':'PSS',
                 'nodatavalue':255, 'parameter_range':[vmin, vmax],
                 'colortable':colortable})

    # Write geotiff
    logger.info('Write geotiff')
    tifffile = stfmt.format_tifffilename(outdir, metadata, create_dir=True)
    stfmt.write_geotiff(tifffile, metadata, geolocation, band)
def smosstorm_smos_wind(infile,
                        outdir,
                        vmin=0.,
                        vmax=50.8,
                        vmin_pal=0.,
                        vmax_pal=50.):
    """
    """
    # Read/Process data
    dataset = Dataset(infile)
    wind = dataset.variables['wind_speed'][0, :, :]
    valid = np.where(~ma.getmaskarray(wind))
    lat_slice = slice(valid[0].min(), valid[0].max() + 1)
    lon_slice = slice(valid[1].min(), valid[1].max() + 1)
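    # If the valid area wraps around the longitude axis (dateline crossing),
    # the grid is split in two and re-stitched below: the leading columns are
    # appended after the trailing ones with a +360 degree shift so that
    # longitudes stay continuous.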
    nlon = dataset.variables['lon'].shape[0]
    if lon_slice.stop - lon_slice.start < nlon // 2:
        wind = wind[lat_slice, lon_slice]
        lat = dataset.variables['lat'][lat_slice]
        lon = dataset.variables['lon'][lon_slice]
    else:
        lon_first = dataset.variables['lon'][0]
        lon_last = dataset.variables['lon'][-1]
        validl = np.where(~ma.getmaskarray(wind[lat_slice, 0:nlon // 2]))
        lon_slicel = slice(0, validl[1].max() + 1)
        validr = np.where(~ma.getmaskarray(wind[lat_slice, nlon // 2:]))
        if (lon_last - lon_first) % 360 == 0:
            lon_slicer = slice(validr[1].min() + nlon // 2, nlon - 1)
        else:
            lon_slicer = slice(validr[1].min() + nlon // 2, nlon)
        wind = np.ma.hstack((wind[lat_slice, lon_slicer], wind[lat_slice,
                                                               lon_slicel]))
        lat = dataset.variables['lat'][lat_slice]
        lon = np.hstack((dataset.variables['lon'][lon_slicer],
                         dataset.variables['lon'][lon_slicel] + 360))
    start_time = datetime.strptime(dataset.time_coverage_start,
                                   '%Y%m%dT%H%M%S')
    stop_time = datetime.strptime(dataset.time_coverage_stop, '%Y%m%dT%H%M%S')

    # Construct metadata/geolocation/band(s)
    print('Construct metadata/geolocation/band(s)')
    metadata = {}
    (dtime, time_range) = stfmt.format_time_and_range(start_time,
                                                      stop_time,
                                                      units='ms')
    metadata['product_name'] = 'SMOSSTORM_SMOS_wind'
    metadata['name'] = os.path.splitext(os.path.basename(infile))[0]
    metadata['datetime'] = dtime
    metadata['time_range'] = time_range
    metadata['source_URI'] = infile
    metadata['source_provider'] = 'ESA'
    metadata['processing_center'] = 'IFREMER'
    metadata['conversion_software'] = 'Syntool'
    metadata['conversion_version'] = '0.0.0'
    metadata['conversion_datetime'] = stfmt.format_time(datetime.utcnow())
    metadata['parameter'] = 'wind speed'
    geolocation = {}
    geolocation['projection'] = stfmt.format_gdalprojection()
    dlon = lon[1] - lon[0]
    dlat = lat[1] - lat[0]
    geolocation['geotransform'] = [
        lon[0] - dlon / 2., dlon, 0, lat[0] - dlat / 2., 0, dlat
    ]
    band = []
    indndv = np.where(ma.getmaskarray(wind))
    offset, scale = vmin, (vmax - vmin) / 254.
    np.clip(wind.data, vmin, vmax, out=wind.data)
    array = np.round((wind.data - offset) / scale).astype('uint8')
    array[indndv] = 255
    colortable = stfmt.format_colortable('matplotlib_jet',
                                         vmax=vmax,
                                         vmax_pal=vmax_pal,
                                         vmin=vmin,
                                         vmin_pal=vmin_pal)
    band.append({
        'array': array,
        'scale': scale,
        'offset': offset,
        'description': 'wind speed',
        'unittype': 'm/s',
        'nodatavalue': 255,
        'parameter_range': [vmin, vmax],
        'colortable': colortable
    })
    # Write geotiff
    print('Write geotiff')
    tifffile = stfmt.format_tifffilename(outdir, metadata, create_dir=True)
    stfmt.write_geotiff(tifffile, metadata, geolocation, band)
def eodyn_current(infile,
                  outdir,
                  vmin=0.,
                  vmax=5.08,
                  vmin_pal=0.,
                  vmax_pal=2.,
                  write_netcdf=False):
    """
    """
    # Read/Process data
    print('Read/Process data')
    ncfile = NCFile(infile)
    if 'id' in ncfile.read_global_attributes():
        l4id = ncfile.read_global_attribute('id')
    elif re.match(r'^e-Odyn_.*\.nc', os.path.basename(infile)) is not None:
        l4id = 'e-Odyn'
    else:
        raise Exception('Unknown e-Odyn L4 file.')
    ucur = ncfile.read_values(L4_MAPS[l4id]['uname'])[::, ::-1, 0]
    ucur = np.transpose(ucur)
    vcur = ncfile.read_values(L4_MAPS[l4id]['vname'])[::, ::-1, 0]
    vcur = np.transpose(vcur)
    masku = (ucur.data == -9999)
    maskv = (vcur.data == -9999)
    if l4id not in ['CourantGeostr']:
        lon = ncfile.read_values('lon')[0:2].astype('float64')
        lat = ncfile.read_values('lat')[-1:-3:-1].astype('float64')
        for i in range(2):  # avoid rounding errors
            lon[i] = np.round(lon[i] * 10000) / 10000
            lat[i] = np.round(lat[i] * 10000) / 10000
    else:
        lon = ncfile.read_values('lon')[:]
        shift = -np.where(lon < 0)[0][0]
        ucur = np.roll(ucur, shift, axis=1)
        vcur = np.roll(vcur, shift, axis=1)
        lon = lon[shift:shift + 2]
        lat = ncfile.read_values('lat')[-1:-3:-1]
    lon0, dlon, lat0, dlat = lon[0], lon[1] - lon[0], lat[0], lat[1] - lat[0]
    #dtime_units = ncfile.read_field('time').units
    #dtime = num2date(ncfile.read_values('time')[0], dtime_units)
    timefmt = '%Y-%m-%dT%H:%M:%S.%fZ'
    start_time = datetime.strptime(
        ncfile.read_global_attribute('time_coverage_start'), timefmt)
    stop_time = datetime.strptime(
        ncfile.read_global_attribute('time_coverage_end'), timefmt)

    (dtime, time_range) = stfmt.format_time_and_range(start_time,
                                                      stop_time,
                                                      units='ms')
    # rundtime = ncfile.read_global_attribute('date_modified')
    # rundtime = datetime.strptime(rundtime, '%Y%m%dT%H%M%SZ')
    # Construct metadata/geolocation/band(s)
    print('Construct metadata/geolocation/band(s)')
    metadata = {}
    metadata['product_name'] = L4_MAPS[l4id]['productname']
    metadata['name'] = os.path.splitext(os.path.basename(infile))[0]
    metadata['datetime'] = dtime
    metadata['time_range'] = time_range
    #metadata['time_range'] = L4_MAPS[l4id]['timerange']
    metadata['source_URI'] = infile
    metadata['source_provider'] = 'e-Odyn'
    metadata['processing_center'] = ''
    metadata['conversion_software'] = 'Syntool'
    metadata['conversion_version'] = '0.0.0'
    metadata['conversion_datetime'] = stfmt.format_time(datetime.utcnow())
    metadata['parameter'] = ['current velocity', 'current direction']
    # metadata['type'] = 'model'
    # metadata['model_longitude_resolution'] = abs(dlon)
    # metadata['model_latitude_resolution'] = abs(dlat)
    # metadata['model_analysis_datetime'] = stfmt.format_time(rundtime)
    geolocation = {}
    geolocation['projection'] = stfmt.format_gdalprojection()
    geolocation['geotransform'] = [
        lon0 - dlon / 2., dlon, 0, lat0 - dlat / 2., 0, dlat
    ]
    band = []
    mask = ucur.mask | vcur.mask
    curvel = np.sqrt(ucur.data**2 + vcur.data**2)
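    # Direction in degrees, trigonometric convention (counter-clockwise from
    # east), wrapped to [0, 360).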
    curdir = np.mod(
        np.arctan2(vcur.data, ucur.data) * 180. / np.pi + 360., 360.)
    offset, scale = vmin, (vmax - vmin) / 254.
    np.clip(curvel, vmin, vmax, out=curvel)
    array = np.round((curvel - offset) / scale).astype('uint8')
    array[mask] = 255
    array[masku] = 255
    array[maskv] = 255
    colortable = stfmt.format_colortable('matplotlib_jet',
                                         vmin=vmin,
                                         vmax=vmax,
                                         vmin_pal=vmin_pal,
                                         vmax_pal=vmax_pal)
    band.append({
        'array': array,
        'scale': scale,
        'offset': offset,
        'description': 'current velocity',
        'unittype': 'm/s',
        'nodatavalue': 255,
        'parameter_range': [vmin, vmax],
        'colortable': colortable
    })
    array = np.round(curdir / 360. * 254.).astype('uint8')
    array[mask] = 255
    array[masku] = 255
    array[maskv] = 255
    band.append({
        'array': array,
        'scale': 360. / 254.,
        'offset': 0.,
        'description': 'current direction',
        'unittype': 'deg',
        'nodatavalue': 255,
        'parameter_range': [0, 360.]
    })
    # Write geotiff
    if not write_netcdf:
        print('Write geotiff')
        tifffile = stfmt.format_tifffilename(outdir, metadata, create_dir=True)
        stfmt.write_geotiff(tifffile, metadata, geolocation, band)
    else:
        print('Write netcdf')
        # u/v -> bands
        band = []
        mask = ucur.mask | vcur.mask
        vmin = -vmax
        offset, scale = vmin, (vmax - vmin) / 254.
        u = np.clip(ucur.data, vmin, vmax)
        array = np.round((u - offset) / scale).astype('uint8')
        array[mask] = 255
        band.append({
            'array': array,
            'scale': scale,
            'offset': offset,
            'description': 'current u',
            'unittype': 'm/s',
            'nodatavalue': 255,
            'parameter_range': [vmin, vmax]
        })
        v = np.clip(vcur.data, vmin, vmax)
        array = np.round((v - offset) / scale).astype('uint8')
        array[mask] = 255
        band.append({
            'array': array,
            'scale': scale,
            'offset': offset,
            'description': 'current v',
            'unittype': 'm/s',
            'nodatavalue': 255,
            'parameter_range': [vmin, vmax]
        })
        # Write
        ncfile = stfmt.format_ncfilename(outdir, metadata, create_dir=True)
        stfmt.write_netcdf(ncfile,
                           metadata,
                           geolocation,
                           band,
                           dgcpy=1.,
                           dgcpx=1.)
def ascat_l2b(infile,
              outdir,
              vmin=0.,
              vmax=25.4,
              vmin_pal=0.,
              vmax_pal=50 * 0.514,
              write_netcdf=False):
    """
    """
    # Read/Process data
    dset = Dataset(infile)
    nrow = len(dset.dimensions['NUMROWS'])
    ncell = len(dset.dimensions['NUMCELLS'])
    if ncell != 82:
        raise Exception('Expects NUMCELLS=82 (KNMI ASCAT L2B 12.5km).')
    source = dset.source
    if 'metop-a' in source.lower():
        platform = 'Metop-A'
    elif 'metop-b' in source.lower():
        platform = 'Metop-B'
    else:
        raise Exception('Platform ?')
    start_time = datetime.strptime(dset.start_date + dset.start_time,
                                   '%Y-%m-%d%H:%M:%S')
    stop_time = datetime.strptime(dset.stop_date + dset.stop_time,
                                  '%Y-%m-%d%H:%M:%S')
    (dtime, time_range) = stfmt.format_time_and_range(start_time,
                                                      stop_time,
                                                      units='ms')
    datagroup = os.path.splitext(os.path.basename(infile))[0]
    if not write_netcdf:
        # Create GeoTIFF file
        _to_geotiff(infile, outdir, vmin, vmax, vmin_pal, vmax_pal, nrow,
                    ncell, dtime, time_range, datagroup, platform, dset)
    else:
        for i in range(2):
            if i == 0:  # left swath
                swath_slice = (slice(0, nrow), slice(0, ncell // 2))
                dataset_name = '{}_left'.format(datagroup)
            else:  # right swath
                swath_slice = (slice(0, nrow), slice(ncell // 2, ncell))
                dataset_name = '{}_right'.format(datagroup)
            lat = dset.variables['lat'][swath_slice]
            lon = dset.variables['lon'][swath_slice]
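            # Unwrap longitudes along track so the swath stays continuous
            # (no 360 degree jumps); the unwrapping is restarted whenever a
            # remaining discontinuity is detected further down the swath.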
            irow = 0
            while irow < nrow:
                lon0 = lon[irow, 0] - 180
                lon[irow:, :] = np.mod(lon[irow:, :] - lon0, 360) + lon0
                notcont = (lon[irow + 1:, :] < lon0 + 90) & (lon[irow:-1, :] > lon0 + 270) | \
                          (lon[irow + 1:, :] > lon0 + 270) & (lon[irow:-1, :] < lon0 + 90)
                indnotcont = np.where(notcont.any(axis=1))[0]
                if indnotcont.size == 0:
                    irow = nrow
                else:
                    indnotcont = indnotcont.min()
                    if indnotcont == 0:
                        raise Exception('Unexpected longitudes.')
                    irow = irow + indnotcont
            ind = np.where(np.abs(lon[1:, :] - lon[:-1, :]) > 180.)
            if ind[0].size != 0:
                raise Exception('Failed to make longitudes continuous.')
            if lon[nrow // 2, ncell // 4] > 180:
                lon -= 360
            elif lon[nrow // 2, ncell // 4] < -180:
                lon += 360

            wind_speed = dset.variables['wind_speed'][swath_slice]
            wind_dir = dset.variables['wind_dir'][swath_slice]
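            # Convert wind direction from compass convention (degrees
            # clockwise from north) to trigonometric convention (degrees
            # counter-clockwise from east) before deriving u/v below.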
            wind_dir = np.mod(90. - wind_dir, 360.)
            dgcp = 16.
            ngcps = (np.ceil(np.array(lon.shape) / dgcp) + 1.).astype('int32')
            pix = np.linspace(0, lon.shape[1] - 1,
                              num=ngcps[1]).round().astype('int32')
            lin = np.linspace(0, lon.shape[0] - 1,
                              num=ngcps[0]).round().astype('int32')
            pix2d, lin2d = np.meshgrid(pix, lin)
            gcplon = lon[lin2d, pix2d]
            gcplat = lat[lin2d, pix2d]
            gcppix = pix2d + 0.5
            gcplin = lin2d + 0.5
            gcphei = np.zeros(ngcps)
            # Construct metadata/geolocation/band(s)
            print('Construct metadata/geolocation/band(s)')
            metadata = {}
            metadata['product_name'] = '{}_ASCAT_L2B'.format(platform)
            metadata['datagroup'] = datagroup
            metadata['name'] = dataset_name
            metadata['datetime'] = dtime
            metadata['time_range'] = time_range
            metadata['source_URI'] = infile
            metadata['conversion_software'] = 'Syntool'
            metadata['conversion_version'] = '0.0.0'
            metadata['conversion_datetime'] = stfmt.format_time(
                datetime.utcnow())
            metadata['parameter'] = ['wind speed', 'wind direction']
            geolocation = {}
            geolocation['projection'] = stfmt.format_gdalprojection()
            geolocation['gcps'] = stfmt.format_gdalgcps(
                gcplon, gcplat, gcphei, gcppix, gcplin)
            print('Write netcdf')
            # u/v -> bands
            band = []
            u = wind_speed * np.cos(np.deg2rad(wind_dir))
            v = wind_speed * np.sin(np.deg2rad(wind_dir))
            mask = np.ma.getmaskarray(u) | np.ma.getmaskarray(v)
            vmin = -vmax
            offset, scale = vmin, (vmax - vmin) / 254.
            clipped = np.clip(np.ma.getdata(u), vmin, vmax)
            array = np.round((clipped - offset) / scale).astype('uint8')
            array[mask] = 255
            band.append({
                'array': array,
                'scale': scale,
                'offset': offset,
                'description': 'wind u',
                'unittype': 'm s-1',
                'nodatavalue': 255,
                'parameter_range': [vmin, vmax],
                'name': 'u',
                'standard_name': 'eastward_wind'
            })
            clipped = np.clip(np.ma.getdata(v), vmin, vmax)
            array = np.round((clipped - offset) / scale).astype('uint8')
            array[mask] = 255
            band.append({
                'array': array,
                'scale': scale,
                'offset': offset,
                'description': 'wind v',
                'unittype': 'm s-1',
                'nodatavalue': 255,
                'parameter_range': [vmin, vmax],
                'name': 'v',
                'standard_name': 'northward_wind'
            })
            # Write
            ncfile = stfmt.format_ncfilename(outdir, metadata, create_dir=True)
            metadata['spatial_resolution'] = 12500.
            stfmt.write_netcdf(ncfile,
                               metadata,
                               geolocation,
                               band,
                               'swath',
                               ngcps=gcplon.shape)
def smos_l4_oscar_current(infile,
                          outdir,
                          vmin=0.,
                          vmax=5.08,
                          vmin_pal=0.,
                          vmax_pal=2.):
    """
    """
    # Read/Process data
    print('Read/Process data')
    smos = SMOSNCFile(infile)
    time_start_units = smos.read_field('date_start').units
    time_start = num2date(smos.read_values('date_start')[0], time_start_units)
    time_stop_units = smos.read_field('date_stop').units
    time_stop = num2date(smos.read_values('date_stop')[0], time_stop_units)
    if (time_stop - time_start) == timedelta(days=6):
        time_stop += timedelta(days=1)
    lat = smos.read_values('lat')[::-1]
    lon = smos.read_values('lon')
    ucur = smos.read_values('Zonal_component_surface_currents')[::-1, :]
    vcur = smos.read_values('Meridional_component_surface_currents')[::-1, :]

    # Construct metadata/geolocation/band(s)
    print('Construct metadata/geolocation/band(s)')
    dtime, time_range = stfmt.format_time_and_range(time_start,
                                                    time_stop,
                                                    units='h')
    lat0, dlat = lat[0], lat[1] - lat[0]
    lon0, dlon = lon[0], lon[1] - lon[0]
    metadata = {}
    metadata['product_name'] = 'SMOS_L4_OSCAR_current'
    metadata['name'] = os.path.splitext(os.path.basename(infile))[0]
    metadata['datetime'] = dtime
    metadata['time_range'] = time_range
    metadata['source_URI'] = infile
    metadata['source_provider'] = 'Ifremer/CNES'
    metadata['processing_center'] = ''
    metadata['conversion_software'] = 'Syntool'
    metadata['conversion_version'] = '0.0.0'
    metadata['conversion_datetime'] = stfmt.format_time(datetime.utcnow())
    metadata['parameter'] = ['current velocity', 'current direction']
    geolocation = {}
    geolocation['projection'] = stfmt.format_gdalprojection()
    geolocation['geotransform'] = [
        lon0 - dlon / 2., dlon, 0, lat0 - dlat / 2., 0, dlat
    ]
    band = []
    mask = ucur.mask | vcur.mask
    curvel = np.sqrt(ucur.data**2 + vcur.data**2)
    curdir = np.mod(
        np.arctan2(vcur.data, ucur.data) * 180. / np.pi + 360., 360.)
    offset, scale = vmin, (vmax - vmin) / 254.
    np.clip(curvel, vmin, vmax, out=curvel)
    array = np.round((curvel - offset) / scale).astype('uint8')
    array[mask] = 255
    colortable = stfmt.format_colortable('matplotlib_jet',
                                         vmin=vmin,
                                         vmax=vmax,
                                         vmin_pal=vmin_pal,
                                         vmax_pal=vmax_pal)
    band.append({
        'array': array,
        'scale': scale,
        'offset': offset,
        'description': 'current velocity',
        'unittype': 'm/s',
        'nodatavalue': 255,
        'parameter_range': [vmin, vmax],
        'colortable': colortable
    })
    array = np.round(curdir / 360. * 254.).astype('uint8')
    array[mask] = 255
    band.append({
        'array': array,
        'scale': 360. / 254.,
        'offset': 0.,
        'description': 'current direction',
        'unittype': 'deg',
        'nodatavalue': 255,
        'parameter_range': [0, 360.]
    })

    # Write geotiff
    print('Write geotiff')
    tifffile = stfmt.format_tifffilename(outdir, metadata, create_dir=True)
    stfmt.write_geotiff(tifffile, metadata, geolocation, band)
def smos_l4_locean_sss(infile,
                       outdir,
                       vmin=31.825,
                       vmax=38.175,
                       vmin_pal=32,
                       vmax_pal=38):
    """
    """
    # Read/Process data
    print('Read/Process data')
    smos = SMOSNCFile(infile)
    time_start_units = smos.read_field('date_start').units
    time_start = num2date(smos.read_values('date_start')[0], time_start_units)
    time_stop_units = smos.read_field('date_stop').units
    time_stop = num2date(smos.read_values('date_stop')[0], time_stop_units)
    time_stop = time_stop + timedelta(days=1)
    lat = smos.read_values('latitude')[::-1]
    lon = smos.read_values('longitude')
    sss = smos.read_values('Time_interpolated_ISAS_sss')[0][::-1, :]

    # Construct metadata/geolocation/band(s)
    print('Construct metadata/geolocation/band(s)')
    dtime, time_range = stfmt.format_time_and_range(time_start,
                                                    time_stop,
                                                    units='h')
    lat0, dlat = lat[0], lat[1] - lat[0]
    lon0, dlon = lon[0], lon[1] - lon[0]
    metadata = {}
    metadata['product_name'] = 'SMOS_L4_LOCEAN_ISAS_SSS'
    metadata['name'] = os.path.splitext(os.path.basename(infile))[0]
    metadata['datetime'] = dtime
    metadata['time_range'] = time_range
    metadata['source_URI'] = infile
    metadata['source_provider'] = 'SEANOE'
    metadata['processing_center'] = ''
    metadata['conversion_software'] = 'Syntool'
    metadata['conversion_version'] = '0.0.0'
    metadata['conversion_datetime'] = stfmt.format_time(datetime.utcnow())
    metadata['parameter'] = 'sea surface salinity'
    geolocation = {}
    geolocation['projection'] = stfmt.format_gdalprojection()
    geolocation['geotransform'] = [
        lon0 - dlon / 2., dlon, 0, lat0 - dlat / 2., 0, dlat
    ]
    band = []
    offset, scale = vmin, (vmax - vmin) / 254.
    np.clip(sss.data, vmin, vmax, out=sss.data)
    array = np.round((sss.data - offset) / scale).astype('uint8')
    array[sss.mask] = 255
    colortable = stfmt.format_colortable('matplotlib_jet',
                                         vmin=vmin,
                                         vmax=vmax,
                                         vmin_pal=vmin_pal,
                                         vmax_pal=vmax_pal)
    band.append({
        'array': array,
        'scale': scale,
        'offset': offset,
        'description': 'sea surface salinity',
        'unittype': 'PSS',
        'nodatavalue': 255,
        'parameter_range': [vmin, vmax],
        'colortable': colortable
    })

    # Write geotiff
    print('Write geotiff')
    tifffile = stfmt.format_tifffilename(outdir, metadata, create_dir=True)
    stfmt.write_geotiff(tifffile, metadata, geolocation, band)
def smosstorm_ascat_wind(infile, outdir,
                         vmin=0., vmax=50.8, vmin_pal=0., vmax_pal=50.):
    """
    """
    # Read/Process data
    dataset = Dataset(infile)
    lon = dataset.variables['lon'][:]
    valid = np.where(~ma.getmaskarray(lon))
    slices = (slice(valid[0].min(), valid[0].max() + 1),
              slice(valid[1].min(), valid[1].max() + 1))
    lon = lon[slices]
    if lon.shape[1] % 2 != 0:
        raise Exception('Number of cells should be even.')
    swath_ncell = lon.shape[1] // 2
    lat = dataset.variables['lat'][slices]
    wind_speed = dataset.variables['wind_speed'][slices]
    wind_dir = dataset.variables['wind_dir'][slices]
    time = dataset.variables['time'][slices]
    time_units = dataset.variables['time'].units
    start_time = num2date(time.min(), time_units)
    stop_time = num2date(time.max(), time_units)
    datagroup = '_'.join(os.path.splitext(os.path.basename(infile))[0].split('_')[0:3])
    # Construct metadata/geolocation/band(s)
    print('Construct metadata/geolocation/band(s)')
    metadata = {}
    (dtime, time_range) = stfmt.format_time_and_range(start_time, stop_time,
                                                      units='ms')
    metadata['product_name'] = 'SMOSSTORM_ASCAT_wind'
    metadata['datagroup'] = datagroup
    metadata['datetime'] = dtime
    metadata['time_range'] = time_range
    metadata['source_URI'] = infile
    metadata['source_provider'] = 'EUMETSAT'
    metadata['processing_center'] = 'KNMI'
    metadata['conversion_software'] = 'Syntool'
    metadata['conversion_version'] = '0.0.0'
    metadata['conversion_datetime'] = stfmt.format_time(datetime.utcnow())
    metadata['parameter'] = ['wind speed', 'wind direction']
    for i in range(2):
        if i == 0:
            metadata['name'] = datagroup + '_left'
        else:
            metadata['name'] = datagroup + '_right'
        swath_slice = slice(i * swath_ncell, (i + 1) * swath_ncell)
        swath_lon = lon[:, swath_slice]
        swath_lat = lat[:, swath_slice]
        swath_wind_speed = wind_speed[:, swath_slice]
        swath_wind_dir = wind_dir[:, swath_slice]
        dgcp = 16.  # spacing is about 12.5km
        ngcps = (np.ceil(np.array(swath_lon.shape) / dgcp)
                 + 1.).astype('int32')
        pix = np.linspace(0, swath_lon.shape[1] - 1, num=ngcps[1]).round().astype('int32')
        lin = np.linspace(0, swath_lon.shape[0] - 1, num=ngcps[0]).round().astype('int32')
        pix2d, lin2d = np.meshgrid(pix, lin)
        gcplon = swath_lon[lin2d, pix2d]
        gcplon[np.where(gcplon >= 180)] -= 360
        if gcplon.max() - gcplon.min() > 180:
            gcplon[np.where(gcplon < 0)] += 360.
        gcplat = swath_lat[lin2d, pix2d]
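        # GCP pixel/line coordinates refer to pixel centres, hence the
        # half-pixel offset.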
        gcppix = pix2d + 0.5
        gcplin = lin2d + 0.5
        gcphei = np.zeros(ngcps)
        geolocation = {}
        geolocation['projection'] = stfmt.format_gdalprojection()
        geolocation['gcps'] = stfmt.format_gdalgcps(gcplon, gcplat, gcphei,
                                                    gcppix, gcplin)
        band = []
        indndv = np.where(ma.getmaskarray(swath_wind_speed))
        offset, scale = vmin, (vmax-vmin)/254.
        clipped = np.clip(ma.getdata(swath_wind_speed), vmin, vmax)
        array = np.round((clipped - offset) / scale).astype('uint8')
        array[indndv] = 255
        colortable = stfmt.format_colortable('matplotlib_jet',
                                             vmax=vmax, vmax_pal=vmax_pal,
                                             vmin=vmin, vmin_pal=vmin_pal)
        band.append({'array':array, 'scale':scale, 'offset':offset,
                     'description':'wind speed', 'unittype':'m/s',
                     'nodatavalue':255, 'parameter_range':[vmin, vmax],
                     'colortable':colortable})
        indndv = np.where(ma.getmaskarray(swath_wind_dir))
        clipped = np.clip(ma.getdata(swath_wind_dir), 0, 360)
        array = np.round(clipped / 360. * 254.).astype('uint8')
        array[indndv] = 255
        band.append({'array':array, 'scale':360./254., 'offset':0.,
                     'description':'wind direction', 'unittype':'deg',
                     'nodatavalue':255, 'parameter_range':[0, 360.]})
        # Write geotiff
        print('Write geotiff')
        tifffile = stfmt.format_tifffilename(outdir, metadata, create_dir=True)
        stfmt.write_geotiff(tifffile, metadata, geolocation, band)
def smos_l3_locean_sss(infile,
                       outdir,
                       vmin=31.825,
                       vmax=38.175,
                       vmin_pal=32,
                       vmax_pal=38):
    """
    """
    # Read/Process data
    logger.info('Read/Process data')
    smos = netCDF4.Dataset(infile, 'r')
    _time = netCDF4.num2date(smos['time'][0], smos['time'].units)
    time_start = _time - datetime.timedelta(days=2)
    time_stop = _time + datetime.timedelta(days=2)
    lat = smos['lat'][::-1]
    lon = smos['lon'][:]
    sss = smos['SSS'][::-1, :]

    # Remove the last row because its latitude is below -90 degrees and the
    # ingestor does not support it. Only remove the row if it contains no
    # valid data.
    logger.info('Removing row for latitude {}'.format(lat[-1]))
    if numpy.all(sss[-1].mask):
        # Mask must be adapted separately and applied back to the result
        sss_mask = sss.mask
        sss = numpy.delete(sss, -1, 0)
        sss_mask = numpy.delete(sss_mask, -1, 0)
        sss.mask = sss_mask
        lat = lat[:-1]

    # Construct metadata/geolocation/band(s)
    logger.info('Construct metadata/geolocation/band(s)')
    dtime, time_range = stfmt.format_time_and_range(time_start,
                                                    time_stop,
                                                    units='h')
    lat0, dlat = lat[0], lat[1] - lat[0]
    lon0, dlon = lon[0], lon[1] - lon[0]
    now = datetime.datetime.utcnow()
    metadata = {}
    metadata['product_name'] = 'SMOS_L3_LOCEAN_SSS'
    metadata['name'] = os.path.splitext(os.path.basename(infile))[0]
    metadata['datetime'] = dtime
    metadata['time_range'] = time_range
    metadata['source_URI'] = infile
    metadata['source_provider'] = 'CEC-OS LOCEAN/IPSL/ACRI-ST'
    metadata['processing_center'] = ''
    metadata['conversion_software'] = 'Syntool'
    metadata['conversion_version'] = '0.0.0'
    metadata['conversion_datetime'] = stfmt.format_time(now)
    metadata['parameter'] = 'sea surface salinity'
    geolocation = {}
    geolocation['projection'] = stfmt.format_gdalprojection()
    geolocation['geotransform'] = [
        lon0 - dlon / 2., dlon, 0, lat0 - dlat / 2., 0, dlat
    ]
    band = []
    offset, scale = vmin, (vmax - vmin) / 254.
    numpy.clip(sss.data, vmin, vmax, out=sss.data)
    array = numpy.round((sss.data - offset) / scale).astype('uint8')
    array[numpy.where(sss.mask)] = 255
    colortable = stfmt.format_colortable('matplotlib_jet',
                                         vmin=vmin,
                                         vmax=vmax,
                                         vmin_pal=vmin_pal,
                                         vmax_pal=vmax_pal)
    band.append({
        'array': array,
        'scale': scale,
        'offset': offset,
        'description': 'sea surface salinity',
        'unittype': 'PSS',
        'nodatavalue': 255,
        'parameter_range': [vmin, vmax],
        'colortable': colortable
    })

    # Write geotiff
    logger.info('Write geotiff')
    tifffile = stfmt.format_tifffilename(outdir, metadata, create_dir=True)
    stfmt.write_geotiff(tifffile, metadata, geolocation, band)
def process_slices(slices, dset, datagroup, beam, ref_dt, vmin, vmax, vmin_pal,
                   vmax_pal):
    """"""
    geod = Geod(ellps='WGS84')

    # Inner, middle and outer beam widths
    beam_widths = (94.0, 120.0, 156.0,)
    beam_width = beam_widths[beam]

    for k, chunk_rows in enumerate(slices):
        _chunk_lon = dset.variables['beam_clon'][chunk_rows, beam]
        chunk_size = numpy.shape(_chunk_lon)[0]

        if 0 >= chunk_size:
            # Empty chunk, skip
            continue

        chunk_lon0 = _chunk_lon[0] - 180.0
        chunk_lon = numpy.mod(_chunk_lon - chunk_lon0, 360.0) + chunk_lon0
        chunk_lat = dset.variables['beam_clat'][chunk_rows, beam]
        values = dset.variables['SSS'][chunk_rows, beam]
        values = numpy.ma.masked_where(values == -999., values)

        # Build GCPs
        dgcp = 32.
        if numpy.max(numpy.abs(chunk_lat)) > 75.0:
            dgcp = 4.

        ngcplin = numpy.ceil(chunk_lon.size / dgcp).astype('int32')
        _gcp_alongtrack = numpy.linspace(0, chunk_lon.size - 1, num=ngcplin)
        _gcp_indices = numpy.round(_gcp_alongtrack).astype('int32')
        _gcppix = numpy.array([-1.0, 0.5, 2.0])
        ngcppix = _gcppix.size
        gcppix = numpy.tile(_gcppix[numpy.newaxis, :],
                            (ngcplin, 1))
        gcpind = numpy.tile(_gcp_indices[:, numpy.newaxis],
                            (1, ngcppix)).astype('int32')
        gcplin = gcpind + 0.5

        # Compute swath direction
        _ind0 = numpy.minimum(gcpind, chunk_lon.size - 2)
        _ind1 = _ind0 + 1
        ind_same = numpy.where((chunk_lon[_ind0] == chunk_lon[_ind1]) &
                               (chunk_lat[_ind0] == chunk_lat[_ind1]))
        for ig_line, ig_pixel in zip(ind_same[0], ind_same[1]):
            if _ind1[ig_line, ig_pixel] < chunk_lon.size - 1:
                _ind1[ig_line, ig_pixel] += 1
            else:
                _ind0[ig_line, ig_pixel] -= 1

        lat_diff = chunk_lat[_ind1] - chunk_lat[_ind0]
        lon_diff = chunk_lon[_ind1] - chunk_lon[_ind0]
        swath_dir = numpy.arctan2(lat_diff, lon_diff)

        # Compute GCPs geographical coordinates from the location of the beam
        # center and its width
        gcphei = numpy.zeros(gcppix.shape)
        gcplon = numpy.zeros(gcppix.shape)
        gcplat = numpy.zeros(gcppix.shape)
        for gcp_i in range(len(_gcp_indices)):
            ind = _gcp_indices[gcp_i]
            central_lon = chunk_lon[ind]
            central_lat = chunk_lat[ind]
            across_dir = -1 * numpy.rad2deg(numpy.mod(swath_dir[gcp_i][1],
                                            2 * numpy.pi))
            lon_a, lat_a, _ = geod.fwd(central_lon, central_lat,
                                       across_dir,
                                       1000.0 * 1.5 * beam_width)
            lon_b, lat_b, _ = geod.fwd(central_lon, central_lat,
                                       180.0 + across_dir,
                                       1000.0 * 1.5 * beam_width)

            gcplon[gcp_i][0] = lon_b
            gcplon[gcp_i][1] = central_lon
            gcplon[gcp_i][2] = lon_a

            gcplat[gcp_i][0] = lat_b
            gcplat[gcp_i][1] = central_lat
            gcplat[gcp_i][2] = lat_a

        # Fix longitudinal continuity
        half_ind = numpy.floor(len(_gcp_indices) * 0.5).astype('int32')
        gcplon0 = gcplon[half_ind, 1] - 180.0
        gcplon = numpy.mod(gcplon - gcplon0, 360.0) + gcplon0

        # Construct metadata/geolocation/band(s)
        sec = dset.variables['sec'][chunk_rows]
        start_dt = ref_dt + datetime.timedelta(seconds=sec[0])
        stop_dt = ref_dt + datetime.timedelta(seconds=sec[-1])
        dtime, time_range = stfmt.format_time_and_range(start_dt, stop_dt,
                                                        units='h')
        metadata = {}
        metadata['name'] = '{}_{}'.format(datagroup, k)
        metadata['time_range'] = time_range
        metadata['datetime'] = dtime
        metadata['datagroup'] = datagroup

        geolocation = {}
        geolocation['projection'] = stfmt.format_gdalprojection()
        geolocation['gcps'] = stfmt.format_gdalgcps(gcplon, gcplat, gcphei,
                                                    gcppix, gcplin)

        # Build mask
        land_frac = dset.variables['land_frac'][chunk_rows, beam]
        ice_frac = dset.variables['ice_frac'][chunk_rows, beam]
        scat_land_frac = dset.variables['scat_land_frac'][chunk_rows, beam]
        scat_ice_frac = dset.variables['scat_ice_frac'][chunk_rows, beam]
        mask = (values.mask |
                (land_frac > 0.1) |
                (ice_frac > 0.1))  # |
                # (scat_land_frac > 0.001) |
                # (scat_ice_frac > 0.001))

        # Pack data
        band = []
        offset, scale = vmin, (vmax - vmin) / 254.
        numpy.clip(values.data, vmin, vmax, out=values.data)
        array = numpy.round((values.data - offset) / scale).astype('uint8')
        array[numpy.where(mask)] = 255
        colortable = stfmt.format_colortable('matplotlib_jet',
                                             vmin=vmin, vmax=vmax,
                                             vmin_pal=vmin_pal,
                                             vmax_pal=vmax_pal)
        array = array[:, numpy.newaxis]
        band.append({'array':array,
                     'scale':scale,
                     'offset':offset,
                     'description':'sea surface salinity',
                     'unittype':'PSS',
                     'nodatavalue':255,
                     'parameter_range':[vmin, vmax],
                     'colortable':colortable})

        yield metadata, geolocation, band
def read_orbit(infile,
               outdir,
               vmin=-0.2,
               vmax=0.2,
               vmin_pal=-0.2,
               vmax_pal=0.2,
               dist_gcp=25.5,
               keep_empty=False):
    """
    """
    if (re.match(r'.*SLAXT_filt_NorthEastAtlantic.*', os.path.basename(infile))
            is not None):
        L2id = 'X_TRACK_NEA_SLA'
    elif (re.match(r'.*SLAXT_filt_GoMCaribbean.*', os.path.basename(infile))
          is not None):
        L2id = 'X_TRACK_GOM_SLA'
    elif (re.match(r'.*SLAXT_filt_MediterraneanSea.*',
                   os.path.basename(infile)) is not None):
        L2id = 'X_TRACK_MED_SLA'
    else:
        logger.warn('Unknown id for file {}'.format(os.path.basename(infile)))
        # vmin = -0.2 ; vmax = 0.2 ; vmin_pal = -0.2 ; vmax_pal = 0.2

    # Read variables
    dset = Dataset(infile)
    ### For debug purposes
    ind_tmp = 0
    lon_0 = dset.variables[L2_MAPS[L2id]['lonname']][ind_tmp:]
    lon_0[lon_0 > 180] = lon_0[lon_0 > 180] - 360
    lat_0 = dset.variables[L2_MAPS[L2id]['latname']][ind_tmp:]
    ssha_0 = dset.variables[L2_MAPS[L2id]['hname']][ind_tmp:]
    ssha_fill_value = dset.variables[L2_MAPS[L2id]['hname']]._FillValue

    # var_0 /=  dset.variables[L2_MAPS[L2id]['hname']].scale_factor
    time_0 = dset.variables['time'][ind_tmp:]
    time_units = dset.variables['time'].units
    dset.close()
    # Trick to deal with continuity in longitude
    dlon = abs(lon_0[1:] - lon_0[:-1])
    lref = lon_0[np.shape(lon_0)[0] // 2]
    lon_0 = np.mod(lon_0 - (lref - 180), 360) + (lref - 180)
    lon_0 = np.rad2deg(np.unwrap(np.deg2rad(lon_0)))

    # Interpolate on land
    dtime = time_0[1:] - time_0[:-1]
    dlon = abs(lon_0[1:] - lon_0[:-1])
    # delta = np.median(dtime)
    if len(time_0) > 3:
        delta = stats.mode(dtime)[0][0]
    else:
        sys.exit('orbit too short')
    ndelta = np.round(dtime / delta).astype('int')
    ind_dtime = np.where(ndelta >= 5)[0]
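    # Gaps of at least five nominal sampling intervals are filled below with
    # NaN values so that they end up as nodata pixels rather than being
    # interpolated across.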
    if len(ind_dtime) == 0:
        time = time_0
        ssha = ssha_0
        lon = lon_0
        lat = lat_0
    else:
        time = time_0[:ind_dtime[0] + 1]
        ssha = ssha_0[:ind_dtime[0] + 1]
        for i in range(len(ind_dtime)):
            # time_fill = np.linspace(time_0[ind_dtime[i]], time_0[ind_dtime[i] + 1],
            #                         num=(time_0[ind_dtime[i]+1]
            #                         - time_0[ind_dtime[i]])/delta)
            time_fill = np.linspace(time_0[ind_dtime[i]],
                                    time_0[ind_dtime[i] + 1],
                                    num=ndelta[ind_dtime[i]],
                                    endpoint=False)
            ssha_fill = np.zeros(np.shape(time_fill[1:])) * np.nan
            ssha = np.hstack([ssha, ssha_fill])
            time = np.hstack([time, time_fill[1:]])
            if i != (len(ind_dtime) - 1):
                time = np.hstack(
                    [time, time_0[ind_dtime[i] + 1:ind_dtime[i + 1] + 1]])
                ssha = np.hstack(
                    [ssha, ssha_0[ind_dtime[i] + 1:ind_dtime[i + 1] + 1]])
            else:
                time = np.hstack([time, time_0[ind_dtime[i] + 1:]])
                ssha = np.hstack([ssha, ssha_0[ind_dtime[i] + 1:]])
        func = interpolate.interp1d(time_0, lon_0, kind='linear')
        lon = func(time)
        func = interpolate.interp1d(time_0, lat_0, kind='linear')
        lat = func(time)
        ssha[ssha == ssha_fill_value] = np.nan

    lon = lon - np.floor((np.min(lon) + 180.) / 360.) * 360.
    ntime = np.shape(time)[0]
    mask_gap_ssha = np.isnan(ssha)
    start_time = num2date(time[0], time_units)
    end_time = num2date(time[-1], time_units)
    # NOTE : lon/lat must be continuous even if crossing dateline
    # (ie. no [-180,180] clipping)
    # Make GCPs (mimic a swath of arbitrary width in lon/lat, here ~5km)
    # gcps = tools_for_gcp.make_gcps_v1(lon, lat, dist_gcp=dist_gcp)
    gcps = tools_for_gcp.make_gcps_v2(lon, lat, dist_gcp=dist_gcp)
    metadata = {}
    (dtime, time_range) = stfmt.format_time_and_range(start_time,
                                                      end_time,
                                                      units='s')
    metadata['product_name'] = (L2_MAPS[L2id]['productname'])
    metadata['name'] = (os.path.splitext(os.path.basename(infile))[0])
    metadata['datetime'] = dtime
    metadata['time_range'] = time_range
    geolocation = {}
    geolocation['projection'] = stfmt.format_gdalprojection()
    geolocation['gcps'] = stfmt.format_gdalgcps(*gcps)
    band = []
    #vmin = listvar[i]['range'][0]
    #vmax = listvar[i]['range'][1]
    #vmin_pal = listvar[i]['range_pal'][0]
    #vmax_pal = listvar[i]['range_pal'][1]
    scale = (vmax - vmin) / 254.
    offset = vmin
    # Avoid warnings caused by NaN values
    #ssha[np.where(~(np.isfinite(mask_gap)))] = 255
    var = ssha
    var[np.where(mask_gap_ssha)] = 255
    array = np.clip(np.round((var - offset) / scale), 0, 254).astype('uint8')
    array[np.where(mask_gap_ssha)] = 255
    array = array[:, np.newaxis]
    if not keep_empty and np.all(array == 255):
        parameter = L2_MAPS[L2id]['parameter']
        logger.warn('No valid values in this dataset for '
                    '{}'.format(parameter))
        logger.warn('Skipped.')
        return
    colortable = stfmt.format_colortable('matplotlib_jet',
                                         vmax=vmax,
                                         vmax_pal=vmax_pal,
                                         vmin=vmin,
                                         vmin_pal=vmin_pal)
    band.append({
        'array': array,
        'scale': scale,
        'offset': offset,
        'description': L2_MAPS[L2id]['parameter'],
        'unittype': 'm',
        'nodatavalue': 255,
        'parameter_range': [vmin, vmax],
        'colortable': colortable
    })
    tifffile = stfmt.format_tifffilename(outdir, metadata, create_dir=True)
    stfmt.write_geotiff(tifffile, metadata, geolocation, band)
def sentinel3_olci(infile,
                   outdir,
                   vmin=None,
                   vmax=None,
                   slope_threshold=-0.00001,
                   channels='nir',
                   write_netcdf=False,
                   lut_path=None,
                   log_path=None,
                   lat_crop=85.0):
    """"""
    t0 = datetime.utcnow()

    if isinstance(channels, (list, tuple)):
        bandnames = channels
        product_name = 'Sentinel-3_OLCI'
    elif 'nir' == channels:
        bandnames = ('Oa17', )
        product_name = 'Sentinel-3_OLCI_NIR'
    elif 'true_rgb' == channels:
        bandnames = ('Oa09', 'Oa06', 'Oa04')
        product_name = 'Sentinel-3_OLCI_true_RGB'
    elif 'false_rgb' == channels:
        bandnames = ('Oa17', 'Oa06', 'Oa04')
        product_name = 'Sentinel-3_OLCI_false_RGB'
    else:
        raise Exception('channels must be a list/tuple of band names or one '
                        'of "nir", "true_rgb" or "false_rgb"')

    syntool_stats = {}
    for bandname in bandnames:
        syntool_stats[bandname] = {}
    if log_path is not None and not os.path.exists(log_path):
        try:
            os.makedirs(log_path)
        except OSError:
            _, e, _ = sys.exc_info()
            if e.errno != errno.EEXIST:
                raise

    # convert band into column number in the LUT
    band_columns = [int(x[2:]) + 3 - 1 for x in bandnames]

    if vmin is None:
        vmin = [None] * len(bandnames)
    if vmax is None:
        vmax = [None] * len(bandnames)

    full_path = os.path.normpath(infile)
    file_path = os.path.basename(full_path)
    file_name, _ = os.path.splitext(file_path)

    geo_path = os.path.join(infile, 'geo_coordinates.nc')
    time_path = os.path.join(infile, 'time_coordinates.nc')
    quality_path = os.path.join(infile, 'qualityFlags.nc')

    # Extract geo coordinates information
    geo_handler = netCDF4.Dataset(geo_path, 'r')
    nrow = geo_handler.dimensions['rows'].size
    nrow_all = nrow
    ncell = geo_handler.dimensions['columns'].size
    ncell_all = ncell
    lon = geo_handler.variables['longitude'][:]
    tie_lon = numpy.ma.array(lon)
    lat = geo_handler.variables['latitude'][:]
    tie_lat = numpy.ma.array(lat)
    geo_handler.close()

    # Handle longitude continuity
    dlon = lon[1:, :] - lon[:-1, :]
    if 180.0 <= numpy.max(numpy.abs(dlon)):
        lon[lon < 0.0] = lon[lon < 0.0] + 360.0

    # Extract time coordinates information
    time_handler = netCDF4.Dataset(time_path, 'r')
    start_timestamp = time_handler.variables['time_stamp'][0]
    end_timestamp = time_handler.variables['time_stamp'][-1]
    timestamp_units = time_handler.variables['time_stamp'].units
    time_handler.close()

    # Format time information
    start_time = netCDF4.num2date(start_timestamp, timestamp_units)
    end_time = netCDF4.num2date(end_timestamp, timestamp_units)
    (dtime, time_range) = stfmt.format_time_and_range(start_time,
                                                      end_time,
                                                      units='ms')

    parameters = ['{} TOA radiance'.format(bnd) for bnd in bandnames]
    metadata = {}
    metadata['product_name'] = product_name
    metadata['name'] = file_name
    metadata['datetime'] = dtime
    metadata['time_range'] = time_range
    metadata['source_URI'] = infile
    metadata['source_provider'] = 'ESA'
    metadata['processing_center'] = 'OceanDataLab'
    metadata['conversion_software'] = 'Syntool'
    metadata['conversion_version'] = '0.0.0'
    metadata['conversion_datetime'] = stfmt.format_time(datetime.utcnow())
    metadata['parameter'] = parameters
    metadata['type'] = 'remote sensing'
    metadata['sensor_type'] = 'medium-resolution imaging spectrometer'
    metadata['sensor_name'] = 'OLCI'
    metadata['sensor_platform'] = 'Sentinel-3'

    # Crop high latitude to avoid projection issues
    LAT_MAX = 89.0
    ind_valid_cols = numpy.where(numpy.abs(tie_lat).max(axis=0) <= LAT_MAX)[0]
    slice_lat0 = slice(None)
    slice_lat1 = slice(numpy.min(ind_valid_cols),
                       numpy.max(ind_valid_cols) + 1)
    tie_lat = tie_lat[slice_lat0, slice_lat1]
    tie_lon = tie_lon[slice_lat0, slice_lat1]
    nrow, ncell = tie_lat.shape

    # Handle longitude continuity
    dlon = tie_lon[1:, :] - tie_lon[:-1, :]
    if 180.0 <= numpy.max(numpy.abs(dlon)):
        lon0 = tie_lon[0, 0] + 180.0
        tie_lon[:, :] = numpy.mod(tie_lon[:, :] - lon0, 360.0) + lon0

    # Compute GCPs
    tie_row = numpy.linspace(0, nrow - 1, num=tie_lon.shape[0])
    tie_cell = numpy.linspace(0, ncell - 1, num=tie_lon.shape[1])
    tie_facrow = (nrow - 1.) / (tie_lon.shape[0] - 1.)
    tie_faccell = (ncell - 1.) / (tie_lon.shape[1] - 1.)
    gcp_fac = 128
    gcp_fac = numpy.maximum(gcp_fac, numpy.maximum(tie_faccell, tie_facrow))
    gcp_nrow = numpy.ceil((nrow - 1.) / gcp_fac).astype('int') + 1
    gcp_ncell = numpy.ceil((ncell - 1.) / gcp_fac).astype('int') + 1
    tie_indrow = numpy.round(
        numpy.linspace(0, tie_lon.shape[0] - 1, num=gcp_nrow)).astype('int')
    tie_indcell = numpy.round(
        numpy.linspace(0, tie_lon.shape[1] - 1, num=gcp_ncell)).astype('int')
    gcp_lon = tie_lon[tie_indrow.reshape((-1, 1)),
                      tie_indcell.reshape((1, -1))]
    gcp_lat = tie_lat[tie_indrow.reshape((-1, 1)),
                      tie_indcell.reshape((1, -1))]
    gcp_row = numpy.tile(tie_row[tie_indrow].reshape((-1, 1)) + 0.5,
                         (1, gcp_ncell))
    gcp_cell = numpy.tile(tie_cell[tie_indcell].reshape((1, -1)) + 0.5,
                          (gcp_nrow, 1))
    gcp_hei = numpy.zeros(gcp_lon.shape)

    geolocation = {}
    geolocation['projection'] = stfmt.format_gdalprojection()
    geolocation['gcps'] = stfmt.format_gdalgcps(gcp_lon, gcp_lat, gcp_hei,
                                                gcp_cell, gcp_row)

    syntool_stats['lon_min'] = float(numpy.min(tie_lon))
    syntool_stats['lon_max'] = float(numpy.max(tie_lon))
    syntool_stats['lat_min'] = float(numpy.min(tie_lat))
    syntool_stats['lat_max'] = float(numpy.max(tie_lat))

    # Compute atmospheric radiance TOA correction
    L_toa = None
    if lut_path is not None:
        t_start = datetime.utcnow()
        # Compute atmospheric correction for the whole granule
        L_toa = atmospheric_correction(lut_path, infile, band_columns,
                                       nrow_all, ncell_all)
        # Extract the slices of atmospheric correction that match the ones of
        # the bands after the removal of the high latitude columns.
        for iband in range(len(L_toa)):
            L_toa[iband] = L_toa[iband][slice_lat0, slice_lat1]
            # An erroneous LUT could produce nan values for the atmospheric
            # correction. This is not acceptable.
            if numpy.any(~numpy.isfinite(L_toa[iband])):
                raise Exception('Infinite or nan value found in the '
                                'atmospheric correction, please check that '
                                'the LUT has valid values for the angles '
                                'contained in the instrument_data.nc file')
        t_stop = datetime.utcnow()
        syntool_stats['lut_computation'] = (t_stop - t_start).total_seconds()

    logger.info('Construct bands')
    t_start = datetime.utcnow()
    bands = []
    dset = netCDF4.Dataset(quality_path, 'r')
    quality_flags = dset.variables['quality_flags'][slice_lat0, slice_lat1]
    dset.close()

    bits = {
        'saturated@Oa21': 0,
        'saturated@Oa20': 1,
        'saturated@Oa19': 2,
        'saturated@Oa18': 3,
        'saturated@Oa17': 4,
        'saturated@Oa16': 5,
        'saturated@Oa15': 6,
        'saturated@Oa14': 7,
        'saturated@Oa13': 8,
        'saturated@Oa12': 9,
        'saturated@Oa11': 10,
        'saturated@Oa10': 11,
        'saturated@Oa09': 12,
        'saturated@Oa08': 13,
        'saturated@Oa07': 14,
        'saturated@Oa06': 15,
        'saturated@Oa05': 16,
        'saturated@Oa04': 17,
        'saturated@Oa03': 18,
        'saturated@Oa02': 19,
        'saturated@Oa01': 20,
        'dubious': 21,
        'sun-glint_risk': 22,
        'duplicated': 23,
        'cosmetic': 24,
        'invalid': 25,
        'straylight_risk': 26,
        'bright': 27,
        'tidal_region': 28,
        'fresh_inland_water': 29,
        'coastline': 30,
        'land': 31
    }

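    # Build a bitmask of the quality flags that disqualify a pixel from the
    # contrast (min/max) estimation performed below.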
    off_flags = numpy.uint32(0)
    off_flags = off_flags + numpy.uint32(1 << bits['dubious'])
    off_flags = off_flags + numpy.uint32(1 << bits['invalid'])
    off_flags = off_flags + numpy.uint32(1 << bits['straylight_risk'])
    off_flags = off_flags + numpy.uint32(1 << bits['bright'])
    off_flags = off_flags + numpy.uint32(1 << bits['tidal_region'])
    # off_flags = off_flags + numpy.uint32(1 << bits['fresh_inland_water'])
    off_flags = off_flags + numpy.uint32(1 << bits['coastline'])
    off_flags = off_flags + numpy.uint32(1 << bits['land'])

    # Filter out values where any of the bands is flagged as saturated
    for bandname in bandnames:
        bit_name = 'saturated@{}'.format(bandname)
        off_flags = off_flags + numpy.uint32(1 << bits[bit_name])

    lat_mask = (numpy.abs(tie_lat) > lat_crop)
    data_mask = numpy.zeros(quality_flags.shape, dtype='bool')
    data_mask = (data_mask | lat_mask)

    if numpy.all(data_mask):
        logger.warn('No data to extract.')
        sys.exit(0)

    contrast_mask = numpy.zeros(quality_flags.shape, dtype='bool')
    contrast_mask = (contrast_mask | lat_mask)
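    # Reject pixels with any of the off_flags bits set: these pixels are kept
    # in the output image but excluded from the min/max (contrast) estimation.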
    contrast_mask = (contrast_mask |
                     (numpy.bitwise_and(quality_flags, off_flags) > 0))
    t_stop = datetime.utcnow()
    syntool_stats['mask_computation'] = (t_stop - t_start).total_seconds()

    t_start = datetime.utcnow()
    _vmin = list(vmin)
    _vmax = list(vmax)

    for band_index in range(len(bandnames)):
        bandname = bandnames[band_index]
        logger.info('\tConstruct {} band'.format(bandname))
        fieldname = '{}_radiance'.format(bandname)
        file_path = os.path.join(infile, '{}.nc'.format(fieldname))
        f_handler = netCDF4.Dataset(file_path, 'r')
        band = f_handler.variables[fieldname][:]
        band = numpy.ma.array(band)
        f_handler.close()

        # Apply atmospheric correction
        band = band[slice_lat0, slice_lat1]
        if L_toa is not None:
            band -= L_toa[band_index][:, :]

        # Mask null and negative values: they are lower than or equal to the
        # atmospheric correction and should probably have been flagged as
        # clouds.
        mask_negative = (band <= 0.0)

        logger.info('\tSet contrast')
        valid_ratio_lower_threshold = 0.001  # 0.1%
        valid_data_mask = (band.mask | contrast_mask | mask_negative)
        valid_data = band.data[~valid_data_mask]
        valid_ratio = float(valid_data.size) / float(band.data.size)
        syntool_stats[bandname]['valid_ratio'] = valid_ratio
        if valid_ratio_lower_threshold >= valid_ratio:
            logger.warn('No valid values for {}'.format(bandname))
            logger.warn('Using default min/max.')

            # Use arbitrary extrema on land
            if _vmin[band_index] is None:
                _min = default_minmax[bandname][0]
                _vmin[band_index] = _min
                syntool_stats[bandname]['default_min'] = float(_min)
            if _vmax[band_index] is None:
                _max = default_minmax[bandname][1]
                _vmax[band_index] = _max
                syntool_stats[bandname]['default_max'] = float(_max)
        else:
            # TODO: add clipping for NIR
            # _min = numpy.clip(_min, 1.5, 10.0)
            # _max = numpy.clip(_max, 30.0, 60.0)
            if _vmin[band_index] is None:
                _min = numpy.percentile(valid_data, .5)
                _vmin[band_index] = _min
                syntool_stats[bandname]['p0050'] = float(_min)
                syntool_stats[bandname]['min'] = float(numpy.min(valid_data))
            if _vmax[band_index] is None:
                _max = numpy.percentile(valid_data, 99.99)
                _vmax[band_index] = _max
                p99 = numpy.percentile(valid_data, 99.0)
                syntool_stats[bandname]['p9900'] = float(p99)
                syntool_stats[bandname]['p9999'] = float(_max)
                syntool_stats[bandname]['max'] = float(numpy.max(valid_data))
        logger.info('\tContrast : vmin={} / vmax={}'.format(
            _vmin[band_index], _vmax[band_index]))

    min_values = [_vmin[band_index] for band_index in range(len(bandnames))]
    max_values = [_vmax[band_index] for band_index in range(len(bandnames))]

    t_stop = datetime.utcnow()
    syntool_stats['minmax_computation'] = (t_stop - t_start).total_seconds()
    syntool_stats['final_min'] = float(numpy.min(min_values))
    syntool_stats['final_max'] = float(numpy.max(max_values))

    _min = numpy.log(numpy.min(min_values))
    _max = numpy.log(numpy.max(max_values))
    scale = (_max - _min) / 254.
    offset = _min
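    # A byte value b in [0, 254] thus corresponds to a radiance of about
    # exp(offset + scale * b); 255 is reserved for nodata and the same
    # offset/scale pair is shared by all bands.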
    for band_index in range(len(bandnames)):
        bandname = bandnames[band_index]
        logger.info('\tConstruct {} band'.format(bandname))
        fieldname = '{}_radiance'.format(bandname)
        file_path = os.path.join(infile, '{}.nc'.format(fieldname))
        f_handler = netCDF4.Dataset(file_path, 'r')
        band = f_handler.variables[fieldname][:]
        band = numpy.ma.array(band)
        f_handler.close()

        # Apply atmospheric correction
        band = band[slice_lat0, slice_lat1]
        if L_toa is not None:
            band -= L_toa[band_index][:, :]

        # Mask null and negative values: they are lower than or equal to the
        # atmospheric correction and should probably have been flagged as
        # clouds.
        mask_negative = (band <= 0.0)

        # Compute the logarithm only for radiance values that are higher than
        # the atmospheric correction.
        bnd = numpy.log(band.data, where=(~mask_negative))

        logger.info('\tBytescaling')
        byte = bytescale(bnd, cmin=_min, cmax=_max, low=0, high=254)
        description = '{} radiance (log)'.format(bandname)
        if band.mask is not numpy.ma.nomask:
            byte[band.mask] = 255

        # mask data for extreme latitudes
        byte[data_mask] = 255

        # Pixels with a radiance equal or inferior to atmospheric correction
        # are clipped to the minimal value.
        if 0 < mask_negative.size:
            byte[numpy.where(mask_negative == True)] = 0  # noqa

        band_range = [_vmin[band_index], _vmax[band_index]]
        bands.append({
            'array': byte,
            'scale': scale,
            'offset': offset,
            'description': description,
            'unittype': '',
            'nodatavalue': 255,
            'parameter_range': band_range
        })
        if write_netcdf:
            bands[-1]['name'] = bandname
            bands[-1]['long_name'] = bandname
            bands[-1]['unittype'] = '1'

    logger.info('Make sure nodata are at the same locations in all bands')
    mask = numpy.any([_band['array'] == 255 for _band in bands], axis=0)
    for band in bands:
        band['array'][mask] = 255

    t_stop = datetime.utcnow()
    syntool_stats['bytescaling'] = (t_stop - t_start).total_seconds()

    if write_netcdf:
        metadata['spatial_resolution'] = 300
        ncfile = stfmt.format_ncfilename(outdir, metadata, create_dir=True)
        stfmt.write_netcdf(ncfile,
                           metadata,
                           geolocation,
                           bands,
                           'swath',
                           ngcps=gcp_lon.shape)
    else:
        logger.info('Write geotiff')
        tifffile = stfmt.format_tifffilename(outdir, metadata, create_dir=True)
        stfmt.write_geotiff(tifffile, metadata, geolocation, bands)

    logger.info(datetime.utcnow() - t0)
    syntool_stats['total_time'] = (datetime.utcnow() - t0).total_seconds()
    if log_path is not None:
        import json
        stats_path = os.path.join(log_path, '{}.json'.format(file_name))
        with open(stats_path, 'w') as f:
            json.dump(syntool_stats, f)
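
The byte encoding above works in log space: bytes 0-254 map linearly to
log-radiance between the per-granule minimum and maximum, and 255 marks
nodata. The snippet below is a minimal, self-contained sketch of that
convention; the function names and sample values are hypothetical and the
project's bytescale helper is not reproduced.

import numpy


def log_bytescale(radiance, vmin, vmax, nodata_mask=None):
    """Minimal sketch: map radiances to uint8 in log space (255 = nodata)."""
    offset = numpy.log(vmin)
    scale = (numpy.log(vmax) - offset) / 254.
    # Clip to the contrast range before taking the logarithm
    clipped = numpy.clip(radiance, vmin, vmax)
    byte = numpy.round((numpy.log(clipped) - offset) / scale).astype('uint8')
    if nodata_mask is not None:
        byte[nodata_mask] = 255
    return byte, scale, offset


def log_byte_to_radiance(byte, scale, offset):
    """Inverse mapping: recover an approximate radiance from byte values."""
    return numpy.exp(offset + scale * byte.astype('float64'))


# Hypothetical usage with fake radiances
rad = numpy.array([5., 20., 80.])
byte, scale, offset = log_bytescale(rad, vmin=2., vmax=100.)
approx = log_byte_to_radiance(byte, scale, offset)  # roughly [5, 20, 80]

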
def sar_doppler(infile, outdir, pngkml=False,
                vmin=-2.5, vmax=2.5, vmin_pal=-2.5, vmax_pal=2.5):
    """
    """
    # Read/Process data
    print 'Read/Process data'
    sardop = SAFEOCNNCFile(infile, product='DOPPLER')
    mission = sardop.read_global_attribute('missionName')
    if mission == 'S1A':
        sensor_name = 'Sentinel-1A'
        sensor_platform = 'Sentinel-1A'
        source_provider = 'ESA'
    else:
        raise Exception('S1A mission expected.')
    start_time = sardop.get_start_time()
    stop_time = sardop.get_end_time()
    heading = sardop.read_values('rvlHeading')
    if np.sin((90 - heading.mean()) * np.pi / 180) > 0:
        sensor_pass = 'Ascending'
    else:
        sensor_pass = 'Descending'
    safe_name = os.path.basename(os.path.dirname(os.path.dirname(infile)))
    sensor_mode = safe_name.split('_')[1]
    if sensor_mode not in ['S1', 'S2', 'S3', 'S4', 'S5', 'S6', 'IW', 'EW']:
        raise Exception('S[1-6]/IW/EW modes expected.')
    sensor_swath = os.path.basename(infile).split('-')[1].upper()
    sensor_polarisation = sardop.read_global_attribute('polarisation')
    datagroup = safe_name.replace('.SAFE', '')
    pid = datagroup.split('_')[-1]
    dataname = os.path.splitext(os.path.basename(infile))[0] + '-' + pid
    if 'rvlSwath' in sardop.get_dimensions():
        nswath = sardop.get_dimsize('rvlSwath')
    else:
        nswath = 1
    for iswath in range(nswath):

        if nswath == 1:
            radvel = sardop.read_values('rvlRadVel')
            landflag = sardop.read_values('rvlLandFlag')
            lon = sardop.read_values('lon')
            lat = sardop.read_values('lat')
            name = dataname
        else:
            radvel = sardop.read_values('rvlRadVel')[:, :, iswath]
            landflag = sardop.read_values('rvlLandFlag')[:, :, iswath]
            lon = sardop.read_values('lon')[:, :, iswath]
            lat = sardop.read_values('lat')[:, :, iswath]
            valid = np.where((ma.getmaskarray(lon) == False) & \
                             (ma.getmaskarray(lat) == False))
            slices = [slice(valid[0].min(), valid[0].max() + 1),
                      slice(valid[1].min(), valid[1].max() + 1)]
            radvel = radvel[slices]
            landflag = landflag[slices]
            lon = lon[slices]
            lat = lat[slices]
            name = dataname + '-' + str(iswath+1)

        if sensor_pass == 'Ascending':
            radvel *= -1
        ngcps = np.ceil(np.array(lon.shape) / 10.).astype('int') + 1
        pix = np.linspace(0, lon.shape[1] - 1, num=ngcps[1]).round().astype('int32')
        lin = np.linspace(0, lon.shape[0] - 1, num=ngcps[0]).round().astype('int32')
        pix2d, lin2d = np.meshgrid(pix, lin)
        gcplon = lon[lin2d, pix2d]
        gcplat = lat[lin2d, pix2d]
        gcppix = pix2d + 0.5
        gcplin = lin2d + 0.5
        gcphei = np.zeros(ngcps)
        if gcplon.min() < -135 and gcplon.max() > 135:
            gcplon[np.where(gcplon < 0)] += 360.

        # Construct metadata/geolocation/band(s)
        print 'Construct metadata/geolocation/band(s)'
        metadata = {}
        (dtime, time_range) = stfmt.format_time_and_range(start_time, stop_time,
                                                          units='ms')
        metadata['product_name'] = 'SAR_doppler'
        metadata['name'] = name
        metadata['datetime'] = dtime
        metadata['time_range'] = time_range
        metadata['source_URI'] = infile
        metadata['source_provider'] = source_provider
        metadata['processing_center'] = '' #'OceanDataLab'
        metadata['conversion_software'] = 'Syntool'
        metadata['conversion_version'] = '0.0.0'
        metadata['conversion_datetime'] = stfmt.format_time(datetime.utcnow())
        metadata['parameter'] = 'radial horizontal velocities'
        metadata['type'] = 'remote sensing'
        metadata['sensor_type'] = 'SAR'
        metadata['sensor_name'] = sensor_name
        metadata['sensor_platform'] = sensor_platform
        metadata['sensor_mode'] = sensor_mode
        metadata['sensor_swath'] = sensor_swath
        metadata['sensor_polarisation'] = sensor_polarisation
        metadata['sensor_pass'] = sensor_pass
        metadata['datagroup'] = datagroup
        geolocation = {}
        geolocation['projection'] = stfmt.format_gdalprojection()
        geolocation['gcps'] = stfmt.format_gdalgcps(gcplon, gcplat, gcphei,
                                                    gcppix, gcplin)
        band = []
        #indndv = np.where(landflag != 0)
        offset, scale = vmin, (vmax-vmin)/254.
        np.clip(radvel, vmin, vmax, out=radvel)
        array = np.round((radvel - offset) / scale).astype('uint8')
        #array[indndv] = 255
        colortable = stfmt.format_colortable('doppler', vmax=vmax, vmax_pal=vmax_pal,
                                             vmin=vmin, vmin_pal=vmin_pal)
        band.append({'array':array, 'scale':scale, 'offset':offset,
                     'description':'radial horizontal velocities', 'unittype':'m/s',
                     'nodatavalue':255, 'parameter_range':[vmin, vmax],
                     'colortable':colortable})
        # Write geotiff
        print 'Write geotiff'
        tifffile = stfmt.format_tifffilename(outdir, metadata, create_dir=True)
        stfmt.write_geotiff(tifffile, metadata, geolocation, band)
        # Write projected png/kml
        if pngkml == True:
            print 'Write projected png/kml'
            stfmt.write_pngkml_proj(tifffile)
Example #14
def smosstorm_smap_wind(infile,
                        outdir,
                        vmin=0.,
                        vmax=50.8,
                        vmin_pal=0.,
                        vmax_pal=50.):
    """
    """
    # Read data
    dataset = Dataset(infile)
    lon = dataset.variables['lon'][:]
    lat = dataset.variables['lat'][:]
    minute = dataset.variables['minute'][:]
    ## for wind we disable auto masking because of wrong valid_min/valid_max
    ## (valid_min/valid_max should be set for values before offset/scale)
    dataset.variables['wind'].set_auto_mask(False)
    wind = dataset.variables['wind'][:]
    wind = np.ma.masked_less(wind, 0)
    day = datetime(dataset.year_of_observation, dataset.month_of_observation,
                   dataset.getncattr('day_of_month_of observation'))
    dataset.close()
    name_pattern = '{}_{{}}_{{}}'.format(
        os.path.splitext(os.path.basename(infile))[0])
    if np.ma.is_masked(lat):
        raise Exception('Some lat are masked.')
    dlat = lat[1] - lat[0]
    if not np.all(np.isclose(lat[1:] - lat[:-1], dlat)):
        raise Exception('Unexpected not unique dlat.')
    if np.ma.is_masked(lon):
        raise Exception('Some lon are masked.')
    dlon = lon[1] - lon[0]
    if not np.all(np.isclose(lon[1:] - lon[:-1], dlon)):
        raise Exception('Unexpected not unique dlon.')
    if not np.isclose(np.abs(lon[-1] + dlon - lon[0]), 360):
        raise Exception('lon does not cover exactly 360 degrees.')
    if np.all(np.ma.getmaskarray(minute) | np.ma.getmaskarray(wind)):
        raise Exception('minute + wind is fully masked.')
    if minute.min() < 0 or minute.max() > 1440:
        raise Exception('Unexpected minute outside [0, 1440].')

    # Flip grid
    if dlat > 0:
        lat = lat[::-1]
        dlat = -dlat
        minute = minute[::-1, :, :]
        wind = wind[::-1, :, :]
    if dlon < 0:
        lon = lon[::-1]
        dlon = -dlon
        minute = minute[:, ::-1, :]
        wind = wind[:, ::-1, :]

    # Loop on node (ascending / descending) and pass
    nlon = lon.size
    pass_dminute = 49
    for inode in [0, 1]:
        valid_minute = minute[:, :, inode].compressed()
        if valid_minute.size == 0:
            continue
        hist, _ = np.histogram(valid_minute, bins=1441, range=(0, 1441))
        indminute = np.where(hist != 0)[0]
        dminute = indminute[1:] - indminute[:-1]
        indsplit = np.where(dminute > pass_dminute)[0]
        minute0 = np.concatenate((indminute[[0]], indminute[indsplit + 1]))
        minute1 = np.concatenate((indminute[indsplit], indminute[[-1]]))
        for _minute0, _minute1 in zip(minute0, minute1):
            indpass = np.where((minute[:, :, inode] >= _minute0) & \
                               (minute[:, :, inode] <= _minute1) & \
                               (np.ma.getmaskarray(wind[:, :, inode]) == False))
            if indpass[0].size == 0:
                continue
            lat_slice = slice(indpass[0].min(), indpass[0].max() + 1)
            indlon = np.unique(indpass[1])
            if indlon.size == 1:
                dindlon = np.array([0])
            else:
                dindlon = indlon[1:] - indlon[:-1]
            if (indlon[-1] - indlon[0]) <= (nlon - dindlon.max()):
                lon_slice = slice(indlon[0], indlon[-1] + 1)
                lat0 = lat[lat_slice.start]
                lon0 = lon[lon_slice.start]
                _wind = wind[lat_slice, lon_slice, inode].copy()
                _minute = minute[lat_slice, lon_slice, inode].copy()
            else:
                indsplit = dindlon.argmax()
                lon_slice0 = slice(indlon[indsplit + 1], nlon)
                lon_slice1 = slice(0, indlon[indsplit] + 1)
                lat0 = lat[lat_slice.start]
                lon0 = lon[lon_slice0.start]
                _wind = np.ma.concatenate(
                    (wind[lat_slice, lon_slice0,
                          inode], wind[lat_slice, lon_slice1, inode]),
                    axis=1)
                _minute = np.ma.concatenate(
                    (minute[lat_slice, lon_slice0,
                            inode], minute[lat_slice, lon_slice1, inode]),
                    axis=1)
            lon0 = np.mod(lon0 + 180., 360.) - 180.
            _wind[_minute < _minute0] = np.ma.masked
            _wind[_minute > _minute1] = np.ma.masked
            start_time = day + timedelta(seconds=_minute0 * 60)
            stop_time = day + timedelta(seconds=_minute1 * 60)
            name = name_pattern.format(start_time.strftime('%H%M'),
                                       stop_time.strftime('%H%M'))

            # Construct metadata/geolocation/band(s)
            print 'Construct metadata/geolocation/band(s)'
            metadata = {}
            (dtime, time_range) = stfmt.format_time_and_range(start_time,
                                                              stop_time,
                                                              units='ms')
            metadata['product_name'] = 'SMOSSTORM_SMAP_wind'
            metadata['name'] = name
            metadata['datetime'] = dtime
            metadata['time_range'] = time_range
            metadata['source_URI'] = infile
            metadata['source_provider'] = 'NASA'
            metadata['processing_center'] = 'Remote Sensing Systems'
            metadata['conversion_software'] = 'Syntool'
            metadata['conversion_version'] = '0.0.0'
            metadata['conversion_datetime'] = stfmt.format_time(
                datetime.utcnow())
            metadata['parameter'] = 'wind speed'
            geolocation = {}
            geolocation['projection'] = stfmt.format_gdalprojection()
            geolocation['geotransform'] = [
                lon0 - dlon / 2., dlon, 0, lat0 - dlat / 2., 0, dlat
            ]
            band = []
            mask = np.ma.getmaskarray(_wind)
            _wind = np.ma.getdata(_wind)
            offset, scale = vmin, (vmax - vmin) / 254.
            np.clip(_wind, vmin, vmax, out=_wind)
            array = np.round((_wind - offset) / scale).astype('uint8')
            array[mask] = 255
            colortable = stfmt.format_colortable('matplotlib_jet',
                                                 vmax=vmax,
                                                 vmax_pal=vmax_pal,
                                                 vmin=vmin,
                                                 vmin_pal=vmin_pal)
            band.append({
                'array': array,
                'scale': scale,
                'offset': offset,
                'description': 'wind speed',
                'unittype': 'm/s',
                'nodatavalue': 255,
                'parameter_range': [vmin, vmax],
                'colortable': colortable
            })

            # Write geotiff
            print 'Write geotiff'
            tifffile = stfmt.format_tifffilename(outdir,
                                                 metadata,
                                                 create_dir=True)
            stfmt.write_geotiff(tifffile, metadata, geolocation, band)
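
The pass separation above builds a histogram of the observation minute over
[0, 1440] and starts a new pass wherever consecutive populated bins are more
than pass_dminute minutes apart. Below is a minimal sketch of that splitting
logic with hypothetical minute values (split_passes is an illustrative name,
not part of the original converter).

import numpy as np


def split_passes(minutes, max_gap=49):
    """Minimal sketch: split minute-of-day samples into passes at large gaps."""
    hist, _ = np.histogram(minutes, bins=1441, range=(0, 1441))
    populated = np.where(hist != 0)[0]
    gaps = populated[1:] - populated[:-1]
    cut = np.where(gaps > max_gap)[0]
    starts = np.concatenate((populated[[0]], populated[cut + 1]))
    stops = np.concatenate((populated[cut], populated[[-1]]))
    return list(zip(starts, stops))


# Two passes separated by a large gap in time
print(split_passes(np.array([10, 12, 15, 400, 402, 405])))
# -> [(10, 15), (400, 405)]

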
def sar_doppler_exp(infile,
                    outdir,
                    pngkml=False,
                    vmin=-2.5,
                    vmax=2.5,
                    vmin_pal=-2.5,
                    vmax_pal=2.5):
    """
    """
    # Read/Process data
    print 'Read/Process data'
    sardop = Dataset(infile)
    mission = sardop.MISSIONNAME
    if mission == 'S1A':
        sensor_name = 'Sentinel-1A'
        sensor_platform = 'Sentinel-1A'
        source_provider = 'ESA'
    else:
        raise Exception('S1A mission expected.')
    doptime = sardop.variables['rvlZeroDopplerTime'][:]
    start_time = datetime.strptime(''.join(list(doptime[0, 0, :])),
                                   '%Y-%m-%dT%H:%M:%S.%f')
    stop_time = datetime.strptime(''.join(list(doptime[-1, -1, :])),
                                  '%Y-%m-%dT%H:%M:%S.%f')
    heading = sardop.variables['rvlHeading'][:]
    if np.sin((90 - heading.mean()) * np.pi / 180) > 0:
        sensor_pass = 'Ascending'
    else:
        sensor_pass = 'Descending'
    # safe_name = os.path.basename(os.path.dirname(os.path.dirname(infile)))
    # sensor_mode = safe_name.split('_')[1]
    # if sensor_mode not in ['S1', 'S2', 'S3', 'S4', 'S5', 'S6', 'IW', 'EW']:
    #     raise Exception('S[1-6]/IW/EW modes expected.')
    # sensor_swath = os.path.basename(infile).split('-')[1].upper()
    # sensor_polarisation = sardop.read_global_attribute('polarisation')
    # datagroup = safe_name.replace('.SAFE', '')
    # pid = datagroup.split('_')[-1]
    # dataname = os.path.splitext(os.path.basename(infile))[0] + '-' + pid
    dataname = os.path.splitext(os.path.basename(infile))[0]
    sensor_mode = dataname.split('_')[1]
    sensor_swath = sensor_mode
    sensor_polarisation = sardop.POLARISATION
    radvel = sardop.variables['rvlRadVel'][:]
    sweepangle = sardop.variables['rvlSweepAngle'][:]
    radvel = descalloping(radvel, sweepangle)
    radvel = smooth(radvel)
    inc = sardop.variables['rvlIncidenceAngle'][:]
    radvel /= np.sin(np.deg2rad(inc))
    #landflag = sardop.variables['rvlLandFlag'][:]
    lon = sardop.variables['rvlLon'][:]
    lat = sardop.variables['rvlLat'][:]
    if sensor_pass == 'Ascending':
        radvel *= -1
    ngcps = np.ceil(np.array(lon.shape) / 10.).astype('int') + 1
    pix = np.linspace(0, lon.shape[1] - 1,
                      num=ngcps[1]).round().astype('int32')
    lin = np.linspace(0, lon.shape[0] - 1,
                      num=ngcps[0]).round().astype('int32')
    pix2d, lin2d = np.meshgrid(pix, lin)
    gcplon = lon[lin2d, pix2d]
    gcplat = lat[lin2d, pix2d]
    gcppix = pix2d + 0.5
    gcplin = lin2d + 0.5
    gcphei = np.zeros(ngcps)
    if gcplon.min() < -135 and gcplon.max() > 135:
        gcplon[np.where(gcplon < 0)] += 360.

    # Construct metadata/geolocation/band(s)
    print 'Construct metadata/geolocation/band(s)'
    metadata = {}
    (dtime, time_range) = stfmt.format_time_and_range(start_time,
                                                      stop_time,
                                                      units='ms')
    metadata['product_name'] = 'SAR_doppler_exp'
    metadata['name'] = dataname
    metadata['datetime'] = dtime
    metadata['time_range'] = time_range
    metadata['source_URI'] = infile
    metadata['source_provider'] = source_provider
    metadata['processing_center'] = ''
    metadata['conversion_software'] = 'Syntool'
    metadata['conversion_version'] = '0.0.0'
    metadata['conversion_datetime'] = stfmt.format_time(datetime.utcnow())
    metadata['parameter'] = 'radial horizontal velocities'
    metadata['type'] = 'remote sensing'
    metadata['sensor_type'] = 'SAR'
    metadata['sensor_name'] = sensor_name
    metadata['sensor_platform'] = sensor_platform
    metadata['sensor_mode'] = sensor_mode
    metadata['sensor_swath'] = sensor_swath
    metadata['sensor_polarisation'] = sensor_polarisation
    metadata['sensor_pass'] = sensor_pass
    # metadata['datagroup'] = datagroup
    geolocation = {}
    geolocation['projection'] = stfmt.format_gdalprojection()
    geolocation['gcps'] = stfmt.format_gdalgcps(gcplon, gcplat, gcphei, gcppix,
                                                gcplin)
    band = []
    #indndv = np.where(landflag != 0)
    offset, scale = vmin, (vmax - vmin) / 254.
    np.clip(radvel, vmin, vmax, out=radvel)
    array = np.round((radvel - offset) / scale).astype('uint8')
    #array[indndv] = 255
    colortable = stfmt.format_colortable('doppler',
                                         vmax=vmax,
                                         vmax_pal=vmax_pal,
                                         vmin=vmin,
                                         vmin_pal=vmin_pal)
    band.append({
        'array': array,
        'scale': scale,
        'offset': offset,
        'description': 'radial horizontal velocities',
        'unittype': 'm/s',
        'nodatavalue': 255,
        'parameter_range': [vmin, vmax],
        'colortable': colortable
    })
    # Write geotiff
    print 'Write geotiff'
    tifffile = stfmt.format_tifffilename(outdir, metadata, create_dir=True)
    stfmt.write_geotiff(tifffile, metadata, geolocation, band)
    # Write projected png/kml
    if pngkml == True:
        print 'Write projected png/kml'
        stfmt.write_pngkml_proj(tifffile)
def smosstorm_smos_wind_v4(infile,
                           outdir,
                           vmin=0.,
                           vmax=50.8,
                           vmin_pal=0.,
                           vmax_pal=50.):
    """
    """
    # Read/Process data
    dataset = Dataset(infile)
    if len(dataset.dimensions['time']) == 0:
        raise Exception('time dimension of null length.')
    # Get masked wind using appropriate flags
    wind = dataset.variables['wind_speed'][0, :, :]
    flags = dataset.variables['flags'][0, :, :]
    flags_bytes = [0, 3, 4, 5, 6]  # advised in product
    flags_mask = np.any(
        [np.bitwise_and(flags, 2**b) != 0 for b in flags_bytes], axis=0)
    mask = np.ma.getmaskarray(wind) | flags_mask
    if mask.all():
        raise Exception('Data is fully masked.')
    wind = np.ma.MaskedArray(np.ma.getdata(wind), mask=mask)
    # Get lon/lat
    lat = dataset.variables['lat'][:]
    dlat = lat[1] - lat[0]
    if not np.all(np.isclose(lat[1:] - lat[:-1], dlat)):
        raise Exception('Unexpected not unique dlat.')
    lon = dataset.variables['lon'][:]
    dlon = lon[1] - lon[0]
    if not np.all(np.isclose(lon[1:] - lon[:-1], dlon)):
        raise Exception('Unexpected not unique dlon.')
    # Get time
    time_units = dataset.variables['time'].units
    if 'dtime' in dataset.variables:
        # standard case
        if 'days' not in time_units:
            raise Exception('time units expected in days.')
        if 'days' not in dataset.variables['dtime'].units:
            raise Exception('dtime units expected in days.')
        time = dataset.variables['time'][0] + dataset.variables['dtime'][
            0, :, :]
    else:
        # rare case
        time = dataset.variables['time'][0, :, :]
    _start_time = datetime.strptime(dataset.time_coverage_start,
                                    '%Y%m%dT%H%M%SZ')
    _stop_time = datetime.strptime(dataset.time_coverage_end, '%Y%m%dT%H%M%SZ')
    dataset.close()
    # Keep only valid part of grid
    valid = np.where(np.ma.getmaskarray(wind) == False)
    lat_slice = slice(valid[0].min(), valid[0].max() + 1)
    lon_slice = slice(valid[1].min(), valid[1].max() + 1)
    wind = wind[lat_slice, lon_slice]
    lat = lat[lat_slice]
    lon = lon[lon_slice]
    time = time[lat_slice, lon_slice]
    # Set start_time/stop_time
    if np.ma.is_masked(time) and time.mask.all():
        raise Exception('time is fully masked in valid slice.')
    start_time = num2date(time.min(), time_units)
    stop_time = num2date(time.max(), time_units)
    del time
    if (start_time + timedelta(seconds=1)) < _start_time or \
       (stop_time - timedelta(seconds=1)) > _stop_time:
        raise Exception('time outside time coverage in global attributes.')
    # Flip grid
    if dlat > 0:
        wind = wind[::-1, :]
        lat = lat[::-1]
        dlat = -dlat
    if dlon < 0:
        wind = wind[:, ::-1]
        lon = lon[::-1]
        dlon = -dlon
    # Rearrange grid
    # (shift the left part of the grid to minimize valid extent)
    lon_min, lon_max = lon[0], lon[-1]
    if lon_min < -180. or lon_max > 180.:
        raise Exception('Unexpected lon outside [-180, 180].')
    valid_lon = lon[np.any(~np.ma.getmaskarray(wind), axis=0)]
    if valid_lon.size == 1:
        shifted_lon_min = lon_min - 1
        shifted_lon_max = lon_max + 1
    else:
        valid_dlon = valid_lon[1:] - valid_lon[:-1]
        shifted_lon_min = valid_lon[valid_dlon.argmax() + 1]
        shifted_lon_max = valid_lon[valid_dlon.argmax()] + 360.
    if (lon_max - lon_min) > (shifted_lon_max - shifted_lon_min):
        _wind = wind.copy()
        shifted_nlon = np.round(
            (shifted_lon_max - shifted_lon_min) / dlon).astype('int') + 1
        wind = np.ma.masked_all((_wind.shape[0], shifted_nlon),
                                dtype=_wind.dtype)
        indlon = np.round(np.mod(lon - shifted_lon_min, 360.) /
                          dlon).astype('int')
        ind = np.where(indlon < shifted_nlon)[0]
        wind[:, indlon[ind]] = _wind[:, ind]
        lon = np.linspace(shifted_lon_min,
                          shifted_lon_max,
                          num=shifted_nlon,
                          endpoint=True)

    # Construct metadata/geolocation/band(s)
    print 'Construct metadata/geolocation/band(s)'
    metadata = {}
    (dtime, time_range) = stfmt.format_time_and_range(start_time,
                                                      stop_time,
                                                      units='ms')
    metadata['product_name'] = 'SMOSSTORM_SMOS_wind_V4'
    metadata['name'] = os.path.splitext(os.path.basename(infile))[0]
    metadata['datetime'] = dtime
    metadata['time_range'] = time_range
    metadata['source_URI'] = infile
    metadata['source_provider'] = 'ESA'
    metadata['processing_center'] = 'IFREMER'
    metadata['conversion_software'] = 'Syntool'
    metadata['conversion_version'] = '0.0.0'
    metadata['conversion_datetime'] = stfmt.format_time(datetime.utcnow())
    metadata['parameter'] = 'wind speed'
    geolocation = {}
    geolocation['projection'] = stfmt.format_gdalprojection()
    geolocation['geotransform'] = [
        lon[0] - dlon / 2., dlon, 0, lat[0] - dlat / 2., 0, dlat
    ]
    band = []
    indndv = np.where(np.ma.getmaskarray(wind))
    offset, scale = vmin, (vmax - vmin) / 254.
    np.clip(wind.data, vmin, vmax, out=wind.data)
    array = np.round((wind.data - offset) / scale).astype('uint8')
    array[indndv] = 255
    colortable = stfmt.format_colortable('matplotlib_jet',
                                         vmax=vmax,
                                         vmax_pal=vmax_pal,
                                         vmin=vmin,
                                         vmin_pal=vmin_pal)
    band.append({
        'array': array,
        'scale': scale,
        'offset': offset,
        'description': 'wind speed',
        'unittype': 'm/s',
        'nodatavalue': 255,
        'parameter_range': [vmin, vmax],
        'colortable': colortable
    })

    # Write geotiff
    print 'Write geotiff'
    tifffile = stfmt.format_tifffilename(outdir, metadata, create_dir=True)
    stfmt.write_geotiff(tifffile, metadata, geolocation, band)
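
The grid rearrangement above shifts the left part of a global longitude grid
so that the extent covered by valid columns is as small as possible, which
matters when the valid data straddles the antimeridian; the converter only
applies the shift when it actually reduces the extent. Below is a minimal
sketch of the window selection, assuming a sorted array of valid longitudes
(shifted_lon_window is an illustrative name).

import numpy as np


def shifted_lon_window(valid_lon):
    """Minimal sketch: pick the longitude window that excludes the widest gap
    between valid columns of a global grid."""
    valid_lon = np.sort(valid_lon)
    if valid_lon.size == 1:
        return valid_lon[0] - 1., valid_lon[0] + 1.
    gaps = valid_lon[1:] - valid_lon[:-1]
    i = gaps.argmax()
    # Start just after the widest gap and end one full revolution later
    return valid_lon[i + 1], valid_lon[i] + 360.


# Valid data on both sides of the dateline -> window of 4 degrees, not 358
print(shifted_lon_window(np.array([-179., -178., 178., 179.])))
# -> (178.0, 182.0)

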
Example #17
def sar_wind(infile,
             outdir,
             pngkml=False,
             valid_percent_min=1.,
             vmin=0.,
             vmax=25.4,
             vmin_pal=0.,
             vmax_pal=50 * 0.514):
    """
    """
    # Read/Process data
    print 'Read/Process data'
    sarwind = SAFEOCNNCFile(infile, product='WIND')
    mission = sarwind.read_global_attribute('missionName')
    if mission == 'S1A':
        sensor_name = 'Sentinel-1A'
        sensor_platform = 'Sentinel-1A'
        source_provider = 'ESA'
    elif mission == 'S1B':
        sensor_name = 'Sentinel-1B'
        sensor_platform = 'Sentinel-1B'
        source_provider = 'ESA'
    else:
        raise Exception('S1A/S1B missions expected.')
    start_time = sarwind.get_start_time()
    stop_time = sarwind.get_end_time()
    heading = sarwind.read_values('owiHeading')
    if np.sin((90 - heading[0, 0]) * np.pi / 180) > 0:
        sensor_pass = 'Ascending'
    else:
        sensor_pass = 'Descending'
    safe_name = os.path.basename(os.path.dirname(os.path.dirname(infile)))
    sensor_mode = safe_name.split('_')[1]
    if sensor_mode not in ['S1', 'S2', 'S3', 'S4', 'S5', 'S6', 'IW', 'EW']:
        raise Exception('S[1-6]/IW/EW modes expected.')
    sensor_swath = os.path.basename(infile).split('-')[1].upper()
    sensor_polarisation = sarwind.read_global_attribute('polarisation')
    datagroup = safe_name.replace('.SAFE', '')
    pid = datagroup.split('_')[-1]
    dataname = os.path.splitext(os.path.basename(infile))[0] + '-' + pid
    windspeed = sarwind.read_values('owiWindSpeed')
    if windspeed.shape == (1, 1):
        raise Exception('owiRaSize and owiAzSize equal 1!')
    winddirection = sarwind.read_values('owiWindDirection')
    landflag = sarwind.read_values('owiLandFlag')
    inversionquality = sarwind.read_values('owiInversionQuality')
    windquality = sarwind.read_values('owiWindQuality')
    #pbright = sarwind.read_values('owiPBright')
    lon = sarwind.read_values('lon')
    lat = sarwind.read_values('lat')
    if np.ma.is_masked(lon) or np.ma.is_masked(lat):
        raise Exception('Some lon and/or lat is masked.')
    if np.all(lon == 0) or np.all(lat == 0):
        raise Exception('All lon and/or lat set to 0.')
    ngcps = np.ceil(np.array(lon.shape) / 10.).astype('int') + 1
    pix = np.linspace(0, lon.shape[1] - 1,
                      num=ngcps[1]).round().astype('int32')
    lin = np.linspace(0, lon.shape[0] - 1,
                      num=ngcps[0]).round().astype('int32')
    pix2d, lin2d = np.meshgrid(pix, lin)
    gcplon = lon[lin2d, pix2d]
    gcplat = lat[lin2d, pix2d]
    gcppix = pix2d + 0.5
    gcplin = lin2d + 0.5
    gcphei = np.zeros(ngcps)
    ## Make sure lon is continuous (no jump when crossing the IDL)
    ## (if the IDL is crossed, by convention lon is kept around 180 deg)
    # if gcplon.min() < -135 and gcplon.max() > 135:
    #     gcplon[np.where(gcplon < 0)] += 360.
    gcplonmid = gcplon[ngcps[0] / 2, ngcps[1] / 2]
    gcplon = np.mod(gcplon - (gcplonmid - 180.), 360.) + gcplonmid - 180.
    gcplonmin = gcplon.min()
    gcplon = gcplon - np.floor((gcplonmin + 180.) / 360.) * 360.

    # Construct metadata/geolocation/band(s)
    print 'Construct metadata/geolocation/band(s)'
    metadata = {}
    (dtime, time_range) = stfmt.format_time_and_range(start_time,
                                                      stop_time,
                                                      units='ms')
    metadata['product_name'] = 'SAR_wind'
    metadata['name'] = dataname
    metadata['datetime'] = dtime
    metadata['time_range'] = time_range
    metadata['source_URI'] = infile
    metadata['source_provider'] = source_provider
    metadata['processing_center'] = ''
    metadata['conversion_software'] = 'Syntool'
    metadata['conversion_version'] = '0.0.0'
    metadata['conversion_datetime'] = stfmt.format_time(datetime.utcnow())
    metadata['parameter'] = ['wind speed', 'wind direction']
    metadata['type'] = 'remote sensing'
    metadata['sensor_type'] = 'SAR'
    metadata['sensor_name'] = sensor_name
    metadata['sensor_platform'] = sensor_platform
    metadata['sensor_mode'] = sensor_mode
    metadata['sensor_swath'] = sensor_swath
    metadata['sensor_polarisation'] = sensor_polarisation
    metadata['sensor_pass'] = sensor_pass
    metadata['datagroup'] = datagroup
    geolocation = {}
    geolocation['projection'] = stfmt.format_gdalprojection()
    geolocation['gcps'] = stfmt.format_gdalgcps(gcplon, gcplat, gcphei, gcppix,
                                                gcplin)
    band = []
    #mask = landflag != 0
    mask = (landflag != 0) | \
        ((windspeed == 0) & (winddirection == 180)) | \
        ((windspeed == 0) & (windquality == 3)) | \
        ((windspeed == 0) & (inversionquality == 2))
    mask = np.ma.getdata(mask)  # we don't want to sum on a masked mask
    valid_percent = np.sum(~mask) / float(mask.size) * 100
    if valid_percent <= valid_percent_min:
        raise Exception(
            'Not enough valid data: {:0.3f}%'.format(valid_percent))
    # if np.all(mask):
    #     raise Exception('Data is fully masked !')
    offset, scale = vmin, (vmax - vmin) / 254.
    np.clip(windspeed, vmin, vmax, out=windspeed)
    array = np.round((windspeed - offset) / scale).astype('uint8')
    array[mask] = 255
    colortable = stfmt.format_colortable('noaa_wind',
                                         vmax=vmax,
                                         vmax_pal=vmax_pal,
                                         vmin=vmin,
                                         vmin_pal=vmin_pal)
    band.append({
        'array': array,
        'scale': scale,
        'offset': offset,
        'description': 'wind speed',
        'unittype': 'm/s',
        'nodatavalue': 255,
        'parameter_range': [vmin, vmax],
        'colortable': colortable
    })
    winddirection = np.mod(90. - winddirection + 180., 360.)
    array = np.round(winddirection / 360. * 254.).astype('uint8')
    array[mask] = 255
    band.append({
        'array': array,
        'scale': 360. / 254.,
        'offset': 0.,
        'description': 'wind direction',
        'unittype': 'deg',
        'nodatavalue': 255,
        'parameter_range': [0, 360.]
    })

    # Write geotiff
    print 'Write geotiff'
    tifffile = stfmt.format_tifffilename(outdir, metadata, create_dir=True)
    stfmt.write_geotiff(tifffile, metadata, geolocation, band)
    # Write projected png/kml
    if pngkml == True:
        print 'Write projected png/kml'
        stfmt.write_pngkml_proj(tifffile)
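
Several converters above build GDAL ground control points by subsampling the
lon/lat grids roughly every ten pixels and then recentre the GCP longitudes
around the central value so they remain continuous across the dateline. Below
is a minimal sketch of both steps; make_gcps and the sample grid are
hypothetical and stfmt.format_gdalgcps is not reproduced.

import numpy as np


def make_gcps(lon, lat, step=10.):
    """Minimal sketch: subsample 2D lon/lat grids into GCP arrays."""
    ngcps = np.ceil(np.array(lon.shape) / step).astype('int') + 1
    pix = np.linspace(0, lon.shape[1] - 1, num=ngcps[1]).round().astype('int32')
    lin = np.linspace(0, lon.shape[0] - 1, num=ngcps[0]).round().astype('int32')
    pix2d, lin2d = np.meshgrid(pix, lin)
    gcplon = lon[lin2d, pix2d]
    gcplat = lat[lin2d, pix2d]
    # Recenter longitudes around the middle GCP so they stay continuous
    # across the dateline
    lonmid = gcplon[ngcps[0] // 2, ngcps[1] // 2]
    gcplon = np.mod(gcplon - (lonmid - 180.), 360.) + lonmid - 180.
    # Shift back by whole revolutions so the minimum stays above -180
    gcplon = gcplon - np.floor((gcplon.min() + 180.) / 360.) * 360.
    return gcplon, gcplat, pix2d + 0.5, lin2d + 0.5


# Hypothetical 20x30 grid around the dateline
lon2d, lat2d = np.meshgrid(np.linspace(170., 190., 30), np.linspace(-5., 5., 20))
gcplon, gcplat, gcppix, gcplin = make_gcps(lon2d, lat2d)

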
def read_orbit(infile,
               outdir,
               vmin=-0.2,
               vmax=0.2,
               vmin_pal=-0.2,
               vmax_pal=0.2,
               dist_gcp=5,
               process_var=('ssha', 'swh', 'ws', 'sigma0'),
               keep_empty=False,
               log_path=None):
    """
    """
    t0 = datetime.utcnow()
    syntool_stats = {}
    if log_path is not None and not os.path.exists(log_path):
        try:
            os.makedirs(log_path)
        except OSError:
            _, e, _ = sys.exc_info()
            if e.errno != errno.EEXIST:
                raise

    listvar = []
    height = False
    wave = False
    wind = False
    sigma = False
    if 'ssha' in process_var:
        height = True
    if 'swh' in process_var:
        wave = True
    if 'ws' in process_var:
        wind = True
    if 'sigma0' in process_var:
        sigma = True
    if height is False and wave is False and wind is False and sigma is False:
        sys.exit('please specify at least one variable to convert')
    L2id = '1Hz'  #'Ku'
    # Read variables
    dset = Dataset(os.path.join(infile, 'standard_measurement.nc'), 'r')
    ### For debug purposes
    ind_tmp = 0
    lon_0 = dset.variables[L2_MAPS[L2id]['lonname']][ind_tmp:]
    lon_0[lon_0 > 180] = lon_0[lon_0 > 180] - 360
    lat_0 = dset.variables[L2_MAPS[L2id]['latname']][ind_tmp:]
    mask_surface = dset.variables[L2_MAPS[L2id]['surface']][ind_tmp:]
    if height is True:
        ssha_0 = dset.variables[L2_MAPS[L2id]['hname']][ind_tmp:]
        ssha_fill_value = dset.variables[L2_MAPS[L2id]['hname']]._FillValue
        ssha_0[mask_surface > 1] = numpy.nan
    if wave is True:
        swh_0 = dset.variables[L2_MAPS[L2id]['wname']][ind_tmp:]
        swh_fill_value = dset.variables[L2_MAPS[L2id]['wname']]._FillValue
        swh_0[mask_surface > 1] = numpy.nan
    if wind is True:
        ws_0 = dset.variables[L2_MAPS[L2id]['winame']][ind_tmp:]
        ws_fill_value = dset.variables[L2_MAPS[L2id]['winame']]._FillValue
        ws_0[mask_surface > 1] = numpy.nan
    if sigma is True:
        sigma0_0 = dset.variables[L2_MAPS[L2id]['sname']][ind_tmp:]
        sigma0_fill_value = dset.variables[L2_MAPS[L2id]['sname']]._FillValue
        sigma0_0[mask_surface > 1] = numpy.nan

    # var_0 /=  dset.variables[L2_MAPS[L2id]['hname']].scale_factor
    time_0 = dset.variables[L2_MAPS[L2id]['timename']][ind_tmp:]
    time_units = dset.variables[L2_MAPS[L2id]['timename']].units
    dset.close()
    # Trick to deal with continuity in longitude
    dlon = abs(lon_0[1:] - lon_0[:-1])
    lref = lon_0[numpy.shape(lon_0)[0] / 2]
    lon_0 = numpy.mod(lon_0 - (lref - 180), 360) + (lref - 180)
    lon_0 = numpy.rad2deg(numpy.unwrap(numpy.deg2rad(lon_0)))

    # Interpolate on land
    dtime = time_0[1:] - time_0[:-1]
    dlon = abs(lon_0[1:] - lon_0[:-1])
    # delta = numpy.median(dtime)
    if len(time_0) > 3:
        delta = stats.mode(dtime)[0][0]
    else:
        logger.warn('orbit too short')
        return

    t_start = datetime.utcnow()
    ndelta = numpy.round(dtime / delta).astype('int')
    if L2id == '1Hz':
        dtime_threshold = 3
    else:
        dtime_threshold = 3 / 20
    ind_dtime = numpy.where(ndelta >= dtime_threshold)[0]
    if len(ind_dtime) == 0:
        time = time_0
        lon = lon_0
        lat = lat_0
        if height is True:
            ssha = ssha_0
        if wave is True:
            swh = swh_0
        if wind is True:
            ws = ws_0
        if sigma is True:
            sigma0 = sigma0_0
    else:
        time = []
        lon = []
        lat = []
        time.append(time_0[:ind_dtime[0] + 1])
        lon.append(lon_0[:ind_dtime[0] + 1])
        lat.append(lat_0[:ind_dtime[0] + 1])
        func_lon = interpolate.interp1d(time_0, lon_0, kind='cubic')
        func_lat = interpolate.interp1d(time_0, lat_0, kind='cubic')
        if height is True:
            ssha = []
            ssha.append(ssha_0[:ind_dtime[0] + 1])
        if wave is True:
            swh = []
            swh.append(swh_0[:ind_dtime[0] + 1])
        if wind is True:
            ws = []
            ws.append(ws_0[:ind_dtime[0] + 1])
        if sigma is True:
            sigma0 = []
            sigma0.append(sigma0_0[:ind_dtime[0] + 1])
        for i in range(len(ind_dtime)):
            time_fill = numpy.linspace(
                time_0[ind_dtime[i]],
                time_0[ind_dtime[i] + 1],
                num=(
                    (time_0[ind_dtime[i] + 1] - time_0[ind_dtime[i]]) / delta +
                    1))
            time.append(time_fill[1:-1])
            lon_tmp = func_lon(time_fill[1:-1])
            lat_tmp = func_lat(time_fill[1:-1])
            lon.append(lon_tmp)
            lat.append(lat_tmp)
            if height is True:
                ssha_fill = numpy.full(numpy.shape(time_fill[1:-1]), numpy.nan)
                ssha.append(ssha_fill)
            if wave is True:
                swh_fill = numpy.full(numpy.shape(time_fill[1:-1]), numpy.nan)
                swh.append(swh_fill)
            if wind is True:
                ws_fill = numpy.full(numpy.shape(time_fill[1:-1]), numpy.nan)
                ws.append(ws_fill)
            if sigma is True:
                sigma0_fill = numpy.full(numpy.shape(time_fill[1:-1]),
                                         numpy.nan)
                sigma0.append(sigma0_fill)
            if i != (len(ind_dtime) - 1):
                slice_ind = slice(ind_dtime[i] + 1, ind_dtime[i + 1] + 1)
                time.append(time_0[slice_ind])
                lon.append(lon_0[slice_ind])
                lat.append(lat_0[slice_ind])
                if height is True:
                    ssha.append(ssha_0[slice_ind])
                if wave is True:
                    swh.append(swh_0[slice_ind])
                if wind is True:
                    ws.append(ws_0[slice_ind])
                if sigma is True:
                    sigma0.append(sigma0_0[slice_ind])
            else:
                time.append(time_0[ind_dtime[i] + 1:])
                lon.append(lon_0[ind_dtime[i] + 1:])
                lat.append(lat_0[ind_dtime[i] + 1:])
                if height is True:
                    ssha.append(ssha_0[ind_dtime[i] + 1:])
                if wave is True:
                    swh.append(swh_0[ind_dtime[i] + 1:])
                if wind is True:
                    ws.append(ws_0[ind_dtime[i] + 1:])
                if sigma is True:
                    sigma0.append(sigma0_0[ind_dtime[i] + 1:])
        time = numpy.concatenate(time, axis=0)
        lon = numpy.concatenate(lon, axis=0)
        lat = numpy.concatenate(lat, axis=0)
        if height is True:
            ssha = numpy.concatenate(ssha, axis=0)
            ssha[ssha == ssha_fill_value] = numpy.nan
        if wave is True:
            swh = numpy.concatenate(swh, axis=0)
            swh[swh == swh_fill_value] = numpy.nan
        if wind is True:
            ws = numpy.concatenate(ws, axis=0)
            ws[ws == ws_fill_value] = numpy.nan
        if sigma is True:
            sigma0 = numpy.concatenate(sigma0, axis=0)
            sigma0[sigma0 == sigma0_fill_value] = numpy.nan
    t_stop = datetime.utcnow()
    syntool_stats['gapfill_computation'] = (t_stop - t_start).total_seconds()
    lon = lon - numpy.floor((numpy.min(lon) + 180.) / 360.) * 360.
    ntime = numpy.shape(time)[0]
    if height is True:
        nan_mask = numpy.isnan(ssha)
        nan_ind = numpy.where(nan_mask)
        ssha[nan_ind] = 0
        mask_gap_ssha = (nan_mask | (abs(ssha) > 50))
        ssha[nan_ind] = numpy.nan  # Restore nan values
        if not mask_gap_ssha.all():
            _min = numpy.nanmin(ssha[numpy.where(~mask_gap_ssha)])
            _max = numpy.nanmax(ssha[numpy.where(~mask_gap_ssha)])
            syntool_stats['ssha'] = {'min': _min, 'max': _max}
        listvar.append({
            'var': ssha,
            'mask_gap': mask_gap_ssha,
            'range': [-0.4, 0.4],
            'parameter': 'SSHA',
            'palette': 'matplotlib_jet',
            'range_pal': [-0.4, 0.4]
        })
    if wave is True:
        nan_mask = numpy.isnan(swh)
        nan_ind = numpy.where(nan_mask)
        swh[nan_ind] = 0
        mask_gap_swh = (nan_mask | (abs(swh) > 50))
        swh[nan_ind] = numpy.nan  # Restore nan values
        if not mask_gap_swh.all():
            _min = numpy.nanmin(swh[numpy.where(~mask_gap_swh)])
            _max = numpy.nanmax(swh[numpy.where(~mask_gap_swh)])
            syntool_stats['swh'] = {'min': _min, 'max': _max}
        listvar.append({
            'var': swh,
            'mask_gap': mask_gap_swh,
            'range': [0., 8.],
            'parameter': 'SWH',
            'palette': 'matplotlib_jet',
            'range_pal': [0., 8.]
        })
    if wind is True:
        nan_mask = numpy.isnan(ws)
        nan_ind = numpy.where(nan_mask)
        ws[nan_ind] = 0
        mask_gap_ws = (nan_mask | (abs(ws) > 200))
        ws[nan_ind] = numpy.nan  # Restore nan values
        if not mask_gap_ws.all():
            _min = numpy.nanmin(ws[numpy.where(~mask_gap_ws)])
            _max = numpy.nanmax(ws[numpy.where(~mask_gap_ws)])
            syntool_stats['ws'] = {'min': _min, 'max': _max}
        listvar.append({
            'var': ws,
            'mask_gap': mask_gap_ws,
            'range': [0., 25.],
            'parameter': 'Ws',
            'palette': 'noaa_wind',
            'range_pal': [0., 25.]
        })
    if sigma is True:
        nan_mask = numpy.isnan(sigma0)
        nan_ind = numpy.where(nan_mask)
        sigma0[nan_ind] = 0
        mask_gap_sigma0 = (nan_mask | (sigma0 > 100))
        sigma0[nan_ind] = numpy.nan  # Restore nan values
        if not mask_gap_sigma0.all():
            _min = numpy.nanmin(sigma0[numpy.where(~mask_gap_sigma0)])
            _max = numpy.nanmax(sigma0[numpy.where(~mask_gap_sigma0)])
            syntool_stats['sigma0'] = {'min': _min, 'max': _max}
        listvar.append({
            'var': sigma0,
            'mask_gap': mask_gap_sigma0,
            'range': [5., 25.],
            'parameter': 'Sigma0',
            'palette': 'matplotlib_jet',
            'range_pal': [5., 25.]
        })
    start_time = num2date(time[0], time_units)
    end_time = num2date(time[-1], time_units)
    # NOTE : lon/lat must be continuous even if crossing dateline
    # (ie. no [-180,180] clipping)
    # Make GCPs (mimic a swath of arbitrary width in lon/lat, here ~5km)
    # gcps = tools_for_gcp.make_gcps_v1(lon, lat, dist_gcp=dist_gcp)
    gcps = tools_for_gcp.make_gcps_v2(lon, lat, dist_gcp=dist_gcp)
    file_name, _ = os.path.splitext(os.path.basename(os.path.normpath(infile)))
    for i in range(len(listvar)):
        metadata = {}
        (dtime, time_range) = stfmt.format_time_and_range(start_time,
                                                          end_time,
                                                          units='s')
        metadata['product_name'] = (L2_MAPS[L2id]['productname'] + '_' +
                                    listvar[i]['parameter'])
        metadata['name'] = file_name
        metadata['datetime'] = dtime
        metadata['time_range'] = time_range
        geolocation = {}
        geolocation['projection'] = stfmt.format_gdalprojection()
        geolocation['gcps'] = stfmt.format_gdalgcps(*gcps)
        band = []
        vmin = listvar[i]['range'][0]
        vmax = listvar[i]['range'][1]
        vmin_pal = listvar[i]['range_pal'][0]
        vmax_pal = listvar[i]['range_pal'][1]
        scale = (vmax - vmin) / 254.
        offset = vmin
        # Avoid warnings caused by NaN values
        #ssha[numpy.where(~(numpy.isfinite(mask_gap)))] = 255
        var = listvar[i]['var']
        mask_gap = listvar[i]['mask_gap']
        var[numpy.where(mask_gap)] = 255
        array = numpy.clip(numpy.round((var - offset) / scale), 0,
                           254).astype('uint8')
        array[numpy.where(mask_gap)] = 255
        array = array[:, numpy.newaxis]
        if not keep_empty and numpy.all(array == 255):
            parameter = listvar[i]['parameter']
            logger.warn('No valid values in this dataset for ' \
                        '{}'.format(parameter))
            logger.warn('Skipped.')
            continue
        colortable = stfmt.format_colortable(listvar[i]['palette'],
                                             vmax=vmax,
                                             vmax_pal=vmax_pal,
                                             vmin=vmin,
                                             vmin_pal=vmin_pal)
        band.append({
            'array': array,
            'scale': scale,
            'offset': offset,
            'description': listvar[i]['parameter'],
            'unittype': 'm',
            'nodatavalue': 255,
            'parameter_range': [vmin, vmax],
            'colortable': colortable
        })
        tifffile = stfmt.format_tifffilename(outdir, metadata, create_dir=True)
        stfmt.write_geotiff(tifffile, metadata, geolocation, band)

    # Check GCPs
    # tools_for_gcp.check_gcps(tifffile, lon, lat, gcps[0], gcps[1])

    syntool_stats['total_time'] = (datetime.utcnow() - t0).total_seconds()
    if log_path is not None:
        import json
        stats_path = os.path.join(log_path, '{}.json'.format(file_name))
        with open(stats_path, 'w') as f:
            json.dump(syntool_stats, f)
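
read_orbit fills temporal gaps in the along-track data by interpolating
lon/lat over the missing samples and padding the geophysical variable with
NaN, so gaps end up as nodata in the output. Below is a simplified,
hypothetical sketch of that idea for a single variable: it fills every gap
larger than one nominal interval, whereas the converter only fills gaps above
a threshold, and it assumes at least four samples for the cubic interpolation.

import numpy as np
from scipy import interpolate


def fill_track_gaps(time, lon, lat, var, delta):
    """Minimal sketch: insert interpolated lon/lat samples (and NaN data)
    wherever the sampling interval exceeds the nominal delta."""
    f_lon = interpolate.interp1d(time, lon, kind='cubic')
    f_lat = interpolate.interp1d(time, lat, kind='cubic')
    new_t, new_lon, new_lat, new_var = [time[0]], [lon[0]], [lat[0]], [var[0]]
    for i in range(1, time.size):
        nstep = int(np.round((time[i] - time[i - 1]) / delta))
        if nstep > 1:
            # Interior points of the gap: interpolated positions, NaN data
            t_fill = np.linspace(time[i - 1], time[i], num=nstep + 1)[1:-1]
            new_t.extend(t_fill)
            new_lon.extend(f_lon(t_fill))
            new_lat.extend(f_lat(t_fill))
            new_var.extend(np.full(t_fill.shape, np.nan))
        new_t.append(time[i])
        new_lon.append(lon[i])
        new_lat.append(lat[i])
        new_var.append(var[i])
    return [np.array(x) for x in (new_t, new_lon, new_lat, new_var)]

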
Example #19
def read_geometry(infile, bandnames, fname, sltype, view, product_name, vmin,
                  vmax, log_path):
    """ Read coordinates from geometry SLSTR file """
    # Initiate log file
    syntool_stats = {}
    for bandname in bandnames:
        syntool_stats[bandname] = {}
    if log_path is not None and not os.path.exists(log_path):
        try:
            os.makedirs(log_path)
        except OSError:
            _, e, _ = sys.exc_info()
            if e.errno != errno.EEXIST:
                raise

    if vmin is None:
        vmin = [None] * len(bandnames)
    if vmax is None:
        vmax = [None] * len(bandnames)

    full_path = os.path.normpath(infile)
    file_path = os.path.basename(full_path)
    file_name, _ = os.path.splitext(file_path)

    geo_filename = 'geodetic_{}{}.nc'.format(sltype, view)
    geo_path = os.path.join(infile, geo_filename)
    lat_varname = 'latitude_{}{}'.format(sltype, view)
    lon_varname = 'longitude_{}{}'.format(sltype, view)

    # Extract geo coordinates information
    geo_handler = netCDF4.Dataset(geo_path, 'r')
    nrow = geo_handler.dimensions['rows'].size
    nrow_all = nrow
    ncell = geo_handler.dimensions['columns'].size
    ncell_all = ncell
    lon = geo_handler.variables[lon_varname][:]
    tie_lon = numpy.ma.array(lon)
    tie_lon._sharedmask = False
    lat = geo_handler.variables[lat_varname][:]
    tie_lat = numpy.ma.array(lat)
    start_time_str = geo_handler.start_time
    stop_time_str = geo_handler.stop_time
    geo_handler.close()

    # Handle longitude range
    dlon = tie_lon[1:, :] - tie_lon[:-1, :]
    if 180.0 <= numpy.max(numpy.abs(dlon)):
        tie_lon[tie_lon < 0.0] = tie_lon[tie_lon < 0.0] + 360.0

    # Extract time coordinates information
    # Format time information
    start_time = datetime.strptime(start_time_str, "%Y-%m-%dT%H:%M:%S.%fZ")
    end_time = datetime.strptime(stop_time_str, "%Y-%m-%dT%H:%M:%S.%fZ")
    (dtime, time_range) = stfmt.format_time_and_range(start_time,
                                                      end_time,
                                                      units='ms')
    month = start_time.month

    parameters = ['{} TOA {}'.format(bnd, fname) for bnd in bandnames]
    metadata = {}
    metadata['product_name'] = '{}'.format(product_name)
    metadata['name'] = file_name
    metadata['datetime'] = dtime
    metadata['time_range'] = time_range
    metadata['source_URI'] = infile
    metadata['source_provider'] = 'ESA'
    metadata['processing_center'] = 'OceanDataLab'
    metadata['conversion_software'] = 'Syntool'
    metadata['conversion_version'] = '0.0.0'
    metadata['conversion_datetime'] = stfmt.format_time(datetime.utcnow())
    metadata['parameter'] = parameters
    metadata['type'] = 'remote sensing'
    metadata['sensor_type'] = 'dual scan temperature radiometer'
    metadata['sensor_name'] = 'SLSTR'
    metadata['sensor_platform'] = 'Sentinel-3'

    # Crop high latitude to avoid projection issues
    LAT_MAX = 89.0
    ind_valid_cols = numpy.where(numpy.abs(tie_lat).max(axis=0) <= LAT_MAX)[0]
    slice_lat0 = slice(None)
    slice_lat1 = slice(numpy.min(ind_valid_cols),
                       numpy.max(ind_valid_cols) + 1)
    tie_lat = tie_lat[slice_lat0, slice_lat1]
    tie_lon = tie_lon[slice_lat0, slice_lat1]
    nrow, ncell = tie_lat.shape

    # Handle longitude continuity
    dlon = tie_lon[1:, :] - tie_lon[:-1, :]
    if 180.0 <= numpy.max(numpy.abs(dlon)):
        lon0 = tie_lon[0, 0] + 180.0
        tie_lon._sharedmask = False
        tie_lon[:, :] = numpy.mod(tie_lon[:, :] - lon0, 360.0) + lon0

    # Compute GCPs
    tie_row = numpy.linspace(0, nrow - 1, num=tie_lon.shape[0])
    tie_cell = numpy.linspace(0, ncell - 1, num=tie_lon.shape[1])
    tie_facrow = (nrow - 1.) / (tie_lon.shape[0] - 1.)
    tie_faccell = (ncell - 1.) / (tie_lon.shape[1] - 1.)
    gcp_fac = 128
    gcp_fac = numpy.maximum(gcp_fac, numpy.maximum(tie_faccell, tie_facrow))
    gcp_nrow = numpy.ceil((nrow - 1.) / gcp_fac).astype('int') + 1
    gcp_ncell = numpy.ceil((ncell - 1.) / gcp_fac).astype('int') + 1
    tie_indrow = numpy.round(
        numpy.linspace(0, tie_lon.shape[0] - 1, num=gcp_nrow)).astype('int')
    tie_indcell = numpy.round(
        numpy.linspace(0, tie_lon.shape[1] - 1, num=gcp_ncell)).astype('int')
    gcp_lon = tie_lon[tie_indrow.reshape((-1, 1)),
                      tie_indcell.reshape((1, -1))]
    gcp_lat = tie_lat[tie_indrow.reshape((-1, 1)),
                      tie_indcell.reshape((1, -1))]
    gcp_row = numpy.tile(tie_row[tie_indrow].reshape((-1, 1)) + 0.5,
                         (1, gcp_ncell))
    gcp_cell = numpy.tile(tie_cell[tie_indcell].reshape((1, -1)) + 0.5,
                          (gcp_nrow, 1))
    gcp_hei = numpy.zeros(gcp_lon.shape)
    ngcps = gcp_lon.shape

    geolocation = {}
    geolocation['projection'] = stfmt.format_gdalprojection()
    geolocation['gcps'] = stfmt.format_gdalgcps(gcp_lon, gcp_lat, gcp_hei,
                                                gcp_cell, gcp_row)

    syntool_stats['lon_min'] = float(numpy.min(tie_lon))
    syntool_stats['lon_max'] = float(numpy.max(tie_lon))
    syntool_stats['lat_min'] = float(numpy.min(tie_lat))
    syntool_stats['lat_max'] = float(numpy.max(tie_lat))
    return (syntool_stats, metadata, geolocation, tie_lon, tie_lat, slice_lat0,
            slice_lat1, nrow_all, ncell_all, ngcps, month)
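
read_geometry crops columns whose latitude ever exceeds 89 degrees before
computing GCPs, since near-polar pixels cause projection issues downstream.
Below is a minimal sketch of that column cropping
(crop_high_latitude_columns is a hypothetical helper).

import numpy as np


def crop_high_latitude_columns(lat2d, lat_max=89.0):
    """Minimal sketch: slice keeping only columns whose absolute latitude
    never exceeds lat_max."""
    valid_cols = np.where(np.abs(lat2d).max(axis=0) <= lat_max)[0]
    return slice(valid_cols.min(), valid_cols.max() + 1)

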
Example #20
def read_orbit(infile,
               outdir,
               vmin=-0.2,
               vmax=0.2,
               vmin_pal=-0.2,
               vmax_pal=0.2,
               dist_gcp=None,
               write_netcdf=False):
    """
    """
    if (re.match(r'^dt_global_j2_adt_vfec_.*\.nc', os.path.basename(infile))
            is not None):
        L3id = 'J2_ADT'
        # vmin = -0.2; vmax = 0.2 ; vmin_pal = -0.2 ; vmax_pal = 0.2
    elif (re.match(r'^dt_global_al_adt_vfec.*\.nc', os.path.basename(infile))
          is not None):
        L3id = 'AL_ADT'
        # vmin = -0.2 ; vmax = 0.2 ; vmin_pal = -0.2 ; vmax_pal = 0.2
    elif (re.match(r'^dt_global_j2_sla_vfec.*\.nc', os.path.basename(infile))
          is not None):
        L3id = 'J2_SLA'
        # vmin = -0.2 ; vmax = 0.2 ; vmin_pal = -0.2 ; vmax_pal = 0.2
    elif (re.match(r'^dt_global_al_sla_vfec.*\.nc', os.path.basename(infile))
          is not None):
        L3id = 'AL_SLA'
        # vmin = -0.2 ; vmax = 0.2 ; vmin_pal = -0.2 ; vmax_pal = 0.2

    # Read
    dset = Dataset(infile)
    lon_all = dset.variables['longitude'][:]
    lat_all = dset.variables['latitude'][:]
    var_all = dset.variables[L3_MAPS[L3id]['hname']][:]
    time_all = dset.variables['time'][:]
    ipass_all = dset.variables['track'][:]
    cycle_all = dset.variables['cycle'][:]
    time_units = dset.variables['time'].units
    var_fill_value = dset.variables[L3_MAPS[L3id]['hname']]._FillValue
    var_all[var_all == var_fill_value] = np.nan
    dset.close()

    # Detect passes index
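    # ind_ipass holds the boundaries of each pass: the indices where the
    # track number changes, plus the first and last samples.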
    dipass = ipass_all[1:] - ipass_all[:-1]
    ind_ipass = np.where(dipass != 0)[0]
    ind_ipass += 1
    ind_ipass = np.hstack([0, ind_ipass, len(ipass_all)])

    # Loop on all passes and compute geotiff for each pass
    for i in range(len(ind_ipass) - 1):
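        # Recenter longitudes around the middle of the pass, then unwrap them
        # so the track stays continuous across the dateline.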
        lon_0 = lon_all[ind_ipass[i]:ind_ipass[i + 1]]
        # lon_0 = np.mod(lon_0 + 180, 360) - 180
        lref = lon_0[np.shape(lon_0)[0] / 2]
        lon_0 = np.mod(lon_0 - (lref - 180), 360) + (lref - 180)
        #lon_0 = np.mod(lon_0 - np.min(lon_0), 360) - lref # - 180
        lon_0 = np.rad2deg(np.unwrap(np.deg2rad(lon_0)))
        lat_0 = lat_all[ind_ipass[i]:ind_ipass[i + 1]]
        var_0 = var_all[ind_ipass[i]:ind_ipass[i + 1]]
        time_0 = time_all[ind_ipass[i]:ind_ipass[i + 1]]
        ipass_0 = ipass_all[ind_ipass[i]:ind_ipass[i + 1]]
        cycle_0 = cycle_all[ind_ipass[i]:ind_ipass[i + 1]]
        dtime = time_0[1:] - time_0[:-1]
        dlon = abs(lon_0[1:] - lon_0[:-1])
        if (np.array(dlon) > 180).any():
            lon_0 = np.mod(lon_0, 360)
        if len(time_0) > 3:
            delta = stats.mode(dtime)[0][0]
        else:
            continue
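        # Mark along-track gaps: intervals spanning two or more nominal time
        # steps are padded below with NaN samples, so lon/lat get interpolated
        # across the gap but the variable is flagged as nodata there.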
        ndelta = np.round(dtime / delta).astype('int')
        ind_dtime = np.where(ndelta >= 2)[0]
        if ind_dtime.size != 0:
            time = time_0[:ind_dtime[0] + 1]
            var = var_0[:ind_dtime[0] + 1]
        else:
            time = time_0
            var = var_0
        for j in range(len(ind_dtime)):
            # time_fill = np.linspace(time_0[ind_dtime[j]],
            #                         time_0[ind_dtime[j] + 1],
            #                         num=(time_0[ind_dtime[j] + 1]
            #                         - time_0[ind_dtime[j]]) / delta)
            time_fill = np.linspace(time_0[ind_dtime[j]],
                                    time_0[ind_dtime[j] + 1],
                                    num=ndelta[ind_dtime[j]],
                                    endpoint=False)
            var_fill = np.zeros(np.shape(time_fill[1:])) * np.nan
            # time = time.append(time_fill[1:])
            time = np.hstack([time, time_fill[1:]])
            # var = var.append(var_fill)
            var = np.hstack([var, var_fill])
            if j != (len(ind_dtime) - 1):
                time = np.hstack(
                    [time, time_0[ind_dtime[j] + 1:ind_dtime[j + 1] + 1]])
                var = np.hstack(
                    [var, var_0[ind_dtime[j] + 1:ind_dtime[j + 1] + 1]])
            else:
                time = np.hstack([time, time_0[ind_dtime[j] + 1:]])
                var = np.hstack([var, var_0[ind_dtime[j] + 1:]])
        # time = np.concatenate(time)
        # var = np.concatenate(var)
        func = interpolate.interp1d(time_0, lon_0, kind='quadratic')
        lon = func(time)
        func = interpolate.interp1d(time_0, lat_0, kind='quadratic')
        lat = func(time)
        ssha = var
        mask_gap = np.isnan(ssha)
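        # Shift longitudes by a multiple of 360 degrees so that the minimum
        # falls within [-180, 180), keeping the track continuous.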
        lon = lon - np.floor((np.min(lon) + 180.) / 360.) * 360.
        time = np.float64(time)
        start_time = num2date(time[0], time_units)
        end_time = num2date(time[-1], time_units)
        time = num2date(time, units=time_units)
        time_num = time
        for t in range(np.shape(time)[0]):
            time_num[t] = date2num(
                time[t],
                units='microseconds since 1970-01-01 00:00:00.000000Z')
            time_num[t] *= 10**(-6)
        #time = (num2date(time, time_units)
        #        - calendar.timegm(cnes_day.timetuple()))
        # for t in range(np.shape(time)[0]):
        #    time[t] = date2num(num2date(time[t], units) - cnes_day, unit=)
        #time = time.total_seconds()
        #time = (time + calendar.timegm(julian_day.timetuple())
        #        - calendar.timegm(cnes_day.timetuple()))
        # NOTE : lon/lat must be continuous even if crossing dateline
        # (ie. no [-180,180] clipping)
        # Make GCPs (mimic a swath of arbitrary width in lon/lat, here ~5km)
        # gcps = tools_for_gcp.make_gcps_v1(lon, lat, dist_gcp=dist_gcp)
        gcps = tools_for_gcp.make_gcps_v2(lon, lat, dist_gcp=dist_gcp)
        # Write geotiff
        # NOTE : product_name to be changed, set here for test
        metadata = {}
        (dtime, time_range) = stfmt.format_time_and_range(start_time,
                                                          end_time,
                                                          units='s')
        metadata['product_name'] = L3_MAPS[L3id]['productname']
        dname = (os.path.splitext(os.path.basename(infile))[0] + '_c' +
                 str(int(cycle_0[0])).zfill(4) + '_p' +
                 str(int(ipass_0[0])).zfill(3))
        metadata['name'] = dname
        metadata['datetime'] = dtime
        metadata['time_range'] = time_range
        metadata['begin_time'] = start_time.strftime(TIMEFMT)
        metadata['end_time'] = end_time.strftime(TIMEFMT)
        metadata['source_URI'] = infile
        metadata['source_provider'] = 'AVISO'
        metadata['processing_center'] = ''
        metadata['conversion_version'] = '0.0.0'
        metadata['conversion_datetime'] = stfmt.format_time(datetime.utcnow())
        metadata['type'] = 'along_track'
        metadata['cycle'] = int(cycle_0[0])
        metadata['pass'] = int(ipass_0[0])
        metadata['spatial_resolution'] = np.float32(7000)
        geolocation = {}
        geolocation['projection'] = stfmt.format_gdalprojection()
        geolocation['gcps'] = stfmt.format_gdalgcps(*gcps)
        band = []
        mask = np.ma.getmaskarray(ssha)
        if write_netcdf:
            vmin = np.nanmin(ssha)
            vmin_pal = vmin
            vmax = np.nanmax(ssha)
            vmax_pal = vmax
            #print('bla')
        scale = (vmax - vmin) / 254.
        offset = vmin
        array = np.clip(np.round((ssha - offset) / scale), 0,
                        254).astype('uint8')
        array[mask] = 255
        array[mask_gap] = 255
        if write_netcdf is False:
            array = array[:, np.newaxis]
        colortable = stfmt.format_colortable('matplotlib_jet',
                                             vmax=vmax,
                                             vmax_pal=vmax_pal,
                                             vmin=vmin,
                                             vmin_pal=vmin_pal)
        band.append({
            'array': array,
            'scale': scale,
            'offset': offset,
            'description': L3_MAPS[L3id]['parameter'],
            'name': L3_MAPS[L3id]['hname'],
            'unittype': 'm',
            'nodatavalue': 255,
            'parameter_range': [vmin, vmax],
            'colortable': colortable
        })
        if write_netcdf is False:
            tifffile = stfmt.format_tifffilename(outdir,
                                                 metadata,
                                                 create_dir=True)
            stfmt.write_geotiff(tifffile, metadata, geolocation, band)
        else:
            geolocation['geotransform'] = [lon, lat]
            geolocation['time'] = time_num[:]
            netcdffile = stfmt.format_ncfilename(outdir,
                                                 metadata,
                                                 create_dir=True)
            write_netcdf_1d(netcdffile,
                            metadata,
                            geolocation,
                            band,
                            model='along_track',
                            dgcps=1)
def sentinel2_rgb(
        infile,
        outdir,
        # For output resolution
        overview_index=None,
        downsampling=2,
        # For manual contrast
        vmin=[None, None, None],
        vmax=[None, None, None],
        # For auto contrast
        contrast_overview_index=2,
        landmaskpath=None,
        slope_threshold=-40.,
        debug_fig_dir=None,
        atmos_correction=0,
        atmos_lut_path=None,
        vmax_factor=None,
        # For output type
        write_netcdf=False):
    """
    """
    # Identify stitching groups
    print 'Identify stitching group(s)'
    groups = safemsil1c_stitching_groups(infile)
    projs = groups.keys()
    for proj, urls in groups.iteritems():
        print '    {} : {} granule(s)'.format(proj, len(urls))
    datagroup = make_datagroup(groups)

    # Set contrast
    if None in vmin or None in vmax:
        print 'Set contrast'
        vmin, vmax = set_contrast(list(vmin),
                                  list(vmax),
                                  groups,
                                  overview_index=contrast_overview_index,
                                  landmaskpath=landmaskpath,
                                  slope_threshold=slope_threshold,
                                  debug_fig_dir=debug_fig_dir,
                                  atmos_correction=atmos_correction,
                                  atmos_lut_path=atmos_lut_path)
    print 'vmin = {:0.4f} / {:0.4f} / {:0.4f}'.format(*vmin)
    print 'vmax = {:0.4f} / {:0.4f} / {:0.4f}'.format(*vmax)
    if vmax_factor is not None:
        print 'Apply vmax_factor={}'.format(vmax_factor)
        _vmax = []
        for vmi, vma in zip(vmin, vmax):
            _vmax.append(vmi + vmax_factor * (vma - vmi))
        vmax = _vmax
        print 'new vmax = {:0.4f} / {:0.4f} / {:0.4f}'.format(*vmax)

    # Build geotiff or netcdf
    print 'Build geotiff or netcdf'
    bandnames = ['B04', 'B03', 'B02']
    for proj in projs:
        # Open stitched mapper
        print '    {} : open mapper with overview {}'.format(
            proj, overview_index)
        t0 = datetime.utcnow()
        mapper = SAFEMSIL1CStitchedFile(groups[proj],
                                        native_resolution='10m',
                                        overview_index=overview_index,
                                        tight=True)
        mapper.open()
        print '        {}'.format(datetime.utcnow() - t0)

        # Construct bands
        bands = []
        qvalue = mapper.read_global_attribute('quantification_value')
        for iband, bandname in enumerate(bandnames):
            fieldname = '{}_digital_number'.format(bandname)
            print '    {} : read {}'.format(proj, fieldname)
            t0 = datetime.utcnow()
            band = mapper.read_values(fieldname)
            print '        {}'.format(datetime.utcnow() - t0)
            if downsampling != 1:
                print '    {} : downsample by {}'.format(proj, downsampling)
                t0 = datetime.utcnow()
                shp = list(band.shape)
                shp[0] -= np.mod(shp[0], downsampling)
                shp[1] -= np.mod(shp[1], downsampling)
                sli = [slice(0, shp[0]), slice(0, shp[1])]
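                # Block-average downsampling: reshape each axis into
                # (blocks, downsampling) and average over the two block axes;
                # a pixel is masked if any contributing pixel is masked.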
                rshp = (shp[0] / downsampling, downsampling,
                        shp[1] / downsampling, downsampling)
                if not np.ma.is_masked(band):
                    mask = np.ma.nomask
                else:
                    mask = band[sli].mask.reshape(rshp).\
                           sum(axis=3, dtype='uint8').\
                           sum(axis=1, dtype='uint8') > 0
                band = np.ma.MaskedArray(band[sli].data.reshape(rshp).\
                                         mean(axis=3, dtype='uint16').\
                                         mean(axis=1, dtype='uint16'),
                                         mask=mask)
                del mask
                print '        {}'.format(datetime.utcnow() - t0)
            print '    {} : bytescale in [{}, {}]'.format(
                proj, vmin[iband], vmax[iband])
            t0 = datetime.utcnow()
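            # Convert the reflectance bounds to digital numbers using the L1C
            # quantification value before byte-scaling.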
            vmin_dn = np.round(vmin[iband] * qvalue)
            vmax_dn = np.round(vmax[iband] * qvalue)
            byte = bytescale(band.data,
                             cmin=vmin_dn,
                             cmax=vmax_dn,
                             low=0,
                             high=254)
            if band.mask is not np.ma.nomask:
                byte[band.mask] = 255
            del band
            scale = (vmax[iband] - vmin[iband]) / 254.
            offset = vmin[iband]
            description = '{} TOA reflectance'.format(bandname)
            bands.append({
                'array': byte,
                'scale': scale,
                'offset': offset,
                'description': description,
                'unittype': '',
                'nodatavalue': 255,
                'parameter_range': [vmin[iband], vmax[iband]]
            })
            print '        {}'.format(datetime.utcnow() - t0)

        # Make sure nodata are at the same locations in all bands
        mask = np.any([band['array'] == 255 for band in bands], axis=0)
        for band in bands:
            band['array'][mask] = 255

        # Construct metadata and geolocation
        print '    {} : construct metadata and geolocation'.format(proj)
        t0 = datetime.utcnow()
        cs_code = mapper.read_global_attribute('horizontal_cs_code')
        epsg_num = cs_code.lower().lstrip('epsg:')
        dataname = '{}-{}'.format(datagroup, epsg_num)
        start_time = mapper.get_start_time()
        end_time = mapper.get_end_time()
        (dtime, time_range) = stfmt.format_time_and_range(start_time,
                                                          end_time,
                                                          units='ms')
        sensor_pass = mapper.read_global_attribute(
            'sensing_orbit_direction').lower()
        metadata = {}
        metadata['product_name'] = 'Sentinel-2_RGB'
        metadata['name'] = dataname
        metadata['datetime'] = dtime
        metadata['time_range'] = time_range
        metadata['source_URI'] = infile
        metadata['source_provider'] = 'ESA'
        metadata['processing_center'] = 'OceanDataLab'
        metadata['conversion_software'] = 'Syntool'
        metadata['conversion_version'] = '0.0.0'
        metadata['conversion_datetime'] = stfmt.format_time(datetime.utcnow())
        metadata['parameter'] = [
            'B04 TOA reflectance', 'B03 TOA reflectance', 'B02 TOA reflectance'
        ]
        metadata['type'] = 'remote sensing'
        metadata['sensor_type'] = 'multi-spectral imager'
        metadata['sensor_name'] = 'MSI'
        metadata['sensor_platform'] = 'Sentinel-2'
        metadata['sensor_pass'] = sensor_pass
        metadata['datagroup'] = datagroup
        srs = osr.SpatialReference()
        srs.ImportFromEPSG(int(epsg_num))
        ulx = mapper.read_global_attribute('ulx')
        dx = mapper.read_global_attribute('xdim') * downsampling
        uly = mapper.read_global_attribute('uly')
        dy = mapper.read_global_attribute('ydim') * downsampling
        geolocation = {}
        geolocation['projection'] = srs.ExportToWkt()
        geolocation['geotransform'] = [ulx, dx, 0, uly, 0, dy]
        print '        {}'.format(datetime.utcnow() - t0)

        # Write geotiff or netcdf
        mapper.close()
        if write_netcdf == False:
            print '    {} : write geotiff'.format(proj)
            t0 = datetime.utcnow()
            tifffile = stfmt.format_tifffilename(outdir,
                                                 metadata,
                                                 create_dir=True)
            stfmt.write_geotiff(tifffile, metadata, geolocation, bands)
            print '        {}'.format(datetime.utcnow() - t0)
        elif write_netcdf == True:
            print '    {} : write netcdf'.format(proj)
            t0 = datetime.utcnow()
            ncfile = stfmt.format_ncfilename(outdir, metadata, create_dir=True)
            bands[0]['name'] = 'B04_TOA_reflectance'
            bands[1]['name'] = 'B03_TOA_reflectance'
            bands[2]['name'] = 'B02_TOA_reflectance'
            resolution = min([abs(dy), abs(dx)])
            metadata['spatial_resolution'] = resolution
            dgcps_meter = 25000.
            dgcps = (np.round(dgcps_meter / resolution).astype('int'), ) * 2
            stfmt.write_netcdf(ncfile,
                               metadata,
                               geolocation,
                               bands,
                               'grid_proj',
                               dgcps=dgcps)
            print '        {}'.format(datetime.utcnow() - t0)
Example #22
def sar_wave(infile, outdir, pngkml=False):
    """
    """
    # Read/Process data
    print 'Read/Process data'
    sarwave = SAFEOCNNCFile(infile, product='WAVE')
    mission = sarwave.read_global_attribute('missionName')
    if mission == 'S1A':
        sensor_name = 'Sentinel-1A'
        sensor_platform = 'Sentinel-1A'
        source_provider = 'ESA'
    else:
        raise Exception('S1A mission expected.')
    # start_time = sarwave.get_start_time() # WARNING : whole SAFE for imagettes !
    # stop_time = sarwave.get_end_time() # WARNING : whole SAFE for imagettes !
    start_t = sarwave.read_global_attribute('firstMeasurementTime')
    if '.' in start_t:
        start_time = datetime.strptime(start_t, '%Y-%m-%dT%H:%M:%S.%fZ')
    else:
        start_time = datetime.strptime(start_t, '%Y-%m-%dT%H:%M:%SZ')
    stop_t = sarwave.read_global_attribute('lastMeasurementTime')
    if '.' in stop_t:
        stop_time = datetime.strptime(stop_t, '%Y-%m-%dT%H:%M:%S.%fZ')
    else:
        stop_time = datetime.strptime(stop_t, '%Y-%m-%dT%H:%M:%SZ')
    heading = sarwave.read_values('oswHeading')
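    # sin(90 - heading) is the northward component of the platform heading:
    # positive means an ascending pass, negative a descending one.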
    if np.sin((90 - heading[0, 0]) * np.pi / 180) > 0:
        sensor_pass = 'ascending'
    else:
        sensor_pass = 'descending'
    safe_name = os.path.basename(os.path.dirname(os.path.dirname(infile)))
    sensor_mode = safe_name.split('_')[1]
    if sensor_mode not in ['WV', 'S1', 'S2', 'S3', 'S4', 'S5', 'S6']:
        raise Exception('WV/S[1-6] modes expected.')
    sensor_swath = os.path.basename(infile).split('-')[1].upper()
    sensor_polarisation = sarwave.read_global_attribute('polarisation')
    datagroup = safe_name.replace('.SAFE', '')
    pid = datagroup.split('_')[-1]
    dataname = os.path.splitext(os.path.basename(infile))[0] + '-' + pid

    # Make wave spectrum figure
    spec = sarwave.read_values('oswPolSpec')
    k = sarwave.read_values('oswK')
    phi = sarwave.read_values('oswPhi')
    npartitions = sarwave.get_dimsize('oswPartitions')
    partitions = sarwave.read_values('oswPartitions')
    # TMP Bug : there are now 3 partitions, numbered 0, 1 and 3 ...
    if npartitions == 3:
        indp2 = np.where(partitions == 2)
        indp3 = np.where(partitions == 3)
        if indp2[0].size == 0 and indp3[0].size != 0:
            partitions[indp3] = 2
    # /TMP
    hs = sarwave.read_values('oswHs')
    flag = sarwave.read_values('oswLandFlag')
    if sensor_mode == 'WV':
        imnum = int(
            os.path.splitext(os.path.basename(infile))[0].split('-')[-1])
    else:
        imnum = None
    spec_size = (512, 512)
    fontsize = 'small'
    cmap = getColorMap(rgbFile='wind.pal')
    fig = make_spec_fig(spec,
                        k,
                        phi,
                        heading,
                        npartitions,
                        partitions,
                        hs,
                        flag,
                        imnum=imnum,
                        spec_size=spec_size,
                        fontsize=fontsize,
                        cmap=cmap)
    rgb = fig2rgb(fig)
    plt.close(fig)

    # Make geolocation
    if sensor_mode == 'WV':
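        # WV imagettes only provide the centre lon/lat: build four corner GCPs
        # by moving half the azimuth size forward/backward along the heading,
        # then half the ground-range size to either side.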
        lon = sarwave.read_values('lon')[0, 0]
        lat = sarwave.read_values('lat')[0, 0]
        grdrasize = sarwave.read_values('oswGroundRngSize')[0, 0]
        grdazsize = sarwave.read_values('oswAziSize')[0, 0]
        geod = pyproj.Geod(ellps='WGS84')
        lons = np.repeat(lon, 2)
        lats = np.repeat(lat, 2)
        az = heading[0, 0] + [0., 180.]
        dist = np.repeat(grdazsize / 2., 2)
        lonsmid, latsmid, dummy = geod.fwd(lons, lats, az, dist)
        lons = np.repeat(lonsmid, 2)
        lats = np.repeat(latsmid, 2)
        az = heading[0, 0] + [-90, 90., 90., -90.]
        dist = np.repeat(grdrasize / 2., 4)
        gcplon, gcplat, dummy = geod.fwd(lons, lats, az, dist)
        gcppix = np.array([0, spec_size[0], spec_size[0], 0])
        gcplin = np.array([0, 0, spec_size[1], spec_size[1]])
        if np.sin((90 - heading[0, 0]) * np.pi / 180) < 0:
            # descending pass
            gcppix = spec_size[0] - gcppix
            gcplin = spec_size[1] - gcplin
        gcphei = np.zeros(gcplin.size)
    else:
        gcplon = sarwave.read_values('lon')
        gcplat = sarwave.read_values('lat')
        gcphei = np.zeros(gcplon.shape)
        nra = sarwave.get_dimsize('oswRaSize')
        pix = np.arange(nra) * spec_size[0] + spec_size[0] / 2.
        naz = sarwave.get_dimsize('oswAzSize')
        lin = np.arange(naz - 1, -1, -1) * spec_size[1] + spec_size[1] / 2.
        gcppix, gcplin = np.meshgrid(pix, lin)
        if np.sin((90 - heading[0, 0]) * np.pi / 180) < 0:
            # descending pass
            gcppix = nra * spec_size[0] - gcppix
            gcplin = naz * spec_size[1] - gcplin
    if gcplon.min() < -135 and gcplon.max() > 135:
        gcplon[np.where(gcplon < 0)] += 360.

    # Construct metadata/geolocation/band(s)
    print 'Construct metadata/geolocation/band(s)'
    metadata = {}
    (dtime, time_range) = stfmt.format_time_and_range(start_time,
                                                      stop_time,
                                                      units='ms')
    metadata['product_name'] = 'SAR_wave'
    metadata['name'] = dataname
    metadata['datetime'] = dtime
    metadata['time_range'] = time_range
    metadata['source_URI'] = infile
    metadata['source_provider'] = source_provider
    metadata['processing_center'] = ''  #'OceanDataLab'
    metadata['conversion_software'] = 'Syntool'
    metadata['conversion_version'] = '0.0.0'
    metadata['conversion_datetime'] = stfmt.format_time(datetime.utcnow())
    metadata['parameter'] = ''
    metadata['type'] = 'remote sensing'
    metadata['sensor_type'] = 'SAR'
    metadata['sensor_name'] = sensor_name
    metadata['sensor_platform'] = sensor_platform
    metadata['sensor_mode'] = sensor_mode
    metadata['sensor_swath'] = sensor_swath
    metadata['sensor_polarisation'] = sensor_polarisation
    metadata['sensor_pass'] = sensor_pass
    metadata['datagroup'] = datagroup
    geolocation = {}
    geolocation['projection'] = stfmt.format_gdalprojection()
    geolocation['gcps'] = stfmt.format_gdalgcps(gcplon, gcplat, gcphei, gcppix,
                                                gcplin)
    band = []
    band.append({'array': rgb[:, :, 0]})
    band.append({'array': rgb[:, :, 1]})
    band.append({'array': rgb[:, :, 2]})

    # Write geotiff
    print 'Write geotiff'
    tifffile = stfmt.format_tifffilename(outdir, metadata, create_dir=True)
    stfmt.write_geotiff(tifffile, metadata, geolocation, band)
    # Write projected png/kml
    if pngkml == True:
        print 'Write projected png/kml'
        stfmt.write_pngkml_proj(tifffile)
Example #23
def viirs_chlora(infileid,
                 outdir,
                 download_dir='/tmp',
                 vmin=None,
                 vmax=None,
                 contrast='relative',
                 ngcps=(26, 32),
                 open_iterations=1,
                 nprocs=1,
                 pngkml=False,
                 write_netcdf=False):
    """
    """
    if contrast == 'med':
        listbox = [[-6., 35., 2.75, 42.48], [2.74, 30, 42.2, 47.00]]
    elif contrast == 'cwe':
        listbox = [[-23., 35.2, -5.5, 42.88], [-23., 42.8, 2.20, 51.]]
    elif contrast == 'nwe':
        listbox = [[-23., 50.8, 32.7, 68.]]
    elif contrast == 'gom':
        listbox = [[-98., 18.0, -80.5, 30.5]]
    elif contrast == 'agulhas':
        listbox = [[10.8437, -45.7404, 39.9799, -25.3019]]
    elif contrast == 'gs':
        listbox = [[-81.52, 20, -30, 45]]
    else:
        listbox = None
    # Search/Download data
    print 'Search/Download data'
    if re.match(r'^V[0-9]{13}$', infileid) is None:
        raise Exception('Input for viirs_chlora is an ID '
                        '(e.g. V2014093110000)')
    product_id = 'VIIRSL2OC'
    date = datetime.strptime(infileid[1:], '%Y%j%H%M%S')
    viirsocfname = viirs.search_and_download(product_id, date, download_dir)

    # Read/Process data
    print 'Read/Process data'
    # Read from OC file
    viirsocfile = viirs.VIIRSL2File(viirsocfname)
    lon = viirsocfile.read_lon()
    lat = viirsocfile.read_lat()
    chlora = viirsocfile.read_chlora()
    attrs = viirsocfile.read_attributes()
    viirsocfile.close()
    if listbox is not None:
        mask_box = np.zeros(np.shape(chlora.data))
        for i in range(np.shape(listbox)[0]):
            index_in = np.where((lon >= listbox[i][0]) & (lat >= listbox[i][1])
                                & (lon <= listbox[i][2])
                                & (lat <= listbox[i][3]))
            mask_box[index_in] = 1
        mask = (mask_box == 0) | ma.getmaskarray(chlora)
    else:
        mask = ma.getmaskarray(chlora)
    if mask.all():
        print 'No data'
        sys.exit(0)
    # GCPs for resampling and geotiff georeference
    scansize = 16
    dtime0 = datetime.utcnow()
    gcps = resample.get_gcps_from_bowtie(lon, lat, scansize, ngcps=ngcps)
    dtime = datetime.utcnow() - dtime0
    print 'Get GCPs from bowtie swath : {}'.format(dtime)
    gcplon, gcplat, gcpnpixel, gcpnline = gcps
    rspysize = lon.shape[0]
    geod = pyproj.Geod(ellps='WGS84')
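    # Derive the output grid width from the geodesic length of the mid-swath
    # GCP line, assuming ~750 m pixels (VIIRS M-band nadir resolution).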
    mid = abs(gcpnline[:, 0] - 0.5).argmin()
    xdists = geod.inv(gcplon[mid, :-1], gcplat[mid, :-1], gcplon[mid, 1:],
                      gcplat[mid, 1:])[2]
    xdist = np.sum(xdists) / abs(gcpnpixel[mid, -1] - gcpnpixel[mid, 0])
    rspxsize = np.round(xdist / 750.).astype('int') + 1
    gcpline = gcpnline * rspysize
    gcppixel = gcpnpixel * rspxsize

    # Resample with LinearNDInterpolator in output space
    dtime0 = datetime.utcnow()
    pix, lin = resample.get_points_from_gcps(gcplon,
                                             gcplat,
                                             gcppixel,
                                             gcpline,
                                             rspxsize,
                                             rspysize,
                                             1,
                                             lon,
                                             lat,
                                             nprocs=nprocs) - 0.5
    dtime = datetime.utcnow() - dtime0
    print 'Get input coordinates in new grid : {}'.format(dtime)
    # Test input grid in output space
    # import matplotlib.pyplot as plt
    # for iscan in range(lon.shape[0] / scansize):
    #     pixscan = pix[iscan * scansize: (iscan+1) * scansize, :]
    #     linscan = lin[iscan * scansize: (iscan+1) * scansize, :]
    #     # maskscan = mask[iscan * scansize: (iscan+1) * scansize, :]
    #     # pixscan = pixscan[~maskscan]
    #     # linscan = linscan[~maskscan]
    #     plt.plot(pixscan.flatten(), linscan.flatten(), '+')
    # plt.show()
    # import pdb ; pdb.set_trace()
    # \Test input grid in output space
    dtime0 = datetime.utcnow()
    chlora.data[mask] = np.nan
    rspchlora = resample.resample_bowtie_linear(pix,
                                                lin,
                                                chlora.data,
                                                scansize,
                                                rspxsize,
                                                rspysize,
                                                show=False)
    rspmask = ma.getmaskarray(rspchlora)
    dtime = datetime.utcnow() - dtime0
    print 'Interpolate in new grid : {}'.format(dtime)

    # Take log and open mask
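    # Chlorophyll spans orders of magnitude, so work in log space; the binary
    # opening on the valid-data mask removes small isolated patches of valid
    # pixels that are likely noise.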
    finalchlora = ma.log(rspchlora)
    finalmask = ~binary_opening(
        ~rspmask, structure=np.ones((3, 3)), iterations=open_iterations)
    finalchlora.mask = finalmask

    # Contrast
    if vmin == None:
        if contrast == 'relative':
            vmin = np.percentile(finalchlora.compressed(), 0.5)
        elif contrast == 'agulhas':
            dayofyear = float(attrs['start_time'].timetuple().tm_yday)
            vmin = -0.5 * np.cos((dayofyear - 45.) * 2. * np.pi / 365.) - 3.
        elif contrast == 'med' or contrast == 'nwe' or contrast == 'cwe':
            vmin = np.percentile(finalchlora.compressed(), 2)
        else:
            raise Exception('Unknown contrast : {}'.format(contrast))
    else:
        if vmin != 0:
            vmin = math.log(vmin)
        else:
            vmin = np.percentile(finalchlora.compressed(), 0.5)
    if vmax == None:
        if contrast == 'relative':
            vmax = np.percentile(finalchlora.compressed(), 99.5)
        elif contrast == 'agulhas':
            dayofyear = float(attrs['start_time'].timetuple().tm_yday)
            vmax = 0.5 * np.cos((dayofyear - 45.) * 2. * np.pi / 365.) + 3.
        elif contrast == 'med':
            vmax = np.percentile(finalchlora.compressed(), 98)
        elif contrast == 'nwe':
            vmax = np.percentile(finalchlora.compressed(), 98)
        elif contrast == 'cwe':
            vmax = np.percentile(finalchlora.compressed(), 98)
        else:
            raise Exception('Unknown contrast : {}'.format(contrast))
    else:
        if vmax != 0:
            vmax = math.log(vmax)
        else:
            vmax = np.percentile(finalchlora.compressed(), 98)

    # Flip (geotiff in "swath sense")
    finalchlora = finalchlora[::-1, ::-1]
    gcppixel = rspxsize - gcppixel
    gcpline = rspysize - gcpline

    # Construct metadata/geolocation/band(s)
    print 'Construct metadata/geolocation/band(s)'
    metadata = {}
    (dtime, time_range) = stfmt.format_time_and_range(attrs['start_time'],
                                                      attrs['stop_time'],
                                                      units='ms')
    metadata['product_name'] = 'Chlorophyll_a_concentration_VIIRS'
    if contrast == 'relative':
        metadata['name'] = os.path.splitext(os.path.basename(viirsocfname))[0]
    else:
        metadata['name'] = '{}_{}'.format(
            os.path.splitext(os.path.basename(viirsocfname))[0], contrast)
    metadata['datetime'] = dtime
    metadata['time_range'] = time_range
    metadata['source_URI'] = viirsocfname
    metadata['source_provider'] = 'NOAA'
    metadata['processing_center'] = 'OceanDataLab'
    metadata['conversion_software'] = 'Syntool'
    metadata['conversion_version'] = '0.0.0'
    metadata['conversion_datetime'] = stfmt.format_time(datetime.utcnow())
    metadata['parameter'] = 'chlorophyll a concentration'
    metadata['type'] = 'remote sensing'
    metadata['sensor_type'] = 'radiometer'
    metadata['sensor_name'] = 'VIIRS'
    metadata['sensor_platform'] = 'Suomi-NPP'
    metadata['sensor_pass'] = attrs['pass']
    geolocation = {}
    geolocation['projection'] = stfmt.format_gdalprojection()
    gcpheight = np.zeros(gcppixel.shape)
    geolocation['gcps'] = stfmt.format_gdalgcps(gcplon, gcplat, gcpheight,
                                                gcppixel, gcpline)
    band = []
    indndv = np.where(ma.getmaskarray(finalchlora) == True)
    offset, scale = vmin, (vmax - vmin) / 254.
    np.clip(finalchlora.data, vmin, vmax, out=finalchlora.data)
    array = np.round((finalchlora.data - offset) / scale).astype('uint8')
    array[indndv] = 255
    colortable = stfmt.format_colortable('chla_jet',
                                         vmax=vmax,
                                         vmax_pal=vmax,
                                         vmin=vmin,
                                         vmin_pal=vmin)
    band.append({
        'array': array,
        'scale': scale,
        'offset': offset,
        'description': 'chlorophyll a concentration',
        'unittype': 'log(mg/m3)',
        'nodatavalue': 255,
        'parameter_range': [vmin, vmax],
        'colortable': colortable
    })

    if write_netcdf == False:
        # Write geotiff
        print 'Write geotiff'
        tifffile = stfmt.format_tifffilename(outdir, metadata, create_dir=True)
        stfmt.write_geotiff(tifffile, metadata, geolocation, band)
        # Write projected png/kml
        if pngkml == True:
            print 'Write projected png/kml'
            stfmt.write_pngkml_proj(tifffile)
    elif write_netcdf == True:
        print 'Write netcdf'
        ncfile = stfmt.format_ncfilename(outdir, metadata, create_dir=True)
        band[0]['name'] = 'chlor_a'
        band[0]['long_name'] = 'Chlorophyll Concentration, OCI Algorithm'
        band[0]['standard_name'] = (
            'mass_concentration_of_chlorophyll_a_in_sea_water')
        band[0]['unittype'] = 'mg m^-3 (log)'
        # ymid = abs(gcpline[:, 0] - rspysize / 2.).argmin()
        # xdists = geod.inv(gcplon[ymid, :-1], gcplat[ymid, :-1],
        #                   gcplon[ymid, 1:], gcplat[ymid, 1:])[2] / \
        #                   np.abs(gcppixel[ymid, 1:] - gcppixel[ymid, :-1])
        # xmid = abs(gcppixel[0, :] - rspxsize / 2.).argmin()
        # ydists = geod.inv(gcplon[:-1, xmid], gcplat[:-1, xmid],
        #                   gcplon[1:, xmid], gcplat[1:, xmid])[2] / \
        #                   np.abs(gcpline[1:, xmid] - gcpline[:-1, xmid])
        # print xdists.min(), xdists.max(), xdists.mean()
        # # e.g. 749.810419844 749.810438261 749.810429577
        # print ydists.min(), ydists.max(), ydists.mean()
        # # e.g. 737.874499629 739.856423757 738.87317625
        metadata['spatial_resolution'] = 750.
        stfmt.write_netcdf(ncfile,
                           metadata,
                           geolocation,
                           band,
                           'swath',
                           ngcps=gcplon.shape)
Example #24
def sar_roughness(infile,
                  outdir,
                  pngkml=False,
                  contrast=None,
                  vmin=None,
                  vmax=None,
                  landmaskpath=None,
                  write_netcdf=False,
                  gcp2height=0):
    """
    """
    # Read/Process data
    print 'Read/Process data'
    sarmp = SAFEGeoTiffFile(infile)
    sarim = SARImage(sarmp)
    mission = sarim.get_info('mission')
    if mission == 'S1A':
        sensor_name = 'Sentinel-1A'
        sensor_platform = 'Sentinel-1A'
        source_provider = 'ESA'
    elif mission == 'S1B':
        sensor_name = 'Sentinel-1B'
        sensor_platform = 'Sentinel-1B'
        source_provider = 'ESA'
    else:
        raise Exception('Unknown mission')
    timefmt = '%Y-%m-%dT%H:%M:%S.%f'
    start_time = datetime.strptime(sarim.get_info('start_time'), timefmt)
    stop_time = datetime.strptime(sarim.get_info('stop_time'), timefmt)
    sensor_pass = sarim.get_info('pass')
    sensor_mode = sarim.get_info('mode')
    sensor_swath = sarim.get_info('swath')
    sensor_polarisation = sarim.get_info('polarisation')
    product = sarim.get_info('product')
    if product == 'GRD':
        spacing = [2, 2]
    elif product == 'SLC':
        if sensor_mode == 'WV':
            mspacing = (15, 15)
        elif re.match(r'^S[1-6]$', sensor_mode) != None:
            mspacing = (15, 15)
        elif sensor_mode == 'IW':
            raise Exception('sar_roughness for IW SLC ?')
        elif sensor_mode == 'EW':
            raise Exception('sar_roughness for EW SLC ?')
        else:
            raise Exception('Unknown S1 mode : {}'.format(sensor_mode))
        spacing = np.round(sarim.meters2pixels(mspacing))
    else:
        raise Exception('Unknown S1 product : {}'.format(product))
    mspacing = sarim.pixels2meters(spacing)
    datagroup = sarim.get_info('safe_name').replace('.SAFE', '')
    pid = datagroup.split('_')[-1]
    dataname = os.path.splitext(os.path.basename(infile))[0] + '-' + pid
    ssr = np.sqrt(sarim.get_data('roughness', spacing=spacing))
    ########## TMP calibration constant ##########
    # if sensor_mode == 'WV':
    #     caldir = '/home/cercache/project/mpc-sentinel1/analysis/s1_data_analysis/L1/WV/S1A_WV_SLC__1S/cal_cste'
    #     if sensor_polarisation == 'HH':
    #         if sensor_swath == 'WV1':
    #             caltmp = (55.80+56.91)/2.
    #             calname = 'cal_cste_hh_wv1.pkl'
    #         elif sensor_swath == 'WV2':
    #             caltmp = (40.65+40.32)/2.
    #             calname = 'cal_cste_hh_wv2.pkl'
    #     elif sensor_polarisation == 'VV':
    #         if sensor_swath == 'WV1':
    #             caltmp = 58.24
    #             calname = 'cal_cste_vv_wv1.pkl'
    #         elif sensor_swath == 'WV2':
    #             caltmp = 49.02
    #             calname = 'cal_cste_vv_wv2.pkl'
    #     calpath = os.path.join(caldir, calname)
    #     if os.path.exists(calpath) == True:
    #         caltmp = get_caltmp(calpath, start_time)
    # elif re.match(r'^S[1-6]$', sensor_mode) != None:
    #     if start_time < datetime(2014, 7, 16, 0, 0, 0):
    #         if sensor_mode == 'S6':
    #             raise Exception('S6 calibration missing')
    #         sm2cal = {'S1':58., 'S2':56., 'S3':52., 'S4':52., 'S5':49.}
    #     else:
    #         # from commissioning phase report
    #         sm2cal = {'S1':3., 'S2':5., 'S3':-1.5, 'S4':4., 'S5':1., 'S6':4.75}
    #     caltmp = sm2cal[sensor_mode]
    # elif sensor_mode == 'IW':
    #     if start_time < datetime(2014, 7, 16, 0, 0, 0):
    #         caltmp = 109.
    #     else:
    #         caltmp = 3. # from commissioning phase report
    # elif sensor_mode == 'EW':
    #     if start_time < datetime(2014, 7, 16, 0, 0, 0):
    #         caltmp = 94.
    #     else:
    #         caltmp = -1. # <- -2. # from commissioning phase report
    # else:
    #     raise Exception('Which tmp calibration constant for this mode ?')
    # print '--> caltmp=%f' % caltmp
    # ssr *= np.sqrt(10 ** (caltmp / 10.))
    ########## /TMP calibration constant ##########
    dim = ssr.shape
    # Set contrast
    if vmin == None or vmax == None:
        if contrast == None:
            if sensor_mode == 'WV':
                contrast = 'relative'
            else:
                contrast = 'sea'
        if contrast == 'relative':
            if sensor_mode == 'WV':
                noborder = [
                    slice(int(dim[0] * .05), int(dim[0] * .95)),
                    slice(int(dim[1] * .05), int(dim[1] * .95))
                ]
            else:
                noborder = [
                    slice(int(dim[0] * .05), int(dim[0] * .95)),
                    slice(int(dim[1] * .1), int(dim[1] * .9))
                ]
            values = ssr[noborder]
            if landmaskpath != None and os.path.exists(landmaskpath):
                lmspacing = np.round(sarim.meters2pixels(111.32 / 120 * 1000))
                lmspacing -= np.mod(lmspacing, spacing)
                lon = sarim.get_data('lon', spacing=lmspacing)
                lat = sarim.get_data('lat', spacing=lmspacing)
                lmdim = (lon.shape[0] + 1, lon.shape[1] + 1)
                landmask = np.ones(lmdim, dtype=bool)
                landmask[:-1, :-1] = get_landmask(lon, lat, landmaskpath)
                lmfac = lmspacing / spacing
                landmask = np.repeat(landmask, lmfac[0], axis=0)
                landmask = np.repeat(landmask, lmfac[1], axis=1)
                seaindex = np.where(landmask[noborder] == False)
                if seaindex[0].size >= ssr.size * 0.01:
                    values = values[seaindex]
            if vmin == None:
                vmin = scoreatpercentile(values, 0.1)
            if vmax == None:
                vmax = scoreatpercentile(values, 99.9)
        elif contrast == 'sea':
            if sensor_polarisation in ['HH', 'VV']:
                if vmin == None:
                    vmin = 0.
                if vmax == None:
                    vmax = 2.
            else:
                if vmin == None:
                    vmin = 1.
                if vmax == None:
                    vmax = 3.
        elif contrast == 'ice':
            if sensor_polarisation in ['HH', 'VV']:
                if vmin == None:
                    vmin = 0.
                if vmax == None:
                    vmax = 3.5
            else:
                if vmin == None:
                    vmin = 1.
                if vmax == None:
                    vmax = 5.
        else:
            raise Exception('Unknown contrast name.')
    print '--> vmin=%f vmax=%f' % (vmin, vmax)
    ssr = ssr[::-1, :]  # keep SAR orientation for geotiff
    geoloc = sarim.get_info('geolocation_grid')
    gcplin = (dim[0] * spacing[0] - 1 - geoloc['line'] + 0.5) / spacing[0]
    gcppix = (geoloc['pixel'] + 0.5) / spacing[1]
    gcplon = geoloc['longitude']
    gcplat = geoloc['latitude']
    gcphei = geoloc['height']
    if gcp2height is not None:
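        # Shift each GCP to the reference height gcp2height: the horizontal
        # displacement is (gcp2height - gcp_height) / tan(incidence), applied
        # forward or backward along the across-track (range) direction of the
        # GCP grid depending on its sign.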
        geod = pyproj.Geod(ellps='WGS84')
        gcpforw, gcpback, _ = geod.inv(gcplon[:, :-1], gcplat[:, :-1],
                                       gcplon[:, 1:], gcplat[:, 1:])
        gcpforw = np.hstack((gcpforw, gcpforw[:, [-1]]))
        gcpback = np.hstack((gcpback[:, [0]], gcpback))
        gcpinc = geoloc['incidence_angle']
        mvdist = (gcp2height - gcphei) / np.tan(np.deg2rad(gcpinc))
        mvforw = gcpforw
        indneg = np.where(mvdist < 0)
        mvdist[indneg] = -mvdist[indneg]
        mvforw[indneg] = gcpback[indneg]
        _gcplon, _gcplat, _ = geod.fwd(gcplon, gcplat, mvforw, mvdist)
        gcplon = _gcplon
        gcplat = _gcplat
        gcphei.fill(gcp2height)
    if gcplon.min() < -135 and gcplon.max() > 135:
        gcplon[np.where(gcplon < 0)] += 360.
    # Construct metadata/geolocation/band(s)
    print 'Construct metadata/geolocation/band(s)'
    metadata = {}
    (dtime, time_range) = stfmt.format_time_and_range(start_time,
                                                      stop_time,
                                                      units='ms')
    if sensor_polarisation in ['HH', 'VV']:
        metadata['product_name'] = 'SAR_roughness'
    else:
        metadata['product_name'] = 'SAR_roughness_crosspol'
    metadata['name'] = dataname
    metadata['datetime'] = dtime
    metadata['time_range'] = time_range
    metadata['source_URI'] = infile
    metadata['source_provider'] = source_provider
    metadata['processing_center'] = 'OceanDataLab'
    metadata['conversion_software'] = 'Syntool'
    metadata['conversion_version'] = '0.0.0'
    metadata['conversion_datetime'] = stfmt.format_time(datetime.utcnow())
    metadata['parameter'] = 'sea surface roughness'
    metadata['type'] = 'remote sensing'
    metadata['sensor_type'] = 'SAR'
    metadata['sensor_name'] = sensor_name
    metadata['sensor_platform'] = sensor_platform
    metadata['sensor_mode'] = sensor_mode
    metadata['sensor_swath'] = sensor_swath
    metadata['sensor_polarisation'] = sensor_polarisation
    metadata['sensor_pass'] = sensor_pass
    metadata['datagroup'] = datagroup
    geolocation = {}
    geolocation['projection'] = sarim._mapper._handler.GetGCPProjection()
    geolocation['gcps'] = stfmt.format_gdalgcps(gcplon, gcplat, gcphei, gcppix,
                                                gcplin)
    band = []
    scale = (vmax - vmin) / 254.
    offset = vmin
    indzero = np.where(ssr == 0)
    array = np.clip(np.round((ssr - offset) / scale), 0, 254).astype('uint8')
    array[indzero] = 255
    band.append({
        'array': array,
        'scale': scale,
        'offset': offset,
        'description': 'sea surface roughness',
        'unittype': '',
        'nodatavalue': 255,
        'parameter_range': [vmin, vmax]
    })
    # Write
    if write_netcdf == False:
        print 'Write geotiff'
        tifffile = stfmt.format_tifffilename(outdir, metadata, create_dir=True)
        stfmt.write_geotiff(tifffile, metadata, geolocation, band)
        # Write projected png/kml
        if pngkml == True:
            print 'Write projected png/kml'
            stfmt.write_pngkml_proj(tifffile)
    elif write_netcdf == True:
        print 'Write netcdf'
        ncfile = stfmt.format_ncfilename(outdir, metadata, create_dir=True)
        band[0]['name'] = 'sea_surface_roughness'
        band[0]['long_name'] = 'sea surface roughness'
        band[0]['unittype'] = '1'
        metadata['spatial_resolution'] = mspacing.min()
        stfmt.write_netcdf(ncfile,
                           metadata,
                           geolocation,
                           band,
                           'swath',
                           ngcps=gcplon.shape)
def sar_doppler(infile, outdir):
    """
    """
    # tmp
    #infile = '/local/home/fab/data/sar/ASA/agulhas/ASA_WSM_1PNPDE20110518_210602_000002143102_00330_48189_1274/SAR_doppler.nc'
    # infile = '/local/home/fab/data/sar/ASA/agulhas/ASA_WSM_1PNPDE20110824_211403_000002143106_00014_49597_2093/SAR_doppler.nc'
    # outdir = '/local/home/data/syntool_inputs'
    # /tmp
    # Read/Process data
    print 'Read/Process data'
    sardop = NCFile(infile)
    product_ref = sardop.read_global_attribute('SOURCE_PRODUCT_REF')
    start_time = sardop.read_global_attribute('SOURCE_START_DATE')
    start_time = datetime.strptime(start_time, '%Y%m%d%H%M%S.%f')
    duration = sardop.read_global_attribute('SOURCE_ACQ_DURATION')
    stop_time = start_time + timedelta(seconds=duration)
    polarisation = sardop.read_global_attribute('SOURCE_POLARIZATION')
    lon = sardop.read_values('longitude')[::-1, :]
    lat = sardop.read_values('latitude')[::-1, :]
    #dopano = sardop.read_values('dopanomaly')[::-1, :]
    radvel = sardop.read_values('radial_vel')[::-1, :]
    validity = sardop.read_values('validity')[::-1, :]
    track_angle = sardop.read_global_attribute('SOURCE_TRACK_ANGLE')
    if track_angle < 0:
        radvel *= -1
    shp = lon.shape
    nlines = int(np.ceil(shp[0] / 4.)) + 1
    lines = np.round(np.linspace(0, shp[0] - 1, num=nlines)).astype('int32')
    npixels = int(np.ceil(shp[1] / 4)) + 1
    pixels = np.round(np.linspace(0, shp[1] - 1, num=npixels)).astype('int32')
    gcplin = np.tile(lines.reshape(nlines, 1), (1, npixels))
    gcppix = np.tile(pixels.reshape(1, npixels), (nlines, 1))
    gcplon = lon[gcplin, gcppix]
    gcplat = lat[gcplin, gcppix]
    gcphei = np.zeros((nlines, npixels))
    gcppix = gcppix + 0.5
    gcplin = gcplin + 0.5
    # Construct metadata/geolocation/band(s)
    print 'Construct metadata/geolocation/band(s)'
    metadata = {}
    (dtime, time_range) = stfmt.format_time_and_range(start_time,
                                                      stop_time,
                                                      units='ms')
    metadata['product_name'] = 'SAR_doppler'
    metadata['name'] = product_ref
    metadata['datetime'] = dtime
    metadata['time_range'] = time_range
    metadata['source_URI'] = infile
    metadata['source_provider'] = 'ESA'
    metadata['processing_center'] = 'CLS'
    metadata['conversion_software'] = 'Syntool'
    metadata['conversion_version'] = '0.0.0'
    metadata['conversion_datetime'] = stfmt.format_time(datetime.utcnow())
    metadata['parameter'] = 'radial horizontal velocities'
    metadata['type'] = 'remote sensing'
    metadata['sensor_type'] = 'SAR'
    metadata['sensor_name'] = 'ASAR'
    metadata['sensor_platform'] = 'ENVISAT'
    metadata['sensor_mode'] = 'WSM'
    #metadata['sensor_swath'] = sensor_swath
    metadata['sensor_polarisation'] = polarisation
    #metadata['sensor_pass'] = sensor_pass
    geolocation = {}
    geolocation['projection'] = stfmt.format_gdalprojection(geogcs='WGS84')
    geolocation['gcps'] = stfmt.format_gdalgcps(gcplon, gcplat, gcphei, gcppix,
                                                gcplin)
    # band = []
    # scale = (vmax-vmin)/254.
    # offset = vmin
    # indzero = np.where(validity == 0)
    # array = np.clip(np.round((radvel-offset)/scale), 0, 254).astype('uint8')
    # array[indzero] = 255
    # band.append({'array':array, 'scale':scale, 'offset':offset,
    #              'description':'radial horizontal velocities', 'unittype':'m/s',
    #              'nodatavalue':255, 'parameter_range':[vmin, vmax]})
    band = []
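    # Render the radial velocity through a symmetric colormap normalised to
    # the [-2.5, 2.5] range and store the result as three 8-bit RGB bands;
    # invalid pixels are set to 0.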
    cmap = doppler_colormap()
    norm = Normalize(vmin=-2.5, vmax=2.5)
    rgb = cmap(norm(radvel))
    indnodata = np.where(validity == 0)
    for ich in range(3):
        channel = np.round(rgb[:, :, ich] * 255).astype('uint8')
        channel[indnodata] = 0
        band.append({'array': channel, 'nodatavalue': 0})
    # Write geotiff
    print 'Write geotiff'
    tifffile = stfmt.format_tifffilename(outdir, metadata, create_dir=True)
    stfmt.write_geotiff(tifffile, metadata, geolocation, band)
def ghrsst_seviri(infile,
                  outdir,
                  vmin=271.05,
                  vmax=309.15,
                  vmin_pal=273.,
                  vmax_pal=305.,
                  write_netcdf=False):
    """
    """
    # Read/Process data
    print 'Read/Process data'
    seviri = GHRSSTNCFile(infile)
    sst = seviri.read_values('sea_surface_temperature')[::-1, :]
    quality_level = seviri.read_values('quality_level')[::-1, :]
    #sea_ice_fraction = seviri.read_values('sea_ice_fraction')[::-1, :]
    # lon = seviri.read_values('lon')
    # dlon = lon[1] - lon[0]
    # lon0 = lon[0] - dlon / 2
    # lat = seviri.read_values('lat')[::-1]
    # dlat = lat[1] - lat[0]
    # lat0 = lat[0] - dlat / 2
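    # Grid geometry comes from the global attributes; the latitude step is
    # negative because the arrays were flipped so their first row is the
    # northernmost one.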
    lon0 = seviri.read_global_attribute('westernmost_longitude')
    dlon = float(seviri.read_global_attribute('geospatial_lon_resolution'))
    lat0 = seviri.read_global_attribute('northernmost_latitude')
    dlat = -float(seviri.read_global_attribute('geospatial_lat_resolution'))
    dtime = seviri.read_values('time')[0]
    dtime_units = seviri.read_field('time').units
    dtime = num2date(dtime, dtime_units)
    start_time = datetime.strptime(seviri.read_global_attribute('start_time'),
                                   '%Y%m%dT%H%M%SZ')
    stop_time = datetime.strptime(seviri.read_global_attribute('stop_time'),
                                  '%Y%m%dT%H%M%SZ')
    (dtime, time_range) = stfmt.format_time_and_range(start_time,
                                                      stop_time,
                                                      units='ms')
    # Construct metadata/geolocation/band(s)
    print 'Construct metadata/geolocation/band(s)'
    metadata = {}
    metadata['product_name'] = 'SEVIRI_SST'
    metadata['name'] = os.path.splitext(os.path.basename(infile))[0]
    metadata['datetime'] = dtime
    metadata['time_range'] = time_range
    metadata['source_URI'] = infile
    metadata['source_provider'] = 'Ifremer'
    metadata['processing_center'] = ''
    metadata['conversion_software'] = 'Syntool'
    metadata['conversion_version'] = '0.0.0'
    metadata['conversion_datetime'] = stfmt.format_time(datetime.utcnow())
    metadata['parameter'] = 'sea surface temperature'
    metadata['type'] = 'remote sensing'
    metadata['longitude_resolution'] = abs(dlon)
    metadata['latitude_resolution'] = abs(dlat)
    geolocation = {}
    geolocation['projection'] = stfmt.format_gdalprojection()
    geolocation['geotransform'] = [lon0, dlon, 0, lat0, 0, dlat]
    band = []
    #indndv = np.where((sst.mask == True) | (sea_ice_fraction > 0))
    indndv = np.where((sst.mask == True) | (quality_level <= 3))
    offset, scale = vmin, (vmax - vmin) / 254.
    np.clip(sst, vmin, vmax, out=sst)
    array = np.round((sst - offset) / scale).astype('uint8')
    array[indndv] = 255
    colortable = stfmt.format_colortable('cerbere_medspiration',
                                         vmax=vmax,
                                         vmax_pal=vmax_pal,
                                         vmin=vmin,
                                         vmin_pal=vmin_pal)
    band.append({
        'array': array,
        'scale': scale,
        'offset': offset,
        'description': 'sea surface temperature',
        'unittype': 'K',
        'nodatavalue': 255,
        'parameter_range': [vmin, vmax],
        'colortable': colortable
    })

    # Write geotiff
    if write_netcdf == False:
        print 'Write geotiff'
        tifffile = stfmt.format_tifffilename(outdir, metadata, create_dir=True)
        stfmt.write_geotiff(tifffile, metadata, geolocation, band)
    elif write_netcdf == True:
        print 'Write netcdf'
        ncfile = stfmt.format_ncfilename(outdir, metadata, create_dir=True)
        band[0]['name'] = 'sea_surface_temperature'
        band[0]['long_name'] = 'sea surface subskin temperature'
        band[0]['standard_name'] = 'sea_surface_subskin_temperature'
        metadata['spatial_resolution'] = min([abs(dlat), abs(dlon)]) * 111000.
        dgcps = np.round(1. / np.abs(np.array([dlat, dlon]))).astype('int')
        stfmt.write_netcdf(ncfile,
                           metadata,
                           geolocation,
                           band,
                           'grid_lonlat',
                           dgcps=dgcps)
def viirs_sst(infile, outdir, vmin=None, vmax=None, contrast='relative',
              ngcps=(41, 32), denoise_kernel='boxcar', denoise_width=27,
              open_iterations=1, nprocs=1,
              pngkml=False, write_netcdf=False, file_range=None):
    """
    """
    dic = None

    # Check file containing ranges
    if file_range is not None:
        if not os.path.isfile(file_range):
            raise Exception('file_range {} not found'.format(file_range))
        # Read a txt file which contains three columns: yearday,vmin,vmax
        with open(file_range, 'r') as f:
            dic = {}
            for line in f:
                (fdoy, fmin, fmax) = line.split(',')
                dic[int(fdoy)] = (float(fmin), float(fmax))

    if contrast == 'med':
        listbox = [[-6., 35., 2.75, 42.48],
                   [2.74, 30, 42.2, 47.00]]
    elif contrast == 'cwe':
        listbox = [[-23., 35.2, -5.5, 42.88],
                   [-23., 42.8, 2.20, 51.]]
    elif contrast == 'nwe':
        listbox = [[-23., 50.8, 32.7, 68.]]
    elif contrast == 'gom':
        listbox = [[-98., 18.0, -80.5, 30.5]]
    elif contrast == 'agulhas':
        listbox = [[10.8437, -45.7404, 39.9799, -25.3019]]
    elif contrast == 'gs':
        listbox = [[-81.52, 20, -30, 45]]
    else:
        listbox = None
    # Read/Process data
    print 'Read/Process data'
    dataset = Dataset(infile)
    start_time = datetime.strptime(dataset.start_time, '%Y%m%dT%H%M%SZ')
    print start_time.day
    print start_time.month
    stop_time = datetime.strptime(dataset.stop_time, '%Y%m%dT%H%M%SZ')
    lon = dataset.variables['lon'][:, :]
    lat = dataset.variables['lat'][:, :]
    sst = np.ma.array(dataset.variables['sea_surface_temperature'][0, :, :])
    _bt11 = dataset.variables['brightness_temperature_11um'][0, :, :]
    bt11 = np.ma.array(_bt11)
    quality_level = np.ma.array(dataset.variables['quality_level'][0, :, :])
    '''
    if file_shape is not None:
        with open(file_shape, 'r') as fshape:
            shape = shapely.wkt.load(fshape)
        box = shape.bounds
        index_in = np.where((lon >= box[0]) & (lat >= box[1])
                            & (lon <= box[2]) & (lat <= box[3]))
        index_out = np.where((lon < box[0]) | (lat < box[1])
                             | (lon > box[2]) | (lat > box[3]))
        sst[index_out] = np.nan
        print(np.shape(index_in))
        sys.exit(1)
        for i, j in zip(index_in[0], index_in[1]):
            p = Point(lon[i, j], lat[i, j])
            if p.within(shape) is False:
                sst[i, j] = np.nan
    '''
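    # Mask pixels that are invalid in SST or BT11, have quality_level < 4, or
    # fall outside the regional boxes (if any).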
    if listbox is not None:
        mask_box = np.zeros(np.shape(sst))
        for i in range(np.shape(listbox)[0]):
            index_in = np.where((lon >= listbox[i][0]) & (lat >= listbox[i][1])
                             & (lon <= listbox[i][2]) & (lat <= listbox[i][3]))
            mask_box[index_in] = 1
        mask = ma.getmaskarray(sst) | ma.getmaskarray(bt11) | \
               (quality_level.data < 4) | (mask_box == 0)
    else:
        mask = ma.getmaskarray(sst) | ma.getmaskarray(bt11) | \
               (quality_level.data < 4)
    if mask.all():
        print 'No data'
        sys.exit(0)
    # GCPs for resampling and geotiff georeference
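    # Each VIIRS M-band scan covers 16 lines (bow-tie geometry), hence scansize = 16.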
    scansize = 16
    dtime0 = datetime.utcnow()
    gcps = resample.get_gcps_from_bowtie(lon, lat, scansize, ngcps=ngcps)
    dtime = datetime.utcnow() - dtime0
    print 'Get GCPs from bowtie swath : {}'.format(dtime)
    gcplon, gcplat, gcpnpixel, gcpnline = gcps
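    # Output grid size: keep the input line count, and derive the pixel count
    # from the geodesic width of the swath at mid-track divided by the nominal
    # 750 m pixel spacing.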
    rspysize = lon.shape[0]
    geod = pyproj.Geod(ellps='WGS84')
    mid = abs(gcpnline[:, 0] - 0.5).argmin()
    xdists = geod.inv(gcplon[mid, :-1], gcplat[mid, :-1],
                      gcplon[mid, 1:], gcplat[mid, 1:])[2]
    xdist = np.sum(xdists) / abs(gcpnpixel[mid, -1] - gcpnpixel[mid, 0])
    rspxsize = np.round(xdist / 750.).astype('int') + 1
    gcpline = gcpnline * rspysize
    gcppixel = gcpnpixel * rspxsize

    # Resample with LinearNDInterpolator in output space
    dtime0 = datetime.utcnow()
    pix, lin = resample.get_points_from_gcps(gcplon, gcplat, gcppixel,
                                             gcpline, rspxsize, rspysize,
                                             1, lon, lat, nprocs=nprocs) - 0.5
    dtime = datetime.utcnow() - dtime0
    print 'Get input coordinates in new grid : {}'.format(dtime)
    # Test input grid in output space
    # import matplotlib.pyplot as plt
    # for iscan in range(lon.shape[0] / scansize):
    #     pixscan = pix[iscan * scansize: (iscan+1) * scansize, :]
    #     linscan = lin[iscan * scansize: (iscan+1) * scansize, :]
    #     # maskscan = mask[iscan * scansize: (iscan+1) * scansize, :]
    #     # pixscan = pixscan[~maskscan]
    #     # linscan = linscan[~maskscan]
    #     plt.plot(pixscan.flatten(), linscan.flatten(), '+')
    # plt.show()
    # import pdb ; pdb.set_trace()
    # \Test input grid in output space
    dtime0 = datetime.utcnow()
    sst.data[mask] = np.nan
    bt11.data[mask] = np.nan
    val = np.dstack((sst.data, bt11.data))
    rspval = resample.resample_bowtie_linear(pix, lin, val, scansize,
                                             rspxsize, rspysize, show=False)
    rspsst = rspval[:, :, 0]
    rspbt11 = rspval[:, :, 1]
    rspmask = ma.getmaskarray(rspsst) | ma.getmaskarray(rspbt11)
    dtime = datetime.utcnow() - dtime0
    print 'Interpolate in new grid : {}'.format(dtime)

    # Denoise sst and open mask
    rspsst.mask = rspmask
    rspbt11.mask = rspmask
    finalsst = denoise_sst(rspsst, rspbt11, kernel=denoise_kernel,
                           width=denoise_width, show=False)
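    # Opening the valid-data mask (i.e. ~rspmask) removes small isolated valid
    # patches; the result is inverted back into a mask for the denoised SST.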
    finalmask = ~binary_opening(~rspmask, structure=np.ones((3, 3)),
                                iterations=open_iterations)
    finalsst.mask = finalmask

    # Contrast
    if vmin is None:
        if contrast == 'relative':
            vmin = np.percentile(finalsst.compressed(), 0.5)
        #elif contrast == 'agulhas':
        #    dayofyear = float(start_time.timetuple().tm_yday)
        #    vmin = 273.15 + 2. * np.cos((dayofyear - 45.) * 2. * np.pi / 365.) + 20. - 9.
        #    #par = [277.94999694824219, 42, 2.5500030517578125, -219]
        #    par = [278.09999084472656, 0.62831853071795862,
        #           2.4000091552734375, 0.1570796326794896]
        #    vmin = par[0] + par[2] * np.cos(par[3] * dayofyear - par[1])
        #if a specific txt file is provided for the range
        elif dic is not None:
            dayofyear = float(start_time.timetuple().tm_yday)
            extrema = dic.get(dayofyear, dic[min(dic.keys(),
                           key=lambda k:abs(k - dayofyear))])
            vmin = extrema[0]
        else:
            raise Exception('Unknown contrast : {}'.format(contrast))
    if vmax is None:
        if contrast == 'relative':
            vmax = np.percentile(finalsst.compressed(), 99.5)
        #elif contrast == 'agulhas':
        #    dayofyear = float(start_time.timetuple().tm_yday)
        #    vmax = 273.15 + 2. * np.cos((dayofyear - 45.) * 2. * np.pi / 365.) + 20. + 4.
        #    #par = [300.59999084472656, 21, 2.8499908447265625, -191]
        #    par = [300.59999084472656, 0.29919930034188508,
        #           2.8499908447265625, 0.14959965017094254]
        #    vmax = par[0] + par[2] * np.cos(par[3] * dayofyear - par[1])
        #if a specific text file is provided for the range
        elif dic is not None:
            dayofyear = float(start_time.timetuple().tm_yday)
            extrema = dic.get(dayofyear, dic[min(dic.keys(),
                           key=lambda k:abs(k - dayofyear))])
            vmax = extrema[1]
        else:
            raise Exception('Unknown contrast : {}'.format(contrast))

    # Flip (geotiff in "swath sense")
    finalsst = finalsst[::-1, ::-1]
    gcppixel = rspxsize - gcppixel
    gcpline = rspysize - gcpline

    # Construct metadata/geolocation/band(s)
    print 'Construct metadata/geolocation/band(s)'
    metadata = {}
    (dtime, time_range) = stfmt.format_time_and_range(start_time, stop_time,
                                                      units='ms')
    metadata['product_name'] = 'SST_VIIRS_denoised'
    if contrast == 'relative':
        metadata['name'] = os.path.splitext(os.path.basename(infile))[0]
    else:
        metadata['name'] = '{}_{}'.format(os.path.splitext(os.path.basename(infile))[0], contrast)
    metadata['datetime'] = dtime
    metadata['time_range'] = time_range
    metadata['source_URI'] = infile
    metadata['source_provider'] = 'NOAA'
    metadata['processing_center'] = 'OceanDataLab'
    metadata['conversion_software'] = 'Syntool'
    metadata['conversion_version'] = '0.0.0'
    metadata['conversion_datetime'] = stfmt.format_time(datetime.utcnow())
    metadata['parameter'] = 'sea surface temperature'
    metadata['type'] = 'remote sensing'
    metadata['sensor_type'] = 'radiometer'
    metadata['sensor_name'] = 'VIIRS'
    metadata['sensor_platform'] = 'Suomi-NPP'
    #metadata['sensor_pass'] =
    geolocation = {}
    geolocation['projection'] = stfmt.format_gdalprojection()
    gcpheight = np.zeros(gcppixel.shape)
    geolocation['gcps'] = stfmt.format_gdalgcps(gcplon, gcplat, gcpheight,
                                                gcppixel, gcpline)
    band = []
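    # Pack SST into uint8: values are clipped to [vmin, vmax] and scaled so
    # that 0..254 spans the range, with 255 reserved as the nodata value.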
    indndv = np.where(ma.getmaskarray(finalsst))
    offset, scale = vmin, (vmax-vmin)/254.
    np.clip(finalsst.data, vmin, vmax, out=finalsst.data)
    array = np.round((finalsst.data - offset) / scale).astype('uint8')
    array[indndv] = 255
    colortable = stfmt.format_colortable('cerbere_medspiration',
                                         vmax=vmax, vmax_pal=vmax,
                                         vmin=vmin, vmin_pal=vmin)
    band.append({'array':array, 'scale':scale, 'offset':offset,
                 'description':'sea surface temperature', 'unittype':'K',
                 'nodatavalue':255, 'parameter_range':[vmin, vmax],
                 'colortable':colortable})

    if not write_netcdf:
        # Write geotiff
        print 'Write geotiff'
        tifffile = stfmt.format_tifffilename(outdir, metadata, create_dir=True)
        stfmt.write_geotiff(tifffile, metadata, geolocation, band)
        # Write projected png/kml
        if pngkml:
            print 'Write projected png/kml'
            stfmt.write_pngkml_proj(tifffile)
    else:
        print 'Write netcdf'
        ncfile = stfmt.format_ncfilename(outdir, metadata, create_dir=True)
        band[0]['name'] = 'denoised_sst'
        band[0]['long_name'] = 'denoised sea surface temperature'
        band[0]['standard_name'] = 'sea_surface_temperature'
        # ymid = abs(gcpline[:, 0] - rspysize / 2.).argmin()
        # xdists = geod.inv(gcplon[ymid, :-1], gcplat[ymid, :-1],
        #                   gcplon[ymid, 1:], gcplat[ymid, 1:])[2] / \
        #                   np.abs(gcppixel[ymid, 1:] - gcppixel[ymid, :-1])
        # xmid = abs(gcppixel[0, :] - rspxsize / 2.).argmin()
        # ydists = geod.inv(gcplon[:-1, xmid], gcplat[:-1, xmid],
        #                   gcplon[1:, xmid], gcplat[1:, xmid])[2] / \
        #                   np.abs(gcpline[1:, xmid] - gcpline[:-1, xmid])
        # print xdists.min(), xdists.max(), xdists.mean()
        # # e.g. 749.905437495 749.905892002 749.905827652
        # print ydists.min(), ydists.max(), ydists.mean()
        # # e.g. 737.638084996 741.195663083 739.157662785
        metadata['spatial_resolution'] = 750.
        stfmt.write_netcdf(ncfile, metadata, geolocation, band, 'swath',
                           ngcps=gcplon.shape)
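

# Example usage of viirs_sst (a minimal sketch; the paths below are hypothetical):
#     viirs_sst('/path/to/viirs_l2p_sst_granule.nc', '/path/to/output',
#               contrast='relative', write_netcdf=False)

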
def aquarius_l3_sss(infile,
                    outdir,
                    vmin=31.825,
                    vmax=38.175,
                    vmin_pal=32,
                    vmax_pal=38):
    """
    """
    # Read/Process data
    logger.info('Read/Process data')
    dset = netCDF4.Dataset(infile, 'r')
    _time_start = datetime.datetime.strptime(dset.time_coverage_start,
                                             '%m-%d-%y')
    _time_stop = datetime.datetime.strptime(dset.time_coverage_end, '%m-%d-%y')
    time_start = _time_start + (_time_stop - _time_start) / 2
    time_stop = time_start + datetime.timedelta(days=1)

    lat = dset['lat'][::-1]
    lon = dset['lon'][:]
    sss = dset['sss_cap'][::-1, :]

    # Recenter longitudes from [0, 360) to [-180, 180) and roll the data to match
    p_lon_idx = numpy.where(lon <= 180.)
    n_lon_idx = numpy.where(lon > 180.)
    p_lon = lon[p_lon_idx]
    n_lon = -360. + lon[n_lon_idx]
    lon[:len(n_lon)] = n_lon
    lon[len(n_lon):] = p_lon
    sss = numpy.roll(sss, len(n_lon), 1)

    # Construct metadata/geolocation/band(s)
    logger.info('Construct metadata/geolocation/band(s)')
    dtime, time_range = stfmt.format_time_and_range(time_start,
                                                    time_stop,
                                                    units='h')
    lat0, dlat = lat[0], lat[1] - lat[0]
    lon0, dlon = lon[0], lon[1] - lon[0]
    now = datetime.datetime.utcnow()
    metadata = {}
    metadata['product_name'] = 'AQUARIUS_L3_SSS'
    if dset['sss_cap'].long_name == 'Sea_Surface_Salinity_Rain_Corrected':
        metadata['product_name'] += '_RAIN_CORRECTED'
    metadata['name'] = os.path.splitext(os.path.basename(infile))[0]
    metadata['datetime'] = dtime
    metadata['time_range'] = time_range
    metadata['source_URI'] = infile
    metadata['source_provider'] = 'Jet Propulsion Laboratory'
    metadata['processing_center'] = ''
    metadata['conversion_software'] = 'Syntool'
    metadata['conversion_version'] = '0.0.0'
    metadata['conversion_datetime'] = stfmt.format_time(now)
    metadata['parameter'] = 'sea surface salinity'
    geolocation = {}
    geolocation['projection'] = stfmt.format_gdalprojection()
    geolocation['geotransform'] = [
        lon0 - dlon / 2., dlon, 0, lat0 - dlat / 2., 0, dlat
    ]
    band = []
    offset, scale = vmin, (vmax - vmin) / 254.
    numpy.clip(sss.data, vmin, vmax, out=sss.data)
    array = numpy.round((sss.data - offset) / scale).astype('uint8')
    array[sss.mask] = 255
    colortable = stfmt.format_colortable('matplotlib_jet',
                                         vmin=vmin,
                                         vmax=vmax,
                                         vmin_pal=vmin_pal,
                                         vmax_pal=vmax_pal)
    band.append({
        'array': array,
        'scale': scale,
        'offset': offset,
        'description': 'sea surface salinity',
        'unittype': 'PSS',
        'nodatavalue': 255,
        'parameter_range': [vmin, vmax],
        'colortable': colortable
    })

    # Write geotiff
    logger.info('Write geotiff')
    tifffile = stfmt.format_tifffilename(outdir, metadata, create_dir=True)
    stfmt.write_geotiff(tifffile, metadata, geolocation, band)
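

# Example usage of aquarius_l3_sss (a minimal sketch; the path below is hypothetical):
#     aquarius_l3_sss('/path/to/aquarius_l3_sss_cap_daily.nc', '/path/to/output')

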
def modis_sst(infileid,
              outdir,
              download_dir='/tmp',
              vmin=None,
              vmax=None,
              contrast='relative',
              ngcps=(21, 25),
              resample_radius=5000.,
              resample_sigma=2500.,
              denoise_kernel='boxcar',
              denoise_width=20,
              open_iterations=1,
              nprocs=1,
              pngkml=False,
              write_netcdf=False,
              file_range=None):
    """
    """
    dic = None

    # Check file containing ranges
    if file_range is not None:
        if not os.path.isfile(file_range):
            raise Exception('file_range {} not found'.format(file_range))
        # Read a txt file which contains three columns: yearday,vmin,vmax
        with open(file_range, 'r') as f:
            dic = {}
            for line in f:
                (fdoy, fmin, fmax) = line.split(',')
                dic[int(fdoy)] = (float(fmin), float(fmax))

    # modissstfname = '/mnt/data/sst/modis/MYD021KM.A2011338.1225/A2011338122500.L2_LAC_SST'
    # modis02fname = '/mnt/data/sst/modis/MYD021KM.A2011338.1225/MYD021KM.A2011338.1225.005.2011339235825.hdf'
    # modis03fname = '/mnt/data/sst/modis/MYD021KM.A2011338.1225/MYD03.A2011338.1225.005.2011339233301.hdf'
    # modis35l2fname = '/mnt/data/sst/modis/MYD021KM.A2011338.1225/MYD35_L2.A2011338.1225.005.2011340001234.hdf'
    if contrast == 'med':
        listbox = [[-6., 35., 2.75, 42.48], [2.74, 30, 42.2, 47.00]]
    elif contrast == 'cwe':
        listbox = [[-23., 35.2, -5.5, 42.88], [-23., 42.8, 2.20, 51.]]
    elif contrast == 'nwe':
        listbox = [[-23., 50.8, 32.7, 68.]]
    elif contrast == 'gom':
        listbox = [[-98., 18.0, -80.5, 30.5]]
    elif contrast == 'agulhas':
        listbox = [[10.8437, -45.7404, 39.9799, -25.3019]]
    elif contrast == 'gs':
        listbox = [[-81.52, 20, -30, 45]]
    else:
        listbox = None
    # Search/Download data
    print 'Search/Download data'
    if re.match(r'^[AT][0-9]{13}$', infileid) is None:
        raise Exception('Input for modis_sst must be an ID '
                        '(e.g. A2011338122500 or T2014143234500)')
    platform = infileid[0]
    date = datetime.strptime(infileid[1:], '%Y%j%H%M%S')
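    # Platform letter: 'A' selects Aqua (MYD* products), 'T' selects Terra (MOD* products).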
    modissstid = {'A': 'MODISAL2SST', 'T': 'MODISTL2SST'}[platform]
    modissstfname = modis.search_and_download(modissstid, date, download_dir)
    modis02id = {'A': 'MYD021KM', 'T': 'MOD021KM'}[platform]
    modis02fname = modis.search_and_download(modis02id, date, download_dir)
    modis03id = {'A': 'MYD03', 'T': 'MOD03'}[platform]
    modis03fname = modis.search_and_download(modis03id, date, download_dir)
    modis35l2id = {'A': 'MYD35_L2', 'T': 'MOD35_L2'}[platform]
    modis35l2fname = modis.search_and_download(modis35l2id, date, download_dir)

    # Read/Process data
    print 'Read/Process data'
    # Read from SST file
    modissstfile = modis.MODISL2File(modissstfname)
    # lon = modissstfile.read_lon()
    # lat = modissstfile.read_lat()
    sst = modissstfile.read_sst() + 273.15
    attrs = modissstfile.read_attributes()
    modissstfile.close()
    # Read from radiances file
    modis02file = modis.MODIS02File(modis02fname)
    rad11 = modis02file.read_radiance(31)
    modis02file.close()
    bt11 = modis.modis_bright(rad11, 31, 1)
    # Read from geolocation file
    modis03file = modis.MODIS03File(modis03fname)
    lon = modis03file.read_lon()
    lat = modis03file.read_lat()
    modis03file.close()
    # Read from cloud mask file
    modis35l2file = modis.MODIS35L2File(modis35l2fname)
    cloudmask = modis35l2file.read_cloudmask(byte=0)
    modis35l2file.close()
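    # MOD35 byte 0: bits 1-2 hold the cloud confidence (00 = cloudy) and
    # bits 6-7 the surface type (10 = desert, 11 = land).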
    cloudy = (np.bitwise_and(cloudmask, 2) == 0) & \
             (np.bitwise_and(cloudmask, 4) == 0)
    land = np.bitwise_and(cloudmask, 128) == 128  # Desert or Land
    # land = (np.bitwise_and(cloudmask, 128) == 128) | \
    #        (np.bitwise_and(cloudmask, 64) == 64) # Desert or Land or Coastal
    if listbox is not None:
        mask_box = np.zeros(np.shape(sst))
        for i in range(np.shape(listbox)[0]):
            index_in = np.where((lon >= listbox[i][0]) & (lat >= listbox[i][1])
                                & (lon <= listbox[i][2])
                                & (lat <= listbox[i][3]))
            mask_box[index_in] = 1
        mask = cloudy | land | ma.getmaskarray(sst) | ma.getmaskarray(bt11) | (
            mask_box == 0)
    else:
        mask = cloudy | land | ma.getmaskarray(sst) | ma.getmaskarray(bt11)
    if mask.all():
        print 'No data'
        sys.exit(0)
    # GCPs for resampling and geotiff georeference
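    # MODIS 1 km bands are acquired in 10-line scans (bow-tie geometry), hence scansize = 10.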
    scansize = 10
    dtime0 = datetime.utcnow()
    gcps = resample.get_gcps_from_bowtie(lon, lat, scansize, ngcps=ngcps)
    #gcps = resample.get_gcps_from_bowtie_old(lon, lat, scansize, ngcps=ngcps)
    dtime = datetime.utcnow() - dtime0
    print 'Get GCPs from bowtie swath : {}'.format(dtime)
    gcplon, gcplat, gcpnpixel, gcpnline = gcps
    rspysize = lon.shape[0]
    geod = pyproj.Geod(ellps='WGS84')
    mid = abs(gcpnline[:, 0] - 0.5).argmin()
    xdists = geod.inv(gcplon[mid, :-1], gcplat[mid, :-1], gcplon[mid, 1:],
                      gcplat[mid, 1:])[2]
    xdist = np.sum(xdists) / abs(gcpnpixel[mid, -1] - gcpnpixel[mid, 0])
    rspxsize = np.round(xdist / 1000.).astype('int') + 1
    gcpline = gcpnline * rspysize
    gcppixel = gcpnpixel * rspxsize

    # Resample with LinearNDInterpolator in output space
    dtime0 = datetime.utcnow()
    pix, lin = resample.get_points_from_gcps(gcplon,
                                             gcplat,
                                             gcppixel,
                                             gcpline,
                                             rspxsize,
                                             rspysize,
                                             1,
                                             lon,
                                             lat,
                                             nprocs=nprocs) - 0.5
    dtime = datetime.utcnow() - dtime0
    print 'Get input coordinates in new grid : {}'.format(dtime)
    # Test input grid in output space
    # import matplotlib.pyplot as plt
    # for iscan in range(lon.shape[0] / scansize):
    #     pixscan = pix[iscan * scansize: (iscan+1) * scansize, :]
    #     linscan = lin[iscan * scansize: (iscan+1) * scansize, :]
    #     # maskscan = mask[iscan * scansize: (iscan+1) * scansize, :]
    #     # pixscan = pixscan[~maskscan]
    #     # linscan = linscan[~maskscan]
    #     plt.plot(pixscan.flatten(), linscan.flatten(), '+')
    # plt.show()
    # import pdb ; pdb.set_trace()
    # \Test input grid in output space
    dtime0 = datetime.utcnow()
    sst.data[mask] = np.nan
    bt11.data[mask] = np.nan
    val = np.dstack((sst.data, bt11.data))
    rspval = resample.resample_bowtie_linear(pix,
                                             lin,
                                             val,
                                             scansize,
                                             rspxsize,
                                             rspysize,
                                             show=False)
    rspsst = rspval[:, :, 0]
    rspbt11 = rspval[:, :, 1]
    rspmask = ma.getmaskarray(rspsst) | ma.getmaskarray(rspbt11)
    dtime = datetime.utcnow() - dtime0
    print 'Interpolate in new grid : {}'.format(dtime)

    # Resample with pyresample in lon/lat space
    # rsplin, rsppix = np.mgrid[0:rspysize, 0:rspxsize] + 0.5
    # rsplon, rsplat = resample.get_points_from_gcps(gcplon, gcplat, gcppixel,
    #                                                gcpline, rspxsize, rspysize,
    #                                                0, rsppix, rsplin, nprocs=nprocs)
    # # Test resample grid
    # import matplotlib.pyplot as plt
    # plt.plot(lon.flatten(), lat.flatten(), '+b')
    # plt.plot(rsplon.flatten(), rsplat.flatten(), '+g')
    # plt.plot(gcplon.flatten(), gcplat.flatten(), 'xr')
    # plt.show()
    # import pdb ; pdb.set_trace()
    # # \Test resample grid
    # # Test radius / sigma
    # resample_radius = 5000.
    # resample_sigma = 2500.
    # sst.mask = False
    # #sst.mask = sst.mask | (sst.data < 273.15+5) | (sst.data > 273.15+30)
    # rspsst = resample.resample_gauss(lon, lat, sst, rsplon, rsplat,
    #                                  resample_radius, resample_sigma,
    #                                  nprocs=nprocs, show=True)
    # import pdb ; pdb.set_trace()
    # # \Test radius / sigma
    # valid = np.where(mask == False)
    # rspsst = resample.resample_gauss(lon[valid], lat[valid], sst[valid],
    #                                  rsplon, rsplat,
    #                                  resample_radius, resample_sigma,
    #                                  fill_value=None, nprocs=nprocs,
    #                                  show=False)
    # rspbt11 = resample.resample_gauss(lon[valid], lat[valid], bt11[valid],
    #                                   rsplon, rsplat,
    #                                   resample_radius, resample_sigma,
    #                                   fill_value=None, nprocs=nprocs,
    #                                   show=False)
    # rspmask = resample.resample_nearest(lon, lat, mask,
    #                                     rsplon, rsplat,
    #                                     resample_radius,
    #                                     fill_value=True, nprocs=nprocs,
    #                                     show=False)
    # rspmask = rspmask | ma.getmaskarray(rspsst) | ma.getmaskarray(rspbt11)

    # Denoise sst and open mask
    rspsst.mask = rspmask
    rspbt11.mask = rspmask
    finalsst = denoise_sst(rspsst,
                           rspbt11,
                           kernel=denoise_kernel,
                           width=denoise_width,
                           show=False)
    #finalsst = rspsst
    finalmask = ~binary_opening(
        ~rspmask, structure=np.ones((3, 3)), iterations=open_iterations)
    #finalmask = rspmask
    finalsst.mask = finalmask

    # Contrast
    if vmin is None:
        if contrast == 'relative':
            vmin = np.percentile(finalsst.compressed(), 0.5)
        #elif contrast == 'agulhas':
        #    dayofyear = float(attrs['start_time'].timetuple().tm_yday)
        #    vmin = 273.15 + 2. * np.cos((dayofyear - 45.) * 2. * np.pi / 365.) + 20. - 9.
        #    #par = [277.94999694824219, 42, 2.5500030517578125, -219]
        #    par = [278.09999084472656, 0.62831853071795862,
        #           2.4000091552734375, 0.1570796326794896]
        #    vmin = par[0] + par[2] * np.cos(par[3] * dayofyear - par[1])
        #if a specific txt file is provided for the range
        elif dic is not None:
            dayofyear = float(attrs['start_time'].timetuple().tm_yday)
            # Read a txt file which contains three columns: yearday,vmin,vmax
            extrema = dic.get(
                dayofyear, dic[min(dic.keys(),
                                   key=lambda k: abs(k - dayofyear))])
            vmin = extrema[0]
        else:
            raise Exception('Unknown contrast : {}'.format(contrast))
    if vmax is None:
        if contrast == 'relative':
            vmax = np.percentile(finalsst.compressed(), 99.5)
        #elif contrast == 'agulhas':
        #    dayofyear = float(attrs['start_time'].timetuple().tm_yday)
        #    vmax = 273.15 + 2. * np.cos((dayofyear - 45.) * 2. * np.pi / 365.) + 20. + 4.
        #    #par = [300.59999084472656, 21, 2.8499908447265625, -191]
        #    par = [300.59999084472656, 0.29919930034188508,
        #           2.8499908447265625, 0.14959965017094254]
        #    vmax = par[0] + par[2] * np.cos(par[3] * dayofyear - par[1])
        #if a specific text file is provided for the range
        elif dic is not None:
            dayofyear = float(attrs['start_time'].timetuple().tm_yday)
            extrema = dic.get(
                dayofyear, dic[min(dic.keys(),
                                   key=lambda k: abs(k - dayofyear))])
            vmax = extrema[1]
        else:
            raise Exception('Unknown contrast : {}'.format(contrast))

    # Flip (geotiff in "swath sense")
    finalsst = finalsst[::-1, ::-1]
    gcppixel = rspxsize - gcppixel
    gcpline = rspysize - gcpline

    # Construct metadata/geolocation/band(s)
    print 'Construct metadata/geolocation/band(s)'
    metadata = {}
    (dtime, time_range) = stfmt.format_time_and_range(attrs['start_time'],
                                                      attrs['stop_time'],
                                                      units='ms')
    metadata['product_name'] = 'SST_MODIS_denoised'
    if contrast == 'relative':
        metadata['name'] = os.path.splitext(os.path.basename(modissstfname))[0]
    else:
        metadata['name'] = '{}_{}'.format(
            os.path.splitext(os.path.basename(modissstfname))[0], contrast)
    metadata['datetime'] = dtime
    metadata['time_range'] = time_range
    metadata['source_URI'] = [
        modissstfname, modis02fname, modis03fname, modis35l2fname
    ]
    metadata['source_provider'] = 'NASA'
    metadata['processing_center'] = 'OceanDataLab'
    metadata['conversion_software'] = 'Syntool'
    metadata['conversion_version'] = '0.0.0'
    metadata['conversion_datetime'] = stfmt.format_time(datetime.utcnow())
    metadata['parameter'] = 'sea surface temperature'
    metadata['type'] = 'remote sensing'
    metadata['sensor_type'] = 'radiometer'
    metadata['sensor_name'] = 'MODIS'
    metadata['sensor_platform'] = attrs['platform']
    metadata['sensor_pass'] = attrs['pass']
    geolocation = {}
    geolocation['projection'] = stfmt.format_gdalprojection()
    gcpheight = np.zeros(gcppixel.shape)
    geolocation['gcps'] = stfmt.format_gdalgcps(gcplon, gcplat, gcpheight,
                                                gcppixel, gcpline)
    band = []
    indndv = np.where(ma.getmaskarray(finalsst))
    offset, scale = vmin, (vmax - vmin) / 254.
    np.clip(finalsst.data, vmin, vmax, out=finalsst.data)
    array = np.round((finalsst.data - offset) / scale).astype('uint8')
    array[indndv] = 255
    colortable = stfmt.format_colortable('cerbere_medspiration',
                                         vmax=vmax,
                                         vmax_pal=vmax,
                                         vmin=vmin,
                                         vmin_pal=vmin)
    band.append({
        'array': array,
        'scale': scale,
        'offset': offset,
        'description': 'sea surface temperature',
        'unittype': 'K',
        'nodatavalue': 255,
        'parameter_range': [vmin, vmax],
        'colortable': colortable
    })

    # Write geotiff
    if not write_netcdf:
        print 'Write geotiff'
        tifffile = stfmt.format_tifffilename(outdir, metadata, create_dir=True)
        stfmt.write_geotiff(tifffile, metadata, geolocation, band)
        # Write projected png/kml
        if pngkml:
            print 'Write projected png/kml'
            stfmt.write_pngkml_proj(tifffile)
    else:
        print 'Write netcdf'
        ncfile = stfmt.format_ncfilename(outdir, metadata, create_dir=True)
        band[0]['name'] = 'denoised_sst'
        band[0]['long_name'] = 'denoised sea surface temperature'
        band[0]['standard_name'] = 'sea_surface_temperature'
        # ymid = abs(gcpline[:, 0] - rspysize / 2.).argmin()
        # xdists = geod.inv(gcplon[ymid, :-1], gcplat[ymid, :-1],
        #                   gcplon[ymid, 1:], gcplat[ymid, 1:])[2] / \
        #                   np.abs(gcppixel[ymid, 1:] - gcppixel[ymid, :-1])
        # xmid = abs(gcppixel[0, :] - rspxsize / 2.).argmin()
        # ydists = geod.inv(gcplon[:-1, xmid], gcplat[:-1, xmid],
        #                   gcplon[1:, xmid], gcplat[1:, xmid])[2] / \
        #                   np.abs(gcpline[1:, xmid] - gcpline[:-1, xmid])
        # print xdists.min(), xdists.max(), xdists.mean()
        # # e.g. 999.763079208 999.763084628 999.763082543
        # print ydists.min(), ydists.max(), ydists.mean()
        # # e.g. 1006.4149472 1008.60679776 1007.5888004
        metadata['spatial_resolution'] = 1000.
        stfmt.write_netcdf(ncfile,
                           metadata,
                           geolocation,
                           band,
                           'swath',
                           ngcps=gcplon.shape)
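

# Example usage of modis_sst (a minimal sketch; the granule ID is illustrative but
# follows the expected 'A'/'T' + '%Y%j%H%M%S' pattern):
#     modis_sst('A2011338122500', '/path/to/output', download_dir='/tmp',
#               contrast='relative', write_netcdf=False)

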
def sar_xspec(infile, outdir, pngkml=False, vmax_re=None, vmax_im=None,
              make_rgb=True, ncolors=74):
    """
    """
    # Read/Process data
    print 'Read/Process data'
    sarim = sarimage(infile)
    mission = sarim.get_info('mission')
    if mission == 'S1A':
        sensor_name = 'Sentinel-1A'
        sensor_platform = 'Sentinel-1A'
        source_provider = 'ESA'
    else:
        raise Exception('S1A mission expected.')
    product = sarim.get_info('product')
    if product != 'SLC':
        raise Exception('SLC expected.')
    timefmt = '%Y-%m-%dT%H:%M:%S.%f'
    start_time = datetime.strptime(sarim.get_info('start_time'), timefmt)
    stop_time = datetime.strptime(sarim.get_info('stop_time'), timefmt)
    sensor_pass = sarim.get_info('pass')
    sensor_mode = sarim.get_info('mode')
    sensor_swath = sarim.get_info('swath')
    sensor_polarisation = sarim.get_info('polarisation')
    datagroup = sarim.get_info('safe_name').replace('.SAFE', '')
    pid = datagroup.split('_')[-1]
    dataname = os.path.splitext(os.path.basename(infile))[0] + '-' + pid
    # Compute SAR Xspec and make figures
    if sensor_mode == 'WV':
        azi_periodo_size = 1024
        azi_dist, ran_dist = 20000., 20000. # ignored in WV case
        xspec_size = (512, 512)
        fontsize = 'small'
    elif re.match(r'^S[1-6]$', sensor_mode) is not None:
        azi_periodo_size = 1024
        azi_dist, ran_dist = 10000., 10000.
        xspec_size = (512, 512) #(256, 256)
        fontsize = 'small' #'x-small'
    elif sensor_mode in ['IW', 'EW']:
        azi_periodo_size = 512
        azi_dist, ran_dist = 10000., 10000.
        xspec_size = (512, 512) #(256, 256)
        fontsize = 'small' #'x-small'
    else:
        raise Exception('No settings defined for mode {}'.format(sensor_mode))
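    # azi_dist / ran_dist are presumably the along-track / across-track tile
    # sizes (in metres) over which each cross-spectrum is estimated, and
    # azi_periodo_size the periodogram length in azimuth samples (assumption
    # about the sarimage2sarxspec_loop API).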
    sarxspec = sarimage2sarxspec_loop(sarim, azi_dist=azi_dist, ran_dist=ran_dist,
                                      azi_periodo_size=azi_periodo_size)
    cmap_re, cmap_im = get_cmaps(ncolors=ncolors)
    fig_re = make_sarxspec_fig(sarxspec, part='real', tau=1, kmax=2*np.pi/75,
                               kmin=2*np.pi/400, xspec_size=xspec_size,
                               uniq_vmax=True, north_oriented=True,
                               klim=[2*np.pi/400, 2*np.pi/200, 2*np.pi/100],
                               north_arrow=False, index_pos=None, vmax_pos='tr',
                               nvar_pos=None, fontsize=fontsize,
                               vmax=vmax_re, cmap=cmap_re)
    if sensor_mode == 'WV':
        ax = fig_re.gca()
        imnum = sarim.get_info('image_number')
        imnumtxt = '#{:03d}'.format(imnum)
        ax.text(0.51, 0.99, imnumtxt, transform=ax.transAxes,
                ha='left', va='top', fontsize=fontsize)
    if make_rgb:
        rgb_re = fig2rgb(fig_re)
        #print nuniqcolors(rgb_re)
    else:
        img_re, pal_re = fig2imgpal(fig_re, cmap_re)
    plt.close(fig_re)
    fig_im = make_sarxspec_fig(sarxspec, part='imag', tau=1, kmax=2*np.pi/75,
                               kmin=2*np.pi/400, xspec_size=xspec_size,
                               uniq_vmax=True, north_oriented=True,
                               klim=[2*np.pi/400, 2*np.pi/200, 2*np.pi/100],
                               north_arrow=False, index_pos=None, vmax_pos='tr',
                               nvar_pos=None, fontsize=fontsize,
                               vmax=vmax_im, cmap=cmap_im)
    if sensor_mode == 'WV':
        ax = fig_im.gca()
        imnum = sarim.get_info('image_number')
        imnumtxt = '#{:03d}'.format(imnum)
        ax.text(0.51, 0.99, imnumtxt, transform=ax.transAxes,
                ha='left', va='top', fontsize=fontsize)
    if make_rgb:
        rgb_im = fig2rgb(fig_im)
        #print nuniqcolors(rgb_im)
    else:
        img_im, pal_im = fig2imgpal(fig_im, cmap_im)
    plt.close(fig_im)
    if make_rgb:
        nlin, npix = rgb_re.shape[0:2]
    else:
        nlin, npix = img_re.shape
    # Handle GCPS
    # geoloc = sarim.get_info('geolocation_grid')
    # pix = np.array([0, geoloc['npixels']-1, geoloc['npixels']-1, 0])
    # lin = np.array([0, 0, geoloc['nlines']-1, geoloc['nlines']-1])
    # gcplon = geoloc['longitude'][lin, pix]
    # gcplat = geoloc['latitude'][lin, pix]
    # gcphei = np.zeros(4)
    # gcppix = np.array([0, 512, 512, 0])
    # gcplin = np.array([0, 0, 512, 512])
    #############################################
    # geoloc = sarim.get_info('geolocation_grid')
    # gcplon = geoloc['longitude']
    # gcplat = geoloc['latitude']
    # gcphei = np.zeros(gcplon.shape)
    # geod = pyproj.Geod(ellps='WGS84')
    # nglin, ngpix = geoloc['nlines'], geoloc['npixels']
    # ra_geo_spacing = geod.inv(gcplon[nglin/2, 0:-1], gcplat[nglin/2, 0:-1],
    #                           gcplon[nglin/2, 1:], gcplat[nglin/2, 1:])[2]
    # ra_geo_dist = np.hstack((0., ra_geo_spacing.cumsum()))
    # ra_geo_ndist = ra_geo_dist/ra_geo_dist[-1]
    # gcppix = np.tile((ra_geo_ndist*npix).reshape((1, -1)), (nglin, 1))
    # az_geo_spacing = geod.inv(gcplon[0:-1, ngpix/2], gcplat[0:-1, ngpix/2],
    #                           gcplon[1:, ngpix/2], gcplat[1:, ngpix/2])[2]
    # az_geo_dist = np.hstack((0., az_geo_spacing.cumsum()))
    # az_geo_ndist = az_geo_dist/az_geo_dist[-1]
    # gcplin = np.tile((az_geo_ndist*nlin).reshape((-1, 1)), (1, ngpix))
    # import pdb ; pdb.set_trace()
    #############################################
    #import pdb ; pdb.set_trace()
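    # Build GCPs along the perimeter of the processed extent: sample each edge
    # in image coordinates, read lon/lat there, then position the GCPs along
    # the figure edges proportionally to the geodesic distance along each side.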
    ext_min = sarxspec[0][0].get_info('extent')[0:2]
    ext_max = sarxspec[-1][-1].get_info('extent')[2:4]
    # geoloc = sarim.get_info('geolocation_grid')
    # nglin, ngpix = geoloc['nlines'], geoloc['npixels']
    nglin, ngpix = len(sarxspec)+1, len(sarxspec[0])+1
    pix = np.hstack((np.round(np.linspace(ext_min[1], ext_max[1], num=ngpix)),
                     np.ones(nglin)*ext_max[1],
                     np.round(np.linspace(ext_max[1], ext_min[1], num=ngpix)),
                     np.ones(nglin)*ext_min[1]))
    lin = np.hstack((np.ones(ngpix)*ext_min[0],
                     np.round(np.linspace(ext_min[0], ext_max[0], num=nglin)),
                     np.ones(ngpix)*ext_max[0],
                     np.round(np.linspace(ext_max[0], ext_min[0], num=nglin))))
    lon, lat = np.zeros(pix.size), np.zeros(pix.size)
    for ipt in range(pix.size):
        ext = [lin[ipt], pix[ipt], lin[ipt], pix[ipt]]
        lon[ipt] = sarim.get_data('lon', extent=ext, spacing=1)
        lat[ipt] = sarim.get_data('lat', extent=ext, spacing=1)
    ndist = np.zeros(pix.size)
    lim = [0, ngpix, ngpix+nglin, 2*ngpix+nglin, 2*ngpix+2*nglin]
    geod = pyproj.Geod(ellps='WGS84')
    for iside in range(4):
        pt0, pt1 = lim[iside], lim[iside+1]-1
        ddist = geod.inv(lon[pt0:pt1], lat[pt0:pt1], lon[pt0+1:pt1+1],
                         lat[pt0+1:pt1+1])[2]
        dist = ddist.cumsum()
        ndist[pt0:pt1+1] = np.hstack((0., dist))/dist.max()
    gcppix = np.hstack((ndist[lim[0]:lim[1]-1]*npix, np.ones(nglin-1)*npix,
                        (1-ndist[lim[2]:lim[3]-1])*npix, np.zeros(nglin-1)))
    gcplin = np.hstack((np.zeros(ngpix-1), ndist[lim[1]:lim[2]-1]*nlin,
                        np.ones(ngpix-1)*nlin, (1-ndist[lim[3]:lim[4]-1])*nlin))
    gcplon = np.hstack((lon[lim[0]:lim[1]-1], lon[lim[1]:lim[2]-1],
                        lon[lim[2]:lim[3]-1], lon[lim[3]:lim[4]-1]))
    gcplat = np.hstack((lat[lim[0]:lim[1]-1], lat[lim[1]:lim[2]-1],
                        lat[lim[2]:lim[3]-1], lat[lim[3]:lim[4]-1]))
    gcphei = np.zeros(gcplon.size)
    #import pdb ; pdb.set_trace()
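    # If the footprint straddles the antimeridian, shift negative longitudes by
    # 360 degrees so the GCP longitudes stay continuous.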
    if gcplon.min() < -135 and gcplon.max() > 135:
        gcplon[np.where(gcplon < 0)] += 360.
    #############################################
    if sensor_pass == 'Descending':
        gcppix = npix-gcppix
        gcplin = nlin-gcplin
    gcplin = nlin - gcplin  # because the figure is read and written from top to bottom
    # Loop on part and write
    for part in ['real', 'imag']:
        print part
        if part == 'real':
            product = 'SAR_cross-spectrum_real'
            nameext = '-xspec_re'
            if make_rgb:
                rgb = rgb_re
            else:
                img = img_re
                pal = pal_re
        elif part == 'imag':
            product = 'SAR_cross-spectrum_imaginary'
            nameext = '-xspec_im'
            if make_rgb:
                rgb = rgb_im
            else:
                img = img_im
                pal = pal_im
        # Construct metadata/geolocation/band(s)
        print 'Construct metadata/geolocation/band(s)'
        metadata = {}
        (dtime, time_range) = stfmt.format_time_and_range(start_time, stop_time,
                                                          units='ms')
        metadata['product_name'] = product
        metadata['name'] = dataname + nameext
        metadata['datetime'] = dtime
        metadata['time_range'] = time_range
        metadata['source_URI'] = infile
        metadata['source_provider'] = source_provider
        metadata['processing_center'] = 'OceanDataLab'
        metadata['conversion_software'] = 'Syntool'
        metadata['conversion_version'] = '0.0.0'
        metadata['conversion_datetime'] = stfmt.format_time(datetime.utcnow())
        metadata['parameter'] = ''
        metadata['type'] = 'remote sensing'
        metadata['sensor_type'] = 'SAR'
        metadata['sensor_name'] = sensor_name
        metadata['sensor_platform'] = sensor_platform
        metadata['sensor_mode'] = sensor_mode
        metadata['sensor_swath'] = sensor_swath
        metadata['sensor_polarisation'] = sensor_polarisation
        metadata['sensor_pass'] = sensor_pass
        metadata['datagroup'] = datagroup
        geolocation = {}
        geolocation['projection'] = sarim._mapper._handler.GetGCPProjection()
        geolocation['gcps'] = stfmt.format_gdalgcps(gcplon, gcplat, gcphei,
                                                    gcppix, gcplin)
        band = []
        if make_rgb:
            band.append({'array': rgb[:, :, 0]})
            band.append({'array': rgb[:, :, 1]})
            band.append({'array': rgb[:, :, 2]})
        else:
            band.append({'array': img, 'nodatavalue': 255,
                         'colortable': palette2colortable(pal)})
        # Write geotiff
        print 'Write geotiff'
        tifffile = stfmt.format_tifffilename(outdir, metadata, create_dir=True)
        stfmt.write_geotiff(tifffile, metadata, geolocation, band)
        # Write projected png/kml
        if pngkml:
            print 'Write projected png/kml'
            stfmt.write_pngkml_proj(tifffile)
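

# Example usage of sar_xspec (a minimal sketch; the input path is hypothetical and
# should point to a Sentinel-1A SLC product readable by sarimage):
#     sar_xspec('/path/to/S1A_SLC_product', '/path/to/output', make_rgb=True)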