Example #1
def ascat_sea_ice_roughness(infile, outdir,
                            vmin=0., vmax=0.122409712058,
                            vmin_pal=0., vmax_pal=0.122409712058):
    """
    """
    # Read/Process data
    print('Read/Process data')
    dset = Dataset(infile)
    dset.variables['sigma_40_mask'].set_auto_maskandscale(False)
    roughness = dset.variables['sigma_40_mask'][0, :, :].astype('uint8')
    indnodata = np.where((roughness == 0) | (roughness == 255))
    roughness = roughness * dset.variables['sigma_40_mask'].scale_factor + \
                dset.variables['sigma_40_mask'].add_offset
    dtime = num2date(dset.variables['time'][0], dset.variables['time'].units)
    pole = dset.pole
    name = os.path.splitext(os.path.basename(infile))[0]
    # Construct metadata/geolocation/band(s)
    print('Construct metadata/geolocation/band(s)')
    metadata = {}
    metadata['product_name'] = 'ASCAT_sea_ice_roughness'
    metadata['name'] = name
    metadata['datetime'] = stfmt.format_time(dtime)
    metadata['time_range'] = ['-12h', '+12h']
    metadata['source_URI'] = infile
    metadata['conversion_software'] = 'Syntool'
    metadata['conversion_version'] = '0.0.0'
    metadata['conversion_datetime'] = stfmt.format_time(datetime.utcnow())
    metadata['parameter'] = 'sea ice roughness'
    geolocation = {}
    srs = osr.SpatialReference()
    if pole == 'north':
        srs.ImportFromEPSG(3411) # use 3413 if problems with ellipsoid
        geolocation['projection'] = srs.ExportToWkt()
        geolocation['geotransform'] = [-3850000, 12500, 0, 5850000, 0, -12500]
    elif pole == 'south':
        srs.ImportFromEPSG(3412) # use 3976 if problems with ellipsoid
        geolocation['projection'] = srs.ExportToWkt()
        geolocation['geotransform'] = [-3950000, 12500, 0, 4350000, 0, -12500]
    else:
        raise Exception('Unknown pole: {}'.format(pole))
    #test_xy(dset, srs.ExportToProj4(), geolocation['geotransform'])
    band = []
    np.clip(roughness, vmin, vmax, out=roughness)
    offset, scale = vmin, (vmax - vmin) / 254.
    array = np.round((roughness - offset) / scale).astype('uint8')
    array[indnodata] = 255
    colortable = stfmt.format_colortable('pesket', vmin=vmin, vmax=vmax,
                                         vmin_pal=vmin_pal, vmax_pal=vmax_pal)
    band.append({'array':array, 'scale':scale, 'offset':offset,
                 'description':'sea ice roughness', 'unittype':'',
                 'nodatavalue':255, 'parameter_range':[vmin, vmax],
                 'colortable':colortable})
    # Write geotiff
    print('Write geotiff')
    tifffile = stfmt.format_tifffilename(outdir, metadata, create_dir=True)
    stfmt.write_geotiff(tifffile, metadata, geolocation, band)
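
All of these converters pack a geophysical field into an 8-bit band the same way: clip to [vmin, vmax], map linearly onto 0..254 with an offset/scale pair, and reserve 255 for nodata. A minimal, self-contained sketch of that encode/decode round trip (plain NumPy, hypothetical values, not part of Syntool itself):

import numpy as np

def encode_uint8(values, mask, vmin, vmax):
    # 0..254 are data levels, 255 is reserved for nodata
    offset, scale = vmin, (vmax - vmin) / 254.
    array = np.round((np.clip(values, vmin, vmax) - offset) / scale).astype('uint8')
    array[mask] = 255
    return array, offset, scale

def decode_uint8(array, offset, scale, nodata=255):
    # Recover approximate physical values; nodata pixels become NaN
    values = array.astype('float64') * scale + offset
    values[array == nodata] = np.nan
    return values

values = np.array([[0.0, 0.05], [0.2, 0.1]])
mask = np.array([[False, False], [True, False]])
packed, offset, scale = encode_uint8(values, mask, vmin=0., vmax=0.122409712058)
print(decode_uint8(packed, offset, scale))
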
def smos_l4_ecmwf_sst(infile, outdir,
                      vmin=271.05, vmax=309.15, vmin_pal=273., vmax_pal=305.):
    """
    """
    # Read/Process data
    print('Read/Process data')
    smos = SMOSNCFile(infile)
    time_start_units = smos.read_field('date_start').units
    time_start = num2date(smos.read_values('date_start')[0], time_start_units)
    time_stop_units = smos.read_field('date_stop').units
    time_stop = num2date(smos.read_values('date_stop')[0], time_stop_units)
    if (time_stop - time_start) == timedelta(days=6):
        time_stop += timedelta(days=1)
    lat = smos.read_values('lat')[::-1]
    lon = smos.read_values('lon')
    sst = smos.read_values('sea_surface_temperature')[::-1, :]

    # Construct metadata/geolocation/band(s)
    print('Construct metadata/geolocation/band(s)')
    dtime, time_range = stfmt.format_time_and_range(time_start, time_stop,
                                                    units='h')
    lat0, dlat = lat[0], lat[1] - lat[0]
    lon0, dlon = lon[0], lon[1] - lon[0]
    metadata = {}
    metadata['product_name'] = 'SMOS_L4_ECMWF_SST'
    metadata['name'] = os.path.splitext(os.path.basename(infile))[0]
    metadata['datetime'] = dtime
    metadata['time_range'] = time_range
    metadata['source_URI'] = infile
    metadata['source_provider'] = 'Ifremer/CNES'
    metadata['processing_center'] = ''
    metadata['conversion_software'] = 'Syntool'
    metadata['conversion_version'] = '0.0.0'
    metadata['conversion_datetime'] = stfmt.format_time(datetime.utcnow())
    metadata['parameter'] = 'sea surface temperature'
    geolocation = {}
    geolocation['projection'] = stfmt.format_gdalprojection()
    geolocation['geotransform'] = [lon0-dlon/2., dlon, 0,
                                   lat0-dlat/2., 0, dlat]
    band = []
    offset, scale = vmin, (vmax-vmin)/254.
    np.clip(sst.data, vmin, vmax, out=sst.data)
    array = np.round((sst.data - offset) / scale).astype('uint8')
    array[sst.mask] = 255
    colortable = stfmt.format_colortable('cerbere_medspiration',
                                         vmax=vmax, vmax_pal=vmax_pal,
                                         vmin=vmin, vmin_pal=vmin_pal)
    band.append({'array':array, 'scale':scale, 'offset':offset,
                 'description':'sea surface temperature', 'unittype':'K',
                 'nodatavalue':255, 'parameter_range':[vmin, vmax],
                 'colortable':colortable})

    # Write geotiff
    print('Write geotiff')
    tifffile = stfmt.format_tifffilename(outdir, metadata, create_dir=True)
    stfmt.write_geotiff(tifffile, metadata, geolocation, band)
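
The geotransform above assumes the lat/lon vectors hold pixel centers of a regular grid, so the raster origin is shifted by half a step to the pixel corner. A small sketch of that convention with a hypothetical 0.25-degree grid:

import numpy as np

lon = np.arange(-179.875, 180., 0.25)   # hypothetical pixel-center longitudes
lat = np.arange(89.875, -90., -0.25)    # hypothetical pixel-center latitudes
lon0, dlon = lon[0], lon[1] - lon[0]
lat0, dlat = lat[0], lat[1] - lat[0]
# GDAL geotransform: [corner_x, x_step, 0, corner_y, 0, y_step]
geotransform = [lon0 - dlon / 2., dlon, 0, lat0 - dlat / 2., 0, dlat]
print(geotransform)   # [-180.0, 0.25, 0, 90.0, 0, -0.25]
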
Example #3
def smos_l3_bec_sss(infile, outdir,
                    vmin=31.825, vmax=38.175, vmin_pal=32, vmax_pal=38):
    """
    """
    # Read/Process data
    logger.info('Read/Process data')
    smos = netCDF4.Dataset(infile, 'r')
    time_start = netCDF4.num2date(smos['time'][0], smos['time'].units)
    time_stop = time_start + datetime.timedelta(days=1)
    lat = smos['lat'][::-1]
    lon = smos['lon'][:]
    sss = smos['oa_sss'][0, ::-1, :]

    # Construct metadata/geolocation/band(s)
    logger.info('Construct metadata/geolocation/band(s)')
    dtime, time_range = stfmt.format_time_and_range(time_start, time_stop,
                                                    units='h')
    lat0, dlat = lat[0], lat[1] - lat[0]
    lon0, dlon = lon[0], lon[1] - lon[0]
    now = datetime.datetime.utcnow()
    metadata = {}
    metadata['product_name'] = 'SMOS_L3_BEC_SSS'
    metadata['name'] = os.path.splitext(os.path.basename(infile))[0]
    metadata['datetime'] = dtime
    metadata['time_range'] = time_range
    metadata['source_URI'] = infile
    src = 'SMOS Barcelona Expert Centre, ICM-CSIC / UPC, Barcelona, Spain'
    metadata['source_provider'] = src
    metadata['processing_center'] = ''
    metadata['conversion_software'] = 'Syntool'
    metadata['conversion_version'] = '0.0.0'
    metadata['conversion_datetime'] = stfmt.format_time(now)
    metadata['parameter'] = 'sea surface salinity'
    geolocation = {}
    geolocation['projection'] = stfmt.format_gdalprojection()
    geolocation['geotransform'] = [lon0-dlon/2., dlon, 0,
                                   lat0-dlat/2., 0, dlat]
    band = []
    offset, scale = vmin, (vmax-vmin)/254.
    numpy.clip(sss.data, vmin, vmax, out=sss.data)
    array = numpy.round((sss.data - offset) / scale).astype('uint8')
    array[sss.mask] = 255
    colortable = stfmt.format_colortable('matplotlib_jet',
                                         vmin=vmin, vmax=vmax,
                                         vmin_pal=vmin_pal, vmax_pal=vmax_pal)
    band.append({'array':array, 'scale':scale, 'offset':offset,
                 'description':'sea surface salinity', 'unittype':'PSS',
                 'nodatavalue':255, 'parameter_range':[vmin, vmax],
                 'colortable':colortable})

    # Write geotiff
    logger.info('Write geotiff')
    tifffile = stfmt.format_tifffilename(outdir, metadata, create_dir=True)
    stfmt.write_geotiff(tifffile, metadata, geolocation, band)
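
All of these entry points share the same calling convention: an input file, an output directory, and optional contrast bounds. A hypothetical invocation (paths made up for illustration) would look like:

smos_l3_bec_sss('/data/smos/BEC_L3_SSS_20140101.nc', '/tmp/syntool_output',
                vmin=31.825, vmax=38.175, vmin_pal=32, vmax_pal=38)
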
def write_geotiff(metadata, geolocation, band, infile, outdir):
    """"""
    now = datetime.datetime.utcnow()

    metadata['product_name'] = 'AQUARIUS_L2_SSS'
    metadata['source_URI'] = infile
    metadata['source_provider'] = 'Jet Propulsion Laboratory'
    metadata['processing_center'] = ''
    metadata['conversion_software'] = 'Syntool'
    metadata['conversion_version'] = '0.0.0'
    metadata['conversion_datetime'] = stfmt.format_time(now)
    metadata['parameter'] = 'sea surface salinity'

    # Write geotiff
    tifffile = stfmt.format_tifffilename(outdir, metadata, create_dir=True)
    stfmt.write_geotiff(tifffile, metadata, geolocation, band)
def sentinel3_slstr_rad(infile,
                        outdir,
                        vmin=None,
                        vmax=None,
                        max_sunglint=150,
                        min_percentile=2.0,
                        channels='false_rgb',
                        write_netcdf=False,
                        lut_path=None,
                        log_path=None,
                        lat_crop=80.0):

    t0 = datetime.utcnow()
    # Process nadir data
    view = 'n'
    fname = 'radiance'
    sltype = 'a'
    if isinstance(channels, (list, tuple)):
        bandnames = channels
        product_name = 'Sentinel-3_SLSTR'
    elif 'false_rgb' == channels:
        bandnames = ('S3', 'S2', 'S1')
        product_name = 'Sentinel-3_SLSTR_false_RGB'
    else:
        raise Exception('channels must be either "false_rgb" or a tuple of '
                        'bands')

    # convert band into column number in the LUT
    band_columns = [int(x[1:]) + 3 - 1 for x in bandnames]

    # Read coordinates and compute gcps
    (syntool_stats, metadata, geolocation, tie_lon, tie_lat, slice_lat0,
     slice_lat1, nrow_all, ncell_all, ngcps,
     __) = slstr.read_geometry(infile, bandnames, fname, sltype, view,
                               product_name, vmin, vmax, log_path)

    # Compute atmospheric radiance TOA correction
    L_toa = None
    if lut_path is not None:
        t_start = datetime.utcnow()
        # Compute atmospheric correction for the whole granule
        L_toa = atmospheric_correction(lut_path, infile, view, sltype,
                                       band_columns, bandnames, nrow_all,
                                       ncell_all)
        # Extract the slices of atmospheric correction that match the ones of
        # the bands after the removal of the high latitude columns.
        for iband in range(len(L_toa)):
            L_toa[iband] = L_toa[iband][slice_lat0, slice_lat1]
            # An erroneous LUT could produce nan values for the atmospheric
            # correction. This is not acceptable.
            if numpy.any(~numpy.isfinite(L_toa[iband])):
                raise Exception('Infinite or nan value found in the '
                                'atmospheric correction, please check that '
                                'the LUT has valid values for the angles '
                                'contained in the geometrie_t{}.nc '
                                'file'.format(view))
        t_stop = datetime.utcnow()
        syntool_stats['lut_computation'] = (t_stop - t_start).total_seconds()

    # Compute masks
    logger.info('Build masks')
    t_start = datetime.utcnow()
    quality_flags = slstr.read_mask(infile, sltype, view, slice_lat0,
                                    slice_lat1)

    try:
        contrast_mask, data_mask = build_mask_rgb(channels, quality_flags,
                                                  tie_lat, lat_crop)
    except OnlyNightData:
        logger.warn('No day data in granule.')
        sys.exit(0)
    t_stop = datetime.utcnow()
    syntool_stats['mask_computation'] = (t_stop - t_start).total_seconds()

    # Read band to compute histograms
    logger.info('Construct bands')
    t_start = datetime.utcnow()

    bands = []
    # Initialize min and max values
    if vmin is None:
        vmin = [None] * len(bandnames)
    if vmax is None:
        vmax = [None] * len(bandnames)
    _vmin = list(vmin)
    _vmax = list(vmax)
    for band_index in range(len(bandnames)):
        bandname = bandnames[band_index]
        fieldname = slstr.get_field_name(fname, bandname, sltype, view)
        band = slstr.read_band(infile, bandname, fieldname, slice_lat0,
                               slice_lat1)

        # Apply atmospheric correction
        if L_toa is not None:
            band -= L_toa[band_index][:, :]

        # Mask null and negative values: they are less than or equal to the
        # atmospheric correction and should probably have been flagged as
        # clouds.
        mask_negative = (band <= 0.0)

        logger.info('\tSet contrast')
        valid_ratio_lower_threshold = 0.001  # 0.1%

        # Select valid data to compute histograms
        valid_data_mask = (band.mask | contrast_mask | mask_negative)
        valid_data = get_valid_data_rgb(band, valid_data_mask, max_sunglint)

        # No need to produce an output if all data values are masked
        if numpy.all(data_mask):
            logger.warn('No valid value found for band {}'.format(bandname))
            sys.exit(0)

        # Retrieve minimum and maximum values from default or valid_data
        # histograms
        valid_ratio = float(valid_data.size) / float(band.data.size)
        syntool_stats[bandname]['valid_ratio'] = valid_ratio
        if valid_ratio_lower_threshold >= valid_ratio:
            _min, _max = slstr.apply_default_min_max(default_minmax, bandname,
                                                     _vmin[band_index],
                                                     _vmax[band_index],
                                                     syntool_stats)

        else:
            _min, _max = slstr.fromband_min_max(valid_data,
                                                bandname,
                                                _vmin[band_index],
                                                _vmax[band_index],
                                                syntool_stats,
                                                min_percentile=min_percentile,
                                                max_percentile=99.99)

        # take default min and max if min or max are too high (lake,
        # inland sea, clouds)
        _max = min(_max, max_sunglint)
        if (_min > 100):
            _min = default_minmax[bandname][0]
            _max = default_minmax[bandname][1]

        _vmin[band_index] = _min
        _vmax[band_index] = _max
        logger.info('\tContrast : vmin={} / vmax={}'.format(
            _vmin[band_index], _vmax[band_index]))
    min_values = [_vmin[band_index] for band_index in range(len(bandnames))]
    max_values = [_vmax[band_index] for band_index in range(len(bandnames))]

    t_stop = datetime.utcnow()
    syntool_stats['minmax_computation'] = (t_stop - t_start).total_seconds()
    syntool_stats['final_min'] = float(numpy.min(min_values))
    syntool_stats['final_max'] = float(numpy.max(max_values))

    _min = numpy.min(min_values)
    _max = numpy.max(max_values)
    _min = numpy.log(_min)
    _max = numpy.log(_max)
    scale = (_max - _min) / 254.
    offset = _min
    # Construct bands
    for band_index in range(len(bandnames)):
        bandname = bandnames[band_index]
        fieldname = slstr.get_field_name(fname, bandname, sltype, view)
        band = slstr.read_band(infile, bandname, fieldname, slice_lat0,
                               slice_lat1)

        # Apply atmospheric correction
        if L_toa is not None:
            band -= L_toa[band_index][:, :]

        # Mask null and negative values: they are less than or equal to the
        # atmospheric correction and should probably have been flagged.
        mask_negative = (band <= 0.0)

        # Compute the logarithm only for radiance values that are higher than
        # the atmospheric correction.
        bnd = numpy.log(band.data, where=(~mask_negative))

        logger.info('\tBytescaling')
        byte = bytescale(bnd, cmin=_min, cmax=_max, low=0, high=254)
        description = '{} {} (log)'.format(bandname, fname)
        if band.mask is not numpy.ma.nomask:
            byte[band.mask] = 255

        # Pixels with a radiance less than or equal to the atmospheric
        # correction are clipped to the minimal value.
        if numpy.any(mask_negative):
            byte[mask_negative] = 0

        # mask night data for rgb and invalid data for ir (cloud, land,
        # range value). Also mask data for extreme latitudes
        byte[data_mask] = 255

        band_range = [_vmin[band_index], _vmax[band_index]]
        bands.append({
            'array': byte,
            'plot': band.data[~mask_negative],
            'scale': scale,
            'offset': offset,
            'description': description,
            'unittype': '',
            'nodatavalue': 255,
            'parameter_range': band_range
        })

        if write_netcdf:
            bands[-1]['name'] = bandname
            bands[-1]['long_name'] = bandname
            bands[-1]['unittype'] = '1'

    logger.info('Make sure nodata are at the same locations in all bands')
    mask = numpy.any([_band['array'] == 255 for _band in bands], axis=0)
    for band in bands:
        band['array'][mask] = 255

    t_stop = datetime.utcnow()
    syntool_stats['bytescaling'] = (t_stop - t_start).total_seconds()

    if write_netcdf:
        metadata['spatial_resolution'] = 500
        ncfile = stfmt.format_ncfilename(outdir, metadata, create_dir=True)
        stfmt.write_netcdf(ncfile,
                           metadata,
                           geolocation,
                           bands,
                           'swath',
                           ngcps=ngcps)
    else:
        logger.info('Write geotiff')
        tifffile = stfmt.format_tifffilename(outdir, metadata, create_dir=True)
        stfmt.write_geotiff(tifffile, metadata, geolocation, bands)

    logger.info(datetime.utcnow() - t0)
    syntool_stats['total_time'] = (datetime.utcnow() - t0).total_seconds()
    if log_path is not None:
        import json
        full_path = os.path.normpath(infile)
        file_path = os.path.basename(full_path)
        file_name, _ = os.path.splitext(file_path)
        stats_path = os.path.join(log_path, '{}.json'.format(file_name))
        with open(stats_path, 'w') as f:
            json.dump(syntool_stats, f)
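
The bytescale helper used above historically came from scipy.misc (removed in recent SciPy releases); a minimal NumPy stand-in, assuming the same linear mapping of [cmin, cmax] onto [low, high], could look like this:

import numpy as np

def bytescale(data, cmin=None, cmax=None, low=0, high=255):
    # Linearly rescale data from [cmin, cmax] to [low, high] and cast to uint8
    cmin = float(np.min(data)) if cmin is None else float(cmin)
    cmax = float(np.max(data)) if cmax is None else float(cmax)
    cscale = cmax - cmin if cmax > cmin else 1.
    scaled = (np.asarray(data, dtype='float64') - cmin) * (high - low) / cscale + low
    return (np.clip(scaled, low, high) + 0.5).astype('uint8')

print(bytescale(np.array([0.1, 1.0, 10.0]), cmin=0.1, cmax=10.0, low=0, high=254))
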
def smos_l4_oscar_current(infile,
                          outdir,
                          vmin=0.,
                          vmax=5.08,
                          vmin_pal=0.,
                          vmax_pal=2.):
    """
    """
    # Read/Process data
    print('Read/Process data')
    smos = SMOSNCFile(infile)
    time_start_units = smos.read_field('date_start').units
    time_start = num2date(smos.read_values('date_start')[0], time_start_units)
    time_stop_units = smos.read_field('date_stop').units
    time_stop = num2date(smos.read_values('date_stop')[0], time_stop_units)
    if (time_stop - time_start) == timedelta(days=6):
        time_stop += timedelta(days=1)
    lat = smos.read_values('lat')[::-1]
    lon = smos.read_values('lon')
    ucur = smos.read_values('Zonal_component_surface_currents')[::-1, :]
    vcur = smos.read_values('Meridional_component_surface_currents')[::-1, :]

    # Construct metadata/geolocation/band(s)
    print('Construct metadata/geolocation/band(s)')
    dtime, time_range = stfmt.format_time_and_range(time_start,
                                                    time_stop,
                                                    units='h')
    lat0, dlat = lat[0], lat[1] - lat[0]
    lon0, dlon = lon[0], lon[1] - lon[0]
    metadata = {}
    metadata['product_name'] = 'SMOS_L4_OSCAR_current'
    metadata['name'] = os.path.splitext(os.path.basename(infile))[0]
    metadata['datetime'] = dtime
    metadata['time_range'] = time_range
    metadata['source_URI'] = infile
    metadata['source_provider'] = 'Ifremer/CNES'
    metadata['processing_center'] = ''
    metadata['conversion_software'] = 'Syntool'
    metadata['conversion_version'] = '0.0.0'
    metadata['conversion_datetime'] = stfmt.format_time(datetime.utcnow())
    metadata['parameter'] = ['current velocity', 'current direction']
    geolocation = {}
    geolocation['projection'] = stfmt.format_gdalprojection()
    geolocation['geotransform'] = [
        lon0 - dlon / 2., dlon, 0, lat0 - dlat / 2., 0, dlat
    ]
    band = []
    mask = ucur.mask | vcur.mask
    curvel = np.sqrt(ucur.data**2 + vcur.data**2)
    curdir = np.mod(
        np.arctan2(vcur.data, ucur.data) * 180. / np.pi + 360., 360.)
    offset, scale = vmin, (vmax - vmin) / 254.
    np.clip(curvel, vmin, vmax, out=curvel)
    array = np.round((curvel - offset) / scale).astype('uint8')
    array[mask] = 255
    colortable = stfmt.format_colortable('matplotlib_jet',
                                         vmin=vmin,
                                         vmax=vmax,
                                         vmin_pal=vmin_pal,
                                         vmax_pal=vmax_pal)
    band.append({
        'array': array,
        'scale': scale,
        'offset': offset,
        'description': 'current velocity',
        'unittype': 'm/s',
        'nodatavalue': 255,
        'parameter_range': [vmin, vmax],
        'colortable': colortable
    })
    array = np.round(curdir / 360. * 254.).astype('uint8')
    array[mask] = 255
    band.append({
        'array': array,
        'scale': 360. / 254.,
        'offset': 0.,
        'description': 'current direction',
        'unittype': 'deg',
        'nodatavalue': 255,
        'parameter_range': [0, 360.]
    })

    # Write geotiff
    print('Write geotiff')
    tifffile = stfmt.format_tifffilename(outdir, metadata, create_dir=True)
    stfmt.write_geotiff(tifffile, metadata, geolocation, band)
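
This converter (like the other current converters below) derives a velocity magnitude and a 0-360 degree direction (counter-clockwise from east, from arctan2 of v over u) before quantizing each into its own band. A standalone sketch of that step on hypothetical masked components:

import numpy as np
import numpy.ma as ma

ucur = ma.masked_invalid(np.array([[0.3, np.nan], [-0.1, 0.0]]))
vcur = ma.masked_invalid(np.array([[0.4, 0.2], [0.0, -0.5]]))
mask = ma.getmaskarray(ucur) | ma.getmaskarray(vcur)
curvel = np.sqrt(ucur.data ** 2 + vcur.data ** 2)
# arctan2 returns -180..180 degrees from east; shift into 0..360
curdir = np.mod(np.arctan2(vcur.data, ucur.data) * 180. / np.pi + 360., 360.)
print(curvel, curdir, mask, sep='\n')
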
Example #7
def viirs_chlora(infileid,
                 outdir,
                 download_dir='/tmp',
                 vmin=None,
                 vmax=None,
                 contrast='relative',
                 ngcps=(26, 32),
                 open_iterations=1,
                 nprocs=1,
                 pngkml=False,
                 write_netcdf=False):
    """
    """
    if contrast == 'med':
        listbox = [[-6., 35., 2.75, 42.48], [2.74, 30, 42.2, 47.00]]
    elif contrast == 'cwe':
        listbox = [[-23., 35.2, -5.5, 42.88], [-23., 42.8, 2.20, 51.]]
    elif contrast == 'nwe':
        listbox = [[-23., 50.8, 32.7, 68.]]
    elif contrast == 'gom':
        listbox = [[-98., 18.0, -80.5, 30.5]]
    elif contrast == 'agulhas':
        listbox = [[10.8437, -45.7404, 39.9799, -25.3019]]
    elif contrast == 'gs':
        listbox = [[-81.52, 20, -30, 45]]
    else:
        listbox = None
    # Search/Download data
    print('Search/Download data')
    if re.match(r'^V[0-9]{13}$', infileid) is None:
        raise Exception('Input for viirs_chlora is an ID '
                        '(e.g. V2014093110000)')
    product_id = 'VIIRSL2OC'
    date = datetime.strptime(infileid[1:], '%Y%j%H%M%S')
    viirsocfname = viirs.search_and_download(product_id, date, download_dir)

    # Read/Process data
    print('Read/Process data')
    # Read from OC file
    viirsocfile = viirs.VIIRSL2File(viirsocfname)
    lon = viirsocfile.read_lon()
    lat = viirsocfile.read_lat()
    chlora = viirsocfile.read_chlora()
    attrs = viirsocfile.read_attributes()
    viirsocfile.close()
    if listbox is not None:
        mask_box = np.zeros(np.shape(chlora.data))
        for i in range(np.shape(listbox)[0]):
            index_in = np.where((lon >= listbox[i][0]) & (lat >= listbox[i][1])
                                & (lon <= listbox[i][2])
                                & (lat <= listbox[i][3]))
            mask_box[index_in] = 1
        mask = (mask_box == 0) | ma.getmaskarray(chlora)
    else:
        mask = ma.getmaskarray(chlora)
    if mask.all():
        print('No data')
        sys.exit(0)
    # GCPs for resampling and geotiff georeference
    scansize = 16
    dtime0 = datetime.utcnow()
    gcps = resample.get_gcps_from_bowtie(lon, lat, scansize, ngcps=ngcps)
    dtime = datetime.utcnow() - dtime0
    print('Get GCPs from bowtie swath : {}'.format(dtime))
    gcplon, gcplat, gcpnpixel, gcpnline = gcps
    rspysize = lon.shape[0]
    geod = pyproj.Geod(ellps='WGS84')
    mid = abs(gcpnline[:, 0] - 0.5).argmin()
    xdists = geod.inv(gcplon[mid, :-1], gcplat[mid, :-1], gcplon[mid, 1:],
                      gcplat[mid, 1:])[2]
    xdist = np.sum(xdists) / abs(gcpnpixel[mid, -1] - gcpnpixel[mid, 0])
    rspxsize = np.round(xdist / 750.).astype('int') + 1
    gcpline = gcpnline * rspysize
    gcppixel = gcpnpixel * rspxsize

    # Resample with LinearNDInterpolator in output space
    dtime0 = datetime.utcnow()
    pix, lin = resample.get_points_from_gcps(gcplon,
                                             gcplat,
                                             gcppixel,
                                             gcpline,
                                             rspxsize,
                                             rspysize,
                                             1,
                                             lon,
                                             lat,
                                             nprocs=nprocs) - 0.5
    dtime = datetime.utcnow() - dtime0
    print('Get input coordinates in new grid : {}'.format(dtime))
    # Test input grid in output space
    # import matplotlib.pyplot as plt
    # for iscan in range(lon.shape[0] / scansize):
    #     pixscan = pix[iscan * scansize: (iscan+1) * scansize, :]
    #     linscan = lin[iscan * scansize: (iscan+1) * scansize, :]
    #     # maskscan = mask[iscan * scansize: (iscan+1) * scansize, :]
    #     # pixscan = pixscan[~maskscan]
    #     # linscan = linscan[~maskscan]
    #     plt.plot(pixscan.flatten(), linscan.flatten(), '+')
    # plt.show()
    # import pdb ; pdb.set_trace()
    # \Test input grid in output space
    dtime0 = datetime.utcnow()
    chlora.data[mask] = np.nan
    rspchlora = resample.resample_bowtie_linear(pix,
                                                lin,
                                                chlora.data,
                                                scansize,
                                                rspxsize,
                                                rspysize,
                                                show=False)
    rspmask = ma.getmaskarray(rspchlora)
    dtime = datetime.utcnow() - dtime0
    print('Interpolate in new grid : {}'.format(dtime))

    # Take log and open mask
    finalchlora = ma.log(rspchlora)
    finalmask = ~binary_opening(
        ~rspmask, structure=np.ones((3, 3)), iterations=open_iterations)
    finalchlora.mask = finalmask

    # Contrast
    if vmin is None:
        if contrast == 'relative':
            vmin = np.percentile(finalchlora.compressed(), 0.5)
        elif contrast == 'agulhas':
            dayofyear = float(attrs['start_time'].timetuple().tm_yday)
            vmin = -0.5 * np.cos((dayofyear - 45.) * 2. * np.pi / 365.) - 3.
        elif contrast in ('med', 'nwe', 'cwe'):
            vmin = np.percentile(finalchlora.compressed(), 2)
        else:
            raise Exception('Unknown contrast : {}'.format(contrast))
    else:
        if vmin != 0:
            vmin = math.log(vmin)
        else:
            vmin = np.percentile(finalchlora.compressed(), 0.5)
    if vmax is None:
        if contrast == 'relative':
            vmax = np.percentile(finalchlora.compressed(), 99.5)
        elif contrast == 'agulhas':
            dayofyear = float(attrs['start_time'].timetuple().tm_yday)
            vmax = 0.5 * np.cos((dayofyear - 45.) * 2. * np.pi / 365.) + 3.
        elif contrast in ('med', 'nwe', 'cwe'):
            vmax = np.percentile(finalchlora.compressed(), 98)
        else:
            raise Exception('Unknown contrast : {}'.format(contrast))
    else:
        if vmax != 0:
            vmax = math.log(vmax)
        else:
            vmax = np.percentile(finalchlora.compressed(), 98)

    # Flip (geotiff in "swath sense")
    finalchlora = finalchlora[::-1, ::-1]
    gcppixel = rspxsize - gcppixel
    gcpline = rspysize - gcpline

    # Construct metadata/geolocation/band(s)
    print('Construct metadata/geolocation/band(s)')
    metadata = {}
    (dtime, time_range) = stfmt.format_time_and_range(attrs['start_time'],
                                                      attrs['stop_time'],
                                                      units='ms')
    metadata['product_name'] = 'Chlorophyll_a_concentration_VIIRS'
    if contrast == 'relative':
        metadata['name'] = os.path.splitext(os.path.basename(viirsocfname))[0]
    else:
        metadata['name'] = '{}_{}'.format(
            os.path.splitext(os.path.basename(viirsocfname))[0], contrast)
    metadata['datetime'] = dtime
    metadata['time_range'] = time_range
    metadata['source_URI'] = viirsocfname
    metadata['source_provider'] = 'NOAA'
    metadata['processing_center'] = 'OceanDataLab'
    metadata['conversion_software'] = 'Syntool'
    metadata['conversion_version'] = '0.0.0'
    metadata['conversion_datetime'] = stfmt.format_time(datetime.utcnow())
    metadata['parameter'] = 'chlorophyll a concentration'
    metadata['type'] = 'remote sensing'
    metadata['sensor_type'] = 'radiometer'
    metadata['sensor_name'] = 'VIIRS'
    metadata['sensor_platform'] = 'Suomi-NPP'
    metadata['sensor_pass'] = attrs['pass']
    geolocation = {}
    geolocation['projection'] = stfmt.format_gdalprojection()
    gcpheight = np.zeros(gcppixel.shape)
    geolocation['gcps'] = stfmt.format_gdalgcps(gcplon, gcplat, gcpheight,
                                                gcppixel, gcpline)
    band = []
    indndv = np.where(ma.getmaskarray(finalchlora))
    offset, scale = vmin, (vmax - vmin) / 254.
    np.clip(finalchlora.data, vmin, vmax, out=finalchlora.data)
    array = np.round((finalchlora.data - offset) / scale).astype('uint8')
    array[indndv] = 255
    colortable = stfmt.format_colortable('chla_jet',
                                         vmax=vmax,
                                         vmax_pal=vmax,
                                         vmin=vmin,
                                         vmin_pal=vmin)
    band.append({
        'array': array,
        'scale': scale,
        'offset': offset,
        'description': 'chlorophyll a concentration',
        'unittype': 'log(mg/m3)',
        'nodatavalue': 255,
        'parameter_range': [vmin, vmax],
        'colortable': colortable
    })

    if not write_netcdf:
        # Write geotiff
        print('Write geotiff')
        tifffile = stfmt.format_tifffilename(outdir, metadata, create_dir=True)
        stfmt.write_geotiff(tifffile, metadata, geolocation, band)
        # Write projected png/kml
        if pngkml:
            print('Write projected png/kml')
            stfmt.write_pngkml_proj(tifffile)
    else:
        print('Write netcdf')
        ncfile = stfmt.format_ncfilename(outdir, metadata, create_dir=True)
        band[0]['name'] = 'chlor_a'
        band[0]['long_name'] = 'Chlorophyll Concentration, OCI Algorithm'
        band[0]['standard_name'] = ('mass_concentration_of_chlorophyll_a_'
                                    'in_sea_water')
        band[0]['unittype'] = 'mg m^-3 (log)'
        # ymid = abs(gcpline[:, 0] - rspysize / 2.).argmin()
        # xdists = geod.inv(gcplon[ymid, :-1], gcplat[ymid, :-1],
        #                   gcplon[ymid, 1:], gcplat[ymid, 1:])[2] / \
        #                   np.abs(gcppixel[ymid, 1:] - gcppixel[ymid, :-1])
        # xmid = abs(gcppixel[0, :] - rspxsize / 2.).argmin()
        # ydists = geod.inv(gcplon[:-1, xmid], gcplat[:-1, xmid],
        #                   gcplon[1:, xmid], gcplat[1:, xmid])[2] / \
        #                   np.abs(gcpline[1:, xmid] - gcpline[:-1, xmid])
        # print xdists.min(), xdists.max(), xdists.mean()
        # # e.g. 749.810419844 749.810438261 749.810429577
        # print ydists.min(), ydists.max(), ydists.mean()
        # # e.g. 737.874499629 739.856423757 738.87317625
        metadata['spatial_resolution'] = 750.
        stfmt.write_netcdf(ncfile,
                           metadata,
                           geolocation,
                           band,
                           'swath',
                           ngcps=gcplon.shape)
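
The binary_opening applied to the resampled VIIRS mask removes small isolated patches of valid pixels before contrast stretching; a small illustration of that invert/open/invert pattern with scipy.ndimage and a hypothetical mask:

import numpy as np
from scipy.ndimage import binary_opening

rspmask = np.ones((7, 7), dtype=bool)   # True = masked
rspmask[2:5, 2:5] = False               # a solid 3x3 valid block...
rspmask[0, 6] = False                   # ...and one isolated valid pixel

# Open the *valid* area: features smaller than the 3x3 structure disappear,
# then invert the result back into a mask.
finalmask = ~binary_opening(~rspmask, structure=np.ones((3, 3)), iterations=1)
print(finalmask[0, 6], finalmask[3, 3])   # True (isolated pixel masked), False (block kept)
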
Example #8
def current(infile,
            outdir,
            vmin=0.,
            vmax=1.50,
            vmin_pal=0.,
            vmax_pal=1.5,
            write_netcdf=False):
    """
    """
    # Read/Process data
    print('Read/Process data')
    ncfile = NCFile(infile)
    if 'id' in ncfile.read_global_attributes():
        l4id = ncfile.read_global_attribute('id')
    elif (re.match(r'^dt_global_allsat_madt_uv.*\.nc',
                   os.path.basename(infile)) is not None):
        l4id = 'Surface_height'
        # vmin = 0.; vmax = 2.; vmin_pal = 0.; vmax_pal = 2.
    elif (re.match(r'^dt_global_allsat_msla_uv.*\.nc',
                   os.path.basename(infile)) is not None):
        l4id = 'Sea_Level_Anomaly'
        # vmin = 0; vmax = 1; vmin_pal = 0.; vmax_pal = 1
    elif re.match(r'^mdt.*\.nc', os.path.basename(infile)) is not None:
        l4id = 'Mean_Dynamic_Topo'
        # vmin = 0; vmax = 1.5; vmin_pal = 0; vmax_pal = 1.5
    elif re.match(r'^Tide_.*\.nc', os.path.basename(infile)) is not None:
        l4id = 'Tide'
        # vmin = 0; vmax = 1.5; vmin_pal = 0; vmax_pal = 1.5
    else:
        raise Exception('Unknown file.')
    # /TMP
    ucur = ncfile.read_values(L4_MAPS[l4id]['uname'])[0, ::-1, :]
    vcur = ncfile.read_values(L4_MAPS[l4id]['vname'])[0, ::-1, :]
    lon = ncfile.read_values('lon')[:].astype('float64')
    lat = ncfile.read_values('lat')[-1:-3:-1].astype('float64')
    lon[lon > 180.] = lon[lon > 180.] - 360.
    indsorted = np.argsort(lon)
    lon = lon[indsorted]
    ucur = ucur[:, indsorted]
    vcur = vcur[:, indsorted]
    lon0, dlon, lat0, dlat = lon[0], lon[1] - lon[0], lat[0], lat[1] - lat[0]
    if l4id == 'Mean_Dynamic_Topo':
        dtime = datetime(2014, 12, 1)
    else:
        dtime_units = ncfile.read_field('time').units
        dtime = num2date(ncfile.read_values('time')[0], dtime_units)
    # rundtime = ncfile.read_global_attribute('date_modified')
    # rundtime = datetime.strptime(rundtime, '%Y%m%dT%H%M%SZ')
    # Construct metadata/geolocation/band(s)
    print('Construct metadata/geolocation/band(s)')
    metadata = {}
    metadata['product_name'] = L4_MAPS[l4id]['productname']
    metadata['name'] = os.path.splitext(os.path.basename(infile))[0]
    metadata['datetime'] = stfmt.format_time(dtime)
    metadata['time_range'] = L4_MAPS[l4id]['timerange']
    metadata['source_URI'] = infile
    metadata['source_provider'] = '*****@*****.**'
    metadata['processing_center'] = ''
    metadata['conversion_software'] = 'Syntool'
    metadata['conversion_version'] = '0.0.0'
    metadata['conversion_datetime'] = stfmt.format_time(datetime.utcnow())
    metadata['parameter'] = [L4_MAPS[l4id]['parameter']]
    # metadata['type'] = 'model'
    # metadata['model_longitude_resolution'] = abs(dlon)
    # metadata['model_latitude_resolution'] = abs(dlat)
    # metadata['model_analysis_datetime'] = stfmt.format_time(rundtime)
    geolocation = {}
    geolocation['projection'] = stfmt.format_gdalprojection()
    if l4id == 'Tide':
        ucur = ucur[:-1, ::]
        vcur = vcur[:-1, ::]
        geolocation['geotransform'] = [lon0, dlon, 0, lat0, 0, dlat]
    else:
        geolocation['geotransform'] = [
            lon0 - dlon / 2., dlon, 0, lat0 - dlat / 2., 0, dlat
        ]
    band = []
    offset, scale = vmin, (vmax - vmin) / 254.
    mask = ucur.mask | vcur.mask
    curvel = np.sqrt(ucur.data**2 + vcur.data**2)
    curdir = np.mod(
        np.arctan2(vcur.data, ucur.data) * 180. / np.pi + 360., 360.)
    np.clip(curvel, vmin, vmax, out=curvel)
    array = np.round((curvel - offset) / scale).astype('uint8')
    array[mask] = 255
    colortable = stfmt.format_colortable('matplotlib_jet',
                                         vmin=vmin,
                                         vmax=vmax,
                                         vmin_pal=vmin_pal,
                                         vmax_pal=vmax_pal)
    band.append({
        'array': array,
        'scale': scale,
        'offset': offset,
        'description': L4_MAPS[l4id]['productname'],
        'unittype': 'm/s',
        'nodatavalue': 255,
        'parameter_range': [vmin, vmax],
        'colortable': colortable
    })
    array = np.round(curdir / 360. * 254.).astype('uint8')
    array[mask] = 255
    band.append({
        'array': array,
        'scale': 360. / 254.,
        'offset': 0.,
        'description': 'current direction',
        'unittype': 'deg',
        'nodatavalue': 255,
        'parameter_range': [0, 360.]
    })

    # Write geotiff
    print('Write geotiff')
    tifffile = stfmt.format_tifffilename(outdir, metadata, create_dir=True)
    stfmt.write_geotiff(tifffile, metadata, geolocation, band)
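
This converter normalizes a 0..360 longitude axis by wrapping values above 180 and re-sorting the columns accordingly; a self-contained version of that reordering on a hypothetical coarse grid:

import numpy as np

lon = np.array([0., 60., 120., 180., 240., 300.])
ucur = np.tile(np.arange(lon.size, dtype='float64'), (2, 1))   # hypothetical field

lon[lon > 180.] = lon[lon > 180.] - 360.
indsorted = np.argsort(lon)
lon = lon[indsorted]
ucur = ucur[:, indsorted]
print(lon)       # [-120. -60. 0. 60. 120. 180.]
print(ucur[0])   # [4. 5. 0. 1. 2. 3.]
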
def aquarius_l3_sss(infile,
                    outdir,
                    vmin=31.825,
                    vmax=38.175,
                    vmin_pal=32,
                    vmax_pal=38):
    """
    """
    # Read/Process data
    logger.info('Read/Process data')
    dset = netCDF4.Dataset(infile, 'r')
    _time_start = datetime.datetime.strptime(dset.time_coverage_start,
                                             '%m-%d-%y')
    _time_stop = datetime.datetime.strptime(dset.time_coverage_end, '%m-%d-%y')
    time_start = _time_start + (_time_stop - _time_start) / 2
    time_stop = time_start + datetime.timedelta(days=1)

    lat = dset['lat'][::-1]
    lon = dset['lon'][:]
    sss = dset['sss_cap'][::-1, :]

    # Center on 0,0
    p_lon_idx = numpy.where(lon <= 180.)
    n_lon_idx = numpy.where(lon > 180.)
    p_lon = lon[p_lon_idx]
    n_lon = -360. + lon[n_lon_idx]
    lon[:len(n_lon)] = n_lon
    lon[len(n_lon):] = p_lon
    sss = numpy.roll(sss, len(n_lon), 1)

    # Construct metadata/geolocation/band(s)
    logger.info('Construct metadata/geolocation/band(s)')
    dtime, time_range = stfmt.format_time_and_range(time_start,
                                                    time_stop,
                                                    units='h')
    lat0, dlat = lat[0], lat[1] - lat[0]
    lon0, dlon = lon[0], lon[1] - lon[0]
    now = datetime.datetime.utcnow()
    metadata = {}
    metadata['product_name'] = 'AQUARIUS_L3_SSS'
    if 'Sea_Surface_Salinity_Rain_Corrected' == dset['sss_cap'].long_name:
        metadata['product_name'] += '_RAIN_CORRECTED'
    metadata['name'] = os.path.splitext(os.path.basename(infile))[0]
    metadata['datetime'] = dtime
    metadata['time_range'] = time_range
    metadata['source_URI'] = infile
    metadata['source_provider'] = 'Jet Propulsion Laboratory'
    metadata['processing_center'] = ''
    metadata['conversion_software'] = 'Syntool'
    metadata['conversion_version'] = '0.0.0'
    metadata['conversion_datetime'] = stfmt.format_time(now)
    metadata['parameter'] = 'sea surface salinity'
    geolocation = {}
    geolocation['projection'] = stfmt.format_gdalprojection()
    geolocation['geotransform'] = [
        lon0 - dlon / 2., dlon, 0, lat0 - dlat / 2., 0, dlat
    ]
    band = []
    offset, scale = vmin, (vmax - vmin) / 254.
    numpy.clip(sss.data, vmin, vmax, out=sss.data)
    array = numpy.round((sss.data - offset) / scale).astype('uint8')
    array[sss.mask] = 255
    colortable = stfmt.format_colortable('matplotlib_jet',
                                         vmin=vmin,
                                         vmax=vmax,
                                         vmin_pal=vmin_pal,
                                         vmax_pal=vmax_pal)
    band.append({
        'array': array,
        'scale': scale,
        'offset': offset,
        'description': 'sea surface salinity',
        'unittype': 'PSS',
        'nodatavalue': 255,
        'parameter_range': [vmin, vmax],
        'colortable': colortable
    })

    # Write geotiff
    logger.info('Write geotiff')
    tifffile = stfmt.format_tifffilename(outdir, metadata, create_dir=True)
    stfmt.write_geotiff(tifffile, metadata, geolocation, band)
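
Here the recentering is done with a single roll instead, which works because the 0..360 axis is contiguous: the columns with longitude above 180 are moved to the front and the data array is rolled by the same amount. A compact sketch on a hypothetical grid:

import numpy as np

lon = np.arange(0., 360., 45.)                # 0, 45, ..., 315
sss = np.tile(np.arange(lon.size), (3, 1))    # hypothetical field, one row per latitude
n_wrap = np.count_nonzero(lon > 180.)         # number of columns to move in front
lon = np.concatenate((lon[lon > 180.] - 360., lon[lon <= 180.]))
sss = np.roll(sss, n_wrap, axis=1)
print(lon)       # [-135. -90. -45. 0. 45. 90. 135. 180.]
print(sss[0])    # [5 6 7 0 1 2 3 4]
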
Example #10
def mercator_current(infile, outdir, date=None,
                     vmin=0., vmax=5.08, vmin_pal=0., vmax_pal=2.):
    """ """
    if date is None:
        raise Exception('mercator_current conversion needs a date !')
    # Read/Process data
    print 'Read/Process data'
    ncfile = Dataset(infile)
    time = ncfile.variables['time_counter']
    time_index = np.where(num2date(time[:], time.units) == date)[0]
    if time_index.size != 1:
        raise Exception('Date not found in mercator file!')
    time_index = time_index[0]
    dtime = num2date(time[time_index], time.units)
    lat = ncfile.variables['latitude'][::-1]
    lon = ncfile.variables['longitude'][:]
    ucur = ncfile.variables['u'][time_index, 0, ::-1, :]
    vcur = ncfile.variables['v'][time_index, 0, ::-1, :]
    if not isinstance(ucur, np.ma.MaskedArray):
        ucur = np.ma.masked_invalid(ucur)
    if not isinstance(vcur, np.ma.MaskedArray):
        vcur = np.ma.masked_invalid(vcur)
    if 'daily' in os.path.basename(infile) and \
       'agulhas' in os.path.basename(infile):
        product_name = 'MERCATOR_current_daily_agulhas'
        time_range = ['-12h', '+12h']
    else:
        raise Exception('Mercator product not taken into account for now.')
    # Construct metadata/geolocation/band(s)
    print('Construct metadata/geolocation/band(s)')
    metadata = {}
    metadata['product_name'] = product_name
    metadata['name'] = os.path.splitext(os.path.basename(infile))[0] + '_' +\
                       dtime.strftime('%Y%m%dT%H%M%S')
    metadata['datetime'] = stfmt.format_time(dtime)
    metadata['time_range'] = time_range
    metadata['source_URI'] = infile
    metadata['source_provider'] = 'MERCATOR'
    metadata['processing_center'] = ''
    metadata['conversion_software'] = 'Syntool'
    metadata['conversion_version'] = '0.0.0'
    metadata['conversion_datetime'] = stfmt.format_time(datetime.utcnow())
    metadata['parameter'] = ['current velocity', 'current direction']
    geolocation = {}
    geolocation['projection'] = stfmt.format_gdalprojection()
    dlon = lon[1] - lon[0]
    dlat = lat[1] - lat[0]
    geolocation['geotransform'] = [lon[0]-dlon/2., dlon, 0,
                                   lat[0]-dlat/2., 0, dlat]
    band = []
    mask = np.ma.getmaskarray(ucur) | np.ma.getmaskarray(vcur)
    curvel = np.sqrt(ucur.data**2 + vcur.data**2)
    curdir = np.mod(np.arctan2(vcur.data, ucur.data)*180./np.pi+360., 360.)
    offset, scale = vmin, (vmax-vmin)/254.
    np.clip(curvel, vmin, vmax, out=curvel)
    array = np.round((curvel - offset) / scale).astype('uint8')
    array[mask] = 255
    colortable = stfmt.format_colortable('matplotlib_jet', vmin=vmin, vmax=vmax,
                                         vmin_pal=vmin_pal, vmax_pal=vmax_pal)
    band.append({'array':array, 'scale':scale, 'offset':offset,
                 'description':'current velocity', 'unittype':'m/s',
                 'nodatavalue':255, 'parameter_range':[vmin, vmax],
                 'colortable':colortable})
    array = np.round(curdir/360.*254.).astype('uint8')
    array[mask] = 255
    band.append({'array':array, 'scale':360./254., 'offset':0.,
                 'description':'current direction', 'unittype':'deg',
                 'nodatavalue':255, 'parameter_range':[0, 360.]})
    # Write geotiff
    print('Write geotiff')
    tifffile = stfmt.format_tifffilename(outdir, metadata, create_dir=True)
    stfmt.write_geotiff(tifffile, metadata, geolocation, band)
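
mercator_current compares decoded dates to find the requested time step, while ww3_model_wave below compares encoded numbers via date2num; the latter avoids any calendar-object comparison and is sketched here against a hypothetical time axis:

from datetime import datetime

import numpy as np
from netCDF4 import date2num, num2date

time_units = 'hours since 2014-01-01 00:00:00'   # hypothetical time axis
time_values = np.array([0., 24., 48.])
date = datetime(2014, 1, 2)

tind = np.where(time_values == date2num(date, time_units))[0]
if tind.size != 1:
    raise Exception('Date not found in file!')
print(tind[0], num2date(time_values[tind[0]], time_units))   # 1 2014-01-02 00:00:00
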
Example #11
def _to_geotiff(infile, outdir, vmin, vmax, vmin_pal, vmax_pal, nrow, ncell,
                dtime, time_range, datagroup, platform, dset):
    """ """
    # It is necessary to create a 1-row overlap between the two half-orbits in
    # order for the merger to build a continuous shape
    extra_row = 1

    side_slices = {
        'left': slice(0, ncell // 2),
        'right': slice(ncell // 2, ncell)
    }

    for side in side_slices:
        side_slice = side_slices[side]

        side_lat = dset.variables['lat'][:, side_slice]
        side_lon = dset.variables['lon'][:, side_slice]

        # Make longitudes continuous but detect gaps bigger than the max_gap
        # threshold: it will be necessary to split the dataset where these
        # gaps appear because gdal may not be able to interpolate GCPs
        # correctly later in the ingestor (it says the shape intersects
        # itself).
        side_lon, splits = make_continuous_lon(side_lon, max_gap=60.)

        # Add begin and end to the splits list
        splits.append(0)
        splits.append(nrow)

        # Add an artificial split in the middle of the swath
        half_split = nrow // 2
        splits.append(half_split)

        # Make sure there are no duplicates and sort the indices in ascending
        # order
        splits = list(set(splits))
        splits.sort()

        nsplits = len(splits)
        parts = []
        extra_row_part = -1
        for i in range(nsplits - 1):
            extra_row = 0
            if splits[i + 1] == half_split:
                # Since the half split does not correspond to a void area in
                # the original data, add an extra row so that there will be no
                # visible cut when the ingestor merges the parts back together
                extra_row_part = i
                extra_row = 1
            part = slice(splits[i], extra_row + splits[i + 1])
            parts.append(part)
        nparts = len(parts)

        last_lon = None
        last_lat = None
        for part_no in range(0, nparts):
            dataset_name = '{}_{}_{}'.format(datagroup, side, part_no)

            part_slice = parts[part_no]
            swath_slice = [part_slice, side_slice]

            lat = side_lat[part_slice]
            lon = side_lon[part_slice]

            # Make sure there are no longitudes < -180 or the rastertiles
            # plugin of syntool-ingestor will not produce anything
            while 180. < np.min(lon):
                lon = lon - 360.
            while -180. > np.min(lon):
                lon = lon + 360.

            # Extract wind speed (module)
            wind_speed = dset.variables['wind_speed'][swath_slice]

            # Extract wind direction and make it counter-clockwise from east
            # (as expected by the ingestor)
            wind_dir = dset.variables['wind_dir'][swath_slice]
            wind_dir = np.mod(90. - wind_dir, 360.)

            # Build GCPs
            dgcp = 16.
            ngcps = (np.ceil(np.array(lon.shape) / dgcp) + 1.).astype('int32')
            pix = np.linspace(0, lon.shape[1] - 1, num=ngcps[1]).round()
            lin = np.linspace(0, lon.shape[0] - 1, num=ngcps[0]).round()
            pix2d, lin2d = np.meshgrid(pix.astype('int32'),
                                       lin.astype('int32'))
            gcplon = lon[lin2d, pix2d]
            gcplat = lat[lin2d, pix2d]
            gcppix = pix2d + 0.5
            gcplin = lin2d + 0.5
            gcphei = np.zeros(ngcps)

            if part_no == extra_row_part + 1:
                # Overwrite first GCP, but first you must make sure that
                # longitudes are in the same -180/+180 range.
                lon0 = gcplon[1, 0] - 180.
                last_lon[:] = np.mod(last_lon[:] - lon0, 360.) + lon0
                gcplon[0] = last_lon
                gcplat[0] = last_lat

            if part_no == extra_row_part:
                # Mask last row since it also exists in the next part
                wind_speed[-extra_row, :] = numpy.ma.masked
                wind_dir[-extra_row, :] = numpy.ma.masked
                # Store last GCPs lat/lon
                extra_row = 1
                last_lon = gcplon[-extra_row]
                last_lat = gcplat[-extra_row]

            # Construct metadata/geolocation/band(s)
            print('Construct metadata/geolocation/band(s)')
            utcnow = datetime.utcnow()
            metadata = {}
            metadata['product_name'] = '{}_ASCAT_L2B'.format(platform)
            metadata['datagroup'] = datagroup
            metadata['name'] = dataset_name
            metadata['datetime'] = dtime
            metadata['time_range'] = time_range
            metadata['source_URI'] = infile
            metadata['conversion_software'] = 'Syntool'
            metadata['conversion_version'] = '0.0.0'
            metadata['conversion_datetime'] = stfmt.format_time(utcnow)
            metadata['parameter'] = ['wind speed', 'wind direction']
            metadata['original_x_size'] = ncell
            metadata['original_y_size'] = nrow
            geolocation = {}
            geolocation['projection'] = stfmt.format_gdalprojection()
            geolocation['gcps'] = stfmt.format_gdalgcps(
                gcplon, gcplat, gcphei, gcppix, gcplin)
            print('Write geotiff')

            band = []
            offset, scale = vmin, (vmax - vmin) / 254.
            clipped = np.clip(np.ma.getdata(wind_speed), vmin, vmax)
            array = np.round((clipped - offset) / scale).astype('uint8')
            array[np.ma.getmaskarray(wind_speed)] = 255
            colortable = stfmt.format_colortable('noaa_wind',
                                                 vmax=vmax,
                                                 vmax_pal=vmax_pal,
                                                 vmin=vmin,
                                                 vmin_pal=vmin_pal)
            band.append({
                'array': array,
                'scale': scale,
                'offset': offset,
                'description': 'wind speed',
                'unittype': 'm/s',
                'nodatavalue': 255,
                'parameter_range': [vmin, vmax],
                'colortable': colortable
            })
            clipped = np.clip(np.ma.getdata(wind_dir), 0, 360)
            array = np.round(clipped / 360. * 254.).astype('uint8')
            array[np.ma.getmaskarray(wind_dir)] = 255
            band.append({
                'array': array,
                'scale': 360. / 254.,
                'offset': 0.,
                'description': 'wind direction',
                'unittype': 'deg',
                'nodatavalue': 255,
                'parameter_range': [0, 360.]
            })

            tifffile = stfmt.format_tifffilename(outdir,
                                                 metadata,
                                                 create_dir=True)
            stfmt.write_geotiff(tifffile, metadata, geolocation, band)
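
The GCP construction in _to_geotiff samples the lon/lat grid roughly every 16 pixels and attaches half-pixel offsets so GDAL treats the points as pixel centers. A standalone sketch of that sampling on a hypothetical small swath:

import numpy as np

lon = np.linspace(-5., 5., 40 * 20).reshape(40, 20)   # hypothetical swath longitudes
lat = np.linspace(40., 45., 40 * 20).reshape(40, 20)  # hypothetical swath latitudes

dgcp = 16.
ngcps = (np.ceil(np.array(lon.shape) / dgcp) + 1.).astype('int32')
pix = np.linspace(0, lon.shape[1] - 1, num=ngcps[1]).round()
lin = np.linspace(0, lon.shape[0] - 1, num=ngcps[0]).round()
pix2d, lin2d = np.meshgrid(pix.astype('int32'), lin.astype('int32'))

gcplon = lon[lin2d, pix2d]
gcplat = lat[lin2d, pix2d]
gcppix = pix2d + 0.5   # GCPs are attached to pixel centers
gcplin = lin2d + 0.5
gcphei = np.zeros(ngcps)
print(ngcps, gcplon.shape)   # [4 3] (4, 3)
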
def ww3_model_wave(infile,
                   outdir,
                   date=None,
                   max_forecast_hours=None,
                   vmin=0.,
                   vmax=25.4,
                   vmin_pal=0.,
                   vmax_pal=10.,
                   v2=False):
    """
    """
    # Read/Process data
    print('Read/Process data')
    ncfile = NCFile(infile)
    ww3 = {}
    ww3['time'] = ncfile.read_values('time')
    ww3['time_units'] = ncfile.read_field('time').units
    ww3uniqtime = ww3['time'].size == 1
    if ww3uniqtime:  # assume 3h
        ww3['deltatime'] = 180
    else:
        t01 = num2date(np.array(ww3['time'][0:2]), ww3['time_units'])
        ww3['deltatime'] = np.round(
            (t01[1] - t01[0]).total_seconds() / 60.).astype('int')
    if date is not None:
        tind = np.where(ww3['time'] == date2num(date, ww3['time_units']))[0]
        if tind.size == 1:
            tsl = slice(tind[0], tind[0] + 1)
            ww3['time'] = ww3['time'][tsl]
        else:
            raise Exception('Date not found in WW3 file.')
    else:
        tsl = slice(0, ww3['time'].size)
    if max_forecast_hours is not None:
        if ww3['time'].size != 1:
            raise Exception(
                'max_forecast_hours option works with only 1 time.')
        ww3time = num2date(ww3['time'][0], ww3['time_units'])
        if 'date_cycle' not in ncfile.read_global_attributes():
            raise Exception(
                'max_forecast_hours option works with date_cycle attribute.')
        cycletime_str = ncfile.read_global_attribute('date_cycle')
        if 10 == len(cycletime_str):
            cycletime_format = '%Y%m%d%H'
        elif 20 == len(cycletime_str) and cycletime_str.endswith('Z'):
            cycletime_format = '%Y-%m-%dT%H:%M:%SZ'
        else:
            raise Exception(
                'Cycletime format is not supported: {}'.format(cycletime_str))

        cycletime = datetime.strptime(cycletime_str, cycletime_format)
        forecast_hours = (ww3time - cycletime).total_seconds() / 3600.
        if forecast_hours > max_forecast_hours:
            raise Exception('Exceeds max_forecast_hours.')
    slices = [
        tsl,
        slice(0, ncfile.get_dimsize('latitude')),
        slice(0, ncfile.get_dimsize('longitude'))
    ]
    # slices = [tsl,
    #           slice(ncfile.get_dimsize('latitude')-1, None, -1),
    #           slice(0, 2*ncfile.get_dimsize('longitude'))]
    ncfieldnames = ncfile.get_fieldnames()
    fieldnames = [
        'hs', 'phs0', 'phs1', 'phs2', 'phs3', 'ptp0', 'ptp1', 'ptp2', 'ptp3',
        'pdir0', 'pdir1', 'pdir2', 'pdir3'
    ]
    ww3['source'] = [infile]
    for fieldname in fieldnames:
        if fieldname in ncfieldnames:
            ww3[fieldname] = ncfile.read_values(fieldname,
                                                slices=slices)[:, ::-1, :]
        else:
            infile2 = split_ww3_fname(infile, fieldname)
            if not os.path.exists(infile2):
                infile2 = other_split_ww3_fname(infile, fieldname)
            if os.path.exists(infile2):
                ncfile2 = NCFile(infile2)
                ww3[fieldname] = ncfile2.read_values(fieldname,
                                                     slices=slices)[:, ::-1, :]
                ncfile2.close()
                ww3['source'].append(infile2)
    ww3['npart'] = 0
    for indp in range(4):
        pin = [name + str(indp) in ww3 for name in ['phs', 'ptp', 'pdir']]
        if all(pin):
            ww3['npart'] += 1
        else:
            break
    if ww3['npart'] == 0:
        raise Exception('Could not find all partition variables.')
    ww3['area'] = ncfile.read_global_attribute('area')
    if 'global' in ww3['area'].lower():
        ww3['lon'] = ncfile.read_values('lon')
        ww3['lat'] = ncfile.read_values('lat')[::-1]
        ww3['lon_res'] = float(
            ncfile.read_global_attribute('longitude_resolution'))
        ww3['lat_res'] = float(
            ncfile.read_global_attribute('latitude_resolution'))
    elif ww3['area'] == 'ARCTIC-12km':
        ww3['lon'] = ncfile.read_values('lon')[::-1, :]
        ww3['lat'] = ncfile.read_values('lat')[::-1, :]
    else:
        raise Exception('Not implemented : area = "{}"'.format(ww3['area']))
    if 'run_time' in ncfile.read_global_attributes():
        run_time = ncfile.read_global_attribute('run_time')
        ww3['rundtime'] = datetime.strptime(run_time, '%Y-%m-%dT%H:%M:%SZ')
    ncfile.close()

    # Construct metadata/geolocation/band(s)
    print('Construct metadata/geolocation/band(s)')
    metadata = {}
    #metadata['product_name'] =
    #metadata['name'] =
    #metadata['datetime'] =
    metadata['time_range'] = [
        '-{:d}m'.format(ww3['deltatime'] // 2),
        '+{:d}m'.format(ww3['deltatime'] // 2)
    ]
    metadata['source_URI'] = ww3['source']
    metadata['source_provider'] = ['SHOM', 'Ifremer']
    metadata['processing_center'] = ''
    metadata['conversion_software'] = 'Syntool'
    metadata['conversion_version'] = '0.0.0'
    metadata['conversion_datetime'] = stfmt.format_time(datetime.utcnow())
    #metadata['parameter'] =
    metadata['type'] = 'model'
    #metadata['model_longitude_resolution'] =
    #metadata['model_latitude_resolution'] =
    if 'rundtime' in ww3:
        metadata['model_analysis_datetime'] = stfmt.format_time(
            ww3['rundtime'])
    geolocation = {}
    if 'global' in ww3['area'].lower():
        metadata['model_longitude_resolution'] = ww3['lon_res']
        metadata['model_latitude_resolution'] = ww3['lat_res']
        geolocation['projection'] = stfmt.format_gdalprojection()
        lon0, dlon = ww3['lon'][0], ww3['lon'][1] - ww3['lon'][0]
        lat0, dlat = ww3['lat'][0], ww3['lat'][1] - ww3['lat'][0]
        geolocation['geotransform'] = [
            lon0 - dlon / 2., dlon, 0, lat0 - dlat / 2., 0, dlat
        ]
        ww3['grid'] = 'GLOBAL'
    elif ww3['area'] == 'ARCTIC-12km':
        import pyproj
        srs = osr.SpatialReference()
        srs.ImportFromEPSG(3411)
        proj = pyproj.Proj(srs.ExportToProj4())
        x01, dummy = proj(ww3['lon'][:, [0, -1]], ww3['lat'][:, [0, -1]])
        x0, x1 = x01.mean(axis=0)
        dx = (x1 - x0) / (ww3['lon'].shape[1] - 1)
        dummy, y01 = proj(ww3['lon'][[0, -1], :], ww3['lat'][[0, -1], :])
        y0, y1 = y01.mean(axis=1)
        dy = (y1 - y0) / (ww3['lon'].shape[0] - 1)
        geolocation['projection'] = srs.ExportToWkt()
        geolocation['geotransform'] = [
            x0 - dx / 2., dx, 0, y0 - dy / 2., 0, dy
        ]
        # geolocation['geotransform'] = [-2600051.73564, 12500.2285676, 0,
        #                                2787547.79214, 0, -12500.2262608]
        ww3['grid'] = 'ARCTIC'
    else:
        raise Exception('Not implemented : area = "{}"'.format(ww3['area']))

    # Loop on time
    for itime in range(ww3['time'].size):
        dtime = num2date(ww3['time'][itime], ww3['time_units'])
        metadata['datetime'] = stfmt.format_time(dtime)
        if len(ww3['source']) == 1:
            basename = os.path.splitext(os.path.basename(infile))[0]
            if not ww3uniqtime:
                _date = dtime.strftime('%Y%m%d')
                _datetime = dtime.strftime('%Y%m%dT%H')
                if _date in basename and _datetime not in basename:
                    basename = basename.replace(_date,
                                                dtime.strftime('%Y%m%dT%HZ'))
                else:
                    raise Exception('Cannot insert time into basename '
                                    '"{}"'.format(basename))
        else:
            basename = 'WW3-' + ww3['grid'] + '-' + dtime.strftime(
                '%Y%m%dT%HZ')
            if 'hindcast' in infile.lower():
                basename = 'HINDCAST_' + basename

        ### Total HS ###
        # Update metadata
        metadata['product_name'] = 'WW3_model_wave_hs'
        if v2 == True:
            metadata['product_name'] += '_v2'
        metadata['name'] = basename + '-hs'
        metadata['parameter'] = 'wave significant height'
        # Make band
        band = []
        hs = ww3['hs'][itime, :, :]
        offset, scale = vmin, (vmax - vmin) / 254.0
        np.clip(hs, vmin, vmax, out=hs)
        array = np.round((hs - offset) / scale).astype('uint8')
        array[hs.mask] = 255
        colortable = stfmt.format_colortable('matplotlib_jet',
                                             vmin=vmin,
                                             vmax=vmax,
                                             vmin_pal=vmin_pal,
                                             vmax_pal=vmax_pal)
        band.append({
            'array': array,
            'scale': scale,
            'offset': offset,
            'description': metadata['parameter'],
            'unittype': 'm',
            'nodatavalue': 255,
            'parameter_range': [vmin, vmax],
            'colortable': colortable
        })
        # Write geotiff
        print 'Write geotiff'
        tifffile = stfmt.format_tifffilename(outdir, metadata, create_dir=True)
        stfmt.write_geotiff(tifffile, metadata, geolocation, band)

        ### Partitions ###
        phslst = [
            ww3['phs' + str(i)][itime, :, :] for i in range(ww3['npart'])
        ]
        phs = np.ma.dstack(phslst)
        ptplst = [
            ww3['ptp' + str(i)][itime, :, :] for i in range(ww3['npart'])
        ]
        ptp = np.ma.dstack(ptplst)
        pdirlst = [
            ww3['pdir' + str(i)][itime, :, :] for i in range(ww3['npart'])
        ]
        pdir = np.ma.dstack(pdirlst)
        # Reorder partitions by HS -> Keep WW3 order
        # phs.data[phs.mask] = -1000 # make sure masked values don't interfere with sorting
        # index = np.ogrid[:phs.shape[0], :phs.shape[1], :phs.shape[2]]
        # index[2] = (-phs).argsort(axis=2, kind='mergesort')
        # phs = phs[index]
        # ptp = ptp[index]
        # pdir = pdir[index]
        # pdir from_direction -> to_direction
        pdir = np.mod(pdir + 180.0, 360.0)
        # pdir clockwise from north -> counter clockwise from east
        pdir = np.mod(90.0 - pdir, 360.0)
        # Write each partition in a geotiff
        for i in range(ww3['npart']):
            # Update metadata
            lpartnum = 'partition ' + str(i)
            spartnum = 'part' + str(i)
            metadata['product_name'] = 'WW3_model_wave_' + spartnum
            if v2 == True:
                metadata['product_name'] += '_v2'
            metadata['name'] = basename + '-' + spartnum
            metadata['parameter'] = [
                'wave significant height ' + lpartnum,
                'wave peak period ' + lpartnum,
                'wave mean direction ' + lpartnum
            ]
            # Make bands
            band = []
            iphs, iptp, ipdir = phs[:, :, i], ptp[:, :, i], pdir[:, :, i]
            # HS
            #_vmin, _vmax = 0.0, 25.4
            offset, scale = vmin, (vmax - vmin) / 254.0
            np.clip(iphs, vmin, vmax, out=iphs)
            array = np.round((iphs - offset) / scale).astype('uint8')
            array[iphs.mask] = 255
            band.append({
                'array': array,
                'scale': scale,
                'offset': offset,
                'description': metadata['parameter'][0],
                'unittype': 'm',
                'nodatavalue': 255,
                'parameter_range': [vmin, vmax]
            })
            # Period
            _vmin, _vmax = 0.0, 25.4
            offset, scale = _vmin, (_vmax - _vmin) / 254.0
            np.clip(iptp, _vmin, _vmax, out=iptp)
            array = np.round((iptp - offset) / scale).astype('uint8')
            array[iptp.mask] = 255
            band.append({
                'array': array,
                'scale': scale,
                'offset': offset,
                'description': metadata['parameter'][1],
                'unittype': 's',
                'nodatavalue': 255,
                'parameter_range': [_vmin, _vmax]
            })
            # Direction
            _vmin, _vmax = 0.0, 360.0
            offset, scale = _vmin, (_vmax - _vmin) / 254.0
            np.clip(ipdir, _vmin, _vmax, out=ipdir)
            array = np.round((ipdir - offset) / scale).astype('uint8')
            array[ipdir.mask] = 255
            band.append({
                'array': array,
                'scale': scale,
                'offset': offset,
                'description': metadata['parameter'][2],
                'unittype': 'degree',
                'nodatavalue': 255,
                'parameter_range': [_vmin, _vmax]
            })
            # Write geotiff
            print 'Write geotiff'
            tifffile = stfmt.format_tifffilename(outdir,
                                                 metadata,
                                                 create_dir=True)
            stfmt.write_geotiff(tifffile,
                                metadata,
                                geolocation,
                                band,
                                drv_opts=['PHOTOMETRIC=MINISBLACK'])
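# A minimal standalone sketch of the uint8 packing scheme used throughout
# these converters: values are clipped to [vmin, vmax], mapped onto 0..254
# with an offset/scale pair, and 255 is reserved for nodata. The helper name
# and the sample values below are illustrative only, not part of the original
# converters.
import numpy as np

def pack_uint8(values, vmin, vmax):
    """Return (array, scale, offset) with masked/invalid samples set to 255."""
    values = np.ma.masked_invalid(np.ma.asarray(values, dtype='float64'))
    offset, scale = vmin, (vmax - vmin) / 254.
    clipped = np.clip(values.filled(vmin), vmin, vmax)
    array = np.round((clipped - offset) / scale).astype('uint8')
    array[np.ma.getmaskarray(values)] = 255
    return array, scale, offset

# pack_uint8([0., 1.2, np.nan, 30.], vmin=0., vmax=25.4)
# -> (array([  0,  12, 255, 254], dtype=uint8), 0.1, 0.0)
# Physical values are recovered later as array * scale + offset.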
Example #13
0
def smos_l4_locean_sss(infile,
                       outdir,
                       vmin=31.825,
                       vmax=38.175,
                       vmin_pal=32,
                       vmax_pal=38):
    """
    """
    # Read/Process data
    print 'Read/Process data'
    smos = SMOSNCFile(infile)
    time_start_units = smos.read_field('date_start').units
    time_start = num2date(smos.read_values('date_start')[0], time_start_units)
    time_stop_units = smos.read_field('date_stop').units
    time_stop = num2date(smos.read_values('date_stop')[0], time_stop_units)
    time_stop = time_stop + timedelta(days=1)
    lat = smos.read_values('latitude')[::-1]
    lon = smos.read_values('longitude')
    sss = smos.read_values('Time_interpolated_ISAS_sss')[0][::-1, :]

    # Construct metadata/geolocation/band(s)
    print 'Construct metadata/geolocation/band(s)'
    dtime, time_range = stfmt.format_time_and_range(time_start,
                                                    time_stop,
                                                    units='h')
    lat0, dlat = lat[0], lat[1] - lat[0]
    lon0, dlon = lon[0], lon[1] - lon[0]
    metadata = {}
    metadata['product_name'] = 'SMOS_L4_LOCEAN_ISAS_SSS'
    metadata['name'] = os.path.splitext(os.path.basename(infile))[0]
    metadata['datetime'] = dtime
    metadata['time_range'] = time_range
    metadata['source_URI'] = infile
    metadata['source_provider'] = 'SEANOE'
    metadata['processing_center'] = ''
    metadata['conversion_software'] = 'Syntool'
    metadata['conversion_version'] = '0.0.0'
    metadata['conversion_datetime'] = stfmt.format_time(datetime.utcnow())
    metadata['parameter'] = 'sea surface salinity'
    geolocation = {}
    geolocation['projection'] = stfmt.format_gdalprojection()
    geolocation['geotransform'] = [
        lon0 - dlon / 2., dlon, 0, lat0 - dlat / 2., 0, dlat
    ]
    band = []
    offset, scale = vmin, (vmax - vmin) / 254.
    np.clip(sss.data, vmin, vmax, out=sss.data)
    array = np.round((sss.data - offset) / scale).astype('uint8')
    array[sss.mask] = 255
    colortable = stfmt.format_colortable('matplotlib_jet',
                                         vmin=vmin,
                                         vmax=vmax,
                                         vmin_pal=vmin_pal,
                                         vmax_pal=vmax_pal)
    band.append({
        'array': array,
        'scale': scale,
        'offset': offset,
        'description': 'sea surface salinity',
        'unittype': 'PSS',
        'nodatavalue': 255,
        'parameter_range': [vmin, vmax],
        'colortable': colortable
    })

    # Write geotiff
    print 'Write geotiff'
    tifffile = stfmt.format_tifffilename(outdir, metadata, create_dir=True)
    stfmt.write_geotiff(tifffile, metadata, geolocation, band)
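# A hedged worked example of the GDAL geotransform built above: the grid is
# described by the centre of its first cell (lon0, lat0) and the cell sizes
# (dlon, dlat), so the top-left corner stored in the geotransform is shifted
# by half a cell. The numbers below are illustrative, not taken from a real
# SMOS file.
lon0, dlon = -179.875, 0.25   # first cell centre and longitude step (assumed)
lat0, dlat = 89.875, -0.25    # first cell centre and latitude step (assumed)
geotransform = [lon0 - dlon / 2., dlon, 0, lat0 - dlat / 2., 0, dlat]

def pixel_center(col, row, gt):
    # GDAL convention: x = gt[0] + (col + 0.5) * gt[1], y = gt[3] + (row + 0.5) * gt[5]
    return gt[0] + (col + 0.5) * gt[1], gt[3] + (row + 0.5) * gt[5]

# pixel_center(0, 0, geotransform) -> (-179.875, 89.875), i.e. back to (lon0, lat0)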
Example #14
0
def bathymetry_gebco(infile,
                     outdir,
                     vmin=-6000,
                     vmax=0,
                     vmin_pal=-6000.,
                     vmax_pal=0.):
    """
    """
    # Read/Process data
    print 'Read/Process data'
    ncfile = NCFile(infile)
    bat = ncfile.read_values('elevation')[:, :]
    bat = bat.astype('float32')
    bat[(bat < 0) & (bat >= -25)] = -25
    bat[(bat < -25) & (bat >= -50)] = -50
    bat[(bat < -50) & (bat >= -100)] = -100
    bat[(bat < -100) & (bat >= -500)] = -500
    bat[(bat < -500) & (bat >= -1000)] = -1000
    bat[(bat < -1000) & (bat >= -2000)] = -2000
    bat[(bat < -2000) & (bat >= -3000)] = -3000
    bat[(bat < -3000) & (bat >= -4000)] = -4000
    bat[(bat < -4000) & (bat >= -5000)] = -5000
    bat[(bat < -5000) & (bat >= -6000)] = -6000
    bat[(bat < -6000) & (bat >= -10000)] = -10000
    mask = (bat >= 0)
    offset, scale = vmin, (vmax - vmin) / 254.
    np.clip(bat, vmin, vmax, out=bat)
    bat -= offset
    bat /= scale
    bat = np.round(bat).astype('uint8')
    lon = ncfile.read_values('lon')[:2:1]
    lat = ncfile.read_values('lat')[:2:1]
    lon0 = lon[0]
    dlon = lon[-1] - lon[0]
    lat0 = lat[0]
    dlat = lat[-1] - lat[0]

    # Construct metadata/geolocation/band(s)
    print 'Construct metadata/geolocation/band(s)'
    metadata = {}
    metadata['product_name'] = 'GEBCO bathymetry'
    metadata['name'] = os.path.splitext(os.path.basename(infile))[0]
    metadata['datetime'] = stfmt.format_time(datetime(2012, 1, 1))
    metadata['time_range'] = ['-3660d', '+3660d']
    metadata['source_URI'] = infile
    metadata['source_provider'] = 'GEBCO'
    metadata['processing_center'] = ''
    metadata['conversion_software'] = 'Syntool'
    metadata['conversion_version'] = '0.0.0'
    metadata['conversion_datetime'] = stfmt.format_time(datetime.utcnow())
    metadata['parameter'] = 'bathymetry'
    metadata['type'] = 'remote sensing'
    metadata['longitude_resolution'] = abs(dlon)
    metadata['latitude_resolution'] = abs(dlat)
    geolocation = {}
    geolocation['projection'] = stfmt.format_gdalprojection()
    geolocation['geotransform'] = [
        lon0 - dlon / 2., dlon, 0, lat0 - dlat / 2., 0, dlat
    ]
    band = []
    bat[mask] = 255
    colortable = stfmt.format_colortable('ibcso',
                                         vmax=vmax,
                                         vmax_pal=vmax_pal,
                                         vmin=vmin,
                                         vmin_pal=vmin_pal)

    band.append({
        'array': bat,
        'scale': scale,
        'offset': offset,
        'description': 'bathymetry',
        'unittype': 'm',
        'nodatavalue': 255,
        'parameter_range': [vmin, vmax],
        'colortable': colortable
    })

    # Write geotiff
    print 'Write geotiff'
    tifffile = stfmt.format_tifffilename(outdir, metadata, create_dir=True)
    stfmt.write_geotiff(tifffile, metadata, geolocation, band)
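# The cascade of boolean assignments above snaps every negative elevation to
# the floor of a fixed set of depth bins. The same binning can be written more
# compactly with np.digitize; this is only an illustrative sketch, the
# original converter keeps the explicit cascade.
import numpy as np

DEPTH_EDGES = np.array([-10000., -6000., -5000., -4000., -3000., -2000.,
                        -1000., -500., -100., -50., -25., 0.])

def snap_to_depth_bins(bat):
    bat = np.asarray(bat, dtype='float32').copy()
    wet = (bat < 0) & (bat >= DEPTH_EDGES[0])
    # np.digitize returns i such that DEPTH_EDGES[i-1] <= value < DEPTH_EDGES[i]
    bat[wet] = DEPTH_EDGES[np.digitize(bat[wet], DEPTH_EDGES) - 1]
    return bat

# snap_to_depth_bins([-10., -25., -26., -7000., 12.])
# -> array([  -25.,   -25.,   -50., -10000.,    12.], dtype=float32)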
Example #15
0
def odyssea_sst(infile,
                outdir,
                vmin=271.05,
                vmax=309.15,
                vmin_pal=273.,
                vmax_pal=305.,
                write_netcdf=False):
    """
    """
    # Read/Process data
    print 'Read/Process data'
    odyssea = GHRSSTNCFile(infile)
    sst = odyssea.read_values('analysed_sst')[::-1, :]
    mask = odyssea.read_values('mask')[::-1, :]
    #sea_ice_fraction = odyssea.read_values('sea_ice_fraction')[::-1, :]
    # lon = odyssea.read_values('lon')
    # dlon = lon[1] - lon[0]
    # lon0 = lon[0] - dlon / 2
    # lat = odyssea.read_values('lat')[::-1]
    # dlat = lat[1] - lat[0]
    # lat0 = lat[0] - dlat / 2
    lon0 = odyssea.read_global_attribute('westernmost_longitude')
    dlon = float(odyssea.read_global_attribute('geospatial_lon_resolution'))
    lat0 = odyssea.read_global_attribute('northernmost_latitude')
    dlat = -float(odyssea.read_global_attribute('geospatial_lat_resolution'))
    dtime = odyssea.read_values('time')[0]
    dtime_units = odyssea.read_field('time').units
    dtime = num2date(dtime, dtime_units)
    odyssea_id = odyssea.read_global_attribute('id')
    if 'glob' in odyssea_id.lower():
        product_name = 'ODYSSEA_SST'
    elif 'saf' in odyssea_id.lower():
        product_name = 'ODYSSEA_SAF_SST'
    elif 'med' in odyssea_id.lower():
        product_name = 'ODYSSEA_MED_SST'
        #vmin_pal=283. ; vmax_pal=300.
    elif 'nwe' in odyssea_id.lower():
        product_name = 'ODYSSEA_NWE_SST'
    elif 'bra' in odyssea_id.lower():
        product_name = 'ODYSSEA_BRA_SST'
    elif 'nseabaltic' in odyssea_id.lower():
        product_name = 'DMI-OI_NSEABALTIC_SST'
    else:
        raise Exception('Unknown odyssea ID : {}'.format(odyssea_id))

    # Construct metadata/geolocation/band(s)
    print 'Construct metadata/geolocation/band(s)'
    metadata = {}
    metadata['product_name'] = product_name
    metadata['name'] = os.path.splitext(os.path.basename(infile))[0]
    metadata['datetime'] = stfmt.format_time(dtime)
    metadata['time_range'] = ['-12h', '+12h']
    metadata['source_URI'] = infile
    metadata['source_provider'] = 'Ifremer'
    metadata['processing_center'] = ''
    metadata['conversion_software'] = 'Syntool'
    metadata['conversion_version'] = '0.0.0'
    metadata['conversion_datetime'] = stfmt.format_time(datetime.utcnow())
    metadata['parameter'] = 'sea surface temperature'
    metadata['type'] = 'remote sensing'
    metadata['longitude_resolution'] = abs(dlon)
    metadata['latitude_resolution'] = abs(dlat)
    geolocation = {}
    geolocation['projection'] = stfmt.format_gdalprojection()
    geolocation['geotransform'] = [lon0, dlon, 0, lat0, 0, dlat]
    band = []
    #indndv = np.where((sst.mask == True) | (sea_ice_fraction > 0))
    indndv = np.where((sst.mask == True) | (mask != 1))
    offset, scale = vmin, (vmax - vmin) / 254.
    np.clip(sst, vmin, vmax, out=sst)
    array = np.round((sst - offset) / scale).astype('uint8')
    array[indndv] = 255
    colortable = stfmt.format_colortable('cerbere_medspiration',
                                         vmax=vmax,
                                         vmax_pal=vmax_pal,
                                         vmin=vmin,
                                         vmin_pal=vmin_pal)
    band.append({
        'array': array,
        'scale': scale,
        'offset': offset,
        'description': 'sea surface temperature',
        'unittype': 'K',
        'nodatavalue': 255,
        'parameter_range': [vmin, vmax],
        'colortable': colortable
    })

    # Write geotiff
    if write_netcdf == False:
        print 'Write geotiff'
        tifffile = stfmt.format_tifffilename(outdir, metadata, create_dir=True)
        stfmt.write_geotiff(tifffile, metadata, geolocation, band)
    elif write_netcdf == True:
        print 'Write netcdf'
        ncfile = stfmt.format_ncfilename(outdir, metadata, create_dir=True)
        band[0]['name'] = 'analysed_sst'
        band[0]['long_name'] = 'analysed sea surface temperature'
        band[0]['standard_name'] = 'sea_surface_temperature'
        metadata['spatial_resolution'] = min([abs(dlat), abs(dlon)]) * 111000.
        dgcps = np.round(1. / np.abs(np.array([dlat, dlon]))).astype('int')
        stfmt.write_netcdf(ncfile,
                           metadata,
                           geolocation,
                           band,
                           'grid_lonlat',
                           dgcps=dgcps)
Example #16
0
def sar_wind(infile,
             outdir,
             pngkml=False,
             valid_percent_min=1.,
             vmin=0.,
             vmax=25.4,
             vmin_pal=0.,
             vmax_pal=50 * 0.514):
    """
    """
    # Read/Process data
    print 'Read/Process data'
    sarwind = SAFEOCNNCFile(infile, product='WIND')
    mission = sarwind.read_global_attribute('missionName')
    if mission == 'S1A':
        sensor_name = 'Sentinel-1A'
        sensor_platform = 'Sentinel-1A'
        source_provider = 'ESA'
    elif mission == 'S1B':
        sensor_name = 'Sentinel-1B'
        sensor_platform = 'Sentinel-1B'
        source_provider = 'ESA'
    else:
        raise Exception('S1A/S1B missions expected.')
    start_time = sarwind.get_start_time()
    stop_time = sarwind.get_end_time()
    heading = sarwind.read_values('owiHeading')
    if np.sin((90 - heading[0, 0]) * np.pi / 180) > 0:
        sensor_pass = 'Ascending'
    else:
        sensor_pass = 'Descending'
    safe_name = os.path.basename(os.path.dirname(os.path.dirname(infile)))
    sensor_mode = safe_name.split('_')[1]
    if sensor_mode not in ['S1', 'S2', 'S3', 'S4', 'S5', 'S6', 'IW', 'EW']:
        raise Exception('S[1-6]/IW/EW modes expected.')
    sensor_swath = os.path.basename(infile).split('-')[1].upper()
    sensor_polarisation = sarwind.read_global_attribute('polarisation')
    datagroup = safe_name.replace('.SAFE', '')
    pid = datagroup.split('_')[-1]
    dataname = os.path.splitext(os.path.basename(infile))[0] + '-' + pid
    windspeed = sarwind.read_values('owiWindSpeed')
    if windspeed.shape == (1, 1):
        raise Exception('owiRaSize and owiAzSize equals 1 !')
    winddirection = sarwind.read_values('owiWindDirection')
    landflag = sarwind.read_values('owiLandFlag')
    inversionquality = sarwind.read_values('owiInversionQuality')
    windquality = sarwind.read_values('owiWindQuality')
    #pbright = sarwind.read_values('owiPBright')
    lon = sarwind.read_values('lon')
    lat = sarwind.read_values('lat')
    if np.ma.is_masked(lon) or np.ma.is_masked(lat):
        raise Exception('Some lon and/or lat is masked.')
    if np.all(lon == 0) or np.all(lat == 0):
        raise Exception('All lon and/or lat set to 0.')
    ngcps = np.ceil(np.array(lon.shape) / 10.).astype('int') + 1
    pix = np.linspace(0, lon.shape[1] - 1,
                      num=ngcps[1]).round().astype('int32')
    lin = np.linspace(0, lon.shape[0] - 1,
                      num=ngcps[0]).round().astype('int32')
    pix2d, lin2d = np.meshgrid(pix, lin)
    gcplon = lon[lin2d, pix2d]
    gcplat = lat[lin2d, pix2d]
    gcppix = pix2d + 0.5
    gcplin = lin2d + 0.5
    gcphei = np.zeros(ngcps)
    ## Make sure lon are continuous (no jump because of IDL crossing)
    ## (if IDL crossing, by convention we make lon to be around 180deg)
    # if gcplon.min() < -135 and gcplon.max() > 135:
    #     gcplon[np.where(gcplon < 0)] += 360.
    gcplonmid = gcplon[ngcps[0] / 2, ngcps[1] / 2]
    gcplon = np.mod(gcplon - (gcplonmid - 180.), 360.) + gcplonmid - 180.
    gcplonmin = gcplon.min()
    gcplon = gcplon - np.floor((gcplonmin + 180.) / 360.) * 360.

    # Construct metadata/geolocation/band(s)
    print 'Construct metadata/geolocation/band(s)'
    metadata = {}
    (dtime, time_range) = stfmt.format_time_and_range(start_time,
                                                      stop_time,
                                                      units='ms')
    metadata['product_name'] = 'SAR_wind'
    metadata['name'] = dataname
    metadata['datetime'] = dtime
    metadata['time_range'] = time_range
    metadata['source_URI'] = infile
    metadata['source_provider'] = source_provider
    metadata['processing_center'] = ''
    metadata['conversion_software'] = 'Syntool'
    metadata['conversion_version'] = '0.0.0'
    metadata['conversion_datetime'] = stfmt.format_time(datetime.utcnow())
    metadata['parameter'] = ['wind speed', 'wind direction']
    metadata['type'] = 'remote sensing'
    metadata['sensor_type'] = 'SAR'
    metadata['sensor_name'] = sensor_name
    metadata['sensor_platform'] = sensor_platform
    metadata['sensor_mode'] = sensor_mode
    metadata['sensor_swath'] = sensor_swath
    metadata['sensor_polarisation'] = sensor_polarisation
    metadata['sensor_pass'] = sensor_pass
    metadata['datagroup'] = datagroup
    geolocation = {}
    geolocation['projection'] = stfmt.format_gdalprojection()
    geolocation['gcps'] = stfmt.format_gdalgcps(gcplon, gcplat, gcphei, gcppix,
                                                gcplin)
    band = []
    #mask = landflag != 0
    mask = (landflag != 0) | \
        ((windspeed == 0) & (winddirection == 180)) | \
        ((windspeed == 0) & (windquality == 3)) | \
        ((windspeed == 0) & (inversionquality == 2))
    mask = np.ma.getdata(mask)  # we don't want to sum on a masked mask
    valid_percent = np.sum(~mask) / float(mask.size) * 100
    if valid_percent <= valid_percent_min:
        raise Exception(
            'Not enough valid data: {:0.3f}%'.format(valid_percent))
    # if np.all(mask):
    #     raise Exception('Data is fully masked !')
    offset, scale = vmin, (vmax - vmin) / 254.
    np.clip(windspeed, vmin, vmax, out=windspeed)
    array = np.round((windspeed - offset) / scale).astype('uint8')
    array[mask] = 255
    colortable = stfmt.format_colortable('noaa_wind',
                                         vmax=vmax,
                                         vmax_pal=vmax_pal,
                                         vmin=vmin,
                                         vmin_pal=vmin_pal)
    band.append({
        'array': array,
        'scale': scale,
        'offset': offset,
        'description': 'wind speed',
        'unittype': 'm/s',
        'nodatavalue': 255,
        'parameter_range': [vmin, vmax],
        'colortable': colortable
    })
    winddirection = np.mod(90. - winddirection + 180., 360.)
    array = np.round(winddirection / 360. * 254.).astype('uint8')
    array[mask] = 255
    band.append({
        'array': array,
        'scale': 360. / 254.,
        'offset': 0.,
        'description': 'wind direction',
        'unittype': 'deg',
        'nodatavalue': 255,
        'parameter_range': [0, 360.]
    })

    # Write geotiff
    print 'Write geotiff'
    tifffile = stfmt.format_tifffilename(outdir, metadata, create_dir=True)
    stfmt.write_geotiff(tifffile, metadata, geolocation, band)
    # Write projected png/kml
    if pngkml == True:
        print 'Write projected png/kml'
        stfmt.write_pngkml_proj(tifffile)
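# A hedged worked example of the GCP longitude recentring used above:
# longitudes are first wrapped into a 360-degree window centred on the middle
# GCP, so a scene crossing the dateline stays continuous, then shifted so the
# minimum falls back into [-180, 180). The values below are illustrative only.
import numpy as np

gcplon = np.array([178., 179.5, -179., -178.5])  # a swath crossing the dateline
gcplonmid = 179.5
gcplon = np.mod(gcplon - (gcplonmid - 180.), 360.) + gcplonmid - 180.
# -> [178., 179.5, 181., 181.5] : continuous, no jump at the dateline
gcplon = gcplon - np.floor((gcplon.min() + 180.) / 360.) * 360.
# min is 178., floor((178. + 180.) / 360.) is 0, so nothing moves here; a swath
# sitting entirely beyond 180 degrees would be shifted down by 360.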
def arome_model_wind(infile, outdir,
                     vmin=0., vmax=25.4, vmin_pal=0., vmax_pal=50*0.514):
    """
    """
    # Read/Process data
    windfield = Dataset(infile)
    time = windfield.variables['time'][:]
    time_units = windfield.variables['time'].units
    lon = windfield.variables['longitude'][:]
    lat = windfield.variables['latitude'][:]
    #lon0 = lon[0]
    #lat0 = lat[0]
    #dlon = lon[1]-lon[0]
    #dlat = lat[1]-lat[0]
    lon0 = -8.
    lat0 = 53.
    dlon = 0.025
    dlat = -0.025
    # Construct metadata/geolocation/band(s)
    print 'Construct metadata/geolocation/band(s)'
    for itime in range(time.size):
        dtime = num2date(time[itime], time_units)
        u10 = windfield.variables['u10'][itime, :, :]
        v10 = windfield.variables['v10'][itime, :, :]
        metadata = {}
        metadata['product_name'] = 'AROME_model_wind'
        metadata['name'] = 'AROME_'+dtime.strftime('%Y%m%dT%HZ')
        metadata['datetime'] = stfmt.format_time(dtime)
        metadata['time_range'] = ['-90m', '+90m']
        metadata['source_URI'] = infile
        metadata['source_provider'] = 'METEO FRANCE'
        metadata['processing_center'] = ''
        metadata['conversion_software'] = 'Syntool'
        metadata['conversion_version'] = '0.0.0'
        metadata['conversion_datetime'] = stfmt.format_time(datetime.utcnow())
        metadata['parameter'] = ['wind speed', 'wind direction']
        metadata['type'] = 'model'
        metadata['model_longitude_resolution'] = abs(dlon)
        metadata['model_latitude_resolution'] = abs(dlat)
        #metadata['model_analysis_datetime'] = stfmt.format_time(rundtime)
        geolocation = {}
        geolocation['projection'] = stfmt.format_gdalprojection()
        geolocation['geotransform'] = [lon[0]-dlon/2., dlon, 0,
                                       lat[0]-dlat/2., 0, dlat]
        band = []
        indndv = np.where(np.ma.getmaskarray(u10) | np.ma.getmaskarray(v10))
        windspeed = np.sqrt(u10**2 + v10**2)
        winddirection = np.mod(np.arctan2(v10, u10)*180./np.pi+360., 360.)
        offset, scale = vmin, (vmax-vmin)/254.
        np.clip(windspeed, vmin, vmax, out=windspeed)
        array = np.round((windspeed - offset) / scale).astype('uint8')
        array[indndv] = 255
        colortable = stfmt.format_colortable('noaa_wind', vmax=vmax, vmax_pal=vmax_pal,
                                             vmin=vmin, vmin_pal=vmin_pal)
        band.append({'array':array, 'scale':scale, 'offset':offset,
                     'description':'wind speed', 'unittype':'m/s',
                     'nodatavalue':255, 'parameter_range':[vmin, vmax],
                     'colortable':colortable})
        array = np.round(winddirection/360.*254.).astype('uint8')
        array[indndv] = 255
        band.append({'array':array, 'scale':360./254., 'offset':0.,
                     'description':'wind direction', 'unittype':'deg',
                     'nodatavalue':255, 'parameter_range':[0, 360.]})
        # Write geotiff
        print 'Write geotiff'
        tifffile = stfmt.format_tifffilename(outdir, metadata, create_dir=True)
        stfmt.write_geotiff(tifffile, metadata, geolocation, band)
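# A small hedged check of the u/v-to-speed/direction conversion used above:
# the direction is the trigonometric angle of the flow vector (counter
# clockwise from east, pointing where the wind blows to), folded into
# [0, 360). The sample components are illustrative only.
import numpy as np

u10, v10 = 3.0, 4.0
windspeed = np.sqrt(u10**2 + v10**2)                               # 5.0 m/s
winddirection = np.mod(np.arctan2(v10, u10) * 180. / np.pi + 360., 360.)
# ~53.13 deg, i.e. the wind blows towards the north-east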
def sar_doppler_exp(infile,
                    outdir,
                    pngkml=False,
                    vmin=-2.5,
                    vmax=2.5,
                    vmin_pal=-2.5,
                    vmax_pal=2.5):
    """
    """
    # Read/Process data
    print 'Read/Process data'
    sardop = Dataset(infile)
    mission = sardop.MISSIONNAME
    if mission == 'S1A':
        sensor_name = 'Sentinel-1A'
        sensor_platform = 'Sentinel-1A'
        source_provider = 'ESA'
    else:
        raise Exception('S1A mission expected.')
    doptime = sardop.variables['rvlZeroDopplerTime'][:]
    start_time = datetime.strptime(''.join(list(doptime[0, 0, :])),
                                   '%Y-%m-%dT%H:%M:%S.%f')
    stop_time = datetime.strptime(''.join(list(doptime[-1, -1, :])),
                                  '%Y-%m-%dT%H:%M:%S.%f')
    heading = sardop.variables['rvlHeading'][:]
    if np.sin((90 - heading.mean()) * np.pi / 180) > 0:
        sensor_pass = 'Ascending'
    else:
        sensor_pass = 'Descending'
    # safe_name = os.path.basename(os.path.dirname(os.path.dirname(infile)))
    # sensor_mode = safe_name.split('_')[1]
    # if sensor_mode not in ['S1', 'S2', 'S3', 'S4', 'S5', 'S6', 'IW', 'EW']:
    #     raise Exception('S[1-6]/IW/EW modes expected.')
    # sensor_swath = os.path.basename(infile).split('-')[1].upper()
    # sensor_polarisation = sardop.read_global_attribute('polarisation')
    # datagroup = safe_name.replace('.SAFE', '')
    # pid = datagroup.split('_')[-1]
    # dataname = os.path.splitext(os.path.basename(infile))[0] + '-' + pid
    dataname = os.path.splitext(os.path.basename(infile))[0]
    sensor_mode = dataname.split('_')[1]
    sensor_swath = sensor_mode
    sensor_polarisation = sardop.POLARISATION
    radvel = sardop.variables['rvlRadVel'][:]
    sweepangle = sardop.variables['rvlSweepAngle'][:]
    radvel = descalloping(radvel, sweepangle)
    radvel = smooth(radvel)
    inc = sardop.variables['rvlIncidenceAngle'][:]
    radvel /= np.sin(np.deg2rad(inc))
    #landflag = sardop.variables['rvlLandFlag'][:]
    lon = sardop.variables['rvlLon'][:]
    lat = sardop.variables['rvlLat'][:]
    if sensor_pass == 'Ascending':
        radvel *= -1
    ngcps = np.ceil(np.array(lon.shape) / 10.).astype('int') + 1
    pix = np.linspace(0, lon.shape[1] - 1,
                      num=ngcps[1]).round().astype('int32')
    lin = np.linspace(0, lon.shape[0] - 1,
                      num=ngcps[0]).round().astype('int32')
    pix2d, lin2d = np.meshgrid(pix, lin)
    gcplon = lon[lin2d, pix2d]
    gcplat = lat[lin2d, pix2d]
    gcppix = pix2d + 0.5
    gcplin = lin2d + 0.5
    gcphei = np.zeros(ngcps)
    if gcplon.min() < -135 and gcplon.max() > 135:
        gcplon[np.where(gcplon < 0)] += 360.

    # Construct metadata/geolocation/band(s)
    print 'Construct metadata/geolocation/band(s)'
    metadata = {}
    (dtime, time_range) = stfmt.format_time_and_range(start_time,
                                                      stop_time,
                                                      units='ms')
    metadata['product_name'] = 'SAR_doppler_exp'
    metadata['name'] = dataname
    metadata['datetime'] = dtime
    metadata['time_range'] = time_range
    metadata['source_URI'] = infile
    metadata['source_provider'] = source_provider
    metadata['processing_center'] = ''
    metadata['conversion_software'] = 'Syntool'
    metadata['conversion_version'] = '0.0.0'
    metadata['conversion_datetime'] = stfmt.format_time(datetime.utcnow())
    metadata['parameter'] = 'radial horizontal velocities'
    metadata['type'] = 'remote sensing'
    metadata['sensor_type'] = 'SAR'
    metadata['sensor_name'] = sensor_name
    metadata['sensor_platform'] = sensor_platform
    metadata['sensor_mode'] = sensor_mode
    metadata['sensor_swath'] = sensor_swath
    metadata['sensor_polarisation'] = sensor_polarisation
    metadata['sensor_pass'] = sensor_pass
    # metadata['datagroup'] = datagroup
    geolocation = {}
    geolocation['projection'] = stfmt.format_gdalprojection()
    geolocation['gcps'] = stfmt.format_gdalgcps(gcplon, gcplat, gcphei, gcppix,
                                                gcplin)
    band = []
    #indndv = np.where(landflag != 0)
    offset, scale = vmin, (vmax - vmin) / 254.
    np.clip(radvel, vmin, vmax, out=radvel)
    array = np.round((radvel - offset) / scale).astype('uint8')
    #array[indndv] = 255
    colortable = stfmt.format_colortable('doppler',
                                         vmax=vmax,
                                         vmax_pal=vmax_pal,
                                         vmin=vmin,
                                         vmin_pal=vmin_pal)
    band.append({
        'array': array,
        'scale': scale,
        'offset': offset,
        'description': 'radial horizontal velocities',
        'unittype': 'm/s',
        'nodatavalue': 255,
        'parameter_range': [vmin, vmax],
        'colortable': colortable
    })
    # Write geotiff
    print 'Write geotiff'
    tifffile = stfmt.format_tifffilename(outdir, metadata, create_dir=True)
    stfmt.write_geotiff(tifffile, metadata, geolocation, band)
    # Write projected png/kml
    if pngkml == True:
        print 'Write projected png/kml'
        stfmt.write_pngkml_proj(tifffile)
def sentinel3_slstr_bt(infile,
                       outdir,
                       vmin=None,
                       vmax=None,
                       min_percentile=2.0,
                       channels='ir',
                       file_range=None,
                       write_netcdf=False,
                       log_path=None,
                       lat_crop=80.0):

    t0 = datetime.utcnow()
    # Process nadir data
    view = 'n'
    fname = 'BT'
    sltype = 'i'
    if type(channels) is list or type(channels) is tuple:
        bandnames = channels
        product_name = 'Sentinel-3_SLSTR'
    elif 'ir' == channels:
        bandnames = ('S8', )
        product_name = 'Sentinel-3_SLSTR_IR'
    else:
        raise Exception('channels must be either "ir" or a tuple of band names')

    # Read coordinates and compute gcps
    (syntool_stats, metadata, geolocation, tie_lon, tie_lat, slice_lat0,
     slice_lat1, __, __, ngcps,
     month) = slstr.read_geometry(infile, bandnames, fname, sltype, view,
                                  product_name, vmin, vmax, log_path)

    # Compute masks
    logger.info('Build masks')
    t_start = datetime.utcnow()
    quality_flags = slstr.read_mask(infile, sltype, view, slice_lat0,
                                    slice_lat1)
    raw_cloud_flags = slstr.read_cloud_mask(infile, sltype, view, slice_lat0,
                                            slice_lat1)
    contrast_mask, data_mask = build_mask_ir(channels, quality_flags,
                                             raw_cloud_flags, tie_lat,
                                             lat_crop)
    t_stop = datetime.utcnow()
    syntool_stats['mask_computation'] = (t_stop - t_start).total_seconds()

    # Read band to compute histograms
    logger.info('Construct bands')
    t_start = datetime.utcnow()

    bands = []
    # Initialize min and max values
    if vmin is None:
        vmin = [None] * len(bandnames)
    if vmax is None:
        vmax = [None] * len(bandnames)
    _vmin = list(vmin)
    _vmax = list(vmax)
    for band_index in range(len(bandnames)):
        bandname = bandnames[band_index]
        fieldname = slstr.get_field_name(fname, bandname, sltype, view)
        band = slstr.read_band(infile, bandname, fieldname, slice_lat0,
                               slice_lat1)

        logger.info('\tSet contrast')
        valid_ratio_lower_threshold = 0.001  # 0.1%

        # Select valid data to compute histograms
        valid_data_mask = (band.mask | contrast_mask)
        valid_data, extra_data_mask, updated_min = get_valid_data_ir(
            tie_lon, tie_lat, file_range, band, valid_data_mask, bandname,
            month)

        if extra_data_mask is not None:
            data_mask = (data_mask | extra_data_mask)
        if updated_min is not None:
            _vmin = [
                updated_min,
            ]
            _min = updated_min

        # No need to produce an output if all data values are masked
        if numpy.all(data_mask):
            logger.warn('No valid value found for band {}'.format(bandname))
            sys.exit(0)

        # Retrieve minimum and maximum values from default or valid_data
        # histograms
        valid_ratio = float(valid_data.size) / float(band.data.size)
        syntool_stats[bandname]['valid_ratio'] = valid_ratio
        if valid_ratio_lower_threshold >= valid_ratio:
            _min, _max = slstr.apply_default_min_max(default_minmax, bandname,
                                                     _vmin[band_index],
                                                     _vmax[band_index],
                                                     syntool_stats)

        else:
            _min, _max = slstr.fromband_min_max(valid_data,
                                                bandname,
                                                _vmin[band_index],
                                                _vmax[band_index],
                                                syntool_stats,
                                                min_percentile=min_percentile,
                                                max_percentile=99.99)

        _vmin[band_index] = _min
        _vmax[band_index] = _max
        logger.info('\tContrast : vmin={} / vmax={}'.format(
            _vmin[band_index], _vmax[band_index]))
    min_values = [_vmin[band_index] for band_index in range(len(bandnames))]
    max_values = [_vmax[band_index] for band_index in range(len(bandnames))]

    t_stop = datetime.utcnow()
    syntool_stats['minmax_computation'] = (t_stop - t_start).total_seconds()
    syntool_stats['final_min'] = float(numpy.min(min_values))
    syntool_stats['final_max'] = float(numpy.max(max_values))

    _min = numpy.min(min_values)
    _max = numpy.max(max_values)
    scale = (_max - _min) / 254.
    offset = _min
    # Construct bands
    for band_index in range(len(bandnames)):
        bandname = bandnames[band_index]
        fieldname = slstr.get_field_name(fname, bandname, sltype, view)
        band = slstr.read_band(infile, bandname, fieldname, slice_lat0,
                               slice_lat1)

        bnd = band.data

        logger.info('\tBytescaling')
        byte = bytescale(bnd, cmin=_min, cmax=_max, low=0, high=254)
        description = '{} {} (log)'.format(bandname, fname)
        if band.mask is not numpy.ma.nomask:
            byte[band.mask] = 255

        # mask night data for rgb and invalid data for ir (cloud, land,
        # range value). Also mask data for extreme latitudes
        byte[data_mask] = 255

        band_range = [_vmin[band_index], _vmax[band_index]]
        description = '{} {}'.format(bandname, fname)  # no log for IR
        colortable = stfmt.format_colortable('cerbere_medspiration',
                                             vmax=_max,
                                             vmax_pal=_max,
                                             vmin=_min,
                                             vmin_pal=_min)
        bands.append({
            'array': byte,
            'plot': band.data,
            'scale': scale,
            'offset': offset,
            'description': description,
            'unittype': '',
            'nodatavalue': 255,
            'parameter_range': band_range,
            'colortable': colortable
        })

        if write_netcdf:
            bands[-1]['name'] = bandname
            bands[-1]['long_name'] = bandname
            bands[-1]['unittype'] = '1'

    logger.info('Make sure nodata are at the same locations in all bands')
    mask = numpy.any([_band['array'] == 255 for _band in bands], axis=0)
    for band in bands:
        band['array'][mask] = 255

    t_stop = datetime.utcnow()
    syntool_stats['bytescaling'] = (t_stop - t_start).total_seconds()

    if write_netcdf:
        metadata['spatial_resolution'] = 1000
        ncfile = stfmt.format_ncfilename(outdir, metadata, create_dir=True)
        stfmt.write_netcdf(ncfile,
                           metadata,
                           geolocation,
                           bands,
                           'swath',
                           ngcps=ngcps)
    else:
        logger.info('Write geotiff')
        tifffile = stfmt.format_tifffilename(outdir, metadata, create_dir=True)
        stfmt.write_geotiff(tifffile, metadata, geolocation, bands)

    logger.info(datetime.utcnow() - t0)
    syntool_stats['total_time'] = (datetime.utcnow() - t0).total_seconds()
    if log_path is not None:
        import json
        full_path = os.path.normpath(infile)
        file_path = os.path.basename(full_path)
        file_name, _ = os.path.splitext(file_path)
        stats_path = os.path.join(log_path, '{}.json'.format(file_name))
        with open(stats_path, 'w') as f:
            json.dump(syntool_stats, f)
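# `bytescale` (historically provided by scipy.misc) linearly maps data from
# [cmin, cmax] onto the integer range [low, high]. A minimal numpy-only
# equivalent is sketched below so the call above remains readable even where
# scipy no longer ships it; the function name is an assumption, not the
# project's helper.
import numpy

def bytescale_like(data, cmin, cmax, low=0, high=254):
    scaled = (numpy.asarray(data, dtype='float64') - cmin) / float(cmax - cmin)
    scaled = numpy.clip(scaled, 0., 1.) * (high - low) + low
    return scaled.astype('uint8')

# bytescale_like([270., 285., 300.], cmin=270., cmax=300.)
# -> array([  0, 127, 254], dtype=uint8)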
def ghrsst_oi_mw_sst(infile,
                     outdir,
                     vmin=271.05,
                     vmax=309.15,
                     vmin_pal=273.,
                     vmax_pal=305.):
    """
    """
    # Read/Process data
    print 'Read/Process data'
    ghrsst = GHRSSTNCFile(infile)
    sst = ghrsst.read_values('analysed_sst')[::-1, :]
    lon = ghrsst.read_values('lon')[:]
    indsorted = np.argsort(lon)
    sst = sst[:, indsorted]
    mask = ghrsst.read_values('mask')[::-1, :]
    mask = mask[:, indsorted]
    #sea_ice_fraction = ghrsst.read_values('sea_ice_fraction')[::-1, :]
    # lon = ghrsst.read_values('lon')
    # dlon = lon[1] - lon[0]
    # lon0 = lon[0] - dlon / 2
    # lat = ghrsst.read_values('lat')[::-1]
    # dlat = lat[1] - lat[0]
    # lat0 = lat[0] - dlat / 2
    lon0 = ghrsst.read_global_attribute('westernmost_longitude')
    dlonstring = ghrsst.read_global_attribute('geospatial_lon_resolution')
    dlon = float(dlonstring.strip(" deg"))
    lat0 = ghrsst.read_global_attribute('northernmost_latitude')
    dlatstring = ghrsst.read_global_attribute('geospatial_lat_resolution')
    dlat = -float(dlatstring.strip(" deg"))
    dtime = ghrsst.read_values('time')[0]
    dtime_units = ghrsst.read_field('time').units
    dtime = num2date(dtime, dtime_units)
    ghrsst_id = ghrsst.read_global_attribute('id')
    if 'glob' in ghrsst_id.lower():
        product_name = 'REMSS_MWOI_SST'
    else:
        raise Exception('Unknown REMSS ID : {}'.format(ghrsst_id))

    # Construct metadata/geolocation/band(s)
    print 'Construct metadata/geolocation/band(s)'
    metadata = {}
    metadata['product_name'] = product_name
    metadata['name'] = os.path.splitext(os.path.basename(infile))[0]
    metadata['datetime'] = stfmt.format_time(dtime)
    metadata['time_range'] = ['-12h', '+12h']
    metadata['source_URI'] = infile
    metadata['source_provider'] = 'Ifremer'
    metadata['processing_center'] = ''
    metadata['conversion_software'] = 'Syntool'
    metadata['conversion_version'] = '0.0.0'
    metadata['conversion_datetime'] = stfmt.format_time(datetime.utcnow())
    metadata['parameter'] = 'sea surface temperature'
    metadata['type'] = 'remote sensing'
    metadata['longitude_resolution'] = abs(dlon)
    metadata['latitude_resolution'] = abs(dlat)
    geolocation = {}
    geolocation['projection'] = stfmt.format_gdalprojection()
    geolocation['geotransform'] = [
        lon0 - dlon / 2, dlon, 0, lat0 - dlat / 2, 0, dlat
    ]
    band = []
    #indndv = np.where((sst.mask == True) | (sea_ice_fraction > 0))
    indndv = np.where((sst.mask == True) | (mask != 65))
    offset, scale = vmin, (vmax - vmin) / 254.
    np.clip(sst, vmin, vmax, out=sst)
    array = np.round((sst - offset) / scale).astype('uint8')
    array[indndv] = 255
    colortable = stfmt.format_colortable('cerbere_medspiration',
                                         vmax=vmax,
                                         vmax_pal=vmax_pal,
                                         vmin=vmin,
                                         vmin_pal=vmin_pal)
    band.append({
        'array': array,
        'scale': scale,
        'offset': offset,
        'description': 'sea surface temperature',
        'unittype': 'K',
        'nodatavalue': 255,
        'parameter_range': [vmin, vmax],
        'colortable': colortable
    })

    # Write geotiff
    print 'Write geotiff'
    tifffile = stfmt.format_tifffilename(outdir, metadata, create_dir=True)
    stfmt.write_geotiff(tifffile, metadata, geolocation, band)
def sar_doppler(infile, outdir):
    """
    """
    # tmp
    #infile = '/local/home/fab/data/sar/ASA/agulhas/ASA_WSM_1PNPDE20110518_210602_000002143102_00330_48189_1274/SAR_doppler.nc'
    # infile = '/local/home/fab/data/sar/ASA/agulhas/ASA_WSM_1PNPDE20110824_211403_000002143106_00014_49597_2093/SAR_doppler.nc'
    # outdir = '/local/home/data/syntool_inputs'
    # /tmp
    # Read/Process data
    print 'Read/Process data'
    sardop = NCFile(infile)
    product_ref = sardop.read_global_attribute('SOURCE_PRODUCT_REF')
    start_time = sardop.read_global_attribute('SOURCE_START_DATE')
    start_time = datetime.strptime(start_time, '%Y%m%d%H%M%S.%f')
    duration = sardop.read_global_attribute('SOURCE_ACQ_DURATION')
    stop_time = start_time + timedelta(seconds=duration)
    polarisation = sardop.read_global_attribute('SOURCE_POLARIZATION')
    lon = sardop.read_values('longitude')[::-1, :]
    lat = sardop.read_values('latitude')[::-1, :]
    #dopano = sardop.read_values('dopanomaly')[::-1, :]
    radvel = sardop.read_values('radial_vel')[::-1, :]
    validity = sardop.read_values('validity')[::-1, :]
    track_angle = sardop.read_global_attribute('SOURCE_TRACK_ANGLE')
    if track_angle < 0:
        radvel *= -1
    shp = lon.shape
    nlines = int(np.ceil(shp[0] / 4.)) + 1
    lines = np.round(np.linspace(0, shp[0] - 1, num=nlines)).astype('int32')
    npixels = int(np.ceil(shp[1] / 4)) + 1
    pixels = np.round(np.linspace(0, shp[1] - 1, num=npixels)).astype('int32')
    gcplin = np.tile(lines.reshape(nlines, 1), (1, npixels))
    gcppix = np.tile(pixels.reshape(1, npixels), (nlines, 1))
    gcplon = lon[gcplin, gcppix]
    gcplat = lat[gcplin, gcppix]
    gcphei = np.zeros((nlines, npixels))
    gcppix = gcppix + 0.5
    gcplin = gcplin + 0.5
    # Construct metadata/geolocation/band(s)
    print 'Construct metadata/geolocation/band(s)'
    metadata = {}
    (dtime, time_range) = stfmt.format_time_and_range(start_time,
                                                      stop_time,
                                                      units='ms')
    metadata['product_name'] = 'SAR_doppler'
    metadata['name'] = product_ref
    metadata['datetime'] = dtime
    metadata['time_range'] = time_range
    metadata['source_URI'] = infile
    metadata['source_provider'] = 'ESA'
    metadata['processing_center'] = 'CLS'
    metadata['conversion_software'] = 'Syntool'
    metadata['conversion_version'] = '0.0.0'
    metadata['conversion_datetime'] = stfmt.format_time(datetime.utcnow())
    metadata['parameter'] = 'radial horizontal velocities'
    metadata['type'] = 'remote sensing'
    metadata['sensor_type'] = 'SAR'
    metadata['sensor_name'] = 'ASAR'
    metadata['sensor_platform'] = 'ENVISAT'
    metadata['sensor_mode'] = 'WSM'
    #metadata['sensor_swath'] = sensor_swath
    metadata['sensor_polarisation'] = polarisation
    #metadata['sensor_pass'] = sensor_pass
    geolocation = {}
    geolocation['projection'] = stfmt.format_gdalprojection(geogcs='WGS84')
    geolocation['gcps'] = stfmt.format_gdalgcps(gcplon, gcplat, gcphei, gcppix,
                                                gcplin)
    # band = []
    # scale = (vmax-vmin)/254.
    # offset = vmin
    # indzero = np.where(validity == 0)
    # array = np.clip(np.round((radvel-offset)/scale), 0, 254).astype('uint8')
    # array[indzero] = 255
    # band.append({'array':array, 'scale':scale, 'offset':offset,
    #              'description':'radial horizontal velocities', 'unittype':'m/s',
    #              'nodatavalue':255, 'parameter_range':[vmin, vmax]})
    band = []
    cmap = doppler_colormap()
    norm = Normalize(vmin=-2.5, vmax=2.5)
    rgb = cmap(norm(radvel))
    indnodata = np.where(validity == 0)
    for ich in range(3):
        channel = np.round(rgb[:, :, ich] * 255).astype('uint8')
        channel[indnodata] = 0
        band.append({'array': channel, 'nodatavalue': 0})
    # Write geotiff
    print 'Write geotiff'
    tifffile = stfmt.format_tifffilename(outdir, metadata, create_dir=True)
    stfmt.write_geotiff(tifffile, metadata, geolocation, band)
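# A hedged sketch of the RGB banding used above: a matplotlib colormap and a
# Normalize instance turn the radial velocity into three uint8 channels, with
# 0 reserved as the nodata value in every channel. 'RdBu_r' stands in for the
# project-specific doppler_colormap(); any matplotlib colormap behaves the
# same way here.
import numpy as np
import matplotlib.pyplot as plt
from matplotlib.colors import Normalize

radvel = np.array([[-2.5, 0.], [1.0, 2.5]])      # illustrative velocities
cmap = plt.get_cmap('RdBu_r')
norm = Normalize(vmin=-2.5, vmax=2.5)
rgb = cmap(norm(radvel))                         # shape (2, 2, 4), floats in [0, 1]
channels = [np.round(rgb[:, :, ich] * 255).astype('uint8') for ich in range(3)]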
Example #22
0
def eodyn_current(infile,
                  outdir,
                  vmin=0.,
                  vmax=5.08,
                  vmin_pal=0.,
                  vmax_pal=2.,
                  write_netcdf=False):
    """
    """
    # Read/Process data
    print 'Read/Process data'
    ncfile = NCFile(infile)
    if 'id' in ncfile.read_global_attributes():
        l4id = ncfile.read_global_attribute('id')
#        l4id = 'e-Odyn' #ncfile.read_global_attribute('id')
    elif re.match(r'^e-Odyn_.*\.nc', os.path.basename(infile)) is not None:
        l4id = 'e-Odyn'
    else:
        raise Exception('Unknown GlobCurrent L4 file.')
    # /TMP
    ucur = ncfile.read_values(L4_MAPS[l4id]['uname'])[::, ::-1, 0]
    ucur = np.transpose(ucur)
    vcur = ncfile.read_values(L4_MAPS[l4id]['vname'])[::, ::-1, 0]
    vcur = np.transpose(vcur)
    masku = (ucur == -9999)
    maskv = (vcur == -9999)
    if l4id not in ['CourantGeostr']:
        lon = ncfile.read_values('lon')[0:2].astype('float64')
        lat = ncfile.read_values('lat')[-1:-3:-1].astype('float64')
        for i in range(2):  # avoid rounding errors
            lon[i] = np.round(lon[i] * 10000) / 10000
            lat[i] = np.round(lat[i] * 10000) / 10000
    else:
        lon = ncfile.read_values('lon')[:]
        shift = -np.where(lon < 0)[0][0]
        ucur = np.roll(ucur, shift, axis=1)
        vcur = np.roll(vcur, shift, axis=1)
        lon = lon[shift:shift + 2]
        lat = ncfile.read_values('lat')[-1:-3:-1]
    lon0, dlon, lat0, dlat = lon[0], lon[1] - lon[0], lat[0], lat[1] - lat[0]
    #dtime_units = ncfile.read_field('time').units
    #dtime = num2date(ncfile.read_values('time')[0], dtime_units)
    timefmt = '%Y-%m-%dT%H:%M:%S.%fZ'
    start_time = datetime.strptime(
        ncfile.read_global_attribute('time_coverage_start'), timefmt)
    stop_time = datetime.strptime(
        ncfile.read_global_attribute('time_coverage_end'), timefmt)

    (dtime, time_range) = stfmt.format_time_and_range(start_time,
                                                      stop_time,
                                                      units='ms')
    # rundtime = ncfile.read_global_attribute('date_modified')
    # rundtime = datetime.strptime(rundtime, '%Y%m%dT%H%M%SZ')
    # Construct metadata/geolocation/band(s)
    print 'Construct metadata/geolocation/band(s)'
    metadata = {}
    metadata['product_name'] = L4_MAPS[l4id]['productname']
    metadata['name'] = os.path.splitext(os.path.basename(infile))[0]
    metadata['datetime'] = dtime
    metadata['time_range'] = time_range
    #metadata['time_range'] = L4_MAPS[l4id]['timerange']
    metadata['source_URI'] = infile
    metadata['source_provider'] = 'e-Odyn'
    metadata['processing_center'] = ''
    metadata['conversion_software'] = 'Syntool'
    metadata['conversion_version'] = '0.0.0'
    metadata['conversion_datetime'] = stfmt.format_time(datetime.utcnow())
    metadata['parameter'] = ['current velocity', 'current direction']
    # metadata['type'] = 'model'
    # metadata['model_longitude_resolution'] = abs(dlon)
    # metadata['model_latitude_resolution'] = abs(dlat)
    # metadata['model_analysis_datetime'] = stfmt.format_time(rundtime)
    geolocation = {}
    geolocation['projection'] = stfmt.format_gdalprojection()
    geolocation['geotransform'] = [
        lon0 - dlon / 2., dlon, 0, lat0 - dlat / 2., 0, dlat
    ]
    band = []
    mask = ucur.mask | vcur.mask
    print(mask)
    curvel = np.sqrt(ucur.data**2 + vcur.data**2)
    curdir = np.mod(
        np.arctan2(vcur.data, ucur.data) * 180. / np.pi + 360., 360.)
    offset, scale = vmin, (vmax - vmin) / 254.
    np.clip(curvel, vmin, vmax, out=curvel)
    array = np.round((curvel - offset) / scale).astype('uint8')
    array[mask] = 255
    array[masku] = 255
    array[maskv] = 255
    print(array)
    colortable = stfmt.format_colortable('matplotlib_jet',
                                         vmin=vmin,
                                         vmax=vmax,
                                         vmin_pal=vmin_pal,
                                         vmax_pal=vmax_pal)
    band.append({
        'array': array,
        'scale': scale,
        'offset': offset,
        'description': 'current velocity',
        'unittype': 'm/s',
        'nodatavalue': 255,
        'parameter_range': [vmin, vmax],
        'colortable': colortable
    })
    array = np.round(curdir / 360. * 254.).astype('uint8')
    array[mask] = 255
    array[masku] = 255
    array[maskv] = 255
    band.append({
        'array': array,
        'scale': 360. / 254.,
        'offset': 0.,
        'description': 'current direction',
        'unittype': 'deg',
        'nodatavalue': 255,
        'parameter_range': [0, 360.]
    })
    # Write geotiff
    if write_netcdf == False:
        print 'Write geotiff'
        tifffile = stfmt.format_tifffilename(outdir, metadata, create_dir=True)
        stfmt.write_geotiff(tifffile, metadata, geolocation, band)
    elif write_netcdf == True:
        print 'Write netcdf'
        # u/v -> bands
        band = []
        mask = ucur.mask | vcur.mask
        vmin = -vmax
        offset, scale = vmin, (vmax - vmin) / 254.
        u = np.clip(ucur.data, vmin, vmax)
        array = np.round((u - offset) / scale).astype('uint8')
        array[mask] = 255
        band.append({
            'array': array,
            'scale': scale,
            'offset': offset,
            'description': 'current u',
            'unittype': 'm/s',
            'nodatavalue': 255,
            'parameter_range': [vmin, vmax]
        })
        v = np.clip(vcur.data, vmin, vmax)
        array = np.round((v - offset) / scale).astype('uint8')
        array[mask] = 255
        band.append({
            'array': array,
            'scale': scale,
            'offset': offset,
            'description': 'current v',
            'unittype': 'm/s',
            'nodatavalue': 255,
            'parameter_range': [vmin, vmax]
        })
        # Write
        ncfile = stfmt.format_ncfilename(outdir, metadata, create_dir=True)
        stfmt.write_netcdf(ncfile,
                           metadata,
                           geolocation,
                           band,
                           dgcpy=1.,
                           dgcpx=1.)
Example #23
0
def sea_level_gridded(infile,
                      outdir,
                      vmin=-1.,
                      vmax=1.0,
                      vmin_pal=-1.,
                      vmax_pal=1.,
                      write_netcdf=False):
    """
    """
    # Read/Process data
    print('Read/Process data')
    ncfile = NCFile(infile)
    if 'id' in ncfile.read_global_attributes():
        l4id = ncfile.read_global_attribute('id')
    elif (re.match(r'^MSL_Map_MERGED_Global_IB_RWT_NoGIA.*\.nc',
                   os.path.basename(infile)) is not None):
        l4id = 'Mean_Sea_Level'
        # vmin = -10; vmax = 10; vmin_pal = -10.; vmax_pal = 10
    elif (re.match(r'^dt_global_allsat_madt_h.*\.nc', os.path.basename(infile))
          is not None):
        l4id = 'Surface_height'
        # vmin = -2; vmax = 2; vmin_pal = -2; vmax_pal = 2
    elif (re.match(r'^dt_global_allsat_msla_h.*\.nc', os.path.basename(infile))
          is not None):
        l4id = 'Sea_Level_Anomaly'
        # vmin = -0.2; vmax = 0.2; vmin_pal = -0.2; vmax_pal = 0.2
    elif re.match(r'^mdt.*\.nc', os.path.basename(infile)) is not None:
        l4id = 'Mean_Dynamic_Topo'
        # vmin = -1.5; vmax = 1.5; vmin_pal = -1.5; vmax_pal = 1.5
    elif re.match(r'^mss.*\.nc', os.path.basename(infile)) is not None:
        l4id = 'Mean_Sea_Surface'
        # vmin = -80.; vmax = 80.; vmin_pal = -80.; vmax_pal = 80.
    elif (re.match(r'^nrt_merged_mswh.*\.nc', os.path.basename(infile))
          is not None):
        l4id = 'Sea_Wave_Height'
        # vmin = 0.; vmax = 6.0; vmin_pal = 0.; vmax_pal = 6.0
    elif (re.match(r'^nrt_merged_mwind.*\.nc', os.path.basename(infile))
          is not None):
        l4id = 'Wind'
        # vmin = 0.; vmax = 20.0; vmin_pal = 0.; vmax_pal = 20.0
    elif re.match(r'^Tide.*\.nc', os.path.basename(infile)) is not None:
        l4id = 'Tide'
        # vmin = -1.5; vmax = 1.5; vmin_pal = -1.5; vmax_pal = 1.50
    else:
        raise Exception('Unknown file.')
    # /TMP
    if l4id in [
            'Mean_Sea_Level',
    ]:
        h = ncfile.read_values(L4_MAPS[l4id]['hname'])[::-1, :]
        lon = ncfile.read_values('longitude')[:].astype('float64')
        lat = ncfile.read_values('latitude')[-1:-3:-1].astype('float64')
    elif l4id in ['Mean_Sea_Surface', 'Sea_Wave_Height', 'Wind']:
        h = ncfile.read_values(L4_MAPS[l4id]['hname'])[:, ::-1]
        h = np.transpose(h)
        lon = ncfile.read_values('NbLongitudes')[:].astype('float64')
        lat = ncfile.read_values('NbLatitudes')[-1:-3:-1].astype('float64')
    else:
        h = ncfile.read_values(L4_MAPS[l4id]['hname'])[0, ::-1, :]
        lon = ncfile.read_values('lon')[:].astype('float64')
        lat = ncfile.read_values('lat')[-1:-3:-1].astype('float64')
    lon[lon > 180.] = lon[lon > 180.] - 360.
    indsorted = np.argsort(lon)
    lon = lon[indsorted]
    h = h[:, indsorted]
    lon0, dlon, lat0, dlat = lon[0], lon[1] - lon[0], lat[0], lat[1] - lat[0]
    if l4id in ['Mean_Sea_Level', 'Mean_Dynamic_Topo', 'Mean_Sea_Surface']:
        dtime = datetime(2014, 12, 1)
    elif l4id in ['Sea_Wave_Height', 'Wind']:
        dtime = datetime(int(infile[-20:-16]), int(infile[-16:-14]),
                         int(infile[-14:-12]))
    else:
        dtime_units = ncfile.read_field('time').units
        dtime = num2date(ncfile.read_values('time')[0], dtime_units)
    # rundtime = ncfile.read_global_attribute('date_modified')
    # rundtime = datetime.strptime(rundtime, '%Y%m%dT%H%M%SZ')
    # Construct metadata/geolocation/band(s)
    print('Construct metadata/geolocation/band(s)')
    metadata = {}
    metadata['product_name'] = L4_MAPS[l4id]['productname']
    metadata['name'] = os.path.splitext(os.path.basename(infile))[0]
    metadata['datetime'] = stfmt.format_time(dtime)
    metadata['time_range'] = L4_MAPS[l4id]['timerange']
    metadata['source_URI'] = infile
    metadata['source_provider'] = '*****@*****.**'
    metadata['processing_center'] = ''
    metadata['conversion_software'] = 'Syntool'
    metadata['conversion_version'] = '0.0.0'
    metadata['conversion_datetime'] = stfmt.format_time(datetime.utcnow())
    metadata['parameter'] = [L4_MAPS[l4id]['parameter']]
    # metadata['type'] = 'model'
    # metadata['model_longitude_resolution'] = abs(dlon)
    # metadata['model_latitude_resolution'] = abs(dlat)
    # metadata['model_analysis_datetime'] = stfmt.format_time(rundtime)
    geolocation = {}
    geolocation['projection'] = stfmt.format_gdalprojection()
    if l4id in ['Mean_Sea_Surface', 'Sea_Wave_Height', 'Wind']:
        geolocation['geotransform'] = [
            lon0 - dlon, dlon, 0, lat0 - dlat, 0, dlat
        ]
    elif l4id in [
            'Tide',
    ]:
        h = h[:-1, ::]
        geolocation['geotransform'] = [lon0, dlon, 0, lat0, 0, dlat]
    else:
        geolocation['geotransform'] = [
            lon0 - dlon / 2., dlon, 0, lat0 - dlat / 2., 0, dlat
        ]
    band = []
    offset, scale = vmin, (vmax - vmin) / 254.
    np.clip(h, vmin, vmax, out=h)
    array = np.round((h - offset) / scale).astype('uint8')
    array[h.mask] = 255
    colortable = stfmt.format_colortable('matplotlib_jet',
                                         vmin=vmin,
                                         vmax=vmax,
                                         vmin_pal=vmin_pal,
                                         vmax_pal=vmax_pal)
    band.append({
        'array': array,
        'scale': scale,
        'offset': offset,
        'description': L4_MAPS[l4id]['hname'],
        'unittype': 'm',
        'nodatavalue': 255,
        'parameter_range': [vmin, vmax],
        'colortable': colortable
    })
    # Write geotiff
    print('Write geotiff')
    tifffile = stfmt.format_tifffilename(outdir, metadata, create_dir=True)
    stfmt.write_geotiff(tifffile, metadata, geolocation, band)
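The geotransform built here follows the GDAL convention: the first and fourth entries are the coordinates of the upper-left corner of the upper-left pixel, so subtracting half a pixel (lon0 - dlon/2, lat0 - dlat/2) places the pixel centres on the original grid nodes. A hedged sketch of how pixel indices map back to lon/lat under that convention (pixel_to_lonlat and the sample 0.25 degree grid are illustrative, not from the original code):

def pixel_to_lonlat(geotransform, col, row):
    # GDAL convention: the geotransform maps the upper-left corner of pixel (col, row);
    # adding 0.5 to the indices returns the pixel centre
    x0, dx, rx, y0, ry, dy = geotransform
    lon = x0 + (col + 0.5) * dx + (row + 0.5) * rx
    lat = y0 + (col + 0.5) * ry + (row + 0.5) * dy
    return lon, lat

geot = [-0.125, 0.25, 0, 89.875, 0, -0.25]  # hypothetical 0.25 degree lon/lat grid
print(pixel_to_lonlat(geot, 0, 0))          # -> (0.0, 89.75), i.e. the first grid node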
def ghrsst_seviri(infile,
                  outdir,
                  vmin=271.05,
                  vmax=309.15,
                  vmin_pal=273.,
                  vmax_pal=305.,
                  write_netcdf=False):
    """
    """
    # Read/Process data
    print 'Read/Process data'
    seviri = GHRSSTNCFile(infile)
    sst = seviri.read_values('sea_surface_temperature')[::-1, :]
    mask = seviri.read_values('quality_level')[::-1, :]
    #sea_ice_fraction = seviri.read_values('sea_ice_fraction')[::-1, :]
    # lon = seviri.read_values('lon')
    # dlon = lon[1] - lon[0]
    # lon0 = lon[0] - dlon / 2
    # lat = seviri.read_values('lat')[::-1]
    # dlat = lat[1] - lat[0]
    # lat0 = lat[0] - dlat / 2
    lon0 = seviri.read_global_attribute('westernmost_longitude')
    dlon = float(seviri.read_global_attribute('geospatial_lon_resolution'))
    lat0 = seviri.read_global_attribute('northernmost_latitude')
    dlat = -float(seviri.read_global_attribute('geospatial_lat_resolution'))
    dtime = seviri.read_values('time')[0]
    dtime_units = seviri.read_field('time').units
    dtime = num2date(dtime, dtime_units)
    start_time = datetime.strptime(seviri.read_global_attribute('start_time'),
                                   '%Y%m%dT%H%M%SZ')
    stop_time = datetime.strptime(seviri.read_global_attribute('stop_time'),
                                  '%Y%m%dT%H%M%SZ')
    (dtime, time_range) = stfmt.format_time_and_range(start_time,
                                                      stop_time,
                                                      units='ms')
    # Construct metadata/geolocation/band(s)
    print 'Construct metadata/geolocation/band(s)'
    metadata = {}
    metadata['product_name'] = 'SEVIRI_SST'
    metadata['name'] = os.path.splitext(os.path.basename(infile))[0]
    metadata['datetime'] = dtime
    metadata['time_range'] = time_range
    metadata['source_URI'] = infile
    metadata['source_provider'] = 'Ifremer'
    metadata['processing_center'] = ''
    metadata['conversion_software'] = 'Syntool'
    metadata['conversion_version'] = '0.0.0'
    metadata['conversion_datetime'] = stfmt.format_time(datetime.utcnow())
    metadata['parameter'] = 'sea surface temperature'
    metadata['type'] = 'remote sensing'
    metadata['longitude_resolution'] = abs(dlon)
    metadata['latitude_resolution'] = abs(dlat)
    geolocation = {}
    geolocation['projection'] = stfmt.format_gdalprojection()
    geolocation['geotransform'] = [lon0, dlon, 0, lat0, 0, dlat]
    band = []
    #indndv = np.where((sst.mask == True) | (sea_ice_fraction > 0))
    indndv = np.where((sst.mask == True) | (mask <= 3))
    offset, scale = vmin, (vmax - vmin) / 254.
    np.clip(sst, vmin, vmax, out=sst)
    array = np.round((sst - offset) / scale).astype('uint8')
    array[indndv] = 255
    colortable = stfmt.format_colortable('cerbere_medspiration',
                                         vmax=vmax,
                                         vmax_pal=vmax_pal,
                                         vmin=vmin,
                                         vmin_pal=vmin_pal)
    band.append({
        'array': array,
        'scale': scale,
        'offset': offset,
        'description': 'sea surface temperature',
        'unittype': 'K',
        'nodatavalue': 255,
        'parameter_range': [vmin, vmax],
        'colortable': colortable
    })

    # Write geotiff
    if write_netcdf == False:
        print 'Write geotiff'
        tifffile = stfmt.format_tifffilename(outdir, metadata, create_dir=True)
        stfmt.write_geotiff(tifffile, metadata, geolocation, band)
    elif write_netcdf == True:
        print 'Write netcdf'
        ncfile = stfmt.format_ncfilename(outdir, metadata, create_dir=True)
        band[0]['name'] = 'sea_surface_temperature'
        band[0]['long_name'] = 'sea surface subskin temperature'
        band[0]['standard_name'] = 'sea_surface_subskin_temperature'
        metadata['spatial_resolution'] = min([abs(dlat), abs(dlon)]) * 111000.
        dgcps = np.round(1. / np.abs(np.array([dlat, dlon]))).astype('int')
        stfmt.write_netcdf(ncfile,
                           metadata,
                           geolocation,
                           band,
                           'grid_lonlat',
                           dgcps=dgcps)
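In the netCDF branch the ground control point spacing is derived from the grid resolution so that GCPs fall roughly one degree apart (assuming dgcps is a spacing in pixels). A small sketch of that computation, assuming only numpy; the 0.05 degree resolution is an example value, not read from a file:

import numpy as np

def gcp_spacing(dlat, dlon):
    # Roughly one GCP per degree of latitude/longitude, as in the dgcps line above
    return np.round(1. / np.abs(np.array([dlat, dlon]))).astype('int')

print(gcp_spacing(dlat=-0.05, dlon=0.05))  # -> [20 20] pixels between GCPs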
Example #25
def fsle_gridded(infile,
                 outdir,
                 vmin=-1.,
                 vmax=0.,
                 vmin_pal=-1.,
                 vmax_pal=0.,
                 write_netcdf=False):
    """
    """
    # Read/Process data
    print('Read/Process data')
    ncfile = NCFile(infile)
    if 'id' in ncfile.read_global_attributes():
        l4id = ncfile.read_global_attribute('id')
    elif (re.match(r'^dt_global_allsat_madt_fsle.*\.nc',
                   os.path.basename(infile)) is not None):
        l4id = 'FSLE'
    else:
        raise Exception('Unknown file.')
    h = ncfile.read_values(L4_MAPS[l4id]['hname'])[0, ::-1, :]
    lon = ncfile.read_values('lon')[:].astype('float64')
    lat = ncfile.read_values('lat')[-1:-3:-1].astype('float64')
    lon[lon > 180.] = lon[lon > 180.] - 360.
    indsorted = np.argsort(lon)
    lon = lon[indsorted]
    h = h[:, indsorted]

    for i in range(2):  # avoid rounding errors
        lon[i] = np.round(lon[i] * 10000) / 10000
        lat[i] = np.round(lat[i] * 10000) / 10000
    lon0, dlon, lat0, dlat = lon[0], lon[1] - lon[0], lat[0], lat[1] - lat[0]
    dlon = 0.04
    dlat = -0.04
    dtime_units = ncfile.read_field('time').units
    dtime = num2date(ncfile.read_values('time')[0], dtime_units)
    # Construct metadata/geolocation/band(s)
    print('Construct metadata/geolocation/band(s)')
    metadata = {}
    metadata['product_name'] = L4_MAPS[l4id]['productname']
    metadata['name'] = os.path.splitext(os.path.basename(infile))[0]
    metadata['datetime'] = stfmt.format_time(dtime)
    metadata['time_range'] = L4_MAPS[l4id]['timerange']
    metadata['source_URI'] = infile
    metadata['source_provider'] = '*****@*****.**'
    metadata['processing_center'] = ''
    metadata['conversion_software'] = 'Syntool'
    metadata['conversion_version'] = '0.0.0'
    metadata['conversion_datetime'] = stfmt.format_time(datetime.utcnow())
    metadata['parameter'] = [L4_MAPS[l4id]['parameter']]
    # metadata['type'] = 'model'
    # metadata['model_longitude_resolution'] = abs(dlon)
    # metadata['model_latitude_resolution'] = abs(dlat)
    # metadata['model_analysis_datetime'] = stfmt.format_time(rundtime)
    geolocation = {}
    geolocation['projection'] = stfmt.format_gdalprojection()
    geolocation['geotransform'] = [
        lon0 - dlon / 2., dlon, 0, lat0 - dlat / 2., 0, dlat
    ]
    band = []
    offset, scale = vmin, (vmax - vmin) / 254.
    np.clip(h, vmin, vmax, out=h)
    array = np.round((h - offset) / scale).astype('uint8')
    array[h.mask] = 255
    colortable = stfmt.format_colortable('matplotlib_jet_r',
                                         vmin=vmin,
                                         vmax=vmax,
                                         vmin_pal=vmin_pal,
                                         vmax_pal=vmax_pal)
    band.append({
        'array': array,
        'scale': scale,
        'offset': offset,
        'description': 'FSLE',
        'unittype': 'day',
        'nodatavalue': 255,
        'parameter_range': [vmin, vmax],
        'colortable': colortable
    })
    # Write geotiff
    print('Write geotiff')
    tifffile = stfmt.format_tifffilename(outdir, metadata, create_dir=True)
    stfmt.write_geotiff(tifffile, metadata, geolocation, band)
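Both gridded converters above wrap the longitude axis from [0, 360) to [-180, 180] and then reorder the data columns so the axis stays monotonic. A self-contained sketch of that reordering, with a made-up 2x4 field:

import numpy as np

lon = np.array([0., 90., 180., 270.])
h = np.arange(8).reshape(2, 4)

lon[lon > 180.] -= 360.        # wrap to [-180, 180]
indsorted = np.argsort(lon)    # new column order
lon = lon[indsorted]
h = h[:, indsorted]            # columns follow the same reordering
print(lon)  # [-90.   0.  90. 180.]
print(h)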
def sentinel2_rgb(
        infile,
        outdir,
        # For output resolution
        overview_index=None,
        downsampling=2,
        # For manual contrast
        vmin=[None, None, None],
        vmax=[None, None, None],
        # For auto contrast
        contrast_overview_index=2,
        landmaskpath=None,
        slope_threshold=-40.,
        debug_fig_dir=None,
        atmos_correction=0,
        atmos_lut_path=None,
        vmax_factor=None,
        # For output type
        write_netcdf=False):
    """
    """
    # Identify stitching groups
    print 'Identify stitching group(s)'
    groups = safemsil1c_stitching_groups(infile)
    projs = groups.keys()
    for proj, urls in groups.iteritems():
        print '    {} : {} granule(s)'.format(proj, len(urls))
    datagroup = make_datagroup(groups)

    # Set contrast
    if None in vmin or None in vmax:
        print 'Set contrast'
        vmin, vmax = set_contrast(list(vmin),
                                  list(vmax),
                                  groups,
                                  overview_index=contrast_overview_index,
                                  landmaskpath=landmaskpath,
                                  slope_threshold=slope_threshold,
                                  debug_fig_dir=debug_fig_dir,
                                  atmos_correction=atmos_correction,
                                  atmos_lut_path=atmos_lut_path)
    print 'vmin = {:0.4f} / {:0.4f} / {:0.4f}'.format(*vmin)
    print 'vmax = {:0.4f} / {:0.4f} / {:0.4f}'.format(*vmax)
    if vmax_factor is not None:
        print 'Apply vmax_factor={}'.format(vmax_factor)
        _vmax = []
        for vmi, vma in zip(vmin, vmax):
            _vmax.append(vmi + vmax_factor * (vma - vmi))
        vmax = _vmax
        print 'new vmax = {:0.4f} / {:0.4f} / {:0.4f}'.format(*vmax)

    # Build geotiff or netcdf
    print 'Build geotiff or netcdf'
    bandnames = ['B04', 'B03', 'B02']
    for proj in projs:
        # Open stitched mapper
        print '    {} : open mapper with overview {}'.format(
            proj, overview_index)
        t0 = datetime.utcnow()
        mapper = SAFEMSIL1CStitchedFile(groups[proj],
                                        native_resolution='10m',
                                        overview_index=overview_index,
                                        tight=True)
        mapper.open()
        print '        {}'.format(datetime.utcnow() - t0)

        # Construct bands
        bands = []
        qvalue = mapper.read_global_attribute('quantification_value')
        for iband, bandname in enumerate(bandnames):
            fieldname = '{}_digital_number'.format(bandname)
            print '    {} : read {}'.format(proj, fieldname)
            t0 = datetime.utcnow()
            band = mapper.read_values(fieldname)
            print '        {}'.format(datetime.utcnow() - t0)
            if downsampling != 1:
                print '    {} : downsample by {}'.format(proj, downsampling)
                t0 = datetime.utcnow()
                shp = list(band.shape)
                shp[0] -= np.mod(shp[0], downsampling)
                shp[1] -= np.mod(shp[1], downsampling)
                sli = [slice(0, shp[0]), slice(0, shp[1])]
                rshp = (shp[0] / downsampling, downsampling,
                        shp[1] / downsampling, downsampling)
                if not np.ma.is_masked(band):
                    mask = np.ma.nomask
                else:
                    mask = band[sli].mask.reshape(rshp).\
                           sum(axis=3, dtype='uint8').\
                           sum(axis=1, dtype='uint8') > 0
                band = np.ma.MaskedArray(band[sli].data.reshape(rshp).\
                                         mean(axis=3, dtype='uint16').\
                                         mean(axis=1, dtype='uint16'),
                                         mask=mask)
                del mask
                print '        {}'.format(datetime.utcnow() - t0)
            print '    {} : bytescale in [{}, {}]'.format(
                proj, vmin[iband], vmax[iband])
            t0 = datetime.utcnow()
            vmin_dn = np.round(vmin[iband] * qvalue)
            vmax_dn = np.round(vmax[iband] * qvalue)
            byte = bytescale(band.data,
                             cmin=vmin_dn,
                             cmax=vmax_dn,
                             low=0,
                             high=254)
            if band.mask is not np.ma.nomask:
                byte[band.mask] = 255
            del band
            scale = (vmax[iband] - vmin[iband]) / 254.
            offset = vmin[iband]
            description = '{} TOA reflectance'.format(bandname)
            bands.append({
                'array': byte,
                'scale': scale,
                'offset': offset,
                'description': description,
                'unittype': '',
                'nodatavalue': 255,
                'parameter_range': [vmin[iband], vmax[iband]]
            })
            print '        {}'.format(datetime.utcnow() - t0)

        # Make sure nodata are at the same locations in all bands
        mask = np.any([band['array'] == 255 for band in bands], axis=0)
        for band in bands:
            band['array'][mask] = 255

        # Construct metadata and geolocation
        print '    {} : construct metadata and geolocation'.format(proj)
        t0 = datetime.utcnow()
        cs_code = mapper.read_global_attribute('horizontal_cs_code')
        epsg_num = cs_code.lower().lstrip('epsg:')
        dataname = '{}-{}'.format(datagroup, epsg_num)
        start_time = mapper.get_start_time()
        end_time = mapper.get_end_time()
        (dtime, time_range) = stfmt.format_time_and_range(start_time,
                                                          end_time,
                                                          units='ms')
        sensor_pass = mapper.read_global_attribute(
            'sensing_orbit_direction').lower()
        metadata = {}
        metadata['product_name'] = 'Sentinel-2_RGB'
        metadata['name'] = dataname
        metadata['datetime'] = dtime
        metadata['time_range'] = time_range
        metadata['source_URI'] = infile
        metadata['source_provider'] = 'ESA'
        metadata['processing_center'] = 'OceanDataLab'
        metadata['conversion_software'] = 'Syntool'
        metadata['conversion_version'] = '0.0.0'
        metadata['conversion_datetime'] = stfmt.format_time(datetime.utcnow())
        metadata['parameter'] = [
            'B04 TOA reflectance', 'B03 TOA reflectance', 'B02 TOA reflectance'
        ]
        metadata['type'] = 'remote sensing'
        metadata['sensor_type'] = 'multi-spectral imager'
        metadata['sensor_name'] = 'MSI'
        metadata['sensor_platform'] = 'Sentinel-2'
        metadata['sensor_pass'] = sensor_pass
        metadata['datagroup'] = datagroup
        srs = osr.SpatialReference()
        srs.ImportFromEPSG(int(epsg_num))
        ulx = mapper.read_global_attribute('ulx')
        dx = mapper.read_global_attribute('xdim') * downsampling
        uly = mapper.read_global_attribute('uly')
        dy = mapper.read_global_attribute('ydim') * downsampling
        geolocation = {}
        geolocation['projection'] = srs.ExportToWkt()
        geolocation['geotransform'] = [ulx, dx, 0, uly, 0, dy]
        print '        {}'.format(datetime.utcnow() - t0)

        # Write geotiff or netcdf
        mapper.close()
        if write_netcdf == False:
            print '    {} : write geotiff'.format(proj)
            t0 = datetime.utcnow()
            tifffile = stfmt.format_tifffilename(outdir,
                                                 metadata,
                                                 create_dir=True)
            stfmt.write_geotiff(tifffile, metadata, geolocation, bands)
            print '        {}'.format(datetime.utcnow() - t0)
        elif write_netcdf == True:
            print '    {} : write netcdf'.format(proj)
            t0 = datetime.utcnow()
            ncfile = stfmt.format_ncfilename(outdir, metadata, create_dir=True)
            bands[0]['name'] = 'B04_TOA_reflectance'
            bands[1]['name'] = 'B03_TOA_reflectance'
            bands[2]['name'] = 'B02_TOA_reflectance'
            resolution = min([abs(dy), abs(dx)])
            metadata['spatial_resolution'] = resolution
            dgcps_meter = 25000.
            dgcps = (np.round(dgcps_meter / resolution).astype('int'), ) * 2
            stfmt.write_netcdf(ncfile,
                               metadata,
                               geolocation,
                               bands,
                               'grid_proj',
                               dgcps=dgcps)
            print '        {}'.format(datetime.utcnow() - t0)
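The downsampling step above averages the digital numbers over non-overlapping blocks by reshaping the array. A hedged, float-valued sketch of the same idea (block_mean is an illustrative helper; the original code keeps uint16 arithmetic and tracks the mask separately):

import numpy as np

def block_mean(band, factor):
    # Trim rows/columns that do not fit, then average over factor x factor blocks
    ny = band.shape[0] - band.shape[0] % factor
    nx = band.shape[1] - band.shape[1] % factor
    rshp = (ny // factor, factor, nx // factor, factor)
    return band[:ny, :nx].reshape(rshp).mean(axis=(1, 3))

data = np.arange(36, dtype='float64').reshape(6, 6)
print(block_mean(data, 2))  # 3x3 array of 2x2 block means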
Example #27
def viirs_sst(infile, outdir, vmin=None, vmax=None, contrast='relative',
              ngcps=(41, 32), denoise_kernel='boxcar', denoise_width=27,
              open_iterations=1, nprocs=1,
              pngkml=False, write_netcdf=False, file_range=None):
    """
    """
    dic = None

    # Check file containing ranges
    if file_range is not None:
        if not os.path.isfile(file_range):
            raise Exception('file_range {} not found'.format(file_range))
        # Read a txt file which contains three columns: yearday,vmin,vmax
        with open(file_range, 'r') as f:
            dic = {}
            for line in f:
                (fdoy, fmin, fmax) = line.split(',')
                dic[int(fdoy)] = (float(fmin), float(fmax))

    if contrast == 'med':
        listbox = [[-6., 35., 2.75, 42.48],
                   [2.74, 30, 42.2, 47.00]]
    elif contrast == 'cwe':
        listbox = [[-23., 35.2, -5.5, 42.88],
                   [-23., 42.8, 2.20, 51.]]
    elif contrast == 'nwe':
        listbox = [[-23., 50.8, 32.7, 68.]]
    elif contrast == 'gom':
        listbox = [[-98., 18.0, -80.5, 30.5]]
    elif contrast == 'agulhas':
        listbox = [[10.8437, -45.7404, 39.9799, -25.3019]]
    elif contrast == 'gs':
        listbox = [[-81.52, 20, -30, 45]]
    else:
        listbox = None
    # Read/Process data
    print 'Read/Process data'
    dataset = Dataset(infile)
    start_time = datetime.strptime(dataset.start_time, '%Y%m%dT%H%M%SZ')
    print start_time.day
    print start_time.month
    stop_time = datetime.strptime(dataset.stop_time, '%Y%m%dT%H%M%SZ')
    lon = dataset.variables['lon'][:, :]
    lat = dataset.variables['lat'][:, :]
    sst = np.ma.array(dataset.variables['sea_surface_temperature'][0, :, :])
    _bt11 = dataset.variables['brightness_temperature_11um'][0, :, :]
    bt11 = np.ma.array(_bt11)
    quality_level = np.ma.array(dataset.variables['quality_level'][0, :, :])
    '''
    if file_shape is not None:
        with open(file_shape, 'r') as fshape:
            shape = shapely.wkt.load(fshape)
        box = shape.bounds
        index_in = np.where((lon >= box[0]) & (lat >= box[1])
                            & (lon <= box[2]) & (lat <= box[3]))
        index_out = np.where((lon < box[0]) | (lat < box[1])
                             | (lon > box[2]) | (lat > box[3]))
        sst[index_out] = np.nan
        print(np.shape(index_in))
        sys.exit(1)
        for i, j in zip(index_in[0], index_in[1]):
            p = Point(lon[i, j], lat[i, j])
            if p.within(shape) is False:
                sst[i, j] = np.nan
    '''
    if listbox is not None:
        mask_box = np.zeros(np.shape(sst))
        for i in range(np.shape(listbox)[0]):
            index_in = np.where((lon >= listbox[i][0]) & (lat >= listbox[i][1])
                             & (lon <= listbox[i][2]) & (lat <= listbox[i][3]))
            mask_box[index_in] = 1
        mask = ma.getmaskarray(sst) | ma.getmaskarray(bt11) | \
               (quality_level.data < 4) | (mask_box == 0)
    else:
        mask = ma.getmaskarray(sst) | ma.getmaskarray(bt11) | \
               (quality_level.data < 4)
    if mask.all():
        print 'No data'
        sys.exit(0)
    # GCPs for resampling and geotiff georeference
    scansize = 16
    dtime0 = datetime.utcnow()
    gcps = resample.get_gcps_from_bowtie(lon, lat, scansize, ngcps=ngcps)
    dtime = datetime.utcnow() - dtime0
    print 'Get GCPs from bowtie swath : {}'.format(dtime)
    gcplon, gcplat, gcpnpixel, gcpnline = gcps
    rspysize = lon.shape[0]
    geod = pyproj.Geod(ellps='WGS84')
    mid = abs(gcpnline[:, 0] - 0.5).argmin()
    xdists = geod.inv(gcplon[mid, :-1], gcplat[mid, :-1],
                      gcplon[mid, 1:], gcplat[mid, 1:])[2]
    xdist = np.sum(xdists) / abs(gcpnpixel[mid, -1] - gcpnpixel[mid, 0])
    rspxsize = np.round(xdist / 750.).astype('int') + 1
    gcpline = gcpnline * rspysize
    gcppixel = gcpnpixel * rspxsize

    # Resample with LinearNDInterpolator in output space
    dtime0 = datetime.utcnow()
    pix, lin = resample.get_points_from_gcps(gcplon, gcplat, gcppixel,
                                             gcpline, rspxsize, rspysize,
                                             1, lon, lat, nprocs=nprocs) - 0.5
    dtime = datetime.utcnow() - dtime0
    print 'Get input coordinates in new grid : {}'.format(dtime)
    # Test input grid in output space
    # import matplotlib.pyplot as plt
    # for iscan in range(lon.shape[0] / scansize):
    #     pixscan = pix[iscan * scansize: (iscan+1) * scansize, :]
    #     linscan = lin[iscan * scansize: (iscan+1) * scansize, :]
    #     # maskscan = mask[iscan * scansize: (iscan+1) * scansize, :]
    #     # pixscan = pixscan[~maskscan]
    #     # linscan = linscan[~maskscan]
    #     plt.plot(pixscan.flatten(), linscan.flatten(), '+')
    # plt.show()
    # import pdb ; pdb.set_trace()
    # \Test input grid in output space
    dtime0 = datetime.utcnow()
    sst.data[mask] = np.nan
    bt11.data[mask] = np.nan
    val = np.dstack((sst.data, bt11.data))
    rspval = resample.resample_bowtie_linear(pix, lin, val, scansize,
                                             rspxsize, rspysize, show=False)
    rspsst = rspval[:, :, 0]
    rspbt11 = rspval[:, :, 1]
    rspmask = ma.getmaskarray(rspsst) | ma.getmaskarray(rspbt11)
    dtime = datetime.utcnow() - dtime0
    print 'Interpolate in new grid : {}'.format(dtime)

    # Denoise sst and open mask
    rspsst.mask = rspmask
    rspbt11.mask = rspmask
    finalsst = denoise_sst(rspsst, rspbt11, kernel=denoise_kernel,
                           width=denoise_width, show=False)
    finalmask = ~binary_opening(~rspmask, structure=np.ones((3, 3)),
                                iterations=open_iterations)
    finalsst.mask = finalmask

    # Contrast
    if vmin == None:
        if contrast == 'relative':
            vmin = np.percentile(finalsst.compressed(), 0.5)
        #elif contrast == 'agulhas':
        #    dayofyear = float(start_time.timetuple().tm_yday)
        #    vmin = 273.15 + 2. * np.cos((dayofyear - 45.) * 2. * np.pi / 365.) + 20. - 9.
        #    #par = [277.94999694824219, 42, 2.5500030517578125, -219]
        #    par = [278.09999084472656, 0.62831853071795862,
        #           2.4000091552734375, 0.1570796326794896]
        #    vmin = par[0] + par[2] * np.cos(par[3] * dayofyear - par[1])
        #if a specific txt file is provided for the range
        elif dic is not None:
            dayofyear = float(start_time.timetuple().tm_yday)
            extrema = dic.get(dayofyear, dic[min(dic.keys(),
                           key=lambda k:abs(k - dayofyear))])
            vmin = extrema[0]
        else:
            raise Exception('Unknown contrast : {}'.format(contrast))
    if vmax == None:
        if contrast == 'relative':
            vmax = np.percentile(finalsst.compressed(), 99.5)
        #elif contrast == 'agulhas':
        #    dayofyear = float(start_time.timetuple().tm_yday)
        #    vmax = 273.15 + 2. * np.cos((dayofyear - 45.) * 2. * np.pi / 365.) + 20. + 4.
        #    #par = [300.59999084472656, 21, 2.8499908447265625, -191]
        #    par = [300.59999084472656, 0.29919930034188508,
        #           2.8499908447265625, 0.14959965017094254]
        #    vmax = par[0] + par[2] * np.cos(par[3] * dayofyear - par[1])
        #if a specific text file is provided for the range
        elif dic is not None:
            dayofyear = float(start_time.timetuple().tm_yday)
            extrema = dic.get(dayofyear, dic[min(dic.keys(),
                           key=lambda k:abs(k - dayofyear))])
            vmax = extrema[1]
        else:
            raise Exception('Unknown contrast : {}'.format(contrast))

    # Flip (geotiff in "swath sense")
    finalsst = finalsst[::-1, ::-1]
    gcppixel = rspxsize - gcppixel
    gcpline = rspysize - gcpline

    # Construct metadata/geolocation/band(s)
    print 'Construct metadata/geolocation/band(s)'
    metadata = {}
    (dtime, time_range) = stfmt.format_time_and_range(start_time, stop_time,
                                                      units='ms')
    metadata['product_name'] = 'SST_VIIRS_denoised'
    if contrast == 'relative':
        metadata['name'] = os.path.splitext(os.path.basename(infile))[0]
    else:
        metadata['name'] = '{}_{}'.format(os.path.splitext(os.path.basename(infile))[0], contrast)
    metadata['datetime'] = dtime
    metadata['time_range'] = time_range
    metadata['source_URI'] = infile
    metadata['source_provider'] = 'NOAA'
    metadata['processing_center'] = 'OceanDataLab'
    metadata['conversion_software'] = 'Syntool'
    metadata['conversion_version'] = '0.0.0'
    metadata['conversion_datetime'] = stfmt.format_time(datetime.utcnow())
    metadata['parameter'] = 'sea surface temperature'
    metadata['type'] = 'remote sensing'
    metadata['sensor_type'] = 'radiometer'
    metadata['sensor_name'] = 'VIIRS'
    metadata['sensor_platform'] = 'Suomi-NPP'
    #metadata['sensor_pass'] =
    geolocation = {}
    geolocation['projection'] = stfmt.format_gdalprojection()
    gcpheight = np.zeros(gcppixel.shape)
    geolocation['gcps'] = stfmt.format_gdalgcps(gcplon, gcplat, gcpheight,
                                                gcppixel, gcpline)
    band = []
    indndv = np.where(ma.getmaskarray(finalsst) == True)
    offset, scale = vmin, (vmax-vmin)/254.
    np.clip(finalsst.data, vmin, vmax, out=finalsst.data)
    array = np.round((finalsst.data - offset) / scale).astype('uint8')
    array[indndv] = 255
    colortable = stfmt.format_colortable('cerbere_medspiration',
                                         vmax=vmax, vmax_pal=vmax,
                                         vmin=vmin, vmin_pal=vmin)
    band.append({'array':array, 'scale':scale, 'offset':offset,
                 'description':'sea surface temperature', 'unittype':'K',
                 'nodatavalue':255, 'parameter_range':[vmin, vmax],
                 'colortable':colortable})

    if write_netcdf == False:
        # Write geotiff
        print 'Write geotiff'
        tifffile = stfmt.format_tifffilename(outdir, metadata, create_dir=True)
        stfmt.write_geotiff(tifffile, metadata, geolocation, band)
        # Write projected png/kml
        if pngkml == True:
            print 'Write projected png/kml'
            stfmt.write_pngkml_proj(tifffile)
    elif write_netcdf == True:
        print 'Write netcdf'
        ncfile = stfmt.format_ncfilename(outdir, metadata, create_dir=True)
        band[0]['name'] = 'denoised_sst'
        band[0]['long_name'] = 'denoised sea surface temperature'
        band[0]['standard_name'] = 'sea_surface_temperature'
        # ymid = abs(gcpline[:, 0] - rspysize / 2.).argmin()
        # xdists = geod.inv(gcplon[ymid, :-1], gcplat[ymid, :-1],
        #                   gcplon[ymid, 1:], gcplat[ymid, 1:])[2] / \
        #                   np.abs(gcppixel[ymid, 1:] - gcppixel[ymid, :-1])
        # xmid = abs(gcppixel[0, :] - rspxsize / 2.).argmin()
        # ydists = geod.inv(gcplon[:-1, xmid], gcplat[:-1, xmid],
        #                   gcplon[1:, xmid], gcplat[1:, xmid])[2] / \
        #                   np.abs(gcpline[1:, xmid] - gcpline[:-1, xmid])
        # print xdists.min(), xdists.max(), xdists.mean()
        # # e.g. 749.905437495 749.905892002 749.905827652
        # print ydists.min(), ydists.max(), ydists.mean()
        # # e.g. 737.638084996 741.195663083 739.157662785
        metadata['spatial_resolution'] = 750.
        stfmt.write_netcdf(ncfile, metadata, geolocation, band, 'swath',
                           ngcps=gcplon.shape)
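When a file_range text file is given, vmin/vmax are looked up by day of year, falling back to the nearest available day. A toy example of that lookup (the dictionary contents are made up):

dic = {10: (271.0, 305.0), 100: (275.0, 302.0), 200: (280.0, 300.0)}
dayofyear = 120.
extrema = dic.get(dayofyear,
                  dic[min(dic.keys(), key=lambda k: abs(k - dayofyear))])
print(extrema)  # (275.0, 302.0): day 100 is the closest key to day 120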
Example #28
def hf_radar(url,
             outdir,
             date=None,
             vmin=0.,
             vmax=5.08,
             vmin_pal=0.,
             vmax_pal=2.):
    """
    """
    if date is None:
        raise Exception('A date has to be specified for HF radar !')
    # Read/Process data
    print 'Read/Process data'
    dataset = netCDF4.Dataset(url + '?lat,lon,time,u,v', 'r')
    time = dataset.variables['time']
    time_search = np.round(netCDF4.date2num(date, time.units))
    # time_index = search_in_time(time, time_search)
    # if time_index is None:
    #     raise Exception('Date not found !')
    time_index = np.where((time[:] == time_search))[0]
    if time_index.size == 0:
        raise Exception('Date not found !')
    time_index = time_index[0]
    dtime = netCDF4.num2date(time[time_index], time.units)
    lon = dataset.variables['lon'][:]
    dlon = (lon[-1] - lon[0]) / (lon.size - 1)
    lat = dataset.variables['lat'][::-1]
    dlat = (lat[-1] - lat[0]) / (lat.size - 1)
    uvel = dataset.variables['u'][time_index, ::-1, :]
    if not isinstance(uvel, numpy.ma.MaskedArray):
        uvel = numpy.ma.masked_invalid(uvel)
    vvel = dataset.variables['v'][time_index, ::-1, :]
    if not isinstance(vvel, numpy.ma.MaskedArray):
        vvel = numpy.ma.masked_invalid(vvel)
    name = '_'.join(url.split('/')[-4:-1]) + dtime.strftime('_%Y%m%dT%H%M%S')

    # Construct metadata/geolocation/band(s)
    print 'Construct metadata/geolocation/band(s)'
    metadata = {}
    metadata['product_name'] = 'HF_radar'
    metadata['name'] = name
    metadata['datetime'] = stfmt.format_time(dtime)
    metadata['time_range'] = ['-30m', '+30m']
    metadata['source_URI'] = url
    metadata['source_provider'] = 'Scripps Institution of Oceanography'
    metadata['processing_center'] = ''
    metadata['conversion_software'] = 'Syntool'
    metadata['conversion_version'] = '0.0.0'
    metadata['conversion_datetime'] = stfmt.format_time(datetime.utcnow())
    metadata['parameter'] = ['current velocity', 'current direction']
    # metadata['type'] = 'model'
    # metadata['model_longitude_resolution'] = abs(dlon)
    # metadata['model_latitude_resolution'] = abs(dlat)
    # metadata['model_analysis_datetime'] = stfmt.format_time(rundtime)
    geolocation = {}
    geolocation['projection'] = stfmt.format_gdalprojection()
    geolocation['geotransform'] = [
        lon[0] - dlon / 2., dlon, 0, lat[0] - dlat / 2., 0, dlat
    ]
    band = []
    mask = uvel.mask | vvel.mask
    curvel = np.sqrt(uvel.data**2 + vvel.data**2)
    curdir = np.mod(
        np.arctan2(vvel.data, uvel.data) * 180. / np.pi + 360., 360.)
    offset, scale = vmin, (vmax - vmin) / 254.
    np.clip(curvel, vmin, vmax, out=curvel)
    array = np.round((curvel - offset) / scale).astype('uint8')
    array[mask] = 255
    colortable = stfmt.format_colortable('matplotlib_jet',
                                         vmin=vmin,
                                         vmax=vmax,
                                         vmin_pal=vmin_pal,
                                         vmax_pal=vmax_pal)
    band.append({
        'array': array,
        'scale': scale,
        'offset': offset,
        'description': 'current velocity',
        'unittype': 'm/s',
        'nodatavalue': 255,
        'parameter_range': [vmin, vmax],
        'colortable': colortable
    })
    array = np.round(curdir / 360. * 254.).astype('uint8')
    array[mask] = 255
    band.append({
        'array': array,
        'scale': 360. / 254.,
        'offset': 0.,
        'description': 'current direction',
        'unittype': 'deg',
        'nodatavalue': 255,
        'parameter_range': [0, 360.]
    })

    # Write geotiff
    print 'Write geotiff'
    tifffile = stfmt.format_tifffilename(outdir, metadata, create_dir=True)
    stfmt.write_geotiff(tifffile, metadata, geolocation, band)
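The two bands written here encode current velocity and direction derived from the u/v components; direction is mapped onto 0..254 with a fixed 360/254 scale. A minimal sketch of that conversion for a single pixel, assuming only numpy:

import numpy as np

u, v = 0.3, -0.4
curvel = np.sqrt(u**2 + v**2)                                  # 0.5 m/s
curdir = np.mod(np.arctan2(v, u) * 180. / np.pi + 360., 360.)  # direction in [0, 360) deg
byte = np.round(curdir / 360. * 254.).astype('uint8')          # packed with scale 360/254
print(curvel, curdir, byte)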
Example #29
def modis_sst(infileid,
              outdir,
              download_dir='/tmp',
              vmin=None,
              vmax=None,
              contrast='relative',
              ngcps=(21, 25),
              resample_radius=5000.,
              resample_sigma=2500.,
              denoise_kernel='boxcar',
              denoise_width=20,
              open_iterations=1,
              nprocs=1,
              pngkml=False,
              write_netcdf=False,
              file_range=None):
    """
    """
    dic = None

    # Check file containing ranges
    if file_range is not None:
        if not os.path.isfile(file_range):
            raise Exception('file_range {} not found'.format(file_range))
        # Read a txt file which contains three columns: yearday,vmin,vmax
        with open(file_range, 'r') as f:
            dic = {}
            for line in f:
                (fdoy, fmin, fmax) = line.split(',')
                dic[int(fdoy)] = (float(fmin), float(fmax))

    # modissstfname = '/mnt/data/sst/modis/MYD021KM.A2011338.1225/A2011338122500.L2_LAC_SST'
    # modis02fname = '/mnt/data/sst/modis/MYD021KM.A2011338.1225/MYD021KM.A2011338.1225.005.2011339235825.hdf'
    # modis03fname = '/mnt/data/sst/modis/MYD021KM.A2011338.1225/MYD03.A2011338.1225.005.2011339233301.hdf'
    # modis35l2fname = '/mnt/data/sst/modis/MYD021KM.A2011338.1225/MYD35_L2.A2011338.1225.005.2011340001234.hdf'
    if contrast == 'med':
        listbox = [[-6., 35., 2.75, 42.48], [2.74, 30, 42.2, 47.00]]
    elif contrast == 'cwe':
        listbox = [[-23., 35.2, -5.5, 42.88], [-23., 42.8, 2.20, 51.]]
    elif contrast == 'nwe':
        listbox = [[-23., 50.8, 32.7, 68.]]
    elif contrast == 'gom':
        listbox = [[-98., 18.0, -80.5, 30.5]]
    elif contrast == 'agulhas':
        listbox = [[10.8437, -45.7404, 39.9799, -25.3019]]
    elif contrast == 'gs':
        listbox = [[-81.52, 20, -30, 45]]
    else:
        listbox = None
    # Search/Download data
    print 'Search/Download data'
    if re.match(r'^[AT][0-9]{13}$', infileid) is None:
        raise Exception('Input for modis_sst is an ID '
                        '(e.g. A2011338122500 or T2014143234500)')
    platform = infileid[0]
    date = datetime.strptime(infileid[1:], '%Y%j%H%M%S')
    modissstid = {'A': 'MODISAL2SST', 'T': 'MODISTL2SST'}[platform]
    modissstfname = modis.search_and_download(modissstid, date, download_dir)
    modis02id = {'A': 'MYD021KM', 'T': 'MOD021KM'}[platform]
    modis02fname = modis.search_and_download(modis02id, date, download_dir)
    modis03id = {'A': 'MYD03', 'T': 'MOD03'}[platform]
    modis03fname = modis.search_and_download(modis03id, date, download_dir)
    modis35l2id = {'A': 'MYD35_L2', 'T': 'MOD35_L2'}[platform]
    modis35l2fname = modis.search_and_download(modis35l2id, date, download_dir)

    # Read/Process data
    print 'Read/Process data'
    # Read from SST file
    modissstfile = modis.MODISL2File(modissstfname)
    # lon = modissstfile.read_lon()
    # lat = modissstfile.read_lat()
    sst = modissstfile.read_sst() + 273.15
    attrs = modissstfile.read_attributes()
    modissstfile.close()
    # Read from radiances file
    modis02file = modis.MODIS02File(modis02fname)
    rad11 = modis02file.read_radiance(31)
    modis02file.close()
    bt11 = modis.modis_bright(rad11, 31, 1)
    # Read from geolocation file
    modis03file = modis.MODIS03File(modis03fname)
    lon = modis03file.read_lon()
    lat = modis03file.read_lat()
    modis03file.close()
    # Read from cloud mask file
    modis35l2file = modis.MODIS35L2File(modis35l2fname)
    cloudmask = modis35l2file.read_cloudmask(byte=0)
    modis35l2file.close()
    cloudy = (np.bitwise_and(cloudmask, 2) == 0) & \
             (np.bitwise_and(cloudmask, 4) == 0)
    land = np.bitwise_and(cloudmask, 128) == 128  # Desert or Land
    # land = (np.bitwise_and(cloudmask, 128) == 128) | \
    #        (np.bitwise_and(cloudmask, 64) == 64) # Desert or Land or Coastal
    if listbox is not None:
        mask_box = np.zeros(np.shape(sst))
        for i in range(np.shape(listbox)[0]):
            index_in = np.where((lon >= listbox[i][0]) & (lat >= listbox[i][1])
                                & (lon <= listbox[i][2])
                                & (lat <= listbox[i][3]))
            mask_box[index_in] = 1
        mask = cloudy | land | ma.getmaskarray(sst) | ma.getmaskarray(bt11) | (
            mask_box == 0)
    else:
        mask = cloudy | land | ma.getmaskarray(sst) | ma.getmaskarray(bt11)
    if mask.all():
        print 'No data'
        sys.exit(0)
    # GCPs for resampling and geotiff georeference
    scansize = 10
    dtime0 = datetime.utcnow()
    gcps = resample.get_gcps_from_bowtie(lon, lat, scansize, ngcps=ngcps)
    #gcps = resample.get_gcps_from_bowtie_old(lon, lat, scansize, ngcps=ngcps)
    dtime = datetime.utcnow() - dtime0
    print 'Get GCPs from bowtie swath : {}'.format(dtime)
    gcplon, gcplat, gcpnpixel, gcpnline = gcps
    rspysize = lon.shape[0]
    geod = pyproj.Geod(ellps='WGS84')
    mid = abs(gcpnline[:, 0] - 0.5).argmin()
    xdists = geod.inv(gcplon[mid, :-1], gcplat[mid, :-1], gcplon[mid, 1:],
                      gcplat[mid, 1:])[2]
    xdist = np.sum(xdists) / abs(gcpnpixel[mid, -1] - gcpnpixel[mid, 0])
    rspxsize = np.round(xdist / 1000.).astype('int') + 1
    gcpline = gcpnline * rspysize
    gcppixel = gcpnpixel * rspxsize

    # Resample with LinearNDInterpolator in output space
    dtime0 = datetime.utcnow()
    pix, lin = resample.get_points_from_gcps(gcplon,
                                             gcplat,
                                             gcppixel,
                                             gcpline,
                                             rspxsize,
                                             rspysize,
                                             1,
                                             lon,
                                             lat,
                                             nprocs=nprocs) - 0.5
    dtime = datetime.utcnow() - dtime0
    print 'Get input coordinates in new grid : {}'.format(dtime)
    # Test input grid in output space
    # import matplotlib.pyplot as plt
    # for iscan in range(lon.shape[0] / scansize):
    #     pixscan = pix[iscan * scansize: (iscan+1) * scansize, :]
    #     linscan = lin[iscan * scansize: (iscan+1) * scansize, :]
    #     # maskscan = mask[iscan * scansize: (iscan+1) * scansize, :]
    #     # pixscan = pixscan[~maskscan]
    #     # linscan = linscan[~maskscan]
    #     plt.plot(pixscan.flatten(), linscan.flatten(), '+')
    # plt.show()
    # import pdb ; pdb.set_trace()
    # \Test input grid in output space
    dtime0 = datetime.utcnow()
    sst.data[mask] = np.nan
    bt11.data[mask] = np.nan
    val = np.dstack((sst.data, bt11.data))
    rspval = resample.resample_bowtie_linear(pix,
                                             lin,
                                             val,
                                             scansize,
                                             rspxsize,
                                             rspysize,
                                             show=False)
    rspsst = rspval[:, :, 0]
    rspbt11 = rspval[:, :, 1]
    rspmask = ma.getmaskarray(rspsst) | ma.getmaskarray(rspbt11)
    dtime = datetime.utcnow() - dtime0
    print 'Interpolate in new grid : {}'.format(dtime)

    # Resample with pyresample in lon/lat space
    # rsplin, rsppix = np.mgrid[0:rspysize, 0:rspxsize] + 0.5
    # rsplon, rsplat = resample.get_points_from_gcps(gcplon, gcplat, gcppixel,
    #                                                gcpline, rspxsize, rspysize,
    #                                                0, rsppix, rsplin, nprocs=nprocs)
    # # Test resample grid
    # import matplotlib.pyplot as plt
    # plt.plot(lon.flatten(), lat.flatten(), '+b')
    # plt.plot(rsplon.flatten(), rsplat.flatten(), '+g')
    # plt.plot(gcplon.flatten(), gcplat.flatten(), 'xr')
    # plt.show()
    # import pdb ; pdb.set_trace()
    # # \Test resample grid
    # # Test radius / sigma
    # resample_radius = 5000.
    # resample_sigma = 2500.
    # sst.mask = False
    # #sst.mask = sst.mask | (sst.data < 273.15+5) | (sst.data > 273.15+30)
    # rspsst = resample.resample_gauss(lon, lat, sst, rsplon, rsplat,
    #                                  resample_radius, resample_sigma,
    #                                  nprocs=nprocs, show=True)
    # import pdb ; pdb.set_trace()
    # # \Test radius / sigma
    # valid = np.where(mask == False)
    # rspsst = resample.resample_gauss(lon[valid], lat[valid], sst[valid],
    #                                  rsplon, rsplat,
    #                                  resample_radius, resample_sigma,
    #                                  fill_value=None, nprocs=nprocs,
    #                                  show=False)
    # rspbt11 = resample.resample_gauss(lon[valid], lat[valid], bt11[valid],
    #                                   rsplon, rsplat,
    #                                   resample_radius, resample_sigma,
    #                                   fill_value=None, nprocs=nprocs,
    #                                   show=False)
    # rspmask = resample.resample_nearest(lon, lat, mask,
    #                                     rsplon, rsplat,
    #                                     resample_radius,
    #                                     fill_value=True, nprocs=nprocs,
    #                                     show=False)
    # rspmask = rspmask | ma.getmaskarray(rspsst) | ma.getmaskarray(rspbt11)

    # Denoise sst and open mask
    rspsst.mask = rspmask
    rspbt11.mask = rspmask
    finalsst = denoise_sst(rspsst,
                           rspbt11,
                           kernel=denoise_kernel,
                           width=denoise_width,
                           show=False)
    #finalsst = rspsst
    finalmask = ~binary_opening(
        ~rspmask, structure=np.ones((3, 3)), iterations=open_iterations)
    #finalmask = rspmask
    finalsst.mask = finalmask

    # Contrast
    if vmin == None:
        if contrast == 'relative':
            vmin = np.percentile(finalsst.compressed(), 0.5)
        #elif contrast == 'agulhas':
        #    dayofyear = float(attrs['start_time'].timetuple().tm_yday)
        #    vmin = 273.15 + 2. * np.cos((dayofyear - 45.) * 2. * np.pi / 365.) + 20. - 9.
        #    #par = [277.94999694824219, 42, 2.5500030517578125, -219]
        #    par = [278.09999084472656, 0.62831853071795862,
        #           2.4000091552734375, 0.1570796326794896]
        #    vmin = par[0] + par[2] * np.cos(par[3] * dayofyear - par[1])
        #if a specific txt file is provided for the range
        elif dic is not None:
            dayofyear = float(attrs['start_time'].timetuple().tm_yday)
            # Read a txt file which contains three columns: yearday,vmin,vmax
            extrema = dic.get(
                dayofyear, dic[min(dic.keys(),
                                   key=lambda k: abs(k - dayofyear))])
            vmin = extrema[0]
        else:
            raise Exception('Unknown contrast : {}'.format(contrast))
    if vmax == None:
        if contrast == 'relative':
            vmax = np.percentile(finalsst.compressed(), 99.5)
        #elif contrast == 'agulhas':
        #    dayofyear = float(attrs['start_time'].timetuple().tm_yday)
        #    vmax = 273.15 + 2. * np.cos((dayofyear - 45.) * 2. * np.pi / 365.) + 20. + 4.
        #    #par = [300.59999084472656, 21, 2.8499908447265625, -191]
        #    par = [300.59999084472656, 0.29919930034188508,
        #           2.8499908447265625, 0.14959965017094254]
        #    vmax = par[0] + par[2] * np.cos(par[3] * dayofyear - par[1])
        #if a specific text file is provided for the range
        elif dic is not None:
            dayofyear = float(attrs['start_time'].timetuple().tm_yday)
            extrema = dic.get(
                dayofyear, dic[min(dic.keys(),
                                   key=lambda k: abs(k - dayofyear))])
            vmax = extrema[1]
        else:
            raise Exception('Unknown contrast : {}'.format(contrast))

    # Flip (geotiff in "swath sense")
    finalsst = finalsst[::-1, ::-1]
    gcppixel = rspxsize - gcppixel
    gcpline = rspysize - gcpline

    # Construct metadata/geolocation/band(s)
    print 'Construct metadata/geolocation/band(s)'
    metadata = {}
    (dtime, time_range) = stfmt.format_time_and_range(attrs['start_time'],
                                                      attrs['stop_time'],
                                                      units='ms')
    metadata['product_name'] = 'SST_MODIS_denoised'
    if contrast == 'relative':
        metadata['name'] = os.path.splitext(os.path.basename(modissstfname))[0]
    else:
        metadata['name'] = '{}_{}'.format(
            os.path.splitext(os.path.basename(modissstfname))[0], contrast)
    metadata['datetime'] = dtime
    metadata['time_range'] = time_range
    metadata['source_URI'] = [
        modissstfname, modis02fname, modis03fname, modis35l2fname
    ]
    metadata['source_provider'] = 'NASA'
    metadata['processing_center'] = 'OceanDataLab'
    metadata['conversion_software'] = 'Syntool'
    metadata['conversion_version'] = '0.0.0'
    metadata['conversion_datetime'] = stfmt.format_time(datetime.utcnow())
    metadata['parameter'] = 'sea surface temperature'
    metadata['type'] = 'remote sensing'
    metadata['sensor_type'] = 'radiometer'
    metadata['sensor_name'] = 'MODIS'
    metadata['sensor_platform'] = attrs['platform']
    metadata['sensor_pass'] = attrs['pass']
    geolocation = {}
    geolocation['projection'] = stfmt.format_gdalprojection()
    gcpheight = np.zeros(gcppixel.shape)
    geolocation['gcps'] = stfmt.format_gdalgcps(gcplon, gcplat, gcpheight,
                                                gcppixel, gcpline)
    band = []
    indndv = np.where(ma.getmaskarray(finalsst) == True)
    offset, scale = vmin, (vmax - vmin) / 254.
    np.clip(finalsst.data, vmin, vmax, out=finalsst.data)
    array = np.round((finalsst.data - offset) / scale).astype('uint8')
    array[indndv] = 255
    colortable = stfmt.format_colortable('cerbere_medspiration',
                                         vmax=vmax,
                                         vmax_pal=vmax,
                                         vmin=vmin,
                                         vmin_pal=vmin)
    band.append({
        'array': array,
        'scale': scale,
        'offset': offset,
        'description': 'sea surface temperature',
        'unittype': 'K',
        'nodatavalue': 255,
        'parameter_range': [vmin, vmax],
        'colortable': colortable
    })

    # Write geotiff
    if write_netcdf == False:
        print 'Write geotiff'
        tifffile = stfmt.format_tifffilename(outdir, metadata, create_dir=True)
        stfmt.write_geotiff(tifffile, metadata, geolocation, band)
        # Write projected png/kml
        if pngkml == True:
            print 'Write projected png/kml'
            stfmt.write_pngkml_proj(tifffile)
    elif write_netcdf == True:
        print 'Write netcdf'
        ncfile = stfmt.format_ncfilename(outdir, metadata, create_dir=True)
        band[0]['name'] = 'denoised_sst'
        band[0]['long_name'] = 'denoised sea surface temperature'
        band[0]['standard_name'] = 'sea_surface_temperature'
        # ymid = abs(gcpline[:, 0] - rspysize / 2.).argmin()
        # xdists = geod.inv(gcplon[ymid, :-1], gcplat[ymid, :-1],
        #                   gcplon[ymid, 1:], gcplat[ymid, 1:])[2] / \
        #                   np.abs(gcppixel[ymid, 1:] - gcppixel[ymid, :-1])
        # xmid = abs(gcppixel[0, :] - rspxsize / 2.).argmin()
        # ydists = geod.inv(gcplon[:-1, xmid], gcplat[:-1, xmid],
        #                   gcplon[1:, xmid], gcplat[1:, xmid])[2] / \
        #                   np.abs(gcpline[1:, xmid] - gcpline[:-1, xmid])
        # print xdists.min(), xdists.max(), xdists.mean()
        # # e.g. 999.763079208 999.763084628 999.763082543
        # print ydists.min(), ydists.max(), ydists.mean()
        # # e.g. 1006.4149472 1008.60679776 1007.5888004
        metadata['spatial_resolution'] = 1000.
        stfmt.write_netcdf(ncfile,
                           metadata,
                           geolocation,
                           band,
                           'swath',
                           ngcps=gcplon.shape)
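The MOD35 cloud mask byte is decoded with bitwise tests: bits 1-2 give the cloud confidence and bit 7 flags desert or land, matching the tests above. A toy example with made-up flag bytes:

import numpy as np

cloudmask = np.array([0b10000110, 0b00000000, 0b00000110], dtype='uint8')
cloudy = (np.bitwise_and(cloudmask, 2) == 0) & (np.bitwise_and(cloudmask, 4) == 0)
land = np.bitwise_and(cloudmask, 128) == 128
print(cloudy)  # [False  True False]
print(land)    # [ True False False]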
Example #30
def ecmwf_model_wind(infile, outdir, max_forecast_hours=None,
                     vmin=0., vmax=25.4, vmin_pal=0., vmax_pal=50*0.514,
                     write_netcdf=False):
    """
    """
    # Read/Process data
    windfield = ECMWF0125NCFile(infile)
    u10 = windfield.read_values('u10m')[0, ::-1, :]
    v10 = windfield.read_values('v10m')[0, ::-1, :]
    lon = windfield.read_values('lon')
    dlon = lon[1]-lon[0]
    lat = windfield.read_values('lat')[::-1]
    dlat = lat[1]-lat[0]
    land_mask = get_land_mask()[::-1, :]
    # Replicate -180 deg at 180 deg for gdal_warp
    # dim = u10.shape
    # u10 = np.hstack((u10, u10[:, 0].reshape((dim[0], 1))))
    # v10 = np.hstack((v10, v10[:, 0].reshape((dim[0], 1))))
    # lon = np.hstack((lon, lon[0]+360.))
    # land_mask = np.hstack((land_mask, land_mask[:, 0].reshape((dim[0], 1))))
    # /Replicate -180 deg at 180 deg for gdal_warp
    dtime = windfield.read_values('time')[0]
    dtime_units = windfield.read_field('time').units
    dtime = num2date(dtime, dtime_units)
    rundtime = windfield.read_global_attribute('run_time')
    rundtime = datetime.strptime(rundtime, '%Y-%m-%dT%H:%M:%SZ')
    if max_forecast_hours is not None:
        forecast_hours = (dtime - rundtime).total_seconds() / 3600.
        if forecast_hours > max_forecast_hours:
            raise Exception('Exceeds max_forecast_hours.')
    # Construct metadata/geolocation/band(s)
    print 'Construct metadata/geolocation/band(s)'
    metadata = {}
    metadata['product_name'] = 'ECMWF_model_wind'
    #metadata['name'] = os.path.splitext(os.path.basename(infile))[0]
    metadata['name'] = 'ECMWF_'+dtime.strftime('%Y%m%dT%HZ')
    metadata['datetime'] = stfmt.format_time(dtime)
    metadata['time_range'] = ['-90m', '+90m']
    metadata['source_URI'] = infile
    metadata['source_provider'] = 'ECMWF'
    metadata['processing_center'] = ''
    metadata['conversion_software'] = 'Syntool'
    metadata['conversion_version'] = '0.0.0'
    metadata['conversion_datetime'] = stfmt.format_time(datetime.utcnow())
    #metadata['parameter'] = ['zonal wind speed', 'meridional wind speed']
    metadata['parameter'] = ['wind speed', 'wind direction']
    metadata['type'] = 'model'
    metadata['model_longitude_resolution'] = 0.125
    metadata['model_latitude_resolution'] = 0.125
    metadata['model_analysis_datetime'] = stfmt.format_time(rundtime)
    geolocation = {}
    geolocation['projection'] = stfmt.format_gdalprojection()
    geolocation['geotransform'] = [lon[0]-dlon/2., dlon, 0,
                                   lat[0]-dlat/2., 0, dlat]
    # band = []
    # scale = 0.2
    # offset = -25.4
    # windspeed = np.sqrt(u10**2 + v10**2)
    # winddirection = np.arctan2(v10, u10)
    # np.clip(windspeed, 0, abs(offset), out=windspeed)
    # u10 = np.cos(winddirection)*windspeed
    # array = np.round((u10-offset)/scale).astype('uint8')
    # band.append({'array':array, 'scale':scale, 'offset':offset,
    #              'description':'zonal wind speed', 'unittype':'m/s',
    #              'nodatavalue':255, 'parameter_range':[-25.4, 25.4]})
    # v10 = np.sin(winddirection)*windspeed
    # array = np.round((v10-offset)/scale).astype('uint8')
    # band.append({'array':array, 'scale':scale, 'offset':offset,
    #              'description':'meridional wind speed', 'unittype':'m/s',
    #              'nodatavalue':255, 'parameter_range':[-25.4, 25.4]})
    band = []
    indndv = np.where(land_mask == 1)
    windspeed = np.sqrt(u10**2 + v10**2)
    winddirection = np.mod(np.arctan2(v10, u10)*180./np.pi+360., 360.)
    offset, scale = vmin, (vmax-vmin)/254.
    np.clip(windspeed, vmin, vmax, out=windspeed)
    array = np.round((windspeed - offset) / scale).astype('uint8')
    array[indndv] = 255
    colortable = stfmt.format_colortable('noaa_wind', vmax=vmax, vmax_pal=vmax_pal,
                                         vmin=vmin, vmin_pal=vmin_pal)
    band.append({'array':array, 'scale':scale, 'offset':offset,
                 'description':'wind speed', 'unittype':'m/s',
                 'nodatavalue':255, 'parameter_range':[vmin, vmax],
                 'colortable':colortable})
    array = np.round(winddirection/360.*254.).astype('uint8')
    array[indndv] = 255
    band.append({'array':array, 'scale':360./254., 'offset':0.,
                 'description':'wind direction', 'unittype':'deg',
                 'nodatavalue':255, 'parameter_range':[0, 360.]})
    # Write geotiff
    if write_netcdf == False:
        print 'Write geotiff'
        tifffile = stfmt.format_tifffilename(outdir, metadata, create_dir=True)
        stfmt.write_geotiff(tifffile, metadata, geolocation, band)
    elif write_netcdf == True:
        print 'Write netcdf'
        # u/v -> bands
        band = []
        indndv = np.where(land_mask == 1)
        vmin = -vmax
        offset, scale = vmin, (vmax-vmin)/254.
        np.clip(u10, vmin, vmax, out=u10)
        array = np.round((u10 - offset) / scale).astype('uint8')
        array[indndv] = 255
        band.append({'array':array, 'scale':scale, 'offset':offset,
                     'description':'wind u', 'unittype':'m s-1',
                     'nodatavalue':255, 'parameter_range':[vmin, vmax],
                     'name':'u10m', 'long_name':'u component of horizontal wind',
                     'standard_name':'eastward_wind'})
        np.clip(v10, vmin, vmax, out=v10)
        array = np.round((v10 - offset) / scale).astype('uint8')
        array[indndv] = 255
        band.append({'array':array, 'scale':scale, 'offset':offset,
                     'description':'wind v', 'unittype':'m s-1',
                     'nodatavalue':255, 'parameter_range':[vmin, vmax],
                     'name':'v10m', 'long_name':'v component of horizontal wind',
                     'standard_name':'northward_wind'})
        # Write
        ncfile = stfmt.format_ncfilename(outdir, metadata, create_dir=True)
        metadata['spatial_resolution'] = 0.125 * 111000.
        dgcps = np.round(1. / np.array([0.125, 0.125])).astype('int')
        stfmt.write_netcdf(ncfile, metadata, geolocation, band, 'grid_lonlat',
                           dgcps=dgcps)
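Like the other converters in this listing, the function takes an input file and an output directory and writes either a GeoTIFF or a netCDF. A hypothetical invocation (both paths are placeholders):

ecmwf_model_wind('/path/to/ecmwf_forecast.nc', '/path/to/output_dir',
                 max_forecast_hours=12., write_netcdf=True)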