Example #1
def get_bounding_box(meta, geom_file=None):
    """Get lat/lon range (roughly), in the same order of data file
    lat0/lon0 - starting latitude/longitude (first row/column)
    lat1/lon1 - ending latitude/longitude (last row/column)
    """
    length, width = int(meta['LENGTH']), int(meta['WIDTH'])
    if 'Y_FIRST' in meta.keys():
        # geo coordinates
        lat0 = float(meta['Y_FIRST'])
        lon0 = float(meta['X_FIRST'])
        lat_step = float(meta['Y_STEP'])
        lon_step = float(meta['X_STEP'])
        lat1 = lat0 + lat_step * (length - 1)
        lon1 = lon0 + lon_step * (width - 1)

        # convert to lat/lon if Y/X coordinates are not in degrees
        # e.g. meters for UTM projection from ASF HyP3
        y_unit = meta.get('Y_UNIT', 'degrees').lower()
        if not y_unit.startswith('deg'):
            lat0, lon0 = ut.to_latlon(meta['OG_FILE_PATH'], lon0, lat0)
            lat1, lon1 = ut.to_latlon(meta['OG_FILE_PATH'], lon1, lat1)

    else:
        # radar coordinates
        if geom_file and os.path.isfile(geom_file):
            geom_dset_list = readfile.get_dataset_list(geom_file)
        else:
            geom_dset_list = []

        if 'latitude' in geom_dset_list:
            lats = readfile.read(geom_file, datasetName='latitude')[0]
            lons = readfile.read(geom_file, datasetName='longitude')[0]
            lats[lats == 0] = np.nan
            lons[lons == 0] = np.nan
            lat0 = np.nanmin(lats)
            lat1 = np.nanmax(lats)
            lon0 = np.nanmin(lons)
            lon1 = np.nanmax(lons)

        else:
            # use the approximate LAT/LON_REF1-4 corner coordinates from the metadata;
            # averaging the corner pairs gives start/end values in the file's row/column order
            lats = [float(meta['LAT_REF{}'.format(i)]) for i in [1,2,3,4]]
            lons = [float(meta['LON_REF{}'.format(i)]) for i in [1,2,3,4]]
            lat0 = np.mean(lats[0:2])
            lat1 = np.mean(lats[2:4])
            lon0 = np.mean(lons[0:3:2])
            lon1 = np.mean(lons[1:4:2])

    return lat0, lat1, lon0, lon1
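
These examples are excerpted from a MintPy-style module and rely on its module-level imports (os, re, h5py, numpy as np, plus helpers such as readfile, writefile, ut, attr, ptime, resample, geometry, timeseries). A minimal usage sketch of the geo-coordinate branch of get_bounding_box follows; the metadata values are invented for illustration and no external file is needed on this path:

# hedged usage sketch: metadata values below are made up for illustration
meta = {
    'LENGTH': '450', 'WIDTH': '600',          # rows, columns
    'Y_FIRST': '34.2', 'X_FIRST': '-118.6',   # upper-left corner (degrees)
    'Y_STEP': '-0.001', 'X_STEP': '0.001',    # pixel size (degrees)
}
lat0, lat1, lon0, lon1 = get_bounding_box(meta)
print('lat: {:.3f} -> {:.3f}, lon: {:.3f} -> {:.3f}'.format(lat0, lat1, lon0, lon1))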
Example #2
def prepare_los_geometry(geom_file):
    """Prepare LOS geometry data/info in geo-coordinates
    Parameters: geom_file  - str, path of geometry file
    Returns:    inc_angle  - 2D np.ndarray, incidence angle in radians
                head_angle - 2D np.ndarray, heading   angle in radians
                atr        - dict, metadata in geo-coordinate
    """

    print('prepare LOS geometry in geo-coordinates from file: {}'.format(
        geom_file))
    atr = readfile.read_attribute(geom_file)

    print('read incidenceAngle from file: {}'.format(geom_file))
    inc_angle = readfile.read(geom_file, datasetName='incidenceAngle')[0]

    if 'azimuthAngle' in readfile.get_dataset_list(geom_file):
        print('read azimuthAngle   from file: {}'.format(geom_file))
        print('convert azimuth angle to heading angle')
        az_angle = readfile.read(geom_file, datasetName='azimuthAngle')[0]
        head_angle = ut.azimuth2heading_angle(az_angle)
    else:
        print('use the HEADING attribute as the mean heading angle')
        head_angle = np.ones(inc_angle.shape, dtype=np.float32) * float(
            atr['HEADING'])

    # geocode inc/az angle data if in radar-coord
    if 'Y_FIRST' not in atr.keys():
        print('-' * 50)
        print('geocoding the incidence / heading angles ...')
        res_obj = resample(lut_file=geom_file, src_file=geom_file)
        res_obj.open()
        res_obj.prepare()

        # resample data
        box = res_obj.src_box_list[0]
        inc_angle = res_obj.run_resample(src_data=inc_angle[box[1]:box[3],
                                                            box[0]:box[2]])
        head_angle = res_obj.run_resample(src_data=head_angle[box[1]:box[3],
                                                              box[0]:box[2]])

        # update attribute
        atr = attr.update_attribute4radar2geo(atr, res_obj=res_obj)

    # convert Y/X metadata to degrees if they are not already
    # e.g. meters for UTM projection from ASF HyP3
    if not atr.get('Y_UNIT', 'degrees').lower().startswith('deg'):
        # get SNWE in meter
        length, width = int(atr['LENGTH']), int(atr['WIDTH'])
        N = float(atr['Y_FIRST'])
        W = float(atr['X_FIRST'])
        y_step = float(atr['Y_STEP'])
        x_step = float(atr['X_STEP'])
        S = N + y_step * length
        E = W + x_step * width

        # SNWE in meter --> degree
        lat0, lon0 = ut.to_latlon(atr['OG_FILE_PATH'], W, N)
        lat1, lon1 = ut.to_latlon(atr['OG_FILE_PATH'], E, S)
        lat_step = (lat1 - lat0) / length
        lon_step = (lon1 - lon0) / width

        # update Y/X_FIRST/STEP/UNIT
        atr['Y_FIRST'] = lat0
        atr['X_FIRST'] = lon0
        atr['Y_STEP'] = lat_step
        atr['X_STEP'] = lon_step
        atr['Y_UNIT'] = 'degrees'
        atr['X_UNIT'] = 'degrees'

    # unit: degree to radian
    inc_angle *= np.pi / 180.
    head_angle *= np.pi / 180.

    return inc_angle, head_angle, atr
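
A short usage sketch for prepare_los_geometry; the geometry file path is a placeholder, and the vertical-to-LOS projection shown at the end is the standard cos(incidence) mapping rather than anything specific to this module:

# hypothetical path; any geometry file with an incidenceAngle dataset works here
geom_file = 'inputs/geometryRadar.h5'
inc_angle, head_angle, atr = prepare_los_geometry(geom_file)
print('mean incidence angle: {:.2f} deg'.format(np.degrees(np.nanmean(inc_angle))))
print('mean heading   angle: {:.2f} deg'.format(np.degrees(np.nanmean(head_angle))))

# example: a purely vertical displacement maps into the line of sight via cos(incidence)
d_up = 0.05                          # 5 cm of uplift (illustrative)
d_los = d_up * np.cos(inc_angle)     # LOS displacement in meters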
Example #3
def calc_delay_timeseries(inps):
    """Calculate delay time-series and write it to HDF5 file.
    Parameters: inps : namespace, all input parameters
    Returns:    tropo_file : str, path of the output tropospheric delay file, e.g. ECMWF.h5
    """
    def get_dataset_size(fname):
        atr = readfile.read_attribute(fname)
        shape = (int(atr['LENGTH']), int(atr['WIDTH']))
        return shape

    def run_or_skip(grib_files, tropo_file, geom_file):
        print('update mode: ON')
        print('output file: {}'.format(tropo_file))
        flag = 'skip'

        # check existence and modification time
        if ut.run_or_skip(out_file=tropo_file,
                          in_file=grib_files,
                          print_msg=False) == 'run':
            flag = 'run'
            print(
                '1) output file either does NOT exist or is NOT newer than all GRIB files.'
            )

        else:
            print('1) output file exists and is newer than all GRIB files.')

            # check dataset size in space / time
            date_list = [
                str(re.findall(r'\d{8}', os.path.basename(i))[0])
                for i in grib_files
            ]
            if (get_dataset_size(tropo_file) != get_dataset_size(geom_file)
                    or any(i not in timeseries(tropo_file).get_date_list()
                           for i in date_list)):
                flag = 'run'
                print(
                    '2) output file does NOT have the same len/wid as the geometry file {} or does NOT contain all dates'
                    .format(geom_file))
            else:
                print(
                    '2) output file has the same len/wid as the geometry file and contains all dates'
                )

                # check if output file is fully written
                with h5py.File(tropo_file, 'r') as f:
                    if np.all(f['timeseries'][-1, :, :] == 0):
                        flag = 'run'
                        print('3) output file is NOT fully written.')
                    else:
                        print('3) output file is fully written.')

        # result
        print('run or skip: {}'.format(flag))
        return flag

    if run_or_skip(inps.grib_files, inps.tropo_file, inps.geom_file) == 'skip':
        return inps.tropo_file

    ## 1. prepare geometry data
    geom_obj = geometry(inps.geom_file)
    geom_obj.open()
    inps.inc = geom_obj.read(datasetName='incidenceAngle')
    inps.dem = geom_obj.read(datasetName='height')

    # for testing
    if inps.custom_height:
        print(
            'use input custom height of {} m for vertical integration'.format(
                inps.custom_height))
        inps.dem[:] = inps.custom_height

    if 'latitude' in geom_obj.datasetNames:
        # for lookup table in radar-coord (isce, doris)
        inps.lat = geom_obj.read(datasetName='latitude')
        inps.lon = geom_obj.read(datasetName='longitude')

    elif 'Y_FIRST' in geom_obj.metadata:
        # for lookup table in geo-coded (gamma, roipac) and obs. in geo-coord
        inps.lat, inps.lon = ut.get_lat_lon(geom_obj.metadata)

        # convert coordinates to lat/lon, e.g. from UTM for ASF HyP3
        if not geom_obj.metadata.get('Y_UNIT', 'degrees').startswith('deg'):
            inps.lat, inps.lon = ut.to_latlon(inps.atr['OG_FILE_PATH'],
                                              inps.lon, inps.lat)

    else:
        # for lookup table in geo-coded (gamma, roipac) and obs. in radar-coord
        inps.lat, inps.lon = ut.get_lat_lon_rdc(inps.atr)

    # mask of valid pixels
    mask = np.multiply(inps.inc != 0, ~np.isnan(inps.inc))

    ## 2. prepare output file
    # metadata
    atr = inps.atr.copy()
    atr['FILE_TYPE'] = 'timeseries'
    atr['UNIT'] = 'm'

    # remove metadata related to the double reference,
    # since the absolute delay is calculated and saved
    for key in ['REF_DATE', 'REF_X', 'REF_Y', 'REF_LAT', 'REF_LON']:
        if key in atr.keys():
            atr.pop(key)

    # instantiate time-series
    length, width = int(atr['LENGTH']), int(atr['WIDTH'])
    num_date = len(inps.grib_files)
    date_list = [
        str(re.findall(r'\d{8}', os.path.basename(i))[0])
        for i in inps.grib_files
    ]
    dates = np.array(date_list, dtype=np.bytes_)
    ds_name_dict = {
        "date": [dates.dtype, (num_date, ), dates],
        "timeseries": [np.float32, (num_date, length, width), None],
    }
    writefile.layout_hdf5(inps.tropo_file, ds_name_dict, metadata=atr)

    ## 3. calculate phase delay
    print(
        '\n------------------------------------------------------------------------------'
    )
    print(
        'calculating absolute delay for each date using PyAPS (Jolivet et al., 2011; 2014) ...'
    )
    print('number of grib files used: {}'.format(num_date))

    prog_bar = ptime.progressBar(maxValue=num_date, print_msg=not inps.verbose)
    for i in range(num_date):
        grib_file = inps.grib_files[i]

        # calc tropo delay
        tropo_data = get_delay(grib_file,
                               tropo_model=inps.tropo_model,
                               delay_type=inps.delay_type,
                               dem=inps.dem,
                               inc=inps.inc,
                               lat=inps.lat,
                               lon=inps.lon,
                               mask=mask,
                               verbose=inps.verbose)

        # write tropo delay to file
        block = [i, i + 1, 0, length, 0, width]
        writefile.write_hdf5_block(inps.tropo_file,
                                   data=tropo_data,
                                   datasetName='timeseries',
                                   block=block,
                                   print_msg=False)

        prog_bar.update(i + 1, suffix=os.path.basename(grib_file))
    prog_bar.close()

    return inps.tropo_file
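
A hedged sketch of driving calc_delay_timeseries directly; the attribute names on inps are inferred from the function body above (in practice they are filled in by the module's command-line parser), and all file paths and GRIB names are placeholders:

from argparse import Namespace

geom_file = 'inputs/geometryRadar.h5'          # placeholder path
inps = Namespace(
    grib_files=['GRIB/ERA5_20200101.grb',      # placeholder GRIB files,
                'GRIB/ERA5_20200113.grb'],     # one per acquisition date (YYYYMMDD in name)
    tropo_file='inputs/ERA5.h5',               # output delay time-series file
    geom_file=geom_file,
    atr=readfile.read_attribute(geom_file),    # metadata for the output file
                                               # (geometry metadata used here for simplicity)
    tropo_model='ERA5',
    delay_type='comb',                         # combined wet + dry delay (assumed option name)
    custom_height=None,                        # None: use the real DEM heights
    verbose=False,
)
tropo_file = calc_delay_timeseries(inps)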