Example #1
def get_delay_radar(ztd_file, cos_inc_angle, pts_new):
    """calc single path tropo delay in line-of-sight direction

    Parameters: ztd_file      - str, path of zenith delay file
                cos_inc_angle - 2D np.ndarray in (len, wid) in float32, cos(inc_angle)
                pts_new       - 2D np.ndarray in (len*wid, 2) in float32
    Returns:    delay         - 2D np.ndarray in float32, LOS delay
    """
    # read ztd file
    delay_ztd, atr_ztd = readfile.read(ztd_file)
    # flip to be consistent with the reversed lats
    delay_ztd = np.flipud(delay_ztd)

    # pixel coordinates in ztd file
    lats, lons = ut.get_lat_lon(atr_ztd, dimension=1)
    # set lats in ascending order as required by RGI
    lats = np.flipud(lats)
    pts_ztd = (lats.flatten(), lons.flatten())

    # resample in pts_new coordinates
    RGI_func = RGI(pts_ztd,
                   delay_ztd,
                   method='nearest',
                   bounds_error=False,
                   fill_value=0)
    delay = RGI_func(pts_new)
    delay = delay.reshape(cos_inc_angle.shape)

    # project from zenith to line-of-sight
    delay /= cos_inc_angle

    # reverse the sign for consistency between different phase correction steps/methods
    delay *= -1

    return delay
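A minimal call sketch for get_delay_radar(), assuming a radar-coded geometry file with per-pixel latitude/longitude; the file names here are hypothetical, while readfile comes from mintpy.utils:

import numpy as np
from mintpy.utils import readfile

# hypothetical inputs: per-pixel lat/lon and incidence angle from a geometry file
lat = readfile.read('geometryRadar.h5', datasetName='latitude')[0]
lon = readfile.read('geometryRadar.h5', datasetName='longitude')[0]
inc_angle = readfile.read('geometryRadar.h5', datasetName='incidenceAngle')[0]
cos_inc_angle = np.cos(np.deg2rad(inc_angle)).astype(np.float32)

# stack the coordinates into the (len*wid, 2) array expected by get_delay_radar
pts_new = np.hstack((lat.reshape(-1, 1), lon.reshape(-1, 1))).astype(np.float32)
delay = get_delay_radar('20190101.ztd', cos_inc_angle, pts_new)  # hypothetical ztd path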
Example #2
def flatten_lat_lon(box, ts_obj, coords=None):
    if coords is None:
        lats, lons = ut.get_lat_lon(ts_obj.metadata, box=box)
    else:
        # assume pre-computed coordinates are passed in as a (lats, lons) pair;
        # without this branch the function raises NameError when coords is given
        lats, lons = coords

    lats = sorted(lats.flatten())
    lons = sorted(lons.flatten())
    return lats, lons
Example #3
def ll2xy(inps):
    """transfer lat/lon to local coordination"""
    inps.metadata = readfile.read_attribute(inps.file[0])

    # read geometry
    inps.lat, inps.lon = ut.get_lat_lon(inps.metadata)
    #inps.inc_angle = readfile.read(inps.geometry[0], datasetName='incidenceAngle')[0]
    #inps.head_angle = np.ones(inps.inc_angle.shape, dtype=np.float32) * float(inps.metadata['HEADING'])
    #inps.height = readfile.read(inps.geometry[0], datasetName='height')[0]

    # read mask file
    inps.mask = readfile.read(inps.file[0])[0]
    # mask data
    #inps.lat[inps.mask==0] = np.nan
    #inps.lon[inps.mask==0] = np.nan
    #inps.inc_angle[inps.mask==0] = np.nan
    #inps.head_angle[inps.mask==0] = np.nan
    #inps.height[inps.mask==0] = np.nan

    # convert lat/lon to x/y
    # origin point
    origin_lat = (inps.lat[0, 0] + inps.lat[-1, 0]) / 2
    origin_lon = (inps.lon[0, 0] + inps.lon[0, -1]) / 2

    lat = np.transpose(inps.lat.reshape(-1, 1))
    lon = np.transpose(inps.lon.reshape(-1, 1))

    llh = np.vstack((lon, lat))
    origin = np.array([origin_lon, origin_lat], dtype=float)
    XY = np.transpose(
        mut.llh2xy(llh, origin)
    ) * 1000  # unit of X/Y is meter and is a [N,2] matrix with [N,0] is X; [N,1] is Y
    X = XY[:, 0]
    Y = XY[:, 1]

    return X, Y, origin
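For orientation only: a rough stand-in for mut.llh2xy(), assuming it performs a local equirectangular projection and returns a 2 x N array in kilometers (an illustrative guess, not the actual implementation):

import numpy as np

def llh2xy_approx(llh, origin):
    """Hypothetical equirectangular stand-in for mut.llh2xy.
    llh    : 2 x N array of [lon; lat] in degrees
    origin : (lon0, lat0) in degrees
    Returns a 2 x N array of [x; y] in kilometers."""
    R = 6371.0  # mean Earth radius in km
    lon, lat = np.deg2rad(llh[0]), np.deg2rad(llh[1])
    lon0, lat0 = np.deg2rad(np.asarray(origin, dtype=float))
    x = R * (lon - lon0) * np.cos(lat0)  # shrink longitude spacing with latitude
    y = R * (lat - lat0)
    return np.vstack((x, y))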
Example #4
def calculate_delay_timeseries(inps):
    """Calculate delay time-series and write it to HDF5 file.
    Parameters: inps : namespace, all input parameters
    Returns:    tropo_file : str, file name of ECMWF.h5
    """
    def get_dataset_size(fname):
        atr = readfile.read_attribute(fname)
        shape = (int(atr['LENGTH']), int(atr['WIDTH']))
        return shape

    # check existing tropo delay file
    if (ut.run_or_skip(out_file=inps.tropo_file,
                       in_file=inps.grib_files,
                       print_msg=False) == 'skip'
            and get_dataset_size(inps.tropo_file) == get_dataset_size(
                inps.geom_file)):
        print(
            '{} file exists and is newer than all GRIB files, skip updating.'.
            format(inps.tropo_file))
        return

    # prepare geometry data
    geom_obj = geometry(inps.geom_file)
    geom_obj.open()
    inps.dem = geom_obj.read(datasetName='height')
    inps.inc = geom_obj.read(datasetName='incidenceAngle')

    if 'latitude' in geom_obj.datasetNames:
        # for dataset in geo OR radar coord with lookup table in radar-coord (isce, doris)
        inps.lat = geom_obj.read(datasetName='latitude')
        inps.lon = geom_obj.read(datasetName='longitude')
    elif 'Y_FIRST' in geom_obj.metadata:
        # for geo-coded dataset (gamma, roipac)
        inps.lat, inps.lon = ut.get_lat_lon(geom_obj.metadata)
    else:
        # for radar-coded dataset (gamma, roipac)
        inps.lat, inps.lon = ut.get_lat_lon_rdc(geom_obj.metadata)

    # calculate phase delay
    length, width = int(inps.atr['LENGTH']), int(inps.atr['WIDTH'])
    num_date = len(inps.grib_files)
    date_list = [str(re.findall(r'\d{8}', i)[0]) for i in inps.grib_files]
    tropo_data = np.zeros((num_date, length, width), np.float32)
    print(
        '\n------------------------------------------------------------------------------'
    )
    print(
        'calculating absolute delay for each date using PyAPS (Jolivet et al., 2011; 2014) ...'
    )
    print('number of grib files used: {}'.format(num_date))
    prog_bar = ptime.progressBar(maxValue=num_date)
    for i in range(num_date):
        grib_file = inps.grib_files[i]
        tropo_data[i] = get_delay(grib_file, inps)
        prog_bar.update(i + 1, suffix=os.path.basename(grib_file))
    prog_bar.close()

    # remove metadata related with double reference
    # because absolute delay is calculated and saved
    for key in ['REF_DATE', 'REF_X', 'REF_Y', 'REF_LAT', 'REF_LON']:
        if key in inps.atr.keys():
            inps.atr.pop(key)

    # Write tropospheric delay to HDF5
    ts_obj = timeseries(inps.tropo_file)
    ts_obj.write2hdf5(data=tropo_data,
                      dates=date_list,
                      metadata=inps.atr,
                      refFile=inps.timeseries_file)
    return inps.tropo_file
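The date_list construction above assumes every GRIB file name embeds an 8-digit YYYYMMDD date; a quick sketch with a hypothetical file name:

import re

grib_file = 'ERA5_N30_N40_E100_E110_20190101_14.grb'  # hypothetical name
date = re.findall(r'\d{8}', grib_file)[0]
print(date)  # 20190101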
Example #5
def main(iargs=None):
    inps = cmd_line_parse(iargs)
    inps.work_dir = os.path.abspath(os.path.dirname(inps.ts_file))
    inps.cbar_file = os.path.join(inps.work_dir, 'google_earth_cbar.png')
    inps.star_file = os.path.join(inps.work_dir, "star.png")
    inps.dot_file = os.path.join(inps.work_dir, "shaded_dot.png")
    inps.dygraph_file = os.path.join(inps.work_dir, "dygraph-combined.js")
    inps.kml_data_dir = os.path.join(inps.work_dir, 'kml_data')

    ## Define file names
    if inps.outfile:
        inps.outfile_base = os.path.splitext(os.path.basename(inps.outfile))[0]
    else:
        inps.outfile_base = plot.auto_figure_title(inps.ts_file,
                                                   inps_dict=vars(inps))
    kml_root_file = os.path.join(inps.work_dir,
                                 '{}_root.kml'.format(inps.outfile_base))
    kmz_file = os.path.join(inps.work_dir, '{}.kmz'.format(inps.outfile_base))

    ## read data
    ts_obj = timeseries(inps.ts_file)
    ts_obj.open()
    length, width = ts_obj.length, ts_obj.width
    inps.metadata = ts_obj.metadata
    lats, lons = ut.get_lat_lon(ts_obj.metadata)
    print('input data shape in row/col: {}/{}'.format(length, width))

    vel = readfile.read(inps.vel_file, datasetName='velocity')[0] * 100.
    # Set min/max velocity for colormap
    if inps.vlim is None:
        inps.vlim = [np.nanmin(vel), np.nanmax(vel)]
    if inps.wrap:
        print('re-wrapping data to {} cm/year for color coding'.format(
            inps.vlim))

    ##--------- Create root KML file with network links to data KML files --------------##
    kml_root_doc = KML.Document()

    # 1 Create Overlay element for colorbar
    cbar_overlay = generate_cbar_element(cbar_file=inps.cbar_file,
                                         vmin=inps.vlim[0],
                                         vmax=inps.vlim[1],
                                         cmap=inps.colormap)
    kml_root_doc.append(cbar_overlay)

    # 2 Generate the placemark for the Reference Pixel
    ref_point = create_reference_point_element(inps, lats, lons, ts_obj)
    print('add reference point.')
    ref_folder = KML.Folder(KML.name("ReferencePoint"))
    ref_folder.append(ref_point)
    kml_root_doc.append(ref_folder)

    # 3 Create data folder to contain actual data elements
    data_folder = KML.Folder(KML.name("Data"))
    for i, step in enumerate(inps.steps):
        net_link = generate_network_link(inps,
                                         ts_obj,
                                         step=step,
                                         lod=(inps.lods[i], inps.lods[i + 1]))
        if net_link is not None:
            data_folder.append(net_link)
    kml_root_doc.append(data_folder)

    ##---------------------------- Write root KML file ------------------------------##
    print('-' * 30)
    print('writing ' + kml_root_file)
    kml_root = KML.kml()
    kml_root.append(kml_root_doc)
    with open(kml_root_file, 'w') as f:
        f.write(etree.tostring(kml_root, pretty_print=True).decode('utf-8'))

    ## Copy auxiliary files
    res_dir = os.path.join(os.path.dirname(mintpy.__file__), "data")
    for fname in [inps.star_file, inps.dot_file, inps.dygraph_file]:
        src_file = os.path.join(res_dir, os.path.basename(fname))
        shutil.copy2(src_file, inps.work_dir)
        print("copy {} to the local directory".format(src_file))

    ## Generate KMZ file
    # 1) go to the directory of kmz file
    run_dir = os.path.abspath(os.getcwd())
    os.chdir(inps.work_dir)
    # 2) zip all data files
    with ZipFile(kmz_file, 'w') as fz:
        kml_data_files = get_all_file_paths(inps.kml_data_dir)
        for fname in [
                kml_root_file, inps.cbar_file, inps.dygraph_file,
                inps.dot_file, inps.star_file
        ] + kml_data_files:
            fz.write(os.path.relpath(fname))
            os.remove(fname)
        shutil.rmtree(inps.kml_data_dir)
    # 3) go back to the running directory
    os.chdir(run_dir)
    print('merged all files to {}'.format(kmz_file))
    print('Done.')
    print('Open {} in Google Earth and play!'.format(kmz_file))
    return
Example #6
def create_kml_region_document(inps, box_list, ts_obj, step):
    """Create list of KML.Document() objects 
    for one level of details defined by box_list and step
    """
    dot_file = '../../{}'.format(os.path.basename(inps.dot_file))

    ## 1. read data file into timeseries object
    region_docs = []
    num_box = len(box_list)
    for box_idx in range(num_box):
        box = box_list[box_idx]

        if box is None:
            box = (0, 0, ts_obj.width, ts_obj.length)

        length = box[3] - box[1]
        width = box[2] - box[0]

        # 1.1 Parse Date
        dates = np.array(
            ts_obj.times
        )  # 1D np.array of dates in datetime.datetime object in size of [num_date,]
        dates = list(map(lambda d: d.strftime("%Y-%m-%d"), dates))
        num_date = len(dates)

        # 1.2 Parse Spatial coordinates
        lats, lons = ut.get_lat_lon(ts_obj.metadata, box=box)
        rows, cols = np.mgrid[box[1]:box[3] - 1:length * 1j,
                              box[0]:box[2] - 1:width * 1j]

        # 1.3 Read Velocity / time-series / temporal coherence data
        vel = readfile.read(inps.vel_file, datasetName='velocity',
                            box=box)[0] * 100.
        vel_std = readfile.read(
            inps.vel_file, datasetName='velocityStd', box=box)[0] * 100.
        ts_data = readfile.read(inps.ts_file, box=box)[0] * 100.
        ts_data -= np.tile(
            ts_data[0, :, :],
            (ts_data.shape[0], 1, 1))  # enforce displacement starts from zero
        temp_coh = readfile.read(inps.tcoh_file, box=box)[0]
        mask = readfile.read(inps.mask_file, box=box)[0]

        vel_c = np.array(vel, dtype=np.float32)
        if inps.wrap:
            vel_c = inps.vlim[0] + np.mod(vel_c - inps.vlim[0],
                                          inps.vlim[1] - inps.vlim[0])

        ## 2. Create KML Document
        kml_document = KML.Document()

        # 2.1 Set and normalize colormap to defined vlim
        colormap = mpl.cm.get_cmap(inps.colormap)
        norm = mpl.colors.Normalize(vmin=inps.vlim[0], vmax=inps.vlim[1])

        # 2.2 Set number of pixels to use
        num_pixel = int(length / step) * int(width / step)
        msg = "create KML doc for box {}/{}: {}".format(i + 1, num_box, box)
        msg += ", step: {} pixels, {} pixels in total ...".format(
            step, num_pixel)
        print(msg)

        # 2.3 Create data folder for all points
        data_folder = KML.Folder(KML.name("Data"))
        for i in range(0, length, step):
            for j in range(0, width, step):
                if mask[i, j]:  # add point if it's not marked as masked out
                    lat = lats[i, j]
                    lon = lons[i, j]
                    row = rows[i, j]
                    col = cols[i, j]
                    ts = ts_data[:, i, j]
                    v = vel[i, j]
                    vc = vel_c[i, j]
                    vstd = vel_std[i, j]
                    tcoh = temp_coh[i, j]

                    # 2.3.1 Create KML icon style element
                    style = KML.Style(
                        KML.IconStyle(
                            KML.color(get_hex_color(vc, colormap, norm)),
                            KML.scale(0.5),
                            KML.Icon(KML.href("{}".format(dot_file)))))

                    # 2.3.2 Create KML point element
                    point = KML.Point(KML.coordinates("{},{}".format(lon,
                                                                     lat)))

                    js_data_string = generate_js_datastring(
                        dates, inps.dygraph_file, num_date, ts)

                    # 2.3.3 Create KML description element
                    stats_info = get_description_string((lat, lon), (row, col),
                                                        v,
                                                        vstd,
                                                        ts[-1],
                                                        tcoh=tcoh)
                    description = KML.description(stats_info, js_data_string)

                    # 2.3.4 Create KML Placemark element to hold style, description, and point elements
                    placemark = KML.Placemark(style, description, point)

                    # 2.3.5 Append each placemark element to the KML document object
                    data_folder.append(placemark)

        # 2.4 Append data folder to KML document
        kml_document.append(data_folder)

        # 2.5 Append KML document to list of regionalized documents
        region_docs.append(kml_document)

    return region_docs
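get_hex_color() above is a MintPy helper; a hedged sketch of what such a function plausibly looks like, assuming KML's aabbggrr byte order (illustrative, not the actual implementation):

def get_hex_color_sketch(v, colormap, norm):
    """Map a value to a KML color string; KML stores colors as aabbggrr."""
    r, g, b = [int(round(c * 255)) for c in colormap(norm(v))[:3]]
    return 'ff{:02x}{:02x}{:02x}'.format(b, g, r)  # 'ff' = fully opaque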
Example #7
def read_data(inps):
    """
    Returns: defo: 2D np.ndarray with invalid/masked-out pixels set to NaN
    """
    # metadata
    inps.metadata = readfile.read_attribute(inps.file)
    k = inps.metadata['FILE_TYPE']
    inps.range2phase = -4. * np.pi / float(inps.metadata['WAVELENGTH'])

    # mask
    if inps.mask_file:
        inps.mask = readfile.read(inps.mask_file)[0]
    else:
        inps.mask = np.ones((int(inps.metadata['LENGTH']),
                             int(inps.metadata['WIDTH'])), dtype=np.bool_)

    # data
    if k in ['.unw','velocity']:
        inps.phase = readfile.read(inps.file)[0]
        if k == 'velocity':
            # velocity to displacement
            date1, date2 = inps.metadata['DATE12'].split('_')
            dt1, dt2 = ptime.date_list2vector([date1, date2])[0]
            inps.phase *= (dt2 - dt1).days / 365.25
            # displacement to phase
            inps.phase *= inps.range2phase

        # update mask to exclude pixel with NaN value
        inps.mask *= ~np.isnan(inps.phase)
        # set all masked out pixel to NaN
        inps.phase[inps.mask==0] = np.nan
    else:
        raise ValueError("input file type not supported yet: {}".format(k))
    print('number of pixels: {}'.format(np.sum(inps.mask)))

    # change reference point
    if inps.ref_lalo:
        coord = ut.coordinate(inps.metadata)
        ref_lat, ref_lon = inps.ref_lalo
        ref_y, ref_x = coord.geo2radar(ref_lat, ref_lon)[0:2]
        # update data
        inps.phase -= inps.phase[ref_y, ref_x]
        # update metadata
        inps.metadata['REF_LAT'] = ref_lat
        inps.metadata['REF_LON'] = ref_lon
        inps.metadata['REF_Y'] = ref_y
        inps.metadata['REF_X'] = ref_x

    # read geometry
    inps.lat, inps.lon = ut.get_lat_lon(inps.metadata)
    inps.inc_angle = readfile.read(inps.geom_file, datasetName='incidenceAngle')[0]
    inps.head_angle = np.ones(inps.inc_angle.shape, dtype=np.float32) * float(inps.metadata['HEADING'])
    inps.height = readfile.read(inps.geom_file, datasetName='height')[0]

    # convert the height of ellipsoid to geoid (mean sea level)
    # ref: https://github.com/vandry/geoidheight
    if inps.ellipsoid2geoid:
        # import geoid module
        try:
            import geoid
        except ImportError:
            raise ImportError('Can not import geoidheight!')

        # calculate offset and correct height
        egm_file = os.path.join(os.path.dirname(geoid.__file__), 'geoids/egm2008-1.pgm')
        gh_obj = geoid.GeoidHeight(egm_file)
        h_offset = gh_obj.get(lat=np.nanmean(inps.lat), lon=np.nanmean(inps.lon))
        inps.height -= h_offset

        # print message
        msg = 'convert height from ellipsoid to geoid'
        msg += '\n\tby subtracting a constant offset of {:.2f} m'.format(h_offset)
        print(msg)

    inps.lat[inps.mask==0] = np.nan
    inps.lon[inps.mask==0] = np.nan
    inps.inc_angle[inps.mask==0] = np.nan
    inps.head_angle[inps.mask==0] = np.nan
    inps.height[inps.mask==0] = np.nan

    # output filename
    if not inps.outfile:
        proj_name = sensor.project_name2sensor_name(inps.file)[1]
        if not proj_name:
            raise ValueError('No custom/auto output filename found.')
        inps.outfile = '{}_{}.mat'.format(proj_name, inps.metadata['DATE12'])

        if not inps.outdir:
            inps.outdir = os.path.dirname(inps.file)
        inps.outfile = os.path.join(inps.outdir, inps.outfile)
    inps.outfile = os.path.abspath(inps.outfile)
    return
Example #8
def get_delay_timeseries(inps, atr):
    """Calculate delay time-series and write it to HDF5 file.
    Parameters: inps : namespace, all input parameters
                atr  : dict, metadata to be saved in trop_file
    Returns:    trop_file : str, file name of ECMWF.h5
    """
    def get_dataset_size(fname):
        atr = readfile.read_attribute(fname)
        return (atr['LENGTH'], atr['WIDTH'])

    # check 1 - existing tropo delay file
    if (ut.run_or_skip(out_file=inps.trop_file, in_file=inps.grib_file_list, print_msg=False) == 'skip' 
            and get_dataset_size(inps.trop_file) == get_dataset_size(inps.geom_file)):
        print('{} file exists and is newer than all GRIB files, skip updating.'.format(inps.trop_file))
        return

    # check 2 - geometry file
    if any(i is None for i in [inps.geom_file, inps.ref_yx]):
        print('No DEM / incidenceAngle / ref_yx found, skip calculating tropospheric delays.')
        if not os.path.isfile(inps.trop_file):
            inps.trop_file = None
        return

    # prepare geometry data
    geom_obj = geometry(inps.geom_file)
    geom_obj.open()
    inps.dem = geom_obj.read(datasetName='height')
    inps.inc = geom_obj.read(datasetName='incidenceAngle')
    
    if 'latitude' in geom_obj.datasetNames:
        # for dataset in geo OR radar coord with lookup table in radar-coord (isce, doris)
        inps.lat = geom_obj.read(datasetName='latitude')
        inps.lon = geom_obj.read(datasetName='longitude')
    elif 'Y_FIRST' in geom_obj.metadata:
        # for geo-coded dataset (gamma, roipac)
        inps.lat, inps.lon = ut.get_lat_lon(geom_obj.metadata)
    else: 
        # for radar-coded dataset (gamma, roipac)
        inps.lat, inps.lon = ut.get_lat_lon_rdc(geom_obj.metadata)

    # calculate phase delay
    length, width = int(atr['LENGTH']), int(atr['WIDTH'])
    num_date = len(inps.grib_file_list)
    date_list = [str(re.findall(r'\d{8}', i)[0]) for i in inps.grib_file_list]
    trop_data = np.zeros((num_date, length, width), np.float32)

    print('calculating delay for each date using PyAPS (Jolivet et al., 2011; 2014) ...')
    print('number of grib files used: {}'.format(num_date))
    prog_bar = ptime.progressBar(maxValue=num_date)
    for i in range(num_date):
        grib_file = inps.grib_file_list[i]
        trop_data[i] = get_delay(grib_file, inps)
        prog_bar.update(i+1, suffix=os.path.basename(grib_file))
    prog_bar.close()

    # convert to relative phase delay with respect to the reference date
    inps.ref_date = atr.get('REF_DATE', date_list[0])
    print('convert to relative phase delay with reference date: '+inps.ref_date)
    inps.ref_idx = date_list.index(inps.ref_date)
    trop_data -= np.tile(trop_data[inps.ref_idx, :, :], (num_date, 1, 1))

    # Write tropospheric delay to HDF5
    atr['REF_Y'] = inps.ref_yx[0]
    atr['REF_X'] = inps.ref_yx[1]
    ts_obj = timeseries(inps.trop_file)
    ts_obj.write2hdf5(data=trop_data,
                      dates=date_list,
                      metadata=atr,
                      refFile=inps.timeseries_file)
    return
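The reference-date step above subtracts the delay of the reference acquisition from every epoch; a tiny self-contained sketch of the same operation with hypothetical shapes:

import numpy as np

trop_data = np.random.rand(5, 3, 4).astype(np.float32)  # (num_date, length, width)
ref_idx = 2
trop_data -= np.tile(trop_data[ref_idx, :, :], (5, 1, 1))
assert np.allclose(trop_data[ref_idx], 0)  # zero delay on the reference date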
Example #9
def write_shape_file(fDict, shp_file, box=None):
    '''Write time-series data to a shape file

    Parameters: fDict    - dict, with value for path of data files
                shp_file - str, output filename
                box      - tuple of 4 int, in (x0, y0, x1, y1)
    Returns:    shp_file - str, output filename
    '''

    shpDriver = ogr.GetDriverByName("ESRI Shapefile")
    print('output shape file: {}'.format(shp_file))

    ##Check if shape file already exists
    if os.path.exists(shp_file):
        print('output shape file: {} exists, will be overwritten ....'.format(
            shp_file))
        shpDriver.DeleteDataSource(shp_file)

    ##Start creating shapefile dataset and layer definition
    ds = shpDriver.CreateDataSource(shp_file)
    srs = ogr.osr.SpatialReference()
    srs.ImportFromEPSG(4326)
    layer = ds.CreateLayer('mintpy', srs, geom_type=ogr.wkbPoint)

    #Add code for each point
    fd = ogr.FieldDefn('CODE', ogr.OFTString)
    fd.SetWidth(8)
    layer.CreateField(fd)

    #Add DEM height for each point - this could be before / after DEM error correction
    fd = ogr.FieldDefn('HEIGHT', ogr.OFTReal)
    fd.SetWidth(7)
    fd.SetPrecision(2)
    layer.CreateField(fd)

    #Supposed to represent DEM error estimation uncertainty
    fd = ogr.FieldDefn('H_STDEV', ogr.OFTReal)
    fd.SetWidth(5)
    fd.SetPrecision(2)
    layer.CreateField(fd)

    #Estimated LOS velocity
    fd = ogr.FieldDefn('VEL', ogr.OFTReal)
    fd.SetWidth(8)
    fd.SetPrecision(2)
    layer.CreateField(fd)

    #Estimated uncertainty in velocity
    fd = ogr.FieldDefn('V_STDEV', ogr.OFTReal)
    fd.SetWidth(6)
    fd.SetPrecision(2)
    layer.CreateField(fd)

    #Temporal coherence
    fd = ogr.FieldDefn('COHERENCE', ogr.OFTReal)
    fd.SetWidth(5)
    fd.SetPrecision(3)
    layer.CreateField(fd)

    #Effective area - SqueeSAR DS / PS
    layer.CreateField(ogr.FieldDefn('EFF_AREA', ogr.OFTInteger))

    ##Time to load the dates from time-series HDF5 field and create one attribute for each date
    ts_obj = timeseries(fDict['TimeSeries'])
    ts_obj.open(print_msg=False)
    for date in ts_obj.dateList:
        fd = ogr.FieldDefn('D{0}'.format(date), ogr.OFTReal)
        fd.SetWidth(8)
        fd.SetPrecision(2)
        layer.CreateField(fd)
    layerDefn = layer.GetLayerDefn()

    ####Total number of points
    mask = readfile.read(fDict['Mask'], box=box)[0]
    if box is None:
        # guard against the default: fall back to the full dataset extent,
        # otherwise the box indexing below raises TypeError
        box = (0, 0, mask.shape[1], mask.shape[0])
    nValid = np.sum(mask != 0)
    print('number of points with time-series:', nValid)

    lats, lons = ut.get_lat_lon(ts_obj.metadata,
                                geom_file=fDict['Geometry'],
                                box=box)

    ###Loop over all datasets in context managers to skip close statements
    with h5py.File(fDict['TimeSeries'], 'r') as tsid:
        with h5py.File(fDict['Coherence'], 'r') as cohid:
            with h5py.File(fDict['Velocity'], 'r') as velid:
                with h5py.File(fDict['Geometry'], 'r') as geomid:

                    length = box[3] - box[1]
                    width = box[2] - box[0]

                    #Start counter
                    counter = 1
                    prog_bar = ptime.progressBar(maxValue=nValid)

                    #For each line
                    for i in range(length):
                        line = i + box[1]

                        # read data for the line
                        ts = tsid['timeseries'][:, line, box[0]:box[2]].astype(
                            np.float64)
                        coh = cohid['temporalCoherence'][line,
                                                         box[0]:box[2]].astype(
                                                             np.float64)
                        vel = velid['velocity'][line, box[0]:box[2]].astype(
                            np.float64)
                        vel_std = velid['velocityStd'][line,
                                                       box[0]:box[2]].astype(
                                                           np.float64)
                        hgt = geomid['height'][line, box[0]:box[2]].astype(
                            np.float64)
                        lat = lats[i, :].astype(np.float64)
                        lon = lons[i, :].astype(np.float64)

                        for j in range(width):
                            if mask[i, j] == 0:
                                continue

                            #Create metadata dict
                            rdict = {
                                'CODE': hex(counter)[2:].zfill(8),
                                'HEIGHT': hgt[j],
                                'H_STDEV': 0.,
                                'VEL': vel[j] * 1000,
                                'V_STDEV': vel_std[j] * 1000,
                                'COHERENCE': coh[j],
                                'EFF_AREA': 1
                            }

                            for ind, date in enumerate(ts_obj.dateList):
                                rdict['D{0}'.format(date)] = ts[ind, j] * 1000

                            #Create feature with definition
                            feature = ogr.Feature(layerDefn)
                            add_metadata(feature, [lon[j], lat[j]], rdict)
                            layer.CreateFeature(feature)
                            feature = None

                            # update counter / progress bar
                            counter += 1
                            prog_bar.update(counter,
                                            every=100,
                                            suffix='{}/{}'.format(
                                                counter, nValid))
                    prog_bar.close()

    print('finished writing to file: {}'.format(shp_file))
    return shp_file
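A minimal call sketch for write_shape_file(), with hypothetical input file names:

fDict = {
    'TimeSeries': 'geo_timeseries_demErr.h5',   # hypothetical paths
    'Coherence' : 'geo_temporalCoherence.h5',
    'Velocity'  : 'geo_velocity.h5',
    'Geometry'  : 'geo_geometryRadar.h5',
    'Mask'      : 'geo_maskTempCoh.h5',
}
write_shape_file(fDict, 'mintpy_ts.shp', box=(0, 0, 400, 300))  # box in (x0, y0, x1, y1)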
Example #10
def prep_gbis(inps):
    """prepare data that has to be written in *.mat file"""

    if 'S1' in str(inps.file):
        key1 = 'S1'
        key2 = '.he5'
        for file in os.listdir(os.getcwd()):
            if key1 in file and key2 in file:
                shutil.copy(file, inps.outdir)
        os.chdir(inps.outdir)
        geom_file = multitrack_utilities.find_HDFEOS_fullname(os.getcwd())
    else:
        geom_file = 'geo_geometryRadar.h5'

    # metadata
    unw_file = 'geo_' + inps.startDate + '_' + inps.endDate + '.unw'
    inps.metadata = readfile.read_attribute(unw_file)
    inps.phase, atr = readfile.read(unw_file)

    # mask
    if not inps.mask_file or inps.mask_file == 'None':
        inps.mask = np.ones(
            (int(inps.metadata['LENGTH']), int(inps.metadata['WIDTH'])),
            dtype=np.bool_)
    else:
        inps.mask = readfile.read('geo_' + inps.mask_file)[0]

    # update mask to exclude pixel with NaN value
    inps.mask *= ~np.isnan(inps.phase)
    # set all masked out pixel to NaN
    inps.phase[inps.mask == 0] = np.nan

    # change reference point
    if inps.ref_lalo:
        coord = ut.coordinate(inps.metadata)
        ref_lat, ref_lon = inps.ref_lalo
        ref_y, ref_x = coord.geo2radar(ref_lat, ref_lon)[0:2]
        # update data
        inps.phase -= inps.phase[ref_y, ref_x]
        # update metadata
        inps.metadata['REF_LAT'] = ref_lat
        inps.metadata['REF_LON'] = ref_lon
        inps.metadata['REF_Y'] = ref_y
        inps.metadata['REF_X'] = ref_x

    # read geometry
    inps.lat, inps.lon = ut.get_lat_lon(inps.metadata)
    inps.inc_angle = readfile.read(geom_file, datasetName='incidenceAngle')[0]
    inps.head_angle = np.ones(inps.inc_angle.shape, dtype=np.float32) * float(
        inps.metadata['HEADING'])
    inps.height = readfile.read(geom_file, datasetName='height')[0]
    inps.lat[inps.mask == 0] = np.nan
    inps.lon[inps.mask == 0] = np.nan
    inps.inc_angle[inps.mask == 0] = np.nan
    inps.head_angle[inps.mask == 0] = np.nan
    inps.height[inps.mask == 0] = np.nan

    # output filename
    proj_name = atr['PROJECT_NAME']
    if not proj_name:
        raise ValueError('No custom/auto output filename found.')
    inps.outfile = '{}_{}_{}.mat'.format(proj_name, inps.startDate,
                                         inps.endDate)
    inps.outfile = os.path.join(inps.outdir, inps.outfile)
    inps.outfile = os.path.abspath(inps.outfile)

    #delete geo_*.h5 files
    multitrack_utilities.delete_tmpgeo(inps.outdir, 'S1_', '.he5')
    multitrack_utilities.delete_tmpgeo(inps.outdir, 'geo_', '.h5')
    return
Example #11
def calculate_delay_timeseries(tropo_file, dis_file, geom_file, GACOS_dir):
    """calculate delay time-series and write to HDF5 file"""

    ## get list of dates
    atr = readfile.read_attribute(dis_file)
    ftype = atr['FILE_TYPE']
    if ftype == 'timeseries':
        date_list = timeseries(dis_file).get_date_list()

    elif ftype == '.unw':
        date12 = readfile.read_attribute(dis_file)['DATE12']
        date_list = ptime.yyyymmdd(date12.split('-'))

    else:
        raise ValueError(
            'un-supported displacement file type: {}'.format(ftype))

    # list of dates --> list of ztd files
    ztd_files = [
        os.path.join(GACOS_dir, '{}.ztd'.format(i)) for i in date_list
    ]

    # check missing ztd files
    flag = np.ones(len(date_list), dtype=np.bool_)
    for i in range(len(date_list)):
        if not os.path.isfile(ztd_files[i]):
            print('WARNING: {} file not found, skipping it and continuing'.format(
                ztd_files[i]))
            flag[i] = False

    if np.any(flag == 0):
        date_list = np.array(date_list)[flag].tolist()
        ztd_files = np.array(ztd_files)[flag].tolist()

    ## update_mode
    def get_dataset_size(fname):
        atr = readfile.read_attribute(fname)
        return (atr['LENGTH'], atr['WIDTH'])

    def run_or_skip(ztd_files, tropo_file, geom_file):
        print('update mode: ON')
        print('output file: {}'.format(tropo_file))
        flag = 'skip'

        # check existence and modification time
        if ut.run_or_skip(out_file=tropo_file,
                          in_file=ztd_files,
                          print_msg=False) == 'run':
            flag = 'run'
            print(
                '1) output file either does NOT exist or is NOT newer than all ZTD files.'
            )

        else:
            print('1) output file exists and is newer than all ZTD files.')

            # check dataset size in space / time
            date_list = [str(re.findall(r'\d{8}', i)[0]) for i in ztd_files]
            if (get_dataset_size(tropo_file) != get_dataset_size(geom_file)
                    or any(i not in timeseries(tropo_file).get_date_list()
                           for i in date_list)):
                flag = 'run'
                print(
                    '2) output file does NOT have the same len/wid as the geometry file {} or does NOT contain all dates'
                    .format(geom_file))
            else:
                print(
                    '2) output file has the same len/wid as the geometry file and contains all dates'
                )

                # check if output file is fully written
                with h5py.File(tropo_file, 'r') as f:
                    if np.all(f['timeseries'][-1, :, :] == 0):
                        flag = 'run'
                        print('3) output file is NOT fully written.')
                    else:
                        print('3) output file is fully written.')

        # result
        print('run or skip: {}'.format(flag))
        return flag

    if run_or_skip(ztd_files, tropo_file, geom_file) == 'skip':
        return

    ## prepare output file

    # metadata
    atr['FILE_TYPE'] = 'timeseries'
    atr['UNIT'] = 'm'

    # remove metadata related with double reference
    # because absolute delay is calculated and saved
    for key in ['REF_DATE', 'REF_X', 'REF_Y', 'REF_LAT', 'REF_LON']:
        if key in atr.keys():
            atr.pop(key)

    # instantiate time-series
    length, width = int(atr['LENGTH']), int(atr['WIDTH'])
    num_date = len(date_list)
    dates = np.array(date_list, dtype=np.string_)
    ds_name_dict = {
        "date": [dates.dtype, (num_date, ), dates],
        "timeseries": [np.float32, (num_date, length, width), None],
    }
    writefile.layout_hdf5(tropo_file, ds_name_dict, metadata=atr)

    ## calculate phase delay

    # read geometry
    print('read incidenceAngle from file: {}'.format(geom_file))
    inc_angle = readfile.read(geom_file, datasetName='incidenceAngle')[0]
    cos_inc_angle = np.cos(inc_angle * np.pi / 180.0)

    if 'Y_FIRST' in atr.keys():
        pts_new = None

    else:
        # pixel coordinates in geometry file
        print('get pixel coordinates in geometry file')
        lats, lons = ut.get_lat_lon(atr, geom_file)
        pts_new = np.hstack((lats.reshape(-1, 1), lons.reshape(-1, 1)))

    # loop for date-by-date IO
    prog_bar = ptime.progressBar(maxValue=num_date)
    for i in range(num_date):
        date_str = date_list[i]
        ztd_file = ztd_files[i]

        # calc delay
        if 'Y_FIRST' in atr.keys():
            delay = get_delay_geo(ztd_file, atr, cos_inc_angle)

        else:
            delay = get_delay_radar(ztd_file, cos_inc_angle, pts_new)

        # write delay to file
        block = [i, i + 1, 0, length, 0, width]
        writefile.write_hdf5_block(tropo_file,
                                   data=delay,
                                   datasetName='timeseries',
                                   block=block,
                                   print_msg=False)

        prog_bar.update(i + 1, suffix=os.path.basename(ztd_file))
    prog_bar.close()

    return tropo_file
Example #12
def calc_delay_timeseries(inps):
    """Calculate delay time-series and write it to HDF5 file.
    Parameters: inps : namespace, all input parameters
    Returns:    tropo_file : str, file name of ECMWF.h5
    """
    def get_dataset_size(fname):
        atr = readfile.read_attribute(fname)
        shape = (int(atr['LENGTH']), int(atr['WIDTH']))
        return shape

    def run_or_skip(grib_files, tropo_file, geom_file):
        print('update mode: ON')
        print('output file: {}'.format(tropo_file))
        flag = 'skip'

        # check existence and modification time
        if ut.run_or_skip(out_file=tropo_file,
                          in_file=grib_files,
                          print_msg=False) == 'run':
            flag = 'run'
            print(
                '1) output file either does NOT exist or is NOT newer than all GRIB files.'
            )

        else:
            print('1) output file exists and is newer than all GRIB files.')

            # check dataset size in space / time
            date_list = [
                str(re.findall(r'\d{8}', os.path.basename(i))[0])
                for i in grib_files
            ]
            if (get_dataset_size(tropo_file) != get_dataset_size(geom_file)
                    or any(i not in timeseries(tropo_file).get_date_list()
                           for i in date_list)):
                flag = 'run'
                print(
                    '2) output file does NOT have the same len/wid as the geometry file {} or does NOT contain all dates'
                    .format(geom_file))
            else:
                print(
                    '2) output file has the same len/wid as the geometry file and contains all dates'
                )

                # check if output file is fully written
                with h5py.File(tropo_file, 'r') as f:
                    if np.all(f['timeseries'][-1, :, :] == 0):
                        flag = 'run'
                        print('3) output file is NOT fully written.')
                    else:
                        print('3) output file is fully written.')

        # result
        print('run or skip: {}'.format(flag))
        return flag

    if run_or_skip(inps.grib_files, inps.tropo_file, inps.geom_file) == 'skip':
        return

    ## 1. prepare geometry data
    geom_obj = geometry(inps.geom_file)
    geom_obj.open()
    inps.inc = geom_obj.read(datasetName='incidenceAngle')
    inps.dem = geom_obj.read(datasetName='height')

    # for testing
    if inps.custom_height:
        print(
            'use input custom height of {} m for vertical integration'.format(
                inps.custom_height))
        inps.dem[:] = inps.custom_height

    if 'latitude' in geom_obj.datasetNames:
        # for lookup table in radar-coord (isce, doris)
        inps.lat = geom_obj.read(datasetName='latitude')
        inps.lon = geom_obj.read(datasetName='longitude')

    elif 'Y_FIRST' in geom_obj.metadata:
        # for lookup table in geo-coded (gamma, roipac) and obs. in geo-coord
        inps.lat, inps.lon = ut.get_lat_lon(geom_obj.metadata)

        # convert coordinates to lat/lon, e.g. from UTM for ASF HyPP3
        if not geom_obj.metadata['Y_UNIT'].startswith('deg'):
            inps.lat, inps.lon = ut.to_latlon(inps.atr['OG_FILE_PATH'],
                                              inps.lon, inps.lat)

    else:
        # for lookup table in geo-coded (gamma, roipac) and obs. in radar-coord
        inps.lat, inps.lon = ut.get_lat_lon_rdc(inps.atr)

    # mask of valid pixels
    mask = np.multiply(inps.inc != 0, ~np.isnan(inps.inc))

    ## 2. prepare output file
    # metadata
    atr = inps.atr.copy()
    atr['FILE_TYPE'] = 'timeseries'
    atr['UNIT'] = 'm'

    # remove metadata related with double reference
    # because absolute delay is calculated and saved
    for key in ['REF_DATE', 'REF_X', 'REF_Y', 'REF_LAT', 'REF_LON']:
        if key in atr.keys():
            atr.pop(key)

    # instantiate time-series
    length, width = int(atr['LENGTH']), int(atr['WIDTH'])
    num_date = len(inps.grib_files)
    date_list = [
        str(re.findall(r'\d{8}', os.path.basename(i))[0])
        for i in inps.grib_files
    ]
    dates = np.array(date_list, dtype=np.string_)
    ds_name_dict = {
        "date": [dates.dtype, (num_date, ), dates],
        "timeseries": [np.float32, (num_date, length, width), None],
    }
    writefile.layout_hdf5(inps.tropo_file, ds_name_dict, metadata=atr)

    ## 3. calculate phase delay
    print(
        '\n------------------------------------------------------------------------------'
    )
    print(
        'calculating absolute delay for each date using PyAPS (Jolivet et al., 2011; 2014) ...'
    )
    print('number of grib files used: {}'.format(num_date))

    # use 'not' rather than '~': bitwise-not on a Python bool is always truthy
    prog_bar = ptime.progressBar(maxValue=num_date, print_msg=not inps.verbose)
    for i in range(num_date):
        grib_file = inps.grib_files[i]

        # calc tropo delay
        tropo_data = get_delay(grib_file,
                               tropo_model=inps.tropo_model,
                               delay_type=inps.delay_type,
                               dem=inps.dem,
                               inc=inps.inc,
                               lat=inps.lat,
                               lon=inps.lon,
                               mask=mask,
                               verbose=inps.verbose)

        # write tropo delay to file
        block = [i, i + 1, 0, length, 0, width]
        writefile.write_hdf5_block(inps.tropo_file,
                                   data=tropo_data,
                                   datasetName='timeseries',
                                   block=block,
                                   print_msg=False)

        prog_bar.update(i + 1, suffix=os.path.basename(grib_file))
    prog_bar.close()

    return inps.tropo_file
Example #13
def main(iargs=None):
    inps = cmd_line_parse(iargs)
    inps.work_dir = os.path.abspath(os.path.dirname(inps.ts_file))
    inps.cbar_file = os.path.join(inps.work_dir, 'google_earth_cbar.png')
    inps.star_file = os.path.join(inps.work_dir, "star.png")
    inps.dot_file = os.path.join(inps.work_dir, "shaded_dot.png")
    inps.dygraph_file = os.path.join(inps.work_dir, "dygraph-combined.js")
    inps.kml_data_dir = os.path.join(inps.work_dir, 'kml_data')

    ## Define file names
    inps.outfile_base = plot.auto_figure_title(inps.ts_file, inps_dict=vars(inps))
    kml_master_file = os.path.join(inps.work_dir, '{}_master.kml'.format(inps.outfile_base))
    kmz_file = os.path.join(inps.work_dir, '{}.kmz'.format(inps.outfile_base))

    ## read data
    ts_obj = timeseries(inps.ts_file)
    ts_obj.open()
    length, width = ts_obj.length, ts_obj.width
    inps.metadata = ts_obj.metadata
    lats, lons = ut.get_lat_lon(ts_obj.metadata)
    print('input data shape in row/col: {}/{}'.format(length, width))

    vel = readfile.read(inps.vel_file, datasetName='velocity')[0] * 100.
    # Set min/max velocity for colormap
    if inps.vlim is None:
        inps.vlim = [np.nanmin(vel), np.nanmax(vel)]


    ##--------- Create master KML file with network links to data KML files --------------##
    kml_master_doc = KML.Document()

    # 1 Create Overlay element for colorbar
    cbar_overlay = generate_cbar_element(cbar_file=inps.cbar_file,
                                         vmin=inps.vlim[0],
                                         vmax=inps.vlim[1],
                                         cmap=inps.colormap)
    kml_master_doc.append(cbar_overlay)

    # 2 Generate the placemark for the Reference Pixel
    ref_point = create_reference_point_element(inps, lats, lons, ts_obj)
    print('add reference point.')
    ref_folder = KML.Folder(KML.name("ReferencePoint"))
    ref_folder.append(ref_point)
    kml_master_doc.append(ref_folder)

    # 3 Create data folder to contain actual data elements
    net_link1 = generate_network_link(inps, ts_obj, step=inps.steps[0], lod=(0, inps.lods[0]))
    net_link2 = generate_network_link(inps, ts_obj, step=inps.steps[1], lod=(inps.lods[0], inps.lods[1]))
    net_link3 = generate_network_link(inps, ts_obj, step=inps.steps[2], lod=(inps.lods[1], inps.lods[2]))

    # 3.3 Append network links to data folder
    data_folder = KML.Folder(KML.name("Data"))
    data_folder.append(net_link1)
    data_folder.append(net_link2)
    data_folder.append(net_link3)
    kml_master_doc.append(data_folder)


    ##---------------------------- Write master KML file ------------------------------##
    print('-'*30)
    print('writing ' + kml_master_file)
    kml_master = KML.kml()
    kml_master.append(kml_master_doc)
    with open(kml_master_file, 'w') as f:
        f.write(etree.tostring(kml_master, pretty_print=True).decode('utf-8'))

    ## Copy auxiliary files
    res_dir = os.path.join(os.path.dirname(__file__), "../docs/resources")
    for fname in [inps.star_file, inps.dot_file, inps.dygraph_file]:
        src_file = os.path.join(res_dir, os.path.basename(fname))
        shutil.copy2(src_file, inps.work_dir)
        print("copy {} to the local directory".format(src_file))

    ## Generate KMZ file
    kml_files_str = ''
    for fname in [kml_master_file, inps.kml_data_dir,
                  inps.cbar_file,inps.dygraph_file,
                  inps.dot_file, inps.star_file]:
        kml_files_str += ' {}'.format(os.path.basename(fname))
    cmd = 'cd {}; zip -r {} {}'.format(inps.work_dir, kmz_file, kml_files_str)
    print('writing {} from kml files'.format(kmz_file))
    os.system(cmd)

    ## Remove extra files from file tree after KMZ generation
    cmd = 'cd {}; rm -r {}'.format(inps.work_dir, kml_files_str)
    os.system(cmd)

    print('Done.')
    print('Open {} in Google Earth!'.format(kmz_file))
    return
Example #14
def read_data(inps):
    """
    Returns: defo: 2D np.ndarray with invalid/masked-out pixels set to NaN
    """
    # metadata
    inps.metadata = readfile.read_attribute(inps.file)
    k = inps.metadata['FILE_TYPE']
    inps.range2phase = -4. * np.pi / float(inps.metadata['WAVELENGTH'])
    ext = os.path.splitext(inps.file)[1]

    # mask
    if inps.mask_file:
        inps.mask = readfile.read(inps.mask_file)[0]
    else:
        inps.mask = np.ones(
            (int(inps.metadata['LENGTH']), int(inps.metadata['WIDTH'])),
            dtype=np.bool_)

    # data
    if k in ['.unw', 'velocity']:
        inps.phase = readfile.read(inps.file)[0]
        if k == 'velocity':
            # velocity to displacement
            date1, date2 = inps.metadata['DATE12'].split('_')
            dt1, dt2 = ptime.date_list2vector([date1, date2])[0]
            inps.phase *= (dt2 - dt1).days / 365.25
            # displacement to phase
            inps.phase *= inps.range2phase

        # update mask to exclude pixel with NaN value
        inps.mask *= ~np.isnan(inps.phase)
        # set all masked out pixel to NaN
        inps.phase[inps.mask == 0] = np.nan
    else:
        raise ValueError("input file type not supported yet: {}".format(k))
    print('number of pixels: {}'.format(np.sum(inps.mask)))

    # change reference point
    if inps.ref_lalo:
        coord = ut.coordinate(inps.metadata)
        ref_lat, ref_lon = inps.ref_lalo
        ref_y, ref_x = coord.geo2radar(ref_lat, ref_lon)[0:2]
        # update data
        inps.phase -= inps.phase[ref_y, ref_x]
        # update metadata
        inps.metadata['REF_LAT'] = ref_lat
        inps.metadata['REF_LON'] = ref_lon
        inps.metadata['REF_Y'] = ref_y
        inps.metadata['REF_X'] = ref_x

    # read geometry
    inps.lat, inps.lon = ut.get_lat_lon(inps.metadata)
    inps.inc_angle = readfile.read(inps.geom_file,
                                   datasetName='incidenceAngle')[0]
    inps.head_angle = np.ones(inps.inc_angle.shape, dtype=np.float32) * float(
        inps.metadata['HEADING'])
    inps.lat[inps.mask == 0] = np.nan
    inps.lon[inps.mask == 0] = np.nan
    inps.inc_angle[inps.mask == 0] = np.nan
    inps.head_angle[inps.mask == 0] = np.nan

    # output filename
    if not inps.outfile:
        out_dir = os.path.dirname(inps.file)
        proj_name = sensor.project_name2sensor_name(out_dir)[1]
        if not proj_name:
            raise ValueError('No custom/auto output filename found.')
        inps.outfile = '{}_{}.mat'.format(proj_name, inps.metadata['DATE12'])
        inps.outfile = os.path.join(out_dir, inps.outfile)
    inps.outfile = os.path.abspath(inps.outfile)
    return