def read_date_info(inps):
    """Read dates used in the estimation and its related info.
    Parameters: inps - Namespace
    Returns:    inps - Namespace
    """
    if inps.key == 'timeseries':
        tsobj = timeseries(inps.timeseries_file)
    elif inps.key == 'giantTimeseries':
        tsobj = giantTimeseries(inps.timeseries_file)
    elif inps.key == 'HDFEOS':
        tsobj = HDFEOS(inps.timeseries_file)
    else:
        raise ValueError('input file is {}, not a time-series file.'.format(inps.key))
    tsobj.open()
    inps.excludeDate = read_exclude_date(inps, tsobj.dateList)

    # exclude dates without observation data (for offset time series only, for now)
    if os.path.basename(inps.timeseries_file).startswith('timeseriesRg'):
        date_list = timeseries(inps.timeseries_file).get_date_list()
        data, atr = readfile.read(inps.timeseries_file)
        flag = np.nansum(data, axis=(1, 2)) == 0
        flag[date_list.index(atr['REF_DATE'])] = 0
        if np.sum(flag) > 0:
            print('number of empty dates to exclude: {}'.format(np.sum(flag)))
            inps.excludeDate += np.array(date_list)[flag].tolist()
            inps.excludeDate = sorted(list(set(inps.excludeDate)))

    # dates used for the estimation: inps.dateList
    inps.dateList = [i for i in tsobj.dateList if i not in inps.excludeDate]
    inps.numDate = len(inps.dateList)
    inps.startDate = inps.dateList[0]
    inps.endDate = inps.dateList[-1]
    print('-' * 50)
    print('dates from input file: {}\n{}'.format(tsobj.numDate,
                                                 tsobj.dateList))
    print('-' * 50)
    if len(inps.dateList) == len(tsobj.dateList):
        print('using all dates to calculate the velocity')
    else:
        print('dates used to estimate the velocity: {}\n{}'.format(
            inps.numDate, inps.dateList))
    print('-' * 50)

    # flag array for ts data reading
    inps.dropDate = np.array(
        [i not in inps.excludeDate for i in tsobj.dateList], dtype=np.bool_)

    # output file name
    if not inps.outfile:
        fbase = os.path.splitext(os.path.basename(inps.timeseries_file))[0]
        outname = 'velocity'
        if inps.key == 'giantTimeseries':
            prefix = os.path.basename(inps.timeseries_file).split('PARAMS')[0]
            outname = prefix + outname
        elif fbase in ['timeseriesRg', 'timeseriesAz']:
            suffix = fbase.split('timeseries')[-1]
            outname = outname + suffix
        outname += '.h5'
        inps.outfile = outname

    return inps
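A minimal usage sketch for the function above. It assumes the MintPy-style objects (timeseries, readfile, read_exclude_date) are importable and that inps is the argparse Namespace built upstream; the file name and attribute set here are hypothetical, and the exact attributes read_exclude_date() expects may differ:

from argparse import Namespace

inps = Namespace(
    timeseries_file='timeseries.h5',  # hypothetical input file
    key='timeseries',                 # FILE_TYPE from the file metadata
    excludeDate=[],                   # optional dates to drop, YYYYMMDD strings
    outfile=None,                     # let the function pick 'velocity.h5'
)
inps = read_date_info(inps)
print(inps.startDate, inps.endDate, inps.numDate)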
Example #2
def read_date_info(inps):
    """Get inps.excludeDate full list
    Inputs:
        inps          - Namespace, 
    Output:
        inps.excludeDate  - list of string for exclude date in YYYYMMDD format
    """
    if inps.key == 'timeseries':
        tsobj = timeseries(inps.timeseries_file)
    elif inps.key == 'giantTimeseries':
        tsobj = giantTimeseries(inps.timeseries_file)
    elif inps.key == 'HDFEOS':
        tsobj = HDFEOS(inps.timeseries_file)
    else:
        raise ValueError('input file is {}, not a time-series file.'.format(inps.key))
    tsobj.open()
    inps.excludeDate = read_exclude_date(inps, tsobj.dateList)

    # dates used for the estimation: inps.dateList
    inps.dateList = [i for i in tsobj.dateList if i not in inps.excludeDate]

    date_list = inps.dateList
    dt_list = [dt.strptime(i, '%Y%m%d') for i in date_list]
    yr_list = [i.year + (i.timetuple().tm_yday - 1) / 365.25 for i in dt_list]
    yr_diff = np.array(yr_list)
    yr_diff -= yr_diff[0]
    inps.yr_diff = yr_diff

    inps.numDate = len(inps.dateList)
    print('-' * 50)
    print('dates from input file: {}\n{}'.format(tsobj.numDate,
                                                 tsobj.dateList))
    print('-' * 50)
    if len(inps.dateList) == len(tsobj.dateList):
        print('using all dates to calculate the velocity')
    else:
        print('dates used to estimate the velocity: {}\n{}'.format(
            inps.numDate, inps.dateList))
    print('-' * 50)

    # flag array for ts data reading
    inps.dropDate = np.array(
        [i not in inps.excludeDate for i in tsobj.dateList], dtype=np.bool_)

    # output file name
    if not inps.outfile:
        outname = 'velocity'
        if inps.key == 'giantTimeseries':
            prefix = os.path.basename(inps.timeseries_file).split('PARAMS')[0]
            outname = prefix + outname
        outname += '.h5'
        inps.outfile = outname
    return inps
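The yr_diff block above converts YYYYMMDD strings to decimal years using a fixed 365.25-day year. A self-contained sketch of that conversion on sample dates:

from datetime import datetime as dt
import numpy as np

date_list = ['20190101', '20190701', '20200101']
dt_list = [dt.strptime(i, '%Y%m%d') for i in date_list]
yr_list = [i.year + (i.timetuple().tm_yday - 1) / 365.25 for i in dt_list]
yr_diff = np.array(yr_list) - yr_list[0]  # years elapsed since the first date
print(yr_diff)  # -> [0.         0.49555099 1.        ]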
Example #3
def print_date_list(fname, disp_ifgram='all', disp_num=False, print_msg=False):
    """Print time/date info of file"""
    k = readfile.read_attribute(fname)['FILE_TYPE']
    dateList = None
    if k in ['timeseries']:
        dateList = timeseries(fname).get_date_list()

    elif k == 'HDFEOS':
        dateList = HDFEOS(fname).get_date_list()

    elif k == 'giantTimeseries':
        dateList = giantTimeseries(fname).get_date_list()

    elif k in ['giantIfgramStack']:
        dateList = giantIfgramStack(fname).get_date12_list()

    elif k in ['ifgramStack']:
        obj = ifgramStack(fname)
        obj.open(print_msg=False)
        dateListAll = obj.get_date12_list(dropIfgram=False)
        dateListKept = obj.get_date12_list(dropIfgram=True)

        # show dropped ifgram or not
        if disp_ifgram == 'all':
            dateList = list(dateListAll)
        elif disp_ifgram == 'kept':
            dateList = list(dateListKept)
        else:
            dateList = sorted(list(set(dateListAll) - set(dateListKept)))

    else:
        print('--date option cannot be applied to {} file, ignoring it.'.format(k))

    # print list info
    if print_msg and dateList is not None:
        for d in dateList:
            if disp_num:
                if k in ['ifgramStack']:
                    num = dateListAll.index(d)
                else:
                    num = dateList.index(d)
                msg = '{}\t{}'.format(d, num)
            else:
                msg = d
            print(msg)
    return dateList
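A hedged usage sketch; the file names are hypothetical and the function is assumed to be importable as defined above:

# list the kept (non-dropped) interferogram pairs with their indices
date12_list = print_date_list('inputs/ifgramStack.h5',
                              disp_ifgram='kept',
                              disp_num=True,
                              print_msg=True)

# or just fetch the acquisition dates of a time-series file silently
date_list = print_date_list('timeseries.h5')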
Example #4
def print_date_list(fname, disp_ifgram='all', disp_num=False, print_msg=False):
    """Print time/date info of file"""
    k = readfile.read_attribute(fname)['FILE_TYPE']
    dateList = None
    if k in ['timeseries']:
        dateList = timeseries(fname).get_date_list()

    elif k == 'HDFEOS':
        dateList = HDFEOS(fname).get_date_list()

    elif k == 'giantTimeseries':
        dateList = giantTimeseries(fname).get_date_list()


    elif k in ['giantIfgramStack']:
        dateList = giantIfgramStack(fname).get_date12_list()

    elif k in ['ifgramStack']:
        obj = ifgramStack(fname)
        obj.open(print_msg=False)
        dateListAll = obj.get_date12_list(dropIfgram=False)
        dateListKept = obj.get_date12_list(dropIfgram=True)

        # show dropped ifgram or not
        if disp_ifgram == 'all':
            dateList = list(dateListAll)
        elif disp_ifgram == 'kept':
            dateList = list(dateListKept)
        else:
            dateList = sorted(list(set(dateListAll) - set(dateListKept)))

    else:
        print('--date option cannot be applied to {} file, ignoring it.'.format(k))

    # print list info
    if print_msg and dateList is not None:
        for d in dateList:
            if disp_num:
                if k in ['ifgramStack']:
                    num = dateListAll.index(d)
                else:
                    num = dateList.index(d)
                msg = '{}\t{}'.format(d, num)
            else:
                msg = d
            print(msg)
    return dateList
Example #5
def read_data(inps):
    # metadata
    atr = readfile.read_attribute(inps.file)

    if 'WAVELENGTH' in atr.keys():
        range2phase = -4 * np.pi / float(atr['WAVELENGTH'])

    # change reference pixel
    if inps.ref_yx:
        atr['REF_Y'] = inps.ref_yx[0]
        atr['REF_X'] = inps.ref_yx[1]
        print('change reference point to y/x: {}'.format(inps.ref_yx))

    # various file types
    print('read {} from file {}'.format(inps.dset, inps.file))
    k = atr['FILE_TYPE']
    if k == 'velocity':
        # read/prepare data
        data = readfile.read(inps.file)[0] * range2phase
        print(
            "converting velocity to an interferogram with one year temporal baseline"
        )
        if inps.ref_yx:
            data -= data[inps.ref_yx[0], inps.ref_yx[1]]

        # metadata
        atr['FILE_TYPE'] = '.unw'
        atr['UNIT'] = 'radian'

        # output filename
        if not inps.outfile:
            inps.outfile = '{}{}'.format(
                os.path.splitext(inps.file)[0], atr['FILE_TYPE'])

    elif k == 'timeseries':
        # date1 and date2
        if '_' in inps.dset:
            date1, date2 = ptime.yyyymmdd(inps.dset.split('_'))
        else:
            date1 = atr['REF_DATE']
            date2 = ptime.yyyymmdd(inps.dset)

        # read/prepare data
        data = readfile.read(inps.file, datasetName=date2)[0]
        data -= readfile.read(inps.file, datasetName=date1)[0]
        print('converting range to phase')
        data *= range2phase
        if inps.ref_yx:
            data -= data[inps.ref_yx[0], inps.ref_yx[1]]

        # metadata
        atr['DATE'] = date1[2:8]
        atr['DATE12'] = '{}-{}'.format(date1[2:8], date2[2:8])
        atr['FILE_TYPE'] = '.unw'
        atr['UNIT'] = 'radian'

        # output filename
        if not inps.outfile:
            inps.outfile = '{}_{}.unw'.format(date1, date2)
            if inps.file.startswith('geo_'):
                inps.outfile = 'geo_' + inps.outfile

    elif k == 'HDFEOS':
        dname = inps.dset.split('-')[0]

        # date1 and date2
        if dname == 'displacement':
            if '-' in inps.dset:
                suffix = inps.dset.split('-')[1]
                if '_' in suffix:
                    date1, date2 = ptime.yyyymmdd(suffix.split('_'))
                else:
                    date1 = atr['REF_DATE']
                    date2 = ptime.yyyymmdd(suffix)
            else:
                raise ValueError(
                    "No '-' in input dataset! It is required for {}".format(
                        dname))
        else:
            date_list = HDFEOS(inps.file).get_date_list()
            date1 = date_list[0]
            date2 = date_list[-1]
        date12 = '{}_{}'.format(date1, date2)

        # read / prepare data
        slice_list = readfile.get_slice_list(inps.file)
        if 'displacement' in inps.dset:
            # read/prepare data
            slice_name1 = view.check_dataset_input(
                slice_list, '{}-{}'.format(dname, date1))[0][0]
            slice_name2 = view.check_dataset_input(
                slice_list, '{}-{}'.format(dname, date2))[0][0]
            data = readfile.read(inps.file, datasetName=slice_name1)[0]
            data -= readfile.read(inps.file, datasetName=slice_name2)[0]
            print('converting range to phase')
            data *= range2phase
            if inps.ref_yx:
                data -= data[inps.ref_yx[0], inps.ref_yx[1]]
        else:
            slice_name = view.check_dataset_input(slice_list, inps.dset)[0][0]
            data = readfile.read(inps.file, datasetName=slice_name)[0]

        # metadata
        atr['DATE'] = date1[2:8]
        atr['DATE12'] = '{}-{}'.format(date1[2:8], date2[2:8])
        if dname == 'displacement':
            atr['FILE_TYPE'] = '.unw'
            atr['UNIT'] = 'radian'
        elif 'coherence' in dname.lower():
            atr['FILE_TYPE'] = '.cor'
            atr['UNIT'] = '1'
        elif dname == 'height':
            atr['FILE_TYPE'] = '.dem'
            atr['DATA_TYPE'] = 'int16'
        else:
            raise ValueError('unrecognized input dataset type: {}'.format(
                inps.dset))

        # output filename
        if not inps.outfile:
            inps.outfile = '{}{}'.format(date12, atr['FILE_TYPE'])

    elif k == 'ifgramStack':
        dname, date12 = inps.dset.split('-')
        date1, date2 = date12.split('_')

        # read / prepare data
        data = readfile.read(inps.file, datasetName=inps.dset)[0]
        if dname.startswith('unwrapPhase'):
            if 'REF_X' in atr.keys():
                data -= data[int(atr['REF_Y']), int(atr['REF_X'])]
                print('consider reference pixel in y/x: ({}, {})'.format(
                    atr['REF_Y'], atr['REF_X']))
            else:
                print('No REF_Y/X found.')

        # metadata
        atr['DATE'] = date1[2:8]
        atr['DATE12'] = '{}-{}'.format(date1[2:8], date2[2:8])
        if dname.startswith('unwrapPhase'):
            atr['FILE_TYPE'] = '.unw'
            atr['UNIT'] = 'radian'
        elif dname == 'coherence':
            atr['FILE_TYPE'] = '.cor'
            atr['UNIT'] = '1'
        elif dname == 'wrapPhase':
            atr['FILE_TYPE'] = '.int'
            atr['UNIT'] = 'radian'
        else:
            raise ValueError('unrecognized dataset type: {}'.format(inps.dset))

        # output filename
        if not inps.outfile:
            inps.outfile = '{}{}'.format(date12, atr['FILE_TYPE'])
            if inps.file.startswith('geo_'):
                inps.outfile = 'geo_' + inps.outfile

    else:
        # read data
        data = readfile.read(inps.file, datasetName=inps.dset)[0]

        if inps.outfile:
            fext = os.path.splitext(inps.outfile)[1]
            atr['FILE_TYPE'] = fext
        else:
            # metadata
            if 'coherence' in k.lower():
                atr['FILE_TYPE'] = '.cor'
            elif k in ['mask']:
                atr['FILE_TYPE'] = '.msk'
            elif k in ['geometry'] and inps.dset == 'height':
                if 'Y_FIRST' in atr.keys():
                    atr['FILE_TYPE'] = '.dem'
                else:
                    atr['FILE_TYPE'] = '.hgt'
                atr['UNIT'] = 'm'
            else:
                atr['FILE_TYPE'] = '.unw'

            inps.outfile = '{}{}'.format(
                os.path.splitext(inps.file)[0], atr['FILE_TYPE'])

    # get rid of starting . if output as hdf5 file
    if inps.outfile.endswith('.h5'):
        if atr['FILE_TYPE'].startswith('.'):
            atr['FILE_TYPE'] = atr['FILE_TYPE'][1:]

    atr['PROCESSOR'] = 'roipac'
    return data, atr, inps.outfile
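read_data() returns the array, the ROI_PAC-style metadata, and the output name, but writes nothing itself. A sketch of how the result might be saved, assuming MintPy's writefile.write() is available (the import path and exact signature may differ between versions):

from mintpy.utils import writefile  # assumed import path

data, atr, out_file = read_data(inps)
# writefile.write() infers the output format from the extension and metadata
writefile.write(data, out_file=out_file, metadata=atr)
print('saved {} with PROCESSOR={}'.format(out_file, atr['PROCESSOR']))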
def main():
    parser = build_parser()
    parseArgs = parser.parse_args()
    file_name = parseArgs.file
    output_folder = parseArgs.outputDir
    should_mask = True

    path_name_and_extension = os.path.basename(file_name).split(".")
    path_name = path_name_and_extension[0]
    # ---------------------------------------------------------------------------------------
    # start clock to track how long conversion process takes
    start_time = time.perf_counter()  # time.clock() was removed in Python 3.8

    # use h5py to open specified group(s) in the h5 file
    # then read datasets from h5 file into memory for faster reading of data
    he_obj = HDFEOS(file_name)
    he_obj.open(print_msg=False)
    displacement_3d_matrix = he_obj.read(datasetName='displacement')
    mask = he_obj.read(datasetName='mask')
    if should_mask:
        print("Masking displacement")
        displacement_3d_matrix = mask_matrix(displacement_3d_matrix, mask)
    del mask

    dates = he_obj.dateList
    attributes = dict(he_obj.metadata)

    #file = h5py.File(file_name,  "r")
    #timeseries_group = file["HDFEOS"]["GRIDS"]["timeseries"]
    #displacement_3d_matrix = timeseries_group["observation"]["displacement"]

    # get attributes (stored at root) of UNAVCO timeseries file
    #attributes = dict(file.attrs)

    # in timeseries displacement_3d_matrix, there are datasets
    # need to get datasets with dates - strings that can be converted to integers
    #dates = displacement_3d_matrix.attrs["DATE_TIMESERIES"].split(" ")

    # list of the dates converted to decimal years
    decimal_dates = []

    # read datasets in the group into a dictionary of 2D arrays and initialize decimal dates
    timeseries_datasets = {}
    num_date = len(dates)
    for i in range(num_date):
        timeseries_datasets[dates[i]] = np.squeeze(
            displacement_3d_matrix[i, :, :])
        d = get_date(dates[i])
        decimal = get_decimal_date(d)
        decimal_dates.append(decimal)
    del displacement_3d_matrix

    #for displacement_2d_matrix in displacement_3d_matrix:
    #    dataset = displacement_2d_matrix[:]
    #    if should_mask:
    #        print("Masking " + dates[i])
    #        mask = timeseries_group["quality"]["mask"][:]
    #        dataset = mask_matrix(dataset, mask)
    #    timeseries_datasets[dates[i]] = dataset
    #    d = get_date(dates[i])
    #    decimal = get_decimal_date(d)
    #    decimal_dates.append(decimal)
    #    i += 1

    # close h5 file
    #file.close()

    path_list = path_name.split("/")
    folder_name = path_name.split("/")[len(path_list) - 1]

    try:  # create path for output
        os.mkdir(output_folder)
    except FileExistsError:
        print(output_folder + " already exists")

    # read and convert the datasets, then write them into json files and insert into database
    convert_data(attributes, decimal_dates, timeseries_datasets, dates,
                 output_folder, folder_name)

    # run tippecanoe command to get mbtiles file
    os.chdir(os.path.abspath(output_folder))
    os.system(
        "tippecanoe *.json -l chunk_1 -x d -pf -pk -Bg -d9 -D12 -g12 -r0 -o " +
        folder_name + ".mbtiles")

    # ---------------------------------------------------------------------------------------
    # check how long it took to read h5 file data and create json files
    end_time = time.perf_counter()  # time.clock() was removed in Python 3.8
    print("time elapsed: " + str(end_time - start_time))
    return
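main() relies on two helpers, get_date() and get_decimal_date(), defined elsewhere in the module. A plausible minimal sketch of what they compute, inferred from how they are used above (the real implementations may differ):

from datetime import datetime

def get_date(date_str):
    """Parse a YYYYMMDD string into a datetime object."""
    return datetime.strptime(date_str, '%Y%m%d')

def get_decimal_date(d):
    """Convert a datetime into a decimal year, e.g. 2019-07-02 -> ~2019.5."""
    start = datetime(d.year, 1, 1)
    end = datetime(d.year + 1, 1, 1)
    return d.year + (d - start).total_seconds() / (end - start).total_seconds()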
Example #7
def read_init_info(inps):
    # Time Series Info
    atr = readfile.read_attribute(inps.file[0])
    inps.key = atr['FILE_TYPE']
    if inps.key == 'timeseries':
        obj = timeseries(inps.file[0])
    elif inps.key == 'giantTimeseries':
        obj = giantTimeseries(inps.file[0])
    elif inps.key == 'HDFEOS':
        obj = HDFEOS(inps.file[0])
    else:
        raise ValueError('input file is {}, not a time-series file.'.format(inps.key))
    obj.open(print_msg=inps.print_msg)
    inps.seconds = atr.get('CENTER_LINE_UTC', 0)

    if not inps.file_label:
        inps.file_label = []
        for fname in inps.file:
            fbase = os.path.splitext(os.path.basename(fname))[0]
            fbase = fbase.replace('timeseries', '')
            inps.file_label.append(fbase)

    # default mask file
    if not inps.mask_file and 'msk' not in inps.file[0]:
        dir_name = os.path.dirname(inps.file[0])
        if 'Y_FIRST' in atr.keys():
            inps.mask_file = os.path.join(dir_name, 'geo_maskTempCoh.h5')
        else:
            inps.mask_file = os.path.join(dir_name, 'maskTempCoh.h5')
        if not os.path.isfile(inps.mask_file):
            inps.mask_file = None

    ## date info
    inps.date_list = obj.dateList
    inps.num_date = len(inps.date_list)
    if inps.start_date:
        inps.date_list = [i for i in inps.date_list if int(i) >= int(inps.start_date)]
    if inps.end_date:
        inps.date_list = [i for i in inps.date_list if int(i) <= int(inps.end_date)]
    inps.num_date = len(inps.date_list)
    inps.dates, inps.yearList = ptime.date_list2vector(inps.date_list)

    (inps.ex_date_list,
     inps.ex_dates,
     inps.ex_flag) = read_exclude_date(inps.ex_date_list, inps.date_list)

    # reference date/index
    if not inps.ref_date:
        inps.ref_date = atr.get('REF_DATE', None)
    if inps.ref_date:
        inps.ref_idx = inps.date_list.index(inps.ref_date)
    else:
        inps.ref_idx = None

    # date/index of interest for initial display
    if not inps.idx:
        if (not inps.ref_idx) or (inps.ref_idx < inps.num_date / 2.):
            inps.idx = inps.num_date - 2
        else:
            inps.idx = 2

    # Display Unit
    (inps.disp_unit,
     inps.unit_fac) = pp.scale_data2disp_unit(metadata=atr, disp_unit=inps.disp_unit)[1:3]

    # Map info - coordinate unit
    inps.coord_unit = atr.get('Y_UNIT', 'degrees').lower()

    # Read Error List
    inps.ts_plot_func = plot_ts_scatter
    inps.error_ts = None
    inps.ex_error_ts = None
    if inps.error_file:
        # assign plot function
        inps.ts_plot_func = plot_ts_errorbar

        # read error file
        error_fc = np.loadtxt(inps.error_file, dtype=bytes).astype(str)
        inps.error_ts = error_fc[:, 1].astype(float) * inps.unit_fac  # np.float was removed in NumPy

        # update error list with excluded dates
        if inps.ex_date_list:
            e_ts = inps.error_ts[:]
            inps.ex_error_ts = e_ts[inps.ex_flag == 0]
            inps.error_ts = e_ts[inps.ex_flag == 1]

    # Zero displacement for 1st acquisition
    if inps.zero_first:
        inps.zero_idx = min(0, np.min(np.where(inps.ex_flag)[0]))

    # default lookup table file and coordinate object
    if not inps.lookup_file:
        inps.lookup_file = ut.get_lookup_file('./inputs/geometryRadar.h5')
    inps.coord = ut.coordinate(atr, inps.lookup_file)

    ## size and lalo info
    inps.pix_box, inps.geo_box = subset.subset_input_dict2box(vars(inps), atr)
    inps.pix_box = inps.coord.check_box_within_data_coverage(inps.pix_box)
    inps.geo_box = inps.coord.box_pixel2geo(inps.pix_box)
    data_box = (0, 0, int(atr['WIDTH']), int(atr['LENGTH']))
    vprint('data   coverage in y/x: '+str(data_box))
    vprint('subset coverage in y/x: '+str(inps.pix_box))
    vprint('data   coverage in lat/lon: '+str(inps.coord.box_pixel2geo(data_box)))
    vprint('subset coverage in lat/lon: '+str(inps.geo_box))
    vprint('------------------------------------------------------------------------')

    # calculate multilook_num
    # ONLY IF:
    #   inps.multilook is True (no --nomultilook input) AND
    #   inps.multilook_num ==1 (no --multilook-num input)
    # Note: inps.multilook is used for this check ONLY
    # Note: multilooking is only applied to the 3D data cubes and their related operations:
    # e.g. spatial indexing, referencing, etc. All the other variables are in the original grid
    # so that users get the same result as the non-multilooked version.
    if inps.multilook and inps.multilook_num == 1:
        inps.multilook_num = pp.auto_multilook_num(inps.pix_box, inps.num_date,
                                                   max_memory=inps.maxMemory,
                                                   print_msg=inps.print_msg)

    ## reference pixel
    if not inps.ref_lalo and 'REF_LAT' in atr.keys():
        inps.ref_lalo = (float(atr['REF_LAT']), float(atr['REF_LON']))
    if inps.ref_lalo:
        # set longitude to [-180, 180)
        if inps.coord_unit.lower().startswith('deg') and inps.ref_lalo[1] >= 180.:
            inps.ref_lalo[1] -= 360.
        # ref_lalo --> ref_yx if not set in cmd
        if not inps.ref_yx:
            inps.ref_yx = inps.coord.geo2radar(inps.ref_lalo[0], inps.ref_lalo[1], print_msg=False)[0:2]

    # use REF_Y/X if ref_yx not set in cmd
    if not inps.ref_yx and 'REF_Y' in atr.keys():
        inps.ref_yx = (int(atr['REF_Y']), int(atr['REF_X']))

    # ref_yx --> ref_lalo if in geo-coord
    # for plotting purpose only
    if inps.ref_yx and 'Y_FIRST' in atr.keys():
        inps.ref_lalo = inps.coord.radar2geo(inps.ref_yx[0], inps.ref_yx[1], print_msg=False)[0:2]

    # do not plot native reference point if it's out of the coverage due to subset
    if (inps.ref_yx and 'Y_FIRST' in atr.keys()
        and inps.ref_yx == (int(atr.get('REF_Y',-999)), int(atr.get('REF_X',-999)))
        and not (    inps.pix_box[0] <= inps.ref_yx[1] < inps.pix_box[2]
                 and inps.pix_box[1] <= inps.ref_yx[0] < inps.pix_box[3])):
        inps.disp_ref_pixel = False
        print('the native REF_Y/X is out of subset box, thus do not display')

    ## initial pixel coord
    if inps.lalo:
        inps.yx = inps.coord.geo2radar(inps.lalo[0], inps.lalo[1], print_msg=False)[0:2]
    try:
        inps.lalo = inps.coord.radar2geo(inps.yx[0], inps.yx[1], print_msg=False)[0:2]
    except Exception:
        inps.lalo = None

    ## figure settings
    # Flip up-down / left-right
    if inps.auto_flip:
        inps.flip_lr, inps.flip_ud = pp.auto_flip_direction(atr, print_msg=inps.print_msg)

    # Transparency - Alpha
    if not inps.transparency:
        # Auto adjust transparency value when showing shaded relief DEM
        if inps.dem_file and inps.disp_dem_shade:
            inps.transparency = 0.7
        else:
            inps.transparency = 1.0

    ## display unit and wrap
    # if wrap_step == 2*np.pi (default value), set disp_unit_img = radian;
    # otherwise set disp_unit_img = disp_unit
    inps.disp_unit_img = inps.disp_unit
    if inps.wrap:
        inps.range2phase = -4. * np.pi / float(atr['WAVELENGTH'])
        if   'cm' == inps.disp_unit.split('/')[0]:   inps.range2phase /= 100.
        elif 'mm' == inps.disp_unit.split('/')[0]:   inps.range2phase /= 1000.
        elif 'm'  == inps.disp_unit.split('/')[0]:   inps.range2phase /= 1.
        else:
            raise ValueError('un-recognized display unit: {}'.format(inps.disp_unit))

        if (inps.wrap_range[1] - inps.wrap_range[0]) == 2*np.pi:
            inps.disp_unit_img = 'radian'
        inps.vlim = inps.wrap_range
    inps.cbar_label = 'Displacement [{}]'.format(inps.disp_unit_img)

    ## fit a suite of time func to the time series
    inps.model, inps.num_param = ts2vel.read_inps2model(inps, date_list=inps.date_list)

    # dense TS for plotting
    inps.date_list_fit = ptime.get_date_range(inps.date_list[0], inps.date_list[-1])
    inps.dates_fit = ptime.date_list2vector(inps.date_list_fit)[0]
    inps.G_fit = time_func.get_design_matrix4time_func(
        date_list=inps.date_list_fit,
        model=inps.model,
        seconds=inps.seconds)

    return inps, atr
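The wrap block above converts displacement back to radians via range2phase = -4*pi/wavelength, rescaled by the display unit. A small worked example of that conversion with a hypothetical C-band wavelength:

import numpy as np

wavelength = 0.0555  # hypothetical C-band wavelength in meters
range2phase = -4. * np.pi / wavelength  # radians per meter of range change
range2phase /= 100.                     # data displayed in cm -> radians per cm

disp_cm = 1.0                           # 1 cm of range increase
print('{:.2f} rad'.format(disp_cm * range2phase))  # ~ -2.26 rad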
Example #8
def read_init_info(inps):
    # Time Series Info
    ts_file0 = inps.file[0]
    atr = readfile.read_attribute(ts_file0)
    inps.key = atr['FILE_TYPE']
    if inps.key == 'timeseries':
        obj = timeseries(ts_file0)
    elif inps.key == 'giantTimeseries':
        obj = giantTimeseries(ts_file0)
    elif inps.key == 'HDFEOS':
        obj = HDFEOS(ts_file0)
    else:
        raise ValueError('input file is {}, not a time-series file.'.format(inps.key))
    obj.open(print_msg=inps.print_msg)

    if not inps.file_label:
        inps.file_label = [str(i) for i in list(range(len(inps.file)))]

    # default mask file
    if not inps.mask_file and 'masked' not in ts_file0:
        dir_name = os.path.dirname(ts_file0)
        if 'Y_FIRST' in atr.keys():
            inps.mask_file = os.path.join(dir_name, 'geo_maskTempCoh.h5')
        else:
            inps.mask_file = os.path.join(dir_name, 'maskTempCoh.h5')
        if not os.path.isfile(inps.mask_file):
            inps.mask_file = None

    # date info
    inps.date_list = obj.dateList
    inps.num_date = len(inps.date_list)
    if inps.start_date:
        inps.date_list = [
            i for i in inps.date_list if int(i) >= int(inps.start_date)
        ]
    if inps.end_date:
        inps.date_list = [
            i for i in inps.date_list if int(i) <= int(inps.end_date)
        ]
    inps.num_date = len(inps.date_list)
    inps.dates, inps.yearList = ptime.date_list2vector(inps.date_list)
    (inps.ex_date_list, inps.ex_dates,
     inps.ex_flag) = read_exclude_date(inps.ex_date_list, inps.date_list)

    # initial display index
    #if atr['REF_DATE'] in inps.date_list:
    #    inps.ref_idx = inps.date_list.index(atr['REF_DATE'])
    #else:
    #    inps.ref_idx = 0

    if inps.ref_date:
        inps.ref_idx = inps.date_list.index(inps.ref_date)
    else:
        inps.ref_idx = 3

    if not inps.idx:
        if inps.ref_idx < inps.num_date / 2.:
            inps.idx = inps.num_date - 3
        else:
            inps.idx = 3

    # Display Unit
    (inps.disp_unit,
     inps.unit_fac) = pp.scale_data2disp_unit(metadata=atr,
                                              disp_unit=inps.disp_unit)[1:3]

    # Map info - coordinate unit
    inps.coord_unit = atr.get('Y_UNIT', 'degrees').lower()

    # Read Error List
    inps.ts_plot_func = plot_ts_scatter
    inps.error_ts = None
    inps.ex_error_ts = None
    if inps.error_file:
        # assign plot function
        inps.ts_plot_func = plot_ts_errorbar
        # read error file
        error_fc = np.loadtxt(inps.error_file, dtype=bytes).astype(str)
        inps.error_ts = error_fc[:, 1].astype(float) * inps.unit_fac  # np.float was removed in NumPy
        # update error list with excluded dates
        if inps.ex_date_list:
            e_ts = inps.error_ts[:]
            inps.ex_error_ts = e_ts[inps.ex_flag == 0]
            inps.error_ts = e_ts[inps.ex_flag == 1]

    # Zero displacement for 1st acquisition
    if inps.zero_first:
        inps.zero_idx = min(0, np.min(np.where(inps.ex_flag)[0]))

    # default lookup table file
    if not inps.lookup_file:
        inps.lookup_file = ut.get_lookup_file('./inputs/geometryRadar.h5')
    inps.coord = ut.coordinate(atr, inps.lookup_file)

    # size and lalo info
    inps.pix_box, inps.geo_box = subset.subset_input_dict2box(vars(inps), atr)
    inps.pix_box = inps.coord.check_box_within_data_coverage(inps.pix_box)
    inps.geo_box = inps.coord.box_pixel2geo(inps.pix_box)
    data_box = (0, 0, int(atr['WIDTH']), int(atr['LENGTH']))
    vprint('data   coverage in y/x: ' + str(data_box))
    vprint('subset coverage in y/x: ' + str(inps.pix_box))
    vprint('data   coverage in lat/lon: ' +
           str(inps.coord.box_pixel2geo(data_box)))
    vprint('subset coverage in lat/lon: ' + str(inps.geo_box))
    vprint(
        '------------------------------------------------------------------------'
    )

    # reference pixel
    if not inps.ref_lalo and 'REF_LAT' in atr.keys():
        inps.ref_lalo = (float(atr['REF_LAT']), float(atr['REF_LON']))
    if inps.ref_lalo:
        if inps.ref_lalo[1] > 180.:
            inps.ref_lalo[1] -= 360.
        inps.ref_yx = inps.coord.geo2radar(inps.ref_lalo[0],
                                           inps.ref_lalo[1],
                                           print_msg=False)[0:2]
    if not inps.ref_yx and 'REF_Y' in atr.keys():
        inps.ref_yx = [int(atr['REF_Y']), int(atr['REF_X'])]

    # Initial Pixel Coord
    if inps.lalo:
        inps.yx = inps.coord.geo2radar(inps.lalo[0],
                                       inps.lalo[1],
                                       print_msg=False)[0:2]
    try:
        inps.lalo = inps.coord.radar2geo(inps.yx[0],
                                         inps.yx[1],
                                         print_msg=False)[0:2]
    except Exception:
        inps.lalo = None

    # Flip up-down / left-right
    if inps.auto_flip:
        inps.flip_lr, inps.flip_ud = pp.auto_flip_direction(
            atr, print_msg=inps.print_msg)

    # Transparency - Alpha
    if not inps.transparency:
        # Auto adjust transparency value when showing shaded relief DEM
        if inps.dem_file and inps.disp_dem_shade:
            inps.transparency = 0.7
        else:
            inps.transparency = 1.0

    # display unit and wrap
    # if wrap_step == 2*np.pi (default value), set disp_unit_img = radian;
    # otherwise set disp_unit_img = disp_unit
    inps.disp_unit_img = inps.disp_unit
    if inps.wrap:
        inps.range2phase = -4. * np.pi / float(atr['WAVELENGTH'])
        if 'cm' == inps.disp_unit.split('/')[0]: inps.range2phase /= 100.
        elif 'mm' == inps.disp_unit.split('/')[0]: inps.range2phase /= 1000.
        elif 'm' == inps.disp_unit.split('/')[0]: inps.range2phase /= 1.
        else:
            raise ValueError('un-recognized display unit: {}'.format(
                inps.disp_unit))

        if (inps.wrap_range[1] - inps.wrap_range[0]) == 2 * np.pi:
            inps.disp_unit_img = 'radian'
        inps.vlim = inps.wrap_range
    inps.cbar_label = 'Displacement [{}]'.format(inps.disp_unit_img)
    return inps, atr
Example #9
def get_slice_list(fname):
    """Get list of 2D slice existed in file (for display)"""
    fbase, fext = os.path.splitext(os.path.basename(fname))
    fext = fext.lower()
    atr = read_attribute(fname)
    k = atr['FILE_TYPE']

    global slice_list
    # HDF5 Files
    if fext in ['.h5', '.he5']:
        with h5py.File(fname, 'r') as f:
            d1_list = [i for i in f.keys() if isinstance(f[i], h5py.Dataset)]
        if k == 'timeseries' and k in d1_list:
            obj = timeseries(fname)
            obj.open(print_msg=False)
            slice_list = obj.sliceList

        elif k in ['geometry'] and k not in d1_list:
            obj = geometry(fname)
            obj.open(print_msg=False)
            slice_list = obj.sliceList

        elif k in ['ifgramStack']:
            obj = ifgramStack(fname)
            obj.open(print_msg=False)
            slice_list = obj.sliceList

        elif k in ['HDFEOS']:
            obj = HDFEOS(fname)
            obj.open(print_msg=False)
            slice_list = obj.sliceList

        elif k in ['giantTimeseries']:
            obj = giantTimeseries(fname)
            obj.open(print_msg=False)
            slice_list = obj.sliceList

        elif k in ['giantIfgramStack']:
            obj = giantIfgramStack(fname)
            obj.open(print_msg=False)
            slice_list = obj.sliceList

        else:
            ## Find slice by walking through the file structure
            length, width = int(atr['LENGTH']), int(atr['WIDTH'])

            def get_hdf5_2d_dataset(name, obj):
                global slice_list
                if isinstance(obj, h5py.Dataset) and obj.shape[-2:] == (length,
                                                                        width):
                    if obj.ndim == 2:
                        slice_list.append(name)
                    else:
                        warnings.warn(
                            'file has un-defined {}D dataset: {}'.format(
                                obj.ndim, name))

            slice_list = []
            with h5py.File(fname, 'r') as f:
                f.visititems(get_hdf5_2d_dataset)

    # Binary Files
    else:
        if fext.lower() in ['.trans', '.utm_to_rdc']:
            slice_list = ['rangeCoord', 'azimuthCoord']
        elif fbase.startswith('los'):
            slice_list = ['incidenceAngle', 'azimuthAngle']
        elif atr.get('number_bands', '1') == '2' and 'unw' not in k:
            slice_list = ['band1', 'band2']
        else:
            slice_list = ['']
    return slice_list
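A hedged usage sketch of get_slice_list(); the file names and the slice names in the comments are hypothetical:

# HDF5 time-series file: one slice per acquisition date
print(get_slice_list('timeseries.h5'))
# e.g. ['timeseries-20190101', 'timeseries-20190113', ...]

# geometry file: one slice per 2D dataset
print(get_slice_list('inputs/geometryRadar.h5'))
# e.g. ['height', 'incidenceAngle', 'slantRangeDistance', ...]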
Example #10
def read_data(inps):
    # metadata
    atr = readfile.read_attribute(inps.file)

    if 'WAVELENGTH' in atr.keys():
        range2phase = -4 * np.pi / float(atr['WAVELENGTH'])

    # change reference pixel
    if inps.ref_lalo:
        if 'Y_FIRST' in atr.keys():
            coord = ut.coordinate(atr)
            ref_y, ref_x = coord.geo2radar(inps.ref_lalo[0],
                                           inps.ref_lalo[1])[0:2]
            inps.ref_yx = [ref_y, ref_x]
        else:
            raise ValueError(
                "input file is not geocoded --> reference point in lat/lon is NOT supported"
            )

    if inps.ref_yx:
        atr['REF_Y'] = inps.ref_yx[0]
        atr['REF_X'] = inps.ref_yx[1]
        if 'Y_FIRST' in atr.keys():
            coord = ut.coordinate(atr)
            ref_lat, ref_lon = coord.radar2geo(inps.ref_yx[0],
                                               inps.ref_yx[1])[0:2]
            atr['REF_LAT'] = ref_lat
            atr['REF_LON'] = ref_lon
        print('change reference point to y/x: {}'.format(inps.ref_yx))

    # various file types
    print('read {} from file {}'.format(inps.dset, inps.file))
    k = atr['FILE_TYPE']
    if k == 'velocity':
        # read/prepare data
        data = readfile.read(inps.file)[0]

        # velocity to displacement
        print('convert velocity to displacement for {}'.format(atr['DATE12']))
        date1, date2 = atr['DATE12'].split('_')
        dt1, dt2 = ptime.date_list2vector([date1, date2])[0]
        data *= (dt2 - dt1).days / 365.25

        # displacement to phase
        print('convert displacement to phase in radian')
        data *= range2phase

        if inps.ref_yx:
            data -= data[inps.ref_yx[0], inps.ref_yx[1]]

        # metadata
        atr['FILE_TYPE'] = '.unw'
        atr['UNIT'] = 'radian'

        # output filename
        if not inps.outfile:
            inps.outfile = os.path.join(os.path.dirname(inps.file),
                                        '{}.unw'.format(atr['DATE12']))

    elif k == 'timeseries':
        # date1 and date2
        if '_' in inps.dset:
            date1, date2 = ptime.yyyymmdd(inps.dset.split('_'))
        else:
            date1 = atr['REF_DATE']
            date2 = ptime.yyyymmdd(inps.dset)

        # read/prepare data
        data = readfile.read(inps.file, datasetName=date2)[0]
        data -= readfile.read(inps.file, datasetName=date1)[0]
        print('converting range to phase')
        data *= range2phase
        if inps.ref_yx:
            data -= data[inps.ref_yx[0], inps.ref_yx[1]]

        # metadata
        atr['DATE'] = date1[2:8]
        atr['DATE12'] = '{}-{}'.format(date1[2:8], date2[2:8])
        atr['FILE_TYPE'] = '.unw'
        atr['UNIT'] = 'radian'

        # output filename
        if not inps.outfile:
            inps.outfile = '{}_{}.unw'.format(date1, date2)
            if inps.file.startswith('geo_'):
                inps.outfile = 'geo_' + inps.outfile

    elif k == 'HDFEOS':
        dname = inps.dset.split('-')[0]

        # date1 and date2
        if dname == 'displacement':
            if '-' in inps.dset:
                suffix = inps.dset.split('-')[1]
                if '_' in suffix:
                    date1, date2 = ptime.yyyymmdd(suffix.split('_'))
                else:
                    date1 = atr['REF_DATE']
                    date2 = ptime.yyyymmdd(suffix)
            else:
                raise ValueError(
                    "No '-' in input dataset! It is required for {}".format(
                        dname))
        else:
            date_list = HDFEOS(inps.file).get_date_list()
            date1 = date_list[0]
            date2 = date_list[-1]
        date12 = '{}_{}'.format(date1, date2)

        # read / prepare data
        slice_list = readfile.get_slice_list(inps.file)
        if 'displacement' in inps.dset:
            # read/prepare data
            slice_name1 = view.check_dataset_input(
                slice_list, '{}-{}'.format(dname, date1))[0][0]
            slice_name2 = view.check_dataset_input(
                slice_list, '{}-{}'.format(dname, date2))[0][0]
            data = readfile.read(inps.file, datasetName=slice_name1)[0]
            data -= readfile.read(inps.file, datasetName=slice_name2)[0]
            print('converting range to phase')
            data *= range2phase
            if inps.ref_yx:
                data -= data[inps.ref_yx[0], inps.ref_yx[1]]
        else:
            slice_name = view.check_dataset_input(slice_list, inps.dset)[0][0]
            data = readfile.read(inps.file, datasetName=slice_name)[0]

        # metadata
        atr['DATE'] = date1[2:8]
        atr['DATE12'] = '{}-{}'.format(date1[2:8], date2[2:8])
        if dname == 'displacement':
            atr['FILE_TYPE'] = '.unw'
            atr['UNIT'] = 'radian'
        elif 'coherence' in dname.lower():
            atr['FILE_TYPE'] = '.cor'
            atr['UNIT'] = '1'
        elif dname == 'height':
            atr['FILE_TYPE'] = '.dem'
            atr['DATA_TYPE'] = 'int16'
        else:
            raise ValueError('unrecognized input dataset type: {}'.format(
                inps.dset))

        # output filename
        if not inps.outfile:
            inps.outfile = '{}{}'.format(date12, atr['FILE_TYPE'])

    elif k == 'ifgramStack':
        dname, date12 = inps.dset.split('-')
        date1, date2 = date12.split('_')

        # read / prepare data
        data = readfile.read(inps.file, datasetName=inps.dset)[0]
        if dname.startswith('unwrapPhase'):
            if 'REF_X' in atr.keys():
                data -= data[int(atr['REF_Y']), int(atr['REF_X'])]
                print('consider reference pixel in y/x: ({}, {})'.format(
                    atr['REF_Y'], atr['REF_X']))
            else:
                print('No REF_Y/X found.')

        # metadata
        atr['DATE'] = date1[2:8]
        atr['DATE12'] = '{}-{}'.format(date1[2:8], date2[2:8])
        if dname.startswith('unwrapPhase'):
            atr['FILE_TYPE'] = '.unw'
            atr['UNIT'] = 'radian'
        elif dname == 'coherence':
            atr['FILE_TYPE'] = '.cor'
            atr['UNIT'] = '1'
        elif dname == 'wrapPhase':
            atr['FILE_TYPE'] = '.int'
            atr['UNIT'] = 'radian'
        elif dname == 'connectComponent':
            atr['FILE_TYPE'] = '.conncomp'
            atr['UNIT'] = '1'
            atr['DATA_TYPE'] = 'byte'
        else:
            raise ValueError('unrecognized dataset type: {}'.format(inps.dset))

        # output filename
        if not inps.outfile:
            inps.outfile = '{}{}'.format(date12, atr['FILE_TYPE'])
            if inps.file.startswith('geo_'):
                inps.outfile = 'geo_' + inps.outfile

    else:
        # read data
        data = readfile.read(inps.file, datasetName=inps.dset)[0]

        if inps.outfile:
            fext = os.path.splitext(inps.outfile)[1]
            atr['FILE_TYPE'] = fext
        else:
            # metadata
            if 'coherence' in k.lower():
                atr['FILE_TYPE'] = '.cor'
            elif k in ['mask']:
                atr['FILE_TYPE'] = '.msk'
            elif k in ['geometry'] and inps.dset == 'height':
                if 'Y_FIRST' in atr.keys():
                    atr['FILE_TYPE'] = '.dem'
                else:
                    atr['FILE_TYPE'] = '.hgt'
                atr['UNIT'] = 'm'
            else:
                atr['FILE_TYPE'] = '.unw'

            inps.outfile = '{}{}'.format(
                os.path.splitext(inps.file)[0], atr['FILE_TYPE'])

    # mask
    if inps.mask_file:
        for m_file in inps.mask_file:
            print('mask data based on input file: {}'.format(m_file))
            mask = readfile.read(m_file)[0]
            mask *= ~np.isnan(data)
            data[mask == 0] = np.nan

    # get rid of starting . if output as hdf5 file
    if inps.outfile.endswith('.h5'):
        if atr['FILE_TYPE'].startswith('.'):
            atr['FILE_TYPE'] = atr['FILE_TYPE'][1:]

    atr['PROCESSOR'] = 'roipac'
    return data, atr, inps.outfile
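This version of read_data() additionally supports masking: each mask is combined with the data's own NaN pattern before pixels are blanked. A self-contained sketch of that logic on toy arrays:

import numpy as np

data = np.array([[1., 2.], [np.nan, 4.]])
mask = np.array([[1., 0.], [1., 1.]])

mask *= ~np.isnan(data)   # also drop pixels already NaN in the data
data[mask == 0] = np.nan  # blank everything outside the mask
print(data)               # [[ 1. nan]
                          #  [nan  4.]]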
Example #11
def get_slice_list(fname):
    """Get list of 2D slice existed in file (for display)"""
    fbase, fext = os.path.splitext(os.path.basename(fname))
    fext = fext.lower()
    atr = read_attribute(fname)
    k = atr['FILE_TYPE']

    global slice_list
    # HDF5 Files
    if fext in ['.h5', '.he5']:
        with h5py.File(fname, 'r') as f:
            d1_list = [i for i in f.keys() if isinstance(f[i], h5py.Dataset)]
        if k == 'timeseries' and k in d1_list:
            obj = timeseries(fname)
            obj.open(print_msg=False)
            slice_list = obj.sliceList

        elif k in ['geometry'] and k not in d1_list:
            obj = geometry(fname)
            obj.open(print_msg=False)
            slice_list = obj.sliceList

        elif k in ['ifgramStack']:
            obj = ifgramStack(fname)
            obj.open(print_msg=False)
            slice_list = obj.sliceList

        elif k in ['HDFEOS']:
            obj = HDFEOS(fname)
            obj.open(print_msg=False)
            slice_list = obj.sliceList

        elif k in ['giantTimeseries']:
            obj = giantTimeseries(fname)
            obj.open(print_msg=False)
            slice_list = obj.sliceList

        elif k in ['giantIfgramStack']:
            obj = giantIfgramStack(fname)
            obj.open(print_msg=False)
            slice_list = obj.sliceList

        else:
            ## Find slice by walking through the file structure
            length, width = int(atr['LENGTH']), int(atr['WIDTH'])
            def get_hdf5_2d_dataset(name, obj):
                global slice_list
                if isinstance(obj, h5py.Dataset) and obj.shape[-2:] == (length, width):
                    if obj.ndim == 2:
                        slice_list.append(name)
                    else:
                        warnings.warn('file has un-defined {}D dataset: {}'.format(obj.ndim, name))
            slice_list = []
            with h5py.File(fname, 'r') as f:
                f.visititems(get_hdf5_2d_dataset)

    # Binary Files
    else:
        if fext.lower() in ['.trans', '.utm_to_rdc']:
            slice_list = ['rangeCoord', 'azimuthCoord']
        elif fbase.startswith('los'):
            slice_list = ['incidenceAngle', 'azimuthAngle']
        elif atr.get('number_bands', '1') == '2' and 'unw' not in k:
            slice_list = ['band1', 'band2']
        else:
            slice_list = ['']
    return slice_list
def main():
    parser = build_parser()
    parseArgs = parser.parse_args()
    file_name = parseArgs.file
    output_folder = parseArgs.outputDir
    should_mask = True

    path_name_and_extension = os.path.basename(file_name).split(".")
    path_name = path_name_and_extension[0]
    # ---------------------------------------------------------------------------------------
    # start clock to track how long conversion process takes
    start_time = time.perf_counter()  # time.clock() was removed in Python 3.8

    # use h5py to open specified group(s) in the h5 file 
    # then read datasets from h5 file into memory for faster reading of data
    he_obj = HDFEOS(file_name)
    he_obj.open(print_msg=False)
    displacement_3d_matrix = he_obj.read(datasetName='displacement')
    mask = he_obj.read(datasetName='mask')
    if should_mask:
        print("Masking displacement")
        displacement_3d_matrix = mask_matrix(displacement_3d_matrix, mask)
    del mask

    dates = he_obj.dateList
    attributes = dict(he_obj.metadata)

    #file = h5py.File(file_name,  "r")
    #timeseries_group = file["HDFEOS"]["GRIDS"]["timeseries"]
    #displacement_3d_matrix = timeseries_group["observation"]["displacement"]

    # get attributes (stored at root) of UNAVCO timeseries file
    #attributes = dict(file.attrs)

    # in timeseries displacement_3d_matrix, there are datasets
    # need to get datasets with dates - strings that can be converted to integers
    #dates = displacement_3d_matrix.attrs["DATE_TIMESERIES"].split(" ")

    # list of the dates converted to decimal years
    decimal_dates = []

    # read datasets in the group into a dictionary of 2D arrays and initialize decimal dates
    timeseries_datasets = {}
    num_date = len(dates)
    for i in range(num_date):
        timeseries_datasets[dates[i]] = np.squeeze(displacement_3d_matrix[i, :, :])
        d = get_date(dates[i])
        decimal = get_decimal_date(d)
        decimal_dates.append(decimal)
    del displacement_3d_matrix

    #for displacement_2d_matrix in displacement_3d_matrix:
    #    dataset = displacement_2d_matrix[:]
    #    if should_mask:
    #        print("Masking " + dates[i])
    #        mask = timeseries_group["quality"]["mask"][:]
    #        dataset = mask_matrix(dataset, mask)
    #    timeseries_datasets[dates[i]] = dataset
    #    d = get_date(dates[i])
    #    decimal = get_decimal_date(d)
    #    decimal_dates.append(decimal)
    #    i += 1

    # close h5 file
    #file.close()

    path_list = path_name.split("/")
    folder_name = path_name.split("/")[len(path_list)-1]

    try:  # create path for output
        os.mkdir(output_folder)
    except FileExistsError:
        print(output_folder + " already exists")

    # read and convert the datasets, then write them into json files and insert into database
    convert_data(attributes, decimal_dates, timeseries_datasets, dates, output_folder, folder_name)

    # run tippecanoe command to get mbtiles file
    os.chdir(os.path.abspath(output_folder))
    os.system("tippecanoe *.json -l chunk_1 -x d -pf -pk -Bg -d9 -D12 -g12 -r0 -o " + folder_name + ".mbtiles")

    # ---------------------------------------------------------------------------------------
    # check how long it took to read h5 file data and create json files
    end_time = time.perf_counter()  # time.clock() was removed in Python 3.8
    print("time elapsed: " + str(end_time - start_time))
    return