Example #1
# module-level imports assumed from the surrounding MintPy source file;
# plot_sensingMid_variation() is assumed to be defined in the same file
import os
import datetime as dt
import h5py
from mintpy.objects import timeseries
from mintpy.utils import ptime, readfile


def get_datetime_list(ts_file, date_wise_acq_time=False):
    """Prepare exact datetime for each acquisition in the time-series file.

    Parameters: ts_file            - str, path of the time-series HDF5 file
                date_wise_acq_time - bool, use the exact date-wise acquisition time
    Returns:    sensingMid         - list of datetime.datetime objects
    """
    print('\nprepare datetime info for each acquisition')

    ts_file = os.path.abspath(ts_file)
    date_list = timeseries(ts_file).get_date_list()

    proj_dir = os.path.dirname(os.path.dirname(ts_file))
    xml_dirs = [os.path.join(proj_dir, i) for i in ['reference', 'secondarys']]

    # list of existing dataset names
    with h5py.File(ts_file, 'r') as f:
        ds_names = [i for i in f.keys() if isinstance(f[i], h5py.Dataset)]

    dt_name = 'sensingMid'
    if dt_name in ds_names:
        # opt 1. read sensingMid if exists
        print('read exact datetime info from /{} in file: {}'.format(
            dt_name, os.path.basename(ts_file)))
        with h5py.File(ts_file, 'r') as f:
            sensingMidStr = [i.decode('utf-8') for i in f[dt_name][:]]

        # convert string to datetime object
        date_str_format = ptime.get_date_str_format(sensingMidStr[0])
        sensingMid = [
            dt.datetime.strptime(i, date_str_format) for i in sensingMidStr
        ]

    elif date_wise_acq_time and all(os.path.isdir(i) for i in xml_dirs):
        # opt 2. read sensingMid in xml files
        print(
            'read exact datetime info in XML files from ISCE-2/topsStack results in directory:',
            proj_dir)
        from mintpy.utils import isce_utils
        sensingMid = isce_utils.get_sensing_datetime_list(
            proj_dir, date_list=date_list)[0]

        # plot
        plot_sensingMid_variation(sensingMid)

    else:
        # opt 3. use constant time of the day for all acquisitions
        msg = 'Use the same time of the day for all acquisitions from CENTER_LINE_UTC\n'
        msg += 'With <= 1 min variation for Sentinel-1A/B, for example, this simplification has negligible impact on SET calculation.'
        print(msg)
        atr = readfile.read_attribute(ts_file)
        utc_sec = dt.timedelta(seconds=float(atr['CENTER_LINE_UTC']))
        sensingMid = [
            dt.datetime.strptime(i, '%Y%m%d') + utc_sec for i in date_list
        ]

    return sensingMid
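
A minimal usage sketch for the function above (the file name is hypothetical; options 1 and 3 only need the HDF5 file itself, while option 2 also needs the ISCE-2/topsStack reference/secondarys XML directories two levels up):

# 'timeseries.h5' is a hypothetical MintPy time-series file
sensingMid = get_datetime_list('timeseries.h5', date_wise_acq_time=False)
print(sensingMid[0])  # datetime.datetime object of the first acquisition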
Example #2
# module-level imports assumed from the surrounding MintPy script;
# cmd_line_parse() and read_inps2box() are assumed to be defined in the same file
import os
import numpy as np
from mintpy.utils import ptime, readfile, writefile


def main(iargs=None):
    inps = cmd_line_parse(iargs)

    atr = readfile.read_attribute(inps.file[0])
    box = read_inps2box(inps, atr)
    num_file = len(inps.file)

    # initiate output
    dsDict = {}
    if num_file == 1:
        dsDict[inps.dset_name] = np.zeros((box[3]-box[1], box[2]-box[0]), dtype=inps.data_type)
    else:
        dsDict[inps.dset_name] = np.zeros((num_file, box[3]-box[1], box[2]-box[0]), dtype=inps.data_type)

        # add "date" dataset for timeseries
        if inps.dset_name and inps.dset_name == 'timeseries':
            date_list = [os.path.basename(os.path.dirname(x)) for x in inps.file]
            date_str_format = ptime.get_date_str_format(date_list[0])
            dsDict['date'] = np.array(date_list, dtype=np.bytes_)  # np.bytes_ replaces np.string_, removed in NumPy 2.0

    # metadata
    if inps.metadata:
        print('add/update the following metadata:')
        for meta_str in inps.metadata:
            key, value = meta_str.split('=')
            atr[key] = value
            print(f'{key} : {value}')

    # read
    for i, fname in enumerate(inps.file):
        print(f'reading file {i+1} / {num_file}: {fname}')
        ds_names = readfile.get_dataset_list(fname)
        ds_name = inps.dset_name if inps.dset_name in ds_names else None
        data = readfile.read(fname, datasetName=ds_name, box=box)[0]
        if num_file == 1:
            dsDict[inps.dset_name][:] = data
        else:
            dsDict[inps.dset_name][i, :, :] = data

    # write
    atr['LENGTH'] = box[3] - box[1]
    atr['WIDTH'] = box[2] - box[0]
    writefile.write(dsDict, out_file=inps.outfile, metadata=atr)

    return inps.outfile
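
The stacking pattern above boils down to building a dsDict of numpy arrays and handing it to writefile.write(); a minimal sketch with hypothetical sizes, dates, and output name (a real MintPy time-series file would carry many more metadata keys):

import numpy as np
from mintpy.utils import writefile

date_list = ['20150521', '20150614']
dsDict = {
    'timeseries': np.zeros((len(date_list), 100, 200), dtype=np.float32),  # (num_date, length, width)
    'date'      : np.array(date_list, dtype=np.bytes_),
}
meta = {'LENGTH': 100, 'WIDTH': 200}
writefile.write(dsDict, out_file='timeseries.h5', metadata=meta)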
Example #3
# module-level imports assumed from mintpy/utils/isce_utils.py;
# load_product() is assumed to be defined in the same module
import os
import glob
import numpy as np
from mintpy.utils import ptime


def get_sensing_datetime_list(proj_dir, date_list=None):
    """Get the sensing datetime objects from ISCE stack results.
    It assumes the default directory structure from topsStack, as below:
    /proj_dir
        /reference/IW*.xml
        /secondarys
            /20150521/IW*.xml
            /20150614/IW*.xml
            ...
            /20210113/IW*.xml

    Parameters: proj_dir     - str, path to the root directory of stack processing
                date_list    - list of str, date strings of the acquisitions to keep (optional)
    Returns:    sensingMid   - list of datetime.datetime objects
                sensingStart - list of datetime.datetime objects
                sensingStop  - list of datetime.datetime objects
    """
    # determine xml file basename
    ref_fname = glob.glob(os.path.join(proj_dir, 'reference', 'IW*.xml'))[0]
    fbase = os.path.basename(ref_fname)

    # get xml files for all acquisitions
    sec_fnames = sorted(glob.glob(os.path.join(proj_dir, 'secondarys', '*', fbase)))
    fnames = [ref_fname] + sec_fnames
    num_file = len(fnames)

    # loop to read file one by one
    sensingStart = []
    sensingStop = []
    for i, fname in enumerate(fnames):
        print('[{}/{}] read {}'.format(i+1, num_file, fname))
        obj = load_product(fname)
        sensingStart.append(obj.bursts[0].sensingStart)
        sensingStop.append(obj.bursts[-1].sensingStop)

    sensingStart = sorted(sensingStart)
    sensingStop  = sorted(sensingStop)

    # sensingStart/Stop --> sensingMid
    sensingMid = [i + (j - i)/2 for i, j in zip(sensingStart, sensingStop)]

    # round to the nearest second
    print('round sensingStart/Stop/Mid to the nearest second.')
    sensingStart = [ptime.round_seconds(i) for i in sensingStart]
    sensingStop  = [ptime.round_seconds(i) for i in sensingStop]
    sensingMid   = [ptime.round_seconds(i) for i in sensingMid]

    if date_list is not None:
        date_str_format = ptime.get_date_str_format(date_list[0])
        date_list_out = [i.strftime(date_str_format) for i in sensingMid]

        # check possible missing dates
        dates_missing = [i for i in date_list if i not in date_list_out]
        if dates_missing:
            raise ValueError('The following dates are missing:\n{}'.format(dates_missing))

        # prune dates not-needed
        flag = np.array([i in date_list for i in date_list_out], dtype=np.bool_)
        if np.sum(flag) > 0:
            sensingMid    = np.array(sensingMid)[flag].tolist()
            sensingStart  = np.array(sensingStart)[flag].tolist()
            sensingStop   = np.array(sensingStop)[flag].tolist()
            dates_removed = np.array(date_list_out)[~flag].tolist()
            print('The following dates are not needed and removed:\n{}'.format(dates_removed))

    return sensingMid, sensingStart, sensingStop
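
Example #1 above already shows the typical call through mintpy.utils.isce_utils; a standalone sketch with a hypothetical project directory and date list:

from mintpy.utils import isce_utils

proj_dir = '/data/SanFranSenDT42'  # hypothetical topsStack project directory
sensingMid, sensingStart, sensingStop = isce_utils.get_sensing_datetime_list(
    proj_dir,
    date_list=['20150521', '20150614'],
)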