Code example #1
    def generate_temporal_coherence_mask(self):
        """Generate reliable pixel mask from temporal coherence"""
        geom_file = ut.check_loaded_dataset(self.workDir, print_msg=False)[2]
        tcoh_file = 'temporalCoherence.h5'
        mask_file = 'maskTempCoh.h5'
        tcoh_min = self.template['mintpy.networkInversion.minTempCoh']

        scp_args = '{} -m {} -o {} --shadow {}'.format(tcoh_file, tcoh_min,
                                                       mask_file, geom_file)
        print('generate_mask.py', scp_args)

        # update mode: run only if:
        # 1) output file exists and newer than input file, AND
        # 2) all config keys are the same
        config_keys = ['mintpy.networkInversion.minTempCoh']
        print('update mode: ON')
        flag = 'skip'
        if ut.run_or_skip(out_file=mask_file,
                          in_file=tcoh_file,
                          print_msg=False) == 'run':
            flag = 'run'
        else:
            print(
                '1) output file: {} already exists and newer than input file: {}'
                .format(mask_file, tcoh_file))
            atr = readfile.read_attribute(mask_file)
            if any(
                    str(self.template[i]) != atr.get(i, 'False')
                    for i in config_keys):
                flag = 'run'
                print(
                    '2) NOT all key configuration parameters are the same: {}'.
                    format(config_keys))
            else:
                print('2) all key configuration parameters are the same: {}'.
                      format(config_keys))
        print('run or skip: {}'.format(flag))

        if flag == 'run':
            mintpy.generate_mask.main(scp_args.split())
            # update configKeys
            atr = {}
            for key in config_keys:
                atr[key] = self.template[key]
            ut.add_attribute(mask_file, atr)

        # check number of pixels selected in mask file for following analysis
        num_pixel = np.sum(readfile.read(mask_file)[0] != 0.)
        print('number of reliable pixels: {}'.format(num_pixel))

        min_num_pixel = float(
            self.template['mintpy.networkInversion.minNumPixel'])
        if num_pixel < min_num_pixel:
            msg = "Not enough reliable pixels (minimum of {}). ".format(
                int(min_num_pixel))
            msg += "Try the following:\n"
            msg += "1) Check the reference pixel and make sure it's not in areas with unwrapping errors\n"
            msg += "2) Check the network and make sure it's fully connected without subsets"
            raise RuntimeError(msg)
        return
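
The update-mode branch above defers to ut.run_or_skip, which decides from file timestamps whether the output is stale. A minimal sketch of that pattern, assuming a purely mtime-based check (a hypothetical simplification, not MintPy's actual implementation):

import os

def run_or_skip_sketch(out_file, in_file):
    """Return 'run' if out_file is missing or older than any input file,
    else 'skip'. Hypothetical simplification of ut.run_or_skip."""
    if not out_file or not os.path.isfile(out_file):
        return 'run'
    in_files = in_file if isinstance(in_file, (list, tuple)) else [in_file]
    t_out = os.path.getmtime(out_file)
    if any(os.path.getmtime(f) > t_out
           for f in in_files if f and os.path.isfile(f)):
        return 'run'
    return 'skip'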
Code example #2
def main(iargs=None):
    # check inputs
    inps = cmd_line_parse(iargs)

    # update mode
    if inps.update_mode and run_or_skip(inps) == 'skip':
        return inps.ifgram_file

    start_time = time.time()
    # run bridging
    run_unwrap_error_bridge(inps.ifgram_file,
                            water_mask_file=inps.waterMaskFile,
                            ramp_type=inps.ramp,
                            radius=inps.bridgePtsRadius,
                            dsNameIn=inps.datasetNameIn,
                            dsNameOut=inps.datasetNameOut)

    # config parameter
    if os.path.splitext(inps.ifgram_file)[1] in ['.h5', '.he5']:
        print('add/update the following configuration metadata to file:')
        config_metadata = dict()
        for key in configKeys:
            config_metadata[key_prefix+key] = str(vars(inps)[key])
        ut.add_attribute(inps.ifgram_file, config_metadata, print_msg=True)

    m, s = divmod(time.time()-start_time, 60)
    print('\ntime used: {:02.0f} mins {:02.1f} secs\nDone.'.format(m, s))
    return inps.ifgram_file
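
The elapsed-time report at the end uses divmod to split seconds into minutes and seconds. The same idiom in isolation, with a short sleep standing in for the real processing:

import time

start_time = time.time()
time.sleep(0.1)                      # stand-in for the actual processing
m, s = divmod(time.time() - start_time, 60)
print('\ntime used: {:02.0f} mins {:02.1f} secs\nDone.'.format(m, s))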
Code example #3
File: smallbaselineApp.py  Project: tukuan1992/PySAR
    def run_load_data(self, step_name):
        """Load InSAR stacks into HDF5 files in ./inputs folder.
        It 1) copies auxiliary files into the work directory (for Univ of Miami only)
           2) loads all interferogram stack files into the mintpy/inputs directory
           3) checks the loading result
           4) adds custom metadata (optional, for HDF-EOS5 format only)
        """
        # 1) copy aux files (optional)
        self._copy_aux_file()

        # 2) loading data
        scp_args = '--template {}'.format(self.templateFile)
        if self.customTemplateFile:
            scp_args += ' {}'.format(self.customTemplateFile)
        if self.projectName:
            scp_args += ' --project {}'.format(self.projectName)
        # run
        print("load_data.py", scp_args)
        mintpy.load_data.main(scp_args.split())
        os.chdir(self.workDir)

        # 3) check loading result
        load_complete, stack_file, geom_file = ut.check_loaded_dataset(self.workDir, print_msg=True)[0:3]

        # 3.1) output waterMask.h5
        water_mask_file = 'waterMask.h5'
        if 'waterMask' in readfile.get_dataset_list(geom_file):
            print('generate {} from {} for convenience'.format(water_mask_file, geom_file))
            if ut.run_or_skip(out_file=water_mask_file, in_file=geom_file) == 'run':
                water_mask, atr = readfile.read(geom_file, datasetName='waterMask')
                atr['FILE_TYPE'] = 'waterMask'
                writefile.write(water_mask, out_file=water_mask_file, metadata=atr)

        # 4) add custom metadata (optional)
        if self.customTemplateFile:
            print('updating {}, {} metadata based on custom template file: {}'.format(
                os.path.basename(stack_file),
                os.path.basename(geom_file),
                os.path.basename(self.customTemplateFile)))
            # use ut.add_attribute() instead of add_attribute.py because of
            # better control of special metadata, such as SUBSET_X/YMIN
            ut.add_attribute(stack_file, self.customTemplate)
            ut.add_attribute(geom_file, self.customTemplate)

        # 5) if not load_complete, plot and raise exception
        if not load_complete:
            # plot result if error occurred
            self.plot_result(print_aux=False, plot=plot)

            # go back to original directory
            print('Go back to directory:', self.cwd)
            os.chdir(self.cwd)

            # raise error
            msg = 'step {}: NOT all required datasets found, exit.'.format(step_name)
            raise RuntimeError(msg)
        return
Code example #4
    def run_load_data(self, step_name):
        """Load InSAR stacks into HDF5 files in ./inputs folder.
        It 1) copies auxiliary files into the work directory (for Univ of Miami only)
           2) loads all interferogram stack files into the mintpy/inputs directory
           3) checks the loading result
           4) adds custom metadata (optional, for HDF-EOS5 format only)
        """
        # 1) copy aux files (optional)
        self._copy_aux_file()

        # 2) loading data
        # compose list of input arguments
        # instead of using command line then split
        # to support path with whitespace
        iargs = ['--template', self.templateFile]
        if self.customTemplateFile:
            iargs += [self.customTemplateFile]
        if self.projectName:
            iargs += ['--project', self.projectName]

        # run command line
        print('\nload_data.py', ' '.join(iargs))
        mintpy.load_data.main(iargs)

        # come back to working directory
        os.chdir(self.workDir)

        # 3) check loading result
        load_complete, stack_file, geom_file = ut.check_loaded_dataset(self.workDir, print_msg=True)[0:3]

        # 4) add custom metadata (optional)
        if self.customTemplateFile:
            print('updating {}, {} metadata based on custom template file: {}'.format(
                os.path.basename(stack_file),
                os.path.basename(geom_file),
                os.path.basename(self.customTemplateFile)))
            # use ut.add_attribute() instead of add_attribute.py because of
            # better control of special metadata, such as SUBSET_X/YMIN
            ut.add_attribute(stack_file, self.customTemplate)
            ut.add_attribute(geom_file, self.customTemplate)

        # 5) if not load_complete, plot and raise exception
        if not load_complete:
            # plot result if error occurred
            self.plot_result(print_aux=False, plot=plot)

            # go back to original directory
            print('Go back to directory:', self.cwd)
            os.chdir(self.cwd)

            # raise error
            msg = 'step {}: NOT all required datasets found, exit.'.format(step_name)
            raise RuntimeError(msg)
        return
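
Example #4 replaces the string-plus-split pattern of example #3 with a list of arguments, and its comment explains why: splitting on whitespace breaks paths that contain spaces. A standalone illustration with a hypothetical template path:

# a hypothetical template path containing whitespace
template_file = '/data/my project/smallbaselineApp.cfg'

# string composition then .split() breaks the path apart ...
scp_args = '--template {}'.format(template_file)
print(scp_args.split())
# ['--template', '/data/my', 'project/smallbaselineApp.cfg']

# ... while a list of arguments keeps it intact
iargs = ['--template', template_file]
print(iargs)
# ['--template', '/data/my project/smallbaselineApp.cfg']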
Code example #5
def get_phase_linking_coherence_mask(template, work_dir):
    """
    Generate reliable pixel mask from temporal coherence
    functions = [generate_mask, readfile, run_or_skip, add_attribute]
    # from mintpy import generate_mask
    # from mintpy.utils import readfile
    # from mintpy.utils.utils import run_or_skip, add_attribute
    """

    tcoh_file = os.path.join(work_dir, 'temporalCoherence.h5')

    mask_file = os.path.join(work_dir, 'maskTempCoh.h5')

    tcoh_min = float(template['minopy.timeseries.minTempCoh'])

    scp_args = '{} -m {} --nonzero -o {} --update'.format(
        tcoh_file, tcoh_min, mask_file)
    print('generate_mask.py', scp_args)

    # update mode: run only if:
    # 1) output file exists and newer than input file, AND
    # 2) all config keys are the same

    print('update mode: ON')
    flag = 'skip'
    if ut.run_or_skip(out_file=mask_file, in_file=tcoh_file,
                      print_msg=False) == 'run':
        flag = 'run'

    print('run or skip: {}'.format(flag))

    if flag == 'run':
        generate_mask.main(scp_args.split())
        # update configKeys
        atr = {}
        atr['minopy.timeseries.minTempCoh'] = tcoh_min
        ut.add_attribute(mask_file, atr)

    # check number of pixels selected in mask file for following analysis
    #num_pixel = np.sum(readfile.read(mask_file)[0] != 0.)
    #print('number of reliable pixels: {}'.format(num_pixel))

    #min_num_pixel = float(template['mintpy.networkInversion.minNumPixel'])   # 100
    #if num_pixel < min_num_pixel:
    #    msg = "Not enough reliable pixels (minimum of {}). ".format(int(min_num_pixel))
    #    msg += "Try the following:\n"
    #    msg += "1) Check the reference pixel and make sure it's not in areas with unwrapping errors\n"
    #    msg += "2) Check the network and make sure it's fully connected without subsets"
    #    raise RuntimeError(msg)

    return
Code example #6
File: smallbaselineApp.py  Project: hfattahi/PySAR
    def generate_temporal_coherence_mask(self):
        """Generate reliable pixel mask from temporal coherence"""
        geom_file = ut.check_loaded_dataset(self.workDir, print_msg=False)[2]
        tcoh_file = 'temporalCoherence.h5'
        mask_file = 'maskTempCoh.h5'
        tcoh_min = self.template['mintpy.networkInversion.minTempCoh']

        scp_args = '{} -m {} -o {} --shadow {}'.format(tcoh_file, tcoh_min, mask_file, geom_file)
        print('generate_mask.py', scp_args)

        # update mode: run only if:
        # 1) output file exists and newer than input file, AND
        # 2) all config keys are the same
        config_keys = ['mintpy.networkInversion.minTempCoh']
        print('update mode: ON')
        flag = 'skip'
        if ut.run_or_skip(out_file=mask_file, in_file=tcoh_file, print_msg=False) == 'run':
            flag = 'run'
        else:
            print('1) output file: {} already exists and newer than input file: {}'.format(mask_file, tcoh_file))
            atr = readfile.read_attribute(mask_file)
            if any(str(self.template[i]) != atr.get(i, 'False') for i in config_keys):
                flag = 'run'
                print('2) NOT all key configuration parameters are the same: {}'.format(config_keys))
            else:
                print('2) all key configuration parameters are the same: {}'.format(config_keys))
        print('run or skip: {}'.format(flag))

        if flag == 'run':
            mintpy.generate_mask.main(scp_args.split())
            # update configKeys
            atr = {}
            for key in config_keys:
                atr[key] = self.template[key]
            ut.add_attribute(mask_file, atr)

        # check number of pixels selected in mask file for following analysis
        num_pixel = np.sum(readfile.read(mask_file)[0] != 0.)
        print('number of reliable pixels: {}'.format(num_pixel))

        min_num_pixel = float(self.template['mintpy.networkInversion.minNumPixel'])
        if num_pixel < min_num_pixel:
            msg = "Not enough reliable pixels (minimum of {}). ".format(int(min_num_pixel))
            msg += "Try the following:\n"
            msg += "1) Check the reference pixel and make sure it's not in areas with unwrapping errors\n"
            msg += "2) Check the network and make sure it's fully connected without subsets"
            raise RuntimeError(msg)
        return
Code example #7
File: add_attribute.py  Project: ehavazli/MintPy
def update_file_attribute(fname, atr_new):
    # Read Original Attributes
    atr = readfile.read_attribute(fname)
    print('update {} file attribute: {}'.format(atr['FILE_TYPE'], fname))

    ext = os.path.splitext(fname)[1]
    if ext in ['.h5', '.he5']:
        fname = ut.add_attribute(fname, atr_new)
    else:
        if not ut.update_attribute_or_not(atr_new, atr):
            print(
                'All updated (removed) attributes already exist (do not exist) and have the same value, skip update.'
            )
        else:
            for key, value in atr_new.items():
                if value == 'None':
                    # a value of 'None' marks the attribute for removal
                    atr.pop(key, None)
                else:
                    atr[key] = value

            rsc_file = '{}.rsc'.format(fname)
            print('writing >>> {}'.format(rsc_file))
            writefile.write_roipac_rsc(atr, out_file=rsc_file)
    return fname
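
For HDF5 files the first branch above hands the work to ut.add_attribute. A minimal sketch of what such an attribute update can look like with h5py, assuming the same 'None'-means-remove convention (a hypothetical simplification, not MintPy's actual helper):

import h5py

def add_attribute_sketch(fname, atr_new):
    """Update root-level attributes of an HDF5 file in place.
    Hypothetical simplification of ut.add_attribute: a value of 'None'
    removes the attribute, anything else is written as a string."""
    with h5py.File(fname, 'r+') as f:
        for key, value in atr_new.items():
            if value == 'None':
                f.attrs.pop(key, None)       # remove if present
            else:
                f.attrs[key] = str(value)
    return fname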
Code example #8
File: reference_point.py  Project: hfattahi/PySAR
def remove_reference_pixel(File):
    """Remove reference pixel info from input file"""
    print("remove REF_Y/X and/or REF_LAT/LON from file: "+File)
    atrDrop = {}
    for i in ['REF_X', 'REF_Y', 'REF_LAT', 'REF_LON']:
        atrDrop[i] = 'None'
    File = ut.add_attribute(File, atrDrop)
    return File
Code example #9
def remove_reference_pixel(File):
    """Remove reference pixel info from input file"""
    print("remove REF_Y/X and/or REF_LAT/LON from file: " + File)
    atrDrop = {}
    for i in ['REF_X', 'REF_Y', 'REF_LAT', 'REF_LON']:
        atrDrop[i] = 'None'
    File = ut.add_attribute(File, atrDrop)
    return File
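
Both versions of remove_reference_pixel build the drop dictionary with a loop; a dict comprehension is an equivalent one-liner, and the 'None' values tell ut.add_attribute to delete those keys. Hypothetical usage with a placeholder file name:

# equivalent one-liner for the loop above
atrDrop = {key: 'None' for key in ['REF_X', 'REF_Y', 'REF_LAT', 'REF_LON']}

# hypothetical usage; 'velocity.h5' is a placeholder file name
out_file = remove_reference_pixel('velocity.h5')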
Code example #10
    def run_save2hdfeos5(self, step_name):
        """Save displacement time-series and its aux data in geo coordinate into HDF-EOS5 format"""
        if self.template['mintpy.save.hdfEos5'] is True:
            # input
            ts_file = self.get_timeseries_filename(
                self.template)[step_name]['input']
            # Add attributes from custom template to timeseries file
            if self.customTemplate is not None:
                ut.add_attribute(ts_file, self.customTemplate)

            tcoh_file = 'temporalCoherence.h5'
            mask_file = 'geo_maskTempCoh.h5'
            geom_file = ut.check_loaded_dataset(self.workDir,
                                                print_msg=False)[2]
            if 'geo' in ts_file:
                tcoh_file = './geo/geo_temporalCoherence.h5'
                mask_file = './geo/geo_maskTempCoh.h5'
                geom_file = './geo/geo_{}'.format(os.path.basename(geom_file))

            # cmd
            print('--------------------------------------------')
            scp_args = '{f} -c {c} -m {m} -g {g} -t {t}'.format(
                f=ts_file,
                c=tcoh_file,
                m=mask_file,
                g=geom_file,
                t=self.templateFile)
            print('save_hdfeos5.py', scp_args)

            # output (check existing file)
            atr = readfile.read_attribute(ts_file)
            SAT = sensor.get_unavco_mission_name(atr)
            try:
                hdfeos5_file = get_file_list('{}_*.he5'.format(SAT))[0]
            except IndexError:
                hdfeos5_file = None
            if ut.run_or_skip(
                    out_file=hdfeos5_file,
                    in_file=[ts_file, tcoh_file, mask_file,
                             geom_file]) == 'run':
                mintpy.save_hdfeos5.main(scp_args.split())
        else:
            print('save time-series to HDF-EOS5 format is OFF.')
        return
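
The try/except around get_file_list only guards against an empty result. The same lookup can be sketched with the standard library's glob, using a stand-in mission name (illustrative, not MintPy's get_file_list):

import glob

# 'SENTINEL-1' is a stand-in for the mission name returned by
# sensor.get_unavco_mission_name(atr)
matches = sorted(glob.glob('{}_*.he5'.format('SENTINEL-1')))
hdfeos5_file = matches[0] if matches else None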
Code example #11
File: remove_ramp.py  Project: hfattahi/PySAR
def main(iargs=None):
    inps = cmd_line_parse(iargs)

    # --update option
    if inps.update_mode and run_or_skip(inps) == 'skip':
        return inps.outfile

    out_file = ut.run_deramp(inps.file,
                             ramp_type=inps.surface_type,
                             mask_file=inps.mask_file,
                             out_file=inps.outfile,
                             datasetName=inps.dset)

    # config parameter
    print('add/update the following configuration metadata to file:\n{}'.format(configKeys))
    atr_new = {}
    atr_new['mintpy.deramp'] = inps.surface_type
    atr_new['mintpy.deramp.maskFile'] = inps.mask_file
    ut.add_attribute(out_file, atr_new)
    return
Code example #12
def main(iargs=None):
    inps = cmd_line_parse(iargs)

    # --update option
    if inps.update_mode and run_or_skip(inps) == 'skip':
        return inps.outfile

    out_file = ut.run_deramp(inps.file,
                             ramp_type=inps.surface_type,
                             mask_file=inps.mask_file,
                             out_file=inps.outfile,
                             datasetName=inps.dset)

    # config parameter
    print(
        'add/update the following configuration metadata to file:\n{}'.format(
            configKeys))
    atr_new = {}
    atr_new['mintpy.deramp'] = inps.surface_type
    atr_new['mintpy.deramp.maskFile'] = inps.mask_file
    ut.add_attribute(out_file, atr_new)
    return
Code example #13
File: smallbaselineApp.py  Project: hfattahi/PySAR
    def run_save2hdfeos5(self, step_name):
        """Save displacement time-series and its aux data in geo coordinate into HDF-EOS5 format"""
        if self.template['mintpy.save.hdfEos5'] is True:
            # input
            ts_file = self.get_timeseries_filename(self.template)[step_name]['input']
            # Add attributes from custom template to timeseries file
            if self.customTemplate is not None:
                ut.add_attribute(ts_file, self.customTemplate)

            tcoh_file = 'temporalCoherence.h5'
            mask_file = 'geo_maskTempCoh.h5'
            geom_file = ut.check_loaded_dataset(self.workDir, print_msg=False)[2]
            if 'geo' in ts_file:
                tcoh_file = './geo/geo_temporalCoherence.h5'
                mask_file = './geo/geo_maskTempCoh.h5'
                geom_file = './geo/geo_{}'.format(os.path.basename(geom_file))

            # cmd
            print('--------------------------------------------')
            scp_args = '{f} -c {c} -m {m} -g {g} -t {t}'.format(f=ts_file,
                                                                c=tcoh_file,
                                                                m=mask_file,
                                                                g=geom_file,
                                                                t=self.templateFile)
            print('save_hdfeos5.py', scp_args)

            # output (check existing file)
            atr = readfile.read_attribute(ts_file)
            SAT = sensor.get_unavco_mission_name(atr)
            try:
                hdfeos5_file = get_file_list('{}_*.he5'.format(SAT))[0]
            except IndexError:
                hdfeos5_file = None
            if ut.run_or_skip(out_file=hdfeos5_file, in_file=[ts_file, tcoh_file, mask_file, geom_file]) == 'run':
                mintpy.save_hdfeos5.main(scp_args.split())
        else:
            print('save time-series to HDF-EOS5 format is OFF.')
        return
Code example #14
File: prep_giant.py  Project: ehavazli/MintPy
def prepare_metadata4giant(fname, meta_files=None):
    """Extract metadata from xml files for GIAnT time-series file."""
    # check xml files
    if not meta_files:
        meta_files = auto_xml_file4giant(fname)
    if not meta_files:
        raise FileNotFoundError("no xml file found.")

    # extract metadata from xml files
    rsc_files = [i for i in meta_files if i.endswith('.rsc')]
    xml_files = [i for i in meta_files if i.endswith('.xml')]
    xml_dict = {}
    for rsc_file in rsc_files:
        print('reading {}'.format(rsc_file))
        rsc_dict = readfile.read_roipac_rsc(rsc_file)
        for key in ['length', 'LENGTH', 'FILE_LENGTH', 'width', 'WIDTH']:
            rsc_dict.pop(key, None)
        xml_dict.update(rsc_dict)
    for xml_file in xml_files:
        print('reading {}'.format(xml_file))
        xml_dict.update(read_giant_xml(xml_file))

    if not xml_dict:
        raise ValueError('No metadata found in file: ' + xml_file)

    # standardize metadata names
    xml_dict = readfile.standardize_metadata(xml_dict)

    # project name
    sensor_name, project_name = sensor.project_name2sensor_name(
        os.path.abspath(fname))
    if sensor_name:
        xml_dict['PLATFORM'] = sensor_name
    if project_name:
        xml_dict['PROJECT_NAME'] = project_name
        if sensor_name in project_name:
            tmp = project_name.split(sensor_name)[1][0]
            if tmp == 'A':
                xml_dict['ORBIT_DIRECTION'] = 'ASCENDING'
            else:
                xml_dict['ORBIT_DIRECTION'] = 'DESCENDING'

    # update GIAnT HDF5 file
    fname = ut.add_attribute(fname, xml_dict, print_msg=True)
    return fname
Code example #15
File: prep_giant.py  Project: hfattahi/PySAR
def prepare_metadata4giant(fname, meta_files=None):
    """Extract metadata from xml files for GIAnT time-series file."""
    # check xml files
    if not meta_files:
        meta_files = auto_xml_file4giant(fname)
    if not meta_files:
        raise FileNotFoundError("no xml file found.")

    # extract metadata from xml files
    rsc_files = [i for i in meta_files if i.endswith('.rsc')]
    xml_files = [i for i in meta_files if i.endswith('.xml')]
    xml_dict = {}
    for rsc_file in rsc_files:
        print('reading {}'.format(rsc_file))
        rsc_dict = readfile.read_roipac_rsc(rsc_file)
        for key in ['length', 'LENGTH', 'FILE_LENGTH', 'width', 'WIDTH']:
            rsc_dict.pop(key, None)
        xml_dict.update(rsc_dict)
    for xml_file in xml_files:
        print('reading {}'.format(xml_file))
        xml_dict.update(read_giant_xml(xml_file))

    if not xml_dict:
        raise ValueError('No metadata found in file: '+xml_file)

    # standardize metadata names
    xml_dict = readfile.standardize_metadata(xml_dict)

    # project name
    sensor_name, project_name = sensor.project_name2sensor_name(os.path.abspath(fname))
    if sensor_name:
        xml_dict['PLATFORM'] = sensor_name
    if project_name:
        xml_dict['PROJECT_NAME'] = project_name
        if sensor_name in project_name:
            tmp = project_name.split(sensor_name)[1][0]
            if tmp == 'A':
                xml_dict['ORBIT_DIRECTION'] = 'ASCENDING'
            else:
                xml_dict['ORBIT_DIRECTION'] = 'DESCENDING'

    # update GIAnT HDF5 file
    fname = ut.add_attribute(fname, xml_dict, print_msg=True)
    return fname
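
The orbit-direction heuristic above inspects the first character after the sensor name inside the project name. A quick illustration with hypothetical names:

# 'GalapagosSenDT128' and 'Sen' are hypothetical project/sensor names
project_name = 'GalapagosSenDT128'
sensor_name = 'Sen'

tmp = project_name.split(sensor_name)[1][0]            # -> 'D'
orbit = 'ASCENDING' if tmp == 'A' else 'DESCENDING'
print(orbit)                                           # DESCENDING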
Code example #16
File: load_ifgram.py  Project: geodesymiami/MiNoPy
def main(iargs=None):
    inps = cmd_line_parse(iargs)

    dateStr = datetime.datetime.strftime(datetime.datetime.now(),
                                         '%Y%m%d:%H%M%S')

    if iargs is not None:
        msg = os.path.basename(__file__) + ' ' + ' '.join(iargs[:])
        string = dateStr + " * " + msg
        print(string)
    else:
        msg = os.path.basename(__file__) + ' ' + ' '.join(sys.argv[1::])
        string = dateStr + " * " + msg
        print(string)

    work_dir = os.path.dirname(inps.outdir)
    #os.chdir(work_dir)

    # read input options
    inpsDict = read_inps2dict(inps)
    prepare_metadata(inpsDict)

    inpsDict = read_subset_box(inpsDict)
    extraDict = get_extra_metadata(inpsDict)

    if 'PLATFORM' not in extraDict:
        slcStack = os.path.join(os.path.dirname(work_dir),
                                'inputs/slcStack.h5')
        atr = readfile.read_attribute(slcStack)
        if 'PLATFORM' in atr:
            extraDict['PLATFORM'] = atr['PLATFORM']

    # initiate objects
    inpsDict['ds_name2key'] = datasetName2templateKey
    stackObj = mld.read_inps_dict2ifgram_stack_dict_object(inpsDict)

    # prepare write
    updateMode, comp, box, boxGeo = print_write_setting(inpsDict)
    box = None
    boxGeo = None
    if stackObj and not os.path.isdir(inps.outdir):
        os.makedirs(inps.outdir)
        print('create directory: {}'.format(inps.outdir))
    # write
    if stackObj and mld.update_object(inps.outfile[0],
                                      stackObj,
                                      box,
                                      updateMode=updateMode,
                                      xstep=inpsDict['xstep'],
                                      ystep=inpsDict['ystep']):
        print('-' * 50)
        stackObj.write2hdf5(outputFile=inps.outfile[0],
                            access_mode='w',
                            box=box,
                            compression=comp,
                            extra_metadata=extraDict)

    geo_files = ['geometryRadar.h5', 'geometryGeo.h5']
    copy_file = False
    for geometry_file_2 in geo_files:
        geometry_file = os.path.join(os.path.dirname(work_dir), 'inputs',
                                     geometry_file_2)
        if os.path.exists(geometry_file):
            copy_file = True
            break

    if copy_file:
        if not os.path.exists(
                os.path.join(work_dir, 'inputs/{}'.format(geometry_file_2))):
            shutil.copyfile(
                geometry_file,
                os.path.join(work_dir, 'inputs/{}'.format(geometry_file_2)))
    else:
        geomRadarObj, geomGeoObj = mld.read_inps_dict2geometry_dict_object(
            inpsDict)
        if geomRadarObj and mld.update_object(inps.outfile[1],
                                              geomRadarObj,
                                              box,
                                              updateMode=updateMode,
                                              xstep=inpsDict['xstep'],
                                              ystep=inpsDict['ystep']):
            print('-' * 50)
            geomRadarObj.write2hdf5(outputFile=inps.outfile[1],
                                    access_mode='w',
                                    box=box,
                                    xstep=inpsDict['xstep'],
                                    ystep=inpsDict['ystep'],
                                    compression='lzf',
                                    extra_metadata=extraDict)

        if geomGeoObj and mld.update_object(inps.outfile[2],
                                            geomGeoObj,
                                            boxGeo,
                                            updateMode=updateMode,
                                            xstep=inpsDict['xstep'],
                                            ystep=inpsDict['ystep']):
            print('-' * 50)
            geomGeoObj.write2hdf5(outputFile=inps.outfile[2],
                                  access_mode='w',
                                  box=boxGeo,
                                  xstep=inpsDict['xstep'],
                                  ystep=inpsDict['ystep'],
                                  compression='lzf')

    # check loading result
    load_complete, stack_file, geom_file = ut.check_loaded_dataset(
        work_dir=work_dir, print_msg=True)[0:3]

    # add custom metadata (optional)
    customTemplate = inps.template_file[0]
    if customTemplate:
        print('updating {}, {} metadata based on custom template file: {}'.
              format(os.path.basename(stack_file), os.path.basename(geom_file),
                     os.path.basename(customTemplate)))
        # use ut.add_attribute() instead of add_attribute.py because of
        # better control of special metadata, such as SUBSET_X/YMIN
        ut.add_attribute(stack_file, inpsDict)
        ut.add_attribute(geom_file, inpsDict)

    ut.add_attribute(stack_file, extraDict)

    # if not load_complete, plot and raise exception
    if not load_complete:
        # go back to original directory
        print('Go back to directory:', work_dir)
        os.chdir(work_dir)

        # raise error
        msg = 'step load_ifgram: NOT all required datasets found, exit.'
        raise SystemExit(msg)

    return inps.outfile
Code example #17
def main(iargs=None):
    """
        Generate the temporal coherence file and reliable pixel mask from the inverted stack.
    """

    Parser = MinoPyParser(iargs, script='generate_temporal_coherence')
    inps = Parser.parse()

    dateStr = datetime.datetime.strftime(datetime.datetime.now(),
                                         '%Y%m%d:%H%M%S')

    if iargs is not None:
        msg = os.path.basename(__file__) + ' ' + ' '.join(iargs[:])
        string = dateStr + " * " + msg
        print(string)
    else:
        msg = os.path.basename(__file__) + ' ' + ' '.join(sys.argv[1::])
        string = dateStr + " * " + msg
        print(string)

    start_time = time.time()
    os.chdir(inps.work_dir)

    minopy_dir = os.path.dirname(inps.work_dir)
    minopy_template_file = os.path.join(minopy_dir, 'minopyApp.cfg')
    inps.ifgramStackFile = os.path.join(inps.work_dir, 'inputs/ifgramStack.h5')

    template = readfile.read_template(minopy_template_file)
    if template['minopy.timeseries.tempCohType'] == 'auto':
        template['minopy.timeseries.tempCohType'] = 'full'

    atr = {}
    atr['minopy.timeseries.tempCohType'] = template[
        'minopy.timeseries.tempCohType']
    ut.add_attribute(inps.ifgramStackFile, atr)

    # check if input observation dataset exists.
    stack_obj = ifgramStack(inps.ifgramStackFile)
    stack_obj.open(print_msg=False)
    metadata = stack_obj.get_metadata()
    length, width = stack_obj.length, stack_obj.width

    inps.invQualityFile = 'temporalCoherence.h5'
    mintpy_mask_file = os.path.join(inps.work_dir, 'maskTempCoh.h5')

    quality_name = os.path.join(
        minopy_dir, 'inverted/tempCoh_{}'.format(
            template['minopy.timeseries.tempCohType']))
    quality = np.memmap(quality_name,
                        mode='r',
                        dtype='float32',
                        shape=(length, width))

    # inps.waterMaskFile = os.path.join(minopy_dir, 'waterMask.h5')
    inps.waterMaskFile = None
    water_mask = np.ones(quality.shape, dtype=np.int8)

    if template['minopy.timeseries.waterMask'] != 'auto':
        inps.waterMaskFile = template['minopy.timeseries.waterMask']
        if os.path.exists(inps.waterMaskFile):
            with h5py.File(inps.waterMaskFile, 'r') as f2:
                if 'waterMask' in f2:
                    water_mask = f2['waterMask'][:, :]
                else:
                    water_mask = f2['mask'][:, :]

    if inps.shadow_mask:
        if os.path.exists(os.path.join(minopy_dir, 'shadow_mask.h5')):
            with h5py.File(os.path.join(minopy_dir, 'shadow_mask.h5'),
                           'r') as f2:
                shadow_mask = f2['mask'][:, :]
                water_mask = water_mask * shadow_mask

    inv_quality = np.zeros((quality.shape[0], quality.shape[1]))
    inv_quality_name = 'temporalCoherence'
    inv_quality[:, :] = quality[:, :]
    inv_quality[inv_quality <= 0] = np.nan
    inv_quality[water_mask < 0.5] = np.nan
    if os.path.exists(mintpy_mask_file):
        mintpy_mask = readfile.read(mintpy_mask_file, datasetName='mask')[0]
        inv_quality[mintpy_mask == 0] = np.nan

    if not os.path.exists(inps.invQualityFile):
        metadata['UNIT'] = '1'
        metadata['FILE_TYPE'] = inv_quality_name
        if 'REF_DATE' in metadata:
            metadata.pop('REF_DATE')
        ds_name_dict = {metadata['FILE_TYPE']: [np.float32, (length, width)]}
        writefile.layout_hdf5(inps.invQualityFile,
                              ds_name_dict,
                              metadata=metadata)

    # write the block to disk
    # with 3D block in [z0, z1, y0, y1, x0, x1]
    # and  2D block in         [y0, y1, x0, x1]
    block = [0, length, 0, width]
    writefile.write_hdf5_block(inps.invQualityFile,
                               data=inv_quality,
                               datasetName=inv_quality_name,
                               block=block)

    get_phase_linking_coherence_mask(metadata, inps.work_dir)

    m, s = divmod(time.time() - start_time, 60)
    print('time used: {:02.0f} mins {:02.1f} secs.\n'.format(m, s))

    return
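
The quality raster above is read with np.memmap from a flat float32 file. The round trip below reproduces that pattern on a small synthetic raster (file name is a stand-in):

import numpy as np

# write a small synthetic float32 raster to disk ...
length, width = 4, 5
data = np.random.rand(length, width).astype('float32')
data.tofile('tempCoh_full.bin')            # hypothetical file name

# ... and map it back read-only, as done for the tempCoh_* file above
quality = np.memmap('tempCoh_full.bin', mode='r', dtype='float32',
                    shape=(length, width))
assert np.allclose(quality, data)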
Code example #18
File: smallbaselineApp.py  Project: zhouhuayun/MintPy
    def run_load_data(self, step_name):
        """Load InSAR stacks into HDF5 files in ./inputs folder.
        It 1) copies auxiliary files into the work directory (for Univ of Miami only)
           2) loads all interferogram stack files into the mintpy/inputs directory
           3) checks the loading result
           4) adds custom metadata (optional, for HDF-EOS5 format only)
        """
        # 1) copy aux files (optional)
        self._copy_aux_file()

        # 2) loading data
        stack_processor = self.template['mintpy.load.processor'].lower()
        if stack_processor == 'aria':
            # use subprocess instead of main() here to avoid importing gdal3,
            # which is required in prep_aria.py
            cmd = 'prep_aria.py --template {} --update '.format(self.templateFile)
            print(cmd)
            subprocess.Popen(cmd, shell=True).wait()

        else:
            # compose command line
            scp_args = '--template {}'.format(self.templateFile)
            if self.customTemplateFile:
                scp_args += ' {}'.format(self.customTemplateFile)
            if self.projectName:
                scp_args += ' --project {}'.format(self.projectName)

            # run command line
            print("load_data.py", scp_args)
            mintpy.load_data.main(scp_args.split())

        # come back to working directory
        os.chdir(self.workDir)

        # 3) check loading result
        load_complete, stack_file, geom_file = ut.check_loaded_dataset(self.workDir, print_msg=True)[0:3]

        # 4) add custom metadata (optional)
        if self.customTemplateFile:
            print('updating {}, {} metadata based on custom template file: {}'.format(
                os.path.basename(stack_file),
                os.path.basename(geom_file),
                os.path.basename(self.customTemplateFile)))
            # use ut.add_attribute() instead of add_attribute.py because of
            # better control of special metadata, such as SUBSET_X/YMIN
            ut.add_attribute(stack_file, self.customTemplate)
            ut.add_attribute(geom_file, self.customTemplate)

        # 5) if not load_complete, plot and raise exception
        if not load_complete:
            # plot result if error occurred
            self.plot_result(print_aux=False, plot=plot)

            # go back to original directory
            print('Go back to directory:', self.cwd)
            os.chdir(self.cwd)

            # raise error
            msg = 'step {}: NOT all required datasets found, exit.'.format(step_name)
            raise RuntimeError(msg)
        return
Code example #19
def reference_file(inps):
    """Seed input file with option from input namespace
    Return output file name if succeed; otherwise, return None
    """
    if not inps:
        inps = cmd_line_parse([''])
    atr = readfile.read_attribute(inps.file)

    # update_mode
    if (not inps.force and inps.ref_y is not None
            and inps.ref_y == int(atr.get('REF_Y', -999))
            and inps.ref_x is not None
            and inps.ref_x == int(atr.get('REF_X', -999))):
        print(
            'SAME reference pixel is already selected/saved in file, skip updating.'
        )
        return inps.file

    # Check 1 - stack and its non-nan mask pixel coverage
    stack = ut.temporal_average(inps.file,
                                datasetName='unwrapPhase',
                                updateMode=True,
                                outFile=False)[0]
    mask = np.multiply(~np.isnan(stack), stack != 0.)
    if np.nansum(mask) == 0.0:
        raise ValueError(
            'no pixel found with valid phase value in all datasets.')

    # Check 2 - input ref_y/x: location and validity
    if inps.ref_y is not None and inps.ref_x is not None:
        if mask[inps.ref_y, inps.ref_x] == 0.:
            raise ValueError(
                'reference y/x have nan value in some dataset. Please re-select.'
            )
    else:
        # Find reference y/x
        if inps.method == 'maxCoherence':
            inps.ref_y, inps.ref_x = select_max_coherence_yx(
                coh_file=inps.coherenceFile,
                mask=mask,
                min_coh=inps.minCoherence)
        elif inps.method == 'random':
            inps.ref_y, inps.ref_x = random_select_reference_yx(mask)
        elif inps.method == 'manual':
            inps = manual_select_reference_yx(stack, inps, mask)

        # Check ref_y/x from auto method
        if inps.ref_y is None or inps.ref_x is None:
            raise ValueError('ERROR: no reference y/x found.')

    # Seeding file with reference y/x
    atrNew = reference_point_attribute(atr, y=inps.ref_y, x=inps.ref_x)
    if not inps.write_data:
        print('Add/update ref_x/y attribute to file: ' + inps.file)
        print(atrNew)
        inps.outfile = ut.add_attribute(inps.file, atrNew)

    else:
        if not inps.outfile:
            inps.outfile = inps.file

        k = atr['FILE_TYPE']
        fext = os.path.splitext(inps.file)[1]

        if fext == '.h5':
            if inps.outfile == inps.file:
                print('updating data value without re-writing to a new file')

                if k == 'ifgramStack':
                    with h5py.File(inps.file, 'r+') as f:
                        ds = f['unwrapPhase']
                        for i in range(ds.shape[0]):
                            ds[i, :, :] -= ds[i, inps.ref_y, inps.ref_x]

                        print('update metadata')
                        f.attrs.update(atrNew)

                else:
                    with h5py.File(inps.file, 'r+') as f:
                        ds = f[k]
                        if len(ds.shape) == 3:
                            # 3D matrix
                            for i in range(ds.shape[0]):
                                ds[i, :, :] -= ds[i, inps.ref_y, inps.ref_x]

                        else:
                            # 2D matrix
                            ds[:] -= ds[inps.ref_y, inps.ref_x]

                        print('update metadata')
                        f.attrs.update(atrNew)

            else:
                ## write to a new file
                print('writing the referenced data into file: {}'.format(
                    inps.outfile))

                # 1. read and update data value
                data, atr = readfile.read(inps.file)
                if len(data.shape) == 3:
                    # 3D matrix
                    for i in range(data.shape[0]):
                        data[i, :, :] -= data[i, inps.ref_y, inps.ref_x]

                else:
                    # 2D matrix
                    data -= data[inps.ref_y, inps.ref_x]

                # 2. update metadata
                atr.update(atrNew)

                # 3. write to file
                writefile.write(data,
                                inps.outfile,
                                metadata=atr,
                                ref_file=inps.file)

        else:
            # for binary file, over-write directly
            data = readfile.read(inps.file)[0]
            data -= data[inps.ref_y, inps.ref_x]
            atr.update(atrNew)
            writefile.write(data, out_file=inps.outfile, metadata=atr)

    ut.touch([inps.coherenceFile, inps.maskFile])
    return inps.outfile
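
The seeding loops above all do the same thing: subtract the reference-pixel value from every layer. A self-contained NumPy illustration of that step on synthetic data:

import numpy as np

# synthetic stack: 3 epochs of 4x5 pixels
data = np.random.rand(3, 4, 5).astype('float32')
ref_y, ref_x = 2, 3

# copy the reference-pixel series first to avoid aliasing,
# then subtract it from every epoch in one broadcast
ref_vals = data[:, ref_y, ref_x].copy()
data -= ref_vals[:, None, None]

assert np.allclose(data[:, ref_y, ref_x], 0.0)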
Code example #20
def reference_file(inps):
    """Seed input file with option from input namespace
    Return output file name if succeed; otherwise, return None
    """
    if not inps:
        inps = cmd_line_parse([''])
    atr = readfile.read_attribute(inps.file)
    if (inps.ref_y and inps.ref_x and 'REF_Y' in atr.keys()
            and inps.ref_y == int(atr['REF_Y'])
            and inps.ref_x == int(atr['REF_X']) and not inps.force):
        print(
            'Same reference pixel is already selected/saved in file, skip updating.'
        )
        return inps.file

    # Get stack and mask
    stack = ut.temporal_average(inps.file,
                                datasetName='unwrapPhase',
                                updateMode=True,
                                outFile=False)[0]
    mask = np.multiply(~np.isnan(stack), stack != 0.)
    if np.nansum(mask) == 0.0:
        raise ValueError(
            'no pixel found with valid phase value in all datasets.')

    if inps.ref_y and inps.ref_x and mask[inps.ref_y, inps.ref_x] == 0.:
        raise ValueError(
            'reference y/x have nan value in some dataset. Please re-select.')

    # Find reference y/x
    if not inps.ref_y or not inps.ref_x:
        if inps.method == 'maxCoherence':
            inps.ref_y, inps.ref_x = select_max_coherence_yx(
                coh_file=inps.coherenceFile,
                mask=mask,
                min_coh=inps.minCoherence)
        elif inps.method == 'random':
            inps.ref_y, inps.ref_x = random_select_reference_yx(mask)
        elif inps.method == 'manual':
            inps = manual_select_reference_yx(stack, inps, mask)
    if not inps.ref_y or not inps.ref_x:
        raise ValueError('ERROR: no reference y/x found.')

    # Seeding file with reference y/x
    atrNew = reference_point_attribute(atr, y=inps.ref_y, x=inps.ref_x)
    if not inps.write_data:
        print('Add/update ref_x/y attribute to file: ' + inps.file)
        print(atrNew)
        inps.outfile = ut.add_attribute(inps.file, atrNew)

    else:
        if not inps.outfile:
            inps.outfile = '{}_seeded{}'.format(
                os.path.splitext(inps.file)[0],
                os.path.splitext(inps.file)[1])
        k = atr['FILE_TYPE']

        # For ifgramStack file, update data value directly, do not write to new file
        if k == 'ifgramStack':
            f = h5py.File(inps.file, 'r+')
            ds = f[k].get('unwrapPhase')
            for i in range(ds.shape[0]):
                ds[i, :, :] -= ds[i, inps.ref_y, inps.ref_x]
            f[k].attrs.update(atrNew)
            f.close()
            inps.outfile = inps.file

        elif k == 'timeseries':
            data = timeseries(inps.file).read()
            for i in range(data.shape[0]):
                data[i, :, :] -= data[i, inps.ref_y, inps.ref_x]
            obj = timeseries(inps.outfile)
            atr.update(atrNew)
            obj.write2hdf5(data=data, metadata=atr, refFile=inps.file)
            obj.close()
        else:
            print('writing >>> ' + inps.outfile)
            data = readfile.read(inps.file)[0]
            data -= data[inps.ref_y, inps.ref_x]
            atr.update(atrNew)
            writefile.write(data, out_file=inps.outfile, metadata=atr)
    ut.touch([inps.coherenceFile, inps.maskFile])
    return inps.outfile
Code example #21
File: reference_point.py  Project: hfattahi/PySAR
def reference_file(inps):
    """Seed input file with option from input namespace
    Return output file name if succeed; otherwise, return None
    """
    if not inps:
        inps = cmd_line_parse([''])
    atr = readfile.read_attribute(inps.file)
    if (inps.ref_y and inps.ref_x and 'REF_Y' in atr.keys()
            and inps.ref_y == int(atr['REF_Y']) and inps.ref_x == int(atr['REF_X'])
            and not inps.force):
        print('Same reference pixel is already selected/saved in file, skip updating.')
        return inps.file

    # Get stack and mask
    stack = ut.temporal_average(inps.file, datasetName='unwrapPhase', updateMode=True, outFile=False)[0]
    mask = np.multiply(~np.isnan(stack), stack != 0.)
    if np.nansum(mask) == 0.0:
        raise ValueError('no pixel found with valid phase value in all datasets.')

    if inps.ref_y and inps.ref_x and mask[inps.ref_y, inps.ref_x] == 0.:
        raise ValueError('reference y/x have nan value in some dataset. Please re-select.')

    # Find reference y/x
    if not inps.ref_y or not inps.ref_x:
        if inps.method == 'maxCoherence':
            inps.ref_y, inps.ref_x = select_max_coherence_yx(coh_file=inps.coherenceFile,
                                                             mask=mask,
                                                             min_coh=inps.minCoherence)
        elif inps.method == 'random':
            inps.ref_y, inps.ref_x = random_select_reference_yx(mask)
        elif inps.method == 'manual':
            inps = manual_select_reference_yx(stack, inps, mask)
    if not inps.ref_y or not inps.ref_x:
        raise ValueError('ERROR: no reference y/x found.')

    # Seeding file with reference y/x
    atrNew = reference_point_attribute(atr, y=inps.ref_y, x=inps.ref_x)
    if not inps.write_data:
        print('Add/update ref_x/y attribute to file: '+inps.file)
        print(atrNew)
        inps.outfile = ut.add_attribute(inps.file, atrNew)

    else:
        if not inps.outfile:
            inps.outfile = '{}_seeded{}'.format(os.path.splitext(inps.file)[0],
                                                os.path.splitext(inps.file)[1])
        k = atr['FILE_TYPE']

        # For ifgramStack file, update data value directly, do not write to new file
        if k == 'ifgramStack':
            f = h5py.File(inps.file, 'r+')
            ds = f[k].get('unwrapPhase')
            for i in range(ds.shape[0]):
                ds[i, :, :] -= ds[i, inps.ref_y, inps.ref_x]
            f[k].attrs.update(atrNew)
            f.close()
            inps.outfile = inps.file

        elif k == 'timeseries':
            data = timeseries(inps.file).read()
            for i in range(data.shape[0]):
                data[i, :, :] -= data[i, inps.ref_y, inps.ref_x]
            obj = timeseries(inps.outfile)
            atr.update(atrNew)
            obj.write2hdf5(data=data, metadata=atr, refFile=inps.file)
            obj.close()
        else:
            print('writing >>> '+inps.outfile)
            data = readfile.read(inps.file)[0]
            data -= data[inps.ref_y, inps.ref_x]
            atr.update(atrNew)
            writefile.write(data, out_file=inps.outfile, metadata=atr)
    ut.touch([inps.coherenceFile, inps.maskFile])
    return inps.outfile
Code example #22
def main(iargs=None):
    """
        Invert the interferogram network into a time series, then regenerate the temporal coherence.
    """

    Parser = MinoPyParser(iargs, script='invert_network')
    inps = Parser.parse()

    dateStr = datetime.datetime.strftime(datetime.datetime.now(),
                                         '%Y%m%d:%H%M%S')

    if iargs is not None:
        msg = os.path.basename(__file__) + ' ' + ' '.join(iargs[:])
        string = dateStr + " * " + msg
        print(string)
    else:
        msg = os.path.basename(__file__) + ' ' + ' '.join(sys.argv[1::])
        string = dateStr + " * " + msg
        print(string)

    start_time = time.time()
    os.chdir(inps.work_dir)
    minopy_dir = os.path.dirname(inps.work_dir)

    if inps.template_file is None:
        inps.template_file = os.path.join(minopy_dir, 'smallbaselineApp.cfg')

    minopy_template_file = os.path.join(minopy_dir, 'minopyApp.cfg')
    inps.ifgramStackFile = os.path.join(inps.work_dir, 'inputs/ifgramStack.h5')

    template = readfile.read_template(minopy_template_file)

    if template['minopy.timeseries.tempCohType'] == 'auto':
        template['minopy.timeseries.tempCohType'] = 'full'

    atr = {}
    atr['minopy.timeseries.tempCohType'] = template[
        'minopy.timeseries.tempCohType']
    ut.add_attribute(inps.ifgramStackFile, atr)

    # 1) invert ifgramStack for time-series
    stack_file = ut.check_loaded_dataset(inps.work_dir, print_msg=False)[1]
    #wrapped_phase_series = os.path.join(minopy_dir, 'inverted/phase_series.h5')
    iargs = [
        stack_file, '-t', inps.template_file, '--update', '--norm',
        inps.residualNorm, '--tcoh', inps.temp_coh, '--mask-threshold',
        str(inps.maskThreshold), '--smooth_factor', inps.L1_alpha
    ]  #, '--calc-cov']

    if not inps.minNormVelocity:
        iargs += ['--min-norm-phase']

    print('\nifgram_inversion_L1L2.py', ' '.join(iargs))
    ifgram_inversion_L1L2.main(iargs)

    # 1) Replace temporal coherence with the one obtained from full stack inversion
    iargs = ['-d', inps.work_dir]
    if inps.shadow_mask:
        iargs = ['-d', inps.work_dir, '--shadow_mask']
    print('\ngenerate_temporal_coherence.py', ' '.join(iargs))
    generate_temporal_coherence.main(iargs)

    #m, s = divmod(time.time() - start_time, 60)
    #print('time used: {:02.0f} mins {:02.1f} secs.\n'.format(m, s))

    return
Code example #23
def main(iargs=None):
    inps = cmd_line_parse(iargs)

    key = 'geolocation_corrected'

    with h5py.File(inps.geometry_file, 'r') as f:
        keys = f.attrs.keys()
        latitude = f['latitude'][:, :]
        longitude = f['longitude'][:, :]

        atr = readfile.read(inps.geometry_file, datasetName='azimuthAngle')[1]

        if key not in keys or atr[key] == 'no':
            status = 'run'
            print('Run geolocation correction ...')
        else:
            status = 'skip'
            print(
                'Geolocation is already done, you may reverse it using --reverse. skip ...'
            )

        if inps.reverse:
            if key in keys and atr[key] == 'yes':
                status = 'run'
                print('Run reversing geolocation correction ...')
            else:
                status = 'skip'
                print('The file is not corrected for geolocation. skip ...')

    if status == 'run':

        # np.float was removed in NumPy 1.24; use the builtin float
        az_angle = np.deg2rad(float(atr['HEADING']))
        inc_angle = np.deg2rad(
            readfile.read(inps.geometry_file, datasetName='incidenceAngle')[0])

        dem_error = readfile.read(inps.dem_error_file, datasetName='dem')[0]

        rad_latitude = np.deg2rad(latitude)

        one_degree_latitude = 111132.92 - 559.82 * np.cos(2*rad_latitude) + \
                              1.175 * np.cos(4 * rad_latitude) - 0.0023 * np.cos(6 * rad_latitude)

        one_degree_longitude = 111412.84 * np.cos(rad_latitude) - \
                               93.5 * np.cos(3 * rad_latitude) + 0.118 * np.cos(5 * rad_latitude)

        dx = np.divide(dem_error * (1 / np.tan(inc_angle)) * np.cos(az_angle),
                       one_degree_longitude)  # converted to degree
        dy = np.divide(dem_error * (1 / np.tan(inc_angle)) * np.sin(az_angle),
                       one_degree_latitude)  # converted to degree

        if inps.reverse:
            sign = np.sign(latitude)
            latitude -= sign * dy

            sign = np.sign(longitude)

            if atr['ORBIT_DIRECTION'] == 'Ascending':
                longitude += sign * dx
            else:
                longitude -= sign * dx

            atr[key] = 'no'
            block = [0, latitude.shape[0], 0, latitude.shape[1]]
            writefile.write_hdf5_block(inps.geometry_file,
                                       data=latitude,
                                       datasetName='latitude',
                                       block=block)

            writefile.write_hdf5_block(inps.geometry_file,
                                       data=longitude,
                                       datasetName='longitude',
                                       block=block)

            ut.add_attribute(inps.geometry_file, atr_new=atr)

        else:
            sign = np.sign(latitude)
            latitude += sign * dy

            sign = np.sign(longitude)

            if atr['ORBIT_DIRECTION'] == 'Ascending':
                longitude -= sign * dx
            else:
                longitude += sign * dx

            atr[key] = 'yes'
            block = [0, latitude.shape[0], 0, latitude.shape[1]]
            writefile.write_hdf5_block(inps.geometry_file,
                                       data=latitude,
                                       datasetName='latitude',
                                       block=block)
            writefile.write_hdf5_block(inps.geometry_file,
                                       data=longitude,
                                       datasetName='longitude',
                                       block=block)
            ut.add_attribute(inps.geometry_file, atr_new=atr)

    return
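
The dx/dy expressions above convert a DEM-error-induced horizontal shift from meters to degrees, using series expansions for the length of one degree of latitude and longitude. A worked example with synthetic values:

import numpy as np

# synthetic inputs: 10 m DEM error, 34 deg incidence, -167 deg heading, 35 deg latitude
dem_error = 10.0
inc_angle = np.deg2rad(34.0)
az_angle = np.deg2rad(-167.0)
rad_latitude = np.deg2rad(35.0)

# meters per degree of latitude/longitude (same series expansions as above)
one_degree_latitude = (111132.92 - 559.82 * np.cos(2 * rad_latitude)
                       + 1.175 * np.cos(4 * rad_latitude)
                       - 0.0023 * np.cos(6 * rad_latitude))
one_degree_longitude = (111412.84 * np.cos(rad_latitude)
                        - 93.5 * np.cos(3 * rad_latitude)
                        + 0.118 * np.cos(5 * rad_latitude))

# horizontal shift caused by the DEM error, converted to degrees
dx = dem_error / np.tan(inc_angle) * np.cos(az_angle) / one_degree_longitude
dy = dem_error / np.tan(inc_angle) * np.sin(az_angle) / one_degree_latitude
print('dx = {:+.2e} deg, dy = {:+.2e} deg'.format(dx, dy))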