Example #1
def read_inps2dict(inps):
    """Read input Namespace object info into inpsDict"""
    # Read input info into inpsDict
    inpsDict = vars(inps)
    inpsDict['PLATFORM'] = None

    # Read template file
    template = {}
    for fname in inps.template_file:
        temp = readfile.read_template(fname)
        temp = ut.check_template_auto_value(temp)
        template.update(temp)
    for key, value in template.items():
        inpsDict[key] = value
    if 'processor' in template.keys():
        template['pysar.load.processor'] = template['processor']

    prefix = 'pysar.load.'
    key_list = [
        i.split(prefix)[1] for i in template.keys() if i.startswith(prefix)
    ]
    for key in key_list:
        value = template[prefix + key]
        if key in ['processor', 'updateMode', 'compression']:
            inpsDict[key] = template[prefix + key]
        elif value:
            inpsDict[prefix + key] = template[prefix + key]

    if inpsDict['compression'] is False:
        inpsDict['compression'] = None

    # PROJECT_NAME --> PLATFORM
    if not inpsDict['PROJECT_NAME']:
        inpsDict['PROJECT_NAME'] = sensor.project_name2sensor_name(
            inps.template_file)[1]
    inpsDict['PLATFORM'] = sensor.project_name2sensor_name(
        inpsDict['PROJECT_NAME'])[0]
    if inpsDict['PLATFORM']:
        print('platform : {}'.format(inpsDict['PLATFORM']))
    print('processor: {}'.format(inpsDict['processor']))

    # check the default file path for Univ of Miami users
    if (auto_path.autoPath and 'SCRATCHDIR' in os.environ
            and inpsDict['PROJECT_NAME'] is not None):
        print('check auto path setting for Univ of Miami users'
              ' for processor: {}'.format(inpsDict['processor']))
        inpsDict = auto_path.get_auto_path(
            processor=inpsDict['processor'],
            project_name=inpsDict['PROJECT_NAME'],
            template=inpsDict)
    return inpsDict
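
As a minimal usage sketch (not part of the example above), the tuple returned by sensor.project_name2sensor_name() can be unpacked directly, with index 0 as the platform and index 1 as the project name, matching how read_inps2dict() uses it; the import path and template path below are assumptions and may differ in your PySAR/MintPy installation.

# sketch: resolve platform and project name the same way read_inps2dict() does;
# the import path and the template path are assumptions
from pysar.objects import sensor

template_file = '/scratch/GalapagosSenDT128/GalapagosSenDT128.template'  # hypothetical
platform, project_name = sensor.project_name2sensor_name(template_file)
print('platform : {}'.format(platform))
print('project  : {}'.format(project_name))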
Example #2
def write_job_file(iDict):
    # command line
    cmd = 'process_isce_stack.py -t {}'.format(iDict['templateFile'])
    if iDict['startNum']:
        cmd += ' --start {} '.format(iDict['startNum'])
    if iDict['endNum']:
        cmd += ' --end {} '.format(iDict['endNum'])
    print('run the following command in bsub mode')
    print(cmd)

    # write job file
    job_dir = os.getcwd()
    job_file = os.path.join(job_dir, 'z_input_process_isce_stack.job')
    job_name = sensor.project_name2sensor_name(iDict['templateFile'])[1]
    with open(job_file, 'w') as f:
        f.write('#! /bin/tcsh\n')
        f.write('#BSUB -J {}\n'.format(job_name))
        f.write('#BSUB -P insarlab\n')
        f.write('#BSUB -o z_output_{}.%J.o\n'.format(job_name))
        f.write('#BSUB -e z_output_{}.%J.e\n'.format(job_name))
        f.write('#BSUB -W {}\n'.format(iDict['walltime']))
        f.write('#BSUB -q general\n')
        f.write('#BSUB -n 1\n')
        f.write('#BSUB -R "rusage[mem={}]"\n'.format(iDict['memory']))
        if iDict['email']:
            f.write('#BSUB -u {}\n'.format(iDict['email']))
            f.write('#BSUB -N\n')

        # cd into the work directory and run the command
        f.write('\n')
        f.write('cd {}\n'.format(job_dir))
        f.write('{}\n'.format(cmd))
    print('finished writing job file: {}'.format(job_file))
    return job_file
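
A hedged usage sketch for write_job_file(): every key mirrors one referenced in the function body, and every value is hypothetical.

# sketch: build the iDict that write_job_file() reads from; all values are hypothetical
iDict = {
    'templateFile': 'KujuAlosAT422F650.template',  # hypothetical template name
    'startNum'    : 1,
    'endNum'      : 16,
    'walltime'    : '48:00',
    'memory'      : 3700,
    'email'       : None,      # set an address to enable the #BSUB -u / -N lines
}
job_file = write_job_file(iDict)
# submit on an LSF cluster with: bsub < z_input_process_isce_stack.job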
Example #3
def prepare_metadata4giant(fname, meta_files=None):
    """Extract metadata from xml files for GIAnT time-series file."""
    # check xml files
    if not meta_files:
        meta_files = auto_xml_file4giant(fname)
    if not meta_files:
        raise FileNotFoundError("no xml file found.")

    # extract metadata from xml files
    rsc_files = [i for i in meta_files if i.endswith('.rsc')]
    xml_files = [i for i in meta_files if i.endswith('.xml')]
    xml_dict = {}
    for rsc_file in rsc_files:
        print('reading {}'.format(rsc_file))
        rsc_dict = readfile.read_roipac_rsc(rsc_file)
        for key in ['length', 'LENGTH', 'FILE_LENGTH', 'width', 'WIDTH']:
            rsc_dict.pop(key, None)  # drop if present
        xml_dict.update(rsc_dict)
    for xml_file in xml_files:
        print('reading {}'.format(xml_file))
        xml_dict.update(read_giant_xml(xml_file))

    if not xml_dict:
        raise ValueError('No metadata found in files: {}'.format(meta_files))

    # standardize metadata names
    xml_dict = readfile.standardize_metadata(xml_dict)

    # project name
    sensor_name, project_name = sensor.project_name2sensor_name(
        os.path.abspath(fname))
    if sensor_name:
        xml_dict['PLATFORM'] = sensor_name
    if project_name:
        xml_dict['PROJECT_NAME'] = project_name
        if sensor_name in project_name:
            tmp = project_name.split(sensor_name)[1][0]
            if tmp == 'A':
                xml_dict['ORBIT_DIRECTION'] = 'ASCENDING'
            else:
                xml_dict['ORBIT_DIRECTION'] = 'DESCENDING'

    # update GIAnT HDF5 file
    fname = ut.add_attribute(fname, xml_dict, print_msg=True)
    return fname
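
A minimal sketch of calling prepare_metadata4giant() with explicit metadata files; all file names are hypothetical and assume a GIAnT-style working directory.

# sketch: attach metadata from GIAnT xml/rsc files to a time-series HDF5 file;
# file names are hypothetical
ts_file = prepare_metadata4giant(
    'LS-PARAMS.h5',
    meta_files=['data.xml', 'sbas.xml', 'radar.hdr.rsc'],
)
print('updated file:', ts_file)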
Example #4
def wait4jobs2finish(run_file, num_job):
    job_name = os.path.basename(run_file)
    file_pattern = 'z_output_{}.*.o'.format(job_name)

    proj_name = sensor.project_name2sensor_name(os.getcwd())[1]

    print('-' * 50)
    print('sleeping until {} jobs are done for {}'.format(num_job, job_name))
    t_sec = 0
    num_file = len(glob.glob(file_pattern))
    while num_file < num_job:
        time.sleep(1)  # wait one second

        msg = '# of '
        if proj_name:
            msg += '{}/'.format(proj_name)
        msg += '{} files: {} / {} after {} mins'.format(
            file_pattern, num_file, num_job, int(t_sec / 60))

        if t_sec <= 600:
            if t_sec % 60 == 0:
                print(msg)
        else:
            if t_sec % 300 == 0:
                print(msg)

        num_file = len(glob.glob(file_pattern))
        t_sec += 1

    print('-' * 50)
    print('ALL {} jobs are done for {}'.format(num_job, job_name))

    m, s = divmod(t_sec, 60)
    h, m = divmod(m, 60)
    print('Total used time: {:02d} hours {:02d} mins {:02d} secs'.format(
        h, m, s))
    return
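
A short sketch of how wait4jobs2finish() might be driven after submitting jobs from a run file; the run file name and job count are hypothetical.

# sketch: block until all submitted jobs have written their z_output_*.o files;
# the run file name and job count are hypothetical
run_file = 'run_files/run_07_igram'
num_job = 20                      # number of bsub jobs launched from run_file
wait4jobs2finish(run_file, num_job)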
Example #5
def read_template2inps(templateFile, inps=None):
    """Read network options from template file into Namespace variable inps"""
    if not inps:
        inps = cmd_line_parse()
    inpsDict = vars(inps)

    # Read template file
    template = readfile.read_template(templateFile)
    auto_file = os.path.join(os.path.dirname(__file__),
                             'defaults/selectNetwork.cfg')
    template = ut.check_template_auto_value(template, auto_file=auto_file)
    if not template:
        log('Empty template: ' + templateFile)
        return None

    prefix = 'selectNetwork.'
    # Check obsolete option prefix
    for i in ['selectPairs.', 'select.network.']:
        if any(i in key for key in template.keys()):
            msg = 'obsolete option prefix detected: {}\n'.format(i)
            msg += 'Use {} instead'.format(prefix)
            raise Exception(msg)
    if all(prefix not in key for key in template.keys()):
        msg = 'no valid input option detected in template file!\n'
        msg += 'Check the template below for supported options:\n'
        msg += TEMPLATE
        raise Exception(msg)

    # convert template into inpsDict
    keyList = [
        i for i in list(inpsDict.keys()) if prefix + i in template.keys()
    ]
    for key in keyList:
        value = template[prefix + key]
        # bool
        if key in ['keepSeasonal']:
            inpsDict[key] = value
        elif value:
            # str
            if key in ['method', 'referenceFile', 'tempPerpList']:
                inpsDict[key] = value
            # date in YYYYMMDD
            elif key in ['masterDate', 'startDate', 'endDate']:
                inpsDict[key] = ptime.yyyymmdd(value)
            # list of dates in YYYYMMDD
            elif key in ['excludeDate']:
                inps.excludeDate = ptime.yyyymmdd(
                    [i.strip() for i in value.split(',')])
            # float
            elif key in [
                    'perpBaseMax', 'tempBaseMax', 'tempBaseMin',
                    'dopOverlapMin'
            ]:
                inpsDict[key] = float(value)
            # int
            elif key in ['connNum']:
                inpsDict[key] = int(value)

    # read tempPerpList from str
    if isinstance(inps.tempPerpList, str):
        inps.tempPerpList = [[float(j) for j in i.split(',')]
                             for i in inps.tempPerpList.split(';')]

    # Initial network using input methods
    inps.method = inps.method.lower().replace('-', '_')
    if inps.method in ['star', 'ps']:
        inps.method = 'star'
    elif inps.method.startswith('seq'):
        inps.method = 'sequential'
    elif inps.method.startswith('hierar'):
        inps.method = 'hierarchical'
    elif inps.method in ['mst', 'min_spanning_tree', 'minimum_spanning_tree']:
        inps.method = 'mst'
    elif inps.method in ['all', 'sb']:
        inps.method = 'all'

    # for coherence prediction
    key = 'PLATFORM'
    if key in template.keys() and not inps.sensor:
        inps.sensor = template[key]

    key = 'COH_COLOR_JUMP'
    if key in template.keys():
        inps.coh_thres = float(template[key])

    # project name and sensor
    project_name = os.path.splitext(os.path.basename(inps.template_file))[0]
    log('project name: ' + project_name)
    if not inps.sensor:
        inps.sensor = sensor.project_name2sensor_name(project_name)[0]

    # Output directory/filename
    if not inps.outfile:
        if autoPath and 'SCRATCHDIR' in os.environ:
            inps.out_dir = os.getenv(
                'SCRATCHDIR') + '/' + project_name + '/PROCESS'
        else:
            try:
                inps.out_dir = os.path.dirname(
                    os.path.abspath(inps.referenceFile))
            except Exception:
                inps.out_dir = os.path.dirname(
                    os.path.abspath(inps.baseline_file))
        inps.outfile = inps.out_dir + '/ifgram_list.txt'

    # Auto path of bl_list.txt file (for Miami user)
    if not inps.baseline_file and autoPath and 'SCRATCHDIR' in os.environ:
        bl_file = os.path.join(os.getenv('SCRATCHDIR'),
                               '{}/SLC/bl_list.txt'.format(project_name))
        if os.path.isfile(bl_file):
            inps.baseline_file = bl_file

    if not inps.referenceFile and not inps.baseline_file:
        raise Exception(
            'No baseline file or reference file found! At least one is required.'
        )

    return inps
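
A minimal sketch of feeding a template into read_template2inps(), assuming cmd_line_parse() from the same script supplies the default Namespace; the template file name is hypothetical.

# sketch: read network-selection options from a (hypothetical) template file
# into the Namespace returned by this script's cmd_line_parse()
inps = read_template2inps('GalapagosSenDT128.template')
print('network selection method:', inps.method)
print('sensor                  :', inps.sensor)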