Code example #1
def main():
    print(now(), 'copy_citcom_model_from_cluster.py')

    # Mark - these should probably be user inputs
    # You could also allow the user to specify the usual types of
    # time strings like we have for grid_maker.py.  That way, the user
    # could give timesteps, run times, or ages as comma-separated
    # lists or in start/end/range format (a hedged CLI sketch follows
    # this example)

    # for testing I was running this script on citerra in this directory:
    # /home/danb/lat/lat01

    field_list = ['velo', 'visc']  # list to loop over
    time_list = ['0', '290']  # list to loop over
    # local processing directory that can be 'seen' from the cluster
    # e.g., this path is visible from citerra and is just a test location
    rootdir = '/home/danb/beno/test_copy/model'

    pid_file = 'pid14289.cfg'

    # pid_file should also be an input argument
    # I'm assuming the script will always be run in the directory of
    # the CitcomS model on the cluster where the data was generated

    # parsing the pid file is helpful because it gives us the datafile
    # etc.
    master_d = Core_Citcom.get_all_pid_data(pid_file)
    pid_d = master_d['pid_d']

    # make data directory and determine structure
    datafile = pid_d['datafile']
    datadir = pid_d['datadir']

    if datadir.endswith('%RANK'):
        print('data stored by processor')
        datadir = datadir[:-5]  # strip '%RANK'
        print(datadir)
        PROC = True
    else:
        PROC = False  # not sure if this will be necessary, but
        # easy to include in this development draft

    # copy top level files
    cmd = 'cp %(pid_file)s %(rootdir)s' % vars()
    subprocess.call(cmd, shell=True)
    cmd = 'cp stderr.txt %(rootdir)s/stderr.txt' % vars()
    subprocess.call(cmd, shell=True)
    cmd = 'cp stdout.txt %(rootdir)s/stdout.txt' % vars()
    subprocess.call(cmd, shell=True)
    # copy user-created coordinate file if it exists
    coor_file = pid_d['coor_file']
    cmd = 'cp %(coor_file)s %(rootdir)s/%(coor_file)s' % vars()
    subprocess.call(cmd, shell=True)
    cmd = 'cp %(datafile)s.cfg %(rootdir)s/%(datafile)s.cfg' % vars()
    subprocess.call(cmd, shell=True)

    datadir_abs = rootdir + '/' + datadir

    # make the root (if it doesn't exist) and data directory
    Core_Util.make_dir(datadir_abs)

    # copy data
    if PROC:
        for proc in range(pid_d['total_proc']):
            datadir_proc = datadir_abs + str(proc) + '/'
            Core_Util.make_dir(datadir_proc)
            for field in field_list:
                # always need coordinate file
                coord_name = str(proc) + '/' + datafile + '.coord.' + str(proc)
                filename1 = datadir + coord_name
                filename2 = datadir_abs + coord_name
                cmd = 'cp %(filename1)s %(filename2)s' % vars()
                print(cmd)
                # Mark - this command actually calls the copy command
                subprocess.call(cmd, shell=True)
                for time in time_list:
                    # create filename
                    file_name = str(proc) + '/' + datafile + '.' + field + '.'
                    file_name += str(proc) + '.' + str(time)
                    filename1 = datadir + file_name
                    filename2 = datadir_abs + file_name
                    cmd = 'cp %(filename1)s %(filename2)s' % vars()
                    print(cmd)
                    # copying of the field files is disabled in this development draft;
                    # uncomment the next line to actually transfer them
                    #subprocess.call(cmd, shell=True)

        # now copy essential files from 0/ directory
        zero_proc_dir = datadir_abs + '0/' + datafile  # note: currently unused
        for suffix in ['.time', '.log']:
            file_name = '0/' + datafile + suffix
            filename1 = datadir + file_name
            filename2 = datadir_abs + file_name
            cmd = 'cp %(filename1)s %(filename2)s' % vars()
            print(cmd)
            subprocess.call(cmd, shell=True)

    else:

        # non-processor (%RANK) branch
        # all files are stored in data
        # although we could code this up here, I think having
        # all the files in one directory will break grid_maker.py
        # at the moment.
        pass
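
The "should probably be user inputs" notes above suggest taking pid_file, rootdir, and the field/time lists from the command line, with the time specification accepted either as a comma-separated list or as a start/end/range string, as in grid_maker.py. A minimal sketch of such a CLI, assuming standard argparse, made-up flag names, and a 'start/end/step' syntax that may not match the project's actual convention:

import argparse

def parse_copy_args():
    '''Hypothetical CLI for the copy script above; flag names and the
    range syntax are assumptions, not the project's actual interface.'''
    parser = argparse.ArgumentParser(
        description='Copy CitcomS model output from a cluster to a local directory.')
    parser.add_argument('pid_file', help="CitcomS pid .cfg file, e.g. 'pid14289.cfg'")
    parser.add_argument('--rootdir', default='.',
                        help='local processing directory visible from the cluster')
    parser.add_argument('--fields', default='velo,visc',
                        help='comma-separated list of fields to copy')
    parser.add_argument('--times', default='0,290',
                        help="comma-separated timesteps, or a 'start/end/step' range")
    args = parser.parse_args()

    field_list = args.fields.split(',')
    if '/' in args.times:
        # expand a start/end/step specification into individual timesteps
        start, end, step = (int(x) for x in args.times.split('/'))
        time_list = [str(t) for t in range(start, end + 1, step)]
    else:
        time_list = args.times.split(',')
    return args.pid_file, args.rootdir, field_list, time_list
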
Code example #2
File: restart_citcoms.py    Project: EarthByte/citcoms
def main():
    '''This is the main function for restart_citcoms.py'''
    print(now(), 'restart_citcoms.py: START')

    # get the control .cfg file as a dictionary
    control_d = Core_Util.parse_configuration_file(sys.argv[1])

    # parse master run input pid file
    master_run_cfg = control_d['master_run_cfg']
    master_run_cfg_d = Core_Util.parse_configuration_file(master_run_cfg)

    # parse master run output pid file
    master_run_pid = control_d['master_run_pid']

    # get the master dictionary and define aliases
    master_run_d = Core_Citcom.get_all_pid_data(master_run_pid)
    master_run_d['control_d'] = control_d
    master_run_pid_d = master_run_d['pid_d']

    # report details of input data
    if verbose:
        print(now(), 'restart_citcoms: control_d = ')
        Core_Util.tree_print(control_d)
        print(now(), 'restart_citcoms: master_run_cfg_d = ')
        Core_Util.tree_print(master_run_cfg_d)
        print(now(), 'restart_citcoms: master_run_pid_d = ')
        Core_Util.tree_print(master_run_pid_d)

    # SAVE, might need later ... ?
    # copy of the geo frame defaults
    #geoframe_d = master_run_d['geoframe_d']

    # process the control entry to get a list of ages
    time_spec_d = Core_Citcom.get_time_spec_dictionary(
        control_d['restart_ages'], master_run_d['time_d'])

    print(now(), 'restart_citcoms: time_spec_d =')
    Core_Util.tree_print(time_spec_d)

    # Get the restart type and local copy of the restart parameter replacement dictionary
    rs_replace_d = {}
    rs_type = control_d['restart_type']
    if rs_type == 'dynamic_topography':
        rs_replace_d = Core_Citcom.dynamic_topography_restart_params
    elif rs_type == 'total_topography':
        rs_replace_d = Core_Citcom.total_topography_restart_params
    else:
        print(now(), 'restart_citcoms: ERROR: unknown value for restart_type.')
        print(now(),
              'Valid values are "dynamic_topography" or "total_topography"')
        sys.exit(-1)

    # Now update rs_replace_d values directly from those set in control_d
    for p in sorted(control_d):
        if p.startswith('CitcomS.'):
            rs_replace_d[p] = control_d[p]

    # Show the final rs_replace_d that will be passed to the input creation function
    if verbose:
        print(now(), 'restart_citcoms: rs_replace_d = ')
        Core_Util.tree_print(rs_replace_d)

    # Set placeholders for the directory and file structure and names
    rs_dir_prefix = 'restart_' + rs_type
    rs_inp_cfg_suffix = ''

    rs_structure = control_d['restart_structure']
    if rs_structure == 'all-in-one':
        # create the all-in-one restart directory from section name
        Core_Util.make_dir(rs_dir_prefix)

    # Now loop over restart ages and create restart files for each age
    for a in time_spec_d['age_Ma']:

        # determine what time steps are available for this age
        # NOTE: 'temp' is required to select which output files to check
        found_d = Core_Citcom.find_available_timestep_from_age(
            master_run_d, 'temp', a)

        timestep = found_d['found_timestep']

        # convert the found age to an int
        age = int(np.around(found_d['found_age']))

        print(
            now(),
            '--------------------------------------------------------------------------------------------'
        )
        print(now(), 'Creating files for restart run at age:', age, '(',
              str(a), 'Ma; timestep = ', timestep, ')')
        print(
            now(),
            '--------------------------------------------------------------------------------------------'
        )

        # Set the name of the restart directory
        rs_dir = ''
        if rs_structure == 'separate':
            # create restart directory from section name
            rs_dir = rs_dir_prefix + '_' + str(age) + 'Ma'
            Core_Util.make_dir(rs_dir)
            Core_Util.make_dir(rs_dir + f'/Age{age}Ma')
        else:
            # this is an all-in-one case
            rs_dir = rs_dir_prefix

        # update the new restart input cfg file name suffix
        rs_inp_cfg_suffix = rs_type + '_' + str(age) + 'Ma'

        # create a new set of initial conditions for the restart run,
        # and set file name patterns in control_d
        if rs_type == 'dynamic_topography':
            create_no_lith_temp(control_d, master_run_d, rs_replace_d, rs_dir,
                                rs_inp_cfg_suffix, age, timestep)

        # else, no need to adjust files for 'total_topography' runs

        # create new run input .cfg for this restart run
        restart_run_cfg = {}
        restart_run_cfg = create_restart_run_cfg(control_d, master_run_cfg_d,
                                                 rs_replace_d, rs_dir,
                                                 rs_inp_cfg_suffix, age,
                                                 timestep)

    # End of loop over restart runs

    # Close up shop
    sys.exit(0)
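
For reference, the restart workflow above reads only a handful of keys from the control .cfg: master_run_cfg, master_run_pid, restart_ages, restart_type, and restart_structure, plus any CitcomS.* overrides copied into rs_replace_d and the two lithosphere parameters used by create_no_lith_temp (code example #4). A minimal sketch of the dictionary that Core_Util.parse_configuration_file would need to return, with made-up example values:

# minimal sketch of the control dictionary used by restart_citcoms.py;
# the file names and numeric values below are assumptions, not project defaults
control_d = {
    'master_run_cfg': 'master_run.cfg',      # input .cfg of the master run
    'master_run_pid': 'pid14289.cfg',        # output pid file of the master run
    'restart_ages': '0,50,100',              # ages handed to get_time_spec_dictionary
    'restart_type': 'dynamic_topography',    # or 'total_topography'
    'restart_structure': 'separate',         # or 'all-in-one'
    'lithosphere_depth_DT': 300.0,           # used by create_no_lith_temp
    'lithosphere_temperature_DT': 0.5,       # background temperature for the lithosphere
    # any key starting with 'CitcomS.' is copied into rs_replace_d as a parameter override
}
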
Code example #3
def main():
    '''main workflow of the script'''

    # report the start time and the name of the script
    print(now(), 'create_citcom_case.py')

    # get the case name from user cmd line
    case_name = str(sys.argv[1])
    print(now(), 'Creating GDF directory structure for case:', case_name)

    # create the top level case dir
    Core_Util.make_dir(case_name)

    # set some case level file names
    case_gdf_conf = case_name + '/' + Core_Util.gdf_conf
    pdir_gdf_conf = '..' + '/' + Core_Util.gdf_conf

    # copy the system file to the main case directory
    if not os.path.exists(case_gdf_conf):
        cmd = 'cp ' + Core_Util.sys_gdf_conf + ' ' + case_gdf_conf
        print(now(), cmd)
        subprocess.call(cmd, shell=True)
    else:
        print(now(),
              'Local GDF .conf file found; NOT copying system .conf file')

    # Make sub dirs for case-based Reconstruction/ kinematic and surface data
    Core_Util.make_dir(case_name + '/Reconstruction')

    # Create specific sub-dirs for pre- and post- processing
    in_list = ['Coord', 'ICHist', 'Tracers', 'Topologies', 'Velocity']
    for d in in_list:
        Core_Util.make_dir(case_name + '/Reconstruction/' + d)

    # NOTE: A few similar Reconstruction/ type system-wide input directories
    # are directly referenced by specific entries in the GDF .conf file.
    # (types of age grids, coastlines, velocity, etc.)
    #
    # Many GDF pre- and post- scripts use the current working directory
    # copy (or link) of the .conf file to control processing steps
    # and locate base file paths.
    #
    # Be sure to synchronize your .conf files for case- and run-level work.

    # Check cmd line args to create multiple runs
    n_runs = 1
    if '-r' in sys.argv:
        n_runs = int(sys.argv[sys.argv.index('-r') + 1])

    # Create specific run directories
    for i in range(n_runs):
        # make a string and pad with zeros
        if n_runs < 10: d = '%01d'
        elif n_runs < 100: d = '%02d'
        else: d = '%04d'
        r = d % i
        # make the dir
        Core_Util.make_dir(case_name + '/Run-' + r)
        # link the case-level .conf file
        Core_Util.make_link(pdir_gdf_conf,
                            case_name + '/Run-' + r + '/' + Core_Util.gdf_conf)
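
Run as, for example, create_citcom_case.py case01 -r 3 (the case name here is made up), the script above produces a layout like the following, where <gdf_conf> stands for whatever file name Core_Util.gdf_conf holds:

case01/
    <gdf_conf>                       copied from the system defaults unless already present
    Reconstruction/
        Coord/  ICHist/  Tracers/  Topologies/  Velocity/
    Run-0/
        <gdf_conf> -> ../<gdf_conf>  link to the case-level copy
    Run-1/
        <gdf_conf> -> ../<gdf_conf>
    Run-2/
        <gdf_conf> -> ../<gdf_conf>
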
Code example #4
File: restart_citcoms.py    Project: EarthByte/citcoms
def create_no_lith_temp(control_d, master_run_d, rs_replace_d, rs_dir,
                        rs_inp_cfg_suffix, age, timestep):
    '''Read master run velo files and reset the temperature to background for nodes with znode > some_node.'''
    # (6) Read in velo file from master run for closest age (use read_proc_files_to_cap_list() )
    # (7) Modify the temperature using z>some_node to set temperatures to background for models
    #  without the lithosphere
    # (8) write out `new' IC files using write_cap_or_proc_list_to_files()

    lithosphere_depth_DT = control_d['lithosphere_depth_DT']
    lithosphere_temperature_DT = control_d['lithosphere_temperature_DT']

    # Get nodez from depth
    znode = Core_Citcom.get_znode_from_depth(master_run_d['coor_d'],
                                             lithosphere_depth_DT)
    print(now(), 'create_no_lith_temp: lithosphere_depth_DT = ',
          lithosphere_depth_DT, '; znode=', znode)

    # choose the field to process
    field_name = 'temp'

    # get params for the run
    pid_d = master_run_d['pid_d']
    datafile = pid_d['datafile']

    # get the data file name specifics for this field
    file_name_component = Core_Citcom.field_to_file_map[field_name]['file']
    print(now(), 'create_no_lith_temp: file_name_component = ',
          file_name_component)

    # process data from Citcoms
    if os.path.exists(master_run_d['pid_d']['datadir'] + '/0/'):
        file_format = master_run_d['pid_d']['datadir'] + '/#/' + master_run_d[
            'pid_d']['datafile'] + '.' + file_name_component + '.#.' + str(
                timestep)
    elif os.path.exists(master_run_d['pid_d']['datadir'] + '/'):
        file_format = master_run_d['pid_d']['datadir'] + '/' + master_run_d[
            'pid_d']['datafile'] + '.' + file_name_component + '.#.' + str(
                timestep)
    elif os.path.exists(master_run_d['pid_d']['datadir'].replace('%RANK',
                                                                 '0')):
        file_format = master_run_d['pid_d']['datadir'].replace(
            '%RANK', '#') + '/' + master_run_d['pid_d'][
                'datafile'] + '.' + file_name_component + '.#.' + str(timestep)
    else:
        file_format = 'data/#/' + datafile + '.' + file_name_component + '.#.' + str(
            timestep)
    print(now(), 'create_no_lith_temp: file_format = ', file_format)

    # read data by proc, e.g., velo, visc, comp_nd, surf, botm
    data_by_cap = Core_Citcom.read_proc_files_to_cap_list(
        master_run_d['pid_d'], file_format, field_name)

    # find index of all nodes in a cap that have znode > requested_znode
    # first, make array of znode number for a cap
    nodex = pid_d['nodex']
    nodey = pid_d['nodey']
    nodez = pid_d['nodez']

    # znodes for one cap (same for every cap)
    znode_array = np.tile(range(nodez), nodex * nodey)

    # this gives a mask of all the znodes whose temperature we need to correct
    mask = np.where(znode_array > znode, True, False)

    # loop over all cap lists
    for nn, cap_list in enumerate(data_by_cap):
        print(now(), 'create_no_lith_temp: working on cap number', nn)
        # convert to numpy array
        cap_array = np.array(cap_list)
        # swap in new temperature values for lithosphere
        # temperature is fourth column
        np.place(cap_array[:, 3], mask, lithosphere_temperature_DT)
        # update master list of data with corrected list
        data_by_cap[nn] = cap_array.tolist()

    # check values have been updated
    #if verbose: print( now(), 'create_no_lith_temp: spot check: data_by_cap[0][0:nodez]', data_by_cap[0][0:nodez])

    # map the data from cap lists to processor lists
    out_data_by_proc = Core_Citcom.get_proc_list_from_cap_list(
        master_run_d['pid_d'], data_by_cap)

    # set up output info
    rs_datafile = datafile + '_restart_' + str(int(np.around(age))) + 'Ma'

    ic_dir = rs_dir + '/ic_dir'
    Core_Util.make_dir(ic_dir)
    out_name = ic_dir + '/' + rs_datafile + '.velo.#.' + str(timestep)
    print(now(), 'create_no_lith_temp: out_name =', out_name)

    # now write out data to processor files (with header, necessary for restart)
    Core_Citcom.write_cap_or_proc_list_to_files(master_run_d['pid_d'],
                                                out_name, (out_data_by_proc, ),
                                                'proc', True)

    # Update control_d with file name patterns
    control_d['rs_datafile'] = rs_datafile
    control_d['rs_datadir'] = './ic_dir/'

    return
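
The masking above assumes each cap's rows are ordered with the z node varying fastest, so np.tile(range(nodez), nodex * nodey) reproduces the z-node index of every row, and np.place overwrites the temperature column (column 3) in place through the column view. A small self-contained check of that mechanism, using toy dimensions rather than real CitcomS values:

import numpy as np

# toy dimensions, purely for illustration
nodex, nodey, nodez = 2, 2, 4
znode = 2           # pretend the lithosphere base sits at z-node 2
background_T = 0.0  # value to place above the lithosphere base

# one 'cap': rows ordered with z varying fastest, columns = (vx, vy, vz, temperature)
cap_array = np.arange(nodex * nodey * nodez * 4, dtype=float).reshape(-1, 4)

# z-node index of every row, and a mask of the rows shallower than znode
znode_array = np.tile(range(nodez), nodex * nodey)
mask = znode_array > znode

# overwrite the temperature column for the masked rows, in place
np.place(cap_array[:, 3], mask, background_T)

# every column of nodes now ends with the background temperature at z-node 3
print(cap_array[:, 3].reshape(nodex * nodey, nodez))
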