def make_coordinate_files( master_d ):

    '''Make coordinate files'''

    coord_file = '/net/beno2/nobackup1/danb/input/mkhist/test/Coord/'
    pid_d = master_d['pid_d']
    coord_file += pid_d['datafile'] + '.coord.#'
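    # the '#' in the pattern is a placeholder that the Core_Citcom readers
    # expand to each cap/processor number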

    coor_by_cap =  Core_Citcom.read_citcom_surface_coor( pid_d,
                                                          coord_file )
    outname = 'coord.cap.#'
    coor_cap_names = Core_Citcom.write_cap_or_proc_list_to_files( pid_d,
                               outname, (coor_by_cap,), 'cap', False )

    return coor_by_cap
Example #2
def main():
    """main sequence of script actions"""

    print(now(), 'assimilation_diagnostic.py:')
    print(now(), 'main:')

    # get the .cfg file as a dictionary
    control_d = Core_Util.parse_configuration_file(sys.argv[1])

    PLOT_CROSS_SECTIONS = control_d['PLOT_CROSS_SECTIONS']
    PLOT_MAPS = control_d['PLOT_MAPS']

    # get the master dictionary
    master_d = Core_Citcom.get_all_pid_data(control_d['pid_file'])

    # get times to process and plot
    time_d = Core_Citcom.get_time_spec_dictionary(control_d['time_spec'],
                                                  master_d['time_d'])
    master_d['control_d'] = control_d
    # N.B. master_d['time_d'] contains all the time info from citcoms model
    # N.B. master_d['control_d']['time_d'] contains specific times to process
    master_d['control_d']['time_d'] = time_d

    # func_d is a separate dictionary that is used to transport
    # temporary files and objects between functions
    master_d['func_d'] = {'rm_list': []}

    # find track locations
    make_profile_track_files(master_d)

    make_cpts(master_d)

    # make cross-sections
    if PLOT_CROSS_SECTIONS:
        make_cross_section_diagnostics(master_d)

    # make maps
    if PLOT_MAPS:
        ps_l = []
        for tt in range(len(time_d['time_list'])):
            ps = make_map_postscript(master_d, tt)
            ps_l.append(ps)
        pdf_name = control_d['prefix'] + '_'
        pdf_name += 'Map.pdf'
        Core_Util.make_pdf_from_ps_list(ps_l, pdf_name)

    # clean up
    Core_Util.remove_files(master_d['func_d']['rm_list'])
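
# Note: the func_d['rm_list'] convention above assumes that every helper
# appends its temporary files to the list so main() can remove them at the
# end. A minimal sketch of a conforming helper (hypothetical; not part of
# the original script):
def _example_rm_list_helper(master_d):
    '''Write a scratch file and register it for cleanup by main().'''
    tmp_name = 'scratch.xyz'
    open(tmp_name, 'w').close()  # create an empty placeholder file
    master_d['func_d']['rm_list'].append(tmp_name)
    return tmp_name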
Example #3
def main():
    """main sequence of script actions"""

    print(now(), 'pub_global.py:')
    print(now(), 'main:')

    # parse cmd line input for input plotting config file
    if len(sys.argv) != 2:
        usage()

    # this part is not intuitive to an uninformed user
    # can we avoid these initialize commands?
    # initialize the modules
    Core_Util.initialize()
    Core_Citcom.initialize()

    # Get the framework dictionary
    geoframe_dict = Core_Util.geoframe_dict

    # read settings from control file
    cfg_d = Core_Citcom.parse_configuration_file(sys.argv[1])

    # move loose parameters (not within Figure_X) from cfg_d to a
    # temporary new dictionary (adict) then deepcopy to
    # cfg_d['All_Figure']
    # this cleans up cfg_d by ensuring the keys are e.g.
    # 'All_Figure', 'Figure_A', 'Figure_B' etc.
    #adict = {}
    #for key in list(cfg_d):
    #    if not key.startswith('Figure'):
    #        adict[key] = cfg_d.pop(key)
    #cfg_d['figure_keys'] = sorted(cfg_d.keys())

    # ??? set_global_defaults( adict )
    #cfg_d['All_Figure'] = copy.deepcopy( adict )
    #del adict # delete temporary dictionary

    # ??? set_positioning( cfg_d )

    # set adict as pointer to cfg_d['All_Figure']
    #adict = cfg_d['All_Figure']

    print(cfg_d)

    ps = 'test.ps'

    make_postscript(cfg_d, ps)
Example #4
def main():

    # parameters
    nlon = 30
    nlat = 30

    # preliminaries
    master_d = Core_Citcom.get_all_pid_data( 'pid23039.cfg' )
    coor_by_cap = make_coordinate_files( master_d )

    # algorithm 1: brute force
    t0 = time.time()
    for nn in range(10):
        brute_force( master_d, coor_by_cap, nlon, nlat )
    t1 = time.time()
    total = t1-t0
    print( now(),' brute_force=', total )

    #t1 = timeit.timeit(stmt=lambda: brute_force(master_d, coor_by_cap, nlon, nlat),
    #    number=10)
    #print( t1 )

    # algorithm 2: kd tree
    # specific preliminaries
    coor_by_cap = Core_Util.flatten_nested_structure( coor_by_cap )
    coor_by_cap = np.array( coor_by_cap )
    tree = spatial.KDTree( coor_by_cap )
    #pts = np.array( [[0, 0],[1,2],[30,40],[56,56],[180,76],[240,-24],
    #    [270,-60],[37,5],[345,3],[356,-87]] )

    pts = np.array([30,30])
    t0 = time.time()
    print( tree.query( pts )[1] )
    t1 = time.time()
    total = t1-t0
    print( now(), 'kd_tree=', total )
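
    # tree.query also accepts an (n, 2) array for batched nearest-neighbour
    # lookups (a sketch; the sample points below are illustrative only)
    pts_batch = np.array([[0, 0], [30, 40], [240, -24]])
    dist, idx = tree.query(pts_batch)
    print(now(), 'batch nearest indices =', idx)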
Example #5
def main():
    '''Main sequence of script actions.'''

    print(now(), 'plot_lith_age.py:')
    print(now(), 'main:')

    if len(sys.argv) != 3:
        usage()

    # parameters
    pid_file = sys.argv[1]
    time_spec = sys.argv[2]

    ### parse and setup dictionaries ###

    master_d = Core_Citcom.get_all_pid_data(pid_file)
    pid_d = master_d['pid_d']
    time_d = Core_Citcom.get_time_spec_dictionary(time_spec,
                                                  master_d['time_d'])
    runtime_Myr = time_d['runtime_Myr'][0]
    age = int(round(time_d['age_Ma'][0], 0))
    datafile = pid_d['datafile']
    lith_age_depth = pid_d['lith_age_depth']
    start_age = pid_d['start_age']
    time = time_d['time_list'][0]
    geoframe_d = master_d['geoframe_d']
    depth_km = master_d['coor_d']['depth_km']
    mantle_temp = pid_d['mantle_temp']
    radius = master_d['coor_d']['radius']
    radius_outer = pid_d['radius_outer']
    radius_km = pid_d['radius_km']
    scalet = pid_d['scalet']
    rm_list = []  # list of files to remove

    ###################################
    ### input directories and files ###
    ###################################

    # reconstructed cross-section (plate frame of reference)
    cross_section_dir = 'aus_xsect/'
    cross_section_name = cross_section_dir + 'reconstructed_%(age)s.00Ma.xy' % vars()

    # continental grids
    cont_dir = geoframe_d['age_grid_cont_dir'] + '/'
    cont_name = cont_dir + geoframe_d['age_grid_cont_prefix'] + '%(age)s.grd' % vars()

    # directory of lith_age3_%(age)s.grd files from make_history_for_age.py
    lith_age_dir = '/net/beno2/nobackup1/danb/global/lith_age/'
    lith_age_name = lith_age_dir + 'lith_age3_%(age)s.grd' % vars()

    ### end input directories and files ###

    ### process cross_section_name ###

    infile = open(cross_section_name, 'r')
    lines = infile.readlines()
    infile.close()
    out = []
    for line in lines:
        if not line.startswith('>'):
            out.append(line.strip())

    # profile start location
    lon0 = float(out[0].split()[0])
    lat0 = float(out[0].split()[1])
    print(now(), '(lon0, lat0)', lon0, lat0)
    # profile end location
    lon1 = float(out[1].split()[0])
    lat1 = float(out[1].split()[1])
    print(now(), '(lon1, lat1)', lon1, lat1)

    # min and max bounds for GMT region (R)
    lon_min = min(lon0, lon1) - 10
    lon_max = max(lon0, lon1) + 10
    lat_min = min(lat0, lat1) - 15
    lat_max = max(lat0, lat1) + 15
    print(now(), '(lon_min, lat_min)', lon_min, lat_min)
    print(now(), '(lon_max, lat_max)', lon_max, lat_max)

    # Nico's 1-D profile
    # interpolate for data points between end values
    proj_name = cross_section_name.rstrip('xy') + 'p.xy'
    rm_list.append(proj_name)
    dlon = lon1 - lon0
    dlat = lat1 - lat0

    outfile = open(proj_name, 'w')
    outfile.write('%(lon0)s %(lat0)s %(lon0)s\n' % vars())

    lon = lon0
    lat = lat0
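    # NOTE: the loop below assumes the profile runs west to east
    # (lon1 > lon0); with dlon <= 0 the break condition never triggers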
    while True:
        lon += dlon / 500
        lat += dlat / 500
        if lon <= lon1:  #and lat <= lat1:
            lineout = '%(lon)s %(lat)s %(lon)s\n' % vars()
            outfile.write(lineout)
        else:
            break

    outfile.close()

    # purple circles
    # map
    lon_markers = cross_section_dir + 'lon_markers_map.xy'
    rm_list.append(lon_markers)
    ofile = open(lon_markers, 'w')
    lon_floor = int(np.floor(lon0))
    lon_ceil = int(np.ceil(lon1))
    for lon in range(lon_floor, lon_ceil + 1):
        if not lon % 5:
            olat = (lon - lon0) / dlon * dlat + lat0
            outline = '%(lon)s %(olat)s\n' % vars()
            ofile.write(outline)
    ofile.close()

    # annulus
    lon_markers_ann = cross_section_dir + 'lon_markers_ann.xy'
    rm_list.append(lon_markers_ann)
    plon, plat = np.loadtxt(lon_markers, unpack=True)
    prad = np.tile(radius_outer, len(plon))
    np.savetxt(lon_markers_ann, np.column_stack((plon, prad)))

    ### end process cross_section_name ###

    ### build list of temperature grids to track through ###
    # these grids must have previously been created using grid_maker.py
    gpfx = 'grid/' + datafile
    temp_list = []
    for depth in depth_km:
        gsfx = '.temp.' + str(int(depth)) + '.' + str(time) + '.grd'
        temp_list.append(gpfx + gsfx)

    # take just from 500 km depth and less
    depth_km_array = np.array(depth_km)
    znode = np.min(np.where(depth_km_array < 500)) - 1
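    # (depth_km is ordered from the CMB up to the surface in this model,
    # so subtracting one keeps a single extra node below the 500 km cut)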

    temp_list = temp_list[znode:]

    ### end of build temperature grid list ###

    #### idealized thermal structure from age grids
    ideal_lith_xyz = cross_section_dir + 'ideal.lith.%(age)s.xyz' % vars()
    rm_list.append(ideal_lith_xyz)
    Core_Util.find_value_on_line(proj_name, lith_age_name, ideal_lith_xyz)
    lithlon, lithlat, lithdist1, lithage_Ma = np.loadtxt(ideal_lith_xyz,
                                                         unpack=True)
    lithdist = np.tile(lithdist1, pid_d['nodez'])
    lithage_Ma = np.tile(lithage_Ma, pid_d['nodez'])
    lithrad = []
    for rad in radius:
        lithrad.extend([rad for xx in range(len(lithdist1))])
    lithrad = np.array(lithrad)
    lithtemp = erf((1.0 - lithrad) / (2.0 * np.sqrt(lithage_Ma / scalet)))
    lithtemp *= float(mantle_temp)
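    # this is the half-space cooling solution T = T_m * erf(z / (2*sqrt(kappa*t)))
    # in non-dimensional form: depth z = 1 - r and the age is rescaled by scalet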

    nan = np.where(np.isnan(lithtemp))
    #nnan = np.where(~np.isnan( lithtemp ))
    np.savetxt(ideal_lith_xyz, np.column_stack((lithdist, lithrad, lithtemp)))

    #nan_values = np.ones( np.size( nan ) )*-1
    #f_handle = open( ideal_lith_xyz, 'ab')
    #np.savetxt(f_handle, np.column_stack( (lithdist[nan], lithrad[nan], nan_values) ))
    #f_handle.close()

    #### end of idealized thermal structure from age grids

    # make temperature xyz
    temp_xyz = cross_section_dir + 'citcom.temp.%(age)s.xyz' % vars()
    rm_list.append(temp_xyz)
    # this is hacky, but loop over only the top 500 km
    master_d['coor_d']['radius'] = master_d['coor_d']['radius'][znode:]
    pao, x_ann_max = Core_Util.make_annulus_xyz(master_d, proj_name, temp_xyz,
                                                temp_list)

    ### make idealized lithosphere and citcom temperature grid ###
    blockmedian_I = '0.2/0.0035'
    surface_I = '0.1/0.00125'
    surface_T = '0.25'
    rad_in = '0.92151939'
    rad_out = '1.0'
    # for plotting data
    R_ann = str(lon0) + '/' + str(lon1) + '/' + rad_in + '/' + rad_out
    # for dimensional psbasemap
    psbase_R = str(lon0) + '/' + str(lon1) + '/' + str(5871) + '/' + str(
        radius_km)

    grid_names = []
    for xyz in [temp_xyz, ideal_lith_xyz]:
        block_name = xyz.rstrip('xyz') + 'b.xyz'
        rm_list.append(block_name)
        grid_name = block_name.rstrip('xyz') + 'grd'
        grid_names.append(grid_name)
        rm_list.append(grid_name)
        cmd = xyz + ' -I' + blockmedian_I + ' -R' + R_ann
        callgmt('blockmedian', cmd, '', '>', block_name)
        cmd = block_name + ' -I' + surface_I + ' -R' + R_ann
        cmd += ' -T' + surface_T
        cmd += ' -Ll0 -Lu1'
        callgmt('surface', cmd, '', '', '-G' + grid_name)

    ### end of make temperature grids ###

    ### percentage error between temperature fields ###
    cmd = grid_names[0] + ' ' + grid_names[1] + ' SUB '
    cmd += grid_names[1] + ' DIV'
    cmd += ' 100 MUL'
    temp_diff_grid = cross_section_dir + 'temp.difference.grd'
    grid_names.append(temp_diff_grid)
    rm_list.append(temp_diff_grid)
    callgmt('grdmath', cmd, '', '=', temp_diff_grid)

    ### end percentage error

    ### lith_age_depth overlay line
    xy = cross_section_dir + 'lith_depth.xy'
    rm_list.append(xy)
    lith_age_radius = pid_d['radius_outer'] - pid_d['lith_age_depth']
    lith_depth = np.tile(lith_age_radius, len(lithdist1))
    np.savetxt(xy, np.column_stack((lithdist1, lith_depth)))

    ### end overlay line

    ### make cpts ###

    # age grid
    cpt_pfx = cross_section_dir
    cpt_name = cpt_pfx + 'age.cpt'
    rm_list.append(cpt_name)
    cmd = '-Crainbow -T0/370/10'
    callgmt('makecpt', cmd, '', '>', cpt_name)

    # continental types
    cpt_name = cpt_pfx + 'cont.cpt'
    rm_list.append(cpt_name)
    cmd = '-Crainbow -T-4/0/1'
    callgmt('makecpt', cmd, '', '>', cpt_name)

    # differential temperature
    cpt_name = cpt_pfx + 'diff.cpt'
    rm_list.append(cpt_name)
    cmd = '-Cpolar -T-10/10/1'
    callgmt('makecpt', cmd, '', '>', cpt_name)

    # temperature
    cpt_name = cpt_pfx + 'temp.cpt'
    cmd = '-Cpolar -T0/1/0.0675'
    rm_list.append(cpt_name)
    callgmt('makecpt', cmd, '', '>', cpt_name)

    # for temperature contours
    cpt_name = cpt_pfx + 'temp.cont'
    cmd = '-Cjet -T0.1/0.4/0.1'
    rm_list.append(cpt_name)
    callgmt('makecpt', cmd, '', '>', cpt_name)

    ### plotting ###
    ps = datafile + '.lith.age.analysis.%(age)sMa.ps' % vars()

    callgmt('gmtset', 'PAGE_ORIENTATION', '', '', 'portrait')
    callgmt('gmtset', 'LABEL_FONT_SIZE', '', '', '12')
    callgmt('gmtset', 'LABEL_FONT', '', '', '4')
    callgmt('gmtset', 'LABEL_OFFSET', '', '', '0.02')
    callgmt('gmtset', 'ANNOT_FONT_SIZE_PRIMARY', '', '', '10p')
    callgmt('gmtset', 'ANNOT_FONT_PRIMARY', '', '', '4')

    opts_d = Core_GMT.start_postscript(ps)

    # pre-initialize for pstext commands
    pstext_d = opts_d.copy()
    pstext_d['R'] = '0/8.5/0/11'
    pstext_d['J'] = 'x1.0'

    # title information
    stdin = '1 10.5 14 0 4 ML Model = %(datafile)s\n' % vars()
    stdin += '1 10.3 14 0 4 ML lith_age_depth = %(lith_age_depth)s\n' % vars()
    stdin += '7.5 10.5 14 0 4 MR Current Age = %(age)s Ma\n' % vars()
    stdin += '7.5 10.3 14 0 4 MR start_age = %(start_age)s Ma\nEOF' % vars()
    callgmt('pstext', '', pstext_d, '<< EOF >>', ps + '\n' + stdin)

    # plot maps #
    map_d = opts_d.copy()
    map_d['B'] = 'a20f10/a10f5::WeSn'
    map_d['R'] = '%(lon_min)s/%(lon_max)s/%(lat_min)s/%(lat_max)s' % vars()
    map_d['C'] = cross_section_dir + 'age.cpt'
    map_d['J'] = 'M3'
    map_d['X'] = 'a1'
    map_d['Y'] = 'a8'
    map_grid = lith_age_name

    callgmt('grdimage', lith_age_name, map_d, '>>', ps)

    C = cross_section_dir + 'age.cpt'
    cmd = '-Ba50f10:"Age (Ma)": -D2.5/7.5/2.5/0.1h -C%(C)s -K -O' % vars()
    callgmt('psscale', cmd, '', '>>', ps)

    del map_d['B']
    del map_d['C']
    map_d['m'] = ' '
    map_d['W'] = '5,white'
    callgmt('psxy', proj_name, map_d, '>>', ps)
    del map_d['m']
    del map_d['W']
    map_d['G'] = 'purple'
    map_d['S'] = 'c0.05'
    callgmt('psxy', lon_markers, map_d, '>>', ps)
    del map_d['G']
    del map_d['S']

    # continental types
    map_d['B'] = 'a20f10/a10f5::wESn'
    map_d['C'] = cross_section_dir + 'cont.cpt'
    map_d['X'] = 'a4.5'
    map_d['Y'] = 'a8'
    callgmt('grdimage', cont_name, map_d, '>>', ps)

    C = cross_section_dir + 'cont.cpt'
    cmd = '-Ba1:"Continental type (stencil value)": -D6/7.5/2.5/0.1h -C%(C)s -K -O' % vars()
    callgmt('psscale', cmd, '', '>>', ps)

    del map_d['B']
    del map_d['C']
    map_d['m'] = ' '
    map_d['W'] = '5,black'
    callgmt('psxy', proj_name, map_d, '>>', ps)
    del map_d['m']
    del map_d['W']
    map_d['G'] = 'purple'
    map_d['S'] = 'c0.05'
    callgmt('psxy', lon_markers, map_d, '>>', ps)
    del map_d['G']
    del map_d['S']

    # end plot maps #

    # plot cross-sections

    # temperature cross-section
    psbase_d = opts_d.copy()
    psbase_d['B'] = 'a10/500::WsNe'
    psbase_d['J'] = 'Pa6/' + str(pao) + 'z'
    psbase_d['R'] = psbase_R
    psbase_d['X'] = 'a1.25'
    psbase_d['Y'] = 'a5.25'
    callgmt('psbasemap', '', psbase_d, '>>', ps)

    opts_d['C'] = cross_section_dir + 'temp.cpt'
    opts_d['J'] = 'Pa6/' + str(pao)
    opts_d['R'] = R_ann
    opts_d['X'] = 'a1.25'
    opts_d['Y'] = 'a5.25'
    callgmt('grdimage', grid_names[0], opts_d, '>>', ps)

    # profile of lith_age_depth on this cross-section
    del opts_d['C']
    opts_d['W'] = '3,black,-'
    callgmt('psxy', xy, opts_d, '>>', ps)
    del opts_d['W']
    opts_d['G'] = 'purple'
    opts_d['N'] = ' '
    opts_d['S'] = 'c0.06'
    callgmt('psxy', lon_markers_ann, opts_d, '>>', ps)
    del opts_d['G']
    del opts_d['N']
    del opts_d['S']

    stdin = '1 6.25 12 0 4 ML CitcomS\n'
    stdin += '7.5 6.25 12 0 4 MR Temp\nEOF'
    callgmt('pstext', '', pstext_d, '<< EOF >>', ps + '\n' + stdin)

    C = cross_section_dir + 'temp.cpt'
    cmd = '-Ba0.2f0.1 -D4.25/5.7/2.5/0.1h -C%(C)s -K -O' % vars()
    callgmt('psscale', cmd, '', '>>', ps)

    # idealized lith temperature cross-section
    psbase_d['Y'] = 'a3.75'
    callgmt('psbasemap', '', psbase_d, '>>', ps)

    opts_d['C'] = cross_section_dir + 'temp.cpt'
    opts_d['Y'] = 'a3.75'
    callgmt('grdimage', grid_names[1], opts_d, '>>', ps)
    del opts_d['C']

    # profile of lith_age_depth on this cross-section
    opts_d['W'] = '3,black,-'
    callgmt('psxy', xy, opts_d, '>>', ps)
    del opts_d['W']

    opts_d['G'] = 'purple'
    opts_d['N'] = ' '
    opts_d['S'] = 'c0.06'
    callgmt('psxy', lon_markers_ann, opts_d, '>>', ps)
    del opts_d['G']
    del opts_d['N']
    del opts_d['S']

    stdin = '1 4.75 12 0 4 ML Idealised\n'
    stdin += '7.5 4.75 12 0 4 MR Temp\nEOF'
    callgmt('pstext', '', pstext_d, '<< EOF >>', ps + '\n' + stdin)

    C = cross_section_dir + 'temp.cpt'
    cmd = '-Ba0.2f0.1 -D4.25/4.2/2.5/0.1h -C%(C)s -K -O' % vars()
    callgmt('psscale', cmd, '', '>>', ps)

    # contours plot
    psbase_d['Y'] = 'a2.25'
    callgmt('psbasemap', '', psbase_d, '>>', ps)
    opts_d['Y'] = 'a2.25'
    opts_d['C'] = cross_section_dir + 'temp.cont'
    opts_d['W'] = '3,red'
    callgmt('grdcontour', grid_names[0], opts_d, '>>', ps)
    opts_d['W'] = '3,green'
    callgmt('grdcontour', grid_names[1], opts_d, '>>', ps)
    del opts_d['C']
    del opts_d['W']

    opts_d['G'] = 'purple'
    opts_d['N'] = ' '
    opts_d['S'] = 'c0.06'
    callgmt('psxy', lon_markers_ann, opts_d, '>>', ps)
    del opts_d['G']
    del opts_d['N']
    del opts_d['S']

    stdin = '1 3.25 12 0 4 ML Contours\n'
    stdin += '7.5 3.25 12 0 4 MR Temp\nEOF'
    callgmt('pstext', '', pstext_d, '<< EOF >>', ps + '\n' + stdin)

    # difference of temperature fields (relative)
    psbase_d['Y'] = 'a0.75'
    callgmt('psbasemap', '', psbase_d, '>>', ps)
    opts_d['C'] = cross_section_dir + 'diff.cpt'
    opts_d['Y'] = 'a0.75'
    callgmt('grdimage', grid_names[2], opts_d, '>>', ps)
    del opts_d['C']

    opts_d['G'] = 'purple'
    opts_d['N'] = ' '
    opts_d['S'] = 'c0.06'
    callgmt('psxy', lon_markers_ann, opts_d, '>>', ps)
    del opts_d['G']
    del opts_d['N']
    del opts_d['S']

    C = cross_section_dir + 'diff.cpt'
    cmd = '-Ba5f1 -D4.25/1.2/2.5/0.1h -C%(C)s -K -O' % vars()
    callgmt('psscale', cmd, '', '>>', ps)

    stdin = '1 1.75 12 0 4 ML Delta (\045)\n'
    stdin += '7.5 1.75 12 0 4 MR Temp\nEOF'
    #stdin += '4.25 0.6 12 0 4 MC Note: No assimilation regions are shown in BLACK\nEOF'
    callgmt('pstext', '', pstext_d, '<< EOF >>', ps + '\n' + stdin)

    Core_GMT.end_postscript(ps)

    # clean up temporary files
    Core_Util.remove_files(rm_list)
Example #6
def main():
    print(now(), 'index_citcom.py')

    # get the .cfg file as a dictionary
    control_d = Core_Util.parse_configuration_file(sys.argv[1])
    #Core_Util.tree_print( control_d )

    # set the pid file
    pid_file = control_d['pid_file']

    # get the master dictionary and define aliases
    master_d = Core_Citcom.get_all_pid_data(pid_file)
    coor_d = master_d['coor_d']
    pid_d = master_d['pid_d']

    # Double check for essential data
    if master_d['time_d'] is None:
        print(now())
        print(
            'ERROR: Required file "[CASE_NAME].time" is missing from this model run.'
        )
        print('       Aborting processing.')
        sys.exit(-1)

    # set up working variables
    datadir = pid_d['datadir']
    datafile = pid_d['datafile']
    startage = pid_d['start_age']
    output_format = pid_d['output_format']

    depth_list = coor_d['depth_km']
    nodez = pid_d['nodez']
    nproc_surf = pid_d['nproc_surf']

    found_depth_list = []

    # Check how to read and parse the time spec:
    read_time_d = True

    # Compute the timesteps to process
    if read_time_d:
        time_spec_d = Core_Citcom.get_time_spec_dictionary(
            control_d['time_spec'], master_d['time_d'])
    else:
        time_spec_d = Core_Citcom.get_time_spec_dictionary(
            control_d['time_spec'])
    print(now(), 'index_citcom.py: time_spec_d = ')
    Core_Util.tree_print(time_spec_d)

    # levels to process
    level_spec_d = Core_Util.get_spec_dictionary(control_d['level_spec'])
    print(now(), 'index_citcom.py: level_spec_d = ')
    Core_Util.tree_print(level_spec_d)

    #
    # Main looping, first over times, then sections, then levels
    #

    print(
        now(),
        '========================================================================='
    )
    print(
        now(),
        'index_citcom.py: Main looping, first over times, then sections, then levels'
    )
    print(
        now(),
        '========================================================================='
    )

    # Loop over times
    for T, time in enumerate(time_spec_d['time_list']):
        #print( now(), 'index_citcom.py: Processing time = ', time)

        if 'Ma' in time:
            # strip off units and make a number
            time = float(time.replace('Ma', ''))

            # determine what time steps are available for this age
            # NOTE: 'temp' is required to set which output files to check
            found_d = Core_Citcom.find_available_timestep_from_age(
                master_d, 'temp', time)

        else:
            # model time steps
            time = float(time)

            # determine what time steps are available for this timestep
            # NOTE: 'temp' is required to set which output files to check
            found_d = Core_Citcom.find_available_timestep_from_timestep(
                master_d, 'temp', time)

        # end of check on time format

        # set variables for subsequent loops
        timestep = found_d['found_timestep']
        runtime_Myr = found_d['found_runtime']
        # convert the found age to an int
        age_Ma = int(np.around(found_d['found_age']))

        print(now(),
              'index_citcom.py: time data: requested value -> found value')
        print(now(), '  ',
              'age =', found_d['request_age'], '->', age_Ma,
              'step =', found_d['request_timestep'], '->', timestep,
              'r_tm =', found_d['request_runtime'], '->', runtime_Myr)

        # empty file_data
        file_data = []

        # Loop over sections (fields)
        for S, s in enumerate(control_d['_SECTIONS_']):

            # FIXME: this extra indent is probably from when sections loop was inside level loop ?

            #print( now(), 'index_citcom.py: Processing section = ', s)

            # check for required parameter 'field'
            if 'field' not in control_d[s]:
                print(
                    'ERROR: Required parameter "field" missing from section.')
                print('       Skipping this section.')
                continue  # to next section

            # get the field name
            field_name = control_d[s]['field']

            #print('')
            #print( now(), 'index_citcom.py: Processing: field =', field_name)

            # set the region
            #if nproc_surf == 12:
            #    grid_R = 'g'
            #    # optionally adjust the lon bounds of the grid to -180/180
            #    if 'shift_lon' in control_d :
            #        print( now(), 'index_citcom.py: grid_R set to to "d" : -180/+180/-90/90')
            #        grid_R = 'd'
            #    else :
            #        print( now(), 'index_citcom.py: grid_R set to to "g" : 0/360/-90/90')
            #else:
            #    grid_R  = str(pid_d['lon_min']) + '/' + str(pid_d['lon_max']) + '/'
            #    grid_R += str(pid_d['lat_min']) + '/' + str(pid_d['lat_max'])

            # get the data file name specifics for this field
            file_name_component = Core_Citcom.field_to_file_map[field_name][
                'file']

            # get the data file column name specifics for this field
            field_column = Core_Citcom.field_to_file_map[field_name]['column']

            # report
            #print( now(), 'index_citcom.py: field = ', field_name, '; file_comp =', file_name_component, '; col =', field_column)
            # process data from Citcoms
            file_format = ''

            # check for various data dirs:
            if os.path.exists(datadir + '/0/'):
                file_format = datadir + '/#/' + datafile + '.' + file_name_component + '.#.' + str(
                    timestep)

            elif os.path.exists(datadir + '/'):
                file_format = datadir + '/' + datafile + '.' + file_name_component + '.#.' + str(
                    timestep)

            elif os.path.exists('data'):
                file_format = './data/#/' + datafile + '.' + file_name_component + '.#.' + str(
                    timestep)

            elif os.path.exists('Data'):
                file_format = './Data/#/' + datafile + '.' + file_name_component + '.#.' + str(
                    timestep)

            # report error
            else:
                print(now())
                print('ERROR: Cannot find output data.')
                print('       Skipping this section.')
                print(now(), 'index_citcom.py: file_format = ', file_format)
                continue  # to next section

            print(now(), 'index_citcom.py: file_format = ', file_format)

            #
            # Loop over levels
            #
            for L, level in enumerate(level_spec_d['list']):

                #    print( now(), 'index_citcom.py: Processing level = ', level)

                # ensure level is an int value
                level = int(level)
                depth = int(depth_list[level])
                found_depth_list.append(depth)

                #print( now(), '------------------------------------------------------------------------------')
                print(now(), 'index_citcom.py: ', s,
                      ': ts =', timestep,
                      '; age =', age_Ma,
                      #'; runtime_Myr =', runtime_Myr,
                      '; level =', level,
                      '; depth_km =', depth,
                      '; field =', field_name)
                #print( now(), '------------------------------------------------------------------------------')

                # FIXME: is it ok to change the default name to have age, rather than timestep?
                xyz_filename = datafile + '-' + field_name + '-' + str(
                    age_Ma) + 'Ma-' + str(depth) + '.xyz'
                #print( now(), 'index_citcom.py: xyz_filename =', xyz_filename)

                #xy_filename = ''
                #xy_path = master_d['geoframe_d']['gplates_line_dir']
                #xy_filename = xy_path + '/' + 'topology_platepolygons_' + age + '.00Ma.xy'
                #print( now(), 'index_citcom.py: xy_filename = ', xy_filename)

                # Make a plot of the grids

                # citcoms

            # end of loop over levels

        # end of loop over sections

    # end of loop over times

    print(now(), 'depth_list = ', depth_list)
    print(now(), 'found_depth_list = ', found_depth_list)
Example #7
def main():
    print(now(), 'grid_maker_gplates.py')

    # get the .cfg file as a dictionary
    control_d = Core_Util.parse_configuration_file(sys.argv[1], False, False)
    Core_Util.tree_print(control_d)

    time_spec_d = Core_Citcom.get_time_spec_dictionary(control_d['time_spec'])
    print(now(), 'grid_maker_gplates.py: time_spec_d = ')
    Core_Util.tree_print(time_spec_d)

    # Get the coordinate data from the 0 Ma files
    print(now(), 'grid_maker_gplates.py: get coordinate data from .xy files:')
    lon = []
    lat = []
    for i in range(control_d['nproc_surf']):
        # get the lat lon from the .xy file
        vel_xy_filename = control_d['velocity_prefix'] + '0.%(i)s.xy' % vars()
        print(now(), 'grid_maker_gplates.py: vel_xy_filename = ',
              vel_xy_filename)
        i_lat, i_lon = np.loadtxt(vel_xy_filename, usecols=(0, 1), unpack=True)
        lat.append(i_lat)
        lon.append(i_lon)

    lon = Core_Util.flatten_nested_structure(lon)
    lat = Core_Util.flatten_nested_structure(lat)

    print(now(), 'grid_maker_gplates.py: len(lon) = ', len(lon))
    print(now(), 'grid_maker_gplates.py: len(lat) = ', len(lat))

    #
    # Main looping, first over times, then sections, then levels
    #

    # Variables that will be updated each loop:
    # time will be a zero padded string value used for filenames and reporting
    # depth will be a zero padded string value used for filenames and reporting

    print(
        now(),
        '========================================================================='
    )
    print(
        now(),
        'grid_maker_gplates.py: Main looping, first over times, then sections, then levels'
    )
    print(
        now(),
        '========================================================================='
    )

    # Loop over times
    for tt, time in enumerate(time_spec_d['time_list']):

        print(now(), 'grid_maker_gplates.py: Processing time = ', time)

        # empty file_data
        file_data = []

        # cache for the file_format
        file_format_cache = ''

        # Loop over sections (fields)
        for ss, s in enumerate(control_d['_SECTIONS_']):

            # FIXME: this extra indent is probably from when sections loop was inside level loop ?

            print(now(), 'grid_maker_gplates.py: Processing section = ', s)

            # check for required parameter 'field'
            if 'field' not in control_d[s]:
                print(
                    'ERROR: Required parameter "field" missing from section.')
                print('       Skipping this section.')
                continue  # to next section

            # get the field name
            field_name = control_d[s]['field']

            print('')
            print(now(), 'grid_maker_gplates.py: Processing: field =',
                  field_name)

            # reset region to use -Rg for gplates
            grid_R = 'g'

            if 'shift_lon' in control_d:
                print(
                    now(),
                    'grid_maker_gplates.py: grid_R set to "d" : -180/+180/-90/90'
                )
                grid_R = 'd'
            else:
                print(
                    now(),
                    'grid_maker_gplates.py: grid_R set to "g" : 0/360/-90/90'
                )

            # get the data file name specifics for this field
            file_name_component = Core_Citcom.field_to_file_map[field_name][
                'file']
            print(now(), 'grid_maker_gplates.py: file_name_component = ',
                  file_name_component)

            # get the data file column name specifics for this field
            field_column = Core_Citcom.field_to_file_map[field_name]['column']
            print(now(), 'grid_maker_gplates.py: field_column = ',
                  field_column)

            # remove potential zero padding from age values
            time = time.replace('Ma', '')
            # process data from GPlates
            file_format = control_d['velocity_prefix'] + '%(time)s.#' % vars()

            print(now(), 'grid_maker_gplates.py: file_format = ', file_format)

            # read data in by cap
            file_data = Core_Citcom.read_cap_files_to_cap_list(
                control_d, file_format)

            # flatten data since we don't care about specific cap numbers for the loop over levels/depths
            file_data = Core_Util.flatten_nested_structure(file_data)
            print(now(), 'grid_maker_gplates.py: len(file_data) = ',
                  len(file_data))

            # Get the specific column for this field_name
            field_data = np.array([line[field_column] for line in file_data])

            print(now(), 'grid_maker_gplates.py: type(field_data) = ',
                  type(field_data))
            print(now(), 'grid_maker_gplates.py:  len(field_data) = ',
                  len(field_data))
            print(now())

            # check for gplates_vmag
            if field_name == 'gplates_vmag':
                # read the vy data from col 1
                field_data_vy = [line[1] for line in file_data]
                # compute the magnitude
                vx_a = np.array(field_data)
                vy_a = np.array(field_data_vy)
                vmag_a = np.hypot(vx_a, vy_a)
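                # np.hypot computes an elementwise sqrt(vx**2 + vy**2)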
                # convert back to list
                field_data = vmag_a.tolist()

            print(
                now(),
                '------------------------------------------------------------------------------'
            )
            print(now(), 'grid_maker_gplates.py: tt,ss = ', tt, ',', ss, ';')
            print(now(), 'grid_maker_gplates.py: summary for', s, ': time =',
                  time, '; field_name =', field_name)
            print(
                now(),
                '------------------------------------------------------------------------------'
            )

            depth = 0
            field_slice = field_data
            xyz_filename = field_name + '-' + str(time) + '-' + str(
                depth) + '.xyz'

            print(now(), 'grid_maker_gplates.py: xyz_filename =', xyz_filename)

            print(now(), 'grid_maker_gplates.py: type(field_slice) = ',
                  type(field_slice))
            print(now(), 'grid_maker_gplates.py:  len(field_slice) = ',
                  len(field_slice))
            print(now())

            # create the xyz data
            xyz_data = np.column_stack((lon, lat, field_slice))
            np.savetxt(xyz_filename, xyz_data, fmt='%f %f %f')

            # create the median file
            median_xyz_filename = xyz_filename.rstrip('xyz') + 'median.xyz'

            blockmedian_I = control_d[s].get('blockmedian_I', '0.5')
            cmd = xyz_filename + ' -I' + str(blockmedian_I) + ' -R' + grid_R

            Core_GMT.callgmt('blockmedian', cmd, '', '>', median_xyz_filename)

            # get a T value for median file
            if 'Ll' not in control_d[s] or 'Lu' not in control_d[s]:
                T = Core_GMT.get_T_from_minmax(median_xyz_filename)
            else:
                dt = (control_d[s]['Lu'] - control_d[s]['Ll']) / 10
                T = '-T' + str(control_d[s]['Ll']) + '/'
                T += str(control_d[s]['Lu']) + '/' + str(dt)

            print(now(), 'grid_maker_gplates.py: T =', T)

            # create the grid
            grid_filename = xyz_filename.rstrip('xyz') + 'grd'

            surface_I = control_d[s].get('surface_I', '0.25')
            cmd = median_xyz_filename + ' -I' + str(surface_I) + ' -R' + grid_R

            if 'Ll' in control_d[s]:
                cmd += ' -Ll' + str(control_d[s]['Ll'])
            if 'Lu' in control_d[s]:
                cmd += ' -Lu' + str(control_d[s]['Lu'])
            if 'T' in control_d[s]:
                cmd += ' -T' + str(control_d[s]['T'])

            #opt_a =
            Core_GMT.callgmt('surface', cmd, '', '', ' -G' + grid_filename)

            # label the variables

            # -Dxname/yname/zname/scale/offset/title/remark
            cmd = grid_filename + ' -D/=/=/' + str(field_name) + '/=/=/' + str(
                field_name) + '/' + str(field_name)
            Core_GMT.callgmt('grdedit', cmd, '', '', '')

            # Associate this grid with GPlates exported line data in .xy format:
            # compute age value
            age_float = 0.0
            if field_name.startswith('gplates_'):
                # time_list value for gplates data is set with age values
                age_float = float(time)

            geoframe_d = Core_Util.parse_geodynamic_framework_defaults()

            # truncate to nearest int and make a string for the gplates .xy file name
            if age_float < 0: age_float = 0.0
            xy_path = geoframe_d['gplates_line_dir']
            xy_filename = xy_path + '/' + 'topology_platepolygons_' + str(
                int(age_float)) + '.00Ma.xy'
            print(now(), 'grid_maker_gplates.py: xy_filename = ', xy_filename)

            # Make a plot of the grids
            J = 'X5/3'  #'R0/6'
            #J = 'M5/3'
            if 'J' in control_d[s]:
                J = control_d[s]['J']

            C = 'polar'
            if 'C' in control_d[s]:
                C = control_d[s]['C']

            # gplates
            Core_GMT.plot_grid(grid_filename, xy_filename, grid_R,
                               '-T-10/10/1')
            # end of plotting

            # Optional step to transform grid to plate frame
            if 'make_plate_frame_grid' in control_d:
                cmd = 'frame_change_pygplates.py %(time)s %(grid_filename)s %(grid_R)s' % vars()
                print(now(), 'grid_maker_gplates.py: cmd =', cmd)
                os.system(cmd)
                filename = grid_filename.replace('.grd', '-plateframe.grd')
                Core_GMT.plot_grid(filename, xy_filename, grid_R, '-T-10/10/1')
Example #8
# FYI Mark Turner to see how to read and write processor and cap data
# updated to amend temperature field for dynamic topography (DT)
# restart
# written by Dan J. Bower, 11/21/13

# This takes a bit of time to run since it is crunching a lot of data
# see generated output here:
#     /home/danb/mark/read_write

# read CitcomS data
inp_file = '/net/beno2/nobackup1/danb/regional/ram/gam01/data/#/gam01.velo.#.0'
pid_file = '/net/beno2/nobackup1/danb/regional/ram/gam01/pid24611.cfg'

# get dictionaries etc
pid_d = cu.parse_configuration_file(pid_file)
pid_d.update(cc.derive_extra_citcom_parameters(pid_d))

# read in data to a cap list from processor files
data_by_cap = cc.read_proc_files_to_cap_list(pid_d, inp_file, 'temp')

##########################################
##### UPDATE LITHOSPHERE TEMPERATURE #####
##########################################

# user specified in cfg
# where dt stands for 'dynamic topography'
lithosphere_depth_DT = 300.0  # km
lithosphere_temperature_DT = 0.5  # non-dimensional

# find out which node lithosphere_depth_dt corresponds to by searching in depth list
# TODO
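
# One possible approach for the TODO above (a sketch, not from the original
# script): pick the node whose depth is closest to lithosphere_depth_DT,
# using the 'coor_d' depth list that cc.get_all_pid_data() provides in the
# other examples in this file
import numpy as np

depth_km = np.array(cc.get_all_pid_data(pid_file)['coor_d']['depth_km'])
znode_DT = int(np.argmin(np.abs(depth_km - lithosphere_depth_DT)))
print('lithosphere_depth_DT of', lithosphere_depth_DT, 'km maps to znode', znode_DT)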
Example #9
def main():
    print(now(), 'copy_citcom_model_from_cluster.py')

    # Mark - these should probably be user inputs
    # You could also allow the user to specify the usual types of
    # time strings like we have for grid_maker.py  Therefore, the user
    # could use timesteps, run times, or ages in the various comma-sep
    # lists or start/end/range formats

    # for testing I was running this script on citerra in this directory:
    # /home/danb/lat/lat01

    field_list = ['velo', 'visc']  # list to loop over
    time_list = ['0', '290']  # list to loop over
    # local processing directory that can be 'seen' from the cluster
    # e.g., I can see this from citerra and is just a test location
    rootdir = '/home/danb/beno/test_copy/model'

    pid_file = 'pid14289.cfg'

    # pid_file should also be an input argument
    # I'm assuming the script will always be run in the directory of
    # the CitcomS model on the cluster where the data was generated

    # parsing the pid file is helpful because it gives us the datafile
    # etc.
    master_d = Core_Citcom.get_all_pid_data(pid_file)
    pid_d = master_d['pid_d']

    # make data directory and determine structure
    datafile = pid_d['datafile']
    datadir = pid_d['datadir']

    if datadir.endswith('%RANK'):
        print('data stored by processor')
        datadir = datadir[:-5]  # strip '%RANK'
        print(datadir)
        PROC = True
    else:
        PROC = False  # not sure if this will be necessary, but
        # easy to include in this development draft

    # copy top level files
    cmd = 'cp %(pid_file)s %(rootdir)s' % vars()
    subprocess.call(cmd, shell=True)
    cmd = 'cp stderr.txt %(rootdir)s/stderr.txt' % vars()
    subprocess.call(cmd, shell=True)
    cmd = 'cp stdout.txt %(rootdir)s/stdout.txt' % vars()
    subprocess.call(cmd, shell=True)
    # copy user-created coordinate file if it exists
    coor_file = pid_d['coor_file']
    cmd = 'cp %(coor_file)s %(rootdir)s/%(coor_file)s' % vars()
    subprocess.call(cmd, shell=True)
    cmd = 'cp %(datafile)s.cfg %(rootdir)s/%(datafile)s.cfg' % vars()
    subprocess.call(cmd, shell=True)

    datadir_abs = rootdir + '/' + datadir

    # make the root (if doesn't exist) and data directory
    Core_Util.make_dir(datadir_abs)

    # copy data
    if PROC:
        for proc in range(pid_d['total_proc']):
            datadir_proc = datadir_abs + str(proc) + '/'
            Core_Util.make_dir(datadir_proc)
            for field in field_list:
                # always need coordinate file
                coord_name = str(proc) + '/' + datafile + '.coord.' + str(proc)
                filename1 = datadir + coord_name
                filename2 = datadir_abs + coord_name
                cmd = 'cp %(filename1)s %(filename2)s' % vars()
                print(cmd)
                # Mark - this command actually calls the copy command
                subprocess.call(cmd, shell=True)
                for time in time_list:
                    # create filename
                    file_name = str(proc) + '/' + datafile + '.' + field + '.'
                    file_name += str(proc) + '.' + str(time)
                    filename1 = datadir + file_name
                    filename2 = datadir_abs + file_name
                    cmd = 'cp %(filename1)s %(filename2)s' % vars()
                    print(cmd)
                    #subprocess.call( cmd, shell=True )

        # now copy essential files from 0/ directory
        zero_proc_dir = datadir_abs + '0/' + datafile
        for suffix in ['.time', '.log']:
            file_name = '0/' + datafile + suffix
            filename1 = datadir + file_name
            filename2 = datadir_abs + file_name
            cmd = 'cp %(filename1)s %(filename2)s' % vars()
            print(cmd)
            subprocess.call(cmd, shell=True)

    else:

        # non-processor (%RANK) branch
        # all files are stored in data
        # although we could code this up here, I think having
        # all the files in one directory will break grid_maker.py
        # at the moment.
        pass
Example #10
def main():
    '''This is the main function of simple_map.py

    main performs several steps detailed below:

    Parse the configuration control file into a control dictionary (control_d).

    Read the citcoms pid file and establish a master dictionary
    of citcoms parameters (master_d).

    Get the surface coordinate data (lat and lon).

    Loop over each subsection in the control_d dictionary.
    '''
    # associate the script global variable with this function
    global verbose

    # In general each script and each function should report its name
    # as the first step, to better debug workflows.
    #
    # Most diagnostic output to the user will include the now() function
    # to easily measure wall clock runtimes of processes.
    print(now(), 'simple_map.py')

    # Parse cmd line input for basic parameters of plot
    control_d = Core_Util.parse_configuration_file(sys.argv[1])

    # under verbose diagnostics, print out the control dictionary
    if verbose:
        print(now(), 'main: control_d =')
        # Core_Util.tree_print() gives nice formatted printing for dictionaries and lists
        Core_Util.tree_print(control_d)

    # Set the pid file as a variable name.
    pid_file = control_d['pid_file']

    # Get the master dictionary and define aliases
    master_d = Core_Citcom.get_all_pid_data(pid_file)

    # Now master_d has all the information related to a citcom run, and the geoframe defaults.

    # We could print out all the citcom run data file info with this code:
    #
    #if verbose:
    #    print( now(), 'main: master_d =')
    #    Core_Util.tree_print( master_d )
    #
    # but this usually gives tens of thousands of lines of data,
    # showing all the time values, all the level coordinate values, etc.

    # We can define aliases for the most commonly used sub dictionaries:
    geo_d = master_d['geoframe_d']  # holds all the geoframe default settings and paths
    pid_d = master_d['pid_d']  # holds all the .pid file info
    coor_d = master_d['coor_d']  # holds all the coordinate info
    time_d = master_d['time_d']  # holds all the time info

    # Under verbose mode it's good to show the basic defaults, for reference in the script log
    if verbose:
        print(now(), 'main: geo_d =')
        Core_Util.tree_print(geo_d)

    # We also want to establish variables some commonly used data about the citcom run:
    datafile = pid_d['datafile']
    nodez = pid_d['nodez']
    nproc_surf = pid_d['nproc_surf']

    # Now we are ready to set up the basic info common to all subsection maps:

    # Getting surface coordinate data (lat and lon)
    #
    # Because the surface coordinates are depth-independent, we get this information first,
    # before any looping over sections.

    # First, check if an optional 'coord_dir' entry was in the control file,
    # and then check for CitcomS *.coord.* files in that user-specified coord_dir.
    # In this case you should manually copy all the processor *.coord.* files to a single directory.
    coord = None
    try:
        if 'coord_dir' in control_d:
            coord_file_format = control_d['coord_dir'] + '/%(datafile)s.coord.#' % vars()
            coord = Core_Citcom.read_citcom_surface_coor(
                master_d['pid_d'], coord_file_format)
    except FileNotFoundError:
        print(now(), 'WARNING: *.coord.* files not found in:',
              control_d['coord_dir'])
    # Second, check for CitcomS *.coord.* files in data/%RANK dirs,
    # but only if the first check did not succeed
    if coord is None:
        try:
            coord_file_format = 'data/#/' + datafile + '.coord.#'
            coord = Core_Citcom.read_citcom_surface_coor(master_d['pid_d'],
                                                         coord_file_format)
        # If the coordinate files are missing we cannot continue:
        except FileNotFoundError:
            print(coord_file_format)
            print(now(), 'ERROR: cannot find coordinate files in',
                  control_d.get('coord_dir', ''), 'or data/%RANK')
            sys.exit(1)

    # Now flatten the coordinate data since we don't care about specific cap numbers for a given depth
    coord = Core_Util.flatten_nested_structure(coord)

    # extract lon and lat data as lists from tuples
    lon = [line[0] for line in coord]
    lat = [line[1] for line in coord]

    # Now that we have the coordinate data for all levels and all times,
    # we can process each sub section of the control file.
    #
    # The control_d dictionary has a special top level entry with key of '_SECTIONS_'.
    # The value is a list of all the subsection names.  We can iterate this way:
    # Loop over each subsection in the control_d dictionary
    for section_name in control_d['_SECTIONS_']:

        print(now())
        print('Processing subsection:', section_name)

        # get the subsection dictionary
        section_d = control_d[section_name]

        # Get the specific time and level field data to map:
        time = section_d['time']
        level = section_d['level']
        field_name = section_d['field']

        # We can use time_d to get equivalent times:
        time_triple = Core_Citcom.get_time_triple_from_timestep(
            time_d['triples'], float(time))
        age = time_triple[1]  # get the equivalent reconstruction age in Ma
        runtime = time_triple[2]  # get the equivalent model runtime in Myr
        print('time    =', time, 'steps')
        print('age     =', age, 'Ma')
        print('runtime =', runtime, 'Myr')

        # We can use the coor_d to find equivalent values for the level to map:
        radius = coor_d['radius'][level]  # the non-dimensional radius for this level
        radius_km = coor_d['radius_km'][level]  # the equivalent radius in km
        depth = coor_d['depth'][level]  # the non-dimensional depth for this level
        depth_km = coor_d['depth_km'][level]  # the equivalent depth in km
        print('level =', level)
        print('non-dim radius =', radius, '; radius in km =', radius_km)
        print('non-dim depth =', depth, '; depth in km =', depth_km)

        #
        # Now we will extract data for this specific time, level and field:
        #

        # Core_Citcom module has the standard mapping from field_name to the specific info
        # for file name component, and column number, for each field.

        # get the file name component for this field
        file_name_component = Core_Citcom.field_to_file_map[field_name]['file']

        # get that column number for this field
        field_column = Core_Citcom.field_to_file_map[field_name]['column']

        # Create the total filename to read
        file_format = 'data/#/' + datafile + '.' + file_name_component + '.#.' + str(
            time)

        # For data read in by proc, e.g., velo, visc, comp_nd use this form:
        file_data = Core_Citcom.read_proc_files_to_cap_list(
            master_d['pid_d'], file_format)

        # the next few diagnostic messages show how the data is reduced with each step
        print(now(), 'main: len(file_data) = ', len(file_data))

        # flatten the field data since we don't care about specific cap numbers for a single level
        file_data = Core_Util.flatten_nested_structure(file_data)

        # get the specific data column for this field_name
        field_data = [line[field_column] for line in file_data]
        print(now(), 'main: len(field_data) = ', len(field_data))

        # slice out the values for this level
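        # (the processor files store the nodez radial values consecutively
        # for each surface point, so a stride of nodez starting at 'level'
        # extracts a single spherical shell)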
        field_slice = field_data[level::nodez]
        print(now(), 'main: len(field_slice) = ', len(field_slice))

        #
        # Creating an .xyz file
        #

        # Some fields will require scaling: use the NumPy functions on slices:
        if field_name == 'visc': field_slice = np.log10(field_slice)

        # Assemble the coordinate data with the field data to create a .xyz file
        xyz_data = np.column_stack((lon, lat, field_slice))
        # create the xyz file name from other filename components:
        xyz_filename = datafile + '.' + field_name + '.' + str(
            depth_km) + 'km.' + str(time) + '.xyz'
        # write the file
        np.savetxt(xyz_filename, xyz_data, fmt='%f %f %f')
        print(now(), 'main: write: xyz_filename =', xyz_filename)

        #
        # Creating a grid file
        #

        # Set the region based on the model run:
        if nproc_surf == 12:
            R = 'g'
        else:
            R = str(pid_d['lon_min']) + '/' + str(pid_d['lon_max']) + '/'
            R += str(pid_d['lat_min']) + '/' + str(pid_d['lat_max'])

        # Set some defaults for the gridding process
        blockmedian_I = '0.1'
        surface_I = '0.1'

        # use Core_GMT.callgmt() to create the median file
        median_xyz_filename = xyz_filename.rstrip('xyz') + 'median.xyz'
        args = xyz_filename + ' -I' + str(blockmedian_I) + ' -R' + R
        Core_GMT.callgmt('blockmedian', args, '', '>', median_xyz_filename)

        # Use Core_GMT to create the grid with required arguments ...
        args = median_xyz_filename + ' -I' + str(surface_I) + ' -R' + R
        # ... and with any optional arguments passed in via the control file sub section_d
        if 'Ll' in section_d:
            args += ' -Ll' + str(section_d['Ll'])
        if 'Lu' in section_d:
            args += ' -Lu' + str(section_d['Lu'])
        if 'T' in section_d:
            args += ' -T' + str(section_d['T'])
        grid_filename = datafile + '.' + field_name + '.' + str(
            depth_km) + 'km.' + str(time) + '.grd'
        Core_GMT.callgmt('surface', args, '', '', ' -G' + grid_filename)

        #
        # Creating the Map
        #

        # Get the GPlates exported line data for this age
        # be sure to truncate age to nearest int and make a string for the file name
        age = str(int(age))

        # Get the base path for gplates line data, as set in the geo framework defaults file:
        xy_path = master_d['geoframe_d']['gplates_line_dir']

        # get the full path to the line data:
        xy_filename = xy_path + '/' + 'topology_platepolygons_' + age + '.00Ma.xy'
        print(now(), 'main: xy_filename = ', xy_filename)

        # make a plot of the grid 'TEST.ps'
        Core_GMT.plot_grid(grid_filename, xy_filename, R)

    # end of loop over sub section dictionary

    # exit the script
    sys.exit()
Example #11
def main():
    print(now(), 'grid_maker.py')

    # get the .cfg file as a dictionary
    control_d = Core_Util.parse_configuration_file(sys.argv[1], False, False)
    Core_Util.tree_print(control_d)

    # set the pid file
    pid_file = control_d['pid_file']

    # get the master dictionary and define aliases
    master_d = Core_Citcom.get_all_pid_data(pid_file)
    coor_d = master_d['coor_d']
    pid_d = master_d['pid_d']

    # Double check for essential data
    if master_d['time_d'] is None:
        print(now())
        print(
            'ERROR: Required file "[CASE_NAME].time" is missing from this model run.'
        )
        print('       Aborting processing.')
        sys.exit(-1)

    # set up working variables
    # get basic info about the model run
    datadir = pid_d['datadir']
    datafile = pid_d['datafile']
    start_age = pid_d['start_age']
    output_format = pid_d['output_format']

    depth_list = coor_d['depth_km']
    nodez = pid_d['nodez']
    nproc_surf = pid_d['nproc_surf']

    # Check how to read and parse the time spec:
    read_time_d = True

    # Compute the timesteps to process
    if read_time_d:
        time_spec_d = Core_Citcom.get_time_spec_dictionary(
            control_d['time_spec'], master_d['time_d'])
    else:
        time_spec_d = Core_Citcom.get_time_spec_dictionary(
            control_d['time_spec'])
    print(now(), 'grid_maker.py: time_spec_d = ')
    Core_Util.tree_print(time_spec_d)

    # levels to process
    level_spec_d = Core_Util.get_spec_dictionary(control_d['level_spec'])
    print(now(), 'grid_maker.py: level_spec_d = ')
    Core_Util.tree_print(level_spec_d)

    # Get coordinate data
    lon = []
    lat = []

    # Check for existing coordinate data
    lon_file_cache = '_cache_lon_coords.txt'
    lat_file_cache = '_cache_lat_coords.txt'

    if os.path.exists(lon_file_cache) and os.path.exists(lat_file_cache):
        print(now(), 'grid_maker.py: loadtxt: ', lon_file_cache)
        print(now(), 'grid_maker.py: loadtxt: ', lat_file_cache)
        lon = np.loadtxt(lon_file_cache)
        lat = np.loadtxt(lat_file_cache)
    else:
        # gets lon, lat for one depth because these are depth-independent
        coord_file_format = control_d.get(
            'coord_dir', '') + '/%(datafile)s.coord.#' % vars()
        coord = Core_Citcom.read_citcom_surface_coor(master_d['pid_d'],
                                                     coord_file_format)

        # flatten data since we don't care about specific cap numbers for the loop over depth
        coord = Core_Util.flatten_nested_structure(coord)

        # extract data from tuples and make into numpy array
        lon = [line[0] for line in coord]
        lat = [line[1] for line in coord]

        # save the lon and lat data
        np.savetxt(lon_file_cache, lon, fmt='%f')
        np.savetxt(lat_file_cache, lat, fmt='%f')

    # end of get coords
    print(now(), 'grid_maker.py: len(lon) = ', len(lon))
    print(now(), 'grid_maker.py: len(lat) = ', len(lat))

    #
    # Main looping, first over times, then sections, then levels
    #

    # Variables that will be updated each loop:
    # age_Ma will be a zero padded string value used for filenames and reporting
    # depth will be a zero padded string value used for filenames and reporting

    # Variables to hold data for all grids created
    # grid_list is a list of tuples: (grid_filename, age_Ma)
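    # e.g., an entry might look like ('case1-temp-025Ma-0410km.grd', '025'),
    # following the xyz/grd naming pattern built below (names hypothetical)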
    grid_list = []

    print(
        now(),
        '========================================================================='
    )
    print(
        now(),
        'grid_maker.py: Main looping, first over times, then sections, then levels'
    )
    print(
        now(),
        '========================================================================='
    )

    # Loop over times
    for tt, time in enumerate(time_spec_d['time_list']):

        print(now(), 'grid_maker.py: Processing time = ', time)

        if 'Ma' in time:

            # strip off units and make a number
            time = float(time.replace('Ma', ''))

            # determine what time steps are available for this age
            # NOTE: 'temp' is required to set which output files to check
            found_d = Core_Citcom.find_available_timestep_from_age(
                master_d, 'temp', time)
            print(
                now(),
                'grid_maker.py: WARNING: Adjusting times to match available data:'
            )
            print(now(), '  request_age =', found_d['request_age'],
                  '; request_timestep =', found_d['request_timestep'],
                  '; request_runtime =', found_d['request_runtime'])
            print(now(), '  found_age =', found_d['found_age'],
                  '; found_timestep =', found_d['found_timestep'],
                  '; found_runtime =', found_d['found_runtime'])

            # set variables for subsequent loops
            timestep = found_d['found_timestep']
            runtime_Myr = found_d['found_runtime']

            # convert the found age to an int
            age_Ma = int(np.around(found_d['found_age']))

            # make a string and pad with zeros
            age_Ma = '%03d' % age_Ma
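            # e.g., '%03d' % 5 -> '005', which keeps filenames sortable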

        else:

            time = float(time)

            # determine what time steps are available for this timestep
            # NOTE: 'temp' is required to set which output files to check

            found_d = Core_Citcom.find_available_timestep_from_timestep(
                master_d, 'temp', time)

            print(
                now(),
                'grid_maker.py: WARNING: Adjusting times to match available data:'
            )
            print(now(), '  request_age =', found_d['request_age'],
                  '; request_timestep =', found_d['request_timestep'],
                  '; request_runtime =', found_d['request_runtime'])
            print(now(), '  found_age =', found_d['found_age'],
                  '; found_timestep =', found_d['found_timestep'],
                  '; found_runtime =', found_d['found_runtime'])

            # set variables for subsequent loops
            timestep = found_d['found_timestep']
            runtime_Myr = found_d['found_runtime']

            # convert the found age to an int
            age_Ma = int(np.around(found_d['found_age']))

            # make a string and pad with zeros
            age_Ma = '%03d' % age_Ma

        # report on integer age
        print(now(), '  age_Ma =', age_Ma)

        # empty file_data
        file_data = []

        # cache for the file_format
        file_format_cache = ''

        # Loop over sections (fields)
        for ss, s in enumerate(control_d['_SECTIONS_']):

            # FIXME: this extra indent is probably from when sections loop was inside level loop ?

            print(now(), 'grid_maker.py: Processing section = ', s)

            # check for required parameter 'field'
            if 'field' not in control_d[s]:
                print(
                    'ERROR: Required parameter "field" missing from section.')
                print('       Skipping this section.')
                continue  # to next section

            # get the field name
            field_name = control_d[s]['field']

            # check for compound field
            field_name_req = ''
            if field_name == 'horiz_vmag':
                # save the requested name
                field_name_req = field_name
                # reset to get one component
                field_name = 'vx'
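                # ('vy' is read further below and combined with 'vx' via
                # np.hypot to give the horizontal velocity magnitude)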

            print('')
            print(now(), 'grid_maker.py: Processing: field =', field_name)

            # set the region
            if nproc_surf == 12:
                grid_R = 'g'
                # optionally adjust the lon bounds of the grid to -180/180
                if 'shift_lon' in control_d:
                    print(
                        now(),
                        'grid_maker.py: grid_R set to "d" : -180/+180/-90/90'
                    )
                    grid_R = 'd'
                else:
                    print(
                        now(),
                        'grid_maker.py: grid_R set to "g" : 0/360/-90/90')
            else:
                grid_R = str(pid_d['lon_min']) + '/' + str(
                    pid_d['lon_max']) + '/'
                grid_R += str(pid_d['lat_min']) + '/' + str(pid_d['lat_max'])

            # get the data file name specifics for this field
            file_name_component = Core_Citcom.field_to_file_map[field_name][
                'file']
            print(now(), 'grid_maker.py: file_name_component = ',
                  file_name_component)

            # get the data file column name specifics for this field
            field_column = Core_Citcom.field_to_file_map[field_name]['column']
            print(now(), 'grid_maker.py: field_column = ', field_column)

            # create the total citcoms data filenames to read
            file_format = ''

            # check for various data dirs
            if os.path.exists(datadir + '/0/'):
                print(now(), 'grid_maker.py: path found = ', datadir + '/0/')
                file_format = datadir + '/#/' + datafile + '.' + file_name_component + '.#.' + str(
                    timestep)

            elif os.path.exists(datadir + '/'):
                print(now(), 'grid_maker.py: path found = ', datadir + '/')
                file_format = datadir + '/' + datafile + '.' + file_name_component + '.#.' + str(
                    timestep)

            elif os.path.exists('data'):
                print(now(), 'grid_maker.py: path found = ', 'data')
                file_format = './data/#/' + datafile + '.' + file_name_component + '.#.' + str(
                    timestep)

            elif os.path.exists('Data'):
                print(now(), 'grid_maker.py: path found = ', 'Data')
                file_format = './Data/#/' + datafile + '.' + file_name_component + '.#.' + str(
                    timestep)

            # report error
            else:
                print(now())
                print('ERROR: Cannot find output data.')
                print('       Skipping this section.')
                print(now(), 'grid_maker.py: file_format = ', file_format)
                continue  # to next section

            print(now(), 'grid_maker.py: file_format = ', file_format)

            # check if this file data has already been read in
            if not file_format == file_format_cache:

                # read data by proc, e.g., velo, visc, comp_nd, surf, botm
                file_data = Core_Citcom.read_proc_files_to_cap_list(
                    master_d['pid_d'], file_format, field_name)
                # flatten data since we don't care about specific cap numbers for the loop over levels/depths
                file_data = Core_Util.flatten_nested_structure(file_data)
                print(now(), 'grid_maker.py: len(file_data) = ',
                      len(file_data))

                # update cache for next pass in loop over fields
                file_format_cache = file_format

            # Get the specific column for this field_name
            field_data = np.array([line[field_column] for line in file_data])
            print(now(), 'grid_maker.py:  len(field_data) = ', len(field_data))

            # Check for compound field
            if field_name_req == 'horiz_vmag':

                # Get the second component data ('vy')
                field_column = 1
                # read data by proc, e.g., velo, visc, comp_nd, surf, botm
                file_data2 = Core_Citcom.read_proc_files_to_cap_list(
                    master_d['pid_d'], file_format, field_name)
                # flatten data since we don't care about specific cap numbers for the loop over levels/depths
                file_data2 = Core_Util.flatten_nested_structure(file_data2)
                print(now(), 'grid_maker.py: len(file_data2) = ',
                      len(file_data2))
                field_data2 = np.array(
                    [line[field_column] for line in file_data2])
                print(now(), 'grid_maker.py:  len(field_data2) = ',
                      len(field_data2))

                # combine the data and reset the main variable
                field_data3 = np.hypot(field_data, field_data2)
                field_data = field_data3

                # put back field name to requested name
                field_name = field_name_req
            # end if check on compound field

            print(now(), 'grid_maker.py:  len(field_data) = ', len(field_data))
            print(now())

            #
            # Loop over levels
            #
            for ll, level in enumerate(level_spec_d['list']):

                print(now(), 'grid_maker.py: Processing level = ', level)

                # ensure level is an int value
                level = int(level)
                depth = int(depth_list[level])
                # pad the depth value
                depth = '%04d' % depth

                print(
                    now(),
                    '------------------------------------------------------------------------------'
                )
                print(now(), 'grid_maker.py: tt,ss,ll = ', tt, ',', ss, ',',
                      ll, ';')
                print(now(), 'grid_maker.py: summary for', s, ': timestep =',
                      timestep, '; age =', age_Ma, '; runtime_Myr =',
                      runtime_Myr, '; level =', level, '; depth =', depth,
                      ' km; field_name =', field_name)
                print(
                    now(),
                    '------------------------------------------------------------------------------'
                )
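
                # NOTE: CitcomS output is ordered with z (radial) varying
                # fastest, so slicing with a stride of nodez picks out the
                # value at one radial level for every surface point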

                if field_name.startswith('vertical_'):
                    # perform a z slice for citcom data
                    field_slice = field_data[
                        level::nodez]  # FIXME : how to get a v slice
                    xyz_filename = datafile + '-' + field_name + '-' + str(
                        age_Ma) + 'Ma-' + str(depth) + 'km.xyz'
                else:
                    # perform a z slice for citcom data
                    field_slice = field_data[level::nodez]
                    #xyz_filename = datafile + '-' + field_name + '-' + str(timestep) + '-' + str(depth) + '.xyz'
                    xyz_filename = datafile + '-' + field_name + '-' + str(
                        age_Ma) + 'Ma-' + str(depth) + 'km.xyz'

                print(now(), 'grid_maker.py: xyz_filename =', xyz_filename)

                if field_name == 'visc': field_slice = np.log10(field_slice)

                print(now(), 'grid_maker.py: type(field_slice) = ',
                      type(field_slice))
                print(now(), 'grid_maker.py:  len(field_slice) = ',
                      len(field_slice))
                print(now())

                # create the xyz data
                xyz_data = np.column_stack((lon, lat, field_slice))
                np.savetxt(xyz_filename, xyz_data, fmt='%f %f %f')

                #print( now(), 'grid_maker.py: type(xyz_data) = ', type(xyz_data) )
                #print( now(), 'grid_maker.py:  len(xyz_data) = ', len(xyz_data) )
                #print( now() )

                # recast the slice
                #fs = np.array( field_slice )
                #fs.shape = ( len(lat), len(lon) )
                #print( now(), 'grid_maker.py: type(fs) = ', type(field_slice) )
                #print( now(), 'grid_maker.py:  len(fs) = ', len(field_slice) )
                #print( now() )

                # check for a grid_R
                if 'R' in control_d[s]:
                    grid_R = control_d[s]['R']

                # create the median file
                median_xyz_filename = xyz_filename[:-3] + 'median.xyz'

                blockmedian_I = control_d[s].get('blockmedian_I', '0.5')
                cmd = xyz_filename + ' -I' + str(
                    blockmedian_I) + ' -R' + grid_R

                Core_GMT.callgmt('blockmedian', cmd, '', '>',
                                 median_xyz_filename)
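
                # blockmedian decimates the scattered points to one median
                # value per -I cell, keeping dense data from aliasing when
                # 'surface' grids it below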

                # get a T value for median file
                if 'Ll' not in control_d[s] or 'Lu' not in control_d[s]:
                    T = Core_GMT.get_T_from_minmax(median_xyz_filename)
                else:
                    dt = (control_d[s]['Lu'] - control_d[s]['Ll']) / 10
                    T = '-T' + str(control_d[s]['Ll']) + '/'
                    T += str(control_d[s]['Lu']) + '/' + str(dt)
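                    # e.g., Ll = 0 and Lu = 1 give dt = 0.1 and T = '-T0/1/0.1'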

                print(now(), 'grid_maker.py: T =', T)

                # create the grid
                grid_filename = xyz_filename[:-3] + 'grd'

                surface_I = control_d[s].get('surface_I', '0.25')
                cmd = median_xyz_filename + ' -I' + str(
                    surface_I) + ' -R' + grid_R

                if 'Ll' in control_d[s]:
                    cmd += ' -Ll' + str(control_d[s]['Ll'])
                if 'Lu' in control_d[s]:
                    cmd += ' -Lu' + str(control_d[s]['Lu'])
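                # N.B. for 'surface', -T is the spline tension factor
                # (0 gives minimum curvature), unlike the CPT -T range above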
                if 'T' in control_d[s]:
                    cmd += ' -T' + str(control_d[s]['T'])

                Core_GMT.callgmt('surface', cmd, '', '', ' -G' + grid_filename)

                # label the variables

                # -Dxname/yname/zname/scale/offset/title/remark
                cmd = grid_filename + ' -D/=/=/' + str(
                    field_name) + '/=/=/' + str(field_name) + '/' + str(
                        field_name)
                Core_GMT.callgmt('grdedit', cmd, '', '', '')

                # Dimensionalize grid

                if control_d[s].get('dimensional'):
                    print(now(), 'grid_maker.py: dimensional = ',
                          control_d[s]['dimensional'])
                    dim_grid_name = grid_filename.replace(
                        '.grd', '.dimensional.grd')
                    Core_Citcom.dimensionalize_grid(pid_file, field_name,
                                                    grid_filename,
                                                    dim_grid_name)

                    # FIXME: for dynamic topo remove  mean
                    # grdinfo to get mean ; see To_Refactor for example

                # save this grid and its age in a list
                if control_d[s].get('dimensional'):
                    grid_list.append((dim_grid_name, age_Ma))
                else:
                    grid_list.append((grid_filename, age_Ma))

                # Optional step to transform grid to plate frame
                if 'make_plate_frame_grid' in control_d:
                    cmd = 'frame_change_pygplates.py %(age_Ma)s %(grid_filename)s %(grid_R)s' % vars(
                    )
                    print(now(), 'grid_maker.py: cmd =', cmd)
                    os.system(cmd)

                # Associate this grid with GPlates exported line data in .xy format:
                # compute age value
                age_float = 0.0

                # time_list values for citcom data uses timesteps; get age
                time_triple = Core_Citcom.get_time_triple_from_timestep(
                    master_d['time_d']['triples'], timestep)
                age_float = time_triple[1]
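                # (each time triple pairs a timestep with its age and
                # runtime; index 1 holds the age in Ma)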

                # clamp negative ages to zero, then truncate to an int for
                # the gplates .xy file name
                if age_float < 0: age_float = 0.0
                xy_path = master_d['geoframe_d']['gplates_line_dir']
                xy_filename = xy_path + '/' + 'topology_platepolygons_' + str(
                    int(age_float)) + '.00Ma.xy'
                print(now(), 'grid_maker.py: xy_filename = ', xy_filename)

                # Make a plot of the grids
                J = 'X5/3'  #'R0/6'
                #J = 'M5/3'
                if 'J' in control_d[s]:
                    J = control_d[s]['J']

                C = 'polar'
                if 'C' in control_d[s]:
                    C = control_d[s]['C']

                # citcoms
                # plot non-dimensional grid
                Core_GMT.plot_grid(grid_filename, xy_filename, grid_R, T, J, C)

                # also plot dimensional grid
                if control_d[s].get('dimensional'):
                    print(now(), 'grid_maker.py: plotting dimensional = ',
                          control_d[s]['dimensional'])
                    dim_grid_name = grid_filename.replace(
                        '.grd', '.dimensional.grd')
                    T = Core_GMT.get_T_from_grdinfo(dim_grid_name)
                    Core_GMT.plot_grid(dim_grid_name, xy_filename, grid_R, T,
                                       J)

                # plot plate frame grid
                if 'make_plate_frame_grid' in control_d:
                    plateframe_grid_name = grid_filename.replace(
                        '.grd', '-plateframe.grd')
                    xy_path = master_d['geoframe_d']['gplates_line_dir']
                    # present day plate outlines : use '0'
                    xy_filename = xy_path + '/' + 'topology_platepolygons_0.00Ma.xy'
                    print(now(), 'grid_maker.py: xy_filename = ', xy_filename)

                    T = Core_GMT.get_T_from_grdinfo(plateframe_grid_name)
                    print(now(), 'grid_maker.py: T =', T)
                    Core_GMT.plot_grid(plateframe_grid_name, xy_filename,
                                       grid_R, T, J)
Example No. 12
def main():
    '''This is the main function for restart_citcoms.py'''
    print(now(), 'restart_citcoms.py: START')

    # get the control .cfg file as a dictionary
    control_d = Core_Util.parse_configuration_file(sys.argv[1])

    # parse master run input pid file
    master_run_cfg = control_d['master_run_cfg']
    master_run_cfg_d = Core_Util.parse_configuration_file(master_run_cfg)

    # parse master run output pid file
    master_run_pid = control_d['master_run_pid']

    # get the master dictionary and define aliases
    master_run_d = Core_Citcom.get_all_pid_data(master_run_pid)
    master_run_d['control_d'] = control_d
    master_run_pid_d = master_run_d['pid_d']

    # report details of input data
    if verbose:
        print(now(), 'restart_citcoms: control_d = ')
        Core_Util.tree_print(control_d)
        print(now(), 'restart_citcoms: master_run_cfg_d = ')
        Core_Util.tree_print(master_run_cfg_d)
        print(now(), 'restart_citcoms: master_run_pid_d = ')
        Core_Util.tree_print(master_run_pid_d)

    # SAVE, might need later ... ?
    # copy of the geo frame defaults
    #geoframe_d = master_run_d['geoframe_d']

    # process the control entry to get a list of ages
    time_spec_d = Core_Citcom.get_time_spec_dictionary(
        control_d['restart_ages'], master_run_d['time_d'])

    print(now(), 'restart_citcoms: time_spec_d =')
    Core_Util.tree_print(time_spec_d)

    # Get the restart type and local copy of the restart parameter replacement dictionary
    rs_replace_d = {}
    rs_type = control_d['restart_type']
    if rs_type == 'dynamic_topography':
        rs_replace_d = Core_Citcom.dynamic_topography_restart_params
    elif rs_type == 'total_topography':
        rs_replace_d = Core_Citcom.total_topography_restart_params
    else:
        print(now(), 'restart_citcoms: ERROR: unknown value for restart_type.')
        print(now(),
              'Valid values are "dynamic_topography", or "total_topography"')
        sys.exit(-1)

    # Now update rs_replace_d values directly from those set in control_d
    for p in sorted(control_d):
        if p.startswith('CitcomS.'):
            rs_replace_d[p] = control_d[p]

    # Show the final rs_replace_d that will pass to the input creation function
    if verbose:
        print(now(), 'restart_citcoms: rs_replace_d = ')
        Core_Util.tree_print(rs_replace_d)

    # Set placeholders for the directory and file structure and names
    rs_dir_prefix = 'restart_' + rs_type
    rs_inp_cfg_suffix = ''

    rs_structure = control_d['restart_structure']
    if rs_structure == 'all-in-one':
        # create the all-in-one restart directory from section name
        Core_Util.make_dir(rs_dir_prefix)

    # Loop over the restart ages and create restart files for each age
    for a in time_spec_d['age_Ma']:

        # determine what time steps are available for this age
        # NOTE: 'temp' is required to set which output files to check
        found_d = Core_Citcom.find_available_timestep_from_age(
            master_run_d, 'temp', a)

        timestep = found_d['found_timestep']

        # convert the found age to an int
        age = int(np.around(found_d['found_age']))

        print(
            now(),
            '--------------------------------------------------------------------------------------------'
        )
        print(now(), 'Creating files for restart run at age:', age, '(',
              str(a), 'Ma; timestep = ', timestep, ')')
        print(
            now(),
            '--------------------------------------------------------------------------------------------'
        )

        # Set the name of the restart directory
        rs_dir = ''
        if rs_structure == 'separate':
            # create restart directory from section name
            rs_dir = rs_dir_prefix + '_' + str(age) + 'Ma'
            Core_Util.make_dir(rs_dir)
            Core_Util.make_dir(rs_dir + f'/Age{age}Ma')
        else:
            # this is an all-in-one case
            rs_dir = rs_dir_prefix

        # update the new restart input cfg file name suffix
        rs_inp_cfg_suffix = rs_type + '_' + str(age) + 'Ma'

        # create a new set of initial conditions for the restart run,
        # and set file name patterns in control_d
        if rs_type == 'dynamic_topography':
            create_no_lith_temp(control_d, master_run_d, rs_replace_d, rs_dir,
                                rs_inp_cfg_suffix, age, timestep)

        # else, no need to adjust files for 'total_topography' runs

        # create new run input .cfg for this restart run
        restart_run_cfg = {}
        restart_run_cfg = create_restart_run_cfg(control_d, master_run_cfg_d,
                                                 rs_replace_d, rs_dir,
                                                 rs_inp_cfg_suffix, age,
                                                 timestep)

    # End of loop over restart runs

    # Close up shop
    sys.exit(0)
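
# typical invocation, assuming a control .cfg that defines master_run_cfg,
# master_run_pid, restart_type, restart_structure and restart_ages:
#
#   restart_citcoms.py restart_control.cfg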
Example No. 13
def create_no_lith_temp(control_d, master_run_d, rs_replace_d, rs_dir,
                        rs_inp_cfg_suffix, age, timestep):
    '''read master run velo files and modify the temperature using z>some_node '''
    # (6) Read in velo file from master run for closest age (use read_proc_files_to_cap_list() )
    # (7) Modify the temperature using z>some_node to set temperatures to background for models
    #  without the lithosphere
    # (8) write out `new' IC files using write_cap_or_proc_list_to_files()

    lithosphere_depth_DT = control_d['lithosphere_depth_DT']
    lithosphere_temperature_DT = control_d['lithosphere_temperature_DT']

    # Get nodez from depth
    znode = Core_Citcom.get_znode_from_depth(master_run_d['coor_d'],
                                             lithosphere_depth_DT)
    print(now(), 'create_no_lith_temp: lithosphere_depth_DT = ',
          lithosphere_depth_DT, '; znode=', znode)

    # choose the field to process
    field_name = 'temp'

    # get params for the run
    pid_d = master_run_d['pid_d']
    datafile = pid_d['datafile']

    # get the data file name specifics for this field
    file_name_component = Core_Citcom.field_to_file_map[field_name]['file']
    print(now(), 'create_no_lith_temp: file_name_component = ',
          file_name_component)

    # process data from Citcoms
    if os.path.exists(master_run_d['pid_d']['datadir'] + '/0/'):
        file_format = master_run_d['pid_d']['datadir'] + '/#/' + master_run_d[
            'pid_d']['datafile'] + '.' + file_name_component + '.#.' + str(
                timestep)
    elif os.path.exists(master_run_d['pid_d']['datadir'] + '/'):
        file_format = master_run_d['pid_d']['datadir'] + '/' + master_run_d[
            'pid_d']['datafile'] + '.' + file_name_component + '.#.' + str(
                timestep)
    elif os.path.exists(master_run_d['pid_d']['datadir'].replace('%RANK',
                                                                 '0')):
        file_format = master_run_d['pid_d']['datadir'].replace(
            '%RANK', '#') + '/' + master_run_d['pid_d'][
                'datafile'] + '.' + file_name_component + '.#.' + str(timestep)
    else:
        file_format = 'data/#/' + datafile + '.' + file_name_component + '.#.' + str(
            timestep)
    print(now(), 'create_no_lith_temp: file_format = ', file_format)

    # read data by proc, e.g., velo, visc, comp_nd, surf, botm
    data_by_cap = Core_Citcom.read_proc_files_to_cap_list(
        master_run_d['pid_d'], file_format, field_name)

    # find index of all nodes in a cap that have znode > requested_znode
    # first, make array of znode number for a cap
    nodex = pid_d['nodex']
    nodey = pid_d['nodey']
    nodez = pid_d['nodez']

    # znodes for one cap (same for every cap)
    znode_array = np.tile(range(nodez), nodex * nodey)

    # boolean mask of all the znodes whose temperature needs to be reset
    mask = znode_array > znode
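    # e.g., with nodez = 4 and znode = 1, znode_array = [0, 1, 2, 3, 0, 1, ...]
    # and the mask flags nodes 2 and 3 (the shallowest) in every column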

    # loop over all cap lists
    for nn, cap_list in enumerate(data_by_cap):
        print(now(), 'create_no_lith_temp: working on cap number', nn)
        # convert to numpy array
        cap_array = np.array(cap_list)
        # swap in new temperature values for lithosphere
        # temperature is fourth column
        np.place(cap_array[:, 3], mask, lithosphere_temperature_DT)
        # update master list of data with corrected list
        data_by_cap[nn] = cap_array.tolist()

    # check values have been updated
    #if verbose: print( now(), 'create_no_lith_temp: spot check: data_by_cap[0][0:nodez]', data_by_cap[0][0:nodez])

    # map the data from cap lists to processor lists
    out_data_by_proc = Core_Citcom.get_proc_list_from_cap_list(
        master_run_d['pid_d'], data_by_cap)

    # set up output info
    rs_datafile = datafile + '_restart_' + str(int(np.around(age))) + 'Ma'

    ic_dir = rs_dir + '/ic_dir'
    Core_Util.make_dir(ic_dir)
    out_name = ic_dir + '/' + rs_datafile + '.velo.#.' + str(timestep)
    print(now(), 'create_no_lith_temp: out_name =', out_name)

    # now write out data to processor files (with header, necessary for restart)
    Core_Citcom.write_cap_or_proc_list_to_files(master_run_d['pid_d'],
                                                out_name, (out_data_by_proc, ),
                                                'proc', True)

    # Update control_d with file name patterns
    control_d['rs_datafile'] = rs_datafile
    control_d['rs_datadir'] = './ic_dir/'

    return
Example No. 14
def make_map_postscript(master_d, tt):
    '''Make summary map.'''

    control_d = master_d['control_d']
    coor_d = master_d['coor_d']
    func_d = master_d['func_d']
    geoframe_d = master_d['geoframe_d']
    pid_d = master_d['pid_d']
    time_d = master_d['control_d']['time_d']
    datafile = pid_d['datafile']
    FULL_SPHERE = pid_d['FULL_SPHERE']
    ivel_prefix = control_d.get('ivel_prefix', None)
    nodez = pid_d['nodez']
    REGIONAL = pid_d['REGIONAL']
    rm_list = func_d['rm_list']

    # positions for all figures
    X_pos = [0.5, 4.5] * 4
    Y_pos = [8.375, 8.375, 5.75, 5.75, 3.125, 3.125, 0.5, 0.5]
    # XXX DJB
    #znode_list = [53,52,51,50,49,48,47,46]
    znode_list = [64, 62, 60, 59, 58, 57, 56, 54]

    runtime_Myr = time_d['runtime_Myr'][tt]
    age_Ma = time_d['age_Ma'][tt]
    age_int = int(round(age_Ma, 0))  # to get ivel file
    time = time_d['time_list'][tt]

    if ivel_prefix:
        ivel_filename = ivel_prefix + str(age_int)
        if FULL_SPHERE: ivel_filename += '.#'
        ivel_data = Core_Citcom.read_cap_files_to_cap_list(
            pid_d, ivel_filename)

        control_d['coord_file'] = control_d.get(
            'coord_dir', '') + '/' + pid_d['datafile'] + '.coord.#'
        control_d['OUTPUT_IVEL'] = False
        write_coordinates_by_cap(master_d)

    # postscript name
    ps = control_d['prefix'] + '_Map_%(time)s.ps' % vars()

    arg = 'PAGE_ORIENTATION portrait'
    callgmt('gmtset', arg)

    opts_d = Core_GMT.start_postscript(ps)

    # loop over figures
    for nn, znode in enumerate(znode_list):

        if FULL_SPHERE:
            # XXX DJB - zoom in on Laramide flat slab
            #opts_d['B'] = 'a10/a10'
            #opts_d['J'] = 'M277/2'
            #opts_d['R'] = '250/294/25/55'

            # XXX DJB - zoom in on Izu-Bonin-Marianas
            opts_d['B'] = 'a10/a10'
            opts_d['J'] = 'M140/2'
            opts_d['R'] = '120/160/-10/30'

            # for entire globe
            #opts_d['B'] = 'a30'
            #opts_d['J'] = 'H4'
            #opts_d['R'] = 'g'
        elif REGIONAL:
            lon_min = pid_d['lon_min']
            lon_max = pid_d['lon_max']
            lat_min = pid_d['lat_min']
            lat_max = pid_d['lat_max']
            opts_d['B'] = 'a10/a5::WeSn'
            opts_d['J'] = 'X3.5/2.125'
            opts_d['R'] = '%(lon_min)s/%(lon_max)s/%(lat_min)s/%(lat_max)s' % vars()

        X = apos(X_pos[nn])
        Y = apos(Y_pos[nn])
        opts_d['X'] = X
        opts_d['Y'] = Y
        depth = int(coor_d['depth_km'][znode])

        # grdimage temperature
        # XXX DJB
        #grid = 'grid/' + datafile + '.temp.%(depth)s.%(time)s.grd' % vars()
        #opts_d['C'] = 'temp.cpt'
        #callgmt( 'grdimage', grid, opts_d, '>>', ps )
        #del opts_d['B']
        #del opts_d['C']

        # grdimage age
        # XXX DJB
        grid = '/net/beno/raid2/nflament/Agegrids/20130828_rev210/Mask/agegrid_final_mask_%(age_int)s.grd' % vars(
        )
        opts_d['C'] = 'age.cpt'
        callgmt('grdimage', grid, opts_d, '>>', ps)
        del opts_d['B']
        del opts_d['C']

        # overlay GPlates line data for global models only
        if FULL_SPHERE:
            #W = '3,grey'
            #Core_GMT.plot_gplates_coastline( geoframe_d, opts_d, ps, age_int, W )
            #W = '3,yellow'
            #Core_GMT.plot_gplates_ridge_and_transform( geoframe_d, opts_d, ps, age_int, W )
            W = '3,black'
            Core_GMT.plot_gplates_slab_polygon(geoframe_d, opts_d, ps, age_int,
                                               W)
            W = '3,black'
            G = 'black'
            Core_GMT.plot_gplates_sawtooth_subduction(geoframe_d, opts_d, ps,
                                                      age_int, W, G)
            W = '3,black'
            G = 'white'
            Core_GMT.plot_gplates_sawtooth_leading_edge(
                geoframe_d, opts_d, ps, age_int, W, G)

        # overlay psbasemap again to estimate location of cross-sections
        #opts_d['B'] = 'a10g10/a10g10'
        #callgmt( 'psbasemap', '', opts_d, '>>', ps )
        #del opts_d['B']

        if REGIONAL:
            for cc in range(pid_d['nproc_surf']):
                # coarse mesh that ivels are constructed using
                xyz_file = func_d['coarse_coor_cap_names'][cc]
                rm_list.append(xyz_file)
                opts_d['S'] = 'c0.03'
                callgmt('psxy', xyz_file, opts_d, '>>', ps)
                del opts_d['S']

        if ivel_prefix:
            # extract ivels for this depth
            for cc in range(pid_d['nproc_surf']):
                ivel_slice = ivel_data[cc][znode::nodez]
                ivel_stencil = np.array([entry[3] for entry in ivel_slice])
                # where ivels are not applied (stencil=2, ignored)
                xyz_filename = 'ivel.slice.2.xyz'
                rm_list.append(xyz_filename)
                index = np.where(ivel_stencil == 2)[0]
                coord_for_index = np.array(func_d['coor_by_cap'][cc])[index]
                np.savetxt(xyz_filename, coord_for_index)
                opts_d['S'] = 'c0.03'
                opts_d['G'] = 'white'
                if REGIONAL:
                    callgmt('psxy', xyz_filename, opts_d, '>>', ps)
                del opts_d['S']
                del opts_d['G']
                # where ivels are applied (stencil=1)
                xyz_filename = 'ivel.slice.1.xyz'
                rm_list.append(xyz_filename)
                index = np.where(ivel_stencil == 1)[0]
                coord_for_index = np.array(func_d['coor_by_cap'][cc])[index]
                np.savetxt(xyz_filename, coord_for_index)
                opts_d['S'] = 'c0.03'
                opts_d['G'] = 'purple'
                callgmt('psxy', xyz_filename, opts_d, '>>', ps)
                del opts_d['S']
                del opts_d['G']

        # plot profiles
        for section in control_d['_SECTIONS_']:

            # section dictionary
            section_d = control_d[section]
            arg = 'annular_project_' + section + '.xy -W5,white'
            callgmt('psxy', arg, opts_d, '>>', ps)
            arg = 'annular_project_' + section + '.xy -W3,black,-'
            callgmt('psxy', arg, opts_d, '>>', ps)

            # label start (X) and end (X') points
            lon0 = section_d['lon0']
            lat0 = section_d['lat0']
            lon1 = section_d['lon1']
            lat1 = section_d['lat1']
            letter = section.split('_')[1]
            opts_d['N'] = ' '
            opts_d['W'] = 'white'
            text = '%(lon0)s %(lat0)s 8 0 4 MC %(letter)s\n' % vars()
            text += '''%(lon1)s %(lat1)s 8 0 4 MC %(letter)s'\n''' % vars()
            text += 'EOF'
            callgmt('pstext', '', opts_d, '<< EOF >>', ps + '\n' + text)
            del opts_d['N']
            del opts_d['W']

        # plot depth label
        strdepth = str(depth) + ' km'
        cmd = '-R0/8.5/0/11 -Jx1.0'
        cmd += ' -X%(X)s -Y%(Y)s -K -O' % vars()
        X_text = 0.03
        Y_text = 2.25
        text = '%(X_text)s %(Y_text)s 12 0 4 ML %(strdepth)s\nEOF' % vars()
        callgmt('pstext', cmd, '', '<< EOF >>', ps + '\n' + text)

    # psscale
    psscaleopts_d = {}
    psscaleopts_d['B'] = 'a30'
    psscaleopts_d['C'] = 'age.cpt'
    psscaleopts_d['D'] = '5.0/0.5/1.5/0.125h'
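    # -D positions the colour bar: xpos/ypos/length/width, with the
    # trailing 'h' requesting a horizontal bar (GMT4 syntax)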
    psscaleopts_d['K'] = ' '
    psscaleopts_d['O'] = ' '
    callgmt('psscale', '', psscaleopts_d, '>>', ps)

    Core_GMT.end_postscript(ps)

    return ps
Example No. 15
#                        ALL RIGHTS RESERVED
#=====================================================================
'''This is a test script to show a simple example of using the geodynamic framework modules'''
#=====================================================================
#=====================================================================
import os, sys, datetime, pprint, subprocess

import Core_Util
import Core_Citcom

if not len(sys.argv) == 2:
    print('Run this script like this: ./test_script.py sample.cfg')
    sys.exit(-1)

# create an empty dictionary to hold all the main data
master_d = {}

# get the main data 
master_d = Core_Citcom.get_master_data( sys.argv[1] )

# show the dict
print('\n', Core_Util.now(), 'master_d = ')
Core_Util.tree_print(master_d)

# do something with the data ... 
print()
print('the pid file has nx =', master_d['pid']['nx'], 'nodes')
print('the coor file has', len( master_d['coor']['depth'] ), 'levels')

#=====================================================================