def get_T_from_grdinfo(grid_filename):
    '''get a -T value from grdinfo on a grid file'''
    if which('GMT'):
        cmd = 'grdinfo -C %(grid_filename)s' % vars()
    else:
        cmd = 'gmt grdinfo -C %(grid_filename)s' % vars()
    s = subprocess.check_output(cmd, shell=True, universal_newlines=True)
    if verbose:
        print(Core_Util.now(), cmd)
    l = s.split()
    min = float(l[5])
    max = float(l[6])

    # FIXME: stop gap measure
    if min == max:
        min = 0.0
        max = 1.0
        dt = 0.01
        T = '-T%(min)s/%(max)s/%(dt)s' % vars()
        if verbose:
            print(Core_Util.now(), 'get_T_from_grdinfo: WARNING: min==max, setting T =', T)
        return T

    if max >= 10000000:
        dt = 1000000.0
    elif max >= 100000:
        dt = 1000.0
    elif max >= 1000:
        dt = 100.0
    elif max >= 1:
        dt = .1
    elif max >= 0.1:
        dt = .01
    elif max >= 0.01:
        dt = .001
    else:
        dt = 1.0

    T = '-T%(min)s/%(max)s/%(dt)s' % vars()
    if verbose:
        print(Core_Util.now(), 'get_T_from_grdinfo: T =', T)
    return T

def plot_gplates_transform(opts_d, ps, age):
    '''Plot GPlates transforms.'''
    geoframe_d = Core_Util.parse_geodynamic_framework_defaults()
    age = int(age)  # ensure integer
    gplates_line_dir = geoframe_d['gplates_line_dir']
    linefile = gplates_line_dir + \
        '/topology_ridge_transform_boundaries_%(age)s.00Ma.xy' % vars()
    if not os.path.exists(linefile):
        return
    # process to write out only ">Transform" data to a temporary file
    # for plotting
    infile = open(linefile, 'r')
    lines = infile.readlines()
    infile.close()
    outname = 'ridges.xy'
    outfile = open(outname, 'w')
    flag = 0
    for line in lines:
        if line.startswith('>'):
            flag = 0  # reset
        if line.startswith('>Transform'):
            flag = 1
        # write out all subsequent lines
        if flag:
            outfile.write(line)
    outfile.close()
    callgmt('psxy', outname, opts_d, '>>', ps)
    Core_Util.remove_files([outname])

def get_T_from_minmax(xyz_filename):
    '''get a -T value from minmax on a xyz file'''
    if which('GMT'):
        cmd = 'minmax -C %(xyz_filename)s' % vars()
    else:
        cmd = 'gmt minmax -C %(xyz_filename)s' % vars()
    s = subprocess.check_output(cmd, shell=True, universal_newlines=True)
    if verbose:
        print(Core_Util.now(), cmd)
    l = s.split()
    min = float(l[4])
    max = float(l[5])

    # FIXME: stop gap measure
    if min == max:
        print(Core_Util.now(), 'get_T_from_minmax: WARNING: min == max: min =', min, '; max =', max)
        min = 0.0
        max = 1.0
        dt = 0.01
        T = '-T%(min)s/%(max)s/%(dt)s' % vars()
        if verbose:
            print(Core_Util.now(), 'get_T_from_minmax: T =', T)
        return T

    if max >= 10000000:
        dt = 1000000.0
    elif max >= 100000:
        dt = 1000.0
    elif max >= 1000:
        dt = 100.0
    elif max >= 1:
        dt = .1
    elif max >= 0.1:
        dt = .01
    elif max >= 0.01:
        dt = .001
    else:
        dt = 1.0

    T = '-T%(min)s/%(max)s/%(dt)s' % vars()
    if verbose:
        print(Core_Util.now(), 'get_T_from_minmax: T =', T)
    return T

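# Sketch (not in the original module): the -T strings returned by the two
# helpers above are normally fed straight to makecpt to build a color
# palette spanning the data range. File names here are illustrative.
def example_cpt_from_xyz(xyz_filename='model.xyz', cpt_filename='model.cpt'):
    '''Sketch: build a CPT whose range is derived from an xyz file.'''
    T = get_T_from_minmax(xyz_filename)
    callgmt('makecpt', '-Cpolar ' + T, '', '>', cpt_filename)
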
def brute_force(master_d, coor_by_cap, nlon, nlat):
    pid_d = master_d['pid_d']
    nodex = pid_d['nodex']
    nodey = pid_d['nodey']
    nproc_surf = pid_d['nproc_surf']
    nearest_point = []
    for cc in range(nproc_surf):
        ccoor = coor_by_cap[cc]
        nearest_point.append([])
        for cline in ccoor:
            (zlon, zlat) = cline
            # compute distance between points
            zdist = Core_Util.get_distance(nlon, nlat, zlon, zlat)
            nearest_point[cc].append(zdist)
    # get index of nearest node to this coordinate
    fnearest_point = Core_Util.flatten_nested_structure(nearest_point)
    fnearest_coor = Core_Util.flatten_nested_structure(coor_by_cap)
    min_index, min_value = min(enumerate(fnearest_point),
                               key=operator.itemgetter(1))
    (zzlon, zzlat) = fnearest_coor[min_index]
    print(now(), nlon, nlat, min_index, min_value, zzlon, zzlat)
    # find location of node in cap list
    cap_index = int(min_index / (nodex * nodey))
    entry_index = min_index % (nodex * nodey)
    print(now(), cap_index, entry_index)

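# For comparison (a sketch, not part of the original module): the k-d tree
# approach benchmarked in the timing test further below. scipy's KDTree
# answers each nearest-neighbour query in roughly O(log N) instead of
# scanning every node as brute_force() does. Note the simplification: the
# tree uses Euclidean distance in (lon, lat), not the great-circle distance
# of Core_Util.get_distance(), so results differ near the poles and the
# dateline.
def kd_tree_nearest(coor_by_cap, nlon, nlat):
    '''Sketch: nearest surface node via scipy.spatial.KDTree.'''
    import numpy as np
    from scipy import spatial
    flat = Core_Util.flatten_nested_structure(coor_by_cap)
    tree = spatial.KDTree(np.array(flat))
    dist, index = tree.query(np.array([nlon, nlat]))
    return index, dist
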
def test_ws():
    '''test the web services with a POST request'''
    # Get the sample json input
    json_file = open('./geodynamic_framework_data/TEST.json', 'r')
    json_string = json_file.readline()
    json_file.close()
    # create the URL for a POST req
    url = 'http://gplates.gps.caltech.edu:8080/reconstruct_feature_collection/?'
    # set up the URL params
    values = {
        'feature_collection': json_string,
        'time': '0',
        'output': 'geojson',
        'test': 'True',
    }
    if verbose:
        Core_Util.tree_print(values)
    data = urllib.parse.urlencode(values)
    data = data.encode('utf-8')
    req = urllib.request.Request(url, data)
    rsp = urllib.request.urlopen(req)
    content = rsp.read()
    print('content =', content)

def test(argv):
    '''Core_Rhea.py module self test'''
    global verbose
    verbose = True
    print(now(), 'Core_Rhea.py: test(): sys.argv = ', sys.argv)
    # run the tests
    # read the defaults
    frame_d = Core_Util.parse_geodynamic_framework_defaults()
    # read the first argument as a .cfg file
    cfg_d = Core_Util.parse_configuration_file(sys.argv[1])

def lith_buoyancy_profiles(control_d, section, x_trench):
    '''Compute temperature, velocity, and buoyancy-flux profiles across the
    lithosphere at a fixed distance from the trench, and return the profile
    file names plus their depth integrals.'''
    # parameters from dictionary
    lith_profile_distance = float(control_d['lith_profile_distance'])
    res = float(control_d['profile_resolution'])  # km
    xpos = x_trench - lith_profile_distance
    depth_min = 0  # km
    depth_max = 300.0  # km
    cc = int((depth_max - depth_min) / res)
    # store all profile names for plotting
    out_l = []

    # *** profiles across lithosphere ***
    dist = np.tile(xpos, cc)
    depth = [depth_min + pp * res for pp in range(cc)]
    xy = 'lithp_' + section + '.c.xy'
    out_l.append(xy)
    np.savetxt(xy, np.column_stack((dist, depth)))

    # get values from grids along profile line
    val_list = []
    for field in ['temp', 'tangent']:
        xy_out = 'lithp_' + section + '_' + field + '.c.xy'
        grid = field + '_' + section + '.c.grd'
        Core_Util.find_value_on_line(xy, grid, xy_out)  # xy defined above
        dist, val = np.loadtxt(xy_out, usecols=(1, 2), unpack=True)
        val_list.append(val)
        np.savetxt(xy_out, np.column_stack((dist, val)))
        out_l.append(xy_out)

    # integrals across lithosphere (simps is scipy.integrate.simps)
    temp = val_list[0]
    velo = val_list[1]
    lithtempint = abs(round(simps(temp - 1, x=None, dx=res), 2))
    # velo through profile surface
    lithveloint = abs(round(simps(velo, x=None, dx=res), 2))
    lithveloint /= depth_max
    # buoyancy flux through profile surface
    lithflux = -(temp - 1) * velo
    xy = 'lithp_' + section + '_flux.c.xy'
    out_l.append(xy)
    np.savetxt(xy, np.column_stack((dist, lithflux)))
    lithfluxint = abs(round(simps((temp - 1) * velo, x=None, dx=res), 2))

    int_l = np.array([lithtempint, lithveloint, lithfluxint])
    return out_l, int_l

def main():
    """main sequence of script actions"""
    print(now(), 'assimilation_diagnostic.py:')
    print(now(), 'main:')
    # get the .cfg file as a dictionary
    control_d = Core_Util.parse_configuration_file(sys.argv[1])
    PLOT_CROSS_SECTIONS = control_d['PLOT_CROSS_SECTIONS']
    PLOT_MAPS = control_d['PLOT_MAPS']
    # get the master dictionary
    master_d = Core_Citcom.get_all_pid_data(control_d['pid_file'])
    # get times to process and plot
    time_d = Core_Citcom.get_time_spec_dictionary(control_d['time_spec'],
                                                  master_d['time_d'])
    master_d['control_d'] = control_d
    # N.B. master_d['time_d'] contains all the time info from the citcoms model
    # N.B. master_d['control_d']['time_d'] contains specific times to process
    master_d['control_d']['time_d'] = time_d
    # func_d is a separate dictionary that is used to transport
    # temporary files and objects between functions
    master_d['func_d'] = {'rm_list': []}
    # find track locations
    make_profile_track_files(master_d)
    make_cpts(master_d)
    # make cross-sections
    if PLOT_CROSS_SECTIONS:
        make_cross_section_diagnostics(master_d)
    # make maps
    if PLOT_MAPS:
        ps_l = []
        for tt in range(len(time_d['time_list'])):
            ps = make_map_postscript(master_d, tt)
            ps_l.append(ps)
        pdf_name = control_d['prefix'] + '_'
        pdf_name += 'Map.pdf'
        Core_Util.make_pdf_from_ps_list(ps_l, pdf_name)
    # clean up
    Core_Util.remove_files(master_d['func_d']['rm_list'])

def end_postscript(ps):
    '''End a postscript'''
    if verbose:
        print(Core_Util.now(), 'end_postscript:')
    opts = {'T': '', 'O': '', 'R': '0/1/0/1', 'J': 'x1.0'}
    callgmt('psxy', '', opts, '>>', ps)

def main():
    """main sequence of script actions"""
    print(now(), 'pub_global.py:')
    print(now(), 'main:')
    # parse cmd line input for input plotting config file
    if len(sys.argv) != 2:
        usage()
    # this part is not intuitive to an uninformed user
    # can we avoid these initialize commands?
    # initialize the modules
    Core_Util.initialize()
    Core_Citcom.initialize()
    # Get the framework dictionary
    geoframe_dict = Core_Util.geoframe_dict
    # read settings from control file
    dict = Core_Citcom.parse_configuration_file(sys.argv[1])
    # move loose parameters (not within Figure_X) from dict to a
    # temporary new dictionary (adict) then deepcopy to
    # dict['All_Figure']
    # this cleans up dict by ensuring the keys are e.g.
    # 'All_Figure', 'Figure_A', 'Figure_B' etc.
    #adict = {}
    #for key in list(dict):
    #    if not key.startswith('Figure'):
    #        adict[key] = dict.pop(key)
    #dict['figure_keys'] = sorted(dict.keys())
    # ??? set_global_defaults( adict )
    #dict['All_Figure'] = copy.deepcopy( adict )
    #del adict  # delete temporary dictionary
    # ??? set_positioning( dict )
    # set adict as pointer to dict['All_Figure']
    #adict = dict['All_Figure']
    print(dict)
    ps = 'test.ps'
    make_postscript(dict, ps)

def plot_gplates_coastline(opts_d, ps, age):
    '''Plot GPlates coastlines.'''
    geoframe_d = Core_Util.parse_geodynamic_framework_defaults()
    age = int(age)  # ensure integer
    gplates_line_dir = geoframe_d['gplates_coast_dir']
    arg = gplates_line_dir + '/reconstructed_%(age)s.00Ma.xy' % vars()
    if os.path.exists(arg):
        callgmt('psxy', arg, opts_d, '>>', ps)

def make_profile_track_files(master_d):
    '''Make annular and rectangular track files.'''
    if verbose:
        print(now(), 'make_profile_track_files:')
    control_d = master_d['control_d']
    func_d = master_d['func_d']
    pid_d = master_d['pid_d']
    rm_list = func_d['rm_list']
    annular_project = []
    rectangular_project = []
    # loop over all sections
    for section in control_d['_SECTIONS_']:
        # dictionary for this section
        section_d = control_d[section]
        lon0 = section_d['lon0']
        lat0 = section_d['lat0']
        lon1 = section_d['lon1']
        lat1 = section_d['lat1']
        # annular
        proj_name = 'annular_project_' + section + '.xy'
        annular_project.append(proj_name)
        rm_list.append(proj_name)
        incr = 0.5  # sample every 0.5 degrees
        Core_Util.make_great_circle_with_two_points(lon0, lat0, lon1, lat1,
                                                    incr, 'w', proj_name)
        # rectangular
        proj_name2 = 'rectangular_project_' + section + '.xy'
        rectangular_project.append(proj_name2)
        rm_list.append(proj_name2)
        lon, lat, dist = np.loadtxt(proj_name, unpack=True)
        xx = np.radians(dist) * pid_d['radius_km']
        rr = np.tile(0, len(xx))  # track at 0 km depth
        np.savetxt(proj_name2, np.column_stack((xx, rr)))
    # store project files for processing and plotting routines
    func_d['annular_project'] = annular_project
    func_d['rectangular_project'] = rectangular_project

def get_T_from_minmax(xyz_filename):
    '''get a -T value from minmax on a xyz file'''
    # NOTE: a simpler duplicate of the get_T_from_minmax() above (likely
    # from another module): no GMT4/GMT5 wrapper check, no min == max
    # guard, and a different dt ladder.
    cmd = 'minmax -C %(xyz_filename)s' % vars()
    s = subprocess.check_output(cmd, shell=True, universal_newlines=True)
    if verbose:
        print(Core_Util.now(), cmd)
    l = s.split()
    min = float(l[4])
    max = float(l[5])
    if max >= 10000000:
        dt = 1000000.
    elif max >= 100000:
        dt = 1000.
    elif max >= 1000:
        dt = 100.
    elif max >= 1:
        dt = .01
    else:
        dt = 1.0
    T = '-T%(min)s/%(max)s/%(dt)s' % vars()
    if verbose:
        print(Core_Util.now(), 'T =', T)
    return T

def plot_gplates_ridge_and_transform(opts_d, ps, age):
    '''Plot GPlates ridges and transforms.'''
    geoframe_d = Core_Util.parse_geodynamic_framework_defaults()
    age = int(age)  # ensure integer
    gplates_line_dir = geoframe_d['gplates_line_dir']
    arg = gplates_line_dir + \
        '/topology_ridge_transform_boundaries_%(age)s.00Ma.xy' % vars()
    if os.path.exists(arg):
        callgmt('psxy', arg, opts_d, '>>', ps)

def plot_gplates_slab_polygon(opts_d, ps, age):
    '''Plot GPlates slab polygons.'''
    geoframe_d = Core_Util.parse_geodynamic_framework_defaults()
    age = int(age)  # ensure integer
    gplates_line_dir = geoframe_d['gplates_line_dir']
    arg = gplates_line_dir + \
        '/topology_slab_polygons_%(age)s.00Ma.xy' % vars()
    if os.path.exists(arg):
        callgmt('psxy', arg, opts_d, '>>', ps)

def plot_age_grid_continent(opts_d, ps, age):
    '''Plot the continental age grid.'''
    geoframe_d = Core_Util.parse_geodynamic_framework_defaults()
    age = int(age)  # ensure integer
    age_grid_dir = geoframe_d['age_grid_cont_dir']
    age_grid_prefix = geoframe_d['age_grid_cont_prefix']
    arg = age_grid_dir + '/' + age_grid_prefix
    arg += '%(age)s.grd' % vars()
    if os.path.exists(arg):
        callgmt('grdimage', arg, opts_d, '>>', ps)

def plot_gplates_no_assimilation_stencil(opts_d, ps, age):
    '''Plot GPlates no assimilation stencils.'''
    geoframe_d = Core_Util.parse_geodynamic_framework_defaults()
    age = int(age)  # ensure integer
    no_ass_dir = geoframe_d['no_ass_dir']
    arg = no_ass_dir + \
        '/topology_network_polygons_%(age)0.2fMa.xy' % vars()
    if os.path.exists(arg):
        callgmt('psxy', arg, opts_d, '>>', ps)
    else:
        print('cannot find file:', arg)

def start_postscript(ps):
    '''Start a postscript'''
    if verbose:
        print(Core_Util.now(), 'start_postscript:')
    arg = 'PS_MEDIA letter PROJ_LENGTH_UNIT inch '
    arg += 'MAP_ORIGIN_X 0 MAP_ORIGIN_Y 0'
    callgmt('gmtset', arg, '', '', '')
    opts = {'K': '', 'T': '', 'R': '0/1/0/1', 'J': 'x1.0'}
    callgmt('psxy', '', opts, '>', ps)
    opts = {'K': '', 'O': ''}
    return opts

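# Typical usage of the start/end pair (a sketch with illustrative values):
# start_postscript() opens the PostScript file and returns the -K/-O
# continuation options that every subsequent GMT call must carry;
# end_postscript() closes the PostScript stream.
def example_basemap_ps(ps='example.ps'):
    '''Sketch: minimal start -> plot -> end PostScript workflow.'''
    opts = start_postscript(ps)
    opts['R'] = 'g'
    opts['J'] = 'H180/8'
    opts['B'] = 'a30'
    callgmt('psbasemap', '', opts, '>>', ps)
    end_postscript(ps)
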
def plot_grid(grid_filename, xy_filename=None, R_value='g', T_value='-T0/1/.1'):
    '''simple function to make a test plot'''
    global verbose
    verbose = True
    # postscript name
    # NOTE: was grid_filename.rstrip('.grd'), but rstrip strips a character
    # set, not a suffix, and can eat trailing '.', 'g', 'r', 'd' from the stem
    ps = grid_filename.replace('.grd', '.ps')
    # page orientation must be set before start_postscript()
    arg = 'PAGE_ORIENTATION landscape'
    callgmt('gmtset', arg, '', '', '')
    # start postscript
    # the returned dictionary has 'O' and 'K' set
    opts = start_postscript(ps)
    # psbasemap 2
    opts['X'] = apos(3)
    opts['Y'] = apos(3)
    opts['R'] = R_value  # either regional: '0/57/-14/14'; or global: 'g'
    opts['B'] = 'a30'
    opts['J'] = 'X5/3'  # 'R0/6'
    callgmt('psbasemap', '', opts, '>>', ps)
    # create a cpt for this grid
    cpt_file = grid_filename.replace('.grd', '.cpt')
    cmd = '-Cpolar ' + T_value
    callgmt('makecpt', cmd, '', '>', cpt_file)
    # grdimage
    opts['C'] = cpt_file
    callgmt('grdimage', grid_filename, opts, '>>', ps)
    # psxy (only if an overlay line file was given; the default is None)
    del opts['C']
    if xy_filename:
        opts['m'] = ' '
        callgmt('psxy', xy_filename, opts, '>>', ps)
    # end postscript
    end_postscript(ps)
    # create a .png image file
    #cmd = 'convert -resize 300% -rotate 90 ' + ps + ' ' + ps.replace('.ps', '.png')
    cmd = 'convert -rotate 90 ' + ps + ' ' + ps.replace('.ps', '.png')
    if verbose:
        print(Core_Util.now(), cmd)
    # call
    subprocess.call(cmd, shell=True)

def start_postscript(ps):
    '''Start a postscript'''
    # NOTE: GMT4-era variant of start_postscript() above; PAPER_MEDIA,
    # MEASURE_UNIT, X_ORIGIN and Y_ORIGIN are the GMT4 names of the
    # GMT5 keys used there.
    if verbose:
        print(Core_Util.now(), 'start_postscript:')
    arg = 'PAPER_MEDIA letter MEASURE_UNIT inch '
    arg += 'X_ORIGIN 0 Y_ORIGIN 0'
    callgmt('gmtset', arg, '', '', '')
    opts = {'K': '', 'T': '', 'R': '0/1/0/1', 'J': 'x1.0'}
    callgmt('psxy', '', opts, '>', ps)
    opts = {'K': '', 'O': ''}
    return opts

def callgmt(gmtcmd, arg, opts='', redirect='', out=''):
    '''Call a Generic Mapping Tools (GMT) command.

    The arguments to this function are all single strings, and typically
    constructed from parameters in client code. Only gmtcmd is required;
    all other arguments are optional, depending on the GMT command to call.

    gmtcmd   : the actual GMT command to call, almost always a single
               string value.
    arg      : the required arguments for the command (string).
    opts     : the optional arguments for the command (in a dictionary).
    redirect : the terminal redirect symbol, usually '>', sometimes a
               pipe '|' or input '<'.
    out      : the output file name.
    '''
    # build list of commands
    cmd_list = [gmtcmd]
    # (required) arguments
    if arg:
        cmd_list.append(arg)
    # options
    if opts:
        cmd_list.extend('-' + str(k) + str(v) for k, v in opts.items())
    # redirect
    if redirect:
        cmd_list.append(redirect)
    # out file
    if out:
        cmd_list.append(out)
    # create one string
    cmd = ' '.join(cmd_list)
    # always report on calls to GMT for log files
    print(Core_Util.now(), cmd)
    # capture output (returned as bytes)
    p = subprocess.check_output(cmd, shell=True)
    # convert bytes output to string
    s = bytes.decode(p)
    return s

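# Example invocation (a sketch; file names are illustrative): the opts
# dictionary expands to '-R0/360/-90/90 -JX5/3 -O -K' on the command line,
# and redirect/out route the PostScript output.
def example_callgmt_usage():
    '''Sketch of a typical callgmt() call appending to an open plot.'''
    opts = {'R': '0/360/-90/90', 'J': 'X5/3', 'O': '', 'K': ''}
    callgmt('psxy', 'line.xy', opts, '>>', 'plot.ps')
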
def plot_gplates_leading_edge(opts_d, ps, age, linestyle='sawtooth'):
    '''Plot GPlates leading edge.'''
    geoframe_d = Core_Util.parse_geodynamic_framework_defaults()
    age = int(age)  # ensure integer
    gplates_line_dir = geoframe_d['gplates_line_dir']
    gplates_subduction_prefix = gplates_line_dir + \
        '/topology_'
    for subtype in ['slab_edges_leading']:
        for polarity in ['sL', 'sR']:
            symbarg = polarity[-1].lower()
            suffix = '_%(polarity)s_%(age)0.2fMa.xy' % vars()
            arg = gplates_subduction_prefix + subtype + suffix
            if os.path.exists(arg):
                if linestyle == 'sawtooth':
                    S = 'f0.2i/0.05i+%(symbarg)s+t' % vars()
                    arg += ' -S%(S)s' % vars()
                callgmt('psxy', arg, opts_d, '>>', ps)

def test(argv):
    '''geodynamic framework module self test'''
    global verbose
    verbose = True
    print(now(), 'test: sys.argv = ', sys.argv)
    # run the tests
    # read the defaults
    frame_d = Core_Util.parse_geodynamic_framework_defaults()
    # read the first command line argument as a .cfg file
    #cfg_d = parse_configuration_file( sys.argv[1] )
    # TODO: comment in and out functions as needed
    #get_IRIS_WebServices_Catalog()
    #get_CMT_Catalog(1)
    get_EHB_Catalog()

def main():
    '''Main sequence of script actions.'''
    # input arguments
    pid_file = sys.argv[1]
    grid = sys.argv[2]
    # NOTE: sys.argv values are strings, and any non-empty string is
    # truthy, so parse the flag explicitly rather than testing it directly
    WATER_LOAD = sys.argv[3].lower() in ('true', '1', 'yes')
    pid_d = Core_Util.parse_configuration_file(pid_file)
    # XXX HARD-CODED
    pid_d['rho_mantle'] = 3300  # kg m^-3
    pid_d['rho_water'] = 1025  # kg m^-3
    # a scaled (dimensional) air-loaded topography grid is required
    # for water loading
    air_grid = air_loaded_topography(pid_d, grid)
    if WATER_LOAD:
        water_loaded_topography(pid_d, air_grid)

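# The water-loading step relies on the standard isostatic amplification:
# topography loaded by water rather than air scales by
# rho_m / (rho_m - rho_w), about 1.45 for the densities hard-coded above.
# A sketch of that relation (the grid I/O is assumed to live in
# air_loaded_topography / water_loaded_topography):
def water_load_factor(rho_mantle=3300.0, rho_water=1025.0):
    '''Return the isostatic amplification factor for water loading.'''
    return rho_mantle / (rho_mantle - rho_water)
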
def main():
    '''This is the main workflow of the script'''
    # report the start time and the name of the script
    print(now(), 'example.py')
    # a way to show what version of python is being used:
    print(now(), 'sys.version_info =', str(sys.version_info))
    print("os.path.dirname(__file__) =", os.path.dirname(__file__))
    # Get the configuration control file as a dictionary
    control_d = Core_Util.parse_configuration_file(sys.argv[1])
    # Get the string
    string = control_d['string']
    # print the string
    print(now(), 'string =', string)
    sys.exit()

def main():
    # parameters
    nlon = 30
    nlat = 30
    # preliminaries
    master_d = Core_Citcom.get_all_pid_data('pid23039.cfg')
    coor_by_cap = make_coordinate_files(master_d)

    # algorithm 1: brute force
    t0 = time.time()
    for nn in range(10):
        brute_force(master_d, coor_by_cap, nlon, nlat)
    t1 = time.time()
    total = t1 - t0
    print(now(), 'brute_force=', total)
    #t1 = timeit.timeit(stmt='brute_force_algorithm()',
    #                   setup='from __main__ import brute_force_algorithm')
    #print( t1 )

    # algorithm 2: kd tree
    # specific preliminaries
    coor_by_cap = Core_Util.flatten_nested_structure(coor_by_cap)
    coor_by_cap = np.array(coor_by_cap)
    tree = spatial.KDTree(coor_by_cap)
    #pts = np.array( [[0, 0],[1,2],[30,40],[56,56],[180,76],[240,-24],
    #                 [270,-60],[37,5],[345,3],[356,-87]] )
    pts = np.array([30, 30])
    t0 = time.time()
    print(tree.query(pts)[1])
    t1 = time.time()
    total = t1 - t0
    print(now(), 'kd_tree=', total)

def main():
    '''Main sequence of script actions.'''
    print(now(), 'plot_lith_age.py:')
    print(now(), 'main:')
    if len(sys.argv) != 3:
        usage()

    # parameters
    pid_file = sys.argv[1]
    time_spec = sys.argv[2]

    ### parse and setup dictionaries ###
    master_d = Core_Citcom.get_all_pid_data(pid_file)
    pid_d = master_d['pid_d']
    time_d = Core_Citcom.get_time_spec_dictionary(time_spec, master_d['time_d'])
    runtime_Myr = time_d['runtime_Myr'][0]
    age = int(round(time_d['age_Ma'][0], 0))
    datafile = pid_d['datafile']
    lith_age_depth = pid_d['lith_age_depth']
    start_age = pid_d['start_age']
    time = time_d['time_list'][0]
    geoframe_d = master_d['geoframe_d']
    depth_km = master_d['coor_d']['depth_km']
    mantle_temp = pid_d['mantle_temp']
    radius = master_d['coor_d']['radius']
    radius_outer = pid_d['radius_outer']
    radius_km = pid_d['radius_km']
    scalet = pid_d['scalet']
    rm_list = []  # list of files to remove

    ###################################
    ### input directories and files ###
    ###################################

    # reconstructed cross-section (plate frame of reference)
    cross_section_dir = 'aus_xsect/'
    cross_section_name = cross_section_dir + 'reconstructed_%(age)s.00Ma.xy' % vars()

    # continental grids
    cont_dir = geoframe_d['age_grid_cont_dir'] + '/'
    cont_name = cont_dir + geoframe_d['age_grid_cont_prefix'] + '%(age)s.grd' % vars()

    # directory of lith_age3_%(age)s.grd files from make_history_for_age.py
    lith_age_dir = '/net/beno2/nobackup1/danb/global/lith_age/'
    lith_age_name = lith_age_dir + 'lith_age3_%(age)s.grd' % vars()

    ### end input directories and files ###

    ### process cross_section_name ###
    infile = open(cross_section_name, 'r')
    lines = infile.readlines()
    infile.close()  # was 'infile.close' (missing parentheses)
    out = []
    for line in lines:
        if line.startswith('>'):
            pass
        else:
            out.append(line.strip())

    # profile start location
    lon0 = float(out[0].split()[0])
    lat0 = float(out[0].split()[1])
    print(now(), '(lon0, lat0)', lon0, lat0)

    # profile end location
    lon1 = float(out[1].split()[0])
    lat1 = float(out[1].split()[1])
    print(now(), '(lon1, lat1)', lon1, lat1)

    # min and max bounds for GMT region (R)
    lon_min = min(lon0, lon1) - 10
    lon_max = max(lon0, lon1) + 10
    lat_min = min(lat0, lat1) - 15
    lat_max = max(lat0, lat1) + 15
    print(now(), '(lon_min, lat_min)', lon_min, lat_min)
    print(now(), '(lon_max, lat_max)', lon_max, lat_max)

    # Nico's 1-D profile
    # interpolate for data points between end values
    # (was rstrip('xy'), which strips characters, not a suffix)
    proj_name = cross_section_name[:-2] + 'p.xy'
    rm_list.append(proj_name)
    dlon = lon1 - lon0
    dlat = lat1 - lat0
    outfile = open(proj_name, 'w')
    outfile.write('%(lon0)s %(lat0)s %(lon0)s\n' % vars())
    lon = lon0
    lat = lat0
    while 1:
        lon += dlon / 500
        lat += dlat / 500
        if lon <= lon1:  # and lat <= lat1:
            lineout = '%(lon)s %(lat)s %(lon)s\n' % vars()
            outfile.write(lineout)
        else:
            break
    outfile.close()

    # purple circles
    # map
    lon_markers = cross_section_dir + 'lon_markers_map.xy'
    rm_list.append(lon_markers)
    ofile = open(lon_markers, 'w')
    lon_floor = int(np.floor(lon0))
    lon_ceil = int(np.ceil(lon1))
    for lon in range(lon_floor, lon_ceil + 1):
        if not lon % 5:
            olat = (lon - lon0) / dlon * dlat + lat0
            outline = '%(lon)s %(olat)s\n' % vars()
            ofile.write(outline)
    ofile.close()

    # annulus
    lon_markers_ann = cross_section_dir + 'lon_markers_ann.xy'
    rm_list.append(lon_markers_ann)
    plon, plat = np.loadtxt(lon_markers, unpack=True)
    prad = np.tile(radius_outer, len(plon))
    np.savetxt(lon_markers_ann, np.column_stack((plon, prad)))

    ### end process cross_section_name ###

    ### build list of temperature grids to track through ###
    # these grids must have previously been created using grid_maker.py
    gpfx = 'grid/' + datafile
    temp_list = []
    for depth in depth_km:
        gsfx = '.temp.' + str(int(depth)) + '.' + str(time) + '.grd'
        temp_list.append(gpfx + gsfx)

    # take just from 500 km depth and less
    depth_km_array = np.array(depth_km)
    znode = np.min(np.where(depth_km_array < 500)) - 1
    temp_list = temp_list[znode:]
    ### end of build temperature grid list ###

    #### idealized thermal structure from age grids
    ideal_lith_xyz = cross_section_dir + 'ideal.lith.%(age)s.xyz' % vars()
    rm_list.append(ideal_lith_xyz)
    Core_Util.find_value_on_line(proj_name, lith_age_name, ideal_lith_xyz)
    lithlon, lithlat, lithdist1, lithage_Ma = np.loadtxt(ideal_lith_xyz, unpack=True)
    lithdist = np.tile(lithdist1, pid_d['nodez'])
    lithage_Ma = np.tile(lithage_Ma, pid_d['nodez'])
    lithrad = []
    for rad in radius:
        lithrad.extend([rad for xx in range(len(lithdist1))])
    lithrad = np.array(lithrad)
    # half-space cooling profile (erf is scipy.special.erf)
    lithtemp = erf((1.0 - lithrad) / (2.0 * np.sqrt(lithage_Ma / scalet)))
    lithtemp *= float(mantle_temp)
    nan = np.where(np.isnan(lithtemp))
    #nnan = np.where(~np.isnan( lithtemp ))
    np.savetxt(ideal_lith_xyz, np.column_stack((lithdist, lithrad, lithtemp)))
    #nan_values = np.ones( np.size( nan ) )*-1
    #f_handle = open( ideal_lith_xyz, 'ab')
    #np.savetxt(f_handle, np.column_stack( (lithdist[nan], lithrad[nan], nan_values) ))
    #f_handle.close()
    #### end of idealized thermal structure from age grids

    # make temperature xyz
    temp_xyz = cross_section_dir + 'citcom.temp.%(age)s.xyz' % vars()
    rm_list.append(temp_xyz)
    # this is hacky, but loop over only the top 500 km
    master_d['coor_d']['radius'] = master_d['coor_d']['radius'][znode:]
    pao, x_ann_max = Core_Util.make_annulus_xyz(master_d, proj_name, temp_xyz, temp_list)

    ### make idealized lithosphere and citcom temperature grid ###
    blockmedian_I = '0.2/0.0035'
    surface_I = '0.1/0.00125'
    surface_T = '0.25'
    rad_in = '0.92151939'
    rad_out = '1.0'

    # for plotting data
    R_ann = str(lon0) + '/' + str(lon1) + '/' + rad_in + '/' + rad_out
    # for dimensional psbasemap
    psbase_R = str(lon0) + '/' + str(lon1) + '/' + str(5871) + '/' + str(radius_km)

    grid_names = []
    for xyz in [temp_xyz, ideal_lith_xyz]:
        # slice off the 'xyz' extension (was rstrip('xyz'))
        block_name = xyz[:-3] + 'b.xyz'
        rm_list.append(block_name)
        grid_name = block_name[:-3] + 'grd'
        grid_names.append(grid_name)
        rm_list.append(grid_name)
        cmd = xyz + ' -I' + blockmedian_I + ' -R' + R_ann
        callgmt('blockmedian', cmd, '', '>', block_name)
        cmd = block_name + ' -I' + surface_I + ' -R' + R_ann
        cmd += ' -T' + surface_T
        cmd += ' -Ll0 -Lu1'
        callgmt('surface', cmd, '', '', '-G' + grid_name)
    ### end of make temperature grids ###

    ### percentage error between temperature fields ###
    cmd = grid_names[0] + ' ' + grid_names[1] + ' SUB '
    cmd += grid_names[1] + ' DIV'
    cmd += ' 100 MUL'
    temp_diff_grid = cross_section_dir + 'temp.difference.grd'
    grid_names.append(temp_diff_grid)
    rm_list.append(temp_diff_grid)
    callgmt('grdmath', cmd, '', '=', temp_diff_grid)
    ### end percentage error ###

    ### lith_age_depth overlay line ###
    xy = cross_section_dir + 'lith_depth.xy'
    rm_list.append(xy)
    lith_age_radius = pid_d['radius_outer'] - pid_d['lith_age_depth']
    lith_depth = np.tile(lith_age_radius, len(lithdist1))
    np.savetxt(xy, np.column_stack((lithdist1, lith_depth)))
    ### end overlay line ###

    ### make cpts ###
    # age grid
    cpt_pfx = cross_section_dir
    cpt_name = cpt_pfx + 'age.cpt'
    rm_list.append(cpt_name)
    cmd = '-Crainbow -T0/370/10'
    callgmt('makecpt', cmd, '', '>', cpt_name)

    # continental types
    cpt_name = cpt_pfx + 'cont.cpt'
    rm_list.append(cpt_name)
    cmd = '-Crainbow -T-4/0/1'
    callgmt('makecpt', cmd, '', '>', cpt_name)

    # differential temperature
    cpt_name = cpt_pfx + 'diff.cpt'
    rm_list.append(cpt_name)
    cmd = '-Cpolar -T-10/10/1'
    callgmt('makecpt', cmd, '', '>', cpt_name)

    # temperature
    cpt_name = cpt_pfx + 'temp.cpt'
    cmd = '-Cpolar -T0/1/0.0675'
    rm_list.append(cpt_name)
    callgmt('makecpt', cmd, '', '>', cpt_name)

    # for temperature contours
    cpt_name = cpt_pfx + 'temp.cont'
    cmd = '-Cjet -T0.1/0.4/0.1'
    rm_list.append(cpt_name)
    callgmt('makecpt', cmd, '', '>', cpt_name)

    ### plotting ###
    ps = datafile + '.lith.age.analysis.%(age)sMa.ps' % vars()
    callgmt('gmtset', 'PAGE_ORIENTATION', '', '', 'portrait')
    callgmt('gmtset', 'LABEL_FONT_SIZE', '', '', '12')
    callgmt('gmtset', 'LABEL_FONT', '', '', '4')
    callgmt('gmtset', 'LABEL_OFFSET', '', '', '0.02')
    callgmt('gmtset', 'ANNOT_FONT_SIZE_PRIMARY', '', '', '10p')
    callgmt('gmtset', 'ANNOT_FONT_PRIMARY', '', '', '4')
    opts_d = Core_GMT.start_postscript(ps)

    # pre-initialize for pstext commands
    pstext_d = opts_d.copy()
    pstext_d['R'] = '0/8.5/0/11'
    pstext_d['J'] = 'x1.0'

    # title information
    stdin = '1 10.5 14 0 4 ML Model = %(datafile)s\n' % vars()
    stdin += '1 10.3 14 0 4 ML lith_age_depth = %(lith_age_depth)s\n' % vars()
    stdin += '7.5 10.5 14 0 4 MR Current Age = %(age)s Ma\n' % vars()
    stdin += '7.5 10.3 14 0 4 MR start_age = %(start_age)s Ma\nEOF' % vars()
    callgmt('pstext', '', pstext_d, '<< EOF >>', ps + '\n' + stdin)

    # plot maps #
    map_d = opts_d.copy()
    map_d['B'] = 'a20f10/a10f5::WeSn'
    map_d['R'] = '%(lon_min)s/%(lon_max)s/%(lat_min)s/%(lat_max)s' % vars()
    map_d['C'] = cross_section_dir + 'age.cpt'
    map_d['J'] = 'M3'
    map_d['X'] = 'a1'
    map_d['Y'] = 'a8'
    map_grid = lith_age_name
    callgmt('grdimage', lith_age_name, map_d, '>>', ps)
    C = cross_section_dir + 'age.cpt'
    cmd = '-Ba50f10:"Age (Ma)": -D2.5/7.5/2.5/0.1h -C%(C)s -K -O' % vars()
    callgmt('psscale', cmd, '', '>>', ps)
    del map_d['B']
    del map_d['C']
    map_d['m'] = ' '
    map_d['W'] = '5,white'
    callgmt('psxy', proj_name, map_d, '>>', ps)
    del map_d['m']
    del map_d['W']
    map_d['G'] = 'purple'
    map_d['S'] = 'c0.05'
    callgmt('psxy', lon_markers, map_d, '>>', ps)
    del map_d['G']
    del map_d['S']

    # continental types
    map_d['B'] = 'a20f10/a10f5::wESn'
    map_d['C'] = cross_section_dir + 'cont.cpt'
    map_d['X'] = 'a4.5'
    map_d['Y'] = 'a8'
    callgmt('grdimage', cont_name, map_d, '>>', ps)
    C = cross_section_dir + 'cont.cpt'
    cmd = '-Ba1:"Continental type (stencil value)": -D6/7.5/2.5/0.1h -C%(C)s -K -O' % vars()
    callgmt('psscale', cmd, '', '>>', ps)
    del map_d['B']
    del map_d['C']
    map_d['m'] = ' '
    map_d['W'] = '5,black'
    callgmt('psxy', proj_name, map_d, '>>', ps)
    del map_d['m']
    del map_d['W']
    map_d['G'] = 'purple'
    map_d['S'] = 'c0.05'
    callgmt('psxy', lon_markers, map_d, '>>', ps)
    del map_d['G']
    del map_d['S']
    # end plot maps #

    # plot cross-sections #
    # temperature cross-section
    psbase_d = opts_d.copy()
    psbase_d['B'] = 'a10/500::WsNe'
    psbase_d['J'] = 'Pa6/' + str(pao) + 'z'
    psbase_d['R'] = psbase_R
    psbase_d['X'] = 'a1.25'
    psbase_d['Y'] = 'a5.25'
    callgmt('psbasemap', '', psbase_d, '>>', ps)
    opts_d['C'] = cross_section_dir + 'temp.cpt'
    opts_d['J'] = 'Pa6/' + str(pao)
    opts_d['R'] = R_ann
    opts_d['X'] = 'a1.25'
    opts_d['Y'] = 'a5.25'
    callgmt('grdimage', grid_names[0], opts_d, '>>', ps)
    # profile of lith_age_depth on this cross-section
    del opts_d['C']
    opts_d['W'] = '3,black,-'
    callgmt('psxy', xy, opts_d, '>>', ps)
    del opts_d['W']
    opts_d['G'] = 'purple'
    opts_d['N'] = ' '
    opts_d['S'] = 'c0.06'
    callgmt('psxy', lon_markers_ann, opts_d, '>>', ps)
    del opts_d['G']
    del opts_d['N']
    del opts_d['S']
    stdin = '1 6.25 12 0 4 ML CitcomS\n'
    stdin += '7.5 6.25 12 0 4 MR Temp\nEOF'
    callgmt('pstext', '', pstext_d, '<< EOF >>', ps + '\n' + stdin)
    C = cross_section_dir + 'temp.cpt'
    cmd = '-Ba0.2f0.1 -D4.25/5.7/2.5/0.1h -C%(C)s -K -O' % vars()
    callgmt('psscale', cmd, '', '>>', ps)

    # idealized lith temperature cross-section
    psbase_d['Y'] = 'a3.75'
    callgmt('psbasemap', '', psbase_d, '>>', ps)
    opts_d['C'] = cross_section_dir + 'temp.cpt'
    opts_d['Y'] = 'a3.75'
    callgmt('grdimage', grid_names[1], opts_d, '>>', ps)
    del opts_d['C']
    # profile of lith_age_depth on this cross-section
    opts_d['W'] = '3,black,-'
    callgmt('psxy', xy, opts_d, '>>', ps)
    del opts_d['W']
    opts_d['G'] = 'purple'
    opts_d['N'] = ' '
    opts_d['S'] = 'c0.06'
    callgmt('psxy', lon_markers_ann, opts_d, '>>', ps)
    del opts_d['G']
    del opts_d['N']
    del opts_d['S']
    stdin = '1 4.75 12 0 4 ML Idealised\n'
    stdin += '7.5 4.75 12 0 4 MR Temp\nEOF'
    callgmt('pstext', '', pstext_d, '<< EOF >>', ps + '\n' + stdin)
    C = cross_section_dir + 'temp.cpt'
    cmd = '-Ba0.2f0.1 -D4.25/4.2/2.5/0.1h -C%(C)s -K -O' % vars()
    callgmt('psscale', cmd, '', '>>', ps)

    # contours plot
    psbase_d['Y'] = 'a2.25'
    callgmt('psbasemap', '', psbase_d, '>>', ps)
    opts_d['Y'] = 'a2.25'
    opts_d['C'] = cross_section_dir + 'temp.cont'
    opts_d['W'] = '3,red'
    callgmt('grdcontour', grid_names[0], opts_d, '>>', ps)
    opts_d['W'] = '3,green'
    callgmt('grdcontour', grid_names[1], opts_d, '>>', ps)
    del opts_d['C']
    del opts_d['W']
    opts_d['G'] = 'purple'
    opts_d['N'] = ' '
    opts_d['S'] = 'c0.06'
    callgmt('psxy', lon_markers_ann, opts_d, '>>', ps)
    del opts_d['G']
    del opts_d['N']
    del opts_d['S']
    stdin = '1 3.25 12 0 4 ML Contours\n'
    stdin += '7.5 3.25 12 0 4 MR Temp\nEOF'
    callgmt('pstext', '', pstext_d, '<< EOF >>', ps + '\n' + stdin)

    # difference of temperature fields (relative)
    psbase_d['Y'] = 'a0.75'
    callgmt('psbasemap', '', psbase_d, '>>', ps)
    opts_d['C'] = cross_section_dir + 'diff.cpt'
    opts_d['Y'] = 'a0.75'
    callgmt('grdimage', grid_names[2], opts_d, '>>', ps)
    del opts_d['C']
    opts_d['G'] = 'purple'
    opts_d['N'] = ' '
    opts_d['S'] = 'c0.06'
    callgmt('psxy', lon_markers_ann, opts_d, '>>', ps)
    del opts_d['G']
    del opts_d['N']
    del opts_d['S']
    C = cross_section_dir + 'diff.cpt'
    cmd = '-Ba5f1 -D4.25/1.2/2.5/0.1h -C%(C)s -K -O' % vars()
    callgmt('psscale', cmd, '', '>>', ps)
    stdin = '1 1.75 12 0 4 ML Delta (\045)\n'
    stdin += '7.5 1.75 12 0 4 MR Temp\nEOF'
    #stdin += '4.25 0.6 12 0 4 MC Note: No assimilation regions are shown in BLACK\nEOF'
    callgmt('pstext', '', pstext_d, '<< EOF >>', ps + '\n' + stdin)

    Core_GMT.end_postscript(ps)

    # clean up temporary files
    Core_Util.remove_files(rm_list)

def main():
    print(now(), 'grid_maker_gplates.py')
    # get the .cfg file as a dictionary
    control_d = Core_Util.parse_configuration_file(sys.argv[1], False, False)
    Core_Util.tree_print(control_d)
    time_spec_d = Core_Citcom.get_time_spec_dictionary(control_d['time_spec'])
    print(now(), 'grid_maker_gplates.py: time_spec_d = ')
    Core_Util.tree_print(time_spec_d)

    # Get the coordinate data from the 0 Ma files
    print(now(), 'grid_maker_gplates.py: get coordinate data from .xy files:')
    lon = []
    lat = []
    for i in range(control_d['nproc_surf']):
        # get the lat lon from the .xy file
        vel_xy_filename = control_d['velocity_prefix'] + '0.%(i)s.xy' % vars()
        print(now(), 'grid_maker_gplates.py: vel_xy_filename = ', vel_xy_filename)
        i_lat, i_lon = np.loadtxt(vel_xy_filename, usecols=(0, 1), unpack=True)
        lat.append(i_lat)
        lon.append(i_lon)
    lon = Core_Util.flatten_nested_structure(lon)
    lat = Core_Util.flatten_nested_structure(lat)
    print(now(), 'grid_maker_gplates.py: len(lon) = ', len(lon))
    print(now(), 'grid_maker_gplates.py: len(lat) = ', len(lat))

    #
    # Main looping, first over times, then sections, then levels
    #
    # Variables that will be updated each loop:
    # time will be a zero padded string value used for filenames and reporting
    # depth will be a zero padded string value used for filenames and reporting
    print(now(), '=========================================================================')
    print(now(), 'grid_maker_gplates.py: Main looping, first over times, then sections, then levels')
    print(now(), '=========================================================================')

    # Loop over times
    for tt, time in enumerate(time_spec_d['time_list']):
        print(now(), 'grid_maker_gplates.py: Processing time = ', time)

        # empty file_data
        file_data = []

        # cache for the file_format
        file_format_cache = ''

        # Loop over sections (fields)
        for ss, s in enumerate(control_d['_SECTIONS_']):
            # FIXME: this extra indent is probably from when the sections loop
            # was inside the level loop?
            print(now(), 'grid_maker_gplates.py: Processing section = ', s)

            # check for required parameter 'field'
            if not 'field' in control_d[s]:
                print('ERROR: Required parameter "field" missing from section.')
                print('       Skipping this section.')
                continue  # to next section

            # get the field name
            field_name = control_d[s]['field']
            print('')
            print(now(), 'grid_maker_gplates.py: Processing: field =', field_name)

            # reset region to use -Rg for gplates
            grid_R = 'g'
            if 'shift_lon' in control_d:
                print(now(), 'grid_maker_gplates.py: grid_R set to "d" : -180/+180/-90/90')
                grid_R = 'd'
            else:
                print(now(), 'grid_maker_gplates.py: grid_R set to "g" : 0/360/-90/90')

            # get the data file name specifics for this field
            file_name_component = Core_Citcom.field_to_file_map[field_name]['file']
            print(now(), 'grid_maker_gplates.py: file_name_component = ', file_name_component)

            # get the data file column name specifics for this field
            field_column = Core_Citcom.field_to_file_map[field_name]['column']
            print(now(), 'grid_maker_gplates.py: field_column = ', field_column)

            # remove potential zero padding from age values
            time = time.replace('Ma', '')

            # process data from GPlates
            file_format = control_d['velocity_prefix'] + '%(time)s.#' % vars()
            print(now(), 'grid_maker_gplates.py: file_format = ', file_format)

            # read data in by cap
            file_data = Core_Citcom.read_cap_files_to_cap_list(control_d, file_format)

            # flatten data since we don't care about specific cap numbers
            # for the loop over levels/depths
            file_data = Core_Util.flatten_nested_structure(file_data)
            print(now(), 'grid_maker_gplates.py: len(file_data) = ', len(file_data))

            # Get the specific column for this field_name
            field_data = np.array([line[field_column] for line in file_data])
            print(now(), 'grid_maker_gplates.py: type(field_data) = ', type(field_data))
            print(now(), 'grid_maker_gplates.py: len(field_data) = ', len(field_data))
            print(now())

            # check for gplates_vmag
            if field_name == 'gplates_vmag':
                # read the vy data from col 1
                field_data_vy = [line[1] for line in file_data]
                # compute the magnitude
                vx_a = np.array(field_data)
                vy_a = np.array(field_data_vy)
                vmag_a = np.hypot(vx_a, vy_a)
                # convert back to list
                field_data = vmag_a.tolist()

            print(now(), '------------------------------------------------------------------------------')
            print(now(), 'grid_maker_gplates.py: tt,ss = ', tt, ',', ss, ';')
            print(now(), 'grid_maker_gplates.py: summary for', s, ': time =', time, '; field_name =', field_name)
            print(now(), '------------------------------------------------------------------------------')

            depth = 0
            field_slice = field_data
            xyz_filename = field_name + '-' + str(time) + '-' + str(depth) + '.xyz'
            print(now(), 'grid_maker_gplates.py: xyz_filename =', xyz_filename)
            print(now(), 'grid_maker_gplates.py: type(field_slice) = ', type(field_slice))
            print(now(), 'grid_maker_gplates.py: len(field_slice) = ', len(field_slice))
            print(now())

            # create the xyz data
            xyz_data = np.column_stack((lon, lat, field_slice))
            np.savetxt(xyz_filename, xyz_data, fmt='%f %f %f')

            # create the median file
            # (slice the extension off explicitly; rstrip('xyz') strips
            # characters, not a suffix)
            median_xyz_filename = xyz_filename[:-3] + 'median.xyz'
            blockmedian_I = control_d[s].get('blockmedian_I', '0.5')
            cmd = xyz_filename + ' -I' + str(blockmedian_I) + ' -R' + grid_R
            Core_GMT.callgmt('blockmedian', cmd, '', '>', median_xyz_filename)

            # get a T value for median file
            if not 'Ll' in control_d[s] or not 'Lu' in control_d[s]:
                T = Core_GMT.get_T_from_minmax(median_xyz_filename)
            else:
                dt = (control_d[s]['Lu'] - control_d[s]['Ll']) / 10
                T = '-T' + str(control_d[s]['Ll']) + '/'
                T += str(control_d[s]['Lu']) + '/' + str(dt)
            print(now(), 'grid_maker_gplates.py: T =', T)

            # create the grid
            grid_filename = xyz_filename[:-3] + 'grd'
            surface_I = control_d[s].get('surface_I', '0.25')
            cmd = median_xyz_filename + ' -I' + str(surface_I) + ' -R' + grid_R
            if 'Ll' in control_d[s]:
                cmd += ' -Ll' + str(control_d[s]['Ll'])
            if 'Lu' in control_d[s]:
                cmd += ' -Lu' + str(control_d[s]['Lu'])
            if 'T' in control_d[s]:
                cmd += ' -T' + str(control_d[s]['T'])
            # the grid must be created here; grdedit below operates on it
            # (this call was commented out in the original)
            Core_GMT.callgmt('surface', cmd, '', '', ' -G' + grid_filename)

            # label the variables
            # -Dxname/yname/zname/scale/offset/title/remark
            cmd = grid_filename + ' -D/=/=/' + str(field_name) + '/=/=/' + str(field_name) + '/' + str(field_name)
            Core_GMT.callgmt('grdedit', cmd, '', '', '')

            # Associate this grid with GPlates exported line data in .xy format:
            # compute age value
            age_float = 0.0
            if field_name.startswith('gplates_'):
                # time_list value for gplates data is set with age values
                age_float = float(time)
            # truncate to nearest int and make a string for the gplates .xy file name
            geoframe_d = Core_Util.parse_geodynamic_framework_defaults()
            if age_float < 0:
                age_float = 0.0
            xy_path = geoframe_d['gplates_line_dir']
            xy_filename = xy_path + '/' + 'topology_platepolygons_' + str(int(age_float)) + '.00Ma.xy'
            print(now(), 'grid_maker_gplates.py: xy_filename = ', xy_filename)

            # Make a plot of the grids
            J = 'X5/3'  # 'R0/6'
            #J = 'M5/3'
            if 'J' in control_d[s]:
                J = control_d[s]['J']
            C = 'polar'
            if 'C' in control_d[s]:
                C = control_d[s]['C']
            # gplates
            Core_GMT.plot_grid(grid_filename, xy_filename, grid_R, '-T-10/10/1')
            # end of plotting

            # Optional step to transform grid to plate frame
            if 'make_plate_frame_grid' in control_d:
                cmd = 'frame_change_pygplates.py %(time)s %(grid_filename)s %(grid_R)s' % vars()
                print(now(), 'grid_maker_gplates.py: cmd =', cmd)
                os.system(cmd)
                filename = grid_filename.replace('.grd', '-plateframe.grd')
                Core_GMT.plot_grid(filename, xy_filename, grid_R, '-T-10/10/1')

#!/usr/bin/env python
import Core_Citcom, Core_GMT, Core_Util, subprocess
import numpy as np
from Core_GMT import callgmt

# standard arguments
geoframe_d = Core_Util.parse_geodynamic_framework_defaults()
opts2_d = {'R': 'g', 'J': 'H180/8', 'X': 'a1.5', 'Y': 'a1.5'}
str_list = ['vx', 'vy', 'vz']
age_list = [29]

cmd = 'LABEL_FONT_SIZE 14p'
cmd += ' LABEL_OFFSET 0.05'
callgmt('gmtset', cmd)

for age in age_list:
    print('age=', age)
    filename = 'debug_ivel.%(age)s.xy' % vars()
    lon, lat, subparallel, sub, vx, vy, vz = np.loadtxt(filename, unpack=True)
    for nn, comp in enumerate([vx, vy, vz]):
        str_comp = str_list[nn]
        temp_name = 'output.%(age)s.xyz' % vars()
        np.savetxt(temp_name, np.column_stack((lon, lat, comp)))
        ps = 'output.%(age)s.%(str_comp)s.ps' % vars()
        opts_d = Core_GMT.start_postscript(ps)
        opts_d.update(opts2_d)

def main():
    print(now(), 'index_citcom.py')
    # get the .cfg file as a dictionary
    control_d = Core_Util.parse_configuration_file(sys.argv[1])
    #Core_Util.tree_print( control_d )

    # set the pid file
    pid_file = control_d['pid_file']

    # get the master dictionary and define aliases
    master_d = Core_Citcom.get_all_pid_data(pid_file)
    coor_d = master_d['coor_d']
    pid_d = master_d['pid_d']

    # Double check for essential data
    if master_d['time_d'] == None:
        print(now())
        print('ERROR: Required file "[CASE_NAME].time:" is missing from this model run.')
        print('       Aborting processing.')
        sys.exit(-1)

    # set up working variables
    datadir = pid_d['datadir']
    datafile = pid_d['datafile']
    startage = pid_d['start_age']
    output_format = pid_d['output_format']
    depth_list = coor_d['depth_km']
    nodez = pid_d['nodez']
    nproc_surf = pid_d['nproc_surf']
    found_depth_list = []

    # Check how to read and parse the time spec:
    read_time_d = True

    # Compute the timesteps to process
    if read_time_d:
        time_spec_d = Core_Citcom.get_time_spec_dictionary(
            control_d['time_spec'], master_d['time_d'])
    else:
        time_spec_d = Core_Citcom.get_time_spec_dictionary(
            control_d['time_spec'])
    print(now(), 'index_citcom.py: time_spec_d = ')
    Core_Util.tree_print(time_spec_d)

    # levels to process
    level_spec_d = Core_Util.get_spec_dictionary(control_d['level_spec'])
    print(now(), 'index_citcom.py: level_spec_d = ')
    Core_Util.tree_print(level_spec_d)

    #
    # Main looping, first over times, then sections, then levels
    #
    print(now(), '=========================================================================')
    print(now(), 'index_citcom.py: Main looping, first over times, then sections, then levels')
    print(now(), '=========================================================================')

    # Loop over times
    for T, time in enumerate(time_spec_d['time_list']):
        #print( now(), 'index_citcom.py: Processing time = ', time)

        if 'Ma' in time:
            # strip off units and make a number
            time = float(time.replace('Ma', ''))
            # determine what time steps are available for this age
            # NOTE: 'temp' is required to set which output files to check
            found_d = Core_Citcom.find_available_timestep_from_age(
                master_d, 'temp', time)
        else:
            # model time steps
            time = float(time)
            # determine what time steps are available for this timestep
            # NOTE: 'temp' is required to set which output files to check
            found_d = Core_Citcom.find_available_timestep_from_timestep(
                master_d, 'temp', time)
        # end of check on time format

        # set variables for subsequent loops
        timestep = found_d['found_timestep']
        runtime_Myr = found_d['found_runtime']
        # convert the found age to an int
        age_Ma = int(np.around(found_d['found_age']))

        print(now(), 'index_citcom.py: time data: requested value -> found value')
        print(now(), '       ',
              'age =', found_d['request_age'], '->', age_Ma,
              'step =', found_d['request_timestep'], '->', timestep,
              'r_tm =', found_d['request_runtime'], '->', runtime_Myr)

        # empty file_data
        file_data = []

        # Loop over sections (fields)
        for S, s in enumerate(control_d['_SECTIONS_']):
            # FIXME: this extra indent is probably from when the sections loop
            # was inside the level loop?
            #print( now(), 'index_citcom.py: Processing section = ', s)

            # check for required parameter 'field'
            if not 'field' in control_d[s]:
                print('ERROR: Required parameter "field" missing from section.')
                print('       Skipping this section.')
                continue  # to next section

            # get the field name
            field_name = control_d[s]['field']
            #print('')
            #print( now(), 'index_citcom.py: Processing: field =', field_name)

            # set the region
            #if nproc_surf == 12:
            #    grid_R = 'g'
            #    # optionally adjust the lon bounds of the grid to -180/180
            #    if 'shift_lon' in control_d:
            #        print( now(), 'index_citcom.py: grid_R set to "d" : -180/+180/-90/90')
            #        grid_R = 'd'
            #    else:
            #        print( now(), 'index_citcom.py: grid_R set to "g" : 0/360/-90/90')
            #else:
            #    grid_R = str(pid_d['lon_min']) + '/' + str(pid_d['lon_max']) + '/'
            #    grid_R += str(pid_d['lat_min']) + '/' + str(pid_d['lat_max'])

            # get the data file name specifics for this field
            file_name_component = Core_Citcom.field_to_file_map[field_name]['file']
            # get the data file column name specifics for this field
            field_column = Core_Citcom.field_to_file_map[field_name]['column']
            # report
            #print( now(), 'index_citcom.py: field = ', field_name, '; file_comp =', file_name_component, '; col =', field_column)

            # process data from Citcoms
            file_format = ''
            # check for various data dirs
            # NOTE: every branch must set file_format; the original set
            # 'file_patt' in the './data' and './Data' branches, which was
            # never read
            if os.path.exists(datadir + '/0/'):
                file_format = datadir + '/#/' + datafile + '.' + file_name_component + '.#.' + str(timestep)
            elif os.path.exists(datadir + '/'):
                file_format = datadir + '/' + datafile + '.' + file_name_component + '.#.' + str(timestep)
            elif os.path.exists('data'):
                file_format = './data/#/' + datafile + '.' + file_name_component + '.#.' + str(timestep)
            elif os.path.exists('Data'):
                file_format = './Data/#/' + datafile + '.' + file_name_component + '.#.' + str(timestep)
            # report error
            else:
                print(now())
                print('ERROR: Cannot find output data.')
                print('       Skipping this section.')
                print(now(), 'index_citcom.py: file_format = ', file_format)
                continue  # to next section

            print(now(), 'index_citcom.py: file_format = ', file_format)

            #
            # Loop over levels
            #
            for L, level in enumerate(level_spec_d['list']):
                #print( now(), 'index_citcom.py: Processing level = ', level)

                # ensure level is an int value
                level = int(level)
                depth = int(depth_list[level])
                found_depth_list.append(depth)

                #print( now(), '------------------------------------------------------------------------------')
                print(now(), 'index_citcom.py: ', s,
                      ': ts =', timestep,
                      '; age =', age_Ma,
                      #'; runtime_Myr =', runtime_Myr,
                      '; level =', level,
                      '; depth_km =', depth,
                      '; field =', field_name,
                      )
                #print( now(), '------------------------------------------------------------------------------')

                # FIXME: is it ok to change the default name to have age, rather than timestep?
                xyz_filename = datafile + '-' + field_name + '-' + str(age_Ma) + 'Ma-' + str(depth) + '.xyz'
                #print( now(), 'index_citcom.py: xyz_filename =', xyz_filename)

                #xy_filename = ''
                #xy_path = master_d['geoframe_d']['gplates_line_dir']
                #xy_filename = xy_path + '/' + 'topology_platepolygons_' + age + '.00Ma.xy'
                #print( now(), 'index_citcom.py: xy_filename = ', xy_filename)

                # Make a plot of the grids
                # citcoms

            # end of loop over levels
        # end of loop over sections
    # end of loop over times

    print(now(), 'depth_list = ', depth_list)
    print(now(), 'found_depth_list = ', found_depth_list)