def brute_force( master_d, coor_by_cap, nlon, nlat ):

    pid_d = master_d['pid_d']
    nodex = pid_d['nodex']
    nodey = pid_d['nodey']
    nproc_surf = pid_d['nproc_surf']

    nearest_point = []
    for cc in range( nproc_surf ):
        ccoor = coor_by_cap[cc]
        nearest_point.append( [] )
        for cline in ccoor:
            ( zlon, zlat ) = cline
            # compute distance between points
            zdist = Core_Util.get_distance( nlon, nlat, zlon, zlat )
            nearest_point[cc].append( zdist )

    # get index of nearest node to this coordinate
    fnearest_point = Core_Util.flatten_nested_structure( nearest_point )
    fnearest_coor = Core_Util.flatten_nested_structure( coor_by_cap )
    min_index, min_value = min(enumerate(fnearest_point), key=operator.itemgetter(1))
    (zzlon, zzlat) = fnearest_coor[min_index]

    print( now(), nlon, nlat, min_index, min_value, zzlon, zzlat )

    # find location of node in cap list
    cap_index = min_index // (nodex*nodey)
    entry_index = min_index % (nodex*nodey)

    print( now(), cap_index, entry_index )
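
# worked example of the cap/entry arithmetic above, with illustrative values
# (nodex, nodey and min_index are made up here):
#
#   nodex, nodey = 9, 9         # e.g. 9 x 9 nodes per cap surface
#   min_index = 200             # flat index into the concatenated cap lists
#   min_index // (nodex*nodey)  # -> cap_index = 2
#   min_index %  (nodex*nodey)  # -> entry_index = 38
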
def volume_error( high, low, grid, volume_target ):

    '''compute volume error of a basin relative to a target volume.'''

    volume = volume_of_basin( high, low, grid )
    err = volume - volume_target

    if verbose:
        print( now(), 'contour=', high )
        print( now(), 'volume=', volume )
        print( now(), 'volume_target=', volume_target )
        print( now(), 'error=', err )

    return err
Example #3
def main():
    """main sequence of script actions"""

    print(now(), 'assimilation_diagnostic.py:')
    print(now(), 'main:')

    # get the .cfg file as a dictionary
    control_d = Core_Util.parse_configuration_file(sys.argv[1])

    PLOT_CROSS_SECTIONS = control_d['PLOT_CROSS_SECTIONS']
    PLOT_MAPS = control_d['PLOT_MAPS']

    # get the master dictionary
    master_d = Core_Citcom.get_all_pid_data(control_d['pid_file'])

    # get times to process and plot
    time_d = Core_Citcom.get_time_spec_dictionary(control_d['time_spec'],
                                                  master_d['time_d'])
    master_d['control_d'] = control_d
    # N.B. master_d['time_d'] contains all the time info from citcoms model
    # N.B. master_d['control_d']['time_d'] contains specific times to process
    master_d['control_d']['time_d'] = time_d

    # func_d is a separate dictionary that is used to transport
    # temporary files and objects between functions
    master_d['func_d'] = {'rm_list': []}

    # find track locations
    make_profile_track_files(master_d)

    make_cpts(master_d)

    # make cross-sections
    if PLOT_CROSS_SECTIONS:
        make_cross_section_diagnostics(master_d)

    # make maps
    if PLOT_MAPS:
        ps_l = []
        for tt in range(len(time_d['time_list'])):
            ps = make_map_postscript(master_d, tt)
            ps_l.append(ps)
        pdf_name = control_d['prefix'] + '_'
        pdf_name += 'Map.pdf'
        Core_Util.make_pdf_from_ps_list(ps_l, pdf_name)

    # clean up
    Core_Util.remove_files(master_d['func_d']['rm_list'])
Example #4
def taskThreadFunction(config_filename, tasks, icAge):
    cwd = os.getcwd()
    IC = 0
    for age in tasks:
        if (age == icAge): IC = 1
        cmd = 'mkdir %(age)s' % vars()
        if verbose: print(now(), cmd)
        subprocess.call(cmd, shell=True)

        # commands for batch file
        line = 'cp geodynamic_framework_defaults.conf %(cwd)s/%(age)s;' % vars(
        )
        line += 'cd %(cwd)s/%(age)s; make_history_for_age.py ' % vars()
        line += '%(cwd)s/%(config_filename)s %(age)s ' % vars()
        line += '%(IC)s\n' % vars()

        fout = sys.stdout
        ferr = sys.stderr
        _errcode, _sout, _serr = run(line,
                                     shell=True,
                                     timeout=-1,
                                     sout=fout,
                                     eout=ferr)
        if (_errcode == -9):
            print('Error detected. Check parameters!')
        #end if

        IC = 0
Example #5
def usage():
    """print usage message and exit"""

    print(
        now(), '''usage: Create_History.py [-d] [-e] configuration_file.cfg

options and arguments:

-d: if the optional -d argument is given this script will generate 
the (required) geodynamic_framework_defaults.conf file in the current 
working directory.  This file can then be modified by the user.

-e: if the optional -e argument is given this script will print to
standard out an example configuration control file.  The parameter 
values in the example configuration_file.cfg file may need to be 
edited or commented out depending on intended use.

citation:
    Bower, D.J., M. Gurnis, and N. Flament (2015)
    Assimilating lithosphere and slab history in 4-D Earth models,
    Physics of the Earth and Planetary Interiors,
    238, 8--22, doi:10.1016/j.pepi.2014.10.013
''')

    sys.exit(0)
Example #6
def main():
    """main sequence of script actions"""

    print(now(), 'pub_global.py:')
    print(now(), 'main:')

    # parse cmd line input for input plotting config file
    if len(sys.argv) != 2:
        usage()

    # this part is not intuitive to an uninformed user
    # can we avoid these initialize commands?
    # initialize the modules
    Core_Util.initialize()
    Core_Citcom.initialize()

    # Get the framework dictionary
    geoframe_dict = Core_Util.geoframe_dict

    # read settings from control file
    dict = Core_Citcom.parse_configuration_file(sys.argv[1])

    # move loose parameters (not within Figure_X) from dict to a
    # temporary new dictionary (adict) then deepcopy to
    # dict['All_Figure']
    # this cleans up dict by ensuring the keys are e.g.
    # 'All_Figure', 'Figure_A', 'Figure_B' etc.
    #adict = {}
    #for key in list(dict):
    #    if not key.startswith('Figure'):
    #        adict[key] = dict.pop(key)
    #dict['figure_keys'] = sorted(dict.keys())

    # ??? set_global_defaults( adict )
    #dict['All_Figure'] = copy.deepcopy( adict )
    #del adict # delete temporary dictionary

    # ??? set_positioning( dict )

    # set adict as pointer to dict['All_Figure']
    #adict = dict['All_Figure']

    print(dict)

    ps = 'test.ps'

    make_postscript(dict, ps)
Example #7
def main():
    '''This is the main workflow of the script'''

    # report the start time and the name of the script
    print(now(), 'example.py')

    # a way to show what version of python is being used:
    print(now(), 'sys.version_info =', str(sys.version_info))

    print("os.path.dirname(__file__) =", os.path.dirname(__file__))

    # Get the configuration control file as a dictionary
    control_d = Core_Util.parse_configuration_file(sys.argv[1])

    # Get the string
    string = control_d['string']

    # print the string
    print(now(), 'string =', string)

    sys.exit()
Example #8
def make_annulus_grid(dict):

    # XXX BROKEN - TO FIX FOR PYTHON 3.0 AND NEW WORK FLOW

    if verbose: print( now(), 'make_annulus_grid:' )

    # output grid name
    dict['G'] = annpfx(dict) + '.grd'

    # always make grid with maximum extent
    if dict.get('R'): dict['R2'] = dict['R']

    #if not dict.get('R'):
    Core_GMT_djb.get_high_precision_region(dict)

    # update with correct grid_min and grid_max for this field
    dict.update(annulus_grid_range(dict)[dict['field']])
    dict.setdefault('grid_tension', 0.25)  # default

    # grid_increment for blockmedian already set
    dict['xyz'] = Core_GMT_djb.blockmedian(dict)

    # update with plotting grid increment
    dict['grid_increment'] = dict['grid_increment2']
    grid = Core_GMT_djb.surface(dict)

    # xyz2grd commented out
    #dict['grid_increment'] = '6/150'
    #del dict['A']
    #dict['grid'] = Core_GMT_djb.xyz2grd( dict )
    # sometimes get_great_circle_proj sets 'A'
    del dict['grid_increment']
    del dict['grid_tension']
    del dict['G']
    del dict['xyz']

    # replace user specified region for plotting
    if dict.get('R2'): dict['R'] = dict['R2']

    return grid
Example #9
def make_baloo_pbs_sub_script( control_d ):

    '''Write PBS submission script to a file.'''

    if verbose: print( now(), 'make_baloo_pbs_submission_script:' )

    # get variables
    jobname = control_d.get('jobname','Create_History_Parallel.py')
    ppn = int( control_d.get('ppn','16') )
    nodes = int( control_d['nodes']/ppn )
    # 12 hour default walltime if not specified
    walltime = control_d.get('walltime','12:00:00')
    # by default send job information to Nico
    email = control_d.get('email','*****@*****.**')

    text='''#!/bin/csh -f 
#PBS -N %(jobname)s
#PBS -l nodes=%(nodes)s:ppn=%(ppn)s
#PBS -l walltime=%(walltime)s
#PBS -m bae
#PBS -M %(email)s

#change the working directory (default is home directory)
echo Working directory is $PBS_O_WORKDIR
cd $PBS_O_WORKDIR

# Write out some information on the job
echo Running on host `hostname`
echo Time is `date`

### Define number of processors
#NPROCS=`wc -l < $PBS_NODEFILE`
#echo This job has allocated $NPROCS cpus

# Tell me which nodes it is run on
echo " "
echo This job runs on the following processors:
echo `cat $PBS_NODEFILE`
echo " "

# 
# Run the parallel job
#

parallel -a commands.batch''' % vars()

    filename = '%(jobname)s.pbs' % vars()
    control_d['qsub'] = filename
    file = open( filename, 'w' )
    file.write( '%(text)s' % vars() )
    file.close()
Example #10
def make_pbs_sub_script( control_d ):

    '''Write PBS submission script to a file.'''

    if verbose: print( now(), 'make_pbs_submission_script:' )

    # get variables
    jobname = control_d.get('jobname','Create_History_Parallel.py')
    nodes = control_d['nodes']
    # 12 hour default walltime if not specified
    walltime = control_d.get('walltime','12:00:00')

    text='''#PBS -N %(jobname)s
#PBS -l nodes=%(nodes)s
#PBS -S /bin/bash
#PBS -V
#PBS -l walltime=%(walltime)s
#PBS -q default
#PBS -m ae
#PBS -o out.$PBS_JOBID.$PBS_JOBNAME
#PBS -e err.$PBS_JOBID.$PBS_JOBNAME

#change the working directory (default is home directory)
echo Working directory is $PBS_O_WORKDIR
cd $PBS_O_WORKDIR

# Write out some information on the job
echo Running on host `hostname`
echo Time is `date`

### Define number of processors
NPROCS=`wc -l < $PBS_NODEFILE`
echo This job has allocated $NPROCS cpus

# Tell me which nodes it is run on
echo " "
echo This job runs on the following processors:
echo `cat $PBS_NODEFILE`
echo " "

# 
# Run the parallel job
#

parallel --sshloginfile $PBS_NODEFILE  -a commands.batch''' % vars()

    filename = 'qsub.Create_History_Parallel'
    control_d['qsub'] = filename
    file = open( filename, 'w' )
    file.write( '%(text)s' % vars() )
    file.close()
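
# a minimal usage sketch with illustrative values; only 'nodes' is required,
# while 'jobname' and 'walltime' fall back to the defaults above:
#
#   control_d = {'jobname': 'my_run', 'nodes': 32, 'walltime': '24:00:00'}
#   make_pbs_sub_script(control_d)
#   control_d['qsub']                  # -> 'qsub.Create_History_Parallel'
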
Example #11
def usage():
    """print usage message and exit"""

    print(
        now(), '''usage: assimilation_diagnostic.py [-e] configuration_file.cfg

options and arguments:

-e: if the optional -e argument is given this script will print to
standard out an example configuration control file.  The parameter 
values in the example configuration_file.cfg file may need to be 
edited or commented out depending on intended use.
''')

    sys.exit(0)
def main():

    # parameters
    nlon = 30
    nlat = 30

    # preliminaries
    master_d = Core_Citcom.get_all_pid_data( 'pid23039.cfg' )
    coor_by_cap = make_coordinate_files( master_d )

    # algorithm 1: brute force
    t0 = time.time()
    for nn in range(10):
        brute_force( master_d, coor_by_cap, nlon, nlat )
    t1 = time.time()
    total = t1-t0
    print( now(),' brute_force=', total )

    #t1 = timeit.timeit(stmt='brute_force_algorithm()', 
    #    setup='from __main__ import brute_force_algorithm')
    #print( t1 )

    # algorithm 2: kd tree
    # specific preliminaries
    coor_by_cap = Core_Util.flatten_nested_structure( coor_by_cap )
    coor_by_cap = np.array( coor_by_cap )
    tree = spatial.KDTree( coor_by_cap )
    #pts = np.array( [[0, 0],[1,2],[30,40],[56,56],[180,76],[240,-24],
    #    [270,-60],[37,5],[345,3],[356,-87]] )

    pts = np.array([30,30])
    t0 = time.time()
    print( tree.query( pts )[1] )
    t1 = time.time()
    total = t1-t0
    print( now(), 'kd_tree=', total )
def grdinfo( grid ):

    '''return zmin and zmax of a GMT grd file'''

    cmd = ['gmt', 'grdinfo', grid ]
    p1 = Popen( cmd, stdout=PIPE, stderr=PIPE )
    stdout, stderr = p1.communicate()
    stdout = stdout.decode()

    zmin = float(stdout.split('z_min:')[1].split()[0])
    zmax = float(stdout.split('z_max:')[1].split()[0])

    if verbose:
        print( now(), 'zmin=', zmin, ', zmax=', zmax )

    return (zmin, zmax)
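
# usage sketch (the grid file name is illustrative); the parse above relies
# on grdinfo's stats output containing 'z_min:' and 'z_max:' tokens:
#
#   zmin, zmax = grdinfo('topography-al-dim.nc')
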
Example #14
def make_profile_track_files(master_d):
    '''Make annular and rectangular track files.'''

    if verbose: print(now(), 'make_profile_track_files:')

    control_d = master_d['control_d']
    func_d = master_d['func_d']
    pid_d = master_d['pid_d']
    rm_list = func_d['rm_list']

    annular_project = []
    rectangular_project = []

    # loop over all sections
    for section in control_d['_SECTIONS_']:

        # dictionary for this section
        section_d = control_d[section]

        lon0 = section_d['lon0']
        lat0 = section_d['lat0']
        lon1 = section_d['lon1']
        lat1 = section_d['lat1']

        # annular
        proj_name = 'annular_project_' + section + '.xy'
        annular_project.append(proj_name)
        rm_list.append(proj_name)
        incr = 0.5  # sample every 0.5 degrees
        Core_Util.make_great_circle_with_two_points(lon0, lat0, lon1, lat1,
                                                    incr, 'w', proj_name)

        # rectangular
        proj_name2 = 'rectangular_project_' + section + '.xy'
        rectangular_project.append(proj_name2)
        rm_list.append(proj_name2)
        lon, lat, dist = np.loadtxt(proj_name, unpack=True)
        xx = np.radians(dist) * pid_d['radius_km']
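        # e.g., assuming pid_d['radius_km'] ~ 6371, one degree of arc maps to
        # radians(1) * 6371 ~ 111 km along the surface track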
        rr = np.tile(0, len(xx))  # track at 0 km depth
        np.savetxt(proj_name2, np.column_stack((xx, rr)))

    # store project files for processing and plotting routines
    func_d['annular_project'] = annular_project
    func_d['rectangular_project'] = rectangular_project
Example #15
def make_sbatch_sub_script( control_d ):

    '''Write SBATCH submission script to a file.'''

    if verbose: print( now(), 'make_sbatch_sub_script:' )

    # get variables
    jobname = control_d.get('jobname','Create_History_Parallel.py')
    nodes = control_d['nodes']
    # 12 hour default walltime if not specified
    #walltime = control_d.get('walltime','12:00:00')

    text = '''#!/bin/bash

# Batch file for running CitcomS on Titan at UiO
# ALB Oct/Nov 2011

# Job Details
#SBATCH --job-name=%(jobname)s
#SBATCH --account=pgp
#SBATCH --constraint=intel
# Job time and memory limits
#SBATCH --time=96:00:00 ## YOU MUST CHANGE THIS FOR LONG JOBS
#SBATCH --mem-per-cpu=2GB
#
#Parallel and mpi settings
#SBATCH --ntasks=12 ## MPI KNOWS HOW MANY NODES IT CAN USE, DON'T SPECIFY THEM
#
# Set up job environment:
source /site/bin/jobsetup
module load python/2.6.2
module load openmpi/1.4.3.intel

## Copy the CASE1 dir to the scratch, just in case
srun --ntasks=$SLURM_JOB_NUM_NODES cp -r OUTPUTFILES/ $SCRATCH

#Run program
bin/citcoms cookbook1.cfg --solver.datadir=/usit/titan/u1/abigailb/CITCOM_S/CitcomS_CIG/OUTPUTFILES/Cookbook1''' % vars()

    filename = 'qsub.Create_History_Parallel'
    control_d['qsub'] = filename
    file = open(filename,'w')
    file.write('%(text)s' % vars())
    file.close()
Example #16
def test(argv):
    '''geodynamic framework module self test'''
    global verbose
    verbose = True
    print(now(), 'test: sys.argv = ', sys.argv)
    # run the tests

    # read the defaults
    frame_d = Core_Util.parse_geodynamic_framework_defaults()

    # read the first command line argument as a .cfg file
    #cfg_d = parse_configuration_file( sys.argv[1] )

    # TODO : comment in and out functions as needed

    #get_IRIS_WebServices_Catalog()

    #get_CMT_Catalog(1)

    get_EHB_Catalog()
Example #17
def main():
    '''This is the main function of simple_map.py

    main performs several steps detailed below:
    
    Parse the configuration control file into a control dictionary (control_d).  

    Read the citcoms pid file and establish a master dictionary 
    of citcoms parameters (master_d)

    Getting Surface Coordinate Data (lat and lon)

    
    # Loop over each subsection in the control_d dictionary 
'''
    # associate the script global variable with this function
    global verbose

    # In general each script and each function should report its name
    # as the first step, to better debug workflows.
    #
    # Most diagnostic output to the user will include the now() function
    # to easily measure wall clock runtimes of processes.
    print(now(), 'simple_map.py')

    # Parse cmd line input for basic parameters of plot
    control_d = Core_Util.parse_configuration_file(sys.argv[1])

    # under verbose diagnostics, print out the control dictionary
    if verbose:
        print(now(), 'main: control_d =')
        # Core_Util.tree_print() gives nice formatted printing for dictionaries and lists
        Core_Util.tree_print(control_d)

    # Set the pid file as a variable name.
    pid_file = control_d['pid_file']

    # Get the master dictionary and define aliases
    master_d = Core_Citcom.get_all_pid_data(pid_file)

    # Now master_d has all the information related to a citcom run, and the geoframe defaults.

    # We could print out all the citcom run data file info with this code:
    #
    #if verbose:
    #    print( now(), 'main: master_d =')
    #    Core_Util.tree_print( master_d )
    #
    # but this usually gives tens of thousands of lines of data,
    # showing all the time values, all the level coordinate values, etc.

    # We can define aliases for the most commonly used sub dictionaries:
    geo_d = master_d[
        'geoframe_d']  # holds all the geoframe default settings and paths
    pid_d = master_d['pid_d']  # holds all the .pid file info
    coor_d = master_d['coor_d']  # holds all the coordinate info
    time_d = master_d['time_d']  # holds all the time info

    # Under verbose mode it's good to show the basic defaults, for reference in the script log
    if verbose:
        print(now(), 'main: geo_d =')
        Core_Util.tree_print(geo_d)

    # We also want to establish variables some commonly used data about the citcom run:
    datafile = pid_d['datafile']
    nodez = pid_d['nodez']
    nproc_surf = pid_d['nproc_surf']

    # Now we are ready to set up the basic info common to all subsection maps:

    # Getting Surface Coordinate Data (lat and lon)
    #
    # Because the surface coordinates are depth-independent, we get this information first,
    # before any looping over sections.

    # First, check if an optional 'coord_dir' entry was in the control file,
    # and then check for CitcomS *.coord.* files in that user-specified coord_dir.
    # In this case you should manually copy all the processor *.coord.* files to a single directory.
    coord = None
    try:
        if 'coord_dir' in control_d:
            coord_file_format = control_d[
                'coord_dir'] + '/%(datafile)s.coord.#' % vars()
            coord = Core_Citcom.read_citcom_surface_coor(
                master_d['pid_d'], coord_file_format)
    except FileNotFoundError:
        print(now(), 'WARNING: *.coord.* files not found in:',
              control_d['coord_dir'])
    # Second, check for CitcomS *.coord.* files in data/%RANK dirs
    if coord is None:
        try:
            coord_file_format = 'data/#/' + datafile + '.coord.#'
            coord = Core_Citcom.read_citcom_surface_coor(master_d['pid_d'],
                                                         coord_file_format)
        # If the coordinate files are missing we cannot continue:
        except FileNotFoundError:
            print(coord_file_format)
            print(now(), 'ERROR: cannot find coordinate files in',
                  control_d.get('coord_dir', ''), 'or data/%RANK')
            sys.exit(1)

    # Now flatten the coordinate data since we don't care about specific cap numbers for a given depth
    coord = Core_Util.flatten_nested_structure(coord)

    # extract lon and lat data as lists from tuples
    lon = [line[0] for line in coord]
    lat = [line[1] for line in coord]

    # Now that we have the coordinate data for all levels and all times,
    # we can process each sub section of the control file.
    #
    # The control_d dictionary has a special top level entry with key of '_SECTIONS_'.
    # The value is a list of all the subsection names.  We can iterate this way:
    # Loop over each subsection in the control_d dictionary
    for section_name in control_d['_SECTIONS_']:

        print(now())
        print('Processing subsection:', section_name)

        # get the subsection dictionary
        section_d = control_d[section_name]

        # Get the specific time and level field data to map:
        time = section_d['time']
        level = section_d['level']
        field_name = section_d['field']

        # We can use time_d to get equivalent times:
        time_triple = Core_Citcom.get_time_triple_from_timestep(
            time_d['triples'], float(time))
        age = time_triple[1]  # get the equivalent reconstruction age in Ma
        runtime = time_triple[2]  # get the equivalent model runtime in Myr
        print('time    =', time, 'steps')
        print('age     =', age, 'Ma')
        print('runtime =', runtime, 'Myr')

        # We can use the coor_d to find equivalent values for the level to map:
        radius = coor_d['radius'][
            level]  # The non-dimensional value of the radius for this level
        radius_km = coor_d['radius_km'][level]  # the equivalent radius in km
        depth = coor_d['depth'][
            level]  # The non-dimensional value of the depth for this level
        depth_km = coor_d['depth_km'][level]  # the equivalent depth in km
        print('level =', level)
        print('non-dim radius =', radius, '; radius in km =', radius_km)
        print('non-dim depth =', depth, '; depth in km =', depth_km)

        #
        # Now we will extract data for this specific time, level and field:
        #

        # Core_Citcom module has the standard mapping from field_name to the specific info
        # for file name component, and column number, for each field.

        # get the file name component for this field
        file_name_component = Core_Citcom.field_to_file_map[field_name]['file']

        # get that column number for this field
        field_column = Core_Citcom.field_to_file_map[field_name]['column']

        # Create the total filename to read
        file_format = 'data/#/' + datafile + '.' + file_name_component + '.#.' + str(
            time)

        # For data read in by proc, e.g., velo, visc, comp_nd use this form:
        file_data = Core_Citcom.read_proc_files_to_cap_list(
            master_d['pid_d'], file_format)

        # the next few diagnostic messages show how the data is reduced with each step
        print(now(), 'main: len(file_data) = ', len(file_data))

        # flatten the field data since we don't care about specific cap numbers for a single level
        file_data = Core_Util.flatten_nested_structure(file_data)

        # get the specific data column for this field_name
        field_data = [line[field_column] for line in file_data]
        print(now(), 'main: len(field_data) = ', len(field_data))

        # slice out the values for this level
        field_slice = field_data[level::nodez]
        print(now(), 'main: len(field_slice) = ', len(field_slice))
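        # (CitcomS stores nodez radial values per surface node, so the
        # [level::nodez] stride keeps exactly one value per surface point)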

        #
        # Creating an .xyz file
        #

        # Some fields will require scaling: use the NumPy functions on slices:
        if field_name == 'visc': field_slice = np.log10(field_slice)

        # Assemble the coordinate data with the field data to create a .xyz file
        xyz_data = np.column_stack((lon, lat, field_slice))
        # create the xyz file name from other filename components:
        xyz_filename = datafile + '.' + field_name + '.' + str(
            depth_km) + 'km.' + str(time) + '.xyz'
        # write the file
        np.savetxt(xyz_filename, xyz_data, fmt='%f %f %f')
        print(now(), 'main: write: xyz_filename =', xyz_filename)

        #
        # Creating a grid file
        #

        # Set the region based on the model run:
        if nproc_surf == 12:
            R = 'g'
        else:
            R = str(pid_d['lon_min']) + '/' + str(pid_d['lon_max']) + '/'
            R += str(pid_d['lat_min']) + '/' + str(pid_d['lat_max'])

        # Set some defaults for the gridding process
        blockmedian_I = '0.1'
        surface_I = '0.1'

        # use Core_GMT.callgmt() to create the median file
        median_xyz_filename = xyz_filename[:-3] + 'median.xyz'
        args = xyz_filename + ' -I' + str(blockmedian_I) + ' -R' + R
        Core_GMT.callgmt('blockmedian', args, '', '>', median_xyz_filename)

        # Use Core_GMT to create the grid with required arguments ...
        args = median_xyz_filename + ' -I' + str(surface_I) + ' -R' + R
        # ... and with any optional arguments passed in via the control file sub section_d
        if 'Ll' in section_d:
            args += ' -Ll' + str(section_d['Ll'])
        if 'Lu' in section_d:
            args += ' -Lu' + str(section_d['Lu'])
        if 'T' in section_d:
            args += ' -T' + str(section_d['T'])
        grid_filename = datafile + '.' + field_name + '.' + str(
            depth_km) + 'km.' + str(time) + '.grd'
        Core_GMT.callgmt('surface', args, '', '', ' -G' + grid_filename)

        #
        # Creating the Map
        #

        # Get the GPlates exported line data for this age
        # be sure to truncate age to nearest int and make a string for the file name
        age = str(int(age))

        # Get the base path for gplates line data, as set in the geo framework defaults file:
        xy_path = master_d['geoframe_d']['gplates_line_dir']

        # get the full path to the line data:
        xy_filename = xy_path + '/' + 'topology_platepolygons_' + age + '.00Ma.xy'
        print(now(), 'main: xy_filename = ', xy_filename)

        # make a plot of the grid 'TEST.ps'
        Core_GMT.plot_grid(grid_filename, xy_filename, R)

    # end of loop over sub section dictionary

    # exit the script
    sys.exit()
Example #18
def main():
    '''main workflow of the script'''

    # report the start time and the name of the script
    print(now(), 'create_citcom_case.py')

    # get the case name from user cmd line
    case_name = str(sys.argv[1])
    print(now(), 'Creating GDF directory structure for case:', case_name)

    # create the top level case dir
    Core_Util.make_dir(case_name)

    # set some case level file names
    case_gdf_conf = case_name + '/' + Core_Util.gdf_conf
    pdir_gdf_conf = '..' + '/' + Core_Util.gdf_conf

    # copy the system file to the main case directory
    if not os.path.exists(case_gdf_conf):
        cmd = 'cp ' + Core_Util.sys_gdf_conf + ' ' + case_gdf_conf
        print(now(), cmd)
        subprocess.call(cmd, shell=True)
    else:
        print(now(),
              'Local GDF .conf file found; NOT copying system .conf file')

    # Make sub dirs for case-based Reconstruction/ kinematic and surface data
    Core_Util.make_dir(case_name + '/Reconstruction')

    # Create specific sub-dirs for pre- and post- processing
    in_list = ['Coord', 'ICHist', 'Tracers', 'Topologies', 'Velocity']
    for d in in_list:
        Core_Util.make_dir(case_name + '/Reconstruction/' + d)

    # NOTE: A few similar Reconstruction/ type system-wide input directories
    # are directly referenced by specific entries in the GFD .conf file.
    # (types of age grids, coastlines, velocity, etc.)
    #
    # Many GDF pre- and post- scripts use the current working directory
    # copy (or link) of the .conf file to control processing steps
    # and locate base file paths.
    #
    # Be sure to sychronize your .conf for case- and run- level work.

    # Check cmd line args to create multiple runs
    n_runs = 1
    if '-r' in sys.argv:
        n_runs = int(sys.argv[sys.argv.index('-r') + 1])

    # Create specific run directories
    for i in list(range(n_runs)):
        # make a string and pad with zeros
        if n_runs < 10: d = '%01d'
        elif n_runs < 100: d = '%02d'
        else: d = '%04d'
        r = d % i
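        # e.g., n_runs = 12 uses '%02d' and yields Run-00 .. Run-11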
        # make the dir
        Core_Util.make_dir(case_name + '/Run-' + r)
        # link the case-level .conf file
        Core_Util.make_link(pdir_gdf_conf,
                            case_name + '/Run-' + r + '/' + Core_Util.gdf_conf)
def water_loaded_topography( pid_d, air_grid ):

    '''build water_loaded topography grid.'''

    # grdinfo 
    zmin, zmax = grdinfo( air_grid )

    # volume of the oceans based on etopo1
    # In Flament et al. (2008) I had 1.36e18+/-2e17 m^3
    volume_true = 1.33702586599e+18 # units of m^3

    # loading factor is a constant that quantifies the ratio of
    # water-loaded topography to air-loaded topography for a given
    # (radial) normal stress
    # derived from dh_air = dP / (drho_a * g)
    # where drho_a = 3300 - 0
    # dh_water = dP / (drho_w * g)
    # where drho_w = 3300 - 1025 (1025 approx density of seawater)
    # in Flament et al. (2014) I used rho_m=3340 kg m^-3 and rho_water=1030 kg m^-3 
    ## so that Loading_fact=3340/(3340-1030)
    loading_factor = pid_d['rho_mantle']
    loading_factor /=  pid_d['rho_mantle'] - pid_d['rho_water']
    if verbose: print( now(), 'loading_factor=', loading_factor )
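    # worked example: with the Flament et al. (2014) values quoted above,
    # loading_factor = 3340 / (3340 - 1030) = 3340 / 2310 ~ 1.446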

    # volume of the oceans with water removed
    # all calculations are then computed in the 'air-loaded' regime
    volume_target = volume_true/loading_factor

    # find sea-level (contour) using the Van Wijngaarden-Dekker-Brent method
    # see scipy documentation online
    # it's a method for finding the root of a function in a given interval
    contour = scipy.optimize.brentq( volume_error, zmin+1,
        zmax, args=( zmin, air_grid, volume_target))

    if verbose:
        print( now(), 'found solution:')
        print( now(), 'contour=', contour )
        volume = volume_of_basin( contour, zmin, air_grid )
        print( now(), 'volume=', volume )
        print( now(), 'volume_target=', volume_target )
        rel_err = np.abs( (volume_target-volume)/volume_target)
        print( now(), 'relative_err=', rel_err )

    # remove list for clean up
    remove_l = []

    # subtract contour from original, air-loaded topography
    # strip the air-loaded suffix to build the water-loaded prefix
    prefix = air_grid.replace( '-al-dim.nc', '' )
    prefix += '-wl-dim'
    output_grd1 = prefix + '1.nc'
    remove_l.append( output_grd1 )
    arg = air_grid + ' ' + str(contour) + ' SUB' 
    callgmt( 'grdmath', arg, '', '=', output_grd1 )

    # remove positive values (that should be air-loaded) from that grid
    output_grd2 = prefix + '2.nc'
    remove_l.append( output_grd2 )
    cmd = output_grd1 + ' -Sa0/NaN'
    callgmt( 'grdclip', cmd, '', '', '-G' + output_grd2 )

    # water loading of the negative values (oceans)
    output_grd3 = prefix + '3.nc'
    remove_l.append( output_grd3 )
    arg = output_grd2 + ' ' + str(loading_factor) + ' MUL'
    callgmt( 'grdmath', arg, '', '=', output_grd3 )

    # stitching air-loaded continents and water-loaded oceans together
    output_grd4 = prefix + '.nc'
    arg = output_grd3 + ' ' + output_grd1 + ' AND'
    callgmt( 'grdmath', arg, '', '=', output_grd4 )

    Core_Util.remove_files( remove_l )

    print( 'Done!' )
Example #20
def make_annulus_xyz(dict):

    # XXX BROKEN - TO FIX FOR PYTHON 3.0 AND NEW WORK FLOW

    if verbose: print( now(), 'make_annulus_xyz:' )
    # annulus parameters
    if dict['model'].startswith(modelpfx): arg = dict['model'][:2]
    else: arg = dict['model']
    dict.update(annulus_parameters(dict)[arg])

    outxyz = annpfx(dict) + '.xyz'
    outfile = open(outxyz, 'w')

    Core_Util_djb.get_great_circle_proj(dict)
    dict['G'] = '0.25'

    dict['L'] = dict.get('L_GMT', '0/360')
    dict['xy'] = Core_GMT_djb.project(dict)
    if dict.get('A'): del dict['A']
    if dict.get('E'): del dict['E']
    del dict['G']
    del dict['L']

    depth_list = dict['depth_list']
    grid_pfx = dict['grid']

    for depth in depth_list:
        lradius = float(dict.get('radius', 6371000.0)) * 1E-3 - depth
        dict['grid'] = grid_pfx + '%(depth)s' % vars()
        # append age for mcm model grid convention
        if arg in modelpfx:
            age = dict.get('age', 0)
            dict['grid'] += '.%(age)s.grd' % vars()
        else:
            dict['grid'] += '.grd'

        track_file = Core_GMT_djb.grdtrack(dict)

        # process track file
        infile = open(track_file, 'r')
        lines = infile.readlines()
        infile.close()
        for line in lines:
            cols = line.split('\t')
            dist = cols[2]
            val = cols[3]  # includes \n
            lineout = '%(dist)s %(lradius)s %(val)s' % vars()
            outfile.write(lineout)

    outfile.close()

    # if cross-section
    if dict.get('L_GMT') == 'w':
        # user-defined region
        if dict.get('R'): dist = float(dict['R'].split('/')[1])
        else: dist = float(dist)
        w = 0.5 * dist  # aka polar angular offset
        # necessary to ensure multiples are not appended to dict['J']
        if not dict.get('pao'):
            dict['pao'] = w
            dict['J'] += '/%(w)sz' % vars()

    return outxyz
Example #21
def create_restart_run_cfg(control_d, master_run_cfg_d, rs_replace_d, rs_dir,
                           rs_inp_cfg_suffix, age, timestep):
    '''return a copy of master_run_cfg_dict with adjustments as set in Core_Citcom and control_d'''

    # get a copy of the restart type
    rs_type = control_d['restart_type']

    # make a local copy of the master run input cfg
    restart_run_cfg_d = {}
    restart_run_cfg_d = copy.deepcopy(master_run_cfg_d)

    # Make changes to local restart_run_cfg_d based upon control_d and rs_replace_d

    # Loop over all the params in rs_replace_d ; these are the values to update for restart
    for p in sorted(rs_replace_d):

        # Get the replacement value
        val = rs_replace_d[p]

        # Get the subsection name and param name
        strings = p.split('.')
        section_name = '.'.join(strings[0:-1])
        param_name = strings[-1]
        if verbose:
            print(now(), 'create_restart_run_cfg: param = ', p,
                  '; section_name = ', section_name, '; param_name =',
                  param_name, '; val =', val)

        # make sure this section name exists in the cfg dictionary, just in case
        #if not section_name in restart_run_cfg_d:
        #    print( now(), 'WARNING: section_name', section_name, ' not found in original cfg.  Adding entry to new restart cfg.')
        #    restart_run_cfg_d['_SECTIONS_'].append( section_name )
        #    restart_run_cfg_d[section_name] = {}
        #    restart_run_cfg_d[section_name]['_SECTION_NAME_'] = section_name
        #    restart_run_cfg_d[section_name][param_name] = None

        # Check if this value is to be deleted
        if val == 'DELETE':
            if param_name in restart_run_cfg_d:
                del restart_run_cfg_d[param_name]

        # check if this parameter is to be commented out in the new cfg
        elif val == 'COMMENT':

            # double check this param has an original value in the restart_run_cfg_d
            if not param_name in restart_run_cfg_d[section_name]:
                continue  # the parm name was not in the master run cfg ; skip it

            # else get master run cfg value
            master_run_val = restart_run_cfg_d[section_name][param_name]
            # remove original param name
            del restart_run_cfg_d[section_name][param_name]
            # add new value and commented out
            restart_run_cfg_d[section_name]['# ' + param_name] = master_run_val

        elif val == 'RS_TIMESTEP':
            restart_run_cfg_d[param_name] = timestep
        elif val == 'RS_TIMESTEP+2':
            restart_run_cfg_d[param_name] = timestep + 2

        else:
            # this is a regular value, update restart_run_cfg_d
            restart_run_cfg_d[param_name] = val

        # Double check if this param is set in the control cfg
        if section_name + '.' + param_name in control_d:
            val = control_d[section_name + '.' + param_name]
            restart_run_cfg_d[param_name] = val

    # Now set some specific values based upon restart type, age and timestep

    # Get a copy of the master run datafile value
    if 'CitcomS.solver' in restart_run_cfg_d and all(
            x in restart_run_cfg_d['CitcomS.solver']
            for x in ['datafile', 'datadir']):
        master_run_datafile = restart_run_cfg_d['CitcomS.solver']['datafile']
        master_run_datadir = restart_run_cfg_d['CitcomS.solver']['datadir']
    elif all(x in restart_run_cfg_d for x in ['datafile', 'datadir']):
        master_run_datafile = restart_run_cfg_d['datafile']
        master_run_datadir = restart_run_cfg_d['datadir']
    else:
        sys.exit('unable to find data files!!')
    # Set the new values for datafile, datafile_old, datadir, datadir_old

    # FIXME: are the new values for these 4 params ^ ^ ^ ^
    # set correctly in the two cases below?

    if rs_type == 'total_topography':

        restart_run_cfg_d['CitcomS.solver']['datafile'] = master_run_datafile
        restart_run_cfg_d['CitcomS.solver']['datadir'] = 'Age' + str(
            age) + 'Ma'

        # FIXME: DJB, TY, and NF to double check this change from above:
        if os.path.exists(master_run_cfg_d['datadir'] + '/0/'):
            restart_run_cfg_d['CitcomS.solver'][
                'datafile_old'] = master_run_datafile
            restart_run_cfg_d['CitcomS.solver'][
                'datadir_old'] = os.path.normpath(
                    os.path.join('../',
                                 master_run_cfg_d['datadir'])) + '/%RANK'
        elif os.path.exists(master_run_cfg_d['datadir'] + '/'):
            restart_run_cfg_d['CitcomS.solver'][
                'datafile_old'] = master_run_datafile
            restart_run_cfg_d['CitcomS.solver'][
                'datadir_old'] = os.path.normpath(
                    os.path.join('../', master_run_cfg_d['datadir']))
        else:
            #restart_run_cfg_d['CitcomS.solver']['datafile_old'] = master_run_datafile
            #restart_run_cfg_d['CitcomS.solver']['datadir_old'] = '../data/%RANK'
            restart_run_cfg_d['CitcomS.solver'][
                'datafile_old'] = master_run_datafile
            restart_run_cfg_d['CitcomS.solver'][
                'datadir_old'] = '../data/%RANK'

    elif rs_type == 'dynamic_topography':

        restart_run_cfg_d['datafile'] = master_run_datafile
        restart_run_cfg_d['datadir'] = './Age' + str(age) + 'Ma/%RANK'

        # FIXME: DJB, TY, and NF to double check this change:
        # NOTE: these two values are derived in create_no_lith_temp()
        #restart_run_cfg_d['CitcomS.solver']['datafile_old'] = master_run_datafile
        #restart_run_cfg_d['CitcomS.solver']['datadir_old'] = '../%RANK'
        restart_run_cfg_d['datafile_old'] = control_d['rs_datafile']
        restart_run_cfg_d['datadir_old'] = os.path.normpath(
            control_d['rs_datadir'])

        restart_run_cfg_d['start_age'] = str(age)
    else:
        print(
            now(),
            'ERROR: unknown restart type.  Value must be either "dynamic_topography" or "total_topography"'
        )

    # coor_file needs special handling; maybe it can move into the loop above?
    if not os.path.isabs(restart_run_cfg_d['coor_file']):
        tmp1 = os.path.normpath(
            os.path.join('../', restart_run_cfg_d['coor_file']))
        restart_run_cfg_d['coor_file'] = tmp1

    # Write out the new input cfg dictionary
    cfg_name = rs_dir + '/' + master_run_datafile + '_' + rs_inp_cfg_suffix + '.cfg'
    Core_Util.write_cfg_dictionary(restart_run_cfg_d, cfg_name, True)

    # And return it
    return restart_run_cfg_d
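
# a minimal sketch of the rs_replace_d convention handled above; the keys are
# 'section.param' paths and these entries are illustrative, not the real table
# defined in Core_Citcom:
#
#   rs_replace_d = {
#       'CitcomS.steps'                           : 'RS_TIMESTEP+2',
#       'CitcomS.controller.monitoringFrequency'  : 'RS_TIMESTEP',
#       'CitcomS.solver.ic.restart'               : 'on',       # plain override
#       'CitcomS.solver.output.output_optional'   : 'COMMENT',  # comment out
#       'CitcomS.solver.param.start_age'          : 'DELETE',   # drop entirely
#   }
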
Example #22
def main():
    print(now(), 'copy_citcom_model_from_cluster.py')

    # Mark - these should probably be user inputs
    # You could also allow the user to specify the usual types of
    # time strings like we have for grid_maker.py  Therefore, the user
    # could use timesteps, run times, or ages in the various comma-sep
    # lists or start/end/range formats

    # for testing I was running this script on citerra in this directory:
    # /home/danb/lat/lat01

    field_list = ['velo', 'visc']  # list to loop over
    time_list = ['0', '290']  # list to loop over
    # local processing directory that can be 'seen' from the cluster
    # e.g., I can see this from citerra and is just a test location
    rootdir = '/home/danb/beno/test_copy/model'

    pid_file = 'pid14289.cfg'

    # pid_file should also be an input argument
    # I'm assuming the script will always be run in the directory of
    # the CitcomS model on the cluster where the data was generated

    # parsing the pid file is helpful because it gives us the datafile
    # etc.
    master_d = Core_Citcom.get_all_pid_data(pid_file)
    pid_d = master_d['pid_d']

    # make data directory and determine structure
    datafile = pid_d['datafile']
    datadir = pid_d['datadir']

    if datadir.endswith('%RANK'):
        print('data stored by processor')
        datadir = datadir[:-5]  # strip '%RANK'
        print(datadir)
        PROC = True
    else:
        PROC = False  # not sure if this will be necessary, but
        # easy to include in this development draft

    # copy top level files
    cmd = 'cp %(pid_file)s %(rootdir)s' % vars()
    subprocess.call(cmd, shell=True)
    cmd = 'cp stderr.txt %(rootdir)s/stderr.txt' % vars()
    subprocess.call(cmd, shell=True)
    cmd = 'cp stdout.txt %(rootdir)s/stdout.txt' % vars()
    subprocess.call(cmd, shell=True)
    # copy user-created coordinate file if it exists
    coor_file = pid_d['coor_file']
    cmd = 'cp %(coor_file)s %(rootdir)s/%(coor_file)s' % vars()
    subprocess.call(cmd, shell=True)
    cmd = 'cp %(datafile)s.cfg %(rootdir)s/%(datafile)s.cfg' % vars()
    subprocess.call(cmd, shell=True)

    datadir_abs = rootdir + '/' + datadir

    # make the root (if doesn't exist) and data directory
    Core_Util.make_dir(datadir_abs)

    # copy data
    if PROC:
        for proc in range(pid_d['total_proc']):
            datadir_proc = datadir_abs + str(proc) + '/'
            Core_Util.make_dir(datadir_proc)
            for field in field_list:
                # always need coordinate file
                coord_name = str(proc) + '/' + datafile + '.coord.' + str(proc)
                filename1 = datadir + coord_name
                filename2 = datadir_abs + coord_name
                cmd = 'cp %(filename1)s %(filename2)s' % vars()
                print(cmd)
                # Mark - this command actually calls the copy command
                subprocess.call(cmd, shell=True)
                for time in time_list:
                    # create filename
                    file_name = str(proc) + '/' + datafile + '.' + field + '.'
                    file_name += str(proc) + '.' + str(time)
                    filename1 = datadir + file_name
                    filename2 = datadir_abs + file_name
                    cmd = 'cp %(filename1)s %(filename2)s' % vars()
                    print(cmd)
                    #subprocess.call( cmd, shell=True )

        # now copy essential files from 0/ directory
        zero_proc_dir = datadir_abs + '0/' + datafile
        for suffix in ['.time', '.log']:
            file_name = '0/' + datafile + suffix
            filename1 = datadir + file_name
            filename2 = datadir_abs + file_name
            cmd = 'cp %(filename1)s %(filename2)s' % vars()
            print(cmd)
            subprocess.call(cmd, shell=True)

    else:

        # non-processor (%RANK) branch
        # all files are stored in data
        # although we could code this up here, I think having
        # all the files in one directory will break grid_maker.py
        # at the moment.
        pass
Example #23
def main():
    print(now(), 'grid_maker_gplates.py')

    # get the .cfg file as a dictionary
    control_d = Core_Util.parse_configuration_file(sys.argv[1], False, False)
    Core_Util.tree_print(control_d)

    time_spec_d = Core_Citcom.get_time_spec_dictionary(control_d['time_spec'])
    print(now(), 'grid_maker_gplates.py: time_spec_d = ')
    Core_Util.tree_print(time_spec_d)

    # Get the coordinate data from the 0 Ma files
    print(now(), 'grid_maker_gplates.py: get coordinate data from .xy files:')
    lon = []
    lat = []
    for i in range(control_d['nproc_surf']):
        # get the lat lon from the .xy file
        vel_xy_filename = control_d['velocity_prefix'] + '0.%(i)s.xy' % vars()
        print(now(), 'grid_maker_gplates.py: vel_xy_filename = ',
              vel_xy_filename)
        i_lat, i_lon = np.loadtxt(vel_xy_filename, usecols=(0, 1), unpack=True)
        lat.append(i_lat)
        lon.append(i_lon)

    lon = Core_Util.flatten_nested_structure(lon)
    lat = Core_Util.flatten_nested_structure(lat)

    print(now(), 'grid_maker_gplates.py: len(lon) = ', len(lon))
    print(now(), 'grid_maker_gplates.py: len(lat) = ', len(lat))

    #
    # Main looping, first over times, then sections, then levels
    #

    # Variables that will be updated each loop:
    # time will be a zero padded string value used for filenames and reporting
    # depth will be a zero padded string value used for filenames and reporting

    print(
        now(),
        '========================================================================='
    )
    print(
        now(),
        'grid_maker_gplates.py: Main looping, first over times, then sections, then levels'
    )
    print(
        now(),
        '========================================================================='
    )

    # Loop over times
    for tt, time in enumerate(time_spec_d['time_list']):

        print(now(), 'grid_maker_gplates.py: Processing time = ', time)

        # empty file_data
        file_data = []

        # cache for the file_format
        file_format_cache = ''

        # Loop over sections (fields)
        for ss, s in enumerate(control_d['_SECTIONS_']):

            # FIXME: this extra indent is probably from when sections loop was inside level loop ?

            print(now(), 'grid_maker_gplates.py: Processing section = ', s)

            # check for required parameter 'field'
            if not 'field' in control_d[s]:
                print(
                    'ERROR: Required parameter "field" missing from section.')
                print('       Skipping this section.')
                continue  # to next section

            # get the field name
            field_name = control_d[s]['field']

            print('')
            print(now(), 'grid_maker_gplates.py: Processing: field =',
                  field_name)

            # reset region to use -Rg for gplates
            grid_R = 'g'

            if 'shift_lon' in control_d:
                print(
                    now(),
                    'grid_maker_gplates.py: grid_R set to "d" : -180/+180/-90/90'
                )
                grid_R = 'd'
            else:
                print(
                    now(),
                    'grid_maker_gplates.py: grid_R set to "g" : 0/360/-90/90'
                )

            # get the data file name specifics for this field
            file_name_component = Core_Citcom.field_to_file_map[field_name][
                'file']
            print(now(), 'grid_maker_gplates.py: file_name_component = ',
                  file_name_component)

            # get the data file column name specifics for this field
            field_column = Core_Citcom.field_to_file_map[field_name]['column']
            print(now(), 'grid_maker_gplates.py: field_column = ',
                  field_column)

            # strip any 'Ma' suffix from age values
            time = time.replace('Ma', '')
            # process data from GPlates
            file_format = control_d['velocity_prefix'] + '%(time)s.#' % vars()

            print(now(), 'grid_maker_gplates.py: file_format = ', file_format)

            # read data in by cap
            file_data = Core_Citcom.read_cap_files_to_cap_list(
                control_d, file_format)

            # flatten data since we don't care about specific cap numbers for the loop over levels/depths
            file_data = Core_Util.flatten_nested_structure(file_data)
            print(now(), 'grid_maker_gplates.py: len(file_data) = ',
                  len(file_data))

            # Get the specific column for this field_name
            field_data = np.array([line[field_column] for line in file_data])

            print(now(), 'grid_maker_gplates.py: type(field_data) = ',
                  type(field_data))
            print(now(), 'grid_maker_gplates.py:  len(field_data) = ',
                  len(field_data))
            print(now())

            # check for gplates_vmag
            if field_name == 'gplates_vmag':
                # read the vy data from col 1
                field_data_vy = [line[1] for line in file_data]
                # compute the magnitude
                vx_a = np.array(field_data)
                vy_a = np.array(field_data_vy)
                vmag_a = np.hypot(vx_a, vy_a)
                # convert back to list
                field_data = vmag_a.tolist()

            print(
                now(),
                '------------------------------------------------------------------------------'
            )
            print(now(), 'grid_maker_gplates.py: tt,ss = ', tt, ',', ss, ';')
            print(now(), 'grid_maker_gplates.py: summary for', s, ': time =',
                  time, '; field_name =', field_name)
            print(
                now(),
                '------------------------------------------------------------------------------'
            )

            depth = 0
            field_slice = field_data
            xyz_filename = field_name + '-' + str(time) + '-' + str(
                depth) + '.xyz'

            print(now(), 'grid_maker_gplates.py: xyz_filename =', xyz_filename)

            print(now(), 'grid_maker_gplates.py: type(field_slice) = ',
                  type(field_slice))
            print(now(), 'grid_maker_gplates.py:  len(field_slice) = ',
                  len(field_slice))
            print(now())

            # create the xyz data
            xyz_data = np.column_stack((lon, lat, field_slice))
            np.savetxt(xyz_filename, xyz_data, fmt='%f %f %f')

            # create the median file
            median_xyz_filename = xyz_filename[:-3] + 'median.xyz'

            blockmedian_I = control_d[s].get('blockmedian_I', '0.5')
            cmd = xyz_filename + ' -I' + str(blockmedian_I) + ' -R' + grid_R

            Core_GMT.callgmt('blockmedian', cmd, '', '>', median_xyz_filename)

            # get a T value for median file
            if not 'Ll' in control_d[s] or not 'Lu' in control_d[s]:
                T = Core_GMT.get_T_from_minmax(median_xyz_filename)
            else:
                dt = (control_d[s]['Lu'] - control_d[s]['Ll']) / 10
                T = '-T' + str(control_d[s]['Ll']) + '/'
                T += str(control_d[s]['Lu']) + '/' + str(dt)
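                # e.g., Ll = 0 and Lu = 10 give dt = 1.0 and T = '-T0/10/1.0'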

            print(now(), 'grid_maker_gplates.py: T =', T)

            # create the grid
            grid_filename = xyz_filename[:-3] + 'grd'

            surface_I = control_d[s].get('surface_I', '0.25')
            cmd = median_xyz_filename + ' -I' + str(surface_I) + ' -R' + grid_R

            if 'Ll' in control_d[s]:
                cmd += ' -Ll' + str(control_d[s]['Ll'])
            if 'Lu' in control_d[s]:
                cmd += ' -Lu' + str(control_d[s]['Lu'])
            if 'T' in control_d[s]:
                cmd += ' -T' + str(control_d[s]['T'])

            #opt_a =
            Core_GMT.callgmt('surface', cmd, '', '', ' -G' + grid_filename)

            # label the variables

            # -Dxname/yname/zname/scale/offset/title/remark
            cmd = grid_filename + ' -D/=/=/' + str(field_name) + '/=/=/' + str(
                field_name) + '/' + str(field_name)
            Core_GMT.callgmt('grdedit', cmd, '', '', '')

            # Associate this grid with GPlates exported line data in .xy format:
            # compute age value
            age_float = 0.0
            if field_name.startswith('gplates_'):
                # time_list value for gplates data is set with age values
                age_float = float(time)

            # truncate to nearest int and make a string for the gplates .xy file name
            geoframe_d = Core_Util.parse_geodynamic_framework_defaults()

            if age_float < 0: age_float = 0.0
            xy_path = geoframe_d['gplates_line_dir']
            xy_filename = xy_path + '/' + 'topology_platepolygons_' + str(
                int(age_float)) + '.00Ma.xy'
            print(now(), 'grid_maker_gplates.py: xy_filename = ', xy_filename)

            # Make a plot of the grids
            J = 'X5/3'  #'R0/6'
            #J = 'M5/3'
            if 'J' in control_d[s]:
                J = control_d[s]['J']

            C = 'polar'
            if 'C' in control_d[s]:
                C = control_d[s]['C']

            # gplates
            Core_GMT.plot_grid(grid_filename, xy_filename, grid_R,
                               '-T-10/10/1')
            # end of plotting

            # Optional step to transform grid to plate frame
            if 'make_plate_frame_grid' in control_d:
                cmd = 'frame_change_pygplates.py %(time)s %(grid_filename)s %(grid_R)s' % vars(
                )
                print(now(), 'grid_maker_gplates.py: cmd =', cmd)
                os.system(cmd)
                filename = grid_filename.replace('.grd', '-plateframe.grd')
                Core_GMT.plot_grid(filename, xy_filename, grid_R, '-T-10/10/1')
Example #24
def main():
    '''Main sequence of script actions.'''

    print( now(), 'Create_History.py:')
    print( now(), 'main:')

    # read settings from control file
    config_filename = sys.argv[1]
    control_d = Core_Util.parse_configuration_file( config_filename )
    
    # read job settings
    control_d['serial'] = isSerial(control_d)

    age_start = max( control_d['age_start'], control_d['age_end'] )
    age_end = min( control_d['age_end'], control_d['age_start'] )
    age_loop = list( range( age_end, age_start+1 ) )
    age_loop.reverse()
    

    IC = 1
    job = control_d['job']

    # smp and serial branch
    if (job=='smp'):
        serial = control_d['serial']
        if(serial):
            for age in age_loop:
                cmd  = 'make_history_for_age.py '
                cmd += '%(config_filename)s %(age)d %(IC)s' % vars()
                if verbose: print( now(), cmd )
                subprocess.call( cmd, shell=True )
                IC = 0
            #end for
            sys.exit(0)
        else:
            cpuCount = control_d['nproc']
            if (cpuCount == -1): cpuCount = int(multiprocessing.cpu_count())
            else: cpuCount = min(cpuCount, int(multiprocessing.cpu_count()))

            div, mod = divmod(len(age_loop), cpuCount)

            taskList = []
            for i in range(0, cpuCount): taskList.append([])

            count = 0
            for i in range(0, cpuCount):
                taskList[i] = age_loop[count:(count+div)]
                count = count + div
            #end for

            for i in range(0, mod):
                taskList[i].append(age_loop[count])
                count = count + 1
            #end for
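            # e.g., 10 ages over 4 CPUs: div = 2 and mod = 2, so the task
            # lists receive 3, 3, 2 and 2 ages respectively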

            threads = []
            for i in range(0, cpuCount):
                #print (taskList[i])
                thread = Thread(target = taskThreadFunction, \
                                args = (config_filename, taskList[i], age_start))
                thread.start()
                threads.append(thread)
            #end for

            # join every worker thread before continuing
            for thread in threads:
                thread.join()
            #end for
        #end if
    # parallel branch
    else:

        # total number of ages to create (inclusive)
        # and therefore number of processors to use
        # this is req for PBS submission script
        control_d['nodes'] = len(age_loop)

        batchfile = 'commands.batch'
        file = open( batchfile, 'w')

        cwd = os.getcwd()

        # make directories for files of each age
        for age in age_loop:
            cmd = 'mkdir %(age)s' % vars()
            if verbose: print( now(), cmd)
            subprocess.call( cmd, shell=True )

            if (control_d['job']=='cluster'):
                # commands for batch file
                line = 'cp geodynamic_framework_defaults.conf %(cwd)s/%(age)s; ' % vars()
                line+= 'cd %(cwd)s/%(age)s; ' % vars()
                line+= 'source ~/.bash_profile; '
                line+= 'module load python; '
                line+= 'which python; '
                line+= 'module load gmt; '
                line+= 'make_history_for_age.py '
                line+= '%(cwd)s/%(config_filename)s %(age)s ' % vars()
                line+= '%(IC)s\n' % vars()
                file.write( line )
                IC = 0

            if (control_d['job']=='raijin'):
                # commands for batch file
                line = 'cp geodynamic_framework_defaults.conf %(cwd)s/%(age)s; ' % vars()
                line+= 'cd %(cwd)s/%(age)s; ' % vars()
                line+= 'source ~/.profile; '
                line+= 'module load python3/3.3.0; '
                line+= 'module load python3/3.3.0-matplotlib; '
                line+= 'module load gmt/4.5.11; '
                line+= 'make_history_for_age.py '
                line+= '%(cwd)s/%(config_filename)s %(age)s ' % vars()
                line+= '%(IC)s\n' % vars()
                file.write( line )
                IC = 0

            if (control_d['job']=='baloo'):
                # commands for batch file
                line = 'cp geodynamic_framework_defaults.conf %(cwd)s/%(age)s; ' % vars()
                line+= 'cd %(cwd)s/%(age)s; ' % vars()
                line+= 'source ~/.cshrc; '
                line+= 'make_history_for_age.py '
                line+= '%(cwd)s/%(config_filename)s %(age)s ' % vars()
                line+= '%(IC)s\n' % vars()
                file.write( line )
                IC = 0

        file.close()

        # if cluster job:
        if (control_d['job']=='cluster'):
            make_pbs_sub_script( control_d )
        # if raijin cluster job:
        if (control_d['job']=='raijin'):
            make_raijin_pbs_sub_script( control_d )
        # if baloo cluster job:
        if (control_d['job']=='baloo'):
            make_baloo_pbs_sub_script( control_d )

        # for abigail
        #make_sbatch_sub_script( control_d )

        # submit to qsub
        qsub = control_d['qsub']
        cmd = 'qsub %(qsub)s' % vars()
        if verbose: print( now(), cmd)
        subprocess.call( cmd, shell=True )
Example #25
def main():
    print(now(), 'index_citcom.py')

    # get the .cfg file as a dictionary
    control_d = Core_Util.parse_configuration_file(sys.argv[1])
    #Core_Util.tree_print( control_d )

    # set the pid file
    pid_file = control_d['pid_file']

    # get the master dictionary and define aliases
    master_d = Core_Citcom.get_all_pid_data(pid_file)
    coor_d = master_d['coor_d']
    pid_d = master_d['pid_d']

    # Double check for essential data
    if master_d['time_d'] is None:
        print(now())
        print(
            'ERROR: Required file "[CASE_NAME].time" is missing from this model run.'
        )
        print('       Aborting processing.')
        sys.exit(-1)

    # set up working variables
    datadir = pid_d['datadir']
    datafile = pid_d['datafile']
    startage = pid_d['start_age']
    output_format = pid_d['output_format']

    depth_list = coor_d['depth_km']
    nodez = pid_d['nodez']
    nproc_surf = pid_d['nproc_surf']

    found_depth_list = []

    # Check how to read and parse the time spec:
    read_time_d = True

    # Compute the timesteps to process
    if read_time_d:
        time_spec_d = Core_Citcom.get_time_spec_dictionary(
            control_d['time_spec'], master_d['time_d'])
    else:
        time_spec_d = Core_Citcom.get_time_spec_dictionary(
            control_d['time_spec'])
    print(now(), 'index_citcom.py: time_spec_d = ')
    Core_Util.tree_print(time_spec_d)

    # levels to process
    level_spec_d = Core_Util.get_spec_dictionary(control_d['level_spec'])
    print(now(), 'index_citcom.py: level_spec_d = ')
    Core_Util.tree_print(level_spec_d)

    #
    # Main looping, first over times, then sections, then levels
    #

    print(
        now(),
        '========================================================================='
    )
    print(
        now(),
        'index_citcom.py: Main looping, first over times, then sections, then levels'
    )
    print(
        now(),
        '========================================================================='
    )

    # Loop over times
    for T, time in enumerate(time_spec_d['time_list']):
        #print( now(), 'index_citcom.py: Processing time = ', time)

        if 'Ma' in time:
            # strip off units and make a number
            time = float(time.replace('Ma', ''))

            # determine what time steps are available for this age
            # NOTE: 'temp' is required to set which output files to check
            found_d = Core_Citcom.find_available_timestep_from_age(
                master_d, 'temp', time)

        else:
            # model time steps
            time = float(time)

            # determine what time steps are available for this timestep
            # NOTE: 'temp' is required to set which output files to check
            found_d = Core_Citcom.find_available_timestep_from_timestep(
                master_d, 'temp', time)

        # end of check on time format

        # set variables for subsequent loops
        timestep = found_d['found_timestep']
        runtime_Myr = found_d['found_runtime']
        # convert the found age to an int
        age_Ma = int(np.around(found_d['found_age']))

        print(now(),
              'index_citcom.py: time data: requested value ->found value ')
        print( now(), '  ', \
            'age =', found_d['request_age'], '->', age_Ma, \
            'step =', found_d['request_timestep'], '->', timestep, \
            'r_tm =', found_d['request_runtime'], '->', runtime_Myr )

        # empty file_data
        file_data = []

        # Loop over sections (fields)
        for S, s in enumerate(control_d['_SECTIONS_']):

            # FIXME: this extra indent is probably from when sections loop was inside level loop ?

            #print( now(), 'index_citcom.py: Processing section = ', s)

            # check for required parameter 'field'
            if 'field' not in control_d[s]:
                print(
                    'ERROR: Required parameter "field" missing from section.')
                print('       Skipping this section.')
                continue  # to next section

            # get the field name
            field_name = control_d[s]['field']

            #print('')
            #print( now(), 'index_citcom.py: Processing: field =', field_name)

            # set the region
            #if nproc_surf == 12:
            #    grid_R = 'g'
            #    # optionally adjust the lon bounds of the grid to -180/180
            #    if 'shift_lon' in control_d :
            #        print( now(), 'index_citcom.py: grid_R set to to "d" : -180/+180/-90/90')
            #        grid_R = 'd'
            #    else :
            #        print( now(), 'index_citcom.py: grid_R set to to "g" : 0/360/-90/90')
            #else:
            #    grid_R  = str(pid_d['lon_min']) + '/' + str(pid_d['lon_max']) + '/'
            #    grid_R += str(pid_d['lat_min']) + '/' + str(pid_d['lat_max'])

            # get the data file name specifics for this field
            file_name_component = Core_Citcom.field_to_file_map[field_name][
                'file']

            # get the data file column name specifics for this field
            field_column = Core_Citcom.field_to_file_map[field_name]['column']

            # report
            #print( now(), 'index_citcom.py: field = ', field_name, '; file_comp =', file_name_component, '; col =', field_column)
            # process data from Citcoms
            file_format = ''

            # check for various data dirs:
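            # NOTE: '#' in the patterns below appears to be a placeholder for
            # the processor rank (cf. the '%RANK' handling in create_no_lith_temp)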
            if os.path.exists(datadir + '/0/'):
                file_format = datadir + '/#/' + datafile + '.' + file_name_component + '.#.' + str(
                    timestep)

            elif os.path.exists(datadir + '/'):
                file_format = datadir + '/' + datafile + '.' + file_name_component + '.#.' + str(
                    timestep)

            elif os.path.exists('data'):
                file_format = './data/#/' + datafile + '.' + file_name_component + '.#.' + str(
                    timestep)

            elif os.path.exists('Data'):
                file_format = './Data/#/' + datafile + '.' + file_name_component + '.#.' + str(
                    timestep)

            # report error
            else:
                print(now())
                print('ERROR: Cannot find output data.')
                print('       Skipping this section.')
                print(now(), 'index_citcom.py: file_format = ', file_format)
                continue  # to next section

            print(now(), 'index_citcom.py: file_format = ', file_format)

            #
            # Loop over levels
            #
            for L, level in enumerate(level_spec_d['list']):

                #    print( now(), 'index_citcom.py: Processing level = ', level)

                # ensure level is an int value
                level = int(level)
                depth = int(depth_list[level])
                found_depth_list.append(depth)

                #print( now(), '------------------------------------------------------------------------------')
                print( now(), 'index_citcom.py: ', s,
                    ': ts =', timestep,
                    '; age =', age_Ma,
                    #'; runtime_Myr =', runtime_Myr,
                    '; level =', level,
                    '; depth_km =', depth,
                    '; field =', field_name,
                )
                #print( now(), '------------------------------------------------------------------------------')

                # FIXME: is it ok to change the default name to have age, rather than timestep?
                xyz_filename = datafile + '-' + field_name + '-' + str(
                    age_Ma) + 'Ma-' + str(depth) + '.xyz'
                #print( now(), 'index_citcom.py: xyz_filename =', xyz_filename)

                #xy_filename = ''
                #xy_path = master_d['geoframe_d']['gplates_line_dir']
                #xy_filename = xy_path + '/' + 'topology_platepolygons_' + age + '.00Ma.xy'
                #print( now(), 'index_citcom.py: xy_filename = ', xy_filename)

                # Make a plot of the grids

                # citcoms

            # end of loop over levels

        # end of loop over sections

    # end of loop over times

    print(now(), 'depth_list = ', depth_list)
    print(now(), 'found_depth_list = ', found_depth_list)
Example #26
def make_raijin_pbs_sub_script( control_d ):

    '''Write PBS submission script to a file.'''

    if verbose: print( now(), 'make_raijin_pbs_sub_script:' )

    # get variables
    jobname = control_d.get('jobname','Create_History_Parallel.py')
    nodes = control_d['nodes']
    # 12 hour default walltime if not specified
    walltime = control_d.get('walltime','12:00:00')
    # by default send job information to Nico
    email = control_d.get('email','*****@*****.**')
    # by default 8GB memory
    mem = int(control_d.get('mem', 8))

    text='''#!/bin/bash
#PBS -N %(jobname)s
#PBS -l ncpus=%(nodes)s
#PBS -l mem=%(mem)dGB
#PBS -P q97
#PBS -l walltime=%(walltime)s
#PBS -r y
#PBS -m bae
#PBS -M %(email)s
#PBS -l wd
#PBS -j oe

# Set up job environment:
module load python3/3.3.0
module load python3/3.3.0-matplotlib
module load gmt/4.5.11
module load parallel/20150322

#change the working directory (default is home directory)
echo Working directory is $PBS_O_WORKDIR
cd $PBS_O_WORKDIR

# Write out some information on the job
echo Running on host `hostname`
echo Time is `date`

### Define number of processors
NPROCS=`wc -l < $PBS_NODEFILE`
echo This job has allocated $NPROCS cpus

# Tell me which nodes it is run on
echo " "
echo This jobs runs on the following processors:
echo `cat $PBS_NODEFILE`
echo " "

# 
# Run the parallel job
#

parallel -a commands.batch''' % vars()

    filename = '%(jobname)s.pbs' % vars()
    control_d['qsub'] = filename
    file = open( filename, 'w' )
    file.write( '%(text)s' % vars() )
    file.close()
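# Minimal usage sketch (assumed values; in practice control_d comes from
# Core_Util.parse_configuration_file, and main() above submits the script):
#
#   control_d = {'jobname': 'my_run', 'nodes': 16, 'walltime': '04:00:00', 'mem': 8}
#   make_raijin_pbs_sub_script(control_d)
#   subprocess.call('qsub ' + control_d['qsub'], shell=True)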
Example #27
def create_no_lith_temp(control_d, master_run_d, rs_replace_d, rs_dir,
                        rs_inp_cfg_suffix, age, timestep):
    '''read master run velo files and modify the temperature using z>some_node '''
    # (6) Read in velo file from master run for closest age (use read_proc_files_to_cap_list() )
    # (7) Modify the temperature using z>some_node to set temperatures to background for models
    #  without the lithosphere
    # (8) write out `new' IC files using write_cap_or_proc_list_to_files()

    lithosphere_depth_DT = control_d['lithosphere_depth_DT']
    lithosphere_temperature_DT = control_d['lithosphere_temperature_DT']

    # Get nodez from depth
    znode = Core_Citcom.get_znode_from_depth(master_run_d['coor_d'],
                                             lithosphere_depth_DT)
    print(now(), 'create_no_lith_temp: lithosphere_depth_DT = ',
          lithosphere_depth_DT, '; znode=', znode)

    # choose the field to process
    field_name = 'temp'

    # get params for the run
    pid_d = master_run_d['pid_d']
    datafile = pid_d['datafile']

    # get the data file name specifics for this field
    file_name_component = Core_Citcom.field_to_file_map[field_name]['file']
    print(now(), 'create_no_lith_temp: file_name_component = ',
          file_name_component)

    # process data from Citcoms
    if os.path.exists(master_run_d['pid_d']['datadir'] + '/0/'):
        file_format = master_run_d['pid_d']['datadir'] + '/#/' + master_run_d[
            'pid_d']['datafile'] + '.' + file_name_component + '.#.' + str(
                timestep)
    elif os.path.exists(master_run_d['pid_d']['datadir'] + '/'):
        file_format = master_run_d['pid_d']['datadir'] + '/' + master_run_d[
            'pid_d']['datafile'] + '.' + file_name_component + '.#.' + str(
                timestep)
    elif os.path.exists(master_run_d['pid_d']['datadir'].replace('%RANK',
                                                                 '0')):
        file_format = master_run_d['pid_d']['datadir'].replace(
            '%RANK', '#') + '/' + master_run_d['pid_d'][
                'datafile'] + '.' + file_name_component + '.#.' + str(timestep)
    else:
        file_format = 'data/#/' + datafile + '.' + file_name_component + '.#.' + str(
            timestep)
    print(now(), 'create_no_lith_temp: file_format = ',
          file_format)

    # read data by proc, e.g., velo, visc, comp_nd, surf, botm
    data_by_cap = Core_Citcom.read_proc_files_to_cap_list(
        master_run_d['pid_d'], file_format, field_name)

    # find index of all nodes in a cap that have znode > requested_znode
    # first, make array of znode number for a cap
    nodex = pid_d['nodex']
    nodey = pid_d['nodey']
    nodez = pid_d['nodez']

    # znodes for one cap (same for every cap)
    znode_array = np.tile(range(nodez), nodex * nodey)

    # this gives  a mask of all the znodes that we need to correct the temperature for
    mask = np.where(znode_array > znode, True, False)
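    # e.g. with nodez=9 and znode=6, znodes 7 and 8 (the nodes above znode,
    # i.e. within the lithosphere depth) are masked for correction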

    # loop over all cap lists
    for nn, cap_list in enumerate(data_by_cap):
        print(now(), 'create_no_lith_temp: working on cap number', nn)
        # convert to numpy array
        cap_array = np.array(cap_list)
        # swap in new temperature values for lithosphere
        # temperature is fourth column
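        # np.place writes lithosphere_temperature_DT into cap_array[:, 3]
        # in place, wherever mask is True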
        np.place(cap_array[:, 3], mask, lithosphere_temperature_DT)
        # update master list of data with corrected list
        data_by_cap[nn] = cap_array.tolist()

    # check values have been updated
    #if verbose: print( now(), 'create_no_lith_temp: spot check: data_by_cap[0][0:nodez]', data_by_cap[0][0:nodez])

    # map the data from cap lists to processor lists
    out_data_by_proc = Core_Citcom.get_proc_list_from_cap_list(
        master_run_d['pid_d'], data_by_cap)

    # set up output info
    rs_datafile = datafile + '_restart_' + str(int(np.around(age))) + 'Ma'

    ic_dir = rs_dir + '/ic_dir'
    Core_Util.make_dir(ic_dir)
    out_name = ic_dir + '/' + rs_datafile + '.velo.#.' + str(timestep)
    print(now(), 'create_no_lith_temp: out_name =', out_name)

    # now write out data to processor files (with header, necessary for restart)
    Core_Citcom.write_cap_or_proc_list_to_files(master_run_d['pid_d'],
                                                out_name, (out_data_by_proc, ),
                                                'proc', True)

    # Update control_d with file name patterns
    control_d['rs_datafile'] = rs_datafile
    control_d['rs_datadir'] = './ic_dir/'

    return
Example #28
def main():
    print(now(), 'grid_maker.py')

    # get the .cfg file as a dictionary
    control_d = Core_Util.parse_configuration_file(sys.argv[1], False, False)
    Core_Util.tree_print(control_d)

    # set the pid file
    pid_file = control_d['pid_file']

    # get the master dictionary and define aliases
    master_d = Core_Citcom.get_all_pid_data(pid_file)
    coor_d = master_d['coor_d']
    pid_d = master_d['pid_d']

    # Double check for essential data
    if master_d['time_d'] is None:
        print(now())
        print(
            'ERROR: Required file "[CASE_NAME].time" is missing from this model run.'
        )
        print('       Aborting processing.')
        sys.exit(-1)

    # set up working variables
    # get basic info about the model run
    datadir = pid_d['datadir']
    datafile = pid_d['datafile']
    start_age = pid_d['start_age']
    output_format = pid_d['output_format']

    depth_list = coor_d['depth_km']
    nodez = pid_d['nodez']
    nproc_surf = pid_d['nproc_surf']

    # Check how to read and parse the time spec:
    read_time_d = True

    # Compute the timesteps to process
    if read_time_d:
        time_spec_d = Core_Citcom.get_time_spec_dictionary(
            control_d['time_spec'], master_d['time_d'])
    else:
        time_spec_d = Core_Citcom.get_time_spec_dictionary(
            control_d['time_spec'])
    print(now(), 'grid_maker.py: time_spec_d = ')
    Core_Util.tree_print(time_spec_d)

    # levels to process
    level_spec_d = Core_Util.get_spec_dictionary(control_d['level_spec'])
    print(now(), 'grid_maker.py: level_spec_d = ')
    Core_Util.tree_print(level_spec_d)

    # Get coordinate data
    lon = []
    lat = []

    # Check for existing coordinate data
    lon_file_cache = '_cache_lon_coords.txt'
    lat_file_cache = '_cache_lat_coords.txt'

    if os.path.exists(lon_file_cache) and os.path.exists(lat_file_cache):
        print(now(), 'grid_maker.py: loadtxt: ', lon_file_cache)
        print(now(), 'grid_maker.py: loadtxt: ', lat_file_cache)
        lon = np.loadtxt(lon_file_cache)
        lat = np.loadtxt(lat_file_cache)
    else:
        # gets lon, lat for one depth because these are depth-independent
        coord_file_format = control_d.get(
            'coord_dir', '') + '/%(datafile)s.coord.#' % vars()
        coord = Core_Citcom.read_citcom_surface_coor(master_d['pid_d'],
                                                     coord_file_format)

        # flatten data since we don't care about specific cap numbers for the loop over depth
        coord = Core_Util.flatten_nested_structure(coord)

        # extract data from tuples and make into numpy array
        lon = [line[0] for line in coord]
        lat = [line[1] for line in coord]

        # save the coordinate data to the cache files
        np.savetxt(lon_file_cache, lon, fmt='%f')
        np.savetxt(lat_file_cache, lat, fmt='%f')

    # end of get coords
    print(now(), 'grid_maker.py: len(lon) = ', len(lon))
    print(now(), 'grid_maker.py: len(lat) = ', len(lat))

    #
    # Main looping, first over times, then sections, then levels
    #

    # Variables that will be updated each loop:
    # age_Ma will be a zero padded string value used for filenames and reporting
    # depth will be a zero padded string value used for filenames and reporting

    # Variables to hold data for all grids created
    # grid_list is a list of tuples: (grid_filename, age_Ma)
    grid_list = []

    print(
        now(),
        '========================================================================='
    )
    print(
        now(),
        'grid_maker.py: Main looping, first over times, then sections, then levels'
    )
    print(
        now(),
        '========================================================================='
    )

    # Loop over times
    for tt, time in enumerate(time_spec_d['time_list']):

        print(now(), 'grid_maker.py: Processing time = ', time)

        if 'Ma' in time:

            # strip off units and make a number
            time = float(time.replace('Ma', ''))

            # determine what time steps are available for this age
            # NOTE: 'temp' is required to set which output files to check
            found_d = Core_Citcom.find_available_timestep_from_age(
                master_d, 'temp', time)
            print(
                now(),
                'grid_maker.py: WARNING: Adjusting times to match available data:'
            )
            print(now(), '  request_age =', found_d['request_age'],
                  '; request_timestep =', found_d['request_timestep'],
                  '; request_runtime =', found_d['request_runtime'])
            print(now(), '  found_age =', found_d['found_age'],
                  '; found_timestep =', found_d['found_timestep'],
                  '; found_runtime =', found_d['found_runtime'])

            # set variables for subsequent loops
            timestep = found_d['found_timestep']
            runtime_Myr = found_d['found_runtime']

            # convert the found age to an int
            age_Ma = int(np.around(found_d['found_age']))

            # make a string and pad with zeros
            age_Ma = '%03d' % age_Ma

        else:

            time = float(time)

            # determine what time steps are available for this timestep
            # NOTE: 'temp' is required to set which output files to check

            found_d = Core_Citcom.find_available_timestep_from_timestep(
                master_d, 'temp', time)

            print(
                now(),
                'grid_maker.py: WARNING: Adjusting times to match available data:'
            )
            print(now(), '  request_age =', found_d['request_age'],
                  '; request_timestep =', found_d['request_timestep'],
                  '; request_runtime =', found_d['request_runtime'])
            print(now(), '  found_age =', found_d['found_age'],
                  '; found_timestep =', found_d['found_timestep'],
                  '; found_runtime =', found_d['found_runtime'])

            # set variables for subsequent loops
            timestep = found_d['found_timestep']
            runtime_Myr = found_d['found_runtime']

            # convert the found age to an int
            age_Ma = int(np.around(found_d['found_age']))

            # make a string and pad with zeros
            age_Ma = '%03d' % age_Ma

        # report on integer age
        print(now(), '  age_Ma =', age_Ma)

        # empty file_data
        file_data = []

        # cache for the file_format
        file_format_cache = ''

        # Loop over sections (fields)
        for ss, s in enumerate(control_d['_SECTIONS_']):

            # FIXME: this extra indent is probably from when sections loop was inside level loop ?

            print(now(), 'grid_maker.py: Processing section = ', s)

            # check for required parameter 'field'
            if 'field' not in control_d[s]:
                print(
                    'ERROR: Required parameter "field" missing from section.')
                print('       Skipping this section.')
                continue  # to next section

            # get the field name
            field_name = control_d[s]['field']

            # check for compound field
            field_name_req = ''
            if field_name == 'horiz_vmag':
                # save the requested name
                field_name_req = field_name
                # reset to get one component
                field_name = 'vx'

            print('')
            print(now(), 'grid_maker.py: Processing: field =', field_name)

            # set the region
            if nproc_surf == 12:
                grid_R = 'g'
                # optionally adjust the lon bounds of the grid to -180/180
                if 'shift_lon' in control_d:
                    print(
                        now(),
                        'grid_maker.py: grid_R set to "d" : -180/+180/-90/90'
                    )
                    grid_R = 'd'
                else:
                    print(
                        now(),
                        'grid_maker.py: grid_R set to "g" : 0/360/-90/90')
            else:
                grid_R = str(pid_d['lon_min']) + '/' + str(
                    pid_d['lon_max']) + '/'
                grid_R += str(pid_d['lat_min']) + '/' + str(pid_d['lat_max'])
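                # e.g. a regional run with lon 0..60 and lat -30..30 gives
                # grid_R = '0/60/-30/30'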

            # get the data file name specifics for this field
            file_name_component = Core_Citcom.field_to_file_map[field_name][
                'file']
            print(now(), 'grid_maker.py: file_name_component = ',
                  file_name_component)

            # get the data file column name specifics for this field
            field_column = Core_Citcom.field_to_file_map[field_name]['column']
            print(now(), 'grid_maker.py: field_column = ', field_column)

            # create the total citcoms data filenames to read
            file_format = ''

            # check for various data dirs
            if os.path.exists(datadir + '/0/'):
                print(now(), 'grid_maker.py: path found = ', datadir + '/0/')
                file_format = datadir + '/#/' + datafile + '.' + file_name_component + '.#.' + str(
                    timestep)

            elif os.path.exists(datadir + '/'):
                print(now(), 'grid_maker.py: path found = ', datadir + '/')
                file_format = datadir + '/' + datafile + '.' + file_name_component + '.#.' + str(
                    timestep)

            elif os.path.exists('data'):
                print(now(), 'grid_maker.py: path found = ', 'data')
                file_format = './data/#/' + datafile + '.' + file_name_component + '.#.' + str(
                    timestep)

            elif os.path.exists('Data'):
                print(now(), 'grid_maker.py: path found = ', 'Data')
                file_format = './Data/#/' + datafile + '.' + file_name_component + '.#.' + str(
                    timestep)

            # report error
            else:
                print(now())
                print('ERROR: Cannot find output data.')
                print('       Skipping this section.')
                print(now(), 'grid_maker.py: file_format = ', file_format)
                continue  # to next section

            print(now(), 'grid_maker.py: file_format = ', file_format)

            # check if this file data has already been read in
            if not file_format == file_format_cache:

                # read data by proc, e.g., velo, visc, comp_nd, surf, botm
                file_data = Core_Citcom.read_proc_files_to_cap_list(
                    master_d['pid_d'], file_format, field_name)
                # flatten data since we don't care about specific cap numbers for the loop over levels/depths
                file_data = Core_Util.flatten_nested_structure(file_data)
                print(now(), 'grid_maker.py: len(file_data) = ',
                      len(file_data))

                # update cache for next pass in loop over fields
                file_format_cache = file_format

            # Get the specific column for this field_name
            field_data = np.array([line[field_column] for line in file_data])
            print(now(), 'grid_maker.py:  len(field_data) = ', len(field_data))

            # Check for compound field
            if field_name_req == 'horiz_vmag':

                # Get the second component data ('vy')
                field_column = 1
                # read data by proc, e.g., velo, visc, comp_nd, surf, botm
                file_data2 = Core_Citcom.read_proc_files_to_cap_list(
                    master_d['pid_d'], file_format, field_name)
                # flatten data since we don't care about specific cap numbers for the loop over levels/depths
                file_data2 = Core_Util.flatten_nested_structure(file_data2)
                print(now(), 'grid_maker.py: len(file_data2) = ',
                      len(file_data2))
                field_data2 = np.array(
                    [line[field_column] for line in file_data2])
                print(now(), 'grid_maker.py:  len(field_data2) = ',
                      len(field_data2))

                # combine the data and reset the main variable
                field_data3 = np.hypot(field_data, field_data2)
                field_data = field_data3

                # put back field name to requested name
                field_name = field_name_req
            # end if check on compound field

            print(now(), 'grid_maker.py:  len(field_data) = ', len(field_data))
            print(now())

            #
            # Loop over levels
            #
            for ll, level in enumerate(level_spec_d['list']):

                print(now(), 'grid_maker.py: Processing level = ', level)

                # ensure level is an int value
                level = int(level)
                depth = int(depth_list[level])
                # pad the depth value
                depth = '%04d' % depth

                print(
                    now(),
                    '------------------------------------------------------------------------------'
                )
                print(now(), 'grid_maker.py: tt,ss,ll = ', tt, ',', ss, ',',
                      ll, ';')
                print(now(), 'grid_maker.py: summary for', s, ': timestep =',
                      timestep, '; age =', age_Ma, '; runtime_Myr =',
                      runtime_Myr, '; level =', level, '; depth =', depth,
                      ' km; field_name =', field_name)
                print(
                    now(),
                    '------------------------------------------------------------------------------'
                )

                if field_name.startswith('vertical_'):
                    # perform a z slice for citcom data
                    field_slice = field_data[
                        level::nodez]  # FIXME : how to get a v slice
                    xyz_filename = datafile + '-' + field_name + '-' + str(
                        age_Ma) + 'Ma-' + str(depth) + 'km.xyz'
                else:
                    # perform a z slice for citcom data
                    field_slice = field_data[level::nodez]
                    #xyz_filename = datafile + '-' + field_name + '-' + str(timestep) + '-' + str(depth) + '.xyz'
                    xyz_filename = datafile + '-' + field_name + '-' + str(
                        age_Ma) + 'Ma-' + str(depth) + 'km.xyz'

                print(now(), 'grid_maker.py: xyz_filename =', xyz_filename)

                if field_name == 'visc': field_slice = np.log10(field_slice)

                print(now(), 'grid_maker.py: type(field_slice) = ',
                      type(field_slice))
                print(now(), 'grid_maker.py:  len(field_slice) = ',
                      len(field_slice))
                print(now())

                # create the xyz data
                xyz_data = np.column_stack((lon, lat, field_slice))
                np.savetxt(xyz_filename, xyz_data, fmt='%f %f %f')

                #print( now(), 'grid_maker.py: type(xyz_data) = ', type(xyz_data) )
                #print( now(), 'grid_maker.py:  len(xyz_data) = ', len(xyz_data) )
                #print( now() )

                # recast the slice
                #fs = np.array( field_slice )
                #fs.shape = ( len(lat), len(lon) )
                #print( now(), 'grid_maker.py: type(fs) = ', type(field_slice) )
                #print( now(), 'grid_maker.py:  len(fs) = ', len(field_slice) )
                #print( now() )

                # check for a grid_R
                if 'R' in control_d[s]:
                    grid_R = control_d[s]['R']

                # create the median file
                median_xyz_filename = xyz_filename.rstrip('xyz') + 'median.xyz'
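                # NB: rstrip('xyz') strips trailing characters from the set
                # {x,y,z}, not the literal '.xyz' suffix; it works here because
                # the name ends in 'km.xyz', so stripping stops at the dot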

                blockmedian_I = control_d[s].get('blockmedian_I', '0.5')
                cmd = xyz_filename + ' -I' + str(
                    blockmedian_I) + ' -R' + grid_R

                Core_GMT.callgmt('blockmedian', cmd, '', '>',
                                 median_xyz_filename)

                # get a T value for median file
                if not 'Ll' in control_d[s] or not 'Lu' in control_d[s]:
                    T = Core_GMT.get_T_from_minmax(median_xyz_filename)
                else:
                    dt = (control_d[s]['Lu'] - control_d[s]['Ll']) / 10
                    T = '-T' + str(control_d[s]['Ll']) + '/'
                    T += str(control_d[s]['Lu']) + '/' + str(dt)

                print(now(), 'grid_maker.py: T =', T)

                # create the grid
                grid_filename = xyz_filename.rstrip('xyz') + 'grd'

                surface_I = control_d[s].get('surface_I', '0.25')
                cmd = median_xyz_filename + ' -I' + str(
                    surface_I) + ' -R' + grid_R

                if 'Ll' in control_d[s]:
                    cmd += ' -Ll' + str(control_d[s]['Ll'])
                if 'Lu' in control_d[s]:
                    cmd += ' -Lu' + str(control_d[s]['Lu'])
                if 'T' in control_d[s]:
                    cmd += ' -T' + str(control_d[s]['T'])

                #opt_a =
                Core_GMT.callgmt('surface', cmd, '', '', ' -G' + grid_filename)
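                # the two GMT calls above correspond to a command-line pipeline
                # like the following (a sketch, GMT 4 syntax, default spacings):
                #   blockmedian file.xyz -I0.5 -Rg > file.median.xyz
                #   surface file.median.xyz -I0.25 -Rg -Gfile.grd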

                # label the variables

                # -Dxname/yname/zname/scale/offset/title/remark
                cmd = grid_filename + ' -D/=/=/' + str(
                    field_name) + '/=/=/' + str(field_name) + '/' + str(
                        field_name)
                Core_GMT.callgmt('grdedit', cmd, '', '', '')

                # Dimensionalize grid

                if control_d[s].get('dimensional'):
                    print(now(), 'grid_maker.py: dimensional = ',
                          control_d[s]['dimensional'])
                    dim_grid_name = grid_filename.replace(
                        '.grd', '.dimensional.grd')
                    Core_Citcom.dimensionalize_grid(pid_file, field_name,
                                                    grid_filename,
                                                    dim_grid_name)

                    # FIXME: for dynamic topo remove  mean
                    # grdinfo to get mean ; see To_Refactor for example

                # save this grid and its age in a list
                if control_d[s].get('dimensional'):
                    grid_list.append((dim_grid_name, age_Ma))
                else:
                    grid_list.append((grid_filename, age_Ma))

                # Optional step to transform grid to plate frame
                if 'make_plate_frame_grid' in control_d:
                    cmd = 'frame_change_pygplates.py %(age_Ma)s %(grid_filename)s %(grid_R)s' % vars(
                    )
                    print(now(), 'grid_maker.py: cmd =', cmd)
                    os.system(cmd)

                # Associate this grid with GPlates exported line data in .xy format:
                # compute age value
                age_float = 0.0

                # time_list values for citcom data uses timesteps; get age
                time_triple = Core_Citcom.get_time_triple_from_timestep(
                    master_d['time_d']['triples'], timestep)
                age_float = time_triple[1]

                # truncate to nearest int and make a string for the gplates .xy file name
                if age_float < 0: age_float = 0.0
                xy_path = master_d['geoframe_d']['gplates_line_dir']
                xy_filename = xy_path + '/' + 'topology_platepolygons_' + str(
                    int(age_float)) + '.00Ma.xy'
                print(now(), 'grid_maker.py: xy_filename = ', xy_filename)

                # Make a plot of the grids
                J = 'X5/3'  #'R0/6'
                #J = 'M5/3'
                if 'J' in control_d[s]:
                    J = control_d[s]['J']

                C = 'polar'
                if 'C' in control_d[s]:
                    C = control_d[s]['C']

                # citcoms
                # plot non-dimensional grid
                Core_GMT.plot_grid(grid_filename, xy_filename, grid_R, T, J, C)

                # also plot dimensional grid
                if control_d[s].get('dimensional'):
                    print(now(), 'grid_maker.py: plotting dimensional = ',
                          control_d[s]['dimensional'])
                    dim_grid_name = grid_filename.replace(
                        '.grd', '.dimensional.grd')
                    T = Core_GMT.get_T_from_grdinfo(dim_grid_name)
                    Core_GMT.plot_grid(dim_grid_name, xy_filename, grid_R, T,
                                       J)

                # plot plate frame grid
                if 'make_plate_frame_grid' in control_d:
                    plateframe_grid_name = grid_filename.replace(
                        '.grd', '-plateframe.grd')
                    xy_filename = ''
                    xy_path = master_d['geoframe_d']['gplates_line_dir']
                    # present day plate outlines : use '0'
                    xy_filename = xy_path + '/' + 'topology_platepolygons_0.00Ma.xy'
                    print(now(), 'grid_maker.py: xy_filename = ', xy_filename)

                    T = Core_GMT.get_T_from_grdinfo(plateframe_grid_name)
                    print(now(), 'grid_maker.py: T =', T)
                    Core_GMT.plot_grid(plateframe_grid_name, xy_filename,
                                       grid_R, T, J)
Example #29
def main():
    '''Main sequence of script actions.'''

    print(now(), 'plot_lith_age.py:')
    print(now(), 'main:')

    if len(sys.argv) != 3:
        usage()

    # parameters
    pid_file = sys.argv[1]
    time_spec = sys.argv[2]

    ### parse and setup dictionaries ###

    master_d = Core_Citcom.get_all_pid_data(pid_file)
    pid_d = master_d['pid_d']
    time_d = Core_Citcom.get_time_spec_dictionary(time_spec,
                                                  master_d['time_d'])
    runtime_Myr = time_d['runtime_Myr'][0]
    age = int(round(time_d['age_Ma'][0], 0))
    datafile = pid_d['datafile']
    lith_age_depth = pid_d['lith_age_depth']
    start_age = pid_d['start_age']
    time = time_d['time_list'][0]
    geoframe_d = master_d['geoframe_d']
    depth_km = master_d['coor_d']['depth_km']
    mantle_temp = pid_d['mantle_temp']
    radius = master_d['coor_d']['radius']
    radius_outer = pid_d['radius_outer']
    radius_km = pid_d['radius_km']
    scalet = pid_d['scalet']
    rm_list = []  # list of files to remove

    ###################################
    ### input directories and files ###
    ###################################

    # reconstructed cross-section (plate frame of reference)
    cross_section_dir = 'aus_xsect/'
    cross_section_name = cross_section_dir + 'reconstructed_%(age)s.00Ma.xy' % vars(
    )

    # continental grids
    cont_dir = geoframe_d['age_grid_cont_dir'] + '/'
    cont_name = cont_dir + geoframe_d[
        'age_grid_cont_prefix'] + '%(age)s.grd' % vars()

    # directory of lith_age3_%(age)s.grd files from make_history_for_age.py
    lith_age_dir = '/net/beno2/nobackup1/danb/global/lith_age/'
    lith_age_name = lith_age_dir + 'lith_age3_%(age)s.grd' % vars()

    ### end input directories and files ###

    ### process cross_section_name ###

    infile = open(cross_section_name, 'r')
    lines = infile.readlines()
    infile.close()
    out = []
    for line in lines:
        if line.startswith('>'):
            pass
        else:
            out.append(line.strip())

    # profile start location
    lon0 = float(out[0].split()[0])
    lat0 = float(out[0].split()[1])
    print(now(), '(lon0, lat0)', lon0, lat0)
    # profile end location
    lon1 = float(out[1].split()[0])
    lat1 = float(out[1].split()[1])
    print(now(), '(lon1, lat1)', lon1, lat1)

    # min and max bounds for GMT region (R)
    lon_min = min(lon0, lon1) - 10
    lon_max = max(lon0, lon1) + 10
    lat_min = min(lat0, lat1) - 15
    lat_max = max(lat0, lat1) + 15
    print(now(), '(lon_min, lat_min)', lon_min, lat_min)
    print(now(), '(lon_max, lat_max)', lon_max, lat_max)

    # Nico's 1-D profile
    # interpolate for data points between end values
    proj_name = cross_section_name.rstrip('xy') + 'p.xy'
    rm_list.append(proj_name)
    dlon = lon1 - lon0
    dlat = lat1 - lat0

    outfile = open(proj_name, 'w')
    outfile.write('%(lon0)s %(lat0)s %(lon0)s\n' % vars())

    lon = lon0
    lat = lat0
    while True:
        lon += dlon / 500
        lat += dlat / 500
        if lon <= lon1:  #and lat <= lat1:
            lineout = '%(lon)s %(lat)s %(lon)s\n' % vars()
            outfile.write(lineout)
        else:
            break

    outfile.close()
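    # NumPy equivalent of the interpolation loop above (a sketch; ~501 evenly
    # spaced points, with lon repeated as the third, projection column):
    #   lons = np.linspace(lon0, lon1, 501)
    #   lats = np.linspace(lat0, lat1, 501)
    #   np.savetxt(proj_name, np.column_stack((lons, lats, lons)))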

    # purple circles
    # map
    lon_markers = cross_section_dir + 'lon_markers_map.xy'
    rm_list.append(lon_markers)
    ofile = open(lon_markers, 'w')
    lon_floor = int(np.floor(lon0))
    lon_ceil = int(np.ceil(lon1))
    for lon in range(lon_floor, lon_ceil + 1):
        if not lon % 5:
            olat = (lon - lon0) / dlon * dlat + lat0
            outline = '%(lon)s %(olat)s\n' % vars()
            ofile.write(outline)
    ofile.close()

    # annulus
    lon_markers_ann = cross_section_dir + 'lon_markers_ann.xy'
    rm_list.append(lon_markers_ann)
    plon, plat = np.loadtxt(lon_markers, unpack=True)
    prad = np.tile(radius_outer, len(plon))
    np.savetxt(lon_markers_ann, np.column_stack((plon, prad)))

    ### end process cross_section_name ###

    ### build list of temperature grids to track through ###
    # these grids must have previously been created using grid_maker.py
    gpfx = 'grid/' + datafile
    temp_list = []
    for depth in depth_km:
        gsfx = '.temp.' + str(int(depth)) + '.' + str(time) + '.grd'
        temp_list.append(gpfx + gsfx)

    # keep only the grids at depths of 500 km and shallower
    depth_km_array = np.array(depth_km)
    znode = np.min(np.where(depth_km_array < 500)) - 1

    temp_list = temp_list[znode:]

    ### end of build temperature grid list ###

    #### idealized thermal structure from age grids
    ideal_lith_xyz = cross_section_dir + 'ideal.lith.%(age)s.xyz' % vars()
    rm_list.append(ideal_lith_xyz)
    Core_Util.find_value_on_line(proj_name, lith_age_name, ideal_lith_xyz)
    lithlon, lithlat, lithdist1, lithage_Ma = np.loadtxt(ideal_lith_xyz,
                                                         unpack=True)
    lithdist = np.tile(lithdist1, pid_d['nodez'])
    lithage_Ma = np.tile(lithage_Ma, pid_d['nodez'])
    lithrad = []
    for rad in radius:
        lithrad.extend([rad for xx in range(len(lithdist1))])
    lithrad = np.array(lithrad)
    lithtemp = erf((1.0 - lithrad) / (2.0 * np.sqrt(lithage_Ma / scalet)))
    lithtemp *= float(mantle_temp)
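    # half-space cooling model in nondimensional form:
    #   T(z, t) = T_m * erf( z / (2*sqrt(kappa*t)) )
    # with depth z = 1 - r and the age nondimensionalised by scalet (assumed
    # to be the Myr-per-nondimensional-time scaling from the pid file)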

    nan = np.where(np.isnan(lithtemp))
    #nnan = np.where(~np.isnan( lithtemp ))
    np.savetxt(ideal_lith_xyz, np.column_stack((lithdist, lithrad, lithtemp)))

    #nan_values = np.ones( np.size( nan ) )*-1
    #f_handle = open( ideal_lith_xyz, 'ab')
    #np.savetxt(f_handle, np.column_stack( (lithdist[nan], lithrad[nan], nan_values) ))
    #f_handle.close()

    #### end of idealized thermal structure from age grids

    # make temperature xyz
    temp_xyz = cross_section_dir + 'citcom.temp.%(age)s.xyz' % vars()
    rm_list.append(temp_xyz)
    # this is hacky, but loop over only the top 500 km
    master_d['coor_d']['radius'] = master_d['coor_d']['radius'][znode:]
    pao, x_ann_max = Core_Util.make_annulus_xyz(master_d, proj_name, temp_xyz,
                                                temp_list)

    ### make idealized lithosphere and citcom temperature grid ###
    blockmedian_I = '0.2/0.0035'
    surface_I = '0.1/0.00125'
    surface_T = '0.25'
    rad_in = '0.92151939'
    rad_out = '1.0'
    # for plotting data
    R_ann = str(lon0) + '/' + str(lon1) + '/' + rad_in + '/' + rad_out
    # for dimensional psbasemap
    psbase_R = str(lon0) + '/' + str(lon1) + '/' + str(5871) + '/' + str(
        radius_km)

    grid_names = []
    for xyz in [temp_xyz, ideal_lith_xyz]:
        block_name = xyz.rstrip('xyz') + 'b.xyz'
        rm_list.append(block_name)
        grid_name = block_name.rstrip('xyz') + 'grd'
        grid_names.append(grid_name)
        rm_list.append(grid_name)
        cmd = xyz + ' -I' + blockmedian_I + ' -R' + R_ann
        callgmt('blockmedian', cmd, '', '>', block_name)
        cmd = block_name + ' -I' + surface_I + ' -R' + R_ann
        cmd += ' -T' + surface_T
        cmd += ' -Ll0 -Lu1'
        callgmt('surface', cmd, '', '', '-G' + grid_name)

    ### end of make temperature grids ###

    ### percentage error between temperature fields ###
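    # grdmath uses reverse Polish notation: 'A B SUB B DIV 100 MUL'
    # evaluates to ((A - B) / B) * 100, i.e. the difference in percent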
    cmd = grid_names[0] + ' ' + grid_names[1] + ' SUB '
    cmd += grid_names[1] + ' DIV'
    cmd += ' 100 MUL'
    temp_diff_grid = cross_section_dir + 'temp.difference.grd'
    grid_names.append(temp_diff_grid)
    rm_list.append(temp_diff_grid)
    callgmt('grdmath', cmd, '', '=', temp_diff_grid)

    ### end percentage error

    ### lith_age_depth overlay line
    xy = cross_section_dir + 'lith_depth.xy'
    rm_list.append(xy)
    lith_age_radius = pid_d['radius_outer'] - pid_d['lith_age_depth']
    lith_depth = np.tile(lith_age_radius, len(lithdist1))
    np.savetxt(xy, np.column_stack((lithdist1, lith_depth)))

    ### end overlay line

    ### make cpts ###

    # age grid
    cpt_pfx = cross_section_dir
    cpt_name = cpt_pfx + 'age.cpt'
    rm_list.append(cpt_name)
    cmd = '-Crainbow -T0/370/10'
    callgmt('makecpt', cmd, '', '>', cpt_name)

    # continental types
    cpt_name = cpt_pfx + 'cont.cpt'
    rm_list.append(cpt_name)
    cmd = '-Crainbow -T-4/0/1'
    callgmt('makecpt', cmd, '', '>', cpt_name)

    # differential temperature
    cpt_name = cpt_pfx + 'diff.cpt'
    rm_list.append(cpt_name)
    cmd = '-Cpolar -T-10/10/1'
    callgmt('makecpt', cmd, '', '>', cpt_name)

    # temperature
    cpt_name = cpt_pfx + 'temp.cpt'
    cmd = '-Cpolar -T0/1/0.0675'
    rm_list.append(cpt_name)
    callgmt('makecpt', cmd, '', '>', cpt_name)

    # for temperature contours
    cpt_name = cpt_pfx + 'temp.cont'
    cmd = '-Cjet -T0.1/0.4/0.1'
    rm_list.append(cpt_name)
    callgmt('makecpt', cmd, '', '>', cpt_name)

    ### plotting ###
    ps = datafile + '.lith.age.analysis.%(age)sMa.ps' % vars()

    callgmt('gmtset', 'PAGE_ORIENTATION', '', '', 'portrait')
    callgmt('gmtset', 'LABEL_FONT_SIZE', '', '', '12')
    callgmt('gmtset', 'LABEL_FONT', '', '', '4')
    callgmt('gmtset', 'LABEL_OFFSET', '', '', '0.02')
    callgmt('gmtset', 'ANNOT_FONT_SIZE_PRIMARY', '', '', '10p')
    callgmt('gmtset', 'ANNOT_FONT_PRIMARY', '', '', '4')

    opts_d = Core_GMT.start_postscript(ps)

    # pre-initialize for pstext commands
    pstext_d = opts_d.copy()
    pstext_d['R'] = '0/8.5/0/11'
    pstext_d['J'] = 'x1.0'

    # title information
    stdin = '1 10.5 14 0 4 ML Model = %(datafile)s\n' % vars()
    stdin += '1 10.3 14 0 4 ML lith_age_depth = %(lith_age_depth)s\n' % vars()
    stdin += '7.5 10.5 14 0 4 MR Current Age = %(age)s Ma\n' % vars()
    stdin += '7.5 10.3 14 0 4 MR start_age = %(start_age)s Ma\nEOF' % vars()
    callgmt('pstext', '', pstext_d, '<< EOF >>', ps + '\n' + stdin)

    # plot maps #
    map_d = opts_d.copy()
    map_d['B'] = 'a20f10/a10f5::WeSn'
    map_d['R'] = '%(lon_min)s/%(lon_max)s/%(lat_min)s/%(lat_max)s' % vars()
    map_d['C'] = cross_section_dir + 'age.cpt'
    map_d['J'] = 'M3'
    map_d['X'] = 'a1'
    map_d['Y'] = 'a8'
    map_grid = lith_age_name

    callgmt('grdimage', lith_age_name, map_d, '>>', ps)

    C = cross_section_dir + 'age.cpt'
    cmd = '-Ba50f10:"Age (Ma)": -D2.5/7.5/2.5/0.1h -C%(C)s -K -O' % vars()
    callgmt('psscale', cmd, '', '>>', ps)

    del map_d['B']
    del map_d['C']
    map_d['m'] = ' '
    map_d['W'] = '5,white'
    callgmt('psxy', proj_name, map_d, '>>', ps)
    del map_d['m']
    del map_d['W']
    map_d['G'] = 'purple'
    map_d['S'] = 'c0.05'
    callgmt('psxy', lon_markers, map_d, '>>', ps)
    del map_d['G']
    del map_d['S']

    # continental types
    map_d['B'] = 'a20f10/a10f5::wESn'
    map_d['C'] = cross_section_dir + 'cont.cpt'
    map_d['X'] = 'a4.5'
    map_d['Y'] = 'a8'
    callgmt('grdimage', cont_name, map_d, '>>', ps)

    C = cross_section_dir + 'cont.cpt'
    cmd = '-Ba1:"Continental type (stencil value)": -D6/7.5/2.5/0.1h -C%(C)s -K -O' % vars(
    )
    callgmt('psscale', cmd, '', '>>', ps)

    del map_d['B']
    del map_d['C']
    map_d['m'] = ' '
    map_d['W'] = '5,black'
    callgmt('psxy', proj_name, map_d, '>>', ps)
    del map_d['m']
    del map_d['W']
    map_d['G'] = 'purple'
    map_d['S'] = 'c0.05'
    callgmt('psxy', lon_markers, map_d, '>>', ps)
    del map_d['G']
    del map_d['S']

    # end plot maps #

    # plot cross-sections

    # temperature cross-section
    psbase_d = opts_d.copy()
    psbase_d['B'] = 'a10/500::WsNe'
    psbase_d['J'] = 'Pa6/' + str(pao) + 'z'
    psbase_d['R'] = psbase_R
    psbase_d['X'] = 'a1.25'
    psbase_d['Y'] = 'a5.25'
    callgmt('psbasemap', '', psbase_d, '>>', ps)

    opts_d['C'] = cross_section_dir + 'temp.cpt'
    opts_d['J'] = 'Pa6/' + str(pao)
    opts_d['R'] = R_ann
    opts_d['X'] = 'a1.25'
    opts_d['Y'] = 'a5.25'
    callgmt('grdimage', grid_names[0], opts_d, '>>', ps)

    # profile of lith_age_depth on this cross-section
    del opts_d['C']
    opts_d['W'] = '3,black,-'
    callgmt('psxy', xy, opts_d, '>>', ps)
    del opts_d['W']
    opts_d['G'] = 'purple'
    opts_d['N'] = ' '
    opts_d['S'] = 'c0.06'
    callgmt('psxy', lon_markers_ann, opts_d, '>>', ps)
    del opts_d['G']
    del opts_d['N']
    del opts_d['S']

    stdin = '1 6.25 12 0 4 ML CitcomS\n'
    stdin += '7.5 6.25 12 0 4 MR Temp\nEOF'
    callgmt('pstext', '', pstext_d, '<< EOF >>', ps + '\n' + stdin)

    C = cross_section_dir + 'temp.cpt'
    cmd = '-Ba0.2f0.1 -D4.25/5.7/2.5/0.1h -C%(C)s -K -O' % vars()
    callgmt('psscale', cmd, '', '>>', ps)

    # idealized lith temperature cross-section
    psbase_d['Y'] = 'a3.75'
    callgmt('psbasemap', '', psbase_d, '>>', ps)

    opts_d['C'] = cross_section_dir + 'temp.cpt'
    opts_d['Y'] = 'a3.75'
    callgmt('grdimage', grid_names[1], opts_d, '>>', ps)
    del opts_d['C']

    # profile of lith_age_depth on this cross-section
    opts_d['W'] = '3,black,-'
    callgmt('psxy', xy, opts_d, '>>', ps)
    del opts_d['W']

    opts_d['G'] = 'purple'
    opts_d['N'] = ' '
    opts_d['S'] = 'c0.06'
    callgmt('psxy', lon_markers_ann, opts_d, '>>', ps)
    del opts_d['G']
    del opts_d['N']
    del opts_d['S']

    stdin = '1 4.75 12 0 4 ML Idealised\n'
    stdin += '7.5 4.75 12 0 4 MR Temp\nEOF'
    callgmt('pstext', '', pstext_d, '<< EOF >>', ps + '\n' + stdin)

    C = cross_section_dir + 'temp.cpt'
    cmd = '-Ba0.2f0.1 -D4.25/4.2/2.5/0.1h -C%(C)s -K -O' % vars()
    callgmt('psscale', cmd, '', '>>', ps)

    # contours plot
    psbase_d['Y'] = 'a2.25'
    callgmt('psbasemap', '', psbase_d, '>>', ps)
    opts_d['Y'] = 'a2.25'
    opts_d['C'] = cross_section_dir + 'temp.cont'
    opts_d['W'] = '3,red'
    callgmt('grdcontour', grid_names[0], opts_d, '>>', ps)
    opts_d['W'] = '3,green'
    callgmt('grdcontour', grid_names[1], opts_d, '>>', ps)
    del opts_d['C']
    del opts_d['W']

    opts_d['G'] = 'purple'
    opts_d['N'] = ' '
    opts_d['S'] = 'c0.06'
    callgmt('psxy', lon_markers_ann, opts_d, '>>', ps)
    del opts_d['G']
    del opts_d['N']
    del opts_d['S']

    stdin = '1 3.25 12 0 4 ML Contours\n'
    stdin += '7.5 3.25 12 0 4 MR Temp\nEOF'
    callgmt('pstext', '', pstext_d, '<< EOF >>', ps + '\n' + stdin)

    # difference of temperature fields (relative)
    psbase_d['Y'] = 'a0.75'
    callgmt('psbasemap', '', psbase_d, '>>', ps)
    opts_d['C'] = cross_section_dir + 'diff.cpt'
    opts_d['Y'] = 'a0.75'
    callgmt('grdimage', grid_names[2], opts_d, '>>', ps)
    del opts_d['C']

    opts_d['G'] = 'purple'
    opts_d['N'] = ' '
    opts_d['S'] = 'c0.06'
    callgmt('psxy', lon_markers_ann, opts_d, '>>', ps)
    del opts_d['G']
    del opts_d['N']
    del opts_d['S']

    C = cross_section_dir + 'diff.cpt'
    cmd = '-Ba5f1 -D4.25/1.2/2.5/0.1h -C%(C)s -K -O' % vars()
    callgmt('psscale', cmd, '', '>>', ps)

    stdin = '1 1.75 12 0 4 ML Delta (\045)\n'
    stdin += '7.5 1.75 12 0 4 MR Temp\nEOF'
    #stdin += '4.25 0.6 12 0 4 MC Note: No assimilation regions are shown in BLACK\nEOF'
    callgmt('pstext', '', pstext_d, '<< EOF >>', ps + '\n' + stdin)

    Core_GMT.end_postscript(ps)

    # clean up temporary files
    Core_Util.remove_files(rm_list)
Example #30
def main():
    '''This is the main function for restart_citcoms.py'''
    print(now(), 'restart_citcoms.py: START')

    # get the control .cfg file as a dictionary
    control_d = Core_Util.parse_configuration_file(sys.argv[1])

    # parse master run input pid file
    master_run_cfg = control_d['master_run_cfg']
    master_run_cfg_d = Core_Util.parse_configuration_file(master_run_cfg)

    # parse master run output pid file
    master_run_pid = control_d['master_run_pid']

    # get the master dictionary and define aliases
    master_run_d = Core_Citcom.get_all_pid_data(master_run_pid)
    master_run_d['control_d'] = control_d
    master_run_pid_d = master_run_d['pid_d']

    # report details of input data
    if verbose:
        print(now(), 'restart_citcoms: control_d = ')
        Core_Util.tree_print(control_d)
        print(now(), 'restart_citcoms: master_run_cfg_d = ')
        Core_Util.tree_print(master_run_cfg_d)
        print(now(), 'restart_citcoms: master_run_pid_d = ')
        Core_Util.tree_print(master_run_pid_d)

    # SAVE, might need later ... ?
    # copy of the geo frame defaults
    #geoframe_d = master_run_d['geoframe_d']

    # process the control entry to get a list of ages
    time_spec_d = Core_Citcom.get_time_spec_dictionary(
        control_d['restart_ages'], master_run_d['time_d'])

    print(now(), 'restart_citcoms: time_spec_d =')
    Core_Util.tree_print(time_spec_d)

    # Get the restart type and local copy of the restart parameter replacement dictionary
    rs_replace_d = {}
    rs_type = control_d['restart_type']
    if rs_type == 'dynamic_topography':
        rs_replace_d = Core_Citcom.dynamic_topography_restart_params
    elif rs_type == 'total_topography':
        rs_replace_d = Core_Citcom.total_topography_restart_params
    else:
        print(now(), 'restart_citcoms: ERROR: unknown value for restart_type.')
        print(now(),
              'Valid values are "dynamic_topography", or "total_topography"')
        sys.exit(-1)

    # Now update rs_replace_d values directly from those set in control_d
    for p in sorted(control_d):
        if p.startswith('CitcomS.'):
            rs_replace_d[p] = control_d[p]

    # Show the final rs_replace_d that will pass to the input creation function
    if verbose:
        print(now(), 'restart_citcoms: rs_replace_d = ')
        Core_Util.tree_print(rs_replace_d)

    # Set placeholders for the directory and file structure and names
    rs_dir_prefix = 'restart_' + rs_type
    rs_inp_cfg_suffix = ''

    rs_structure = control_d['restart_structure']
    if rs_structure == 'all-in-one':
        # create the all-in-one restart directory from section name
        Core_Util.make_dir(rs_dir_prefix)

    # Now loop over the restart ages and create restart files for each age
    for a in time_spec_d['age_Ma']:

        # determine what time steps are available for this age
        # NOTE: 'temp' is required to set which output files to check
        found_d = Core_Citcom.find_available_timestep_from_age(
            master_run_d, 'temp', a)

        timestep = found_d['found_timestep']

        # convert the found age to an int
        age = int(np.around(found_d['found_age']))

        print(
            now(),
            '--------------------------------------------------------------------------------------------'
        )
        print(now(), 'Creating files for restart run at age:', age, '(',
              str(a), 'Ma; timestep = ', timestep, ')')
        print(
            now(),
            '--------------------------------------------------------------------------------------------'
        )

        # Set the name of the restart directory
        rs_dir = ''
        if rs_structure == 'separate':
            # create restart directory from section name
            rs_dir = rs_dir_prefix + '_' + str(age) + 'Ma'
            Core_Util.make_dir(rs_dir)
            Core_Util.make_dir(rs_dir + f'/Age{age}Ma')
        else:
            # this is the all-in-one case
            rs_dir = rs_dir_prefix

        # update the new restart input cfg file name suffix
        rs_inp_cfg_suffix = rs_type + '_' + str(age) + 'Ma'

        # create a new set of initial conditions for the restart run,
        # and set file name patterns in control_d
        if rs_type == 'dynamic_topography':
            create_no_lith_temp(control_d, master_run_d, rs_replace_d, rs_dir,
                                rs_inp_cfg_suffix, age, timestep)

        # else, no need to adjust files for 'total_topography' runs

        # create new run input .cfg for this restart run
        restart_run_cfg = create_restart_run_cfg(control_d, master_run_cfg_d,
                                                 rs_replace_d, rs_dir,
                                                 rs_inp_cfg_suffix, age,
                                                 timestep)

    # End of loop over restart runs

    # Close up shop
    sys.exit(0)