def generate_predicted_sedimentation_grid(
        time, predict_sedimentation_script, scale_sedimentation_rate, mean_age,
        mean_distance, variance_age, variance_distance, max_age, max_distance,
        age_distance_polynomial_coefficients, output_dir):
    """Run the sediment-prediction script for one reconstruction time.

    Builds the command line for 'predict_sedimentation_script', executes it,
    then renames the generated sedimentation rate and sediment thickness
    files so that 'time' is at the end of the base filename - this way they
    can be imported as time-dependent rasters into GPlates.
    """

    # Prefer 'python3' but fall back to plain 'python' when it is absent.
    interpreter = 'python' if shutil.which('python3') is None else 'python3'

    # NOTE(review): distance_grid_dir, distance_base_name, age_grid_dir,
    # agegrid_filename, agegrid_filename_ext and grid_spacing are module-level
    # globals defined elsewhere in this file.
    args = [
        interpreter,
        predict_sedimentation_script,
        '-d', '{0}/{1}_{2}.nc'.format(distance_grid_dir, distance_base_name, time),
        '-g', '{0}/{1}{2}.{3}'.format(age_grid_dir, agegrid_filename, time,
                                      agegrid_filename_ext),
        '-i', str(grid_spacing),
        '-w',
        '-m', str(mean_age), str(mean_distance),
        '-v', str(variance_age), str(variance_distance),
        '-x', str(max_age), str(max_distance),
        '-f',
    ]
    args += [str(coeff) for coeff in age_distance_polynomial_coefficients]

    # Only sediment rate requires scaling (sediment thickness does not)...
    if scale_sedimentation_rate is not None:
        args += ['-s', str(scale_sedimentation_rate)]

    args += ['--', '{0}/sed_{1}_{2}'.format(output_dir, grid_spacing, time)]

    # Execute the command.
    call_system_command(args)

    # Rename the average sedimentation rate and sediment thickness files so
    # that 'time' is at the end of the base filename - this way we can import
    # them as time-dependent raster into GPlates.
    for ext in ('xy', 'nc'):
        for suffix in ('sed_rate', 'sed_thick'):
            src = '{0}/sed_{1}_{2}_{3}.{4}'.format(
                output_dir, grid_spacing, time, suffix, ext)
            dst = '{0}/{1}_{2}d_{3}.{4}'.format(
                output_dir, suffix, grid_spacing, time, ext)

            # Remove any stale destination file before renaming over it.
            if os.access(dst, os.R_OK):
                os.remove(dst)
            if os.path.exists(src):
                os.rename(src, dst)
コード例 #2
0
def grdcontour2feature(grdfile, clevel, return_polygons=True):
    """Extract the contour at value 'clevel' from a grid as pygplates geometry.

    Runs GMT 'grdcontour' to dump the contour segments to a text file, parses
    the segment coordinates, joins polylines split across the dateline, and
    returns either a list of polygon features (default) or the joined
    polylines.
    """

    # call GMT to get a single contour at the specified value of clevel
    call_system_command(['gmt',
                         'grdcontour',
                         grdfile,
                         '-C+%0.8f' % clevel,
                         '-S4',
                         '-Dcontour_%c.txt',
                         '-V'])

    # Read in the GMT delimited xyz ascii file and build a list of segments,
    # each segment being a list of [lon, lat, ...] coordinate rows.
    polygons = []
    contourlist = []
    # Use a context manager so the file handle is always closed
    # (the original code leaked the handle).
    with open('./contour_C.txt', 'r') as f:
        for line in f:
            if line[0] == '>':
                # A '>' header starts a new segment - flush the previous one.
                if len(contourlist) > 0:
                    polygons.append(contourlist)
                contourlist = []
            else:
                line = line.split()
                contourlist.append([float(j) for j in line])

    # BUG FIX: the final segment has no trailing '>' marker, so it was
    # previously dropped - append it here.
    if len(contourlist) > 0:
        polygons.append(contourlist)

    # Create gplates-format features. The file columns are (lon, lat, ...);
    # pygplates expects (lat, lon), hence the swapped zip columns.
    polyline_features = []
    for p in polygons:
        pf = pygplates.PolylineOnSphere(zip(list(zip(*p))[1], list(zip(*p))[0]))
        polyline_features.append(pf)

    # use join to handle polylines split across dateline
    joined_polyline_features = pygplates.PolylineOnSphere.join(polyline_features)

    if return_polygons:
        # force polylines to be polygons
        joined_polygon_features = []
        for geom in joined_polyline_features:
            polygon = pygplates.Feature()
            polygon.set_geometry(pygplates.PolygonOnSphere(geom))
            joined_polygon_features.append(polygon)

        return joined_polygon_features

    else:
        return joined_polyline_features
def write_grd_file_from_xyz(grd_filename, xyz_filename, grid_spacing, num_grid_longitudes, num_grid_latitudes):
    """Grid an xyz point file into a grid file using GMT 'nearneighbor'.

    For example "nearneighbor output_mean_distance.xy -R-180/180/-90/90
    -I1 -N4 -S1d -Goutput_mean_distance.nc".
    """

    # Search radius is 1.5 times the grid spacing (in degrees).
    search_radius = 1.5 * grid_spacing

    nearneighbor_args = [
        "gmt",
        "nearneighbor",
        xyz_filename,
        "-N4",
        "-S{0}d".format(search_radius),
        "-I{0}".format(grid_spacing),
        "-R{0}/{1}/{2}/{3}".format(-180, 180, -90, 90),
        # Use GMT gridline registration since our input point grid has data
        # points on the grid lines. Gridline registration is the default so
        # we don't need to force pixel registration with "-r".
        "-G{0}".format(grd_filename),
    ]
    call_system_command(nearneighbor_args)
def get_positions_and_scalars(input_points, scalar_grid_filename, max_scalar=None):
    """Sample a scalar grid at the given (lon, lat) points with GMT 'grdtrack'.

    Returns a list of (lon, lat, scalar) tuples. Points outside the grid
    (NaN samples) and unparseable output lines are skipped. If 'max_scalar'
    is given, sampled values are clamped to it.
    """

    # Feed the points to 'grdtrack' via stdin, one "lon lat" pair per line.
    stdin_text = ''.join('{0} {1}\n'.format(lon, lat) for lon, lat in input_points)

    grdtrack_args = ["gmt", "grdtrack", "-nl", "-G{0}".format(scalar_grid_filename)]
    stdout_data = call_system_command(
        grdtrack_args, stdin=stdin_text.encode('utf-8'), return_stdout=True)

    lon_lat_scalar_list = []

    # Read lon, lat and scalar values from the output of 'grdtrack'.
    for line in stdout_data.splitlines():
        # Skip comment lines (output is bytes, hence the b'#' prefix).
        if line.strip().startswith(b'#'):
            continue

        fields = line.split()

        # A whitespace-only line is silently skipped.
        if not fields:
            continue

        if len(fields) < 3:
            print('Ignoring line "{0}" - has fewer than 3 white-space separated numbers.'.format(line), file=sys.stderr)
            continue

        try:
            # Convert strings to numbers.
            lon = float(fields[0])
            lat = float(fields[1])
            # 'grdtrack' appends the sampled scalar as the last column.
            scalar = float(fields[-1])
        except ValueError:
            print('Ignoring line "{0}" - cannot read floating-point lon, lat and scalar values.'.format(line), file=sys.stderr)
            continue

        # If the point is outside the grid then the scalar grid returns 'NaN'.
        if math.isnan(scalar):
            continue

        # Clamp to max value if requested.
        if max_scalar is not None and scalar > max_scalar:
            scalar = max_scalar

        lon_lat_scalar_list.append((lon, lat, scalar))

    return lon_lat_scalar_list
コード例 #5
0
    return alldata


def write_alldata_file(alldata_filename, alldata):
    """Write rows of values to a text file: space-separated, one row per line."""
    with open(alldata_filename, 'w') as out_file:
        out_file.writelines(
            ' '.join(str(value) for value in row) + '\n' for row in alldata)


# Resample the mean distance grid at 1 degree spacing.
# Restrict latitude range to -70/80 because that corresponds to the extent of the sediment thickness grid data.
# Note: We don't need to use "-T" to convert from pixel to grid registration since distance grids are already in grid registration
# (grid registration gives us pixel values at 1 degree integer lon/lat locations used by other data in 'alldata').
call_system_command([
    "gmt", "grdsample", "-I1", "-fg", "-R-180/180/-70/80",
    mean_distance_grid_input_filename,
    "-G{0}".format(mean_distance_grid_output_filename)
])

# Convert grd to xyz (one "lon lat distance" triplet per output line).
mean_distance_output = call_system_command(
    ["gmt", "grd2xyz", "-fg", mean_distance_grid_output_filename],
    return_stdout=True)

# Parse the 'grd2xyz' output into (lon, lat, mean_distance) float tuples.
# NOTE(review): this assumes 'call_system_command' returned text (str),
# not bytes - verify against its definition.
mean_distance_data = []
for line in mean_distance_output.strip().split('\n'):
    line = line.split()
    mean_distance_data.append((float(line[0]), float(line[1]), float(line[2])))
#print(mean_distance_data)

# Create a dictionary mapping lon/lat locations to mean distance.
コード例 #6
0
def generate_distance_grid(time):
    """Generate the ocean-basin mean-distance grid for one reconstruction time.

    Runs 'ocean_basin_proximity.py' with the module-level rotation, proximity
    and topology files, then clamps the resulting mean-distance grid to
    'proximity_threshold_kms' and renames it so that 'time' is at the end of
    the base filename (so it can be imported as a time-dependent raster into
    GPlates version 2.0 and earlier). Intermediate '.xy' and unclamped '.nc'
    outputs are removed.
    """

    # Prefer 'python3' but fall back to plain 'python' when it is absent.
    py_cmd = 'python3'
    if shutil.which('python3') is None:
        py_cmd = 'python'

    command_line = [py_cmd, 'ocean_basin_proximity.py']
    command_line.extend(['-r'])
    command_line.extend('{0}'.format(rotation_filename)
                        for rotation_filename in rotation_filenames)
    command_line.extend(['-m'])
    command_line.extend(
        '{0}'.format(proximity_features_file)
        for proximity_features_file in proximity_features_files)
    command_line.extend(['-s'])
    command_line.extend('{0}'.format(topology_filename)
                        for topology_filename in topology_filenames)
    command_line.extend([
        '-g',
        '{0}/{1}{2}.{3}'.format(age_grid_dir, age_grid_filename, time,
                                age_grid_filename_ext),
        # BUG FIX: pass the option and its value as separate argv elements.
        # The original fused them into the single element '-y <time>', which
        # only works via argparse's attached-value parsing and passes the
        # value with a leading space; every other option here is separate.
        '-y',
        '{0}'.format(time),
        '-n',
        # Use all feature types in proximity file (according to Dietmar)...
        #'-b',
        #'PassiveContinentalBoundary',
        '-x',
        '{0}'.format(max_time),
        '-t',
        '1',
        '-i',
        '{0}'.format(grid_spacing),
        #'-q',
        #str(proximity_threshold_kms),
        #'-d', # output distance with time
        '-j',
        '-w',
        '-c',
        str(1),
        '{0}/distance_{1}_{2}'.format(output_dir, grid_spacing, time)
    ])

    print('Time:', time)

    #print(' '.join(command_line))
    call_system_command(command_line)

    # Clamp the mean distance grids (and remove xy files).
    # Also rename the mean distance grids so that 'time' is at the end of the base filename -
    # this way we can import them as time-dependent raster into GPlates version 2.0 and earlier.
    #

    src_mean_distance_basename = '{0}/distance_{1}_{2}_mean_distance'.format(
        output_dir, grid_spacing, time)
    dst_mean_distance_basename = '{0}/mean_distance_{1}d_{2}'.format(
        output_dir, grid_spacing, time)

    # The intermediate '.xy' output is not needed - remove it if present.
    src_mean_distance_xy = src_mean_distance_basename + '.xy'
    if os.access(src_mean_distance_xy, os.R_OK):
        os.remove(src_mean_distance_xy)

    src_mean_distance_grid = src_mean_distance_basename + '.nc'
    dst_mean_distance_grid = dst_mean_distance_basename + '.nc'

    # Remove any stale destination grid before writing the clamped one.
    if os.access(dst_mean_distance_grid, os.R_OK):
        os.remove(dst_mean_distance_grid)

    # Clamp mean distances to 'proximity_threshold_kms' via GMT 'grdmath MIN'.
    call_system_command([
        "gmt", "grdmath", "-fg",
        str(proximity_threshold_kms), src_mean_distance_grid, "MIN", "=",
        dst_mean_distance_grid
    ])
    os.remove(src_mean_distance_grid)
コード例 #7
0
#
# Convert any NetCDF3 grids to NetCDF4 (since it's compressed).
# NOTE(review): despite the original "grdconvert" wording, the code below
# actually runs GMT 'grdfilter' - it low-pass filters each grid and writes
# the (NetCDF4) output grid named by age.
#

# Source and destination directories (Windows paths, hence the raw strings).
input_path = r'E:\Users\John\Downloads\dynamic_topography_reconstruction\models\M7\Dynamic_topography\MantleFrame'
output_path = r'C:\Users\John\Development\Usyd\gplates\source-code\pygplates_scripts\Other\Backstrip\backstrip\bundle_data\dynamic_topography\models\Muller2017\M7'
grid_ext = 'nc'
grid_spacing_degrees = 1.0

grid_filenames = glob.glob(os.path.join(input_path, '*.{0}'.format(grid_ext)))
for grid_filename in grid_filenames:
    # Search for the last integer in the full path and assume that is the age.
    age = float(re.findall(r'\d+', grid_filename)[-1])
    # Output grid is named by age with two decimal places, eg, '10.00.nc'.
    output_grid_filename = os.path.join(output_path,
                                        '{0:.2f}.{1}'.format(age, grid_ext))
    call_system_command([
        'gmt',
        'grdfilter',
        grid_filename,
        '-G{0}'.format(output_grid_filename),
        '-D4',  # assumes distance flag 4 (filter width in km) - TODO confirm against GMT grdfilter docs
        '-Fc{0}'.format(
            200.0 * grid_spacing_degrees
        ),  # 200km filter width (100km radius) per degree of grid spacing
        '-I{0}'.format(grid_spacing_degrees)
    ])
コード例 #8
0
def get_raster_grid_positions_and_scalars(raster_filename, nodata_value=None):
    """
    Returns a 2-tuple of lists.
    The first is a list of pygplates.PointOnSphere at all pixel node locations in raster.
    The second is a list of raster scalar values at those pixel node locations.
    Both lists have the same length.

    Note that this excludes points that have the no-data value.
    If 'nodata_value' is not specified then it defaults to NaN.
    """

    # Build the GMT 'grd2xyz' command ('-s' suppresses rows with NaN values).
    grd2xyz_args = ["gmt", "grd2xyz", "-s"]
    if nodata_value is not None:
        # Treat 'nodata_value' as the input no-data sentinel.
        grd2xyz_args.append("-di{0}".format(nodata_value))
    grd2xyz_args.append(raster_filename)

    stdout_data = call_system_command(grd2xyz_args, return_stdout=True)

    # Accumulate one point and one scalar per parsed row.
    raster_grid_points = []
    raster_grid_scalars = []

    # Read lon, lat and scalar values from the output of 'grd2xyz'.
    for line in stdout_data.splitlines():
        # Skip comment lines.
        if line.strip().startswith('#'):
            continue

        columns = line.split()

        # A whitespace-only line is silently skipped.
        if not columns:
            continue

        if len(columns) < 3:
            print(
                'Ignoring line "{0}" - has fewer than 3 white-space separated numbers.'
                .format(line),
                file=sys.stderr)
            continue

        try:
            # Convert strings to numbers.
            lon = float(columns[0])
            lat = float(columns[1])
            # The scalar is the last column appended by 'grd2xyz'.
            # Note: no NaN values expected here because '-s' removed them.
            scalar = float(columns[-1])
        except ValueError:
            print(
                'Ignoring line "{0}" - cannot read floating-point lon, lat and scalar values.'
                .format(line),
                file=sys.stderr)
            continue

        raster_grid_points.append(pygplates.PointOnSphere(lat, lon))
        raster_grid_scalars.append(scalar)

    return raster_grid_points, raster_grid_scalars